gpt4all
gpt4all copied to clipboard
Internet Access ?
Is there a way to have this beast access the internet?
He wants to make it act like Bing's version, which I assume is able to fetch online search results?
You can make that by yourself, just use a browser driver to search on google and gather the responses you like, then process them and make shorter versions, after that you send to the AI as part of the info available.
I've not tried this technique, and don't know yet the power of this model, but this should work if it can accept relatively larger prompts without problems.
You can make that by yourself, just use a browser driver to search on google and gather the responses you like, then process them and make shorter versions, after that you send to the AI as part of the info available.
I've not tried this technique, and don't know yet the power of this model, but this should work if it can accept relatively larger prompts without problems.
If he could, you think he would ask it? im just wondering lol
You can make that by yourself, just use a browser driver to search on google and gather the responses you like, then process them and make shorter versions, after that you send to the AI as part of the info available. I've not tried this technique, and don't know yet the power of this model, but this should work if it can accept relatively larger prompts without problems.
If he could, you think he would ask it? im just wondering lol
This is a good point, but look, if you have a question and someone gives you a tip, even if that tip doesn't solve your problem by itself, now you have at least a north of how to do it, so from what he was told he could start looking for web drivers to do this, or hire some freelancer to do it for him. Do not you agree?
You can make that by yourself, just use a browser driver to search on google and gather the responses you like, then process them and make shorter versions, after that you send to the AI as part of the info available. I've not tried this technique, and don't know yet the power of this model, but this should work if it can accept relatively larger prompts without problems.
If he could, you think he would ask it? im just wondering lol
This is a good point, but look, if you have a question and someone gives you a tip, even if that tip doesn't solve your problem by itself, now you have at least a north of how to do it, so from what he was told he could start looking for web drivers to do this, or hire some freelancer to do it for him. Do not you agree?
Absolutely.
In these 10 minutes I started something just by testing; it's in TypeScript on Node.js. It is very far from a perfect and stable way to do this, because it still needs to filter the data content better, make the AI "understand" this data better, and needs coding improvements such as clean code:
The word to search must be in *(*( Example test )*)*
import { GPT4All } from 'gpt4all';
import readline from 'readline';
import axios from 'axios';
import cheerio from 'cheerio';
// Endpoint for scraping Google's HTML search results page.
const GOOGLE_SEARCH_URL = 'https://www.google.com/search';
// Desktop-browser User-Agent so the search engine serves full HTML
// instead of rejecting the request as an automated client.
const headers = {
'User-Agent': 'Mozilla/5.0 (Windows NT 10.0; Win64; x64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/58.0.3029.110 Safari/537.36',
};
/**
 * Fetches a page and concatenates the text of content-bearing tags.
 * @param {string} url - Page to fetch.
 * @param {string} query - Search query (currently unused; kept for interface stability).
 * @returns {Promise<{pageTitle: string, textContent: string} | null>} null on fetch error.
 *   NOTE(review): despite its name, `pageTitle` is the final post-redirect URL,
 *   not the document <title> — preserved for backward compatibility.
 */
async function extractContent(url, query) {
  try {
    const response = await axios.get(url, { headers });
    const $ = cheerio.load(response.data);
    // axios exposes the resolved (post-redirect) URL here.
    const pageTitle = response.request.res.responseUrl;
    let textContent = '';
    const relevantTags = ['p', 'li', 'h1', 'h2', 'h3', 'h4', 'h5', 'h6'];
    for (const tag of relevantTags) {
      // Skip obvious boilerplate containers.
      const elements = $(tag).not('.ads, #sidebar, .header, .footer');
      for (const element of elements) {
        // Extract once instead of three separate .text() calls.
        const text = $(element).text().trim();
        // The original tested `.toLowerCase()` truthiness, which was a no-op
        // and appended bare '\n' for whitespace-only nodes; skip empty text.
        if (text) {
          textContent += text + '\n';
        }
      }
    }
    return { pageTitle, textContent };
  } catch (error) {
    console.error(`Error fetching content: ${error}`);
    return null;
  }
}
/**
 * Runs a Google search and returns the absolute result URLs found on the page.
 * @param {string} query - Search terms.
 * @returns {Promise<string[]>} Deduplicated absolute URLs; [] on error.
 */
async function searchGoogle(query) {
  try {
    const response = await axios.get(GOOGLE_SEARCH_URL, {
      params: { q: query },
      headers,
    });
    const $ = cheerio.load(response.data);
    // Collect every anchor href on the results page.
    const results = $('a')
      .map((i, el) => $(el).attr('href'))
      .get();
    console.log("results:", results);
    // NOTE(review): Google often serves relative "/url?q=..." result links,
    // so this absolute-URL filter may keep mostly navigation links — verify.
    // Dedupe so the same page is not fetched (and fed to the model) twice.
    const filteredUrls = [...new Set(
      results.filter((url) =>
        typeof url === 'string' &&
        (url.startsWith('https://') || url.startsWith('http://'))),
    )];
    console.log("urls:", filteredUrls);
    return filteredUrls;
  } catch (error) {
    console.error(`Error searching Google: ${error}`);
    return [];
  }
}
/**
 * Interactive REPL: reads a question, optionally enriches it with web content
 * when the question contains a *(*( keyword )*)* marker, then prompts GPT4All.
 */
const main = async () => {
  const gpt4all = new GPT4All('gpt4all-lora-quantized', true);
  await gpt4all.init();
  await gpt4all.open();
  const rl = readline.createInterface({
    input: process.stdin,
    output: process.stdout,
  });
  const askQuestion = async () => {
    rl.question('Enter your question (type "exit" to quit): ', async (question) => {
      if (question.trim().toLowerCase() === 'exit') {
        gpt4all.close();
        rl.close();
        return;
      }
      let message = question;
      // Keywords wrapped in *(*( ... )*)* trigger a web search.
      const queryMatch = question.match(/\*\(\*(.+?)\*\)\*/);
      const query = queryMatch ? queryMatch[1].toLowerCase() : '';
      try {
        if (query) {
          const urls = await searchGoogle(query);
          console.log("urls:", urls);
          if (urls && urls.length > 0) {
            // Fetch all pages in parallel.
            const promises = urls.map(async (url) => {
              const content = await extractContent(url, query);
              return content ? content.textContent : undefined;
            });
            const contents = await Promise.all(promises);
            console.log("contents:", contents);
            // Drop failed fetches: joining `contents` directly would embed
            // the literal string "undefined" in the prompt (original bug —
            // the filtered array was computed but never used).
            const textArray = contents.filter((content) => content);
            if (textArray.length > 0) {
              message = `${message}\n\n also take this data and absorb your knowledge, you dont need use now what dont make sense, there will be noise, focus on what is repeated and adds knowledge : \n\n${textArray.join('\n\n')}.\n\n`;
            }
          }
        }
        console.log(message);
        const response = await gpt4all.prompt(message);
        console.log(`Response: ${response}\n`);
      } catch (error) {
        console.error(`Error getting content: ${error}`);
      }
      // Loop for the next question.
      askQuestion();
    });
  };
  askQuestion();
};
main().catch(console.error);
This kind of works for me [still testing it] to fetch information from the internet.
// First part of index.js
const { GPT4All } = require('gpt4all');
const readline = require('readline');
const axios = require('axios');
const cheerio = require('cheerio');
// DuckDuckGo's no-JavaScript HTML search endpoint (easier to scrape than the SPA).
const DUCKDUCKGO_SEARCH_URL = 'https://duckduckgo.com/html/';
// Desktop-browser User-Agent so the search engine serves full HTML
// instead of rejecting the request as an automated client.
const headers = {
'User-Agent': 'Mozilla/5.0 (Windows NT 10.0; Win64; x64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/58.0.3029.110 Safari/537.36',
};
/**
 * Fetches a page and concatenates the text of content-bearing tags.
 * @param {string} url - Page to fetch.
 * @param {string} query - Search query (currently unused; kept for interface stability).
 * @returns {Promise<{pageTitle: string, textContent: string} | null>} null on fetch error.
 *   NOTE(review): despite its name, `pageTitle` is the final post-redirect URL,
 *   not the document <title> — preserved for backward compatibility.
 */
async function extractContent(url, query) {
  try {
    const response = await axios.get(url, { headers });
    const $ = cheerio.load(response.data);
    // axios exposes the resolved (post-redirect) URL here.
    const pageTitle = response.request.res.responseUrl;
    let textContent = '';
    const relevantTags = ['p', 'li', 'h1', 'h2', 'h3', 'h4', 'h5', 'h6'];
    for (const tag of relevantTags) {
      // Skip obvious boilerplate containers.
      const elements = $(tag).not('.ads, #sidebar, .header, .footer');
      for (const element of elements) {
        // Extract once instead of three separate .text() calls.
        const text = $(element).text().trim();
        // The original tested `.toLowerCase()` truthiness, which was a no-op
        // and appended bare '\n' for whitespace-only nodes; skip empty text.
        if (text) {
          textContent += text + '\n';
        }
      }
    }
    return { pageTitle, textContent };
  } catch (error) {
    console.error(`Error fetching content: ${error}`);
    return null;
  }
}
/**
 * Searches DuckDuckGo's HTML endpoint and returns absolute result URLs.
 * @param {string} query - Search terms.
 * @returns {Promise<string[]>} Deduplicated absolute URLs; [] on error.
 */
async function searchDuckDuckGo(query) {
  console.log(`Searching DuckDuckGo for "${query}"...`);
  try {
    const response = await axios.get(DUCKDUCKGO_SEARCH_URL, {
      params: { q: query },
      headers,
    });
    const $ = cheerio.load(response.data);
    // NOTE(review): DDG's HTML page typically puts the link in `a.result__a`
    // inside the title, and hrefs are often protocol-relative redirect URLs
    // ("//duckduckgo.com/l/?uddg=...") which the absolute filter below would
    // drop — confirm these selectors against a live response.
    const results = $('h2.result__title')
      .map((i, el) => {
        const linkElement = $(el).find('a.result__url');
        return linkElement.attr('href');
      })
      .get();
    console.log("results:", results);
    // Dedupe so the same page is not fetched (and fed to the model) twice.
    const filteredUrls = [...new Set(
      results.filter((url) =>
        typeof url === 'string' &&
        (url.startsWith('https://') || url.startsWith('http://'))),
    )];
    console.log("urls:", filteredUrls);
    return filteredUrls;
  } catch (error) {
    console.error(`Error searching DuckDuckGo: ${error}`);
    return [];
  }
}
// Second part of index.js
/**
 * Interactive REPL: reads a question, optionally enriches it with web content
 * when the question contains a *(*( keyword )*)* marker, then prompts GPT4All.
 */
const main = async () => {
  const gpt4all = new GPT4All('gpt4all-lora-quantized', true);
  await gpt4all.init();
  await gpt4all.open();
  const rl = readline.createInterface({
    input: process.stdin,
    output: process.stdout,
  });
  const askQuestion = async () => {
    rl.question('Enter your question (type "exit" to quit): ', async (question) => {
      if (question.trim().toLowerCase() === 'exit') {
        gpt4all.close();
        rl.close();
        return;
      }
      let message = question;
      // Keywords wrapped in *(*( ... )*)* trigger a web search.
      const queryMatch = question.match(/\*\(\*(.+?)\*\)\*/);
      const query = queryMatch ? queryMatch[1].toLowerCase() : '';
      try {
        if (query) {
          const urls = await searchDuckDuckGo(query);
          console.log("urls:", urls);
          if (urls && urls.length > 0) {
            // Fetch all pages in parallel.
            const promises = urls.map(async (url) => {
              const content = await extractContent(url, query);
              return content ? content.textContent : undefined;
            });
            const contents = await Promise.all(promises);
            console.log("contents:", contents);
            // Drop failed fetches: joining `contents` directly would embed
            // the literal string "undefined" in the prompt (original bug —
            // the filtered array was computed but never used).
            const textArray = contents.filter((content) => content);
            if (textArray.length > 0) {
              message = `${message}\n\n also take this data and absorb your knowledge, you dont need use now what dont make sense, there will be noise, focus on what is repeated and adds knowledge : \n\n${textArray.join('\n\n')}.\n\n`;
            }
          }
        }
        console.log(message);
        const response = await gpt4all.prompt(message);
        console.log(`Response: ${response}\n`);
      } catch (error) {
        console.error(`Error getting content: ${error}`);
      }
      // Loop for the next question.
      askQuestion();
    });
  };
  askQuestion();
};
main().catch(console.error);
I've updated the code, but that doesn't really work well, because the AI isn't powerful enough to use the filtered data from the internet, so it just gets lost and doesn't know what to do.
Keywords must stay between *(*( Example Test )*)*
Example: Talk about *(*(Typescript)*)*
import { GPT4All } from 'gpt4all';
import readline from 'readline';
import axios from 'axios';
import cheerio from 'cheerio';
// NOTE(review): the name says Google but this is DuckDuckGo's no-JavaScript
// HTML endpoint — kept as-is since searchGoogle() references this constant.
const GOOGLE_SEARCH_URL = 'https://duckduckgo.com/html/';
// Desktop-browser User-Agent so the search engine serves full HTML
// instead of rejecting the request as an automated client.
const headers = {
'User-Agent': 'Mozilla/5.0 (Windows NT 10.0; Win64; x64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/58.0.3029.110 Safari/537.36',
};
/**
 * Fetches a page and concatenates the text of content-bearing tags.
 * @param {string} url - Page to fetch.
 * @param {string} query - Search query (currently unused; kept for interface stability).
 * @returns {Promise<{pageTitle: string, textContent: string} | null>} null on fetch error.
 *   NOTE(review): despite its name, `pageTitle` is the final post-redirect URL,
 *   not the document <title> — preserved for backward compatibility.
 */
async function extractContent(url, query) {
  try {
    const response = await axios.get(url, { headers });
    const $ = cheerio.load(response.data);
    // axios exposes the resolved (post-redirect) URL here.
    const pageTitle = response.request.res.responseUrl;
    let textContent = '';
    const relevantTags = ['p', 'li', 'h1', 'h2', 'h3', 'h4', 'h5', 'h6'];
    for (const tag of relevantTags) {
      // Skip obvious boilerplate containers.
      const elements = $(tag).not('.ads, #sidebar, .header, .footer');
      for (const element of elements) {
        // Extract once instead of three separate .text() calls.
        const text = $(element).text().trim();
        // The original tested `.toLowerCase()` truthiness, which was a no-op
        // and appended bare '\n' for whitespace-only nodes; skip empty text.
        if (text) {
          textContent += text + '\n';
        }
      }
    }
    return { pageTitle, textContent };
  } catch (error) {
    console.error(`Error fetching content: ${error}`);
    return null;
  }
}
/**
 * Searches the configured engine (GOOGLE_SEARCH_URL — actually DuckDuckGo's
 * HTML endpoint here) and returns the absolute URLs found on the page.
 * @param {string} query - Search terms.
 * @returns {Promise<string[]>} Deduplicated absolute URLs; [] on error.
 */
async function searchGoogle(query) {
  try {
    const response = await axios.get(GOOGLE_SEARCH_URL, {
      params: { q: query },
      headers,
    });
    const $ = cheerio.load(response.data);
    // Collect every anchor href on the results page.
    const results = $('a')
      .map((i, el) => $(el).attr('href'))
      .get();
    console.log("results:", results);
    // NOTE(review): DDG's HTML page often uses protocol-relative redirect
    // links ("//duckduckgo.com/l/?uddg=..."), which this filter drops — verify.
    // Dedupe so the same page is not fetched (and fed to the model) twice.
    const filteredUrls = [...new Set(
      results.filter((url) =>
        typeof url === 'string' &&
        (url.startsWith('https://') || url.startsWith('http://'))),
    )];
    console.log("urls:", filteredUrls);
    return filteredUrls;
  } catch (error) {
    console.error(`Error searching Google: ${error}`);
    return [];
  }
}
// Strips every '(' and ')' character from the given text; used to clean the
// *(*( ... )*)* keyword markers out of the prompt before sending it to the AI.
function removeParentheses(text) {
  let cleaned = '';
  for (const ch of text) {
    if (ch !== '(' && ch !== ')') {
      cleaned += ch;
    }
  }
  return cleaned;
}
/**
 * Interactive REPL: reads a question, optionally enriches it with web content
 * when the question contains a *(*( keyword )*)* marker, then prompts GPT4All.
 */
const main = async () => {
  const gpt4all = new GPT4All('gpt4all-lora-quantized', true);
  await gpt4all.init();
  await gpt4all.open();
  const rl = readline.createInterface({
    input: process.stdin,
    output: process.stdout,
  });
  const askQuestion = async () => {
    rl.question('Enter your question (type "exit" to quit): ', async (question) => {
      if (question.trim().toLowerCase() === 'exit') {
        gpt4all.close();
        rl.close();
        return;
      }
      let message = question;
      // Keywords wrapped in *(*( ... )*)* trigger a web search.
      const queryMatch = question.match(/\*\(\*(.+?)\*\)\*/);
      const query = queryMatch ? queryMatch[1].toLowerCase() : '';
      try {
        if (query) {
          const urls = await searchGoogle(query);
          if (urls && urls.length > 0) {
            // Fetch all pages in parallel.
            const promises = urls.map(async (url) => {
              const content = await extractContent(url, query);
              return content ? content.textContent : undefined;
            });
            const contents = await Promise.all(promises);
            console.log("contents:", contents);
            // Drop failed fetches: joining `contents` directly would embed
            // the literal string "undefined" in the prompt (original bug —
            // the filtered array was computed but never used).
            const textArray = contents.filter((content) => content);
            if (textArray.length > 0) {
              // Strip the keyword markers so the model sees a clean question.
              message = removeParentheses(message);
              message = `${message}\n\n based your answer in relevant info found on internet, maybe will have random things at middle, but be guide by my initial question: \n\n${textArray.join('\n\n')}\n\n`;
            }
          }
        }
        console.log(message);
        const response = await gpt4all.prompt(message);
        console.log(`Response: ${response}\n`);
      } catch (error) {
        console.error(`Error getting content: ${error}`);
      }
      // Loop for the next question.
      askQuestion();
    });
  };
  askQuestion();
};
main().catch(console.error);
Where to implement this code?
I also have the same question, how do I run this code in the app?