Refactor context out of chatgpt command
parent 112e960135
commit 1abaa24a2b
@@ -3,5 +3,7 @@
   "port": 6697,
   "nick": "chatgpt",
   "channels": ["#chatgpt"],
-  "openaiApiKey": "[redacted]"
+  "openaiApiKey": "[redacted]",
+  "initialSystemMessage": "You are a helpful assistant.",
+  "alwaysRemember": false
 }
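For reference, the complete config after this change would look roughly like the following. The server value and channel list are placeholders; initialSystemMessage seeds the conversation and alwaysRemember controls whether a !chat command clears the saved context first (both keys are read in index.js below).

{
  "server": "irc.example.net",
  "port": 6697,
  "nick": "chatgpt",
  "channels": ["#chatgpt"],
  "openaiApiKey": "[redacted]",
  "initialSystemMessage": "You are a helpful assistant.",
  "alwaysRemember": false
}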
index.js (224 changed lines)
@@ -4,12 +4,17 @@ const irc = require('irc');
 const axios = require('axios');
 const config = require('./config.json');
 
+const seed_messages = [{role: 'system', content: config.initialSystemMessage}];
+
 // context is a list of strings that are used to seed the chatgpt api and it's responses
 class Context {
-  messages = [];
-  currentLine = '';
   currentResponse = '';
 
+  constructor(messages = seed_messages) {
+    this.messages = messages;
+  }
+
   add_user_prompt(message) {
     this.messages.push({ role: 'user', content: message });
   }
@@ -18,41 +23,41 @@ class Context {
     this.messages.push({ role: 'assistant', content: message });
   }
 
-  append_to_line(message) {
-    this.currentLine += message;
-  }
-
-  end_line() {
-    const the_line = this.currentLine;
-    this.currentResponse += `${the_line}\n\n`;
-    this.currentLine = '';
-    return the_line;
+  end_line(line) {
+    this.currentResponse += `${line}\n\n`;
+    return line;
   }
 
   finish_current_response() {
     this.add_assistant_message(this.currentResponse);
     const theLine = this.currentLine;
     this.currentResponse = '';
-    this.currentLine = '';
+    this.save_history();
     return theLine;
   }
 
-  is_response_in_progress() {
-    return this.currentResponse !== '' || this.currentLine !== '';
+  save_history() {
+    const prettyData = JSON.stringify(this.messages, null, 2);
+    fs.writeFileSync('./messages.json', prettyData);
   }
 
-  peek_line() {
-    return this.currentLine;
+  is_response_in_progress() {
+    return this.currentResponse !== '';
   }
 
   clear() {
     this.messages = [];
     this.currentResponse = '';
-    this.currentLine = '';
   }
 }
 
-const context = new Context();
+let context = new Context(savedMessages);
+if (fs.existsSync('./messages.json')) {
+  savedMessages = require('./messages.json');
+  context = new Context(savedMessages);
+} else {
+  context = new Context(seed_messages);
+}
 
 const client = new irc.Client(config.server, config.nick, {
   channels: config.channels,
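The net effect of the Context changes above is that the bot's memory now survives restarts: finish_current_response() calls save_history(), which writes the message list to ./messages.json, and startup reuses that file when it exists. A minimal standalone sketch of that round trip (the helper names here are illustrative, not the bot's own; only the ./messages.json path and the JSON.stringify(..., null, 2) formatting come from the diff):

const fs = require('fs');

const HISTORY_FILE = './messages.json'; // same path the diff hard-codes

// Mirrors save_history(): pretty-print the message list to disk.
function saveHistory(messages) {
  fs.writeFileSync(HISTORY_FILE, JSON.stringify(messages, null, 2));
}

// Mirrors the startup branch: reuse saved history if present,
// otherwise start from the configured system prompt.
function loadHistoryOrSeed(seedMessages) {
  if (fs.existsSync(HISTORY_FILE)) {
    return JSON.parse(fs.readFileSync(HISTORY_FILE, 'utf8'));
  }
  return seedMessages;
}

// Example round trip.
const seed = [{ role: 'system', content: 'You are a helpful assistant.' }];
let messages = loadHistoryOrSeed(seed);
messages.push({ role: 'user', content: 'hello' });
saveHistory(messages);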
@@ -60,101 +65,114 @@ const client = new irc.Client(config.server, config.nick, {
 
 // listen for messages that start with !chat and call the chatgpt api with a callback that prints the response line by line
 client.addListener('message', async (from, to, message) => {
-  is_chat_cmd = message.startsWith('!chat');
-  is_cont_cmd = message.startsWith('!cont');
+  let is_chat_cmd = message.startsWith('!chat');
+  let is_cont_cmd = message.startsWith('!cont');
 
   if (is_chat_cmd || is_cont_cmd) {
-    if(context.is_response_in_progress()) { return; }
-    if(is_chat_cmd) {
+    if (context.is_response_in_progress()) {
+      message(`(chat from ${from} ignored, response in progress)`)
+      return;
+    }
+    if(is_chat_cmd && !config.alwaysRemember) {
       context.clear();
     }
+    context.add_user_prompt(query);
     const query = message.slice(6);
-    chatgpt(query, (line) => {
-      client.say(to, line);
-    });
+    try {
+      await chatgpt(query, context.messages, handleChatGPTResponseLine);
+      context.finish_current_response();
+    } catch (e) {
+      console.log(e);
+      client.say(to, 'Error: ' + e);
+    }
 
+    function handleChatGPTResponseLine(line) {
+      context.end_line(line);
+      client.say(to, line);
+    }
   }
 });
 
 // function that calls the chatgpt streaming api (with server send events) and calls the callback function for each line
-async function chatgpt(query, callback) {
-  // a very primitive mutex to prevent multiple calls to the api at once
-  if(context.is_response_in_progress()) { return; }
-  context.add_user_prompt(query);
+function chatgpt(query, messages, callback) {
   const apiUrl = 'https://api.openai.com/v1/chat/completions';
-  let response = null;
-  try {
-    response = await axios.post(apiUrl, {
-      messages: context.messages,
-      model: 'gpt-3.5-turbo',
-      stream: true,
-    }, {
-      headers: {
-        Authorization: `Bearer ${config.openaiApiKey}`,
-        'Content-Type': 'application/json',
-      },
-      responseType: 'stream',
-    });
-  } catch(error) {
-    if (error.response) {
-      // The request was made and the server responded with a status code
-      // that falls out of the range of 2xx
-      console.log(error.toJSON());
-    } else if (error.request) {
-      // The request was made but no response was received
-      // `error.request` is an instance of XMLHttpRequest in the browser and an instance of
-      // http.ClientRequest in node.js
-      console.log(error.request);
-    } else {
-      // Something happened in setting up the request that triggered an Error
-      console.log('Error', error.message);
-    }
-    return;
-  }
+  let currentLine = '';
 
-  response.data.on('data', (event) => {
-    let data = event.toString();
-    let parts = data.split('\n');
-    // parse if starts with data:
-    for(part of parts) {
-      console.log(part);
-      if(part === 'data: [DONE]') {
-        callback(context.finish_current_response());
-      } else if(part.startsWith('data: ')) {
-        let jsonString = part.slice(part.indexOf('{'), part.lastIndexOf('}') + 1);
-        try {
-          let json = JSON.parse(jsonString);
-          let chunk = json.choices[0].delta.content;
-          if (!chunk) {
-            continue;
-          }
-          //split the chunk into lines leaving the delimiter in the array
-          const lines = chunk.split(/\r?\n/); // split by new lines
-
-          let hasStartNewline = chunk.startsWith("\n");
-          let hasEndNewline = chunk.endsWith("\n");
-
-          if(hasStartNewline) {
-            callback(context.end_line())
-          }
-
-          for (let i = 0; i < lines.length - 1; i++) {
-            context.append_to_line(lines[i]);
-            callback(context.end_line());
-          }
-
-          context.append_to_line(lines[lines.length - 1]);
-
-          if(hasEndNewline) {
-            callback(context.end_line());
-          }
-
-          if (context.peek_line().length > 400) {
-            callback(context.end_line());
-          }
-        } catch (e) {
-          console.log(e);
+  return new Promise((resolve, reject) => {
+    axios.post(apiUrl, {
+      messages: messages,
+      model: 'gpt-3.5-turbo',
+      stream: true,
+    }, {
+      headers: {
+        Authorization: `Bearer ${config.openaiApiKey}`,
+        'Content-Type': 'application/json',
+      },
+      responseType: 'stream',
+    }).then(response => {
+      response.data.on('data', (event) => {
+        let data = event.toString();
+        let parts = data.split('\n');
+        // parse if starts with data:
+        for (let part of parts) {
           console.log(part);
+
+          if (part === 'data: [DONE]') {
+            callback(currentLine);
+            resolve();
+          } else if (part.startsWith('data: ')) {
+            let jsonString = part.slice(part.indexOf('{'), part.lastIndexOf('}') + 1);
+
+            try {
+              let json = JSON.parse(jsonString);
+              let chunk = json.choices[0].delta.content;
+
+              if (!chunk) {
+                continue;
+              }
+
+              const lines = chunk.split(/\r?\n/);
+              let hasStartNewline = chunk.startsWith("\n");
+              let hasEndNewline = chunk.endsWith("\n");
+
+              if (hasStartNewline) {
+                callback(currentLine);
+                currentLine = '';
+              }
+
+              for (let i = 0; i < lines.length - 1; i++) {
+                currentLine += lines[i];
+                callback(currentLine);
+                currentLine = '';
+              }
+
+              currentLine += lines[lines.length - 1];
+
+              if (hasEndNewline) {
+                callback(currentLine);
+                currentLine = '';
+              }
+
+              if (currentLine.length > 400) {
+                callback(currentLine);
+                currentLine = '';
+              }
+            } catch (e) {
+              console.log(e);
+              console.log(part);
+            }
+          }
         }
+      });
+    }).catch(error => {
+      if (error.response) {
+        console.log(error.toJSON());
+      } else if (error.request) {
+        console.log(error.request);
+      } else {
+        console.log('Error', error.message);
       }
-    }
+      reject(error);
+    });
   });
 }
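For anyone following the streaming logic in the rewritten chatgpt() above: the response arrives as server-sent events, each chunk is split on newlines into data: records, each record's choices[0].delta.content fragment is appended to currentLine, and the callback fires whenever a newline (or the 400-character cap) completes a line, with data: [DONE] flushing whatever is left. Below is a standalone sketch of just that parsing step, using a made-up payload instead of a live axios stream; the prefix-length slice here is a simplification of the diff's indexOf('{') slicing.

// Sample SSE payload; the real one comes from axios with responseType: 'stream'.
const sampleEvent =
  'data: {"choices":[{"delta":{"content":"Hello"}}]}\n' +
  'data: {"choices":[{"delta":{"content":" world, how\\nare you?"}}]}\n' +
  'data: [DONE]\n';

let currentLine = '';
const say = (line) => console.log('flush:', line); // stands in for client.say

for (const part of sampleEvent.split('\n')) {
  if (part === 'data: [DONE]') {
    say(currentLine); // flush the final partial line
    currentLine = '';
  } else if (part.startsWith('data: ')) {
    const json = JSON.parse(part.slice('data: '.length));
    const chunk = json.choices[0].delta.content;
    if (!chunk) continue;
    // A newline inside a chunk ends the current IRC line, as in the diff.
    const lines = chunk.split(/\r?\n/);
    for (let i = 0; i < lines.length - 1; i++) {
      currentLine += lines[i];
      say(currentLine);
      currentLine = '';
    }
    currentLine += lines[lines.length - 1];
  }
}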
|
Loading…
Reference in New Issue