ircgpt/index.js

#!/usr/bin/env node
// IRC bot that responds to !chat (and !cont to continue a conversation), queries the ChatGPT API, and prints the streamed response line by line
const irc = require('irc');
const axios = require('axios');
const config = require('./config.json');
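// The bot expects a config.json next to this file. A minimal example (field names
// come from how `config` is used below; the values here are just placeholders):
// {
//   "server": "irc.libera.chat",
//   "nick": "ircgpt",
//   "channels": ["#ircgpt"],
//   "openaiApiKey": "sk-..."
// }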
// Context holds the conversation history sent to the ChatGPT API (a list of
// role/content message objects) plus the assistant response currently being streamed.
class Context {
    messages = [];
    currentResponse = ''; // full assistant response accumulated so far
    currentLine = '';     // portion of the response that has not been printed yet

    add_user_message(message) {
        this.messages.push({ role: 'user', content: message });
    }
    add_assistant_message(message) {
        this.messages.push({ role: 'assistant', content: message });
    }
    append_current_response(message) {
        this.currentResponse += message;
        this.currentLine += message;
    }
    // End the line being built: return it for printing and start a new one.
    end_line() {
        const line = this.currentLine;
        this.currentResponse += "\n\n";
        this.currentLine = '';
        return line;
    }
    // The stream is finished: store the full response in the history and
    // return whatever has not been printed yet.
    finish_current_response() {
        this.add_assistant_message(this.currentResponse);
        const last_line = this.currentLine;
        this.currentResponse = '';
        this.currentLine = '';
        return last_line;
    }
    is_response_in_progress() {
        return this.currentResponse !== '';
    }
    clear() {
        this.messages = [];
        this.currentResponse = '';
        this.currentLine = '';
    }
}
const context = new Context();
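// For example, after "!chat hi" and a streamed reply, context.messages would look like
// [{ role: 'user', content: 'hi' }, { role: 'assistant', content: '...the reply...' }],
// so a following "!cont ..." request is answered with that history included.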
const client = new irc.Client(config.server, config.nick, {
    channels: config.channels,
});
// Listen for messages that start with !chat or !cont and stream the API response back line by line.
client.addListener('message', async (from, to, message) => {
    const is_chat_cmd = message.startsWith('!chat');
    const is_cont_cmd = message.startsWith('!cont');
    if (is_chat_cmd || is_cont_cmd) {
        if (context.is_response_in_progress()) { return; }
        if (is_chat_cmd) {
            // !chat starts a fresh conversation; !cont keeps the existing history.
            context.clear();
        }
        // Strip the 6-character command prefix ("!chat " / "!cont ").
        const query = message.slice(6);
        chatgpt(query, (line) => {
            if (line) { client.say(to, line); } // skip empty lines, IRC cannot send them
        });
    }
});
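// Usage from an IRC channel (illustrative prompts):
//   !chat write a haiku about winter     -> starts a new conversation
//   !cont now one about summer           -> continues with the previous context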
// Call the ChatGPT streaming API (server-sent events) and invoke the callback for each completed line.
async function chatgpt(query, callback) {
    // a very primitive mutex to prevent multiple calls to the api at once
    if (context.is_response_in_progress()) { return; }
    context.add_user_message(query);

    const apiUrl = 'https://api.openai.com/v1/chat/completions';

    const response = await axios.post(apiUrl, {
        messages: context.messages, // the API expects a flat array of {role, content} objects
        model: 'gpt-3.5-turbo',
        stream: true,
    }, {
        headers: {
            Authorization: `Bearer ${config.openaiApiKey}`,
            'Content-Type': 'application/json',
        },
        responseType: 'stream',
    });
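    // The stream arrives as server-sent events: each payload line looks like
    //   data: {"choices":[{"delta":{"content":"..."}}], ...}
    // and the stream is terminated by the line "data: [DONE]".
    // Note this handler assumes each "data:" line arrives whole within one chunk;
    // an event split across chunks would fail JSON.parse and just be logged below.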
    response.data.on('data', (event) => {
        const data = event.toString();
        const parts = data.split('\n');
        // Only lines that start with "data: " carry payloads.
        for (const part of parts) {
            console.log(part);
            if (part === 'data: [DONE]') {
                callback(context.finish_current_response());
            } else if (part.startsWith('data: ')) {
                const jsonString = part.slice(part.indexOf('{'), part.lastIndexOf('}') + 1);
                try {
                    const json = JSON.parse(jsonString);
                    const chunk = json.choices[0].delta.content;
                    if (!chunk) {
                        // Deltas without content (e.g. the initial role-only delta) just end the current line.
                        callback(context.end_line());
                        continue;
                    }
                    // Split the chunk on newlines; every newline ends the current IRC line.
                    const lines = chunk.split(/\r?\n/);

                    const hasStartNewline = chunk.startsWith("\n");
                    const hasEndNewline = chunk.endsWith("\n");

                    if (hasStartNewline) {
                        callback(context.end_line());
                    }
                    for (let i = 0; i < lines.length - 1; i++) {
                        context.append_current_response(lines[i]);
                        callback(context.end_line());
                    }
                    context.append_current_response(lines[lines.length - 1]);

                    if (hasEndNewline) {
                        callback(context.end_line());
                    }

                    // IRC messages are limited to roughly 512 bytes, so flush long lines early.
                    if (context.currentLine.length > 400) {
                        callback(context.end_line());
                    }
                } catch (e) {
                    console.log(e);
                    console.log(part);
                }
            }
        }
    });
}
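// To run the bot (assuming Node.js plus the irc and axios packages are installed,
// e.g. via `npm install irc axios`): create config.json as sketched above, then run
//   node index.js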