fixup context

parent 0a726b18c8
commit 112e960135

index.js (76 lines changed)
@@ -7,9 +7,10 @@ const config = require('./config.json');
 // context is a list of strings that are used to seed the chatgpt api and it's responses
 class Context {
   messages = [];
+  currentLine = '';
   currentResponse = '';
 
-  add_user_message(message) {
+  add_user_prompt(message) {
     this.messages.push({ role: 'user', content: message });
   }
 
@@ -17,30 +18,37 @@ class Context {
     this.messages.push({ role: 'assistant', content: message });
   }
 
-  append_current_response(message) {
-    this.currentResponse += message;
+  append_to_line(message) {
+    this.currentLine += message;
   }
 
   end_line() {
-    const response_so_far = this.currentResponse;
-    this.currentResponse += "\n\n"
-    return response_so_far;
+    const the_line = this.currentLine;
+    this.currentResponse += `${the_line}\n\n`;
+    this.currentLine = '';
+    return the_line;
   }
 
   finish_current_response() {
     this.add_assistant_message(this.currentResponse);
-    const the_response = this.currentResponse;
+    const theLine = this.currentLine;
     this.currentResponse = '';
-    return the_response;
+    this.currentLine = '';
+    return theLine;
   }
 
   is_response_in_progress() {
-    return this.currentResponse !== '';
+    return this.currentResponse !== '' || this.currentLine !== '';
   }
+
+  peek_line() {
+    return this.currentLine;
+  }
+
+  clear() {
+    this.messages = [];
+    this.currentResponse = '';
+    this.currentLine = '';
+  }
 }
 
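
Not part of the commit, but a minimal sketch of how the reworked Context is meant to be used, assuming the class exactly as it appears in the hunk above and a callback that just prints to stdout:

// Sketch only: assumes the Context class from the diff above.
const context = new Context();
const callback = (line) => console.log(`> ${line}`);

context.add_user_prompt('hello');

// Streamed chunks arrive in arbitrary pieces; only complete lines get flushed.
context.append_to_line('Hello ');
context.append_to_line('there!');
callback(context.end_line());       // prints "> Hello there!" and moves the line into currentResponse

context.append_to_line('partial');
console.log(context.peek_line());   // "partial" is still buffered, nothing sent yet

// At end of stream: records the accumulated response as an assistant message
// and returns whatever was left in the line buffer so it can be sent too.
callback(context.finish_current_response());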
@@ -70,20 +78,37 @@ client.addListener('message', async (from, to, message) => {
 async function chatgpt(query, callback) {
   // a very primitive mutex to prevent multiple calls to the api at once
   if(context.is_response_in_progress()) { return; }
-  context.add_user_message(query);
+  context.add_user_prompt(query);
   const apiUrl = 'https://api.openai.com/v1/chat/completions';
 
-  const response = await axios.post(apiUrl, {
-    messages: [context.messages],
-    model: 'gpt-3.5-turbo',
-    stream: true,
-  }, {
-    headers: {
-      Authorization: `Bearer ${config.openaiApiKey}`,
-      'Content-Type': 'application/json',
-    },
-    responseType: 'stream',
-  });
+  let response = null;
+  try {
+    response = await axios.post(apiUrl, {
+      messages: context.messages,
+      model: 'gpt-3.5-turbo',
+      stream: true,
+    }, {
+      headers: {
+        Authorization: `Bearer ${config.openaiApiKey}`,
+        'Content-Type': 'application/json',
+      },
+      responseType: 'stream',
+    });
+  } catch(error) {
+    if (error.response) {
+      // The request was made and the server responded with a status code
+      // that falls out of the range of 2xx
+      console.log(error.toJSON());
+    } else if (error.request) {
+      // The request was made but no response was received
+      // `error.request` is an instance of XMLHttpRequest in the browser and an instance of
+      // http.ClientRequest in node.js
+      console.log(error.request);
+    } else {
+      // Something happened in setting up the request that triggered an Error
+      console.log('Error', error.message);
+    }
+    return;
+  }
 
   response.data.on('data', (event) => {
     let data = event.toString();
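
One fix in the hunk above is easy to miss: the old request sent messages: [context.messages], wrapping the message list in an extra array, while the chat completions endpoint expects a flat array of { role, content } objects. A sketch of the request body shape after the fix (the conversation content here is made up):

// Shape of the request body after the fix; the messages themselves are hypothetical.
const body = {
  model: 'gpt-3.5-turbo',
  stream: true,
  messages: [
    { role: 'user', content: 'hello' },
    { role: 'assistant', content: 'Hello there!' },
    { role: 'user', content: 'how are you?' },
  ],
};
// Before the fix the same field held [context.messages], i.e. a nested array
// rather than a list of message objects.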
@@ -99,7 +124,6 @@ async function chatgpt(query, callback) {
       let json = JSON.parse(jsonString);
       let chunk = json.choices[0].delta.content;
       if (!chunk) {
-        callback(context.end_line());
         continue;
       }
       //split the chunk into lines leaving the delimiter in the array
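
The code around this hunk is not part of the diff, but for orientation: with stream: true the response body arrives as server-sent events, so each 'data' event has to be split into "data: {...}" records before the JSON.parse(jsonString) call above ever runs. A rough sketch of that framing, with the variable names taken from the diff and everything else assumed:

// Rough sketch of the SSE framing around JSON.parse(jsonString); not the
// repository's actual code, just one plausible shape for it.
response.data.on('data', (event) => {
  let data = event.toString();
  for (const record of data.split('\n')) {
    if (!record.startsWith('data: ')) { continue; }
    const jsonString = record.slice('data: '.length);
    if (jsonString === '[DONE]') {
      // End of stream: record the response and flush any unfinished line.
      callback(context.finish_current_response());
      continue;
    }
    try {
      let json = JSON.parse(jsonString);
      let chunk = json.choices[0].delta.content;
      if (!chunk) { continue; }
      // ...split chunk into lines and feed it to context, as in the next hunk
    } catch (e) {
      console.log('failed to parse stream chunk', e);
    }
  }
});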
@@ -113,17 +137,17 @@ async function chatgpt(query, callback) {
       }
 
       for (let i = 0; i < lines.length - 1; i++) {
-        context.append_current_response(lines[i]);
+        context.append_to_line(lines[i]);
         callback(context.end_line());
       }
 
-      context.append_current_response(lines[lines.length - 1]);
+      context.append_to_line(lines[lines.length - 1]);
 
       if(hasEndNewline) {
         callback(context.end_line());
       }
 
-      if (line.length > 400) {
+      if (context.peek_line().length > 400) {
         callback(context.end_line());
       }
     } catch (e) {
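
The splitting step referenced by the "split the chunk into lines leaving the delimiter in the array" comment sits outside the changed lines, so here is a small sketch of one way lines and hasEndNewline could be produced from a chunk (the names come from the diff, the implementation is assumed):

// Sketch of deriving `lines` and `hasEndNewline` from a streamed chunk;
// the real index.js may do this differently.
const chunk = 'first line\nsecond line\npartial thi';
const lines = chunk.split(/(?<=\n)/);     // split after each newline, keeping it attached
const hasEndNewline = chunk.endsWith('\n');
// lines === ['first line\n', 'second line\n', 'partial thi']
// Everything except possibly the last element is a finished line, which is why
// the hunk above flushes lines[0..length-2] with end_line() and only buffers the tail.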