Compare commits

..

3 commits

Author SHA1 Message Date
124ac5cbf0 Simplify the ChatCompletion calling loop
removes duplicate code in while loop
2023-07-30 01:32:09 +02:00
56869a2dc2 Make error of function json parsing more descriptive 2023-07-30 01:21:19 +02:00
67d4361c26 Log message data when an error occurs 2023-07-30 01:18:25 +02:00
2 changed files with 31 additions and 26 deletions

View file

@@ -1,4 +1,5 @@
import DiscordApi, { GuildTextBasedChannel } from "discord.js";
import { ChatCompletionRequestMessage, ChatCompletionResponseMessage } from "openai";
import { database, openai } from "./index";
import Moderation from "./moderation";
@@ -220,6 +221,7 @@ async function executeFromQueue(channel: string) {
const channelQueue = channelsRunning.get(channel) as ChannelQueue;
const message = channelQueue.at(0) as RequestMessage;
let functionRanCounter = 0;
let OpenAImessages: ChatCompletionRequestMessage[] = [];
// ignore if we can't even send anything to reply
if (!canReplyToRequest(message)) return;
@@ -238,28 +240,11 @@ async function executeFromQueue(channel: string) {
message.deferReply();
}
const OpenAImessages = toOpenAIMessages(messages);
let answer = await openai.createChatCompletion({
...config.chatCompletionConfig,
messages: OpenAImessages,
// FIXME: don't use new instance of FunctionManager
functions: new FunctionManager().getFunctions(),
});
OpenAImessages = toOpenAIMessages(messages);
let generatedMessage: ChatCompletionResponseMessage | undefined = undefined;
let answer: Awaited<ReturnType<typeof openai.createChatCompletion>>;
logUsedTokens(answer, message, ++functionRanCounter);
let generatedMessage = answer.data.choices[0].message;
if (!generatedMessage) throw new Error("empty message received");
// handle function calls
while (generatedMessage.function_call) {
OpenAImessages.push(generatedMessage);
OpenAImessages.push({
role: "function",
name: generatedMessage.function_call.name,
// FIXME: don't use new instance of FunctionManager
content: new FunctionManager().handleFunction(generatedMessage.function_call),
});
do {
answer = await openai.createChatCompletion({
...config.chatCompletionConfig,
messages: OpenAImessages,
@@ -270,8 +255,19 @@ async function executeFromQueue(channel: string) {
logUsedTokens(answer, message, ++functionRanCounter);
generatedMessage = answer.data.choices[0].message;
if (!generatedMessage) throw new Error("empty message received");
if (!generatedMessage) throw new Error("Empty message received");
// handle function calls
if (generatedMessage.function_call) {
OpenAImessages.push(generatedMessage);
OpenAImessages.push({
role: "function",
name: generatedMessage.function_call.name,
// FIXME: don't use new instance of FunctionManager
content: new FunctionManager().handleFunction(generatedMessage.function_call),
});
}
} while (generatedMessage.function_call);
const answerContent = answer.data.choices[0].message?.content;
@@ -299,6 +295,10 @@ async function executeFromQueue(channel: string) {
} catch (e) {
console.error(`Error ocurred while handling chat completion request (${(e as object).constructor.name}):`);
console.error(e);
if (OpenAImessages.length != 0) {
console.error("Messages:");
console.error(OpenAImessages);
}
let errorText = "\n";

View file

@@ -61,10 +61,15 @@ export default class FunctionManager {
}
/**
 * Runs the function requested by a chat-completion function call.
 *
 * Looks up the handler registered under `request.name` in `this.store` and
 * invokes its `execute` with the parsed JSON arguments. Returns whatever
 * `execute` returns, or `undefined` when no handler is registered for that
 * name (optional chaining on the store lookup).
 *
 * Throws an `Error` (with the original error attached as `cause`) when
 * `request.arguments` is not valid JSON, after logging the raw payload.
 * NOTE(review): the catch also wraps errors thrown by `execute` itself,
 * not only JSON.parse failures — the error message may mislead in that case.
 */
public handleFunction(request: ChatCompletionRequestMessageFunctionCall) {
try {
// `arguments` arrives as a raw JSON string from the model; may be absent, hence ?? ""
const parsedArguments = JSON.parse(request.arguments ?? "");
return this.store.get(request.name ?? "")?.execute(parsedArguments);
}
catch (e) {
// Log the unparsable payload so the failure can be diagnosed from the console.
console.error("Function arguments raw: " + request.arguments);
throw new Error(`Failed to parse the function JSON arguments when running function [${request.name}]`, {cause: e});
}
}
}
// builtins