Compare commits


No commits in common. "124ac5cbf044675bfdf99c85e43c317e379b778f" and "9c3f25312b4146b5cdf9e9abe8a68eddc72662a4" have entirely different histories.

2 changed files with 26 additions and 31 deletions


@@ -1,5 +1,4 @@
import DiscordApi, { GuildTextBasedChannel } from "discord.js";
import { ChatCompletionRequestMessage, ChatCompletionResponseMessage } from "openai";
import { database, openai } from "./index";
import Moderation from "./moderation";
@@ -221,7 +220,6 @@ async function executeFromQueue(channel: string) {
const channelQueue = channelsRunning.get(channel) as ChannelQueue;
const message = channelQueue.at(0) as RequestMessage;
let functionRanCounter = 0;
let OpenAImessages: ChatCompletionRequestMessage[] = [];
// ignore if we can't even send anything to reply
if (!canReplyToRequest(message)) return;
@@ -240,11 +238,28 @@ async function executeFromQueue(channel: string) {
message.deferReply();
}
OpenAImessages = toOpenAIMessages(messages);
let generatedMessage: ChatCompletionResponseMessage | undefined = undefined;
let answer: Awaited<ReturnType<typeof openai.createChatCompletion>>;
const OpenAImessages = toOpenAIMessages(messages);
let answer = await openai.createChatCompletion({
...config.chatCompletionConfig,
messages: OpenAImessages,
// FIXME: don't use new instance of FunctionManager
functions: new FunctionManager().getFunctions(),
});
do {
logUsedTokens(answer, message, ++functionRanCounter);
let generatedMessage = answer.data.choices[0].message;
if (!generatedMessage) throw new Error("empty message received");
// handle function calls
while (generatedMessage.function_call) {
OpenAImessages.push(generatedMessage);
OpenAImessages.push({
role: "function",
name: generatedMessage.function_call.name,
// FIXME: don't use new instance of FunctionManager
content: new FunctionManager().handleFunction(generatedMessage.function_call),
});
answer = await openai.createChatCompletion({
...config.chatCompletionConfig,
messages: OpenAImessages,
@@ -255,19 +270,8 @@ async function executeFromQueue(channel: string) {
logUsedTokens(answer, message, ++functionRanCounter);
generatedMessage = answer.data.choices[0].message;
if (!generatedMessage) throw new Error("Empty message received");
// handle function calls
if (generatedMessage.function_call) {
OpenAImessages.push(generatedMessage);
OpenAImessages.push({
role: "function",
name: generatedMessage.function_call.name,
// FIXME: don't use new instance of FunctionManager
content: new FunctionManager().handleFunction(generatedMessage.function_call),
});
}
} while (generatedMessage.function_call);
if (!generatedMessage) throw new Error("empty message received");
}
const answerContent = answer.data.choices[0].message?.content;
@@ -295,10 +299,6 @@ async function executeFromQueue(channel: string) {
} catch (e) {
console.error(`Error occurred while handling chat completion request (${(e as object).constructor.name}):`);
console.error(e);
if (OpenAImessages.length != 0) {
console.error("Messages:");
console.error(OpenAImessages);
}
let errorText = "\n";
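Taken together, one side of the hunks above builds OpenAImessages as a local const, sends the first createChatCompletion request before any function handling, and replaces the do…while with a plain while (generatedMessage.function_call) loop. A condensed sketch of that flow, assuming the openai v3 SDK; runChatWithFunctions, functionManager, and the hard-coded model string are illustrative stand-ins for the repo's executeFromQueue, FunctionManager instance, and config.chatCompletionConfig:

import {
    ChatCompletionFunctions,
    ChatCompletionRequestMessage,
    ChatCompletionRequestMessageFunctionCall,
    Configuration,
    OpenAIApi,
} from "openai";

// Stand-in for the repo's FunctionManager; names and signatures are assumed.
declare const functionManager: {
    getFunctions(): ChatCompletionFunctions[];
    handleFunction(call: ChatCompletionRequestMessageFunctionCall): string;
};

// Assumes a Node environment with OPENAI_API_KEY set.
const openai = new OpenAIApi(new Configuration({ apiKey: process.env.OPENAI_API_KEY }));

async function runChatWithFunctions(messages: ChatCompletionRequestMessage[]) {
    // First completion request happens once, before any function handling.
    let answer = await openai.createChatCompletion({
        model: "gpt-3.5-turbo-0613", // placeholder for config.chatCompletionConfig
        messages,
        functions: functionManager.getFunctions(),
    });
    let generatedMessage = answer.data.choices[0].message;
    if (!generatedMessage) throw new Error("empty message received");

    // While the model keeps requesting a function, run it, append the result
    // as a role "function" message, and ask for the next completion.
    while (generatedMessage.function_call) {
        messages.push(generatedMessage);
        messages.push({
            role: "function",
            name: generatedMessage.function_call.name,
            content: functionManager.handleFunction(generatedMessage.function_call),
        });
        answer = await openai.createChatCompletion({
            model: "gpt-3.5-turbo-0613",
            messages,
            functions: functionManager.getFunctions(),
        });
        generatedMessage = answer.data.choices[0].message;
        if (!generatedMessage) throw new Error("empty message received");
    }
    return generatedMessage.content;
}

Token accounting (logUsedTokens), the Discord reply plumbing, and the surrounding try/catch are omitted here to keep the loop itself visible.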


@@ -61,14 +61,9 @@ export default class FunctionManager {
}
public handleFunction(request: ChatCompletionRequestMessageFunctionCall) {
try {
const parsedArguments = JSON.parse(request.arguments ?? "");
return this.store.get(request.name ?? "")?.execute(parsedArguments);
}
catch (e) {
console.error("Function arguments raw: " + request.arguments);
throw new Error(`Failed to parse the function JSON arguments when running function [${request.name}]`, {cause: e});
}
const parsedArguments = JSON.parse(request.arguments ?? "");
return this.store.get(request.name ?? "")?.execute(parsedArguments);
}
}
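The FunctionManager hunk shows two variants of handleFunction: a bare parse-and-dispatch, and one that wraps JSON.parse in a try/catch that logs the raw arguments and rethrows with the function name as context. A minimal sketch of the wrapped variant, with the shape of the store entries assumed:

import { ChatCompletionRequestMessageFunctionCall } from "openai";

// Assumed shape of the entries kept in the manager's store.
interface StoredFunction {
    execute(args: Record<string, unknown>): string;
}

export default class FunctionManager {
    private store = new Map<string, StoredFunction>();

    public handleFunction(request: ChatCompletionRequestMessageFunctionCall) {
        try {
            // The model returns arguments as a JSON string; parse before dispatching.
            const parsedArguments = JSON.parse(request.arguments ?? "");
            return this.store.get(request.name ?? "")?.execute(parsedArguments);
        } catch (e) {
            // Surface the raw payload so a bad generation is easy to debug.
            console.error("Function arguments raw: " + request.arguments);
            throw new Error(
                `Failed to parse the function JSON arguments when running function [${request.name}]`,
                { cause: e },
            );
        }
    }
}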