Compare commits: 9c3f25312b...124ac5cbf0 (3 commits)

| Author | SHA1 | Date |
|---|---|---|
| | 124ac5cbf0 | |
| | 56869a2dc2 | |
| | 67d4361c26 | |
2 changed files with 31 additions and 26 deletions
Changes to the first file (imports and `executeFromQueue`):

```diff
@@ -1,4 +1,5 @@
 import DiscordApi, { GuildTextBasedChannel } from "discord.js";
+import { ChatCompletionRequestMessage, ChatCompletionResponseMessage } from "openai";
 import { database, openai } from "./index";
 import Moderation from "./moderation";
@@ -220,6 +221,7 @@ async function executeFromQueue(channel: string) {
     const channelQueue = channelsRunning.get(channel) as ChannelQueue;
     const message = channelQueue.at(0) as RequestMessage;
     let functionRanCounter = 0;
+    let OpenAImessages: ChatCompletionRequestMessage[] = [];
 
     // ignore if we can't even send anything to reply
     if (!canReplyToRequest(message)) return;
```
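The two added lines above set up the rest of the change: the message accumulator is now declared up front, at function scope, and typed against the openai v3 SDK's `ChatCompletionRequestMessage`. A minimal sketch of that shape, assuming `openai@3.x` is installed (the literal messages are placeholders, not the bot's real prompt):

```typescript
import { ChatCompletionRequestMessage } from "openai";

// Hoisted, typed accumulator: it starts empty, so it is always defined
// even if an error is thrown before any conversation has been built.
let OpenAImessages: ChatCompletionRequestMessage[] = [];

// Later it is (re)assigned from the channel history; placeholder content only:
OpenAImessages = [
    { role: "system", content: "You are a helpful Discord bot." },
    { role: "user", content: "Hello!" },
];

console.log(OpenAImessages.length); // 2
```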
```diff
@@ -238,28 +240,11 @@ async function executeFromQueue(channel: string) {
         message.deferReply();
     }
 
-    const OpenAImessages = toOpenAIMessages(messages);
-    let answer = await openai.createChatCompletion({
-        ...config.chatCompletionConfig,
-        messages: OpenAImessages,
-        // FIXME: don't use new instance of FunctionManager
-        functions: new FunctionManager().getFunctions(),
-    });
-
-    logUsedTokens(answer, message, ++functionRanCounter);
-
-    let generatedMessage = answer.data.choices[0].message;
-    if (!generatedMessage) throw new Error("empty message received");
-
-    // handle function calls
-    while (generatedMessage.function_call) {
-        OpenAImessages.push(generatedMessage);
-        OpenAImessages.push({
-            role: "function",
-            name: generatedMessage.function_call.name,
-            // FIXME: don't use new instance of FunctionManager
-            content: new FunctionManager().handleFunction(generatedMessage.function_call),
-        });
+    OpenAImessages = toOpenAIMessages(messages);
+    let generatedMessage: ChatCompletionResponseMessage | undefined = undefined;
+    let answer: Awaited<ReturnType<typeof openai.createChatCompletion>>;
+
+    do {
         answer = await openai.createChatCompletion({
             ...config.chatCompletionConfig,
             messages: OpenAImessages,
@@ -270,8 +255,19 @@ async function executeFromQueue(channel: string) {
         logUsedTokens(answer, message, ++functionRanCounter);
 
         generatedMessage = answer.data.choices[0].message;
-        if (!generatedMessage) throw new Error("empty message received");
-    }
+        if (!generatedMessage) throw new Error("Empty message received");
+
+        // handle function calls
+        if (generatedMessage.function_call) {
+            OpenAImessages.push(generatedMessage);
+            OpenAImessages.push({
+                role: "function",
+                name: generatedMessage.function_call.name,
+                // FIXME: don't use new instance of FunctionManager
+                content: new FunctionManager().handleFunction(generatedMessage.function_call),
+            });
+        }
+    } while (generatedMessage.function_call);
 
     const answerContent = answer.data.choices[0].message?.content;
 
```
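The heart of the refactor is in the two hunks above: the old code issued one request up front and then repeated a second, near-identical request inside a `while (generatedMessage.function_call)` loop; the new code declares `generatedMessage` and `answer` before a `do … while` loop (reusing the SDK's response type via `Awaited<ReturnType<typeof openai.createChatCompletion>>`) so a single request path handles both the first completion and any follow-up function calls. A stripped-down sketch of that control flow, with the OpenAI call and the function execution stubbed out (`AssistantMessage`, `fakeCompletion`, `get_time`, and the canned replies are all invented for illustration):

```typescript
// Minimal shape of an assistant reply, mirroring openai v3's
// ChatCompletionResponseMessage (role / content / function_call).
interface AssistantMessage {
    role: "assistant";
    content?: string;
    function_call?: { name: string; arguments: string };
}

// Stand-in for openai.createChatCompletion: the first call asks for a
// function, the second returns plain text. Purely illustrative.
const cannedReplies: AssistantMessage[] = [
    { role: "assistant", function_call: { name: "get_time", arguments: "{}" } },
    { role: "assistant", content: "It is 12:00." },
];
let call = 0;
async function fakeCompletion(_messages: object[]): Promise<AssistantMessage> {
    return cannedReplies[Math.min(call++, cannedReplies.length - 1)];
}

async function run() {
    const messages: object[] = [{ role: "user", content: "What time is it?" }];
    let generatedMessage: AssistantMessage | undefined = undefined;

    // Same loop shape as the diff: request, check, and while the model keeps
    // asking for a function, append the call plus its result and go again.
    do {
        generatedMessage = await fakeCompletion(messages);
        if (!generatedMessage) throw new Error("Empty message received");

        if (generatedMessage.function_call) {
            messages.push(generatedMessage);
            messages.push({
                role: "function",
                name: generatedMessage.function_call.name,
                content: "12:00", // what executing the function would return
            });
        }
    } while (generatedMessage.function_call);

    console.log(generatedMessage?.content); // "It is 12:00."
}

run().catch(console.error);
```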
```diff
@@ -299,6 +295,10 @@ async function executeFromQueue(channel: string) {
     } catch (e) {
         console.error(`Error ocurred while handling chat completion request (${(e as object).constructor.name}):`);
         console.error(e);
+        if (OpenAImessages.length != 0) {
+            console.error("Messages:");
+            console.error(OpenAImessages);
+        }
 
         let errorText = "\n";
 
```
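This hunk is the payoff of hoisting `OpenAImessages`: when a completion request fails, the `catch` block can now dump the exact conversation that was sent, and the `length != 0` guard avoids printing an empty array when the failure happened before any messages were built. A small self-contained illustration of the pattern (the failing API call is simulated with a thrown error; the names are invented):

```typescript
type LoggedMessage = { role: string; content?: string };

function handleRequest(simulateEarlyFailure: boolean) {
    let collected: LoggedMessage[] = []; // hoisted so the catch block can see it

    try {
        if (simulateEarlyFailure) throw new Error("failed before building the conversation");
        collected = [{ role: "user", content: "hi" }];
        throw new Error("simulated completion failure"); // stand-in for a failed API call
    } catch (e) {
        console.error(`Error while handling chat completion request (${(e as object).constructor.name}):`);
        console.error(e);
        // Only dump the conversation if one was actually built.
        if (collected.length != 0) {
            console.error("Messages:");
            console.error(collected);
        }
    }
}

handleRequest(true);  // logs only the error
handleRequest(false); // logs the error plus the collected messages
```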
Changes to the second file (the `FunctionManager` class):

```diff
@@ -61,9 +61,14 @@ export default class FunctionManager {
     }
 
     public handleFunction(request: ChatCompletionRequestMessageFunctionCall) {
-        const parsedArguments = JSON.parse(request.arguments ?? "");
-        return this.store.get(request.name ?? "")?.execute(parsedArguments);
+        try {
+            const parsedArguments = JSON.parse(request.arguments ?? "");
+            return this.store.get(request.name ?? "")?.execute(parsedArguments);
+        }
+        catch (e) {
+            console.error("Function arguments raw: " + request.arguments);
+            throw new Error(`Failed to parse the function JSON arguments when running function [${request.name}]`, {cause: e});
+        }
     }
 }
```
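The second file's change is purely defensive: `JSON.parse` on model-supplied arguments can throw on malformed JSON, so `handleFunction` now logs the raw argument string and rethrows a descriptive error with the original exception attached via `{cause: e}` (available in Node 16.9+ with ES2022 typings). A self-contained sketch of the same pattern, with an invented `store` map standing in for the real FunctionManager registry:

```typescript
// Invented stand-in for the manager's registry of callable functions.
interface StoredFunction {
    execute(args: Record<string, unknown>): string;
}

const store = new Map<string, StoredFunction>([
    ["echo", { execute: (args) => `echo: ${JSON.stringify(args)}` }],
]);

// Mirrors the diff: parse the model-supplied JSON arguments defensively,
// log the raw string on failure, and rethrow with the original error as cause.
function handleFunction(request: { name?: string; arguments?: string }) {
    try {
        const parsedArguments = JSON.parse(request.arguments ?? "");
        return store.get(request.name ?? "")?.execute(parsedArguments);
    }
    catch (e) {
        console.error("Function arguments raw: " + request.arguments);
        throw new Error(`Failed to parse the function JSON arguments when running function [${request.name}]`, { cause: e });
    }
}

console.log(handleFunction({ name: "echo", arguments: '{"text":"hi"}' })); // parses and runs
try {
    handleFunction({ name: "echo", arguments: "{not json" });             // malformed JSON
} catch (e) {
    console.error(e); // wrapping Error, with the SyntaxError attached as cause
}
```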