execution: factor out chat completion process

parent d3567c3607
commit 482f72a4d1

1 changed file with 40 additions and 25 deletions
@@ -219,7 +219,6 @@ function logUsedTokens(
 async function executeFromQueue(channel: string) {
   const channelQueue = channelsRunning.get(channel) as ChannelsRunningValue;
   const message = channelQueue.at(0) as RequestMessage;
-  let functionRanCounter = 0;
   let OpenAImessages: ChatCompletionMessageParam[] = [];
 
   // ignore if we can't even send anything to reply
@@ -247,30 +246,7 @@ async function executeFromQueue(channel: string) {
   });
 
   OpenAImessages = toOpenAIMessages(messages.values());
-  let generatedMessage: ChatCompletionMessage | undefined = undefined;
-  let answer: Awaited<ReturnType<typeof openai.chat.completions.create>>;
-
-  do {
-    answer = await openai.chat.completions.create({
-      ...config.chatCompletionParams,
-      messages: OpenAImessages,
-      // FIXME: don't use new instance of FunctionManager
-      tools: new FunctionManager().getToolsForOpenAi(),
-    });
-
-    functionRanCounter += answer.choices[0].message?.tool_calls?.length ?? 0;
-    logUsedTokens(answer, message, ++functionRanCounter);
-
-    generatedMessage = answer.choices[0].message;
-    if (!generatedMessage) throw new Error("Empty message received");
-
-    // handle tool calls
-    if (generatedMessage.tool_calls !== undefined && generatedMessage.tool_calls.length > 0) {
-      OpenAImessages.push(generatedMessage);
-      // FIXME: don't use new instance of FunctionManager
-      OpenAImessages.push(...(await new FunctionManager().handleToolCalls(generatedMessage.tool_calls)));
-    }
-  } while (generatedMessage.tool_calls !== undefined && generatedMessage.tool_calls.length > 0);
+  const answer = await executeChatCompletion(OpenAImessages, message);
 
   channelQueue.stopTyping();
 
@@ -349,3 +325,42 @@ async function executeFromQueue(channel: string) {
   else
     return executeFromQueue(channel);
 }
+
+/**
+ * Executes the chat completion process.
+ *
+ * @param OpenAImessages An array of ChatCompletionMessageParam objects representing the messages for chat completion.
+ * @param message An optional RequestMessage object representing the request message, used for logging.
+ * @returns A Promise that resolves to the answer from the chat completion process.
+ */
+async function executeChatCompletion(
+  OpenAImessages: ChatCompletionMessageParam[],
+  message: RequestMessage | undefined,
+) {
+  let generatedMessage: ChatCompletionMessage | undefined = undefined;
+  let answer: Awaited<ReturnType<typeof openai.chat.completions.create>>;
+  let functionRanCounter = 0;
+
+  do {
+    answer = await openai.chat.completions.create({
+      ...config.chatCompletionParams,
+      messages: OpenAImessages,
+      // FIXME: don't use new instance of FunctionManager
+      tools: new FunctionManager().getToolsForOpenAi(),
+    });
+
+    functionRanCounter += answer.choices[0].message?.tool_calls?.length ?? 0;
+    logUsedTokens(answer, message, ++functionRanCounter);
+
+    generatedMessage = answer.choices[0].message;
+    if (!generatedMessage) throw new Error("Empty message received");
+
+    // handle tool calls
+    if (generatedMessage.tool_calls !== undefined && generatedMessage.tool_calls.length > 0) {
+      OpenAImessages.push(generatedMessage);
+      // FIXME: don't use new instance of FunctionManager
+      OpenAImessages.push(...(await new FunctionManager().handleToolCalls(generatedMessage.tool_calls)));
+    }
+  } while (generatedMessage.tool_calls !== undefined && generatedMessage.tool_calls.length > 0);
+  return answer;
+}
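
For context, not part of the diff above: the point of factoring the loop out is that a completion can now be driven without going through the channel queue. A minimal, hypothetical caller, assuming it lives in the same module so that executeChatCompletion, ChatCompletionMessageParam, and the OpenAI client are in scope:

// Hypothetical helper, not in the repository: reuses the factored-out
// tool-call loop for an ad-hoc message history. Per the JSDoc above, the
// RequestMessage parameter is optional and only used for logging, so
// undefined is passed here.
async function completeOnce(history: ChatCompletionMessageParam[]): Promise<string> {
  const answer = await executeChatCompletion(history, undefined);
  // Same shape as in executeFromQueue: take the first choice's message content.
  return answer.choices[0].message?.content ?? "";
}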