Update OpenAI dependency

Wroclaw 2023-09-27 17:14:17 +02:00
parent 8ed2e758f8
commit 23ceca5cd3
8 changed files with 250 additions and 81 deletions
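The diff below migrates the bot from the openai-node v3 client (Axios-based, with response fields wrapped in .data) to the v4 client (openai.chat.completions.create, typed APIError, message content typed as string | null). As context, here is a minimal sketch of the v4 call shape the code switches to; the model name and message text are placeholders, not values from this repository:

import OpenAI, { APIError } from "openai";
import { ChatCompletionMessageParam } from "openai/resources/chat";

// Reads OPENAI_API_KEY from the environment by default.
const client = new OpenAI();

async function demo(): Promise<void> {
    const messages: ChatCompletionMessageParam[] = [
        { role: "user", content: "Hello" }, // placeholder message
    ];
    try {
        // v3's createChatCompletion returned an AxiosResponse, so fields lived
        // under answer.data; v4 returns the ChatCompletion object directly.
        const answer = await client.chat.completions.create({
            model: "gpt-3.5-turbo", // placeholder model
            messages,
        });
        console.log(answer.usage?.total_tokens); // was answer.data.usage in v3
        console.log(answer.choices[0].message.content); // string | null in v4
    } catch (e) {
        // v4 throws APIError instead of surfacing Axios errors.
        if (e instanceof APIError) console.error(e.status, e.message);
        else throw e;
    }
}

The hunks below follow this same pattern: dropping the .data wrapper, swapping the request/response message types for ChatCompletionMessageParam / ChatCompletionMessage, and replacing Axios error checks with APIError.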


@@ -1,11 +1,11 @@
 import DiscordApi, { GuildTextBasedChannel, TextBasedChannel } from "discord.js";
-import { ChatCompletionRequestMessage, ChatCompletionResponseMessage } from "openai";
-import Axios from "axios";
+import {APIError as OpenAIError} from "openai";
 import { database, openai, config } from "./index";
 import Moderation from "./moderation";
 import toOpenAIMessages from "./toOpenAIMessages";
 import FunctionManager from "./funcitonManager";
+import { ChatCompletion, ChatCompletionMessage, ChatCompletionMessageParam } from "openai/resources/chat";
 type NonNullableInObject<T, V> = { [k in keyof T]: k extends V ? NonNullable<T[k]> : T[k] };
 export type apiRequest = DiscordApi.Message | DiscordApi.RepliableInteraction;
@@ -171,12 +171,12 @@ export async function queueRequest(request: apiRequest) {
 * @param functionRan counter of how many function have been ran
 */
 function logUsedTokens(
-answer: Awaited<ReturnType<typeof openai.createChatCompletion>>,
+answer: ChatCompletion,
 message: RequestMessage,
 functionRan: number,
 ) {
-const usage = answer.data.usage;
-const functionName = answer.data.choices[0].message?.function_call?.name;
+const usage = answer.usage;
+const functionName = answer.choices[0].message?.function_call?.name;
 if (usage !== undefined) {
 const channelName: string = !message.channel.isDMBased() ? `${message.channel.name} (${message.guild?.name})` : `@${getAuthor(message).tag}`;
 console.log(`Used ${usage.total_tokens} (${usage.prompt_tokens} + ${usage.completion_tokens}) tokens for ${getAuthor(message).tag} (${getAuthor(message).id}) in #${channelName}${functionName ? " [Function: " + functionName + "]" : ""}`);
@@ -207,7 +207,7 @@ async function executeFromQueue(channel: string) {
 const channelQueue = channelsRunning.get(channel) as ChannelsRunningValue;
 const message = channelQueue.at(0) as RequestMessage;
 let functionRanCounter = 0;
-let OpenAImessages: ChatCompletionRequestMessage[] = [];
+let OpenAImessages: ChatCompletionMessageParam[] = [];
 // ignore if we can't even send anything to reply
 if (!canReplyToRequest(message)) return;
@@ -234,11 +234,11 @@ async function executeFromQueue(channel: string) {
 });
 OpenAImessages = toOpenAIMessages(messages.values());
-let generatedMessage: ChatCompletionResponseMessage | undefined = undefined;
-let answer: Awaited<ReturnType<typeof openai.createChatCompletion>>;
+let generatedMessage: ChatCompletionMessage | undefined = undefined;
+let answer: Awaited<ReturnType<typeof openai.chat.completions.create>>;
 do {
-answer = await openai.createChatCompletion({
+answer = await openai.chat.completions.create({
 ...config.chatCompletionParams,
 messages: OpenAImessages,
 // FIXME: don't use new instance of FunctionManager
@@ -247,7 +247,7 @@ async function executeFromQueue(channel: string) {
 logUsedTokens(answer, message, ++functionRanCounter);
-generatedMessage = answer.data.choices[0].message;
+generatedMessage = answer.choices[0].message;
 if (!generatedMessage) throw new Error("Empty message received");
 // handle function calls
@@ -262,9 +262,9 @@ async function executeFromQueue(channel: string) {
 channelQueue.stopTyping();
-const answerContent = answer.data.choices[0].message?.content;
+const answerContent = answer.choices[0].message?.content;
-if (answerContent === undefined || answerContent === "") {
+if (answerContent === null || answerContent === "") {
 if (message instanceof DiscordApi.Message) message.react("😶").catch(() => {/* GRACEFAIL: It's okay if the bot won't reply */});
 }
 else {
@@ -285,29 +285,37 @@
 }
 }
 } catch (e) {
-let errorText: string = "";
 channelQueue.stopTyping();
-console.error(`Error ocurred while handling chat completion request (${(e as object).constructor.name}):`);
-if (Axios.isAxiosError(e)) {
-console.error(JSON.stringify(e.response?.data));
-}
-else {
+if (typeof e !== "object") {
+console.error(`Error ocurred while handling chat completion request (${typeof e}):`);
 console.error(e);
 }
-if (OpenAImessages.length !== 0) {
-console.error("Messages:");
-console.error(OpenAImessages);
+else if (e === null) {
+console.error ("Error ocurred while handling chat completion request: null");
 }
+else {
+console.error(`Error ocurred while handling chat completion request (${e.constructor.name}):`);
+if (e instanceof OpenAIError) {
+console.error(JSON.stringify(e));
+}
+else {
+console.error(e);
+}
+if (OpenAImessages.length !== 0) {
+console.error("Messages:");
+console.error(OpenAImessages);
+}
+let errorText = "\n";
+if (e instanceof Error) {
+errorText += e.message;
+}
+else errorText = "";
-if (Axios.isAxiosError(e) && e.code?.match(/^5..$/) && channelQueue.tries < 3) {
-channelQueue.tries++;
-await new Promise(r => setTimeout(r, 2000)); // pause for 2 seconds before retrying
-return executeFromQueue(channel);
-if (e instanceof Error) {
-errorText = e.message;
-}
-else errorText = "";
+if (e instanceof OpenAIError && e.code?.match(/^5..$/) && channelQueue.tries < 3) {
+channelQueue.tries++;
+await new Promise(r => setTimeout(r, 2000)); // pause for 2 seconds before retrying
+return executeFromQueue(channel);
+}
 }
 requestReply(