Compare commits


No commits in common. "23ceca5cd3b686508c091e95c492da76bdce797b" and "03a1c62cd5fa745058559adb505036740bf86f2f" have entirely different histories.

10 changed files with 413 additions and 454 deletions
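
Note: aside from the lockfile, nearly every change below follows from moving between major versions of the openai Node SDK. The left side (23ceca5) is on openai@^4.10.0, the right side (03a1c62) on openai@^3.2.1. Below is a minimal sketch of the calling convention on each side; it is not code from this repo, and the model name and apiKey handling are placeholders.

import { Configuration, OpenAIApi } from "openai"; // openai@3.x, the right-hand side of this compare

export async function completeV3(apiKey: string, prompt: string) {
    const openai = new OpenAIApi(new Configuration({ apiKey }));
    // v3 resolves to an axios envelope, hence the `.data` prefix seen throughout this diff
    const answer = await openai.createChatCompletion({
        model: "gpt-3.5-turbo", // placeholder model name
        messages: [{ role: "user", content: prompt }],
    });
    return { text: answer.data.choices[0].message?.content, usage: answer.data.usage };
}

// Under openai@4.x (the left-hand side) the same call is flatter and un-enveloped:
//
//   import OpenAI from "openai";
//   const openai = new OpenAI({ apiKey });
//   const answer = await openai.chat.completions.create({ model, messages });
//   answer.choices[0].message?.content; answer.usage;

The rest of the diff is mostly mechanical fallout of that envelope and the renamed exported types.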

package-lock.json (generated, 754 lines changed): diff suppressed because it is too large.

@@ -15,9 +15,8 @@
     "discord.js": "^14.8.0",
     "fold-to-ascii": "^5.0.1",
     "gpt-3-encoder": "^1.1.4",
-    "openai": "^4.10.0",
-    "require-directory": "^2.1.1",
-    "typescript": "^5.1.6"
+    "openai": "^3.2.1",
+    "require-directory": "^2.1.1"
   },
   "devDependencies": {
     "@types/fold-to-ascii": "^5.0.0",
@@ -25,6 +24,7 @@
     "@typescript-eslint/eslint-plugin": "^6.2.0",
     "@typescript-eslint/parser": "^6.2.0",
     "eslint": "^8.46.0",
-    "prisma": "^5.0.0"
+    "prisma": "^5.0.0",
+    "typescript": "^5.1.6"
   }
 }


@@ -1,6 +1,6 @@
 import { AutocompleteInteraction, PermissionsBitField } from "discord.js";
 import { RESTPostAPIApplicationCommandsJSONBody } from "discord.js";
-import { APIApplicationCommandOption, ApplicationCommandType, ChatInputCommandInteraction, LocalizationMap, MessageInteraction, PermissionResolvable, UserSelectMenuInteraction } from "discord.js";
+import { ApplicationCommandOption, ApplicationCommandType, ChatInputCommandInteraction, LocalizationMap, MessageInteraction, PermissionResolvable, UserSelectMenuInteraction } from "discord.js";
 
 type InteractionTypeMap = {
     [ApplicationCommandType.ChatInput]: [ChatInputCommandInteraction, string];
@@ -13,7 +13,7 @@ interface Command<Type extends keyof InteractionTypeMap = ApplicationCommandType
     readonly name_localizations?: LocalizationMap;
     readonly description: InteractionTypeMap[Type][1];
     readonly description_localizations?: LocalizationMap;
-    readonly options?: APIApplicationCommandOption[];
+    readonly options?: ApplicationCommandOption[];
     readonly default_member_permissions?: PermissionResolvable;
     readonly type: Type;
     readonly nsfw?: boolean;


@@ -1,4 +1,4 @@
-import { ApplicationCommandType, ChatInputCommandInteraction, APIApplicationCommandOption, ApplicationCommandOptionType, APIEmbedField } from "discord.js";
+import { ApplicationCommandType, ChatInputCommandInteraction, ApplicationCommandOption, ApplicationCommandOptionType, APIEmbedField } from "discord.js";
 import Command from "../command";
 import { config } from "../index";
 
@@ -7,7 +7,7 @@ export default class MyLimit extends Command implements Command {
     name = "check-limit";
     description = "Checks your limit and usage";
     type = ApplicationCommandType.ChatInput;
-    options: APIApplicationCommandOption[] = [
+    options: ApplicationCommandOption[] = [
         {
             name: "recovery-for",
             description: "Calculate the limit recovery time for given message count (default: amount required to use the bot again or 1)",


@@ -1,8 +1,8 @@
 import { Message } from "discord.js";
 import {
-    ChatCompletionMessageParam as OpenAIMessage,
-    ChatCompletionCreateParamsNonStreaming as ChatCompletionRequestData,
-} from "openai/resources/chat";
+    ChatCompletionRequestMessage as OpenAIMessage,
+    CreateChatCompletionRequest as ChatCompletionRequestData,
+} from "openai";
 import IQuota from "./IQuota";
 import MessageCount from "./quota/messageCount";
 


@@ -1,11 +1,11 @@
 import DiscordApi, { GuildTextBasedChannel, TextBasedChannel } from "discord.js";
-import {APIError as OpenAIError} from "openai";
+import { ChatCompletionRequestMessage, ChatCompletionResponseMessage } from "openai";
+import Axios from "axios";
 import { database, openai, config } from "./index";
 import Moderation from "./moderation";
 import toOpenAIMessages from "./toOpenAIMessages";
 import FunctionManager from "./funcitonManager";
-import { ChatCompletion, ChatCompletionMessage, ChatCompletionMessageParam } from "openai/resources/chat";
 
 type NonNullableInObject<T, V> = { [k in keyof T]: k extends V ? NonNullable<T[k]> : T[k] };
 
 export type apiRequest = DiscordApi.Message | DiscordApi.RepliableInteraction;
@@ -171,12 +171,12 @@ export async function queueRequest(request: apiRequest) {
  * @param functionRan counter of how many function have been ran
  */
 function logUsedTokens(
-    answer: ChatCompletion,
+    answer: Awaited<ReturnType<typeof openai.createChatCompletion>>,
     message: RequestMessage,
     functionRan: number,
 ) {
-    const usage = answer.usage;
-    const functionName = answer.choices[0].message?.function_call?.name;
+    const usage = answer.data.usage;
+    const functionName = answer.data.choices[0].message?.function_call?.name;
     if (usage !== undefined) {
         const channelName: string = !message.channel.isDMBased() ? `${message.channel.name} (${message.guild?.name})` : `@${getAuthor(message).tag}`;
         console.log(`Used ${usage.total_tokens} (${usage.prompt_tokens} + ${usage.completion_tokens}) tokens for ${getAuthor(message).tag} (${getAuthor(message).id}) in #${channelName}${functionName ? " [Function: " + functionName + "]" : ""}`);
@@ -207,7 +207,7 @@ async function executeFromQueue(channel: string) {
     const channelQueue = channelsRunning.get(channel) as ChannelsRunningValue;
     const message = channelQueue.at(0) as RequestMessage;
     let functionRanCounter = 0;
-    let OpenAImessages: ChatCompletionMessageParam[] = [];
+    let OpenAImessages: ChatCompletionRequestMessage[] = [];
 
     // ignore if we can't even send anything to reply
     if (!canReplyToRequest(message)) return;
@@ -234,11 +234,11 @@ async function executeFromQueue(channel: string) {
         });
 
         OpenAImessages = toOpenAIMessages(messages.values());
-        let generatedMessage: ChatCompletionMessage | undefined = undefined;
-        let answer: Awaited<ReturnType<typeof openai.chat.completions.create>>;
+        let generatedMessage: ChatCompletionResponseMessage | undefined = undefined;
+        let answer: Awaited<ReturnType<typeof openai.createChatCompletion>>;
 
         do {
-            answer = await openai.chat.completions.create({
+            answer = await openai.createChatCompletion({
                 ...config.chatCompletionParams,
                 messages: OpenAImessages,
                 // FIXME: don't use new instance of FunctionManager
@@ -247,7 +247,7 @@ async function executeFromQueue(channel: string) {
 
             logUsedTokens(answer, message, ++functionRanCounter);
 
-            generatedMessage = answer.choices[0].message;
+            generatedMessage = answer.data.choices[0].message;
             if (!generatedMessage) throw new Error("Empty message received");
 
             // handle function calls
@@ -262,9 +262,9 @@ async function executeFromQueue(channel: string) {
 
         channelQueue.stopTyping();
 
-        const answerContent = answer.choices[0].message?.content;
+        const answerContent = answer.data.choices[0].message?.content;
 
-        if (answerContent === null || answerContent === "") {
+        if (answerContent === undefined || answerContent === "") {
             if (message instanceof DiscordApi.Message) message.react("😶").catch(() => {/* GRACEFAIL: It's okay if the bot won't reply */});
         }
         else {
@@ -285,19 +285,10 @@
             }
         }
     } catch (e) {
-        let errorText: string = "";
         channelQueue.stopTyping();
-        if (typeof e !== "object") {
-            console.error(`Error ocurred while handling chat completion request (${typeof e}):`);
-            console.error(e);
-        }
-        else if (e === null) {
-            console.error ("Error ocurred while handling chat completion request: null");
-        }
-        else {
-            console.error(`Error ocurred while handling chat completion request (${e.constructor.name}):`);
-            if (e instanceof OpenAIError) {
-                console.error(JSON.stringify(e));
+        console.error(`Error ocurred while handling chat completion request (${(e as object).constructor.name}):`);
+        if (Axios.isAxiosError(e)) {
+            console.error(JSON.stringify(e.response?.data));
         }
         else {
             console.error(e);
@@ -307,16 +298,17 @@ async function executeFromQueue(channel: string) {
            console.error(OpenAImessages);
        }
 
+        let errorText = "\n";
         if (e instanceof Error) {
-            errorText = e.message;
+            errorText += e.message;
         }
         else errorText = "";
 
-        if (e instanceof OpenAIError && e.code?.match(/^5..$/) && channelQueue.tries < 3) {
+        if (Axios.isAxiosError(e) && e.code?.match(/^5..$/) && channelQueue.tries < 3) {
            channelQueue.tries++;
            await new Promise(r => setTimeout(r, 2000)); // pause for 2 seconds before retrying
            return executeFromQueue(channel);
        }
-        }
+
        requestReply(
            message,
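
The error-handling rewrite above follows from the same SDK change: openai@4 throws its own APIError (imported as OpenAIError on the left side), while openai@3 rejects with axios errors, which is why the right side imports Axios and reads e.response. A rough, illustrative sketch of the two failure shapes; the helper below is hypothetical and not this repo's retry logic.

import Axios from "axios";

// openai@3.x: the rejection is an AxiosError; HTTP status and body live on e.response.
export function describeV3Error(e: unknown): string {
    if (Axios.isAxiosError(e)) {
        return `HTTP ${e.response?.status}: ${JSON.stringify(e.response?.data)}`;
    }
    return e instanceof Error ? e.message : String(e);
}

// openai@4.x: the SDK throws its own error class instead.
//
//   import { APIError } from "openai";
//   if (e instanceof APIError) {
//       return `HTTP ${e.status}: ${e.message}`;
//   }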


@@ -1,4 +1,4 @@
-import { ChatCompletionCreateParams, ChatCompletionMessage, ChatCompletionMessageParam } from "openai/resources/chat";
+import { ChatCompletionFunctions, ChatCompletionRequestMessage, ChatCompletionRequestMessageFunctionCall } from "openai";
 import { config } from "./index";
 
@@ -13,9 +13,6 @@ type OpenAIFunctionRequestData<T extends nameTypeMap> = {
     [name in keyof T]: T[name];
 };
 
-type ChatCompletionFunctions = ChatCompletionCreateParams.Function;
-type ChatCompletionFunctionCall = ChatCompletionMessage.FunctionCall;
-
 /**
  * Represents the function that can be ran by the OpenAI model
  */
@@ -64,7 +61,7 @@ export default class FunctionManager {
         return rvalue;
     }
 
-    public handleFunction(request: ChatCompletionFunctionCall): ChatCompletionMessageParam {
+    public handleFunction(request: ChatCompletionRequestMessageFunctionCall): ChatCompletionRequestMessage {
         // eslint-disable-next-line @typescript-eslint/no-explicit-any
         let parsedArguments: any;
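
The changes in this file are type-only: openai@3 exports ChatCompletionFunctions and ChatCompletionRequestMessageFunctionCall at the top level, while openai@4 nests the equivalent shapes under ChatCompletionCreateParams and ChatCompletionMessage, hence the local aliases the left side defined. For reference, a sketch of the shapes behind those names using the v3 spellings; the function name and values are made up, not taken from this repo.

import { ChatCompletionFunctions, ChatCompletionRequestMessage } from "openai";

// What gets advertised to the model as a callable function:
export const exampleFunction: ChatCompletionFunctions = {
    name: "get_weather", // placeholder, not a function from this repo
    description: "Look up the weather for a city",
    parameters: {
        type: "object",
        properties: { city: { type: "string" } },
        required: ["city"],
    },
};

// What a handler returns so the result can be fed back into the conversation:
export const exampleResult: ChatCompletionRequestMessage = {
    role: "function",
    name: "get_weather",
    content: JSON.stringify({ city: "Berlin", temperatureC: 21 }),
};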


@@ -1,5 +1,5 @@
 import DiscordApi from "discord.js";
-import OpenAIApi from "openai";
+import { Configuration as OpenAIApiConfiguration, OpenAIApi } from "openai";
 import { PrismaClient } from "@prisma/client";
 import Typescript from "typescript";
 import fs from "node:fs";
@@ -43,9 +43,9 @@ function getConfig() {
 
 export const config: IConfigRequired = getConfig();
 
-export const openai = new OpenAIApi({
+export const openai = new OpenAIApi(new OpenAIApiConfiguration({
     apiKey: config.tokens.OpenAI
-});
+}));
 
 export const database = new PrismaClient();


@@ -28,11 +28,11 @@ export default class Moderation {
             }
         }
 
-        const answer = await openai.moderations.create({
+        const answer = await openai.createModeration({
             input: await formatRequestOrResponse(message),
         });
-        const flagged = answer.results[0].flagged;
+        const flagged = answer.data.results[0].flagged;
         this.cache.set(message.id, flagged);
 
         // FIXME: These next 7 lines does not belong there and should be refactored out.
         if (flagged) if (message instanceof Message) {
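
The same response-envelope difference applies to the moderation endpoint: openai@3's createModeration resolves to an axios response (answer.data.results), while openai@4's moderations.create returns the body directly (answer.results). A minimal sketch of the v3-style call, assuming the client setup shown in the index.ts diff above; this is not the repo's Moderation class.

import { Configuration, OpenAIApi } from "openai";

export async function isFlagged(apiKey: string, input: string): Promise<boolean> {
    const openai = new OpenAIApi(new Configuration({ apiKey }));
    const answer = await openai.createModeration({ input });
    // openai@4.x equivalent: (await openai.moderations.create({ input })).results[0].flagged
    return answer.data.results[0].flagged;
}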


@@ -1,4 +1,4 @@
-import { ChatCompletionMessageParam as OpenAIMessage } from "openai/resources/chat";
+import { ChatCompletionRequestMessage as OpenAIMessage } from "openai";
 import { Collection, Message as DiscordMessage, InteractionResponse } from "discord.js";
 import FoldToAscii from "fold-to-ascii";
 