Limit sent chat to 2048 tokens.

This also solves the issue where we would request more tokens
than the model is capable of handling (over 4096).
This commit is contained in:
Wroclaw 2023-03-19 04:15:08 +01:00
parent aafefc3ad0
commit 2a38ae4a95
4 changed files with 30 additions and 12 deletions

6
package-lock.json generated
View file

@ -11,6 +11,7 @@
"dependencies": {
"discord.js": "^14.8.0",
"fold-to-ascii": "^5.0.1",
"gpt-3-encoder": "^1.1.4",
"openai": "^3.2.1"
},
"devDependencies": {
@ -1161,6 +1162,11 @@
"url": "https://github.com/sponsors/sindresorhus"
}
},
"node_modules/gpt-3-encoder": {
"version": "1.1.4",
"resolved": "https://registry.npmjs.org/gpt-3-encoder/-/gpt-3-encoder-1.1.4.tgz",
"integrity": "sha512-fSQRePV+HUAhCn7+7HL7lNIXNm6eaFWFbNLOOGtmSJ0qJycyQvj60OvRlH7mee8xAMjBDNRdMXlMwjAbMTDjkg=="
},
"node_modules/grapheme-splitter": {
"version": "1.0.4",
"resolved": "https://registry.npmjs.org/grapheme-splitter/-/grapheme-splitter-1.0.4.tgz",