
(openai) Automatically truncate the message sequence when reaching the token limit

Baptiste Arnaud
2023-05-02 13:37:02 -04:00
parent 94735638a6
commit e58016e43a
6 changed files with 73 additions and 18 deletions
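
The commit title describes truncating the message sequence when the model's token limit is reached. Below is a minimal sketch of one way to do this, using the modelLimit map added in this diff; the function name truncateMessages, the drop-oldest strategy, and the character-based token estimate are illustrative assumptions, not the commit's actual implementation.

type ChatMessage = { role: 'system' | 'user' | 'assistant'; content: string }

const modelLimit = {
  'gpt-3.5-turbo': 4096,
  'gpt-3.5-turbo-0301': 4096,
  'gpt-4': 8192,
  'gpt-4-0314': 8192,
  'gpt-4-32k': 32768,
  'gpt-4-32k-0314': 32768,
} as const

// Rough heuristic: ~4 characters per token (assumption; a real
// implementation would use a proper tokenizer).
const estimateTokens = (messages: ChatMessage[]) =>
  Math.ceil(messages.reduce((sum, m) => sum + m.content.length, 0) / 4)

export const truncateMessages = (
  messages: ChatMessage[],
  model: keyof typeof modelLimit
): ChatMessage[] => {
  const limit = modelLimit[model]
  const truncated = [...messages]
  // Drop the oldest message that is not the leading system prompt
  // until the estimate fits under the limit (or nothing is left to drop).
  while (estimateTokens(truncated) > limit && truncated.length > 1) {
    const removeIndex = truncated[0]?.role === 'system' ? 1 : 0
    truncated.splice(removeIndex, 1)
  }
  return truncated
}

Keeping the leading system prompt while dropping the oldest turns is one common strategy; other approaches trim message contents instead of removing whole messages.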


@@ -14,6 +14,15 @@ export const chatCompletionModels = [
'gpt-3.5-turbo-0301',
] as const
export const modelLimit = {
'gpt-3.5-turbo': 4096,
'gpt-3.5-turbo-0301': 4096,
'gpt-4': 8192,
'gpt-4-0314': 8192,
'gpt-4-32k': 32768,
'gpt-4-32k-0314': 32768,
} as const
export const chatCompletionMessageRoles = [
'system',
'user',