⚡ (openai) Add new models and remove tiktoken
Instead of computing the total token count with tiktoken, we now simply retry the request after trimming the first message.
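In practice the retry loop could look roughly like the TypeScript sketch below. This is not the actual Typebot code: the function name and error handling are illustrative, it calls the Chat Completions REST endpoint directly, and it assumes the API signals an over-long prompt with the context_length_exceeded error code.

// Minimal sketch of the retry-after-trim approach described above.
type ChatMessage = {
  role: 'system' | 'user' | 'assistant'
  content: string
  name?: string
}

const createChatCompletion = async (
  apiKey: string,
  model: string,
  messages: ChatMessage[]
): Promise<string> => {
  if (messages.length === 0) throw new Error('No messages left to send')
  const response = await fetch('https://api.openai.com/v1/chat/completions', {
    method: 'POST',
    headers: {
      'Content-Type': 'application/json',
      Authorization: `Bearer ${apiKey}`,
    },
    body: JSON.stringify({ model, messages }),
  })
  if (!response.ok) {
    const body = await response.json().catch(() => undefined)
    // The API reports an over-long prompt with this error code;
    // drop the oldest message and retry instead of counting tokens upfront.
    if (body?.error?.code === 'context_length_exceeded')
      return createChatCompletion(apiKey, model, messages.slice(1))
    throw new Error(`OpenAI request failed with status ${response.status}`)
  }
  const data = await response.json()
  return data.choices[0].message.content
}

Trimming from the front keeps the most recent turns intact, at the cost of an extra round trip whenever the prompt turns out to be too long.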
@@ -1066,13 +1066,28 @@
"additionalProperties": false
},
"min": {
"type": "number"
"anyOf": [
{
"type": "number"
},
{}
]
},
"max": {
"type": "number"
"anyOf": [
{
"type": "number"
},
{}
]
},
"step": {
"type": "number"
"anyOf": [
{
"type": "number"
},
{}
]
}
},
"required": [
@@ -1966,6 +1981,7 @@
"Yesterday",
"Tomorrow",
"Random ID",
"Moment of the day",
"Map item with same index"
]
},
@@ -2833,12 +2849,17 @@
"model": {
"type": "string",
"enum": [
"gpt-4",
"gpt-4-0314",
"gpt-4-32k",
"gpt-4-32k-0314",
"gpt-3.5-turbo",
"gpt-3.5-turbo-0301"
"gpt-3.5-turbo-0613",
"gpt-3.5-turbo-16k",
"gpt-3.5-turbo-16k-0613",
"gpt-3.5-turbo-0301",
"gpt-4",
"gpt-4-0613",
"gpt-4-32k",
"gpt-4-32k-0613",
"gpt-4-32k-0314",
"gpt-4-0314"
]
},
"messages": {
@@ -2861,6 +2882,9 @@
},
"content": {
"type": "string"
},
"name": {
"type": "string"
}
},
"required": [