@@ -107,6 +107,8 @@ export const createChatCompletionOpenAI = async (
     messages,
     model: options.model,
     temperature,
+    baseUrl: options.baseUrl,
+    apiVersion: options.apiVersion,
   })
   if (!response)
     return {
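Note on the hunk above: the caller now forwards the block's baseUrl and apiVersion options into executeChatCompletionOpenAIRequest. A minimal sketch of what that call site could look like in full, assuming the { response, logs } return shape defined in the next file; everything outside the context lines shown in the hunk is an assumption:

  const { response, logs } = await executeChatCompletionOpenAIRequest({
    apiKey, // assumed to be in scope in the real caller
    messages,
    model: options.model,
    temperature,
    baseUrl: options.baseUrl, // undefined falls back to the default OpenAI endpoint
    apiVersion: options.apiVersion, // only relevant for Azure-style deployments
  })
  if (!response)
    return {
      // error/log handling continues here in the real file (not part of the hunk)
    }
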
@@ -1,24 +1,30 @@
+import { isNotEmpty } from '@typebot.io/lib/utils'
 import { ChatReply } from '@typebot.io/schemas'
-import got, { HTTPError } from 'got'
-import type {
-  CreateChatCompletionRequest,
-  CreateChatCompletionResponse,
-} from 'openai'
-
-const createChatEndpoint = 'https://api.openai.com/v1/chat/completions'
+import { OpenAIBlock } from '@typebot.io/schemas/features/blocks/integrations/openai'
+import { HTTPError } from 'got'
+import {
+  Configuration,
+  OpenAIApi,
+  type CreateChatCompletionRequest,
+  type CreateChatCompletionResponse,
+  ResponseTypes,
+} from 'openai-edge'
+
 type Props = Pick<CreateChatCompletionRequest, 'messages' | 'model'> & {
   apiKey: string
   temperature: number | undefined
   currentLogs?: ChatReply['logs']
   isRetrying?: boolean
-}
+} & Pick<OpenAIBlock['options'], 'apiVersion' | 'baseUrl'>
 
 export const executeChatCompletionOpenAIRequest = async ({
   apiKey,
   model,
   messages,
   temperature,
+  baseUrl,
+  apiVersion,
   isRetrying,
   currentLogs = [],
 }: Props): Promise<{
   response?: CreateChatCompletionResponse
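The import changes above replace the hand-rolled got call (and the hard-coded createChatEndpoint constant) with the openai-edge client. As a rough, self-contained sketch of the call pattern those imports support, with placeholder key and model values:

  import { Configuration, OpenAIApi, type ResponseTypes } from 'openai-edge'

  // openai-edge is fetch-based: createChatCompletion resolves to a standard
  // Response that still has to be parsed with .json().
  const openai = new OpenAIApi(new Configuration({ apiKey: 'sk-...' })) // placeholder key

  const response = await openai.createChatCompletion({
    model: 'gpt-3.5-turbo', // placeholder model
    messages: [{ role: 'user', content: 'Hello' }],
  })
  const completion = (await response.json()) as ResponseTypes['createChatCompletion']
  console.log(completion.choices[0].message?.content)
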
@@ -27,22 +33,40 @@ export const executeChatCompletionOpenAIRequest = async ({
   const logs: ChatReply['logs'] = currentLogs
   if (messages.length === 0) return { logs }
   try {
-    const response = await got
-      .post(createChatEndpoint, {
+    const config = new Configuration({
+      apiKey,
+      basePath: baseUrl,
+      baseOptions: {
         headers: {
-          Authorization: `Bearer ${apiKey}`,
+          'api-key': apiKey,
         },
-        json: {
-          model,
-          messages,
-          temperature,
-        } satisfies CreateChatCompletionRequest,
-      })
-      .json<CreateChatCompletionResponse>()
-    return { response, logs }
+      },
+      defaultQueryParams: isNotEmpty(apiVersion)
+        ? new URLSearchParams({
+            'api-version': apiVersion,
+          })
+        : undefined,
+    })
+
+    const openai = new OpenAIApi(config)
+
+    const response = await openai.createChatCompletion({
+      model,
+      messages,
+      temperature,
+    })
+
+    const completion =
+      (await response.json()) as ResponseTypes['createChatCompletion']
+    return { response: completion, logs }
   } catch (error) {
     if (error instanceof HTTPError) {
-      if (error.response.statusCode === 503) {
+      if (
+        (error.response.statusCode === 503 ||
+          error.response.statusCode === 500 ||
+          error.response.statusCode === 403) &&
+        !isRetrying
+      ) {
         console.log('OpenAI API error - 503, retrying in 3 seconds')
         await new Promise((resolve) => setTimeout(resolve, 3000))
         return executeChatCompletionOpenAIRequest({
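The new Configuration above is what lets the same code talk to api.openai.com and to Azure-style deployments: basePath overrides the host, the api-key header is the credential Azure expects (openai-edge already sends the Bearer header derived from apiKey), and api-version is appended as a query parameter only when the option is set. A sketch of the same configuration pointed at a hypothetical Azure OpenAI deployment; the resource URL, deployment name, and API version below are made-up example values:

  const azureConfig = new Configuration({
    apiKey,
    // Hypothetical Azure resource/deployment; the client appends /chat/completions.
    basePath:
      'https://my-resource.openai.azure.com/openai/deployments/my-deployment',
    baseOptions: {
      headers: { 'api-key': apiKey }, // Azure authenticates with this header
    },
    // Example api-version; Azure rejects requests without one.
    defaultQueryParams: new URLSearchParams({ 'api-version': '2023-03-15-preview' }),
  })
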
@@ -51,6 +75,9 @@ export const executeChatCompletionOpenAIRequest = async ({
           messages,
           temperature,
           currentLogs: logs,
+          baseUrl,
+          apiVersion,
+          isRetrying: true,
         })
       }
       if (error.response.statusCode === 400) {
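The isRetrying: true argument above is what bounds the 503/500/403 branch to a single retry: the function calls itself once with the same parameters plus the flag, and the !isRetrying guard in the condition prevents a second round. The same pattern in isolation, as a generic sketch not tied to this codebase:

  // Retry a flaky async operation exactly once after a short delay.
  const withOneRetry = async <T>(
    run: () => Promise<T>,
    isTransient: (error: unknown) => boolean,
    delayMs = 3000
  ): Promise<T> => {
    try {
      return await run()
    } catch (error) {
      if (!isTransient(error)) throw error
      await new Promise((resolve) => setTimeout(resolve, delayMs))
      return run() // a second failure propagates to the caller
    }
  }
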
@@ -67,6 +94,8 @@ export const executeChatCompletionOpenAIRequest = async ({
           messages: messages.slice(1),
           temperature,
           currentLogs: logs,
+          baseUrl,
+          apiVersion,
         })
       }
       logs.push({
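This hunk covers the second retry path: the 400 branch opened at the end of the previous hunk re-sends the request with messages.slice(1), i.e. the oldest message dropped, which is a common way to recover when a conversation no longer fits the model's context window. A minimal sketch of that trimming strategy on its own (send stands in for the real API call):

  import type { ChatCompletionRequestMessage } from 'openai-edge'

  // Drop messages from the front until the request is accepted or nothing is left.
  const sendWithTrimming = async (
    messages: ChatCompletionRequestMessage[],
    send: (messages: ChatCompletionRequestMessage[]) => Promise<Response>
  ): Promise<Response> => {
    if (messages.length === 0) throw new Error('No messages left to send')
    const response = await send(messages)
    if (response.status === 400) return sendWithTrimming(messages.slice(1), send)
    return response
  }
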
@@ -1,6 +1,7 @@
 import { parseVariableNumber } from '@/features/variables/parseVariableNumber'
 import { Connection } from '@planetscale/database'
 import { decrypt } from '@typebot.io/lib/api/encryption'
+import { isNotEmpty } from '@typebot.io/lib/utils'
 import {
   ChatCompletionOpenAIOptions,
   OpenAICredentials,
@@ -42,6 +43,17 @@ export const getChatCompletionStream =
 
   const config = new Configuration({
     apiKey,
+    basePath: options.baseUrl,
+    baseOptions: {
+      headers: {
+        'api-key': apiKey,
+      },
+    },
+    defaultQueryParams: isNotEmpty(options.apiVersion)
+      ? new URLSearchParams({
+          'api-version': options.apiVersion,
+        })
+      : undefined,
   })
 
   const openai = new OpenAIApi(config)
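The streaming helper gets the same basePath / api-key / api-version treatment, so custom and Azure endpoints work for streamed completions too. What getChatCompletionStream does with the client after this point is not part of the hunk; purely as a sketch of how a streamed chat completion can be read from the openai-edge client constructed above (model and messages are placeholders):

  const response = await openai.createChatCompletion({
    model: 'gpt-3.5-turbo', // placeholder
    messages: [{ role: 'user', content: 'Hello' }], // placeholder
    stream: true,
  })
  if (!response.body) throw new Error('No stream returned')
  const reader = response.body.getReader()
  const decoder = new TextDecoder()
  for (let chunk = await reader.read(); !chunk.done; chunk = await reader.read()) {
    // Each chunk holds one or more "data: {...}" server-sent-event lines to parse.
    console.log(decoder.decode(chunk.value))
  }
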
@@ -3,7 +3,7 @@ import { transformStringVariablesToList } from '@/features/variables/transformVa
 import { byId, isNotEmpty } from '@typebot.io/lib'
 import { Variable, VariableWithValue } from '@typebot.io/schemas'
 import { ChatCompletionOpenAIOptions } from '@typebot.io/schemas/features/blocks/integrations/openai'
-import type { ChatCompletionRequestMessage } from 'openai'
+import type { ChatCompletionRequestMessage } from 'openai-edge'
 
 export const parseChatCompletionMessages =
   (variables: Variable[]) =>
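The final hunk only re-points the ChatCompletionRequestMessage import at openai-edge; the type has the same shape as in the openai package. For reference, a message list for the chat API looks like this (contents are examples):

  import type { ChatCompletionRequestMessage } from 'openai-edge'

  const messages: ChatCompletionRequestMessage[] = [
    { role: 'system', content: 'You are a helpful assistant.' },
    { role: 'user', content: 'Hello!' },
  ]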