import { createAction } from '@typebot.io/forge'
import OpenAI, { ClientOptions } from 'openai'
import { defaultOpenAIOptions } from '../constants'
import { auth } from '../auth'
import { baseOptions } from '../baseOptions'
import { parseChatCompletionOptions } from '../shared/parseChatCompletionOptions'
import { getChatCompletionSetVarIds } from '../shared/getChatCompletionSetVarIds'
import { runChatCompletion } from '../shared/runChatCompletion'
import { runChatCompletionStream } from '../shared/runChatCompletionStream'
import { getChatCompletionStreamVarId } from '../shared/getChatCompletionStreamVarId'

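// "Create chat completion" action of the OpenAI forge block. It delegates the
// actual calls to the shared chat completion runners with OpenAI-specific defaults.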
export const createChatCompletion = createAction({
  name: 'Create chat completion',
  auth,
  baseOptions,
  options: parseChatCompletionOptions({
    defaultModel: defaultOpenAIOptions.model,
    defaultTemperature: defaultOpenAIOptions.temperature,
    modelFetchId: 'fetchModels',
  }),
  getSetVariableIds: getChatCompletionSetVarIds,
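  // Blocks this action can be converted into; the Anthropic variant also
  // remaps the action name to its "Create Chat Message" equivalent.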
  turnableInto: [
    {
      blockType: 'open-router',
    },
    {
      blockType: 'together-ai',
    },
    { blockType: 'mistral' },
    {
      blockType: 'anthropic',
      transform: (options) => ({
        ...options,
        action: 'Create Chat Message',
      }),
    },
  ],
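  // Fetches the models available on the configured endpoint, referenced above
  // via modelFetchId: 'fetchModels'.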
  fetchers: [
    {
      id: 'fetchModels',
      dependencies: ['baseUrl', 'apiVersion'],
      fetch: async ({ credentials, options }) => {
        const baseUrl = options?.baseUrl ?? defaultOpenAIOptions.baseUrl
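        // Besides the standard `apiKey`, the client also sends an `api-key`
        // header and an optional `api-version` query parameter, presumably to
        // support Azure OpenAI-style endpoints reached through a custom baseUrl.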
        const config = {
          apiKey: credentials.apiKey,
          baseURL: baseUrl ?? defaultOpenAIOptions.baseUrl,
          defaultHeaders: {
            'api-key': credentials.apiKey,
          },
          defaultQuery: options?.apiVersion
            ? {
                'api-version': options.apiVersion,
              }
            : undefined,
        } satisfies ClientOptions

        const openai = new OpenAI(config)

        const models = await openai.models.list()

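        // Keep only GPT chat models, newest first.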
        return (
          models.data
            .filter((model) => model.id.includes('gpt'))
            .sort((a, b) => b.created - a.created)
            .map((model) => model.id) ?? []
        )
      },
    },
  ],
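  // Execution: a plain server run plus a streaming variant that writes into
  // the variable returned by getChatCompletionStreamVarId.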
  run: {
    server: (params) =>
      runChatCompletion({
        ...params,
        config: {
          baseUrl: defaultOpenAIOptions.baseUrl,
          defaultModel: defaultOpenAIOptions.model,
        },
      }),
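    // Streaming variant of the same completion call.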
    stream: {
      getStreamVariableId: getChatCompletionStreamVarId,
      run: (params) =>
        runChatCompletionStream({
          ...params,
          config: {
            baseUrl: defaultOpenAIOptions.baseUrl,
            defaultModel: defaultOpenAIOptions.model,
          },
        }),
    },
  },
})