@@ -1,167 +1,24 @@
import { option, createAction } from '@typebot.io/forge'
import { createAction } from '@typebot.io/forge'
import OpenAI, { ClientOptions } from 'openai'
import { defaultOpenAIOptions, maxToolCalls } from '../constants'
import { OpenAIStream, ToolCallPayload } from 'ai'
import { parseChatCompletionMessages } from '../helpers/parseChatCompletionMessages'
import { isDefined } from '@typebot.io/lib'
import { defaultOpenAIOptions } from '../constants'
import { auth } from '../auth'
import { baseOptions } from '../baseOptions'
import {
  ChatCompletionMessage,
  ChatCompletionTool,
} from 'openai/resources/chat/completions'
import { parseToolParameters } from '../helpers/parseToolParameters'
import { executeFunction } from '@typebot.io/variables/executeFunction'

const nativeMessageContentSchema = {
  content: option.string.layout({
    inputType: 'textarea',
    placeholder: 'Content',
  }),
}

const systemMessageItemSchema = option
  .object({
    role: option.literal('system'),
  })
  .extend(nativeMessageContentSchema)

const userMessageItemSchema = option
  .object({
    role: option.literal('user'),
  })
  .extend(nativeMessageContentSchema)

const assistantMessageItemSchema = option
  .object({
    role: option.literal('assistant'),
  })
  .extend(nativeMessageContentSchema)

const parameterBase = {
  name: option.string.layout({
    label: 'Name',
    placeholder: 'myVariable',
    withVariableButton: false,
  }),
  description: option.string.layout({
    label: 'Description',
    withVariableButton: false,
  }),
  required: option.boolean.layout({
    label: 'Is required?',
  }),
}

export const toolParametersSchema = option
  .array(
    option.discriminatedUnion('type', [
      option
        .object({
          type: option.literal('string'),
        })
        .extend(parameterBase),
      option
        .object({
          type: option.literal('number'),
        })
        .extend(parameterBase),
      option
        .object({
          type: option.literal('boolean'),
        })
        .extend(parameterBase),
      option
        .object({
          type: option.literal('enum'),
          values: option
            .array(option.string)
            .layout({ itemLabel: 'possible value' }),
        })
        .extend(parameterBase),
    ])
  )
  .layout({
    accordion: 'Parameters',
    itemLabel: 'parameter',
  })

const functionToolItemSchema = option.object({
  type: option.literal('function'),
  name: option.string.layout({
    label: 'Name',
    placeholder: 'myFunctionName',
    withVariableButton: false,
  }),
  description: option.string.layout({
    label: 'Description',
    placeholder: 'A brief description of what this function does.',
    withVariableButton: false,
  }),
  parameters: toolParametersSchema,
  code: option.string.layout({
    inputType: 'code',
    label: 'Code',
    lang: 'javascript',
    moreInfoTooltip:
      'A javascript code snippet that can use the defined parameters. It should return a value.',
    withVariableButton: false,
  }),
})

const dialogueMessageItemSchema = option.object({
  role: option.literal('Dialogue'),
  dialogueVariableId: option.string.layout({
    inputType: 'variableDropdown',
    placeholder: 'Dialogue variable',
  }),
  startsBy: option.enum(['user', 'assistant']).layout({
    label: 'starts by',
    direction: 'row',
    defaultValue: 'user',
  }),
})

export const options = option.object({
  model: option.string.layout({
    placeholder: 'Select a model',
    defaultValue: defaultOpenAIOptions.model,
    fetcher: 'fetchModels',
  }),
  messages: option
    .array(
      option.discriminatedUnion('role', [
        systemMessageItemSchema,
        userMessageItemSchema,
        assistantMessageItemSchema,
        dialogueMessageItemSchema,
      ])
    )
    .layout({ accordion: 'Messages', itemLabel: 'message', isOrdered: true }),
  tools: option
    .array(option.discriminatedUnion('type', [functionToolItemSchema]))
    .layout({ accordion: 'Tools', itemLabel: 'tool' }),
  temperature: option.number.layout({
    accordion: 'Advanced settings',
    label: 'Temperature',
    direction: 'row',
    defaultValue: defaultOpenAIOptions.temperature,
  }),
  responseMapping: option
    .saveResponseArray(['Message content', 'Total tokens'] as const)
    .layout({
      accordion: 'Save response',
    }),
})
import { parseChatCompletionOptions } from '../shared/parseChatCompletionOptions'
import { getChatCompletionSetVarIds } from '../shared/getChatCompletionSetVarIds'
import { runChatCompletion } from '../shared/runChatCompletion'
import { runChatCompletionStream } from '../shared/runChatCompletionStream'
import { getChatCompletionStreamVarId } from '../shared/getChatCompletionStreamVarId'

export const createChatCompletion = createAction({
  name: 'Create chat completion',
  auth,
  baseOptions,
  options,
  getSetVariableIds: (options) =>
    options.responseMapping?.map((res) => res.variableId).filter(isDefined) ??
    [],
  options: parseChatCompletionOptions({
    defaultModel: defaultOpenAIOptions.model,
    defaultTemperature: defaultOpenAIOptions.temperature,
    modelFetchId: 'fetchModels',
  }),
  getSetVariableIds: getChatCompletionSetVarIds,
  fetchers: [
    {
      id: 'fetchModels',
@@ -195,192 +52,24 @@ export const createChatCompletion = createAction({
    },
  ],
  run: {
    server: async ({ credentials: { apiKey }, options, variables }) => {
      const config = {
        apiKey,
        baseURL: options.baseUrl,
        defaultHeaders: {
          'api-key': apiKey,
    server: (params) =>
      runChatCompletion({
        ...params,
        config: {
          baseUrl: defaultOpenAIOptions.baseUrl,
          defaultModel: defaultOpenAIOptions.model,
        },
        defaultQuery: options.apiVersion
          ? {
              'api-version': options.apiVersion,
            }
          : undefined,
      } satisfies ClientOptions

      const openai = new OpenAI(config)

      const tools = options.tools
        ?.filter((t) => t.name && t.parameters)
        .map((t) => ({
          type: 'function',
          function: {
            name: t.name as string,
            description: t.description,
            parameters: parseToolParameters(t.parameters!),
          },
        })) satisfies ChatCompletionTool[] | undefined

      const messages = parseChatCompletionMessages({ options, variables })

      const body = {
        model: options.model ?? defaultOpenAIOptions.model,
        temperature: options.temperature
          ? Number(options.temperature)
          : undefined,
        messages,
        tools: (tools?.length ?? 0) > 0 ? tools : undefined,
      }

      let totalTokens = 0
      let message: ChatCompletionMessage

      for (let i = 0; i < maxToolCalls; i++) {
        const response = await openai.chat.completions.create(body)

        message = response.choices[0].message
        totalTokens += response.usage?.total_tokens || 0

        if (!message.tool_calls) break

        messages.push(message)

        for (const toolCall of message.tool_calls) {
          const name = toolCall.function?.name
          if (!name) continue
          const toolDefinition = options.tools?.find((t) => t.name === name)
          if (!toolDefinition?.code || !toolDefinition.parameters) {
            messages.push({
              tool_call_id: toolCall.id,
              role: 'tool',
              content: 'Function not found',
            })
            continue
          }
          const toolParams = Object.fromEntries(
            toolDefinition.parameters.map(({ name }) => [name, null])
          )
          const toolArgs = toolCall.function?.arguments
            ? JSON.parse(toolCall.function?.arguments)
            : undefined
          if (!toolArgs) continue
          const { output, newVariables } = await executeFunction({
            variables: variables.list(),
            args: { ...toolParams, ...toolArgs },
            body: toolDefinition.code,
          })
          newVariables?.forEach((v) => variables.set(v.id, v.value))

          messages.push({
            tool_call_id: toolCall.id,
            role: 'tool',
            content: output,
          })
        }
      }

      options.responseMapping?.forEach((mapping) => {
        if (!mapping.variableId) return
        if (!mapping.item || mapping.item === 'Message content')
          variables.set(mapping.variableId, message.content)
        if (mapping.item === 'Total tokens')
          variables.set(mapping.variableId, totalTokens)
      })
    },
      }),
    stream: {
      getStreamVariableId: (options) =>
        options.responseMapping?.find(
          (res) => res.item === 'Message content' || !res.item
        )?.variableId,
      run: async ({ credentials: { apiKey }, options, variables }) => {
        const config = {
          apiKey,
          baseURL: options.baseUrl,
          defaultHeaders: {
            'api-key': apiKey,
      getStreamVariableId: getChatCompletionStreamVarId,
      run: (params) =>
        runChatCompletionStream({
          ...params,
          config: {
            baseUrl: defaultOpenAIOptions.baseUrl,
            defaultModel: defaultOpenAIOptions.model,
          },
          defaultQuery: options.apiVersion
            ? {
                'api-version': options.apiVersion,
              }
            : undefined,
        } satisfies ClientOptions

        const openai = new OpenAI(config)

        const tools = options.tools
          ?.filter((t) => t.name && t.parameters)
          .map((t) => ({
            type: 'function',
            function: {
              name: t.name as string,
              description: t.description,
              parameters: parseToolParameters(t.parameters!),
            },
          })) satisfies ChatCompletionTool[] | undefined

        const messages = parseChatCompletionMessages({ options, variables })

        const response = await openai.chat.completions.create({
          model: options.model ?? defaultOpenAIOptions.model,
          temperature: options.temperature
            ? Number(options.temperature)
            : undefined,
          stream: true,
          messages,
          tools: (tools?.length ?? 0) > 0 ? tools : undefined,
        })

        return OpenAIStream(response, {
          experimental_onToolCall: async (
            call: ToolCallPayload,
            appendToolCallMessage
          ) => {
            for (const toolCall of call.tools) {
              const name = toolCall.func?.name
              if (!name) continue
              const toolDefinition = options.tools?.find((t) => t.name === name)
              if (!toolDefinition?.code || !toolDefinition.parameters) {
                messages.push({
                  tool_call_id: toolCall.id,
                  role: 'tool',
                  content: 'Function not found',
                })
                continue
              }

              const { output } = await executeFunction({
                variables: variables.list(),
                args:
                  typeof toolCall.func.arguments === 'string'
                    ? JSON.parse(toolCall.func.arguments)
                    : toolCall.func.arguments,
                body: toolDefinition.code,
              })

              // TO-DO: enable once we're out of edge runtime.
              // newVariables?.forEach((v) => variables.set(v.id, v.value))

              const newMessages = appendToolCallMessage({
                tool_call_id: toolCall.id,
                function_name: toolCall.func.name,
                tool_call_result: output,
              })

              return openai.chat.completions.create({
                messages: [
                  ...messages,
                  ...newMessages,
                ] as OpenAI.Chat.Completions.ChatCompletionMessageParam[],
                model: options.model ?? defaultOpenAIOptions.model,
                stream: true,
                tools,
              })
            }
          },
        })
      },
        }),
    },
  },
})
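
Taken together, this hunk reduces the action to thin wiring around the new shared helpers. As a rough sketch of the reuse this enables, a hypothetical OpenAI-compatible provider block could be assembled from the same pieces; the provider name, constants, and helper text below are invented for illustration, only the wiring pattern is taken from this diff:

import { createAction } from '@typebot.io/forge'
import { auth } from '../auth'
import { baseOptions } from '../baseOptions'
import { parseChatCompletionOptions } from '../shared/parseChatCompletionOptions'
import { getChatCompletionSetVarIds } from '../shared/getChatCompletionSetVarIds'
import { getChatCompletionStreamVarId } from '../shared/getChatCompletionStreamVarId'
import { runChatCompletion } from '../shared/runChatCompletion'
import { runChatCompletionStream } from '../shared/runChatCompletionStream'

// Hypothetical provider constants — not part of this diff.
const myProviderDefaults = {
  baseUrl: 'https://api.my-provider.example/v1',
  model: 'my-model-small',
}

export const createMyProviderChatCompletion = createAction({
  name: 'Create chat completion',
  auth,
  baseOptions,
  options: parseChatCompletionOptions({
    defaultModel: myProviderDefaults.model,
    // No fetcher for this provider: the model field becomes a plain text input.
    modelHelperText: 'See the provider docs for available model names.',
  }),
  getSetVariableIds: getChatCompletionSetVarIds,
  run: {
    server: (params) =>
      runChatCompletion({
        ...params,
        config: {
          baseUrl: myProviderDefaults.baseUrl,
          defaultModel: myProviderDefaults.model,
        },
      }),
    stream: {
      getStreamVariableId: getChatCompletionStreamVarId,
      run: (params) =>
        runChatCompletionStream({
          ...params,
          config: {
            baseUrl: myProviderDefaults.baseUrl,
            defaultModel: myProviderDefaults.model,
          },
        }),
    },
  },
})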
@@ -8,6 +8,7 @@ export const auth = createAuth({
      isRequired: true,
      label: 'API key',
      placeholder: 'sk-...',
      inputType: 'password',
      helperText:
        'You can generate an API key [here](https://platform.openai.com/account/api-keys)',
      withVariableButton: false,
@@ -1,14 +1,19 @@
import { option } from '@typebot.io/forge'
import { defaultOpenAIOptions } from './constants'

export const baseOptions = option.object({
  baseUrl: option.string.layout({
    accordion: 'Customize provider',
    label: 'Base URL',
    defaultValue: defaultOpenAIOptions.baseUrl,
  }),
  apiVersion: option.string.layout({
    accordion: 'Customize provider',
    label: 'API version',
  }),
})
export const baseOptions = option
  .object({
    baseUrl: option.string.layout({
      accordion: 'Customize provider',
      label: 'Base URL',
      defaultValue: defaultOpenAIOptions.baseUrl,
    }),
    apiVersion: option.string.layout({
      accordion: 'Customize provider',
      label: 'API version',
    }),
  })
  .layout({
    isHidden: true,
  })
  .describe('Deprecated, use other dedicated OpenAI compatible blocks instead')
@@ -1,14 +1,13 @@
import type { OpenAI } from 'openai'
import { options as createChatCompletionOption } from '../actions/createChatCompletion'
import { ReadOnlyVariableStore } from '@typebot.io/forge'
import { isNotEmpty } from '@typebot.io/lib'
import { z } from '@typebot.io/forge/zod'
import { ChatCompletionOptions } from '../shared/parseChatCompletionOptions'

export const parseChatCompletionMessages = ({
  options: { messages },
  variables,
}: {
  options: Pick<z.infer<typeof createChatCompletionOption>, 'messages'>
  options: ChatCompletionOptions
  variables: ReadOnlyVariableStore
}): OpenAI.Chat.ChatCompletionMessageParam[] => {
  const parsedMessages = messages
@@ -1,5 +1,5 @@
import type { OpenAI } from 'openai'
import { toolParametersSchema } from '../actions/createChatCompletion'
import { toolParametersSchema } from '../shared/parseChatCompletionOptions'
import { z } from '@typebot.io/forge/zod'

export const parseToolParameters = (
@@ -0,0 +1,5 @@
import { isDefined } from '@typebot.io/lib'
import { ChatCompletionOptions } from './parseChatCompletionOptions'

export const getChatCompletionSetVarIds = (options: ChatCompletionOptions) =>
  options.responseMapping?.map((res) => res.variableId).filter(isDefined) ?? []
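
For context, a minimal sketch of what this helper yields; the mapping entries below are hypothetical:

import { getChatCompletionSetVarIds } from './getChatCompletionSetVarIds'
import { ChatCompletionOptions } from './parseChatCompletionOptions'

// Hypothetical options value — only responseMapping matters here.
const exampleOptions = {
  responseMapping: [
    { item: 'Message content', variableId: 'var-reply' },
    { item: 'Total tokens', variableId: undefined },
  ],
} as ChatCompletionOptions

// Entries without a variableId are dropped by isDefined:
getChatCompletionSetVarIds(exampleOptions) // => ['var-reply']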
@@ -0,0 +1,6 @@
import { ChatCompletionOptions } from './parseChatCompletionOptions'

export const getChatCompletionStreamVarId = (options: ChatCompletionOptions) =>
  options.responseMapping?.find(
    (res) => res.item === 'Message content' || !res.item
  )?.variableId
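
A matching sketch for the stream case, again with hypothetical mapping entries; an entry with no item set also counts as message content:

import { getChatCompletionStreamVarId } from './getChatCompletionStreamVarId'
import { ChatCompletionOptions } from './parseChatCompletionOptions'

const exampleOptions = {
  responseMapping: [
    { item: 'Total tokens', variableId: 'var-tokens' },
    { item: 'Message content', variableId: 'var-reply' },
  ],
} as ChatCompletionOptions

// The first mapping whose item is 'Message content' (or unset) wins:
getChatCompletionStreamVarId(exampleOptions) // => 'var-reply'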
@@ -0,0 +1,164 @@
import { option } from '@typebot.io/forge'
import { z } from '@typebot.io/forge/zod'
import { baseOptions } from '../baseOptions'

const nativeMessageContentSchema = {
  content: option.string.layout({
    inputType: 'textarea',
    placeholder: 'Content',
  }),
}

const systemMessageItemSchema = option
  .object({
    role: option.literal('system'),
  })
  .extend(nativeMessageContentSchema)

const userMessageItemSchema = option
  .object({
    role: option.literal('user'),
  })
  .extend(nativeMessageContentSchema)

const assistantMessageItemSchema = option
  .object({
    role: option.literal('assistant'),
  })
  .extend(nativeMessageContentSchema)

const parameterBase = {
  name: option.string.layout({
    label: 'Name',
    placeholder: 'myVariable',
    withVariableButton: false,
  }),
  description: option.string.layout({
    label: 'Description',
    withVariableButton: false,
  }),
  required: option.boolean.layout({
    label: 'Is required?',
  }),
}

export const toolParametersSchema = option
  .array(
    option.discriminatedUnion('type', [
      option
        .object({
          type: option.literal('string'),
        })
        .extend(parameterBase),
      option
        .object({
          type: option.literal('number'),
        })
        .extend(parameterBase),
      option
        .object({
          type: option.literal('boolean'),
        })
        .extend(parameterBase),
      option
        .object({
          type: option.literal('enum'),
          values: option
            .array(option.string)
            .layout({ itemLabel: 'possible value' }),
        })
        .extend(parameterBase),
    ])
  )
  .layout({
    accordion: 'Parameters',
    itemLabel: 'parameter',
  })

const functionToolItemSchema = option.object({
  type: option.literal('function'),
  name: option.string.layout({
    label: 'Name',
    placeholder: 'myFunctionName',
    withVariableButton: false,
  }),
  description: option.string.layout({
    label: 'Description',
    placeholder: 'A brief description of what this function does.',
    withVariableButton: false,
  }),
  parameters: toolParametersSchema,
  code: option.string.layout({
    inputType: 'code',
    label: 'Code',
    lang: 'javascript',
    moreInfoTooltip:
      'A javascript code snippet that can use the defined parameters. It should return a value.',
    withVariableButton: false,
  }),
})

const dialogueMessageItemSchema = option.object({
  role: option.literal('Dialogue'),
  dialogueVariableId: option.string.layout({
    inputType: 'variableDropdown',
    placeholder: 'Dialogue variable',
  }),
  startsBy: option.enum(['user', 'assistant']).layout({
    label: 'starts by',
    direction: 'row',
    defaultValue: 'user',
  }),
})

type Props = {
  defaultModel?: string
  defaultTemperature?: number
  modelFetchId?: string
  modelHelperText?: string
}

export const parseChatCompletionOptions = ({
  defaultModel,
  defaultTemperature,
  modelFetchId,
  modelHelperText,
}: Props = {}) =>
  option.object({
    model: option.string.layout({
      placeholder: modelFetchId ? 'Select a model' : undefined,
      label: modelFetchId ? undefined : 'Model',
      defaultValue: defaultModel,
      fetcher: modelFetchId,
      helperText: modelHelperText,
    }),
    messages: option
      .array(
        option.discriminatedUnion('role', [
          systemMessageItemSchema,
          userMessageItemSchema,
          assistantMessageItemSchema,
          dialogueMessageItemSchema,
        ])
      )
      .layout({ accordion: 'Messages', itemLabel: 'message', isOrdered: true }),
    tools: option
      .array(option.discriminatedUnion('type', [functionToolItemSchema]))
      .layout({ accordion: 'Tools', itemLabel: 'tool' }),
    temperature: option.number.layout({
      accordion: 'Advanced settings',
      label: 'Temperature',
      direction: 'row',
      defaultValue: defaultTemperature,
    }),
    responseMapping: option
      .saveResponseArray(['Message content', 'Total tokens'] as const)
      .layout({
        accordion: 'Save response',
      }),
  })

export type ChatCompletionOptions = z.infer<
  ReturnType<typeof parseChatCompletionOptions>
> &
  z.infer<typeof baseOptions>
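
To make the inferred type concrete, here is a hand-written value of the rough shape ChatCompletionOptions describes, cast for brevity; every literal below (model name, variable ids, tool code) is a hypothetical example, and the exact optionality of each field comes from the forge option helpers rather than from this sketch:

import { ChatCompletionOptions } from './parseChatCompletionOptions'

const exampleOptions = {
  model: 'gpt-3.5-turbo',
  temperature: 1,
  messages: [
    { role: 'system', content: 'You are a helpful assistant.' },
    { role: 'Dialogue', dialogueVariableId: 'var-history', startsBy: 'user' },
    { role: 'user', content: 'What is the weather in Paris?' },
  ],
  tools: [
    {
      type: 'function',
      name: 'getWeather',
      description: 'Returns the current weather for a given city.',
      parameters: [
        { type: 'string', name: 'city', description: 'City name', required: true },
      ],
      code: 'return city === "Paris" ? "Sunny" : "Unknown"',
    },
  ],
  responseMapping: [{ item: 'Message content', variableId: 'var-reply' }],
  // Fields from the (now hidden, deprecated) baseOptions are part of the type too:
  baseUrl: 'https://api.openai.com/v1',
  apiVersion: undefined,
} as ChatCompletionOptions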
packages/forge/blocks/openai/shared/runChatCompletion.ts (new file, 125 lines)
@@ -0,0 +1,125 @@
import OpenAI, { ClientOptions } from 'openai'
import { parseToolParameters } from '../helpers/parseToolParameters'
import { executeFunction } from '@typebot.io/variables/executeFunction'
import { ChatCompletionTool, ChatCompletionMessage } from 'openai/resources'
import { maxToolCalls } from '../constants'
import { parseChatCompletionMessages } from '../helpers/parseChatCompletionMessages'
import { ChatCompletionOptions } from './parseChatCompletionOptions'
import { LogsStore, VariableStore } from '@typebot.io/forge/types'

type OpenAIConfig = {
  baseUrl: string
  defaultModel?: string
}

type Props = {
  credentials: {
    apiKey?: string
  }
  options: ChatCompletionOptions
  variables: VariableStore
  logs: LogsStore
  config: OpenAIConfig
}

export const runChatCompletion = async ({
  credentials: { apiKey },
  options,
  variables,
  config: openAIConfig,
  logs,
}: Props) => {
  const model = options.model?.trim() ?? openAIConfig.defaultModel
  if (!model) return logs.add('No model provided')
  const config = {
    apiKey,
    baseURL: openAIConfig.baseUrl ?? options.baseUrl,
    defaultHeaders: options.baseUrl
      ? {
          'api-key': apiKey,
        }
      : undefined,
    defaultQuery: options.apiVersion
      ? {
          'api-version': options.apiVersion,
        }
      : undefined,
  } satisfies ClientOptions

  const openai = new OpenAI(config)

  const tools = options.tools
    ?.filter((t) => t.name && t.parameters)
    .map((t) => ({
      type: 'function',
      function: {
        name: t.name as string,
        description: t.description,
        parameters: parseToolParameters(t.parameters!),
      },
    })) satisfies ChatCompletionTool[] | undefined

  const messages = parseChatCompletionMessages({ options, variables })

  const body = {
    model,
    temperature: options.temperature ? Number(options.temperature) : undefined,
    messages,
    tools: (tools?.length ?? 0) > 0 ? tools : undefined,
  }

  let totalTokens = 0
  let message: ChatCompletionMessage

  for (let i = 0; i < maxToolCalls; i++) {
    const response = await openai.chat.completions.create(body)

    message = response.choices[0].message
    totalTokens += response.usage?.total_tokens || 0

    if (!message.tool_calls) break

    messages.push(message)

    for (const toolCall of message.tool_calls) {
      const name = toolCall.function?.name
      if (!name) continue
      const toolDefinition = options.tools?.find((t) => t.name === name)
      if (!toolDefinition?.code || !toolDefinition.parameters) {
        messages.push({
          tool_call_id: toolCall.id,
          role: 'tool',
          content: 'Function not found',
        })
        continue
      }
      const toolParams = Object.fromEntries(
        toolDefinition.parameters.map(({ name }) => [name, null])
      )
      const toolArgs = toolCall.function?.arguments
        ? JSON.parse(toolCall.function?.arguments)
        : undefined
      if (!toolArgs) continue
      const { output, newVariables } = await executeFunction({
        variables: variables.list(),
        args: { ...toolParams, ...toolArgs },
        body: toolDefinition.code,
      })
      newVariables?.forEach((v) => variables.set(v.id, v.value))

      messages.push({
        tool_call_id: toolCall.id,
        role: 'tool',
        content: output,
      })
    }
  }

  options.responseMapping?.forEach((mapping) => {
    if (!mapping.variableId) return
    if (!mapping.item || mapping.item === 'Message content')
      variables.set(mapping.variableId, message.content)
    if (mapping.item === 'Total tokens')
      variables.set(mapping.variableId, totalTokens)
  })
}
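
For illustration, a standalone invocation sketch. The variable and log stores below are bare stubs covering only the members this diff shows being used (list, set, add); the real VariableStore and LogsStore come from the forge runtime, and the model and base URL literals are hypothetical:

import { runChatCompletion } from './runChatCompletion'
import { ChatCompletionOptions } from './parseChatCompletionOptions'
import { LogsStore, VariableStore } from '@typebot.io/forge/types'

const savedVariables = new Map<string, unknown>()

// Bare stub: only the members used above are implemented.
const variables = {
  list: () => [],
  set: (id: string, value: unknown) => void savedVariables.set(id, value),
} as unknown as VariableStore

const logs = {
  add: (message: string) => console.error(message),
} as unknown as LogsStore

const main = async () => {
  await runChatCompletion({
    credentials: { apiKey: process.env.OPENAI_API_KEY },
    options: {
      model: 'gpt-3.5-turbo', // hypothetical model name
      messages: [{ role: 'user', content: 'Hello!' }],
      responseMapping: [{ item: 'Message content', variableId: 'var-reply' }],
    } as ChatCompletionOptions,
    variables,
    logs,
    config: {
      baseUrl: 'https://api.openai.com/v1', // hypothetical base URL
      defaultModel: 'gpt-3.5-turbo',
    },
  })
  // The reply lands in the mapped variable via variables.set:
  console.log(savedVariables.get('var-reply'))
}

main()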
packages/forge/blocks/openai/shared/runChatCompletionStream.ts (new file, 108 lines)
@@ -0,0 +1,108 @@
import { LogsStore, ReadOnlyVariableStore } from '@typebot.io/forge/types'
import { ChatCompletionOptions } from './parseChatCompletionOptions'
import { executeFunction } from '@typebot.io/variables/executeFunction'
import { OpenAIStream, ToolCallPayload } from 'ai'
import OpenAI, { ClientOptions } from 'openai'
import { ChatCompletionTool } from 'openai/resources'
import { parseChatCompletionMessages } from '../helpers/parseChatCompletionMessages'
import { parseToolParameters } from '../helpers/parseToolParameters'

type Props = {
  credentials: { apiKey?: string }
  options: ChatCompletionOptions
  variables: ReadOnlyVariableStore
  config: { baseUrl: string; defaultModel?: string }
}
export const runChatCompletionStream = async ({
  credentials: { apiKey },
  options,
  variables,
  config: openAIConfig,
}: Props) => {
  const model = options.model?.trim() ?? openAIConfig.defaultModel
  if (!model) return
  const config = {
    apiKey,
    baseURL: openAIConfig.baseUrl ?? options.baseUrl,
    defaultHeaders: {
      'api-key': apiKey,
    },
    defaultQuery: options.apiVersion
      ? {
          'api-version': options.apiVersion,
        }
      : undefined,
  } satisfies ClientOptions

  const openai = new OpenAI(config)

  const tools = options.tools
    ?.filter((t) => t.name && t.parameters)
    .map((t) => ({
      type: 'function',
      function: {
        name: t.name as string,
        description: t.description,
        parameters: parseToolParameters(t.parameters!),
      },
    })) satisfies ChatCompletionTool[] | undefined

  const messages = parseChatCompletionMessages({ options, variables })

  const response = await openai.chat.completions.create({
    model,
    temperature: options.temperature ? Number(options.temperature) : undefined,
    stream: true,
    messages,
    tools: (tools?.length ?? 0) > 0 ? tools : undefined,
  })

  return OpenAIStream(response, {
    experimental_onToolCall: async (
      call: ToolCallPayload,
      appendToolCallMessage
    ) => {
      for (const toolCall of call.tools) {
        const name = toolCall.func?.name
        if (!name) continue
        const toolDefinition = options.tools?.find((t) => t.name === name)
        if (!toolDefinition?.code || !toolDefinition.parameters) {
          messages.push({
            tool_call_id: toolCall.id,
            role: 'tool',
            content: 'Function not found',
          })
          continue
        }

        const { output } = await executeFunction({
          variables: variables.list(),
          args:
            typeof toolCall.func.arguments === 'string'
              ? JSON.parse(toolCall.func.arguments)
              : toolCall.func.arguments,
          body: toolDefinition.code,
        })

        // TO-DO: enable once we're out of edge runtime.
        // newVariables?.forEach((v) => variables.set(v.id, v.value))

        const newMessages = appendToolCallMessage({
          tool_call_id: toolCall.id,
          function_name: toolCall.func.name,
          tool_call_result: output,
        })

        return openai.chat.completions.create({
          messages: [
            ...messages,
            ...newMessages,
          ] as OpenAI.Chat.Completions.ChatCompletionMessageParam[],
          model,
          stream: true,
          tools,
        })
      }
    },
  })
}
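
A minimal consumption sketch, assuming the returned value behaves as a web ReadableStream of UTF-8 encoded text chunks (which is what the ai package's OpenAIStream produces). The options, variables, and config values are the same kind of hypothetical stand-ins as above; in the forge runtime the stream is consumed by the platform rather than by hand:

import { runChatCompletionStream } from './runChatCompletionStream'
import { ChatCompletionOptions } from './parseChatCompletionOptions'
import { ReadOnlyVariableStore } from '@typebot.io/forge/types'

const main = async () => {
  const stream = await runChatCompletionStream({
    credentials: { apiKey: process.env.OPENAI_API_KEY },
    options: {
      model: 'gpt-3.5-turbo', // hypothetical model name
      messages: [{ role: 'user', content: 'Write a haiku about chatbots.' }],
    } as ChatCompletionOptions,
    variables: { list: () => [] } as unknown as ReadOnlyVariableStore, // bare stub
    config: { baseUrl: 'https://api.openai.com/v1' }, // hypothetical base URL
  })
  if (!stream) return // no model resolved

  // Read the streamed text chunk by chunk.
  const reader = stream.getReader()
  const decoder = new TextDecoder()
  while (true) {
    const { done, value } = await reader.read()
    if (done) break
    process.stdout.write(decoder.decode(value))
  }
}

main()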