⬆️ Upgrade AI SDK (#1641)
This commit is contained in:
28
packages/ai/appendToolResultsToMessages.ts
Normal file
28
packages/ai/appendToolResultsToMessages.ts
Normal file
@ -0,0 +1,28 @@
|
||||
import { CoreMessage, ToolCallPart, ToolResultPart } from 'ai'
|
||||
|
||||
type Props = {
|
||||
messages: CoreMessage[]
|
||||
toolCalls: ToolCallPart[]
|
||||
toolResults: ToolResultPart[]
|
||||
}
|
||||
export const appendToolResultsToMessages = ({
|
||||
messages,
|
||||
toolCalls,
|
||||
toolResults,
|
||||
}: Props): CoreMessage[] => {
|
||||
if (toolCalls.length > 0) {
|
||||
messages.push({
|
||||
role: 'assistant',
|
||||
content: toolCalls,
|
||||
})
|
||||
}
|
||||
|
||||
if (toolResults.length > 0) {
|
||||
messages.push({
|
||||
role: 'tool',
|
||||
content: toolResults,
|
||||
})
|
||||
}
|
||||
|
||||
return messages
|
||||
}
|
14
packages/ai/package.json
Normal file
14
packages/ai/package.json
Normal file
@ -0,0 +1,14 @@
|
||||
{
|
||||
"name": "@typebot.io/ai",
|
||||
"version": "1.0.0",
|
||||
"license": "AGPL-3.0-or-later",
|
||||
"private": true,
|
||||
"dependencies": {
|
||||
"@typebot.io/lib": "workspace:*",
|
||||
"@typebot.io/forge": "workspace:*",
|
||||
"@typebot.io/variables": "workspace:*",
|
||||
"ai": "3.2.22",
|
||||
"ky": "1.2.4",
|
||||
"@typebot.io/tsconfig": "workspace:*"
|
||||
}
|
||||
}
|
113
packages/ai/parseChatCompletionMessages.ts
Normal file
113
packages/ai/parseChatCompletionMessages.ts
Normal file
@ -0,0 +1,113 @@
|
||||
import { CoreAssistantMessage, CoreMessage, CoreUserMessage } from 'ai'
|
||||
import { VariableStore } from '@typebot.io/forge'
|
||||
import { isDefined, isEmpty } from '@typebot.io/lib'
|
||||
import { splitUserTextMessageIntoBlocks } from './splitUserTextMessageIntoBlocks'
|
||||
import { Message, StandardMessage, DialogueMessage } from './types'
|
||||
|
||||
type Props = {
  // Raw messages configured in the block; undefined when none are set.
  messages: Message[] | undefined
  // When true, user text content is split into text/image parts.
  isVisionEnabled: boolean
  // When true, image URLs are fetched and inlined instead of passed as URLs.
  shouldDownloadImages: boolean
  // Variable store used to resolve variable references inside messages.
  variables: VariableStore
}
|
||||
|
||||
export const parseChatCompletionMessages = async ({
|
||||
messages,
|
||||
isVisionEnabled,
|
||||
shouldDownloadImages,
|
||||
variables,
|
||||
}: Props): Promise<CoreMessage[]> => {
|
||||
if (!messages) return []
|
||||
const parsedMessages: CoreMessage[] = (
|
||||
await Promise.all(
|
||||
messages.map(async (message) => {
|
||||
if (!message.role) return
|
||||
|
||||
if (message.role === 'Dialogue')
|
||||
return parseDialogueMessage({
|
||||
message,
|
||||
variables,
|
||||
isVisionEnabled,
|
||||
shouldDownloadImages,
|
||||
})
|
||||
|
||||
return parseStandardMessage({
|
||||
message,
|
||||
variables,
|
||||
isVisionEnabled,
|
||||
shouldDownloadImages,
|
||||
})
|
||||
})
|
||||
)
|
||||
)
|
||||
.flat()
|
||||
.filter(isDefined)
|
||||
|
||||
return parsedMessages
|
||||
}
|
||||
|
||||
const parseDialogueMessage = async ({
|
||||
message,
|
||||
variables,
|
||||
isVisionEnabled,
|
||||
shouldDownloadImages,
|
||||
}: Pick<Props, 'variables' | 'isVisionEnabled' | 'shouldDownloadImages'> & {
|
||||
message: DialogueMessage
|
||||
}) => {
|
||||
if (!message.dialogueVariableId) return
|
||||
const dialogue = variables.get(message.dialogueVariableId) ?? []
|
||||
const dialogueArr = Array.isArray(dialogue) ? dialogue : [dialogue]
|
||||
|
||||
return Promise.all(
|
||||
dialogueArr.map<
|
||||
Promise<CoreUserMessage | CoreAssistantMessage | undefined>
|
||||
>(async (dialogueItem, index) => {
|
||||
if (!dialogueItem) return
|
||||
if (index === 0 && message.startsBy === 'assistant')
|
||||
return { role: 'assistant' as const, content: dialogueItem }
|
||||
if (index % (message.startsBy === 'assistant' ? 1 : 2) === 0) {
|
||||
return {
|
||||
role: 'user' as const,
|
||||
content: isVisionEnabled
|
||||
? await splitUserTextMessageIntoBlocks({
|
||||
input: dialogueItem ?? '',
|
||||
shouldDownloadImages,
|
||||
})
|
||||
: dialogueItem,
|
||||
}
|
||||
}
|
||||
return { role: 'assistant' as const, content: dialogueItem }
|
||||
})
|
||||
)
|
||||
}
|
||||
|
||||
const parseStandardMessage = async ({
|
||||
message,
|
||||
variables,
|
||||
isVisionEnabled,
|
||||
shouldDownloadImages,
|
||||
}: Pick<Props, 'variables' | 'isVisionEnabled' | 'shouldDownloadImages'> & {
|
||||
message: StandardMessage
|
||||
}) => {
|
||||
if (!message.content) return
|
||||
|
||||
const content = variables.parse(message.content)
|
||||
|
||||
if (isEmpty(content)) return
|
||||
|
||||
if (message.role === 'user')
|
||||
return {
|
||||
role: 'user' as const,
|
||||
content: isVisionEnabled
|
||||
? await splitUserTextMessageIntoBlocks({
|
||||
input: content,
|
||||
shouldDownloadImages,
|
||||
})
|
||||
: content,
|
||||
}
|
||||
|
||||
return {
|
||||
role: message.role,
|
||||
content,
|
||||
}
|
||||
}
|
68
packages/ai/parseTools.ts
Normal file
68
packages/ai/parseTools.ts
Normal file
@ -0,0 +1,68 @@
|
||||
import { VariableStore } from '@typebot.io/forge'
|
||||
import { z } from '@typebot.io/forge/zod'
|
||||
import { executeFunction } from '@typebot.io/variables/executeFunction'
|
||||
import { Variable } from '@typebot.io/variables/types'
|
||||
import { CoreTool } from 'ai'
|
||||
import { isNotEmpty } from '@typebot.io/lib'
|
||||
import { Tools } from './schemas'
|
||||
|
||||
export const parseTools = ({
|
||||
tools,
|
||||
variables,
|
||||
}: {
|
||||
tools: Tools
|
||||
variables: VariableStore
|
||||
onNewVariabes?: (newVariables: Variable[]) => void
|
||||
}): Record<string, CoreTool> => {
|
||||
if (!tools?.length) return {}
|
||||
return tools.reduce<Record<string, CoreTool>>((acc, tool) => {
|
||||
if (!tool.code || !tool.name) return acc
|
||||
acc[tool.name] = {
|
||||
description: tool.description,
|
||||
parameters: parseParameters(tool.parameters),
|
||||
execute: async (args) => {
|
||||
const { output, newVariables } = await executeFunction({
|
||||
variables: variables.list(),
|
||||
args,
|
||||
body: tool.code!,
|
||||
})
|
||||
newVariables?.forEach((v) => variables.set(v.id, v.value))
|
||||
return output
|
||||
},
|
||||
} satisfies CoreTool
|
||||
return acc
|
||||
}, {})
|
||||
}
|
||||
|
||||
const parseParameters = (
|
||||
parameters: NonNullable<Tools>[number]['parameters']
|
||||
): z.ZodTypeAny | undefined => {
|
||||
if (!parameters || parameters?.length === 0) return
|
||||
|
||||
const shape: z.ZodRawShape = {}
|
||||
parameters.forEach((param) => {
|
||||
if (!param.name) return
|
||||
switch (param.type) {
|
||||
case 'string':
|
||||
shape[param.name] = z.string()
|
||||
break
|
||||
case 'number':
|
||||
shape[param.name] = z.number()
|
||||
break
|
||||
case 'boolean':
|
||||
shape[param.name] = z.boolean()
|
||||
break
|
||||
case 'enum': {
|
||||
if (!param.values || param.values.length === 0) return
|
||||
shape[param.name] = z.enum(param.values as any)
|
||||
break
|
||||
}
|
||||
}
|
||||
if (isNotEmpty(param.description))
|
||||
shape[param.name] = shape[param.name].describe(param.description)
|
||||
if (param.required === false)
|
||||
shape[param.name] = shape[param.name].optional()
|
||||
})
|
||||
|
||||
return z.object(shape)
|
||||
}
|
11
packages/ai/pumpStreamUntilDone.ts
Normal file
11
packages/ai/pumpStreamUntilDone.ts
Normal file
@ -0,0 +1,11 @@
|
||||
export const pumpStreamUntilDone = async (
|
||||
controller: ReadableStreamDefaultController<Uint8Array>,
|
||||
reader: ReadableStreamDefaultReader
|
||||
): Promise<void> => {
|
||||
const { done, value } = await reader.read()
|
||||
|
||||
if (done) return
|
||||
|
||||
controller.enqueue(value)
|
||||
return pumpStreamUntilDone(controller, reader)
|
||||
}
|
79
packages/ai/schemas.ts
Normal file
79
packages/ai/schemas.ts
Normal file
@ -0,0 +1,79 @@
|
||||
import { option } from '@typebot.io/forge'
|
||||
import { z } from '@typebot.io/forge/zod'
|
||||
|
||||
// Fields shared by every tool parameter definition, regardless of its type.
const parameterBase = {
  name: option.string.layout({
    label: 'Name',
    placeholder: 'myVariable',
    withVariableButton: false,
  }),
  description: option.string.layout({
    label: 'Description',
    withVariableButton: false,
  }),
  required: option.boolean.layout({
    label: 'Is required?',
  }),
}

// Editor schema for a tool's parameter list: typed parameter definitions
// ('string' | 'number' | 'boolean' | 'enum') discriminated by their `type`
// field. The 'enum' variant additionally declares its possible values.
export const toolParametersSchema = option
  .array(
    option.discriminatedUnion('type', [
      option
        .object({
          type: option.literal('string'),
        })
        .extend(parameterBase),
      option
        .object({
          type: option.literal('number'),
        })
        .extend(parameterBase),
      option
        .object({
          type: option.literal('boolean'),
        })
        .extend(parameterBase),
      option
        .object({
          type: option.literal('enum'),
          values: option
            .array(option.string)
            .layout({ itemLabel: 'possible value' }),
        })
        .extend(parameterBase),
    ])
  )
  .layout({
    accordion: 'Parameters',
    itemLabel: 'parameter',
  })

// Editor schema for a single 'function' tool: a named JavaScript snippet
// together with its description and declared parameters.
const functionToolItemSchema = option.object({
  type: option.literal('function'),
  name: option.string.layout({
    label: 'Name',
    placeholder: 'myFunctionName',
    withVariableButton: false,
  }),
  description: option.string.layout({
    label: 'Description',
    placeholder: 'A brief description of what this function does.',
    withVariableButton: false,
  }),
  parameters: toolParametersSchema,
  code: option.string.layout({
    inputType: 'code',
    label: 'Code',
    lang: 'javascript',
    moreInfoTooltip:
      'A javascript code snippet that can use the defined parameters. It should return a value.',
    withVariableButton: false,
  }),
})

// Editor schema for the tools list. 'function' is currently the only tool
// kind in the discriminated union.
export const toolsSchema = option
  .array(option.discriminatedUnion('type', [functionToolItemSchema]))
  .layout({ accordion: 'Tools', itemLabel: 'tool' })

export type Tools = z.infer<typeof toolsSchema>
|
57
packages/ai/splitUserTextMessageIntoBlocks.ts
Normal file
57
packages/ai/splitUserTextMessageIntoBlocks.ts
Normal file
@ -0,0 +1,57 @@
|
||||
import { ImagePart, TextPart, UserContent } from 'ai'
|
||||
import ky, { HTTPError } from 'ky'
|
||||
|
||||
type Props = {
  // Raw user message text, possibly containing standalone image URLs.
  input: string
  // When true, matched image URLs are downloaded and inlined as binary data;
  // otherwise they are forwarded as plain URLs.
  shouldDownloadImages: boolean
}
/**
 * Splits a user text message into text and image parts for vision models.
 *
 * Only URLs that stand alone — at the start/end of the input or separated
 * from surrounding text by blank lines — are considered. Each candidate URL
 * is fetched: if the response's content-type is an image, it becomes an
 * image part; otherwise it is kept as plain text. When the input contains no
 * standalone URL, the raw string is returned unchanged (a valid UserContent).
 */
export const splitUserTextMessageIntoBlocks = async ({
  input,
  shouldDownloadImages,
}: Props): Promise<UserContent> => {
  const urlRegex = /(^|\n\n)(https?:\/\/[^\s]+)(\n\n|$)/g
  const match = input.match(urlRegex)
  if (!match) return input
  let parts: (TextPart | ImagePart)[] = []
  let processedInput = input

  for (const url of match) {
    // Text preceding the URL becomes its own text part.
    const textBeforeUrl = processedInput.slice(0, processedInput.indexOf(url))
    if (textBeforeUrl.trim().length > 0) {
      parts.push({ type: 'text', text: textBeforeUrl })
    }
    // The match still carries the surrounding newlines captured by the regex.
    const cleanUrl = url.trim()

    try {
      const response = await ky.get(cleanUrl)
      if (
        !response.ok ||
        !response.headers.get('content-type')?.startsWith('image/')
      ) {
        // Not an image: keep the URL in the message as plain text.
        parts.push({ type: 'text', text: cleanUrl })
      } else {
        parts.push({
          type: 'image',
          image: shouldDownloadImages
            ? await response.arrayBuffer()
            : url.trim(),
        })
      }
    } catch (err) {
      // NOTE(review): ky throws HTTPError for non-2xx responses, so a URL
      // whose fetch fails is only logged and dropped from the parts entirely
      // — confirm this is intended rather than keeping it as text.
      if (err instanceof HTTPError) {
        console.log(err.response.status, await err.response.text())
      } else {
        console.error(err)
      }
    }
    // Continue scanning the remainder of the input after this URL.
    processedInput = processedInput.slice(
      processedInput.indexOf(url) + url.length
    )
  }

  // Whatever trails the last URL becomes a final text part.
  if (processedInput.trim().length > 0) {
    parts.push({ type: 'text', text: processedInput })
  }

  return parts
}
|
8
packages/ai/tsconfig.json
Normal file
8
packages/ai/tsconfig.json
Normal file
@ -0,0 +1,8 @@
|
||||
{
|
||||
"extends": "@typebot.io/tsconfig/base.json",
|
||||
"include": ["**/*.ts"],
|
||||
"exclude": ["node_modules"],
|
||||
"compilerOptions": {
|
||||
"lib": ["ES2021", "DOM"]
|
||||
}
|
||||
}
|
12
packages/ai/types.ts
Normal file
12
packages/ai/types.ts
Normal file
@ -0,0 +1,12 @@
|
||||
// A message whose content is read from a variable holding a whole dialogue
// (a list of alternating user/assistant texts).
export type DialogueMessage = {
  role: 'Dialogue'
  // Which participant the first dialogue item belongs to.
  startsBy?: 'user' | 'assistant'
  // Id of the variable holding the dialogue items.
  dialogueVariableId?: string
}

// A single message with inline (possibly variable-templated) content.
export type StandardMessage = {
  role: 'user' | 'assistant' | 'system'
  content?: string
}

export type Message = DialogueMessage | StandardMessage
|
@ -128,7 +128,10 @@ export const getMessageStream = async ({
|
||||
state: session.state,
|
||||
currentBlockId: session.state.currentBlockId,
|
||||
})
|
||||
if (newSetVariableHistory.length > 0)
|
||||
if (
|
||||
newSetVariableHistory.length > 0 &&
|
||||
session.state.typebotsQueue[0].resultId
|
||||
)
|
||||
await saveSetVariableHistoryItems(newSetVariableHistory)
|
||||
await updateSession({
|
||||
id: session.id,
|
||||
|
@ -20,7 +20,7 @@
|
||||
"@typebot.io/variables": "workspace:*",
|
||||
"@udecode/plate-common": "30.4.5",
|
||||
"@typebot.io/logic": "workspace:*",
|
||||
"ai": "3.2.1",
|
||||
"ai": "3.2.22",
|
||||
"chrono-node": "2.7.6",
|
||||
"date-fns": "2.30.0",
|
||||
"date-fns-tz": "2.0.0",
|
||||
|
@ -14,6 +14,7 @@
|
||||
"license": "AGPL-3.0-or-later",
|
||||
"dependencies": {
|
||||
"@ark-ui/solid": "3.3.0",
|
||||
"@ai-sdk/ui-utils": "0.0.12",
|
||||
"@stripe/stripe-js": "1.54.1",
|
||||
"@udecode/plate-common": "30.4.5",
|
||||
"dompurify": "3.0.6",
|
||||
@ -25,6 +26,7 @@
|
||||
"devDependencies": {
|
||||
"@babel/preset-typescript": "7.22.5",
|
||||
"@rollup/plugin-babel": "6.0.3",
|
||||
"@rollup/plugin-commonjs": "26.0.1",
|
||||
"@rollup/plugin-node-resolve": "15.1.0",
|
||||
"@rollup/plugin-terser": "0.4.3",
|
||||
"@rollup/plugin-typescript": "11.1.2",
|
||||
|
@ -7,6 +7,7 @@ import tailwindcss from 'tailwindcss'
|
||||
import typescript from '@rollup/plugin-typescript'
|
||||
import { typescriptPaths } from 'rollup-plugin-typescript-paths'
|
||||
import replace from '@rollup/plugin-replace'
|
||||
import commonjs from '@rollup/plugin-commonjs'
|
||||
import fs from 'fs'
|
||||
|
||||
const extensions = ['.ts', '.tsx']
|
||||
@ -27,6 +28,7 @@ const indexConfig = {
|
||||
},
|
||||
plugins: [
|
||||
resolve({ extensions }),
|
||||
commonjs(),
|
||||
babel({
|
||||
babelHelpers: 'bundled',
|
||||
exclude: 'node_modules/**',
|
||||
|
@ -1,5 +1,5 @@
|
||||
import { ClientSideActionContext } from '@/types'
|
||||
import { readDataStream } from '@/utils/ai/readDataStream'
|
||||
import { readDataStream } from '@ai-sdk/ui-utils'
|
||||
import { guessApiHost } from '@/utils/guessApiHost'
|
||||
import { isNotEmpty } from '@typebot.io/lib/utils'
|
||||
import { createUniqueId } from 'solid-js'
|
||||
|
@ -1,80 +0,0 @@
|
||||
import { StreamPartType, parseStreamPart } from './streamParts'
|
||||
|
||||
const NEWLINE = '\n'.charCodeAt(0)
|
||||
|
||||
// concatenates all the chunks into a single Uint8Array
|
||||
function concatChunks(chunks: Uint8Array[], totalLength: number) {
|
||||
const concatenatedChunks = new Uint8Array(totalLength)
|
||||
|
||||
let offset = 0
|
||||
for (const chunk of chunks) {
|
||||
concatenatedChunks.set(chunk, offset)
|
||||
offset += chunk.length
|
||||
}
|
||||
chunks.length = 0
|
||||
|
||||
return concatenatedChunks
|
||||
}
|
||||
|
||||
/**
|
||||
Converts a ReadableStreamDefaultReader into an async generator that yields
|
||||
StreamPart objects.
|
||||
|
||||
@param reader
|
||||
Reader for the stream to read from.
|
||||
@param isAborted
|
||||
Optional function that returns true if the request has been aborted.
|
||||
If the function returns true, the generator will stop reading the stream.
|
||||
If the function is not provided, the generator will not stop reading the stream.
|
||||
*/
|
||||
export async function* readDataStream(
|
||||
reader: ReadableStreamDefaultReader<Uint8Array>,
|
||||
{
|
||||
isAborted,
|
||||
}: {
|
||||
isAborted?: () => boolean
|
||||
} = {}
|
||||
): AsyncGenerator<StreamPartType> {
|
||||
// implementation note: this slightly more complex algorithm is required
|
||||
// to pass the tests in the edge environment.
|
||||
|
||||
const decoder = new TextDecoder()
|
||||
const chunks: Uint8Array[] = []
|
||||
let totalLength = 0
|
||||
|
||||
while (true) {
|
||||
const { value } = await reader.read()
|
||||
|
||||
if (value) {
|
||||
chunks.push(value)
|
||||
totalLength += value.length
|
||||
if (value[value.length - 1] !== NEWLINE) {
|
||||
// if the last character is not a newline, we have not read the whole JSON value
|
||||
continue
|
||||
}
|
||||
}
|
||||
|
||||
if (chunks.length === 0) {
|
||||
break // we have reached the end of the stream
|
||||
}
|
||||
|
||||
const concatenatedChunks = concatChunks(chunks, totalLength)
|
||||
totalLength = 0
|
||||
|
||||
const streamParts = decoder
|
||||
.decode(concatenatedChunks, { stream: true })
|
||||
.split('\n')
|
||||
.filter((line) => line !== '') // splitting leaves an empty string at the end
|
||||
.map(parseStreamPart)
|
||||
|
||||
for (const streamPart of streamParts) {
|
||||
yield streamPart
|
||||
}
|
||||
|
||||
// The request has been aborted, stop reading the stream.
|
||||
if (isAborted?.()) {
|
||||
reader.cancel()
|
||||
break
|
||||
}
|
||||
}
|
||||
}
|
@ -1,377 +0,0 @@
|
||||
import {
|
||||
AssistantMessage,
|
||||
DataMessage,
|
||||
FunctionCall,
|
||||
JSONValue,
|
||||
ToolCall,
|
||||
} from './types'
|
||||
|
||||
type StreamString =
|
||||
`${(typeof StreamStringPrefixes)[keyof typeof StreamStringPrefixes]}:${string}\n`
|
||||
|
||||
export interface StreamPart<CODE extends string, NAME extends string, TYPE> {
|
||||
code: CODE
|
||||
name: NAME
|
||||
parse: (value: JSONValue) => { type: NAME; value: TYPE }
|
||||
}
|
||||
|
||||
const textStreamPart: StreamPart<'0', 'text', string> = {
|
||||
code: '0',
|
||||
name: 'text',
|
||||
parse: (value: JSONValue) => {
|
||||
if (typeof value !== 'string') {
|
||||
throw new Error('"text" parts expect a string value.')
|
||||
}
|
||||
return { type: 'text', value }
|
||||
},
|
||||
}
|
||||
|
||||
const functionCallStreamPart: StreamPart<
|
||||
'1',
|
||||
'function_call',
|
||||
{ function_call: FunctionCall }
|
||||
> = {
|
||||
code: '1',
|
||||
name: 'function_call',
|
||||
parse: (value: JSONValue) => {
|
||||
if (
|
||||
value == null ||
|
||||
typeof value !== 'object' ||
|
||||
!('function_call' in value) ||
|
||||
typeof value.function_call !== 'object' ||
|
||||
value.function_call == null ||
|
||||
!('name' in value.function_call) ||
|
||||
!('arguments' in value.function_call) ||
|
||||
typeof value.function_call.name !== 'string' ||
|
||||
typeof value.function_call.arguments !== 'string'
|
||||
) {
|
||||
throw new Error(
|
||||
'"function_call" parts expect an object with a "function_call" property.'
|
||||
)
|
||||
}
|
||||
|
||||
return {
|
||||
type: 'function_call',
|
||||
value: value as unknown as { function_call: FunctionCall },
|
||||
}
|
||||
},
|
||||
}
|
||||
|
||||
const dataStreamPart: StreamPart<'2', 'data', Array<JSONValue>> = {
|
||||
code: '2',
|
||||
name: 'data',
|
||||
parse: (value: JSONValue) => {
|
||||
if (!Array.isArray(value)) {
|
||||
throw new Error('"data" parts expect an array value.')
|
||||
}
|
||||
|
||||
return { type: 'data', value }
|
||||
},
|
||||
}
|
||||
|
||||
const errorStreamPart: StreamPart<'3', 'error', string> = {
|
||||
code: '3',
|
||||
name: 'error',
|
||||
parse: (value: JSONValue) => {
|
||||
if (typeof value !== 'string') {
|
||||
throw new Error('"error" parts expect a string value.')
|
||||
}
|
||||
return { type: 'error', value }
|
||||
},
|
||||
}
|
||||
|
||||
const assistantMessageStreamPart: StreamPart<
|
||||
'4',
|
||||
'assistant_message',
|
||||
AssistantMessage
|
||||
> = {
|
||||
code: '4',
|
||||
name: 'assistant_message',
|
||||
parse: (value: JSONValue) => {
|
||||
if (
|
||||
value == null ||
|
||||
typeof value !== 'object' ||
|
||||
!('id' in value) ||
|
||||
!('role' in value) ||
|
||||
!('content' in value) ||
|
||||
typeof value.id !== 'string' ||
|
||||
typeof value.role !== 'string' ||
|
||||
value.role !== 'assistant' ||
|
||||
!Array.isArray(value.content) ||
|
||||
!value.content.every(
|
||||
(item) =>
|
||||
item != null &&
|
||||
typeof item === 'object' &&
|
||||
'type' in item &&
|
||||
item.type === 'text' &&
|
||||
'text' in item &&
|
||||
item.text != null &&
|
||||
typeof item.text === 'object' &&
|
||||
'value' in item.text &&
|
||||
typeof item.text.value === 'string'
|
||||
)
|
||||
) {
|
||||
throw new Error(
|
||||
'"assistant_message" parts expect an object with an "id", "role", and "content" property.'
|
||||
)
|
||||
}
|
||||
|
||||
return {
|
||||
type: 'assistant_message',
|
||||
value: value as AssistantMessage,
|
||||
}
|
||||
},
|
||||
}
|
||||
|
||||
const assistantControlDataStreamPart: StreamPart<
|
||||
'5',
|
||||
'assistant_control_data',
|
||||
{
|
||||
threadId: string
|
||||
messageId: string
|
||||
}
|
||||
> = {
|
||||
code: '5',
|
||||
name: 'assistant_control_data',
|
||||
parse: (value: JSONValue) => {
|
||||
if (
|
||||
value == null ||
|
||||
typeof value !== 'object' ||
|
||||
!('threadId' in value) ||
|
||||
!('messageId' in value) ||
|
||||
typeof value.threadId !== 'string' ||
|
||||
typeof value.messageId !== 'string'
|
||||
) {
|
||||
throw new Error(
|
||||
'"assistant_control_data" parts expect an object with a "threadId" and "messageId" property.'
|
||||
)
|
||||
}
|
||||
|
||||
return {
|
||||
type: 'assistant_control_data',
|
||||
value: {
|
||||
threadId: value.threadId,
|
||||
messageId: value.messageId,
|
||||
},
|
||||
}
|
||||
},
|
||||
}
|
||||
|
||||
const dataMessageStreamPart: StreamPart<'6', 'data_message', DataMessage> = {
|
||||
code: '6',
|
||||
name: 'data_message',
|
||||
parse: (value: JSONValue) => {
|
||||
if (
|
||||
value == null ||
|
||||
typeof value !== 'object' ||
|
||||
!('role' in value) ||
|
||||
!('data' in value) ||
|
||||
typeof value.role !== 'string' ||
|
||||
value.role !== 'data'
|
||||
) {
|
||||
throw new Error(
|
||||
'"data_message" parts expect an object with a "role" and "data" property.'
|
||||
)
|
||||
}
|
||||
|
||||
return {
|
||||
type: 'data_message',
|
||||
value: value as DataMessage,
|
||||
}
|
||||
},
|
||||
}
|
||||
|
||||
const toolCallStreamPart: StreamPart<
|
||||
'7',
|
||||
'tool_calls',
|
||||
{ tool_calls: ToolCall[] }
|
||||
> = {
|
||||
code: '7',
|
||||
name: 'tool_calls',
|
||||
parse: (value: JSONValue) => {
|
||||
if (
|
||||
value == null ||
|
||||
typeof value !== 'object' ||
|
||||
!('tool_calls' in value) ||
|
||||
typeof value.tool_calls !== 'object' ||
|
||||
value.tool_calls == null ||
|
||||
!Array.isArray(value.tool_calls) ||
|
||||
value.tool_calls.some(
|
||||
(tc) =>
|
||||
tc == null ||
|
||||
typeof tc !== 'object' ||
|
||||
!('id' in tc) ||
|
||||
typeof tc.id !== 'string' ||
|
||||
!('type' in tc) ||
|
||||
typeof tc.type !== 'string' ||
|
||||
!('function' in tc) ||
|
||||
tc.function == null ||
|
||||
typeof tc.function !== 'object' ||
|
||||
!('arguments' in tc.function) ||
|
||||
typeof tc.function.name !== 'string' ||
|
||||
typeof tc.function.arguments !== 'string'
|
||||
)
|
||||
) {
|
||||
throw new Error(
|
||||
'"tool_calls" parts expect an object with a ToolCallPayload.'
|
||||
)
|
||||
}
|
||||
|
||||
return {
|
||||
type: 'tool_calls',
|
||||
value: value as unknown as { tool_calls: ToolCall[] },
|
||||
}
|
||||
},
|
||||
}
|
||||
|
||||
const messageAnnotationsStreamPart: StreamPart<
|
||||
'8',
|
||||
'message_annotations',
|
||||
Array<JSONValue>
|
||||
> = {
|
||||
code: '8',
|
||||
name: 'message_annotations',
|
||||
parse: (value: JSONValue) => {
|
||||
if (!Array.isArray(value)) {
|
||||
throw new Error('"message_annotations" parts expect an array value.')
|
||||
}
|
||||
|
||||
return { type: 'message_annotations', value }
|
||||
},
|
||||
}
|
||||
|
||||
const streamParts = [
|
||||
textStreamPart,
|
||||
functionCallStreamPart,
|
||||
dataStreamPart,
|
||||
errorStreamPart,
|
||||
assistantMessageStreamPart,
|
||||
assistantControlDataStreamPart,
|
||||
dataMessageStreamPart,
|
||||
toolCallStreamPart,
|
||||
messageAnnotationsStreamPart,
|
||||
] as const
|
||||
|
||||
// union type of all stream parts
|
||||
type StreamParts =
|
||||
| typeof textStreamPart
|
||||
| typeof functionCallStreamPart
|
||||
| typeof dataStreamPart
|
||||
| typeof errorStreamPart
|
||||
| typeof assistantMessageStreamPart
|
||||
| typeof assistantControlDataStreamPart
|
||||
| typeof dataMessageStreamPart
|
||||
| typeof toolCallStreamPart
|
||||
| typeof messageAnnotationsStreamPart
|
||||
/**
|
||||
* Maps the type of a stream part to its value type.
|
||||
*/
|
||||
type StreamPartValueType = {
|
||||
[P in StreamParts as P['name']]: ReturnType<P['parse']>['value']
|
||||
}
|
||||
|
||||
export type StreamPartType =
|
||||
| ReturnType<typeof textStreamPart.parse>
|
||||
| ReturnType<typeof functionCallStreamPart.parse>
|
||||
| ReturnType<typeof dataStreamPart.parse>
|
||||
| ReturnType<typeof errorStreamPart.parse>
|
||||
| ReturnType<typeof assistantMessageStreamPart.parse>
|
||||
| ReturnType<typeof assistantControlDataStreamPart.parse>
|
||||
| ReturnType<typeof dataMessageStreamPart.parse>
|
||||
| ReturnType<typeof toolCallStreamPart.parse>
|
||||
| ReturnType<typeof messageAnnotationsStreamPart.parse>
|
||||
|
||||
export const streamPartsByCode = {
|
||||
[textStreamPart.code]: textStreamPart,
|
||||
[functionCallStreamPart.code]: functionCallStreamPart,
|
||||
[dataStreamPart.code]: dataStreamPart,
|
||||
[errorStreamPart.code]: errorStreamPart,
|
||||
[assistantMessageStreamPart.code]: assistantMessageStreamPart,
|
||||
[assistantControlDataStreamPart.code]: assistantControlDataStreamPart,
|
||||
[dataMessageStreamPart.code]: dataMessageStreamPart,
|
||||
[toolCallStreamPart.code]: toolCallStreamPart,
|
||||
[messageAnnotationsStreamPart.code]: messageAnnotationsStreamPart,
|
||||
} as const
|
||||
|
||||
/**
|
||||
* The map of prefixes for data in the stream
|
||||
*
|
||||
* - 0: Text from the LLM response
|
||||
* - 1: (OpenAI) function_call responses
|
||||
* - 2: custom JSON added by the user using `Data`
|
||||
* - 6: (OpenAI) tool_call responses
|
||||
*
|
||||
* Example:
|
||||
* ```
|
||||
* 0:Vercel
|
||||
* 0:'s
|
||||
* 0: AI
|
||||
* 0: AI
|
||||
* 0: SDK
|
||||
* 0: is great
|
||||
* 0:!
|
||||
* 2: { "someJson": "value" }
|
||||
* 1: {"function_call": {"name": "get_current_weather", "arguments": "{\\n\\"location\\": \\"Charlottesville, Virginia\\",\\n\\"format\\": \\"celsius\\"\\n}"}}
|
||||
* 6: {"tool_call": {"id": "tool_0", "type": "function", "function": {"name": "get_current_weather", "arguments": "{\\n\\"location\\": \\"Charlottesville, Virginia\\",\\n\\"format\\": \\"celsius\\"\\n}"}}}
|
||||
*```
|
||||
*/
|
||||
export const StreamStringPrefixes = {
|
||||
[textStreamPart.name]: textStreamPart.code,
|
||||
[functionCallStreamPart.name]: functionCallStreamPart.code,
|
||||
[dataStreamPart.name]: dataStreamPart.code,
|
||||
[errorStreamPart.name]: errorStreamPart.code,
|
||||
[assistantMessageStreamPart.name]: assistantMessageStreamPart.code,
|
||||
[assistantControlDataStreamPart.name]: assistantControlDataStreamPart.code,
|
||||
[dataMessageStreamPart.name]: dataMessageStreamPart.code,
|
||||
[toolCallStreamPart.name]: toolCallStreamPart.code,
|
||||
[messageAnnotationsStreamPart.name]: messageAnnotationsStreamPart.code,
|
||||
} as const
|
||||
|
||||
export const validCodes = streamParts.map((part) => part.code)
|
||||
|
||||
/**
|
||||
Parses a stream part from a string.
|
||||
|
||||
@param line The string to parse.
|
||||
@returns The parsed stream part.
|
||||
@throws An error if the string cannot be parsed.
|
||||
*/
|
||||
export const parseStreamPart = (line: string): StreamPartType => {
|
||||
const firstSeparatorIndex = line.indexOf(':')
|
||||
|
||||
if (firstSeparatorIndex === -1) {
|
||||
throw new Error('Failed to parse stream string. No separator found.')
|
||||
}
|
||||
|
||||
const prefix = line.slice(0, firstSeparatorIndex)
|
||||
|
||||
if (!validCodes.includes(prefix as keyof typeof streamPartsByCode)) {
|
||||
throw new Error(`Failed to parse stream string. Invalid code ${prefix}.`)
|
||||
}
|
||||
|
||||
const code = prefix as keyof typeof streamPartsByCode
|
||||
|
||||
const textValue = line.slice(firstSeparatorIndex + 1)
|
||||
const jsonValue: JSONValue = JSON.parse(textValue)
|
||||
|
||||
return streamPartsByCode[code].parse(jsonValue)
|
||||
}
|
||||
|
||||
/**
|
||||
Prepends a string with a prefix from the `StreamChunkPrefixes`, JSON-ifies it,
|
||||
and appends a new line.
|
||||
|
||||
It ensures type-safety for the part type and value.
|
||||
*/
|
||||
export function formatStreamPart<T extends keyof StreamPartValueType>(
|
||||
type: T,
|
||||
value: StreamPartValueType[T]
|
||||
): StreamString {
|
||||
const streamPart = streamParts.find((part) => part.name === type)
|
||||
|
||||
if (!streamPart) {
|
||||
throw new Error(`Invalid stream part type: ${type}`)
|
||||
}
|
||||
|
||||
return `${streamPart.code}:${JSON.stringify(value)}\n`
|
||||
}
|
@ -1,355 +0,0 @@
|
||||
/* eslint-disable @typescript-eslint/ban-types */
|
||||
// https://github.com/openai/openai-node/blob/07b3504e1c40fd929f4aae1651b83afc19e3baf8/src/resources/chat/completions.ts#L146-L159
|
||||
export interface FunctionCall {
|
||||
/**
|
||||
* The arguments to call the function with, as generated by the model in JSON
|
||||
* format. Note that the model does not always generate valid JSON, and may
|
||||
* hallucinate parameters not defined by your function schema. Validate the
|
||||
* arguments in your code before calling your function.
|
||||
*/
|
||||
arguments?: string
|
||||
|
||||
/**
|
||||
* The name of the function to call.
|
||||
*/
|
||||
name?: string
|
||||
}
|
||||
|
||||
/**
|
||||
* The tool calls generated by the model, such as function calls.
|
||||
*/
|
||||
export interface ToolCall {
|
||||
// The ID of the tool call.
|
||||
id: string
|
||||
|
||||
// The type of the tool. Currently, only `function` is supported.
|
||||
type: string
|
||||
|
||||
// The function that the model called.
|
||||
function: {
|
||||
// The name of the function.
|
||||
name: string
|
||||
|
||||
// The arguments to call the function with, as generated by the model in JSON
|
||||
arguments: string
|
||||
}
|
||||
}
|
||||
|
||||
/**
|
||||
* Controls which (if any) function is called by the model.
|
||||
* - none means the model will not call a function and instead generates a message.
|
||||
* - auto means the model can pick between generating a message or calling a function.
|
||||
* - Specifying a particular function via {"type: "function", "function": {"name": "my_function"}} forces the model to call that function.
|
||||
* none is the default when no functions are present. auto is the default if functions are present.
|
||||
*/
|
||||
export type ToolChoice =
|
||||
| 'none'
|
||||
| 'auto'
|
||||
| { type: 'function'; function: { name: string } }
|
||||
|
||||
/**
|
||||
* A list of tools the model may call. Currently, only functions are supported as a tool.
|
||||
* Use this to provide a list of functions the model may generate JSON inputs for.
|
||||
*/
|
||||
export interface Tool {
|
||||
type: 'function'
|
||||
function: Function
|
||||
}
|
||||
|
||||
export interface Function {
|
||||
/**
|
||||
* The name of the function to be called. Must be a-z, A-Z, 0-9, or contain
|
||||
* underscores and dashes, with a maximum length of 64.
|
||||
*/
|
||||
name: string
|
||||
|
||||
/**
|
||||
* The parameters the functions accepts, described as a JSON Schema object. See the
|
||||
* [guide](/docs/guides/gpt/function-calling) for examples, and the
|
||||
* [JSON Schema reference](https://json-schema.org/understanding-json-schema/) for
|
||||
* documentation about the format.
|
||||
*
|
||||
* To describe a function that accepts no parameters, provide the value
|
||||
* `{"type": "object", "properties": {}}`.
|
||||
*/
|
||||
parameters: Record<string, unknown>
|
||||
|
||||
/**
|
||||
* A description of what the function does, used by the model to choose when and
|
||||
* how to call the function.
|
||||
*/
|
||||
description?: string
|
||||
}
|
||||
|
||||
export type IdGenerator = () => string
|
||||
|
||||
/**
|
||||
* Shared types between the API and UI packages.
|
||||
*/
|
||||
export interface Message {
|
||||
id: string
|
||||
tool_call_id?: string
|
||||
createdAt?: Date
|
||||
content: string
|
||||
ui?: string | JSX.Element | JSX.Element[] | null | undefined
|
||||
role: 'system' | 'user' | 'assistant' | 'function' | 'data' | 'tool'
|
||||
/**
|
||||
* If the message has a role of `function`, the `name` field is the name of the function.
|
||||
* Otherwise, the name field should not be set.
|
||||
*/
|
||||
name?: string
|
||||
/**
|
||||
* If the assistant role makes a function call, the `function_call` field
|
||||
* contains the function call name and arguments. Otherwise, the field should
|
||||
* not be set. (Deprecated and replaced by tool_calls.)
|
||||
*/
|
||||
function_call?: string | FunctionCall
|
||||
|
||||
data?: JSONValue
|
||||
/**
|
||||
* If the assistant role makes a tool call, the `tool_calls` field contains
|
||||
* the tool call name and arguments. Otherwise, the field should not be set.
|
||||
*/
|
||||
tool_calls?: string | ToolCall[]
|
||||
|
||||
/**
|
||||
* Additional message-specific information added on the server via StreamData
|
||||
*/
|
||||
annotations?: JSONValue[] | undefined
|
||||
}
|
||||
|
||||
export type CreateMessage = Omit<Message, 'id'> & {
|
||||
id?: Message['id']
|
||||
}
|
||||
|
||||
export type ChatRequest = {
|
||||
messages: Message[]
|
||||
options?: RequestOptions
|
||||
// @deprecated
|
||||
functions?: Array<Function>
|
||||
// @deprecated
|
||||
function_call?: FunctionCall
|
||||
data?: Record<string, string>
|
||||
tools?: Array<Tool>
|
||||
tool_choice?: ToolChoice
|
||||
}
|
||||
|
||||
export type FunctionCallHandler = (
|
||||
chatMessages: Message[],
|
||||
functionCall: FunctionCall
|
||||
) => Promise<ChatRequest | void>
|
||||
|
||||
export type ToolCallHandler = (
|
||||
chatMessages: Message[],
|
||||
toolCalls: ToolCall[]
|
||||
) => Promise<ChatRequest | void>
|
||||
|
||||
export type RequestOptions = {
|
||||
headers?: Record<string, string> | Headers
|
||||
body?: object
|
||||
}
|
||||
|
||||
export type ChatRequestOptions = {
|
||||
options?: RequestOptions
|
||||
functions?: Array<Function>
|
||||
function_call?: FunctionCall
|
||||
tools?: Array<Tool>
|
||||
tool_choice?: ToolChoice
|
||||
data?: Record<string, string>
|
||||
}
|
||||
|
||||
export type UseChatOptions = {
|
||||
/**
|
||||
* The API endpoint that accepts a `{ messages: Message[] }` object and returns
|
||||
* a stream of tokens of the AI chat response. Defaults to `/api/chat`.
|
||||
*/
|
||||
api?: string
|
||||
|
||||
/**
|
||||
* A unique identifier for the chat. If not provided, a random one will be
|
||||
* generated. When provided, the `useChat` hook with the same `id` will
|
||||
* have shared states across components.
|
||||
*/
|
||||
id?: string
|
||||
|
||||
/**
|
||||
* Initial messages of the chat. Useful to load an existing chat history.
|
||||
*/
|
||||
initialMessages?: Message[]
|
||||
|
||||
/**
|
||||
* Initial input of the chat.
|
||||
*/
|
||||
initialInput?: string
|
||||
|
||||
/**
|
||||
* Callback function to be called when a function call is received.
|
||||
* If the function returns a `ChatRequest` object, the request will be sent
|
||||
* automatically to the API and will be used to update the chat.
|
||||
*/
|
||||
experimental_onFunctionCall?: FunctionCallHandler
|
||||
|
||||
/**
|
||||
* Callback function to be called when a tool call is received.
|
||||
* If the function returns a `ChatRequest` object, the request will be sent
|
||||
* automatically to the API and will be used to update the chat.
|
||||
*/
|
||||
experimental_onToolCall?: ToolCallHandler
|
||||
|
||||
/**
|
||||
* Callback function to be called when the API response is received.
|
||||
*/
|
||||
onResponse?: (response: Response) => void | Promise<void>
|
||||
|
||||
/**
|
||||
* Callback function to be called when the chat is finished streaming.
|
||||
*/
|
||||
onFinish?: (message: Message) => void
|
||||
|
||||
/**
|
||||
* Callback function to be called when an error is encountered.
|
||||
*/
|
||||
onError?: (error: Error) => void
|
||||
|
||||
/**
|
||||
* A way to provide a function that is going to be used for ids for messages.
|
||||
* If not provided nanoid is used by default.
|
||||
*/
|
||||
generateId?: IdGenerator
|
||||
|
||||
/**
|
||||
* The credentials mode to be used for the fetch request.
|
||||
* Possible values are: 'omit', 'same-origin', 'include'.
|
||||
* Defaults to 'same-origin'.
|
||||
*/
|
||||
credentials?: RequestCredentials
|
||||
|
||||
/**
|
||||
* HTTP headers to be sent with the API request.
|
||||
*/
|
||||
headers?: Record<string, string> | Headers
|
||||
|
||||
/**
|
||||
* Extra body object to be sent with the API request.
|
||||
* @example
|
||||
* Send a `sessionId` to the API along with the messages.
|
||||
* ```js
|
||||
* useChat({
|
||||
* body: {
|
||||
* sessionId: '123',
|
||||
* }
|
||||
* })
|
||||
* ```
|
||||
*/
|
||||
body?: object
|
||||
|
||||
/**
|
||||
* Whether to send extra message fields such as `message.id` and `message.createdAt` to the API.
|
||||
* Defaults to `false`. When set to `true`, the API endpoint might need to
|
||||
* handle the extra fields before forwarding the request to the AI service.
|
||||
*/
|
||||
sendExtraMessageFields?: boolean
|
||||
|
||||
/** Stream mode (default to "stream-data") */
|
||||
streamMode?: 'stream-data' | 'text'
|
||||
}
|
||||
|
||||
export type UseCompletionOptions = {
|
||||
/**
|
||||
* The API endpoint that accepts a `{ prompt: string }` object and returns
|
||||
* a stream of tokens of the AI completion response. Defaults to `/api/completion`.
|
||||
*/
|
||||
api?: string
|
||||
/**
|
||||
* An unique identifier for the chat. If not provided, a random one will be
|
||||
* generated. When provided, the `useChat` hook with the same `id` will
|
||||
* have shared states across components.
|
||||
*/
|
||||
id?: string
|
||||
|
||||
/**
|
||||
* Initial prompt input of the completion.
|
||||
*/
|
||||
initialInput?: string
|
||||
|
||||
/**
|
||||
* Initial completion result. Useful to load an existing history.
|
||||
*/
|
||||
initialCompletion?: string
|
||||
|
||||
/**
|
||||
* Callback function to be called when the API response is received.
|
||||
*/
|
||||
onResponse?: (response: Response) => void | Promise<void>
|
||||
|
||||
/**
|
||||
* Callback function to be called when the completion is finished streaming.
|
||||
*/
|
||||
onFinish?: (prompt: string, completion: string) => void
|
||||
|
||||
/**
|
||||
* Callback function to be called when an error is encountered.
|
||||
*/
|
||||
onError?: (error: Error) => void
|
||||
|
||||
/**
|
||||
* The credentials mode to be used for the fetch request.
|
||||
* Possible values are: 'omit', 'same-origin', 'include'.
|
||||
* Defaults to 'same-origin'.
|
||||
*/
|
||||
credentials?: RequestCredentials
|
||||
|
||||
/**
|
||||
* HTTP headers to be sent with the API request.
|
||||
*/
|
||||
headers?: Record<string, string> | Headers
|
||||
|
||||
/**
|
||||
* Extra body object to be sent with the API request.
|
||||
* @example
|
||||
* Send a `sessionId` to the API along with the prompt.
|
||||
* ```js
|
||||
* useChat({
|
||||
* body: {
|
||||
* sessionId: '123',
|
||||
* }
|
||||
* })
|
||||
* ```
|
||||
*/
|
||||
body?: object
|
||||
|
||||
/** Stream mode (default to "stream-data") */
|
||||
streamMode?: 'stream-data' | 'text'
|
||||
}
|
||||
|
||||
export type JSONValue =
|
||||
| null
|
||||
| string
|
||||
| number
|
||||
| boolean
|
||||
| { [x: string]: JSONValue }
|
||||
| Array<JSONValue>
|
||||
|
||||
export type AssistantMessage = {
|
||||
id: string
|
||||
role: 'assistant'
|
||||
content: Array<{
|
||||
type: 'text'
|
||||
text: {
|
||||
value: string
|
||||
}
|
||||
}>
|
||||
}
|
||||
|
||||
/*
|
||||
* A data message is an application-specific message from the assistant
|
||||
* that should be shown in order with the other messages.
|
||||
*
|
||||
* It can trigger other operations on the frontend, such as annotating
|
||||
* a map.
|
||||
*/
|
||||
export type DataMessage = {
|
||||
id?: string // optional id, implement if needed (e.g. for persistance)
|
||||
role: 'data'
|
||||
data: JSONValue // application-specific data
|
||||
}
|
@ -1,10 +1,20 @@
|
||||
import { createAction, option } from '@typebot.io/forge'
|
||||
import { auth } from '../auth'
|
||||
import { Anthropic } from '@anthropic-ai/sdk'
|
||||
import { AnthropicStream } from 'ai'
|
||||
import { anthropicModels, defaultAnthropicOptions } from '../constants'
|
||||
import { parseChatMessages } from '../helpers/parseChatMessages'
|
||||
import {
|
||||
anthropicLegacyModels,
|
||||
anthropicModelLabels,
|
||||
anthropicModels,
|
||||
defaultAnthropicOptions,
|
||||
maxToolRoundtrips,
|
||||
} from '../constants'
|
||||
import { isDefined } from '@typebot.io/lib'
|
||||
import { createAnthropic } from '@ai-sdk/anthropic'
|
||||
import { generateText } from 'ai'
|
||||
import { runChatCompletionStream } from '../helpers/runChatCompletionStream'
|
||||
import { toolsSchema } from '@typebot.io/ai/schemas'
|
||||
import { parseTools } from '@typebot.io/ai/parseTools'
|
||||
import { parseChatCompletionMessages } from '@typebot.io/ai/parseChatCompletionMessages'
|
||||
import { isModelCompatibleWithVision } from '../helpers/isModelCompatibleWithVision'
|
||||
|
||||
const nativeMessageContentSchema = {
|
||||
content: option.string.layout({
|
||||
@ -40,7 +50,11 @@ const dialogueMessageItemSchema = option.object({
|
||||
|
||||
export const options = option.object({
|
||||
model: option.enum(anthropicModels).layout({
|
||||
defaultValue: defaultAnthropicOptions.model,
|
||||
toLabels: (val) =>
|
||||
val
|
||||
? anthropicModelLabels[val as (typeof anthropicModels)[number]]
|
||||
: undefined,
|
||||
hiddenItems: anthropicLegacyModels,
|
||||
}),
|
||||
messages: option
|
||||
.array(
|
||||
@ -51,6 +65,7 @@ export const options = option.object({
|
||||
])
|
||||
)
|
||||
.layout({ accordion: 'Messages', itemLabel: 'message', isOrdered: true }),
|
||||
tools: toolsSchema,
|
||||
systemMessage: option.string.layout({
|
||||
accordion: 'Advanced Settings',
|
||||
label: 'System prompt',
|
||||
@ -76,8 +91,12 @@ export const options = option.object({
|
||||
}),
|
||||
})
|
||||
|
||||
const transformToChatCompletionOptions = (options: any) => ({
|
||||
const transformToChatCompletionOptions = (
|
||||
options: any,
|
||||
resetModel = false
|
||||
) => ({
|
||||
...options,
|
||||
model: resetModel ? undefined : options.model,
|
||||
action: 'Create chat completion',
|
||||
responseMapping: options.responseMapping?.map((res: any) =>
|
||||
res.item === 'Message Content' ? { ...res, item: 'Message content' } : res
|
||||
@ -91,11 +110,11 @@ export const createChatMessage = createAction({
|
||||
turnableInto: [
|
||||
{
|
||||
blockId: 'mistral',
|
||||
transform: transformToChatCompletionOptions,
|
||||
transform: (opts) => transformToChatCompletionOptions(opts, true),
|
||||
},
|
||||
{
|
||||
blockId: 'openai',
|
||||
transform: transformToChatCompletionOptions,
|
||||
transform: (opts) => transformToChatCompletionOptions(opts, true),
|
||||
},
|
||||
{ blockId: 'open-router', transform: transformToChatCompletionOptions },
|
||||
{ blockId: 'together-ai', transform: transformToChatCompletionOptions },
|
||||
@ -104,72 +123,43 @@ export const createChatMessage = createAction({
|
||||
responseMapping?.map((res) => res.variableId).filter(isDefined) ?? [],
|
||||
run: {
|
||||
server: async ({ credentials: { apiKey }, options, variables, logs }) => {
|
||||
const client = new Anthropic({
|
||||
apiKey: apiKey,
|
||||
const modelName = options.model ?? defaultAnthropicOptions.model
|
||||
const model = createAnthropic({
|
||||
apiKey,
|
||||
})(modelName)
|
||||
|
||||
const { text } = await generateText({
|
||||
model,
|
||||
temperature: options.temperature
|
||||
? Number(options.temperature)
|
||||
: undefined,
|
||||
messages: await parseChatCompletionMessages({
|
||||
messages: options.messages,
|
||||
isVisionEnabled: isModelCompatibleWithVision(modelName),
|
||||
shouldDownloadImages: true,
|
||||
variables,
|
||||
}),
|
||||
tools: parseTools({ tools: options.tools, variables }),
|
||||
maxToolRoundtrips: maxToolRoundtrips,
|
||||
})
|
||||
|
||||
const messages = await parseChatMessages({ options, variables })
|
||||
|
||||
try {
|
||||
const reply = await client.messages.create({
|
||||
messages,
|
||||
model: options.model ?? defaultAnthropicOptions.model,
|
||||
system: options.systemMessage,
|
||||
temperature: options.temperature
|
||||
? Number(options.temperature)
|
||||
: undefined,
|
||||
max_tokens: options.maxTokens
|
||||
? Number(options.maxTokens)
|
||||
: defaultAnthropicOptions.maxTokens,
|
||||
})
|
||||
|
||||
messages.push(reply)
|
||||
|
||||
options.responseMapping?.forEach((mapping) => {
|
||||
if (!mapping.variableId) return
|
||||
|
||||
if (!mapping.item || mapping.item === 'Message Content')
|
||||
variables.set(mapping.variableId, reply.content[0].text)
|
||||
})
|
||||
} catch (error) {
|
||||
if (error instanceof Anthropic.APIError) {
|
||||
logs.add({
|
||||
status: 'error',
|
||||
description: `${error.status} ${error.name}`,
|
||||
details: error.message,
|
||||
})
|
||||
} else {
|
||||
throw error
|
||||
}
|
||||
}
|
||||
options.responseMapping?.forEach((mapping) => {
|
||||
if (!mapping.variableId) return
|
||||
if (!mapping.item || mapping.item === 'Message Content')
|
||||
variables.set(mapping.variableId, text)
|
||||
})
|
||||
},
|
||||
stream: {
|
||||
getStreamVariableId: (options) =>
|
||||
options.responseMapping?.find(
|
||||
(res) => res.item === 'Message Content' || !res.item
|
||||
)?.variableId,
|
||||
run: async ({ credentials: { apiKey }, options, variables }) => {
|
||||
const client = new Anthropic({
|
||||
apiKey: apiKey,
|
||||
})
|
||||
|
||||
const messages = await parseChatMessages({ options, variables })
|
||||
|
||||
const response = await client.messages.create({
|
||||
messages,
|
||||
model: options.model ?? defaultAnthropicOptions.model,
|
||||
system: options.systemMessage,
|
||||
temperature: options.temperature
|
||||
? Number(options.temperature)
|
||||
: undefined,
|
||||
max_tokens: options.maxTokens
|
||||
? Number(options.maxTokens)
|
||||
: defaultAnthropicOptions.maxTokens,
|
||||
stream: true,
|
||||
})
|
||||
|
||||
return { stream: AnthropicStream(response) }
|
||||
},
|
||||
run: async ({ credentials: { apiKey }, options, variables }) =>
|
||||
runChatCompletionStream({
|
||||
credentials: { apiKey },
|
||||
options,
|
||||
variables,
|
||||
}),
|
||||
},
|
||||
},
|
||||
})
|
||||
|
@ -1,4 +1,5 @@
|
||||
export const anthropicModels = [
|
||||
'claude-3-5-sonnet-20240620',
|
||||
'claude-3-opus-20240229',
|
||||
'claude-3-sonnet-20240229',
|
||||
'claude-3-haiku-20240307',
|
||||
@ -7,8 +8,24 @@ export const anthropicModels = [
|
||||
'claude-instant-1.2',
|
||||
] as const
|
||||
|
||||
export const anthropicLegacyModels = [
|
||||
'claude-2.1',
|
||||
'claude-2.0',
|
||||
'claude-instant-1.2',
|
||||
]
|
||||
|
||||
export const anthropicModelLabels = {
|
||||
'claude-3-5-sonnet-20240620': 'Claude 3.5 Sonnet',
|
||||
'claude-3-opus-20240229': 'Claude 3.0 Opus',
|
||||
'claude-3-sonnet-20240229': 'Claude 3.0 Sonnet',
|
||||
'claude-3-haiku-20240307': 'Claude 3.0 Haiku',
|
||||
'claude-2.1': 'Claude 2.1',
|
||||
'claude-2.0': 'Claude 2.0',
|
||||
'claude-instant-1.2': 'Claude Instant 1.2',
|
||||
} satisfies Record<(typeof anthropicModels)[number], string>
|
||||
|
||||
export const defaultAnthropicOptions = {
|
||||
model: anthropicModels[0],
|
||||
model: 'claude-3-opus-20240229',
|
||||
temperature: 1,
|
||||
maxTokens: 1024,
|
||||
} as const
|
||||
@ -21,3 +38,5 @@ export const supportedImageTypes = [
|
||||
'image/gif',
|
||||
'image/webp',
|
||||
] as const
|
||||
|
||||
export const maxToolRoundtrips = 10
|
||||
|
@ -0,0 +1,5 @@
|
||||
import { wildcardMatch } from '@typebot.io/lib/wildcardMatch'
|
||||
import { modelsWithImageUrlSupport } from '../constants'
|
||||
|
||||
export const isModelCompatibleWithVision = (model: string | undefined) =>
|
||||
model ? wildcardMatch(modelsWithImageUrlSupport)(model) : false
|
@ -1,148 +0,0 @@
|
||||
import { Anthropic } from '@anthropic-ai/sdk'
|
||||
import { options as createMessageOptions } from '../actions/createChatMessage'
|
||||
import { VariableStore } from '@typebot.io/forge'
|
||||
import { isDefined, isEmpty } from '@typebot.io/lib'
|
||||
import { z } from '@typebot.io/forge/zod'
|
||||
import ky, { HTTPError } from 'ky'
|
||||
import {
|
||||
defaultAnthropicOptions,
|
||||
modelsWithImageUrlSupport,
|
||||
supportedImageTypes,
|
||||
} from '../constants'
|
||||
import { wildcardMatch } from '@typebot.io/lib/wildcardMatch'
|
||||
|
||||
const isModelCompatibleWithImageUrls = (model: string | undefined) =>
|
||||
model ? wildcardMatch(modelsWithImageUrlSupport)(model) : false
|
||||
|
||||
export const parseChatMessages = async ({
|
||||
options: { messages, model },
|
||||
variables,
|
||||
}: {
|
||||
options: Pick<z.infer<typeof createMessageOptions>, 'messages' | 'model'>
|
||||
variables: VariableStore
|
||||
}): Promise<Anthropic.Messages.MessageParam[]> => {
|
||||
if (!messages) return []
|
||||
const isVisionEnabled = isModelCompatibleWithImageUrls(
|
||||
model ?? defaultAnthropicOptions.model
|
||||
)
|
||||
const parsedMessages = (
|
||||
await Promise.all(
|
||||
messages.map(async (message) => {
|
||||
if (!message.role) return
|
||||
|
||||
if (message.role === 'Dialogue') {
|
||||
if (!message.dialogueVariableId) return
|
||||
const dialogue = variables.get(message.dialogueVariableId) ?? []
|
||||
const dialogueArr = Array.isArray(dialogue) ? dialogue : [dialogue]
|
||||
|
||||
return Promise.all(
|
||||
dialogueArr.map(async (dialogueItem, index) => {
|
||||
if (index === 0 && message.startsBy === 'assistant')
|
||||
return {
|
||||
role: 'assistant',
|
||||
content: dialogueItem,
|
||||
}
|
||||
if (index % (message.startsBy === 'assistant' ? 1 : 2) === 0) {
|
||||
return {
|
||||
role: 'user',
|
||||
content: isVisionEnabled
|
||||
? await splitUserTextMessageIntoBlocks(dialogueItem ?? '')
|
||||
: dialogueItem,
|
||||
}
|
||||
}
|
||||
return {
|
||||
role: 'assistant',
|
||||
content: dialogueItem,
|
||||
}
|
||||
})
|
||||
)
|
||||
}
|
||||
|
||||
if (!message.content) return
|
||||
|
||||
const content = variables.parse(message.content)
|
||||
|
||||
if (isEmpty(content)) return
|
||||
|
||||
if (message.role === 'user')
|
||||
return {
|
||||
role: 'user',
|
||||
content: isVisionEnabled
|
||||
? await splitUserTextMessageIntoBlocks(content)
|
||||
: content,
|
||||
}
|
||||
|
||||
return {
|
||||
role: message.role,
|
||||
content,
|
||||
}
|
||||
})
|
||||
)
|
||||
)
|
||||
.flat()
|
||||
.filter((message) => {
|
||||
return isDefined(message?.role) && isDefined(message.content)
|
||||
}) as Anthropic.Messages.MessageParam[]
|
||||
|
||||
return parsedMessages
|
||||
}
|
||||
|
||||
const splitUserTextMessageIntoBlocks = async (
|
||||
input: string
|
||||
): Promise<
|
||||
| string
|
||||
| (Anthropic.Messages.TextBlockParam | Anthropic.Messages.ImageBlockParam)[]
|
||||
> => {
|
||||
const urlRegex = /(^|\n\n)(https?:\/\/[^\s]+)(\n\n|$)/g
|
||||
const match = input.match(urlRegex)
|
||||
if (!match) return input
|
||||
const parts: (
|
||||
| Anthropic.Messages.TextBlockParam
|
||||
| Anthropic.Messages.ImageBlockParam
|
||||
)[] = []
|
||||
let processedInput = input
|
||||
|
||||
for (const url of match) {
|
||||
const textBeforeUrl = processedInput.slice(0, processedInput.indexOf(url))
|
||||
if (textBeforeUrl.trim().length > 0) {
|
||||
parts.push({ type: 'text', text: textBeforeUrl })
|
||||
}
|
||||
const cleanUrl = url.trim()
|
||||
|
||||
try {
|
||||
const response = await ky.get(cleanUrl)
|
||||
if (
|
||||
!response.ok ||
|
||||
!supportedImageTypes.includes(
|
||||
response.headers.get('content-type') as any
|
||||
)
|
||||
) {
|
||||
parts.push({ type: 'text', text: cleanUrl })
|
||||
} else {
|
||||
parts.push({
|
||||
type: 'image',
|
||||
source: {
|
||||
data: Buffer.from(await response.arrayBuffer()).toString('base64'),
|
||||
type: 'base64',
|
||||
media_type: response.headers.get('content-type') as any,
|
||||
},
|
||||
})
|
||||
}
|
||||
} catch (err) {
|
||||
if (err instanceof HTTPError) {
|
||||
console.log(err.response.status, await err.response.text())
|
||||
} else {
|
||||
console.error(err)
|
||||
}
|
||||
}
|
||||
processedInput = processedInput.slice(
|
||||
processedInput.indexOf(url) + url.length
|
||||
)
|
||||
}
|
||||
|
||||
if (processedInput.trim().length > 0) {
|
||||
parts.push({ type: 'text', text: processedInput })
|
||||
}
|
||||
|
||||
return parts
|
||||
}
|
@ -0,0 +1,110 @@
|
||||
import { createAnthropic } from '@ai-sdk/anthropic'
|
||||
import { defaultAnthropicOptions, maxToolRoundtrips } from '../constants'
|
||||
import { APICallError, streamText, ToolCallPart, ToolResultPart } from 'ai'
|
||||
import { isModelCompatibleWithVision } from './isModelCompatibleWithVision'
|
||||
import { VariableStore } from '@typebot.io/forge'
|
||||
import { ChatCompletionOptions } from '@typebot.io/openai-block/shared/parseChatCompletionOptions'
|
||||
import { parseChatCompletionMessages } from '@typebot.io/ai/parseChatCompletionMessages'
|
||||
import { parseTools } from '@typebot.io/ai/parseTools'
|
||||
import { pumpStreamUntilDone } from '@typebot.io/ai/pumpStreamUntilDone'
|
||||
import { appendToolResultsToMessages } from '@typebot.io/ai/appendToolResultsToMessages'
|
||||
|
||||
type Props = {
|
||||
credentials: { apiKey?: string }
|
||||
options: {
|
||||
model?: string
|
||||
temperature?: ChatCompletionOptions['temperature']
|
||||
messages?: ChatCompletionOptions['messages']
|
||||
tools?: ChatCompletionOptions['tools']
|
||||
}
|
||||
variables: VariableStore
|
||||
}
|
||||
|
||||
export const runChatCompletionStream = async ({
|
||||
credentials: { apiKey },
|
||||
options,
|
||||
variables,
|
||||
}: Props): Promise<{
|
||||
stream?: ReadableStream<any>
|
||||
httpError?: { status: number; message: string }
|
||||
}> => {
|
||||
if (!apiKey) return { httpError: { status: 401, message: 'API key missing' } }
|
||||
const modelName = options.model?.trim() ?? defaultAnthropicOptions.model
|
||||
if (!modelName)
|
||||
return { httpError: { status: 400, message: 'model not found' } }
|
||||
|
||||
const model = createAnthropic({
|
||||
apiKey,
|
||||
})(modelName)
|
||||
|
||||
try {
|
||||
const streamConfig = {
|
||||
model,
|
||||
temperature: options.temperature
|
||||
? Number(options.temperature)
|
||||
: undefined,
|
||||
tools: parseTools({ tools: options.tools, variables }),
|
||||
messages: await parseChatCompletionMessages({
|
||||
messages: options.messages,
|
||||
isVisionEnabled: isModelCompatibleWithVision(modelName),
|
||||
shouldDownloadImages: false,
|
||||
variables,
|
||||
}),
|
||||
}
|
||||
|
||||
const response = await streamText(streamConfig)
|
||||
|
||||
let totalToolCalls = 0
|
||||
let toolCalls: ToolCallPart[] = []
|
||||
let toolResults: ToolResultPart[] = []
|
||||
|
||||
return {
|
||||
stream: new ReadableStream({
|
||||
async start(controller) {
|
||||
const reader = response.toAIStream().getReader()
|
||||
|
||||
await pumpStreamUntilDone(controller, reader)
|
||||
|
||||
toolCalls = await response.toolCalls
|
||||
if (toolCalls.length > 0)
|
||||
toolResults = (await response.toolResults) as ToolResultPart[]
|
||||
|
||||
while (
|
||||
toolCalls &&
|
||||
toolCalls.length > 0 &&
|
||||
totalToolCalls < maxToolRoundtrips
|
||||
) {
|
||||
totalToolCalls += 1
|
||||
const newResponse = await streamText({
|
||||
...streamConfig,
|
||||
messages: appendToolResultsToMessages({
|
||||
messages: streamConfig.messages,
|
||||
toolCalls,
|
||||
toolResults,
|
||||
}),
|
||||
})
|
||||
const reader = newResponse.toAIStream().getReader()
|
||||
await pumpStreamUntilDone(controller, reader)
|
||||
toolCalls = await newResponse.toolCalls
|
||||
if (toolCalls.length > 0)
|
||||
toolResults = (await newResponse.toolResults) as ToolResultPart[]
|
||||
}
|
||||
|
||||
controller.close()
|
||||
},
|
||||
}),
|
||||
}
|
||||
} catch (err) {
|
||||
if (err instanceof APICallError) {
|
||||
return {
|
||||
httpError: { status: err.statusCode ?? 500, message: err.message },
|
||||
}
|
||||
}
|
||||
return {
|
||||
httpError: {
|
||||
status: 500,
|
||||
message: 'An error occured while generating the stream',
|
||||
},
|
||||
}
|
||||
}
|
||||
}
|
@ -15,10 +15,10 @@
|
||||
"typescript": "5.4.5"
|
||||
},
|
||||
"dependencies": {
|
||||
"@anthropic-ai/sdk": "0.20.6",
|
||||
"@ai-sdk/anthropic": "0.0.21",
|
||||
"@ai-sdk/anthropic": "0.0.30",
|
||||
"@typebot.io/openai-block": "workspace:*",
|
||||
"ai": "3.2.1",
|
||||
"ai": "3.2.22",
|
||||
"@typebot.io/ai": "workspace:*",
|
||||
"ky": "1.2.4"
|
||||
}
|
||||
}
|
||||
|
@ -14,6 +14,6 @@
|
||||
"typescript": "5.4.5"
|
||||
},
|
||||
"dependencies": {
|
||||
"ai": "3.2.1"
|
||||
"ai": "3.2.22"
|
||||
}
|
||||
}
|
||||
|
@ -24,7 +24,6 @@ export const convertTextToSpeech = createAction({
|
||||
fetcher: 'fetchModels',
|
||||
label: 'Model',
|
||||
placeholder: 'Select a model',
|
||||
defaultValue: 'eleven_monolingual_v1',
|
||||
}),
|
||||
saveUrlInVariableId: option.string.layout({
|
||||
label: 'Save audio URL in variable',
|
||||
|
@ -5,6 +5,11 @@ import { parseMessages } from '../helpers/parseMessages'
|
||||
import { createMistral } from '@ai-sdk/mistral'
|
||||
import { generateText, streamText } from 'ai'
|
||||
import { fetchModels } from '../helpers/fetchModels'
|
||||
import { toolsSchema } from '@typebot.io/ai/schemas'
|
||||
import { parseTools } from '@typebot.io/ai/parseTools'
|
||||
import { maxToolRoundtrips } from '../constants'
|
||||
import { parseChatCompletionMessages } from '@typebot.io/ai/parseChatCompletionMessages'
|
||||
import { runChatCompletionStream } from '../helpers/runChatCompletionStream'
|
||||
|
||||
const nativeMessageContentSchema = {
|
||||
content: option.string.layout({
|
||||
@ -59,6 +64,7 @@ export const options = option.object({
|
||||
])
|
||||
)
|
||||
.layout({ accordion: 'Messages', itemLabel: 'message', isOrdered: true }),
|
||||
tools: toolsSchema,
|
||||
responseMapping: option.saveResponseArray(['Message content']).layout({
|
||||
accordion: 'Save response',
|
||||
}),
|
||||
@ -71,6 +77,10 @@ export const createChatCompletion = createAction({
|
||||
turnableInto: [
|
||||
{
|
||||
blockId: 'openai',
|
||||
transform: (opts) => ({
|
||||
...opts,
|
||||
model: undefined,
|
||||
}),
|
||||
},
|
||||
{
|
||||
blockId: 'together-ai',
|
||||
@ -110,8 +120,14 @@ export const createChatCompletion = createAction({
|
||||
|
||||
const { text } = await generateText({
|
||||
model,
|
||||
messages: parseMessages({ options, variables }),
|
||||
tools: {},
|
||||
messages: await parseChatCompletionMessages({
|
||||
messages: options.messages,
|
||||
variables,
|
||||
isVisionEnabled: false,
|
||||
shouldDownloadImages: false,
|
||||
}),
|
||||
tools: parseTools({ tools: options.tools, variables }),
|
||||
maxToolRoundtrips: maxToolRoundtrips,
|
||||
})
|
||||
|
||||
options.responseMapping?.forEach((mapping) => {
|
||||
@ -125,19 +141,12 @@ export const createChatCompletion = createAction({
|
||||
options.responseMapping?.find(
|
||||
(res) => res.item === 'Message content' || !res.item
|
||||
)?.variableId,
|
||||
run: async ({ credentials: { apiKey }, options, variables }) => {
|
||||
if (!options.model) return {}
|
||||
const model = createMistral({
|
||||
apiKey,
|
||||
})(options.model)
|
||||
|
||||
const response = await streamText({
|
||||
model,
|
||||
messages: parseMessages({ options, variables }),
|
||||
})
|
||||
|
||||
return { stream: response.toAIStream() }
|
||||
},
|
||||
run: async ({ credentials: { apiKey }, options, variables }) =>
|
||||
runChatCompletionStream({
|
||||
credentials: { apiKey },
|
||||
options,
|
||||
variables,
|
||||
}),
|
||||
},
|
||||
},
|
||||
})
|
||||
|
@ -1 +1,3 @@
|
||||
export const apiBaseUrl = 'https://api.mistral.ai'
|
||||
|
||||
export const maxToolRoundtrips = 10
|
||||
|
105
packages/forge/blocks/mistral/helpers/runChatCompletionStream.ts
Normal file
105
packages/forge/blocks/mistral/helpers/runChatCompletionStream.ts
Normal file
@ -0,0 +1,105 @@
|
||||
import { createMistral } from '@ai-sdk/mistral'
|
||||
import { APICallError, streamText, ToolCallPart, ToolResultPart } from 'ai'
|
||||
import { VariableStore } from '@typebot.io/forge'
|
||||
import { ChatCompletionOptions } from '@typebot.io/openai-block/shared/parseChatCompletionOptions'
|
||||
import { parseChatCompletionMessages } from '@typebot.io/ai/parseChatCompletionMessages'
|
||||
import { parseTools } from '@typebot.io/ai/parseTools'
|
||||
import { maxToolRoundtrips } from '../constants'
|
||||
import { pumpStreamUntilDone } from '@typebot.io/ai/pumpStreamUntilDone'
|
||||
import { appendToolResultsToMessages } from '@typebot.io/ai/appendToolResultsToMessages'
|
||||
|
||||
type Props = {
|
||||
credentials: { apiKey?: string }
|
||||
options: {
|
||||
model?: string
|
||||
temperature?: ChatCompletionOptions['temperature']
|
||||
messages?: ChatCompletionOptions['messages']
|
||||
tools?: ChatCompletionOptions['tools']
|
||||
}
|
||||
variables: VariableStore
|
||||
}
|
||||
|
||||
export const runChatCompletionStream = async ({
|
||||
credentials: { apiKey },
|
||||
options,
|
||||
variables,
|
||||
}: Props): Promise<{
|
||||
stream?: ReadableStream<any>
|
||||
httpError?: { status: number; message: string }
|
||||
}> => {
|
||||
if (!apiKey) return { httpError: { status: 401, message: 'API key missing' } }
|
||||
const modelName = options.model?.trim()
|
||||
if (!modelName)
|
||||
return { httpError: { status: 400, message: 'model not found' } }
|
||||
|
||||
const streamConfig = {
|
||||
model: createMistral({
|
||||
apiKey,
|
||||
})(modelName),
|
||||
messages: await parseChatCompletionMessages({
|
||||
messages: options.messages,
|
||||
isVisionEnabled: false,
|
||||
shouldDownloadImages: false,
|
||||
variables,
|
||||
}),
|
||||
temperature: options.temperature ? Number(options.temperature) : undefined,
|
||||
tools: parseTools({ tools: options.tools, variables }),
|
||||
}
|
||||
|
||||
try {
|
||||
const response = await streamText(streamConfig)
|
||||
|
||||
let totalToolCalls = 0
|
||||
let toolCalls: ToolCallPart[] = []
|
||||
let toolResults: ToolResultPart[] = []
|
||||
|
||||
return {
|
||||
stream: new ReadableStream({
|
||||
async start(controller) {
|
||||
const reader = response.toAIStream().getReader()
|
||||
|
||||
await pumpStreamUntilDone(controller, reader)
|
||||
|
||||
toolCalls = await response.toolCalls
|
||||
if (toolCalls.length > 0)
|
||||
toolResults = (await response.toolResults) as ToolResultPart[]
|
||||
|
||||
while (
|
||||
toolCalls &&
|
||||
toolCalls.length > 0 &&
|
||||
totalToolCalls < maxToolRoundtrips
|
||||
) {
|
||||
totalToolCalls += 1
|
||||
const newResponse = await streamText({
|
||||
...streamConfig,
|
||||
messages: appendToolResultsToMessages({
|
||||
messages: streamConfig.messages,
|
||||
toolCalls,
|
||||
toolResults,
|
||||
}),
|
||||
})
|
||||
const reader = newResponse.toAIStream().getReader()
|
||||
await pumpStreamUntilDone(controller, reader)
|
||||
toolCalls = await newResponse.toolCalls
|
||||
if (toolCalls.length > 0)
|
||||
toolResults = (await newResponse.toolResults) as ToolResultPart[]
|
||||
}
|
||||
|
||||
controller.close()
|
||||
},
|
||||
}),
|
||||
}
|
||||
} catch (err) {
|
||||
if (err instanceof APICallError) {
|
||||
return {
|
||||
httpError: { status: err.statusCode ?? 500, message: err.message },
|
||||
}
|
||||
}
|
||||
return {
|
||||
httpError: {
|
||||
status: 500,
|
||||
message: 'An error occured while generating the stream',
|
||||
},
|
||||
}
|
||||
}
|
||||
}
|
@ -14,9 +14,10 @@
|
||||
"typescript": "5.4.5"
|
||||
},
|
||||
"dependencies": {
|
||||
"@ai-sdk/mistral": "0.0.18",
|
||||
"@ai-sdk/mistral": "0.0.22",
|
||||
"@typebot.io/openai-block": "workspace:*",
|
||||
"ai": "3.2.1",
|
||||
"ky": "1.2.4"
|
||||
"ai": "3.2.22",
|
||||
"ky": "1.2.4",
|
||||
"@typebot.io/ai": "workspace:*"
|
||||
}
|
||||
}
|
||||
|
@ -3,8 +3,8 @@ import { auth } from '../auth'
|
||||
import { parseChatCompletionOptions } from '@typebot.io/openai-block/shared/parseChatCompletionOptions'
|
||||
import { getChatCompletionSetVarIds } from '@typebot.io/openai-block/shared/getChatCompletionSetVarIds'
|
||||
import { getChatCompletionStreamVarId } from '@typebot.io/openai-block/shared/getChatCompletionStreamVarId'
|
||||
import { runChatCompletion } from '@typebot.io/openai-block/shared/runChatCompletion'
|
||||
import { runChatCompletionStream } from '@typebot.io/openai-block/shared/runChatCompletionStream'
|
||||
import { runOpenAIChatCompletion } from '@typebot.io/openai-block/shared/runOpenAIChatCompletion'
|
||||
import { runOpenAIChatCompletionStream } from '@typebot.io/openai-block/shared/runOpenAIChatCompletionStream'
|
||||
import { defaultOpenRouterOptions } from '../constants'
|
||||
import ky from 'ky'
|
||||
import { ModelsResponse } from '../types'
|
||||
@ -24,7 +24,6 @@ export const createChatCompletion = createAction({
|
||||
blockId: 'anthropic',
|
||||
transform: (options) => ({
|
||||
...options,
|
||||
model: undefined,
|
||||
action: 'Create Chat Message',
|
||||
responseMapping: options.responseMapping?.map((res: any) =>
|
||||
res.item === 'Message content'
|
||||
@ -36,6 +35,7 @@ export const createChatCompletion = createAction({
|
||||
],
|
||||
options: parseChatCompletionOptions({
|
||||
modelFetchId: 'fetchModels',
|
||||
defaultTemperature: defaultOpenRouterOptions.temperature,
|
||||
}),
|
||||
getSetVariableIds: getChatCompletionSetVarIds,
|
||||
fetchers: [
|
||||
@ -56,18 +56,19 @@ export const createChatCompletion = createAction({
|
||||
],
|
||||
run: {
|
||||
server: (params) =>
|
||||
runChatCompletion({
|
||||
runOpenAIChatCompletion({
|
||||
...params,
|
||||
config: { baseUrl: defaultOpenRouterOptions.baseUrl },
|
||||
}),
|
||||
stream: {
|
||||
getStreamVariableId: getChatCompletionStreamVarId,
|
||||
run: async (params) => ({
|
||||
stream: await runChatCompletionStream({
|
||||
run: async (params) =>
|
||||
runOpenAIChatCompletionStream({
|
||||
...params,
|
||||
config: { baseUrl: defaultOpenRouterOptions.baseUrl },
|
||||
config: {
|
||||
baseUrl: defaultOpenRouterOptions.baseUrl,
|
||||
},
|
||||
}),
|
||||
}),
|
||||
},
|
||||
},
|
||||
})
|
||||
|
@ -1,3 +1,4 @@
|
||||
export const defaultOpenRouterOptions = {
|
||||
baseUrl: 'https://openrouter.ai/api/v1',
|
||||
temperature: 1,
|
||||
} as const
|
||||
|
@ -11,9 +11,9 @@ import { baseOptions } from '../baseOptions'
|
||||
import { executeFunction } from '@typebot.io/variables/executeFunction'
|
||||
import { readDataStream } from 'ai'
|
||||
import { deprecatedAskAssistantOptions } from '../deprecated'
|
||||
import { OpenAIAssistantStream } from '../helpers/OpenAIAssistantStream'
|
||||
import { AssistantStream } from '../helpers/AssistantStream'
|
||||
import { isModelCompatibleWithVision } from '../helpers/isModelCompatibleWithVision'
|
||||
import { splitUserTextMessageIntoBlocks } from '../helpers/splitUserTextMessageIntoBlocks'
|
||||
import { splitUserTextMessageIntoOpenAIBlocks } from '../helpers/splitUserTextMessageIntoOpenAIBlocks'
|
||||
|
||||
export const askAssistant = createAction({
|
||||
auth,
|
||||
@ -294,19 +294,16 @@ const createAssistantStream = async ({
|
||||
{
|
||||
role: 'user',
|
||||
content: isModelCompatibleWithVision(assistant.model)
|
||||
? await splitUserTextMessageIntoBlocks(message)
|
||||
? await splitUserTextMessageIntoOpenAIBlocks(message)
|
||||
: message,
|
||||
}
|
||||
)
|
||||
return OpenAIAssistantStream(
|
||||
return AssistantStream(
|
||||
{ threadId: currentThreadId, messageId: createdMessage.id },
|
||||
async ({ forwardStream }) => {
|
||||
const runStream = openai.beta.threads.runs.createAndStream(
|
||||
currentThreadId,
|
||||
{
|
||||
assistant_id: assistantId,
|
||||
}
|
||||
)
|
||||
const runStream = openai.beta.threads.runs.stream(currentThreadId, {
|
||||
assistant_id: assistantId,
|
||||
})
|
||||
|
||||
let runResult = await forwardStream(runStream)
|
||||
|
||||
|
@ -4,8 +4,8 @@ import { auth } from '../auth'
|
||||
import { baseOptions } from '../baseOptions'
|
||||
import { parseChatCompletionOptions } from '../shared/parseChatCompletionOptions'
|
||||
import { getChatCompletionSetVarIds } from '../shared/getChatCompletionSetVarIds'
|
||||
import { runChatCompletion } from '../shared/runChatCompletion'
|
||||
import { runChatCompletionStream } from '../shared/runChatCompletionStream'
|
||||
import { runOpenAIChatCompletion } from '../shared/runOpenAIChatCompletion'
|
||||
import { runOpenAIChatCompletionStream } from '../shared/runOpenAIChatCompletionStream'
|
||||
import { getChatCompletionStreamVarId } from '../shared/getChatCompletionStreamVarId'
|
||||
import { fetchGPTModels } from '../helpers/fetchModels'
|
||||
|
||||
@ -14,7 +14,6 @@ export const createChatCompletion = createAction({
|
||||
auth,
|
||||
baseOptions,
|
||||
options: parseChatCompletionOptions({
|
||||
defaultModel: defaultOpenAIOptions.model,
|
||||
defaultTemperature: defaultOpenAIOptions.temperature,
|
||||
modelFetchId: 'fetchModels',
|
||||
}),
|
||||
@ -55,24 +54,25 @@ export const createChatCompletion = createAction({
|
||||
],
|
||||
run: {
|
||||
server: (params) =>
|
||||
runChatCompletion({
|
||||
runOpenAIChatCompletion({
|
||||
...params,
|
||||
config: {
|
||||
baseUrl: defaultOpenAIOptions.baseUrl,
|
||||
defaultModel: defaultOpenAIOptions.model,
|
||||
},
|
||||
compatibility: 'strict',
|
||||
}),
|
||||
stream: {
|
||||
getStreamVariableId: getChatCompletionStreamVarId,
|
||||
run: async (params) => ({
|
||||
stream: await runChatCompletionStream({
|
||||
run: async (params) =>
|
||||
runOpenAIChatCompletionStream({
|
||||
...params,
|
||||
config: {
|
||||
baseUrl: defaultOpenAIOptions.baseUrl,
|
||||
defaultModel: defaultOpenAIOptions.model,
|
||||
},
|
||||
compatibility: 'strict',
|
||||
}),
|
||||
}),
|
||||
},
|
||||
},
|
||||
})
|
||||
|
@ -1,5 +1,7 @@
|
||||
// Copied from https://github.com/vercel/ai/blob/f9db8fd6543202a8404a7a1a40f938d6270b08ef/packages/core/streams/assistant-response.ts
|
||||
// Because the stream is not exported from the package
|
||||
import { AssistantMessage, DataMessage, formatStreamPart } from 'ai'
|
||||
import { AssistantStream } from 'openai/lib/AssistantStream'
|
||||
import { AssistantStream as AssistantStreamType } from 'openai/lib/AssistantStream'
|
||||
import { Run } from 'openai/resources/beta/threads/runs/runs'
|
||||
|
||||
/**
|
||||
@ -44,14 +46,19 @@ Send a data message to the client. You can use this to provide information for r
|
||||
/**
|
||||
Forwards the assistant response stream to the client. Returns the `Run` object after it completes, or when it requires an action.
|
||||
*/
|
||||
forwardStream: (stream: AssistantStream) => Promise<Run | undefined>
|
||||
forwardStream: (stream: AssistantStreamType) => Promise<Run | undefined>
|
||||
}) => Promise<void>
|
||||
|
||||
export const OpenAIAssistantStream = (
|
||||
/**
|
||||
The `AssistantResponse` allows you to send a stream of assistant update to `useAssistant`.
|
||||
It is designed to facilitate streaming assistant responses to the `useAssistant` hook.
|
||||
It receives an assistant thread and a current message, and can send messages and data messages to the client.
|
||||
*/
|
||||
export function AssistantStream(
|
||||
{ threadId, messageId }: AssistantResponseSettings,
|
||||
process: AssistantResponseCallback
|
||||
) =>
|
||||
new ReadableStream({
|
||||
) {
|
||||
return new ReadableStream({
|
||||
async start(controller) {
|
||||
const textEncoder = new TextEncoder()
|
||||
|
||||
@ -73,7 +80,7 @@ export const OpenAIAssistantStream = (
|
||||
)
|
||||
}
|
||||
|
||||
const forwardStream = async (stream: AssistantStream) => {
|
||||
const forwardStream = async (stream: AssistantStreamType) => {
|
||||
let result: Run | undefined = undefined
|
||||
|
||||
for await (const value of stream) {
|
||||
@ -143,3 +150,4 @@ export const OpenAIAssistantStream = (
|
||||
pull(controller) {},
|
||||
cancel() {},
|
||||
})
|
||||
}
|
@ -1,81 +0,0 @@
|
||||
import type { OpenAI } from 'openai'
|
||||
import { VariableStore } from '@typebot.io/forge'
|
||||
import { isDefined, isEmpty } from '@typebot.io/lib'
|
||||
import { ChatCompletionOptions } from '../shared/parseChatCompletionOptions'
|
||||
import ky, { HTTPError } from 'ky'
|
||||
import { defaultOpenAIOptions, modelsWithImageUrlSupport } from '../constants'
|
||||
import { isModelCompatibleWithVision } from './isModelCompatibleWithVision'
|
||||
import { splitUserTextMessageIntoBlocks } from './splitUserTextMessageIntoBlocks'
|
||||
|
||||
export const parseChatCompletionMessages = async ({
|
||||
options: { messages, model },
|
||||
variables,
|
||||
}: {
|
||||
options: ChatCompletionOptions
|
||||
variables: VariableStore
|
||||
}): Promise<OpenAI.Chat.ChatCompletionMessageParam[]> => {
|
||||
if (!messages) return []
|
||||
const isVisionEnabled = isModelCompatibleWithVision(
|
||||
model ?? defaultOpenAIOptions.model
|
||||
)
|
||||
const parsedMessages = (
|
||||
await Promise.all(
|
||||
messages.map(async (message) => {
|
||||
if (!message.role) return
|
||||
|
||||
if (message.role === 'Dialogue') {
|
||||
if (!message.dialogueVariableId) return
|
||||
const dialogue = variables.get(message.dialogueVariableId) ?? []
|
||||
const dialogueArr = Array.isArray(dialogue) ? dialogue : [dialogue]
|
||||
|
||||
return Promise.all(
|
||||
dialogueArr.map(async (dialogueItem, index) => {
|
||||
if (index === 0 && message.startsBy === 'assistant')
|
||||
return {
|
||||
role: 'assistant',
|
||||
content: dialogueItem,
|
||||
}
|
||||
if (index % (message.startsBy === 'assistant' ? 1 : 2) === 0) {
|
||||
return {
|
||||
role: 'user',
|
||||
content: isVisionEnabled
|
||||
? await splitUserTextMessageIntoBlocks(dialogueItem ?? '')
|
||||
: dialogueItem,
|
||||
}
|
||||
}
|
||||
return {
|
||||
role: 'assistant',
|
||||
content: dialogueItem,
|
||||
}
|
||||
})
|
||||
)
|
||||
}
|
||||
|
||||
if (!message.content) return
|
||||
|
||||
const content = variables.parse(message.content)
|
||||
|
||||
if (isEmpty(content)) return
|
||||
|
||||
if (message.role === 'user')
|
||||
return {
|
||||
role: 'user',
|
||||
content: isVisionEnabled
|
||||
? await splitUserTextMessageIntoBlocks(content)
|
||||
: content,
|
||||
}
|
||||
|
||||
return {
|
||||
role: message.role,
|
||||
content,
|
||||
}
|
||||
})
|
||||
)
|
||||
)
|
||||
.flat()
|
||||
.filter((message) => {
|
||||
return isDefined(message?.role) && isDefined(message.content)
|
||||
}) as OpenAI.Chat.ChatCompletionMessageParam[]
|
||||
|
||||
return parsedMessages
|
||||
}
|
@ -1,23 +0,0 @@
|
||||
import type { OpenAI } from 'openai'
|
||||
import { toolParametersSchema } from '../shared/parseChatCompletionOptions'
|
||||
import { z } from '@typebot.io/forge/zod'
|
||||
|
||||
export const parseToolParameters = (
|
||||
parameters: z.infer<typeof toolParametersSchema>
|
||||
): OpenAI.FunctionParameters => ({
|
||||
type: 'object',
|
||||
properties: parameters?.reduce<{
|
||||
[x: string]: unknown
|
||||
}>((acc, param) => {
|
||||
if (!param.name) return acc
|
||||
acc[param.name] = {
|
||||
type: param.type === 'enum' ? 'string' : param.type,
|
||||
enum: param.type === 'enum' ? param.values : undefined,
|
||||
description: param.description,
|
||||
}
|
||||
return acc
|
||||
}, {}),
|
||||
required:
|
||||
parameters?.filter((param) => param.required).map((param) => param.name) ??
|
||||
[],
|
||||
})
|
@ -1,7 +1,7 @@
|
||||
import ky, { HTTPError } from 'ky'
|
||||
import OpenAI from 'openai'
|
||||
|
||||
export const splitUserTextMessageIntoBlocks = async (
|
||||
export const splitUserTextMessageIntoOpenAIBlocks = async (
|
||||
input: string
|
||||
): Promise<string | OpenAI.Chat.ChatCompletionContentPart[]> => {
|
||||
const urlRegex = /(^|\n\n)(https?:\/\/[^\s]+)(\n\n|$)/g
|
@ -7,9 +7,10 @@
|
||||
"author": "Baptiste Arnaud",
|
||||
"license": "AGPL-3.0-or-later",
|
||||
"dependencies": {
|
||||
"@ai-sdk/openai": "0.0.31",
|
||||
"ai": "3.2.1",
|
||||
"openai": "4.47.1"
|
||||
"@ai-sdk/openai": "0.0.36",
|
||||
"ai": "3.2.22",
|
||||
"openai": "4.52.7",
|
||||
"@typebot.io/ai": "workspace:*"
|
||||
},
|
||||
"devDependencies": {
|
||||
"@typebot.io/forge": "workspace:*",
|
||||
|
@ -1,6 +1,7 @@
|
||||
import { option } from '@typebot.io/forge'
|
||||
import { z } from '@typebot.io/forge/zod'
|
||||
import { baseOptions } from '../baseOptions'
|
||||
import { toolsSchema } from '@typebot.io/ai/schemas'
|
||||
|
||||
const nativeMessageContentSchema = {
|
||||
content: option.string.layout({
|
||||
@ -27,77 +28,6 @@ const assistantMessageItemSchema = option
|
||||
})
|
||||
.extend(nativeMessageContentSchema)
|
||||
|
||||
const parameterBase = {
|
||||
name: option.string.layout({
|
||||
label: 'Name',
|
||||
placeholder: 'myVariable',
|
||||
withVariableButton: false,
|
||||
}),
|
||||
description: option.string.layout({
|
||||
label: 'Description',
|
||||
withVariableButton: false,
|
||||
}),
|
||||
required: option.boolean.layout({
|
||||
label: 'Is required?',
|
||||
}),
|
||||
}
|
||||
|
||||
export const toolParametersSchema = option
|
||||
.array(
|
||||
option.discriminatedUnion('type', [
|
||||
option
|
||||
.object({
|
||||
type: option.literal('string'),
|
||||
})
|
||||
.extend(parameterBase),
|
||||
option
|
||||
.object({
|
||||
type: option.literal('number'),
|
||||
})
|
||||
.extend(parameterBase),
|
||||
option
|
||||
.object({
|
||||
type: option.literal('boolean'),
|
||||
})
|
||||
.extend(parameterBase),
|
||||
option
|
||||
.object({
|
||||
type: option.literal('enum'),
|
||||
values: option
|
||||
.array(option.string)
|
||||
.layout({ itemLabel: 'possible value' }),
|
||||
})
|
||||
.extend(parameterBase),
|
||||
])
|
||||
)
|
||||
.layout({
|
||||
accordion: 'Parameters',
|
||||
itemLabel: 'parameter',
|
||||
})
|
||||
|
||||
const functionToolItemSchema = option.object({
|
||||
type: option.literal('function'),
|
||||
name: option.string.layout({
|
||||
label: 'Name',
|
||||
placeholder: 'myFunctionName',
|
||||
withVariableButton: false,
|
||||
}),
|
||||
description: option.string.layout({
|
||||
label: 'Description',
|
||||
placeholder: 'A brief description of what this function does.',
|
||||
withVariableButton: false,
|
||||
}),
|
||||
parameters: toolParametersSchema,
|
||||
code: option.string.layout({
|
||||
inputType: 'code',
|
||||
label: 'Code',
|
||||
lang: 'javascript',
|
||||
moreInfoTooltip:
|
||||
'A javascript code snippet that can use the defined parameters. It should return a value.',
|
||||
withVariableButton: false,
|
||||
}),
|
||||
})
|
||||
|
||||
const dialogueMessageItemSchema = option.object({
|
||||
role: option.literal('Dialogue'),
|
||||
dialogueVariableId: option.string.layout({
|
||||
@ -112,23 +42,20 @@ const dialogueMessageItemSchema = option.object({
|
||||
})
|
||||
|
||||
type Props = {
|
||||
defaultModel?: string
|
||||
defaultTemperature?: number
|
||||
defaultTemperature: number
|
||||
modelFetchId?: string
|
||||
modelHelperText?: string
|
||||
}
|
||||
|
||||
export const parseChatCompletionOptions = ({
|
||||
defaultModel,
|
||||
defaultTemperature,
|
||||
modelFetchId,
|
||||
modelHelperText,
|
||||
}: Props = {}) =>
|
||||
}: Props) =>
|
||||
option.object({
|
||||
model: option.string.layout({
|
||||
placeholder: modelFetchId ? 'Select a model' : undefined,
|
||||
label: modelFetchId ? undefined : 'Model',
|
||||
defaultValue: defaultModel,
|
||||
fetcher: modelFetchId,
|
||||
helperText: modelHelperText,
|
||||
}),
|
||||
@ -142,9 +69,7 @@ export const parseChatCompletionOptions = ({
|
||||
])
|
||||
)
|
||||
.layout({ accordion: 'Messages', itemLabel: 'message', isOrdered: true }),
|
||||
tools: option
|
||||
.array(option.discriminatedUnion('type', [functionToolItemSchema]))
|
||||
.layout({ accordion: 'Tools', itemLabel: 'tool' }),
|
||||
tools: toolsSchema,
|
||||
temperature: option.number.layout({
|
||||
accordion: 'Advanced settings',
|
||||
label: 'Temperature',
|
||||
|
@ -69,14 +69,12 @@ export const parseGenerateVariablesOptions = ({
|
||||
? option.string.layout({
|
||||
placeholder: 'Select a model',
|
||||
label: 'Model',
|
||||
defaultValue: defaultModel,
|
||||
fetcher: modelFetch,
|
||||
helperText: modelHelperText,
|
||||
})
|
||||
: option.enum(modelFetch).layout({
|
||||
placeholder: 'Select a model',
|
||||
label: 'Model',
|
||||
defaultValue: defaultModel,
|
||||
helperText: modelHelperText,
|
||||
}),
|
||||
prompt: option.string.layout({
|
||||
|
@ -1,125 +0,0 @@
|
||||
import OpenAI, { ClientOptions } from 'openai'
|
||||
import { parseToolParameters } from '../helpers/parseToolParameters'
|
||||
import { executeFunction } from '@typebot.io/variables/executeFunction'
|
||||
import { ChatCompletionTool, ChatCompletionMessage } from 'openai/resources'
|
||||
import { maxToolCalls } from '../constants'
|
||||
import { parseChatCompletionMessages } from '../helpers/parseChatCompletionMessages'
|
||||
import { ChatCompletionOptions } from './parseChatCompletionOptions'
|
||||
import { LogsStore, VariableStore } from '@typebot.io/forge/types'
|
||||
|
||||
type OpenAIConfig = {
|
||||
baseUrl: string
|
||||
defaultModel?: string
|
||||
}
|
||||
|
||||
type Props = {
|
||||
credentials: {
|
||||
apiKey?: string
|
||||
}
|
||||
options: ChatCompletionOptions
|
||||
variables: VariableStore
|
||||
logs: LogsStore
|
||||
config: OpenAIConfig
|
||||
}
|
||||
|
||||
export const runChatCompletion = async ({
|
||||
credentials: { apiKey },
|
||||
options,
|
||||
variables,
|
||||
config: openAIConfig,
|
||||
logs,
|
||||
}: Props) => {
|
||||
const model = options.model?.trim() ?? openAIConfig.defaultModel
|
||||
if (!model) return logs.add('No model provided')
|
||||
const config = {
|
||||
apiKey,
|
||||
baseURL: openAIConfig.baseUrl ?? options.baseUrl,
|
||||
defaultHeaders: options.baseUrl
|
||||
? {
|
||||
'api-key': apiKey,
|
||||
}
|
||||
: undefined,
|
||||
defaultQuery: options.apiVersion
|
||||
? {
|
||||
'api-version': options.apiVersion,
|
||||
}
|
||||
: undefined,
|
||||
} satisfies ClientOptions
|
||||
|
||||
const openai = new OpenAI(config)
|
||||
|
||||
const tools = options.tools
|
||||
?.filter((t) => t.name && t.parameters)
|
||||
.map((t) => ({
|
||||
type: 'function',
|
||||
function: {
|
||||
name: t.name as string,
|
||||
description: t.description,
|
||||
parameters: parseToolParameters(t.parameters!),
|
||||
},
|
||||
})) satisfies ChatCompletionTool[] | undefined
|
||||
|
||||
const messages = await parseChatCompletionMessages({ options, variables })
|
||||
|
||||
const body = {
|
||||
model,
|
||||
temperature: options.temperature ? Number(options.temperature) : undefined,
|
||||
messages,
|
||||
tools: (tools?.length ?? 0) > 0 ? tools : undefined,
|
||||
}
|
||||
|
||||
let totalTokens = 0
|
||||
let message: ChatCompletionMessage
|
||||
|
||||
for (let i = 0; i < maxToolCalls; i++) {
|
||||
const response = await openai.chat.completions.create(body)
|
||||
|
||||
message = response.choices[0].message
|
||||
totalTokens += response.usage?.total_tokens || 0
|
||||
|
||||
if (!message.tool_calls) break
|
||||
|
||||
messages.push(message)
|
||||
|
||||
for (const toolCall of message.tool_calls) {
|
||||
const name = toolCall.function?.name
|
||||
if (!name) continue
|
||||
const toolDefinition = options.tools?.find((t) => t.name === name)
|
||||
if (!toolDefinition?.code || !toolDefinition.parameters) {
|
||||
messages.push({
|
||||
tool_call_id: toolCall.id,
|
||||
role: 'tool',
|
||||
content: 'Function not found',
|
||||
})
|
||||
continue
|
||||
}
|
||||
const toolParams = Object.fromEntries(
|
||||
toolDefinition.parameters.map(({ name }) => [name, null])
|
||||
)
|
||||
const toolArgs = toolCall.function?.arguments
|
||||
? JSON.parse(toolCall.function?.arguments)
|
||||
: undefined
|
||||
if (!toolArgs) continue
|
||||
const { output, newVariables } = await executeFunction({
|
||||
variables: variables.list(),
|
||||
args: { ...toolParams, ...toolArgs },
|
||||
body: toolDefinition.code,
|
||||
})
|
||||
newVariables?.forEach((v) => variables.set(v.id, v.value))
|
||||
|
||||
messages.push({
|
||||
tool_call_id: toolCall.id,
|
||||
role: 'tool',
|
||||
content: output,
|
||||
})
|
||||
}
|
||||
}
|
||||
|
||||
options.responseMapping?.forEach((mapping) => {
|
||||
if (!mapping.variableId) return
|
||||
if (!mapping.item || mapping.item === 'Message content')
|
||||
variables.set(mapping.variableId, message.content)
|
||||
if (mapping.item === 'Total tokens')
|
||||
variables.set(mapping.variableId, totalTokens)
|
||||
})
|
||||
}
|
@ -1,107 +0,0 @@
|
||||
import { VariableStore } from '@typebot.io/forge/types'
|
||||
import { ChatCompletionOptions } from './parseChatCompletionOptions'
|
||||
import { executeFunction } from '@typebot.io/variables/executeFunction'
|
||||
import { OpenAIStream, ToolCallPayload } from 'ai'
|
||||
import OpenAI, { ClientOptions } from 'openai'
|
||||
import { ChatCompletionTool } from 'openai/resources'
|
||||
import { parseChatCompletionMessages } from '../helpers/parseChatCompletionMessages'
|
||||
import { parseToolParameters } from '../helpers/parseToolParameters'
|
||||
|
||||
type Props = {
|
||||
credentials: { apiKey?: string }
|
||||
options: ChatCompletionOptions
|
||||
variables: VariableStore
|
||||
config: { baseUrl: string; defaultModel?: string }
|
||||
}
|
||||
export const runChatCompletionStream = async ({
|
||||
credentials: { apiKey },
|
||||
options,
|
||||
variables,
|
||||
config: openAIConfig,
|
||||
}: Props) => {
|
||||
const model = options.model?.trim() ?? openAIConfig.defaultModel
|
||||
if (!model) return
|
||||
const config = {
|
||||
apiKey,
|
||||
baseURL: openAIConfig.baseUrl ?? options.baseUrl,
|
||||
defaultHeaders: {
|
||||
'api-key': apiKey,
|
||||
},
|
||||
defaultQuery: options.apiVersion
|
||||
? {
|
||||
'api-version': options.apiVersion,
|
||||
}
|
||||
: undefined,
|
||||
} satisfies ClientOptions
|
||||
|
||||
const openai = new OpenAI(config)
|
||||
|
||||
const tools = options.tools
|
||||
?.filter((t) => t.name && t.parameters)
|
||||
.map((t) => ({
|
||||
type: 'function',
|
||||
function: {
|
||||
name: t.name as string,
|
||||
description: t.description,
|
||||
parameters: parseToolParameters(t.parameters!),
|
||||
},
|
||||
})) satisfies ChatCompletionTool[] | undefined
|
||||
|
||||
const messages = await parseChatCompletionMessages({ options, variables })
|
||||
|
||||
const response = await openai.chat.completions.create({
|
||||
model,
|
||||
temperature: options.temperature ? Number(options.temperature) : undefined,
|
||||
stream: true,
|
||||
messages,
|
||||
tools: (tools?.length ?? 0) > 0 ? tools : undefined,
|
||||
})
|
||||
|
||||
return OpenAIStream(response, {
|
||||
experimental_onToolCall: async (
|
||||
call: ToolCallPayload,
|
||||
appendToolCallMessage
|
||||
) => {
|
||||
for (const toolCall of call.tools) {
|
||||
const name = toolCall.func?.name
|
||||
if (!name) continue
|
||||
const toolDefinition = options.tools?.find((t) => t.name === name)
|
||||
if (!toolDefinition?.code || !toolDefinition.parameters) {
|
||||
messages.push({
|
||||
tool_call_id: toolCall.id,
|
||||
role: 'tool',
|
||||
content: 'Function not found',
|
||||
})
|
||||
continue
|
||||
}
|
||||
|
||||
const { output, newVariables } = await executeFunction({
|
||||
variables: variables.list(),
|
||||
args:
|
||||
typeof toolCall.func.arguments === 'string'
|
||||
? JSON.parse(toolCall.func.arguments)
|
||||
: toolCall.func.arguments,
|
||||
body: toolDefinition.code,
|
||||
})
|
||||
|
||||
newVariables?.forEach((v) => variables.set(v.id, v.value))
|
||||
|
||||
const newMessages = appendToolCallMessage({
|
||||
tool_call_id: toolCall.id,
|
||||
function_name: toolCall.func.name,
|
||||
tool_call_result: output,
|
||||
})
|
||||
|
||||
return openai.chat.completions.create({
|
||||
messages: [
|
||||
...messages,
|
||||
...newMessages,
|
||||
] as OpenAI.Chat.Completions.ChatCompletionMessageParam[],
|
||||
model,
|
||||
stream: true,
|
||||
tools,
|
||||
})
|
||||
}
|
||||
},
|
||||
})
|
||||
}
|
@ -0,0 +1,87 @@
|
||||
import { maxToolCalls } from '../constants'
|
||||
import { ChatCompletionOptions } from './parseChatCompletionOptions'
|
||||
import { LogsStore, VariableStore } from '@typebot.io/forge/types'
|
||||
import { createOpenAI } from '@ai-sdk/openai'
|
||||
import { APICallError, generateText } from 'ai'
|
||||
import { isModelCompatibleWithVision } from '../helpers/isModelCompatibleWithVision'
|
||||
import { parseTools } from '@typebot.io/ai/parseTools'
|
||||
import { parseChatCompletionMessages } from '@typebot.io/ai/parseChatCompletionMessages'
|
||||
|
||||
type OpenAIConfig = {
|
||||
baseUrl: string
|
||||
defaultModel?: string
|
||||
}
|
||||
|
||||
type Props = {
|
||||
credentials: {
|
||||
apiKey?: string
|
||||
}
|
||||
options: ChatCompletionOptions
|
||||
variables: VariableStore
|
||||
logs: LogsStore
|
||||
config: OpenAIConfig
|
||||
compatibility?: 'strict' | 'compatible'
|
||||
}
|
||||
|
||||
export const runOpenAIChatCompletion = async ({
|
||||
credentials: { apiKey },
|
||||
options,
|
||||
variables,
|
||||
config: openAIConfig,
|
||||
logs,
|
||||
compatibility,
|
||||
}: Props) => {
|
||||
if (!apiKey) return logs.add('No API key provided')
|
||||
const modelName = options.model?.trim() ?? openAIConfig.defaultModel
|
||||
if (!modelName) return logs.add('No model provided')
|
||||
|
||||
const model = createOpenAI({
|
||||
baseURL: openAIConfig.baseUrl ?? options.baseUrl,
|
||||
headers: options.baseUrl
|
||||
? {
|
||||
'api-key': apiKey,
|
||||
}
|
||||
: undefined,
|
||||
apiKey,
|
||||
compatibility,
|
||||
})(modelName)
|
||||
|
||||
try {
|
||||
const { text, usage } = await generateText({
|
||||
model,
|
||||
temperature: options.temperature
|
||||
? Number(options.temperature)
|
||||
: undefined,
|
||||
messages: await parseChatCompletionMessages({
|
||||
messages: options.messages,
|
||||
variables,
|
||||
isVisionEnabled: isModelCompatibleWithVision(modelName),
|
||||
shouldDownloadImages: false,
|
||||
}),
|
||||
tools: parseTools({ tools: options.tools, variables }),
|
||||
maxToolRoundtrips: maxToolCalls,
|
||||
})
|
||||
|
||||
options.responseMapping?.forEach((mapping) => {
|
||||
if (!mapping.variableId) return
|
||||
if (!mapping.item || mapping.item === 'Message content')
|
||||
variables.set(mapping.variableId, text)
|
||||
if (mapping.item === 'Total tokens')
|
||||
variables.set(mapping.variableId, usage.totalTokens)
|
||||
})
|
||||
} catch (err) {
|
||||
if (err instanceof APICallError) {
|
||||
logs.add({
|
||||
status: 'error',
|
||||
description: 'An API call error occured while generating the response',
|
||||
details: err.message,
|
||||
})
|
||||
return
|
||||
}
|
||||
logs.add({
|
||||
status: 'error',
|
||||
description: 'An unknown error occured while generating the response',
|
||||
details: err,
|
||||
})
|
||||
}
|
||||
}
|
@ -0,0 +1,114 @@
|
||||
import { VariableStore } from '@typebot.io/forge/types'
|
||||
import { ChatCompletionOptions } from './parseChatCompletionOptions'
|
||||
import { APICallError, streamText, ToolCallPart, ToolResultPart } from 'ai'
|
||||
import { createOpenAI } from '@ai-sdk/openai'
|
||||
import { maxToolCalls } from '../constants'
|
||||
import { isModelCompatibleWithVision } from '../helpers/isModelCompatibleWithVision'
|
||||
import { parseChatCompletionMessages } from '@typebot.io/ai/parseChatCompletionMessages'
|
||||
import { parseTools } from '@typebot.io/ai/parseTools'
|
||||
import { pumpStreamUntilDone } from '@typebot.io/ai/pumpStreamUntilDone'
|
||||
import { appendToolResultsToMessages } from '@typebot.io/ai/appendToolResultsToMessages'
|
||||
|
||||
type Props = {
|
||||
credentials: { apiKey?: string }
|
||||
options: ChatCompletionOptions
|
||||
variables: VariableStore
|
||||
config: { baseUrl: string; defaultModel?: string }
|
||||
compatibility?: 'strict' | 'compatible'
|
||||
}
|
||||
|
||||
export const runOpenAIChatCompletionStream = async ({
|
||||
credentials: { apiKey },
|
||||
options,
|
||||
variables,
|
||||
config: openAIConfig,
|
||||
compatibility,
|
||||
}: Props): Promise<{
|
||||
stream?: ReadableStream<any>
|
||||
httpError?: { status: number; message: string }
|
||||
}> => {
|
||||
if (!apiKey) return { httpError: { status: 401, message: 'API key missing' } }
|
||||
const modelName = options.model?.trim() ?? openAIConfig.defaultModel
|
||||
if (!modelName)
|
||||
return { httpError: { status: 400, message: 'model not found' } }
|
||||
|
||||
const model = createOpenAI({
|
||||
baseURL: openAIConfig.baseUrl ?? options.baseUrl,
|
||||
headers: options.baseUrl
|
||||
? {
|
||||
'api-key': apiKey,
|
||||
}
|
||||
: undefined,
|
||||
apiKey,
|
||||
compatibility,
|
||||
})(modelName)
|
||||
|
||||
const streamConfig = {
|
||||
model,
|
||||
messages: await parseChatCompletionMessages({
|
||||
messages: options.messages,
|
||||
isVisionEnabled: isModelCompatibleWithVision(modelName),
|
||||
shouldDownloadImages: false,
|
||||
variables,
|
||||
}),
|
||||
temperature: options.temperature ? Number(options.temperature) : undefined,
|
||||
tools: parseTools({ tools: options.tools, variables }),
|
||||
}
|
||||
|
||||
try {
|
||||
const response = await streamText(streamConfig)
|
||||
|
||||
let totalToolCalls = 0
|
||||
let toolCalls: ToolCallPart[] = []
|
||||
let toolResults: ToolResultPart[] = []
|
||||
|
||||
return {
|
||||
stream: new ReadableStream({
|
||||
async start(controller) {
|
||||
const reader = response.toAIStream().getReader()
|
||||
|
||||
await pumpStreamUntilDone(controller, reader)
|
||||
|
||||
toolCalls = await response.toolCalls
|
||||
if (toolCalls.length > 0)
|
||||
toolResults = (await response.toolResults) as ToolResultPart[]
|
||||
|
||||
while (
|
||||
toolCalls &&
|
||||
toolCalls.length > 0 &&
|
||||
totalToolCalls < maxToolCalls
|
||||
) {
|
||||
totalToolCalls += 1
|
||||
const newResponse = await streamText({
|
||||
...streamConfig,
|
||||
messages: appendToolResultsToMessages({
|
||||
messages: streamConfig.messages,
|
||||
toolCalls,
|
||||
toolResults,
|
||||
}),
|
||||
})
|
||||
const reader = newResponse.toAIStream().getReader()
|
||||
await pumpStreamUntilDone(controller, reader)
|
||||
toolCalls = await newResponse.toolCalls
|
||||
if (toolCalls.length > 0)
|
||||
toolResults = (await newResponse.toolResults) as ToolResultPart[]
|
||||
}
|
||||
|
||||
controller.close()
|
||||
},
|
||||
}),
|
||||
}
|
||||
} catch (err) {
|
||||
if (err instanceof APICallError) {
|
||||
return {
|
||||
httpError: { status: err.statusCode ?? 500, message: err.message },
|
||||
}
|
||||
}
|
||||
return {
|
||||
httpError: {
|
||||
status: 500,
|
||||
message: 'An unknown error occured while generating the response',
|
||||
},
|
||||
}
|
||||
}
|
||||
}
|
@ -3,8 +3,8 @@ import { auth } from '../auth'
|
||||
import { parseChatCompletionOptions } from '@typebot.io/openai-block/shared/parseChatCompletionOptions'
|
||||
import { getChatCompletionSetVarIds } from '@typebot.io/openai-block/shared/getChatCompletionSetVarIds'
|
||||
import { getChatCompletionStreamVarId } from '@typebot.io/openai-block/shared/getChatCompletionStreamVarId'
|
||||
import { runChatCompletion } from '@typebot.io/openai-block/shared/runChatCompletion'
|
||||
import { runChatCompletionStream } from '@typebot.io/openai-block/shared/runChatCompletionStream'
|
||||
import { runOpenAIChatCompletion } from '@typebot.io/openai-block/shared/runOpenAIChatCompletion'
|
||||
import { runOpenAIChatCompletionStream } from '@typebot.io/openai-block/shared/runOpenAIChatCompletionStream'
|
||||
import { defaultTogetherOptions } from '../constants'
|
||||
|
||||
export const createChatCompletion = createAction({
|
||||
@ -13,6 +13,7 @@ export const createChatCompletion = createAction({
|
||||
options: parseChatCompletionOptions({
|
||||
modelHelperText:
|
||||
'You can find the list of all the models available [here](https://docs.together.ai/docs/inference-models#chat-models). Copy the model string for API.',
|
||||
defaultTemperature: defaultTogetherOptions.temperature,
|
||||
}),
|
||||
turnableInto: [
|
||||
{
|
||||
@ -26,7 +27,6 @@ export const createChatCompletion = createAction({
|
||||
blockId: 'anthropic',
|
||||
transform: (options) => ({
|
||||
...options,
|
||||
model: undefined,
|
||||
action: 'Create Chat Message',
|
||||
responseMapping: options.responseMapping?.map((res: any) =>
|
||||
res.item === 'Message content'
|
||||
@ -39,18 +39,19 @@ export const createChatCompletion = createAction({
|
||||
getSetVariableIds: getChatCompletionSetVarIds,
|
||||
run: {
|
||||
server: (params) =>
|
||||
runChatCompletion({
|
||||
runOpenAIChatCompletion({
|
||||
...params,
|
||||
config: { baseUrl: defaultTogetherOptions.baseUrl },
|
||||
}),
|
||||
stream: {
|
||||
getStreamVariableId: getChatCompletionStreamVarId,
|
||||
run: async (params) => ({
|
||||
stream: await runChatCompletionStream({
|
||||
run: async (params) =>
|
||||
runOpenAIChatCompletionStream({
|
||||
...params,
|
||||
config: { baseUrl: defaultTogetherOptions.baseUrl },
|
||||
config: {
|
||||
baseUrl: defaultTogetherOptions.baseUrl,
|
||||
},
|
||||
}),
|
||||
}),
|
||||
},
|
||||
},
|
||||
})
|
||||
|
@ -1,3 +1,4 @@
|
||||
export const defaultTogetherOptions = {
|
||||
baseUrl: 'https://api.together.xyz/v1',
|
||||
temperature: 1,
|
||||
} as const
|
||||
|
@ -24,6 +24,7 @@ export interface ZodLayoutMetadata<
|
||||
isDebounceDisabled?: boolean
|
||||
hiddenItems?: string[]
|
||||
mergeWithLastField?: boolean
|
||||
toLabels?: (val?: string) => string | undefined
|
||||
}
|
||||
|
||||
declare module 'zod' {
|
||||
|
@ -1,5 +1,5 @@
|
||||
import { env } from '@typebot.io/env'
|
||||
import Redis from 'ioredis'
|
||||
import { Redis } from 'ioredis'
|
||||
|
||||
declare const global: { redis: Redis | undefined }
|
||||
let redis: Redis | undefined
|
||||
|
Reference in New Issue
Block a user