
⬆️ Upgrade AI SDK (#1641)

Baptiste Arnaud
2024-07-15 14:32:42 +02:00
committed by GitHub
parent a4fb8b6d10
commit 043f0054b0
60 changed files with 2183 additions and 1683 deletions

View File

@ -27,7 +27,6 @@
"options": {
"credentialsId": "clvqq3hey0007pub4almxnhk2",
"action": "Ask Assistant",
"assistantId": "asst_jy7aW39QWtAcVDrLOBr2JSWo",
"threadVariableId": "vf5gxmpqddsy4qugev6o0qs5c",
"message": "{{User last message}}",
"responseMapping": [{ "variableId": "vn8h1gigkjwv40godw2hrgclh" }]

View File

@ -297,7 +297,7 @@ export const FilmIcon = (props: IconProps) => (
</Icon>
)
export const WebhookIcon = (props: IconProps) => (
export const ThunderIcon = (props: IconProps) => (
<Icon viewBox="0 0 24 24" {...featherIconsBaseProps} {...props}>
<polygon points="13 2 3 14 12 14 11 22 21 10 12 10 13 2"></polygon>
</Icon>

View File

@ -1,5 +1,5 @@
import { WebhookIcon } from '@/components/icons'
import { ThunderIcon } from '@/components/icons'
import { IconProps } from '@chakra-ui/react'
import React from 'react'
export const HttpRequestIcon = (props: IconProps) => <WebhookIcon {...props} />
export const HttpRequestIcon = (props: IconProps) => <ThunderIcon {...props} />

View File

@ -1,6 +1,6 @@
import { useColorModeValue } from '@chakra-ui/react'
import React from 'react'
import { FlagIcon, SendEmailIcon, WebhookIcon } from '@/components/icons'
import { FlagIcon, SendEmailIcon, ThunderIcon } from '@/components/icons'
import { WaitIcon } from '@/features/blocks/logic/wait/components/WaitIcon'
import { ScriptIcon } from '@/features/blocks/logic/script/components/ScriptIcon'
import { JumpIcon } from '@/features/blocks/logic/jump/components/JumpIcon'
@ -103,7 +103,7 @@ export const BlockIcon = ({ type, mt }: BlockIconProps): JSX.Element => {
case IntegrationBlockType.GOOGLE_ANALYTICS:
return <GoogleAnalyticsLogo mt={mt} />
case IntegrationBlockType.WEBHOOK:
return <WebhookIcon mt={mt} />
return <ThunderIcon mt={mt} />
case IntegrationBlockType.ZAPIER:
return <ZapierLogo mt={mt} />
case IntegrationBlockType.MAKE_COM:

View File

@ -1,19 +1,50 @@
import { SetVariableLabel } from '@/components/SetVariableLabel'
import { useTypebot } from '@/features/editor/providers/TypebotProvider'
import { Stack, Text } from '@chakra-ui/react'
import { Flex, Stack, Text, Tooltip } from '@chakra-ui/react'
import { useForgedBlock } from '../hooks/useForgedBlock'
import { ForgedBlock } from '@typebot.io/forge-repository/types'
import { BlockIndices } from '@typebot.io/schemas'
import { useMemo } from 'react'
import { BubbleBlockType } from '@typebot.io/schemas/features/blocks/bubbles/constants'
import { ThunderIcon } from '@/components/icons'
type Props = {
block: ForgedBlock
indices: BlockIndices
}
export const ForgedBlockNodeContent = ({ block }: Props) => {
export const ForgedBlockNodeContent = ({ block, indices }: Props) => {
const { blockDef, actionDef } = useForgedBlock(
block.type,
block.options?.action
)
const { typebot } = useTypebot()
const isStreamingNextBlock = useMemo(() => {
if (!actionDef?.run?.stream?.getStreamVariableId) return false
const variable = typebot?.variables.find(
(variable) =>
variable.id ===
actionDef.run!.stream!.getStreamVariableId(block.options)
)
if (!variable) return false
const nextBlock =
typebot?.groups[indices.groupIndex]?.blocks[indices.blockIndex + 1]
return (
nextBlock?.type === BubbleBlockType.TEXT &&
nextBlock.content?.richText?.length === 1 &&
nextBlock.content.richText[0].type === 'p' &&
nextBlock.content.richText[0].children.length === 1 &&
nextBlock.content.richText[0].children[0].text === `{{${variable.name}}}`
)
}, [
actionDef?.run,
block.options,
indices.blockIndex,
indices.groupIndex,
typebot?.groups,
typebot?.variables,
])
const setVariableIds = actionDef?.getSetVariableIds?.(block.options) ?? []
const isConfigured =
@ -32,6 +63,23 @@ export const ForgedBlockNodeContent = ({ block }: Props) => {
variableId={variableId}
/>
))}
{isStreamingNextBlock && (
<Tooltip label="Text bubble content will be streamed">
<Flex
rounded="full"
p="1"
bgColor="gray.100"
color="purple.500"
borderWidth={1}
pos="absolute"
bottom="-15px"
left="118px"
zIndex={10}
>
<ThunderIcon fontSize="sm" />
</Flex>
</Tooltip>
)}
</Stack>
)
}

View File

@ -30,6 +30,21 @@ import { getZodInnerSchema } from '../../helpers/getZodInnerSchema'
import { TagsInput } from '@/components/TagsInput'
import { PrimitiveList } from '@/components/PrimitiveList'
const parseEnumItems = (
schema: z.ZodTypeAny,
layout?: ZodLayoutMetadata<ZodTypeAny>
) => {
const values = layout?.hiddenItems
? schema._def.values.filter((v: string) => !layout.hiddenItems?.includes(v))
: schema._def.values
if (layout?.toLabels)
return values.map((v: string) => ({
label: layout.toLabels!(v),
value: v,
}))
return values
}
const mdComponents = {
a: ({ href, children }) => (
<a
@ -134,13 +149,7 @@ export const ZodFieldLayout = ({
<DropdownList
currentItem={data ?? layout?.defaultValue}
onItemSelect={onDataChange}
items={
layout?.hiddenItems
? innerSchema._def.values.filter(
(v: any) => !layout.hiddenItems.includes(v)
)
: innerSchema._def.values
}
items={parseEnumItems(innerSchema, layout)}
label={layout?.label}
helperText={
layout?.helperText ? (

View File

@ -155,7 +155,7 @@ export const BlockNodeContent = ({
return <ZemanticAiNodeBody options={block.options} />
}
default: {
return <ForgedBlockNodeContent block={block} />
return <ForgedBlockNodeContent block={block} indices={indices} />
}
}
}

View File

@ -1,4 +1,4 @@
import { WebhookIcon } from '@/components/icons'
import { ThunderIcon } from '@/components/icons'
import { useUser } from '@/features/account/hooks/useUser'
import { useEditor } from '@/features/editor/providers/EditorProvider'
import { useTypebot } from '@/features/editor/providers/TypebotProvider'
@ -18,7 +18,7 @@ export const WebPreview = () => {
const handleNewLogs = (logs: ContinueChatResponse['logs']) => {
logs?.forEach((log) => {
showToast({
icon: <WebhookIcon />,
icon: <ThunderIcon />,
status: log.status as 'success' | 'error' | 'info',
title: log.status === 'error' ? 'An error occured' : undefined,
description: log.description,

View File

@ -239,6 +239,12 @@
},
"queryParamsStr": {
"type": "string"
},
"areControlsDisplayed": {
"type": "boolean"
},
"isAutoplayEnabled": {
"type": "boolean"
}
}
}
@ -695,6 +701,30 @@
"required": [
"type"
]
},
{
"type": "object",
"properties": {
"variableId": {
"type": "string"
},
"isExecutedOnClient": {
"type": "boolean"
},
"type": {
"type": "string",
"enum": [
"Pop",
"Shift"
]
},
"saveItemInVariableId": {
"type": "string"
}
},
"required": [
"type"
]
}
]
}
@ -4578,6 +4608,12 @@
},
"queryParamsStr": {
"type": "string"
},
"areControlsDisplayed": {
"type": "boolean"
},
"isAutoplayEnabled": {
"type": "boolean"
}
}
}
@ -5034,6 +5070,30 @@
"required": [
"type"
]
},
{
"type": "object",
"properties": {
"variableId": {
"type": "string"
},
"isExecutedOnClient": {
"type": "boolean"
},
"type": {
"type": "string",
"enum": [
"Pop",
"Shift"
]
},
"saveItemInVariableId": {
"type": "string"
}
},
"required": [
"type"
]
}
]
}
@ -8054,6 +8114,12 @@
},
"queryParamsStr": {
"type": "string"
},
"areControlsDisplayed": {
"type": "boolean"
},
"isAutoplayEnabled": {
"type": "boolean"
}
}
}
@ -8510,6 +8576,30 @@
"required": [
"type"
]
},
{
"type": "object",
"properties": {
"variableId": {
"type": "string"
},
"isExecutedOnClient": {
"type": "boolean"
},
"type": {
"type": "string",
"enum": [
"Pop",
"Shift"
]
},
"saveItemInVariableId": {
"type": "string"
}
},
"required": [
"type"
]
}
]
}
@ -16612,6 +16702,12 @@
},
"queryParamsStr": {
"type": "string"
},
"areControlsDisplayed": {
"type": "boolean"
},
"isAutoplayEnabled": {
"type": "boolean"
}
}
}
@ -17065,6 +17161,30 @@
"required": [
"type"
]
},
{
"type": "object",
"properties": {
"variableId": {
"type": "string"
},
"isExecutedOnClient": {
"type": "boolean"
},
"type": {
"type": "string",
"enum": [
"Pop",
"Shift"
]
},
"saveItemInVariableId": {
"type": "string"
}
},
"required": [
"type"
]
}
]
}
@ -19905,6 +20025,149 @@
]
}
},
"tools": {
"type": "array",
"items": {
"oneOf": [
{
"type": "object",
"properties": {}
},
{
"type": "object",
"properties": {
"type": {
"type": "string",
"enum": [
"function"
]
},
"name": {
"type": "string"
},
"description": {
"type": "string"
},
"parameters": {
"type": "array",
"items": {
"oneOf": [
{
"type": "object",
"properties": {}
},
{
"type": "object",
"properties": {
"type": {
"type": "string",
"enum": [
"string"
]
},
"name": {
"type": "string"
},
"description": {
"type": "string"
},
"required": {
"type": "boolean"
}
},
"required": [
"type"
]
},
{
"type": "object",
"properties": {
"type": {
"type": "string",
"enum": [
"number"
]
},
"name": {
"type": "string"
},
"description": {
"type": "string"
},
"required": {
"type": "boolean"
}
},
"required": [
"type"
]
},
{
"type": "object",
"properties": {
"type": {
"type": "string",
"enum": [
"boolean"
]
},
"name": {
"type": "string"
},
"description": {
"type": "string"
},
"required": {
"type": "boolean"
}
},
"required": [
"type"
]
},
{
"type": "object",
"properties": {
"type": {
"type": "string",
"enum": [
"enum"
]
},
"values": {
"type": "array",
"items": {
"type": "string"
}
},
"name": {
"type": "string"
},
"description": {
"type": "string"
},
"required": {
"type": "boolean"
}
},
"required": [
"type"
]
}
]
}
},
"code": {
"type": "string"
}
},
"required": [
"type"
]
}
]
}
},
"responseMapping": {
"type": "array",
"items": {
@ -20169,6 +20432,7 @@
"model": {
"type": "string",
"enum": [
"claude-3-5-sonnet-20240620",
"claude-3-opus-20240229",
"claude-3-sonnet-20240229",
"claude-3-haiku-20240307",
@ -20246,6 +20510,149 @@
]
}
},
"tools": {
"type": "array",
"items": {
"oneOf": [
{
"type": "object",
"properties": {}
},
{
"type": "object",
"properties": {
"type": {
"type": "string",
"enum": [
"function"
]
},
"name": {
"type": "string"
},
"description": {
"type": "string"
},
"parameters": {
"type": "array",
"items": {
"oneOf": [
{
"type": "object",
"properties": {}
},
{
"type": "object",
"properties": {
"type": {
"type": "string",
"enum": [
"string"
]
},
"name": {
"type": "string"
},
"description": {
"type": "string"
},
"required": {
"type": "boolean"
}
},
"required": [
"type"
]
},
{
"type": "object",
"properties": {
"type": {
"type": "string",
"enum": [
"number"
]
},
"name": {
"type": "string"
},
"description": {
"type": "string"
},
"required": {
"type": "boolean"
}
},
"required": [
"type"
]
},
{
"type": "object",
"properties": {
"type": {
"type": "string",
"enum": [
"boolean"
]
},
"name": {
"type": "string"
},
"description": {
"type": "string"
},
"required": {
"type": "boolean"
}
},
"required": [
"type"
]
},
{
"type": "object",
"properties": {
"type": {
"type": "string",
"enum": [
"enum"
]
},
"values": {
"type": "array",
"items": {
"type": "string"
}
},
"name": {
"type": "string"
},
"description": {
"type": "string"
},
"required": {
"type": "boolean"
}
},
"required": [
"type"
]
}
]
}
},
"code": {
"type": "string"
}
},
"required": [
"type"
]
}
]
}
},
"systemMessage": {
"type": "string"
},
@ -20302,6 +20709,7 @@
"model": {
"type": "string",
"enum": [
"claude-3-5-sonnet-20240620",
"claude-3-opus-20240229",
"claude-3-sonnet-20240229",
"claude-3-haiku-20240307",
@ -22927,6 +23335,12 @@
},
"queryParamsStr": {
"type": "string"
},
"areControlsDisplayed": {
"type": "boolean"
},
"isAutoplayEnabled": {
"type": "boolean"
}
}
}
@ -23383,6 +23797,30 @@
"required": [
"type"
]
},
{
"type": "object",
"properties": {
"variableId": {
"type": "string"
},
"isExecutedOnClient": {
"type": "boolean"
},
"type": {
"type": "string",
"enum": [
"Pop",
"Shift"
]
},
"saveItemInVariableId": {
"type": "string"
}
},
"required": [
"type"
]
}
]
}
@ -25772,6 +26210,12 @@
},
"queryParamsStr": {
"type": "string"
},
"areControlsDisplayed": {
"type": "boolean"
},
"isAutoplayEnabled": {
"type": "boolean"
}
}
}
@ -26228,6 +26672,30 @@
"required": [
"type"
]
},
{
"type": "object",
"properties": {
"variableId": {
"type": "string"
},
"isExecutedOnClient": {
"type": "boolean"
},
"type": {
"type": "string",
"enum": [
"Pop",
"Shift"
]
},
"saveItemInVariableId": {
"type": "string"
}
},
"required": [
"type"
]
}
]
}

View File

@ -3523,6 +3523,12 @@
},
"queryParamsStr": {
"type": "string"
},
"areControlsDisplayed": {
"type": "boolean"
},
"isAutoplayEnabled": {
"type": "boolean"
}
}
}
@ -3979,6 +3985,30 @@
"required": [
"type"
]
},
{
"type": "object",
"properties": {
"variableId": {
"type": "string"
},
"isExecutedOnClient": {
"type": "boolean"
},
"type": {
"type": "string",
"enum": [
"Pop",
"Shift"
]
},
"saveItemInVariableId": {
"type": "string"
}
},
"required": [
"type"
]
}
]
}
@ -7706,6 +7736,12 @@
},
"queryParamsStr": {
"type": "string"
},
"areControlsDisplayed": {
"type": "boolean"
},
"isAutoplayEnabled": {
"type": "boolean"
}
}
}
@ -8159,6 +8195,30 @@
"required": [
"type"
]
},
{
"type": "object",
"properties": {
"variableId": {
"type": "string"
},
"isExecutedOnClient": {
"type": "boolean"
},
"type": {
"type": "string",
"enum": [
"Pop",
"Shift"
]
},
"saveItemInVariableId": {
"type": "string"
}
},
"required": [
"type"
]
}
]
}
@ -10999,6 +11059,149 @@
]
}
},
"tools": {
"type": "array",
"items": {
"oneOf": [
{
"type": "object",
"properties": {}
},
{
"type": "object",
"properties": {
"type": {
"type": "string",
"enum": [
"function"
]
},
"name": {
"type": "string"
},
"description": {
"type": "string"
},
"parameters": {
"type": "array",
"items": {
"oneOf": [
{
"type": "object",
"properties": {}
},
{
"type": "object",
"properties": {
"type": {
"type": "string",
"enum": [
"string"
]
},
"name": {
"type": "string"
},
"description": {
"type": "string"
},
"required": {
"type": "boolean"
}
},
"required": [
"type"
]
},
{
"type": "object",
"properties": {
"type": {
"type": "string",
"enum": [
"number"
]
},
"name": {
"type": "string"
},
"description": {
"type": "string"
},
"required": {
"type": "boolean"
}
},
"required": [
"type"
]
},
{
"type": "object",
"properties": {
"type": {
"type": "string",
"enum": [
"boolean"
]
},
"name": {
"type": "string"
},
"description": {
"type": "string"
},
"required": {
"type": "boolean"
}
},
"required": [
"type"
]
},
{
"type": "object",
"properties": {
"type": {
"type": "string",
"enum": [
"enum"
]
},
"values": {
"type": "array",
"items": {
"type": "string"
}
},
"name": {
"type": "string"
},
"description": {
"type": "string"
},
"required": {
"type": "boolean"
}
},
"required": [
"type"
]
}
]
}
},
"code": {
"type": "string"
}
},
"required": [
"type"
]
}
]
}
},
"responseMapping": {
"type": "array",
"items": {
@ -11263,6 +11466,7 @@
"model": {
"type": "string",
"enum": [
"claude-3-5-sonnet-20240620",
"claude-3-opus-20240229",
"claude-3-sonnet-20240229",
"claude-3-haiku-20240307",
@ -11340,6 +11544,149 @@
]
}
},
"tools": {
"type": "array",
"items": {
"oneOf": [
{
"type": "object",
"properties": {}
},
{
"type": "object",
"properties": {
"type": {
"type": "string",
"enum": [
"function"
]
},
"name": {
"type": "string"
},
"description": {
"type": "string"
},
"parameters": {
"type": "array",
"items": {
"oneOf": [
{
"type": "object",
"properties": {}
},
{
"type": "object",
"properties": {
"type": {
"type": "string",
"enum": [
"string"
]
},
"name": {
"type": "string"
},
"description": {
"type": "string"
},
"required": {
"type": "boolean"
}
},
"required": [
"type"
]
},
{
"type": "object",
"properties": {
"type": {
"type": "string",
"enum": [
"number"
]
},
"name": {
"type": "string"
},
"description": {
"type": "string"
},
"required": {
"type": "boolean"
}
},
"required": [
"type"
]
},
{
"type": "object",
"properties": {
"type": {
"type": "string",
"enum": [
"boolean"
]
},
"name": {
"type": "string"
},
"description": {
"type": "string"
},
"required": {
"type": "boolean"
}
},
"required": [
"type"
]
},
{
"type": "object",
"properties": {
"type": {
"type": "string",
"enum": [
"enum"
]
},
"values": {
"type": "array",
"items": {
"type": "string"
}
},
"name": {
"type": "string"
},
"description": {
"type": "string"
},
"required": {
"type": "boolean"
}
},
"required": [
"type"
]
}
]
}
},
"code": {
"type": "string"
}
},
"required": [
"type"
]
}
]
}
},
"systemMessage": {
"type": "string"
},
@ -11396,6 +11743,7 @@
"model": {
"type": "string",
"enum": [
"claude-3-5-sonnet-20240620",
"claude-3-opus-20240229",
"claude-3-sonnet-20240229",
"claude-3-haiku-20240307",
@ -12892,6 +13240,12 @@
},
"queryParamsStr": {
"type": "string"
},
"areControlsDisplayed": {
"type": "boolean"
},
"isAutoplayEnabled": {
"type": "boolean"
}
}
}

View File

@ -21,7 +21,7 @@
"@typebot.io/js": "workspace:*",
"@typebot.io/nextjs": "workspace:*",
"@typebot.io/prisma": "workspace:*",
"ai": "3.2.1",
"ai": "3.2.22",
"bot-engine": "workspace:*",
"cors": "2.8.5",
"google-spreadsheet": "4.1.1",

View File

@ -0,0 +1,28 @@
import { CoreMessage, ToolCallPart, ToolResultPart } from 'ai'
type Props = {
messages: CoreMessage[]
toolCalls: ToolCallPart[]
toolResults: ToolResultPart[]
}
export const appendToolResultsToMessages = ({
messages,
toolCalls,
toolResults,
}: Props): CoreMessage[] => {
if (toolCalls.length > 0) {
messages.push({
role: 'assistant',
content: toolCalls,
})
}
if (toolResults.length > 0) {
messages.push({
role: 'tool',
content: toolResults,
})
}
return messages
}
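
For reference, a minimal usage sketch of the helper above, not part of the diff: it assumes the ToolCallPart/ToolResultPart shapes from the ai package, and the getWeather call and its values are hypothetical.

import { CoreMessage, ToolCallPart, ToolResultPart } from 'ai'
import { appendToolResultsToMessages } from '@typebot.io/ai/appendToolResultsToMessages'

const messages: CoreMessage[] = [
  { role: 'user', content: 'What is the weather in Paris?' },
]

// Hypothetical tool call emitted by the model, and its result
const toolCalls: ToolCallPart[] = [
  {
    type: 'tool-call',
    toolCallId: 'call_1',
    toolName: 'getWeather',
    args: { city: 'Paris' },
  },
]
const toolResults: ToolResultPart[] = [
  {
    type: 'tool-result',
    toolCallId: 'call_1',
    toolName: 'getWeather',
    result: 'Sunny, 24°C',
  },
]

// Appends an assistant message carrying the tool calls and a tool message
// carrying their results, so the next generateText/streamText round can see them.
const nextMessages = appendToolResultsToMessages({
  messages,
  toolCalls,
  toolResults,
})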

packages/ai/package.json (new file, 14 lines)
View File

@ -0,0 +1,14 @@
{
"name": "@typebot.io/ai",
"version": "1.0.0",
"license": "AGPL-3.0-or-later",
"private": true,
"dependencies": {
"@typebot.io/lib": "workspace:*",
"@typebot.io/forge": "workspace:*",
"@typebot.io/variables": "workspace:*",
"ai": "3.2.22",
"ky": "1.2.4",
"@typebot.io/tsconfig": "workspace:*"
}
}

View File

@ -0,0 +1,113 @@
import { CoreAssistantMessage, CoreMessage, CoreUserMessage } from 'ai'
import { VariableStore } from '@typebot.io/forge'
import { isDefined, isEmpty } from '@typebot.io/lib'
import { splitUserTextMessageIntoBlocks } from './splitUserTextMessageIntoBlocks'
import { Message, StandardMessage, DialogueMessage } from './types'
type Props = {
messages: Message[] | undefined
isVisionEnabled: boolean
shouldDownloadImages: boolean
variables: VariableStore
}
export const parseChatCompletionMessages = async ({
messages,
isVisionEnabled,
shouldDownloadImages,
variables,
}: Props): Promise<CoreMessage[]> => {
if (!messages) return []
const parsedMessages: CoreMessage[] = (
await Promise.all(
messages.map(async (message) => {
if (!message.role) return
if (message.role === 'Dialogue')
return parseDialogueMessage({
message,
variables,
isVisionEnabled,
shouldDownloadImages,
})
return parseStandardMessage({
message,
variables,
isVisionEnabled,
shouldDownloadImages,
})
})
)
)
.flat()
.filter(isDefined)
return parsedMessages
}
const parseDialogueMessage = async ({
message,
variables,
isVisionEnabled,
shouldDownloadImages,
}: Pick<Props, 'variables' | 'isVisionEnabled' | 'shouldDownloadImages'> & {
message: DialogueMessage
}) => {
if (!message.dialogueVariableId) return
const dialogue = variables.get(message.dialogueVariableId) ?? []
const dialogueArr = Array.isArray(dialogue) ? dialogue : [dialogue]
return Promise.all(
dialogueArr.map<
Promise<CoreUserMessage | CoreAssistantMessage | undefined>
>(async (dialogueItem, index) => {
if (!dialogueItem) return
if (index === 0 && message.startsBy === 'assistant')
return { role: 'assistant' as const, content: dialogueItem }
if (index % (message.startsBy === 'assistant' ? 1 : 2) === 0) {
return {
role: 'user' as const,
content: isVisionEnabled
? await splitUserTextMessageIntoBlocks({
input: dialogueItem ?? '',
shouldDownloadImages,
})
: dialogueItem,
}
}
return { role: 'assistant' as const, content: dialogueItem }
})
)
}
const parseStandardMessage = async ({
message,
variables,
isVisionEnabled,
shouldDownloadImages,
}: Pick<Props, 'variables' | 'isVisionEnabled' | 'shouldDownloadImages'> & {
message: StandardMessage
}) => {
if (!message.content) return
const content = variables.parse(message.content)
if (isEmpty(content)) return
if (message.role === 'user')
return {
role: 'user' as const,
content: isVisionEnabled
? await splitUserTextMessageIntoBlocks({
input: content,
shouldDownloadImages,
})
: content,
}
return {
role: message.role,
content,
}
}

packages/ai/parseTools.ts (new file, 68 lines)
View File

@ -0,0 +1,68 @@
import { VariableStore } from '@typebot.io/forge'
import { z } from '@typebot.io/forge/zod'
import { executeFunction } from '@typebot.io/variables/executeFunction'
import { Variable } from '@typebot.io/variables/types'
import { CoreTool } from 'ai'
import { isNotEmpty } from '@typebot.io/lib'
import { Tools } from './schemas'
export const parseTools = ({
tools,
variables,
}: {
tools: Tools
variables: VariableStore
onNewVariabes?: (newVariables: Variable[]) => void
}): Record<string, CoreTool> => {
if (!tools?.length) return {}
return tools.reduce<Record<string, CoreTool>>((acc, tool) => {
if (!tool.code || !tool.name) return acc
acc[tool.name] = {
description: tool.description,
parameters: parseParameters(tool.parameters),
execute: async (args) => {
const { output, newVariables } = await executeFunction({
variables: variables.list(),
args,
body: tool.code!,
})
newVariables?.forEach((v) => variables.set(v.id, v.value))
return output
},
} satisfies CoreTool
return acc
}, {})
}
const parseParameters = (
parameters: NonNullable<Tools>[number]['parameters']
): z.ZodTypeAny | undefined => {
if (!parameters || parameters?.length === 0) return
const shape: z.ZodRawShape = {}
parameters.forEach((param) => {
if (!param.name) return
switch (param.type) {
case 'string':
shape[param.name] = z.string()
break
case 'number':
shape[param.name] = z.number()
break
case 'boolean':
shape[param.name] = z.boolean()
break
case 'enum': {
if (!param.values || param.values.length === 0) return
shape[param.name] = z.enum(param.values as any)
break
}
}
if (isNotEmpty(param.description))
shape[param.name] = shape[param.name].describe(param.description)
if (param.required === false)
shape[param.name] = shape[param.name].optional()
})
return z.object(shape)
}
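
A usage sketch for parseTools, not part of the diff. It assumes a VariableStore exposing the list/get/set/parse methods used above; the in-memory stub and the getWeather tool are hypothetical.

import { parseTools } from '@typebot.io/ai/parseTools'
import { VariableStore } from '@typebot.io/forge'

// Hypothetical in-memory stand-in for the forge VariableStore
const variables = {
  list: () => [],
  get: () => undefined,
  set: () => undefined,
  parse: (value: string) => value,
} as unknown as VariableStore

const tools = parseTools({
  variables,
  tools: [
    {
      type: 'function',
      name: 'getWeather',
      description: 'Returns the current weather for a city',
      parameters: [
        {
          type: 'string',
          name: 'city',
          description: 'City name',
          required: true,
        },
      ],
      // Runs through executeFunction with the parsed arguments at call time
      code: 'return `Sunny in ${city}`',
    },
  ],
})
// tools.getWeather.parameters is z.object({ city: z.string().describe('City name') })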

View File

@ -0,0 +1,11 @@
export const pumpStreamUntilDone = async (
controller: ReadableStreamDefaultController<Uint8Array>,
reader: ReadableStreamDefaultReader
): Promise<void> => {
const { done, value } = await reader.read()
if (done) return
controller.enqueue(value)
return pumpStreamUntilDone(controller, reader)
}
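
For context (not part of the diff), this helper is consumed in runChatCompletionStream further down: the AI SDK stream is wrapped in a fresh ReadableStream and pumped chunk by chunk. A simplified sketch, where the upstream stream is a stand-in for response.toAIStream():

import { pumpStreamUntilDone } from '@typebot.io/ai/pumpStreamUntilDone'

// Stand-in upstream: any ReadableStream<Uint8Array> works
const upstream = new ReadableStream<Uint8Array>({
  start(controller) {
    controller.enqueue(new TextEncoder().encode('0:"Hello"\n'))
    controller.close()
  },
})

const stream = new ReadableStream<Uint8Array>({
  async start(controller) {
    // Recursively reads from the upstream reader and forwards every chunk
    await pumpStreamUntilDone(controller, upstream.getReader())
    controller.close()
  },
})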

packages/ai/schemas.ts (new file, 79 lines)
View File

@ -0,0 +1,79 @@
import { option } from '@typebot.io/forge'
import { z } from '@typebot.io/forge/zod'
const parameterBase = {
name: option.string.layout({
label: 'Name',
placeholder: 'myVariable',
withVariableButton: false,
}),
description: option.string.layout({
label: 'Description',
withVariableButton: false,
}),
required: option.boolean.layout({
label: 'Is required?',
}),
}
export const toolParametersSchema = option
.array(
option.discriminatedUnion('type', [
option
.object({
type: option.literal('string'),
})
.extend(parameterBase),
option
.object({
type: option.literal('number'),
})
.extend(parameterBase),
option
.object({
type: option.literal('boolean'),
})
.extend(parameterBase),
option
.object({
type: option.literal('enum'),
values: option
.array(option.string)
.layout({ itemLabel: 'possible value' }),
})
.extend(parameterBase),
])
)
.layout({
accordion: 'Parameters',
itemLabel: 'parameter',
})
const functionToolItemSchema = option.object({
type: option.literal('function'),
name: option.string.layout({
label: 'Name',
placeholder: 'myFunctionName',
withVariableButton: false,
}),
description: option.string.layout({
label: 'Description',
placeholder: 'A brief description of what this function does.',
withVariableButton: false,
}),
parameters: toolParametersSchema,
code: option.string.layout({
inputType: 'code',
label: 'Code',
lang: 'javascript',
moreInfoTooltip:
'A javascript code snippet that can use the defined parameters. It should return a value.',
withVariableButton: false,
}),
})
export const toolsSchema = option
.array(option.discriminatedUnion('type', [functionToolItemSchema]))
.layout({ accordion: 'Tools', itemLabel: 'tool' })
export type Tools = z.infer<typeof toolsSchema>

View File

@ -0,0 +1,57 @@
import { ImagePart, TextPart, UserContent } from 'ai'
import ky, { HTTPError } from 'ky'
type Props = {
input: string
shouldDownloadImages: boolean
}
export const splitUserTextMessageIntoBlocks = async ({
input,
shouldDownloadImages,
}: Props): Promise<UserContent> => {
const urlRegex = /(^|\n\n)(https?:\/\/[^\s]+)(\n\n|$)/g
const match = input.match(urlRegex)
if (!match) return input
let parts: (TextPart | ImagePart)[] = []
let processedInput = input
for (const url of match) {
const textBeforeUrl = processedInput.slice(0, processedInput.indexOf(url))
if (textBeforeUrl.trim().length > 0) {
parts.push({ type: 'text', text: textBeforeUrl })
}
const cleanUrl = url.trim()
try {
const response = await ky.get(cleanUrl)
if (
!response.ok ||
!response.headers.get('content-type')?.startsWith('image/')
) {
parts.push({ type: 'text', text: cleanUrl })
} else {
parts.push({
type: 'image',
image: shouldDownloadImages
? await response.arrayBuffer()
: url.trim(),
})
}
} catch (err) {
if (err instanceof HTTPError) {
console.log(err.response.status, await err.response.text())
} else {
console.error(err)
}
}
processedInput = processedInput.slice(
processedInput.indexOf(url) + url.length
)
}
if (processedInput.trim().length > 0) {
parts.push({ type: 'text', text: processedInput })
}
return parts
}
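
A quick sketch (not part of the diff) of what the splitter produces when a user message contains a standalone image URL. The package path and the URL are assumptions.

import { splitUserTextMessageIntoBlocks } from '@typebot.io/ai/splitUserTextMessageIntoBlocks'

const content = await splitUserTextMessageIntoBlocks({
  input: 'What is in this picture?\n\nhttps://example.com/cat.png',
  shouldDownloadImages: false,
})
// If the URL responds with an image content-type, this resolves to roughly:
// [
//   { type: 'text', text: 'What is in this picture?' },
//   { type: 'image', image: 'https://example.com/cat.png' },
// ]
// With shouldDownloadImages: true, the image is fetched and passed as an ArrayBuffer instead.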

View File

@ -0,0 +1,8 @@
{
"extends": "@typebot.io/tsconfig/base.json",
"include": ["**/*.ts"],
"exclude": ["node_modules"],
"compilerOptions": {
"lib": ["ES2021", "DOM"]
}
}

packages/ai/types.ts (new file, 12 lines)
View File

@ -0,0 +1,12 @@
export type DialogueMessage = {
role: 'Dialogue'
startsBy?: 'user' | 'assistant'
dialogueVariableId?: string
}
export type StandardMessage = {
role: 'user' | 'assistant' | 'system'
content?: string
}
export type Message = DialogueMessage | StandardMessage
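
An example (not part of the diff) of the two message shapes side by side, as fed to parseChatCompletionMessages; the package path and variable id are assumptions.

import { Message } from '@typebot.io/ai/types'

const messages: Message[] = [
  // Standard messages carry their content inline
  { role: 'system', content: 'You are a helpful assistant.' },
  { role: 'user', content: 'Hello!' },
  // A Dialogue message points at a variable holding an alternating user/assistant history
  { role: 'Dialogue', startsBy: 'user', dialogueVariableId: 'vChatHistory' },
]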

View File

@ -128,7 +128,10 @@ export const getMessageStream = async ({
state: session.state,
currentBlockId: session.state.currentBlockId,
})
if (newSetVariableHistory.length > 0)
if (
newSetVariableHistory.length > 0 &&
session.state.typebotsQueue[0].resultId
)
await saveSetVariableHistoryItems(newSetVariableHistory)
await updateSession({
id: session.id,

View File

@ -20,7 +20,7 @@
"@typebot.io/variables": "workspace:*",
"@udecode/plate-common": "30.4.5",
"@typebot.io/logic": "workspace:*",
"ai": "3.2.1",
"ai": "3.2.22",
"chrono-node": "2.7.6",
"date-fns": "2.30.0",
"date-fns-tz": "2.0.0",

View File

@ -14,6 +14,7 @@
"license": "AGPL-3.0-or-later",
"dependencies": {
"@ark-ui/solid": "3.3.0",
"@ai-sdk/ui-utils": "0.0.12",
"@stripe/stripe-js": "1.54.1",
"@udecode/plate-common": "30.4.5",
"dompurify": "3.0.6",
@ -25,6 +26,7 @@
"devDependencies": {
"@babel/preset-typescript": "7.22.5",
"@rollup/plugin-babel": "6.0.3",
"@rollup/plugin-commonjs": "26.0.1",
"@rollup/plugin-node-resolve": "15.1.0",
"@rollup/plugin-terser": "0.4.3",
"@rollup/plugin-typescript": "11.1.2",

View File

@ -7,6 +7,7 @@ import tailwindcss from 'tailwindcss'
import typescript from '@rollup/plugin-typescript'
import { typescriptPaths } from 'rollup-plugin-typescript-paths'
import replace from '@rollup/plugin-replace'
import commonjs from '@rollup/plugin-commonjs'
import fs from 'fs'
const extensions = ['.ts', '.tsx']
@ -27,6 +28,7 @@ const indexConfig = {
},
plugins: [
resolve({ extensions }),
commonjs(),
babel({
babelHelpers: 'bundled',
exclude: 'node_modules/**',

View File

@ -1,5 +1,5 @@
import { ClientSideActionContext } from '@/types'
import { readDataStream } from '@/utils/ai/readDataStream'
import { readDataStream } from '@ai-sdk/ui-utils'
import { guessApiHost } from '@/utils/guessApiHost'
import { isNotEmpty } from '@typebot.io/lib/utils'
import { createUniqueId } from 'solid-js'

View File

@ -1,80 +0,0 @@
import { StreamPartType, parseStreamPart } from './streamParts'
const NEWLINE = '\n'.charCodeAt(0)
// concatenates all the chunks into a single Uint8Array
function concatChunks(chunks: Uint8Array[], totalLength: number) {
const concatenatedChunks = new Uint8Array(totalLength)
let offset = 0
for (const chunk of chunks) {
concatenatedChunks.set(chunk, offset)
offset += chunk.length
}
chunks.length = 0
return concatenatedChunks
}
/**
Converts a ReadableStreamDefaultReader into an async generator that yields
StreamPart objects.
@param reader
Reader for the stream to read from.
@param isAborted
Optional function that returns true if the request has been aborted.
If the function returns true, the generator will stop reading the stream.
If the function is not provided, the generator will not stop reading the stream.
*/
export async function* readDataStream(
reader: ReadableStreamDefaultReader<Uint8Array>,
{
isAborted,
}: {
isAborted?: () => boolean
} = {}
): AsyncGenerator<StreamPartType> {
// implementation note: this slightly more complex algorithm is required
// to pass the tests in the edge environment.
const decoder = new TextDecoder()
const chunks: Uint8Array[] = []
let totalLength = 0
while (true) {
const { value } = await reader.read()
if (value) {
chunks.push(value)
totalLength += value.length
if (value[value.length - 1] !== NEWLINE) {
// if the last character is not a newline, we have not read the whole JSON value
continue
}
}
if (chunks.length === 0) {
break // we have reached the end of the stream
}
const concatenatedChunks = concatChunks(chunks, totalLength)
totalLength = 0
const streamParts = decoder
.decode(concatenatedChunks, { stream: true })
.split('\n')
.filter((line) => line !== '') // splitting leaves an empty string at the end
.map(parseStreamPart)
for (const streamPart of streamParts) {
yield streamPart
}
// The request has been aborted, stop reading the stream.
if (isAborted?.()) {
reader.cancel()
break
}
}
}

View File

@ -1,377 +0,0 @@
import {
AssistantMessage,
DataMessage,
FunctionCall,
JSONValue,
ToolCall,
} from './types'
type StreamString =
`${(typeof StreamStringPrefixes)[keyof typeof StreamStringPrefixes]}:${string}\n`
export interface StreamPart<CODE extends string, NAME extends string, TYPE> {
code: CODE
name: NAME
parse: (value: JSONValue) => { type: NAME; value: TYPE }
}
const textStreamPart: StreamPart<'0', 'text', string> = {
code: '0',
name: 'text',
parse: (value: JSONValue) => {
if (typeof value !== 'string') {
throw new Error('"text" parts expect a string value.')
}
return { type: 'text', value }
},
}
const functionCallStreamPart: StreamPart<
'1',
'function_call',
{ function_call: FunctionCall }
> = {
code: '1',
name: 'function_call',
parse: (value: JSONValue) => {
if (
value == null ||
typeof value !== 'object' ||
!('function_call' in value) ||
typeof value.function_call !== 'object' ||
value.function_call == null ||
!('name' in value.function_call) ||
!('arguments' in value.function_call) ||
typeof value.function_call.name !== 'string' ||
typeof value.function_call.arguments !== 'string'
) {
throw new Error(
'"function_call" parts expect an object with a "function_call" property.'
)
}
return {
type: 'function_call',
value: value as unknown as { function_call: FunctionCall },
}
},
}
const dataStreamPart: StreamPart<'2', 'data', Array<JSONValue>> = {
code: '2',
name: 'data',
parse: (value: JSONValue) => {
if (!Array.isArray(value)) {
throw new Error('"data" parts expect an array value.')
}
return { type: 'data', value }
},
}
const errorStreamPart: StreamPart<'3', 'error', string> = {
code: '3',
name: 'error',
parse: (value: JSONValue) => {
if (typeof value !== 'string') {
throw new Error('"error" parts expect a string value.')
}
return { type: 'error', value }
},
}
const assistantMessageStreamPart: StreamPart<
'4',
'assistant_message',
AssistantMessage
> = {
code: '4',
name: 'assistant_message',
parse: (value: JSONValue) => {
if (
value == null ||
typeof value !== 'object' ||
!('id' in value) ||
!('role' in value) ||
!('content' in value) ||
typeof value.id !== 'string' ||
typeof value.role !== 'string' ||
value.role !== 'assistant' ||
!Array.isArray(value.content) ||
!value.content.every(
(item) =>
item != null &&
typeof item === 'object' &&
'type' in item &&
item.type === 'text' &&
'text' in item &&
item.text != null &&
typeof item.text === 'object' &&
'value' in item.text &&
typeof item.text.value === 'string'
)
) {
throw new Error(
'"assistant_message" parts expect an object with an "id", "role", and "content" property.'
)
}
return {
type: 'assistant_message',
value: value as AssistantMessage,
}
},
}
const assistantControlDataStreamPart: StreamPart<
'5',
'assistant_control_data',
{
threadId: string
messageId: string
}
> = {
code: '5',
name: 'assistant_control_data',
parse: (value: JSONValue) => {
if (
value == null ||
typeof value !== 'object' ||
!('threadId' in value) ||
!('messageId' in value) ||
typeof value.threadId !== 'string' ||
typeof value.messageId !== 'string'
) {
throw new Error(
'"assistant_control_data" parts expect an object with a "threadId" and "messageId" property.'
)
}
return {
type: 'assistant_control_data',
value: {
threadId: value.threadId,
messageId: value.messageId,
},
}
},
}
const dataMessageStreamPart: StreamPart<'6', 'data_message', DataMessage> = {
code: '6',
name: 'data_message',
parse: (value: JSONValue) => {
if (
value == null ||
typeof value !== 'object' ||
!('role' in value) ||
!('data' in value) ||
typeof value.role !== 'string' ||
value.role !== 'data'
) {
throw new Error(
'"data_message" parts expect an object with a "role" and "data" property.'
)
}
return {
type: 'data_message',
value: value as DataMessage,
}
},
}
const toolCallStreamPart: StreamPart<
'7',
'tool_calls',
{ tool_calls: ToolCall[] }
> = {
code: '7',
name: 'tool_calls',
parse: (value: JSONValue) => {
if (
value == null ||
typeof value !== 'object' ||
!('tool_calls' in value) ||
typeof value.tool_calls !== 'object' ||
value.tool_calls == null ||
!Array.isArray(value.tool_calls) ||
value.tool_calls.some(
(tc) =>
tc == null ||
typeof tc !== 'object' ||
!('id' in tc) ||
typeof tc.id !== 'string' ||
!('type' in tc) ||
typeof tc.type !== 'string' ||
!('function' in tc) ||
tc.function == null ||
typeof tc.function !== 'object' ||
!('arguments' in tc.function) ||
typeof tc.function.name !== 'string' ||
typeof tc.function.arguments !== 'string'
)
) {
throw new Error(
'"tool_calls" parts expect an object with a ToolCallPayload.'
)
}
return {
type: 'tool_calls',
value: value as unknown as { tool_calls: ToolCall[] },
}
},
}
const messageAnnotationsStreamPart: StreamPart<
'8',
'message_annotations',
Array<JSONValue>
> = {
code: '8',
name: 'message_annotations',
parse: (value: JSONValue) => {
if (!Array.isArray(value)) {
throw new Error('"message_annotations" parts expect an array value.')
}
return { type: 'message_annotations', value }
},
}
const streamParts = [
textStreamPart,
functionCallStreamPart,
dataStreamPart,
errorStreamPart,
assistantMessageStreamPart,
assistantControlDataStreamPart,
dataMessageStreamPart,
toolCallStreamPart,
messageAnnotationsStreamPart,
] as const
// union type of all stream parts
type StreamParts =
| typeof textStreamPart
| typeof functionCallStreamPart
| typeof dataStreamPart
| typeof errorStreamPart
| typeof assistantMessageStreamPart
| typeof assistantControlDataStreamPart
| typeof dataMessageStreamPart
| typeof toolCallStreamPart
| typeof messageAnnotationsStreamPart
/**
* Maps the type of a stream part to its value type.
*/
type StreamPartValueType = {
[P in StreamParts as P['name']]: ReturnType<P['parse']>['value']
}
export type StreamPartType =
| ReturnType<typeof textStreamPart.parse>
| ReturnType<typeof functionCallStreamPart.parse>
| ReturnType<typeof dataStreamPart.parse>
| ReturnType<typeof errorStreamPart.parse>
| ReturnType<typeof assistantMessageStreamPart.parse>
| ReturnType<typeof assistantControlDataStreamPart.parse>
| ReturnType<typeof dataMessageStreamPart.parse>
| ReturnType<typeof toolCallStreamPart.parse>
| ReturnType<typeof messageAnnotationsStreamPart.parse>
export const streamPartsByCode = {
[textStreamPart.code]: textStreamPart,
[functionCallStreamPart.code]: functionCallStreamPart,
[dataStreamPart.code]: dataStreamPart,
[errorStreamPart.code]: errorStreamPart,
[assistantMessageStreamPart.code]: assistantMessageStreamPart,
[assistantControlDataStreamPart.code]: assistantControlDataStreamPart,
[dataMessageStreamPart.code]: dataMessageStreamPart,
[toolCallStreamPart.code]: toolCallStreamPart,
[messageAnnotationsStreamPart.code]: messageAnnotationsStreamPart,
} as const
/**
* The map of prefixes for data in the stream
*
* - 0: Text from the LLM response
* - 1: (OpenAI) function_call responses
* - 2: custom JSON added by the user using `Data`
* - 6: (OpenAI) tool_call responses
*
* Example:
* ```
* 0:Vercel
* 0:'s
* 0: AI
* 0: AI
* 0: SDK
* 0: is great
* 0:!
* 2: { "someJson": "value" }
* 1: {"function_call": {"name": "get_current_weather", "arguments": "{\\n\\"location\\": \\"Charlottesville, Virginia\\",\\n\\"format\\": \\"celsius\\"\\n}"}}
* 6: {"tool_call": {"id": "tool_0", "type": "function", "function": {"name": "get_current_weather", "arguments": "{\\n\\"location\\": \\"Charlottesville, Virginia\\",\\n\\"format\\": \\"celsius\\"\\n}"}}}
*```
*/
export const StreamStringPrefixes = {
[textStreamPart.name]: textStreamPart.code,
[functionCallStreamPart.name]: functionCallStreamPart.code,
[dataStreamPart.name]: dataStreamPart.code,
[errorStreamPart.name]: errorStreamPart.code,
[assistantMessageStreamPart.name]: assistantMessageStreamPart.code,
[assistantControlDataStreamPart.name]: assistantControlDataStreamPart.code,
[dataMessageStreamPart.name]: dataMessageStreamPart.code,
[toolCallStreamPart.name]: toolCallStreamPart.code,
[messageAnnotationsStreamPart.name]: messageAnnotationsStreamPart.code,
} as const
export const validCodes = streamParts.map((part) => part.code)
/**
Parses a stream part from a string.
@param line The string to parse.
@returns The parsed stream part.
@throws An error if the string cannot be parsed.
*/
export const parseStreamPart = (line: string): StreamPartType => {
const firstSeparatorIndex = line.indexOf(':')
if (firstSeparatorIndex === -1) {
throw new Error('Failed to parse stream string. No separator found.')
}
const prefix = line.slice(0, firstSeparatorIndex)
if (!validCodes.includes(prefix as keyof typeof streamPartsByCode)) {
throw new Error(`Failed to parse stream string. Invalid code ${prefix}.`)
}
const code = prefix as keyof typeof streamPartsByCode
const textValue = line.slice(firstSeparatorIndex + 1)
const jsonValue: JSONValue = JSON.parse(textValue)
return streamPartsByCode[code].parse(jsonValue)
}
/**
Prepends a string with a prefix from the `StreamChunkPrefixes`, JSON-ifies it,
and appends a new line.
It ensures type-safety for the part type and value.
*/
export function formatStreamPart<T extends keyof StreamPartValueType>(
type: T,
value: StreamPartValueType[T]
): StreamString {
const streamPart = streamParts.find((part) => part.name === type)
if (!streamPart) {
throw new Error(`Invalid stream part type: ${type}`)
}
return `${streamPart.code}:${JSON.stringify(value)}\n`
}

View File

@ -1,355 +0,0 @@
/* eslint-disable @typescript-eslint/ban-types */
// https://github.com/openai/openai-node/blob/07b3504e1c40fd929f4aae1651b83afc19e3baf8/src/resources/chat/completions.ts#L146-L159
export interface FunctionCall {
/**
* The arguments to call the function with, as generated by the model in JSON
* format. Note that the model does not always generate valid JSON, and may
* hallucinate parameters not defined by your function schema. Validate the
* arguments in your code before calling your function.
*/
arguments?: string
/**
* The name of the function to call.
*/
name?: string
}
/**
* The tool calls generated by the model, such as function calls.
*/
export interface ToolCall {
// The ID of the tool call.
id: string
// The type of the tool. Currently, only `function` is supported.
type: string
// The function that the model called.
function: {
// The name of the function.
name: string
// The arguments to call the function with, as generated by the model in JSON
arguments: string
}
}
/**
* Controls which (if any) function is called by the model.
* - none means the model will not call a function and instead generates a message.
* - auto means the model can pick between generating a message or calling a function.
* - Specifying a particular function via {"type: "function", "function": {"name": "my_function"}} forces the model to call that function.
* none is the default when no functions are present. auto is the default if functions are present.
*/
export type ToolChoice =
| 'none'
| 'auto'
| { type: 'function'; function: { name: string } }
/**
* A list of tools the model may call. Currently, only functions are supported as a tool.
* Use this to provide a list of functions the model may generate JSON inputs for.
*/
export interface Tool {
type: 'function'
function: Function
}
export interface Function {
/**
* The name of the function to be called. Must be a-z, A-Z, 0-9, or contain
* underscores and dashes, with a maximum length of 64.
*/
name: string
/**
* The parameters the functions accepts, described as a JSON Schema object. See the
* [guide](/docs/guides/gpt/function-calling) for examples, and the
* [JSON Schema reference](https://json-schema.org/understanding-json-schema/) for
* documentation about the format.
*
* To describe a function that accepts no parameters, provide the value
* `{"type": "object", "properties": {}}`.
*/
parameters: Record<string, unknown>
/**
* A description of what the function does, used by the model to choose when and
* how to call the function.
*/
description?: string
}
export type IdGenerator = () => string
/**
* Shared types between the API and UI packages.
*/
export interface Message {
id: string
tool_call_id?: string
createdAt?: Date
content: string
ui?: string | JSX.Element | JSX.Element[] | null | undefined
role: 'system' | 'user' | 'assistant' | 'function' | 'data' | 'tool'
/**
* If the message has a role of `function`, the `name` field is the name of the function.
* Otherwise, the name field should not be set.
*/
name?: string
/**
* If the assistant role makes a function call, the `function_call` field
* contains the function call name and arguments. Otherwise, the field should
* not be set. (Deprecated and replaced by tool_calls.)
*/
function_call?: string | FunctionCall
data?: JSONValue
/**
* If the assistant role makes a tool call, the `tool_calls` field contains
* the tool call name and arguments. Otherwise, the field should not be set.
*/
tool_calls?: string | ToolCall[]
/**
* Additional message-specific information added on the server via StreamData
*/
annotations?: JSONValue[] | undefined
}
export type CreateMessage = Omit<Message, 'id'> & {
id?: Message['id']
}
export type ChatRequest = {
messages: Message[]
options?: RequestOptions
// @deprecated
functions?: Array<Function>
// @deprecated
function_call?: FunctionCall
data?: Record<string, string>
tools?: Array<Tool>
tool_choice?: ToolChoice
}
export type FunctionCallHandler = (
chatMessages: Message[],
functionCall: FunctionCall
) => Promise<ChatRequest | void>
export type ToolCallHandler = (
chatMessages: Message[],
toolCalls: ToolCall[]
) => Promise<ChatRequest | void>
export type RequestOptions = {
headers?: Record<string, string> | Headers
body?: object
}
export type ChatRequestOptions = {
options?: RequestOptions
functions?: Array<Function>
function_call?: FunctionCall
tools?: Array<Tool>
tool_choice?: ToolChoice
data?: Record<string, string>
}
export type UseChatOptions = {
/**
* The API endpoint that accepts a `{ messages: Message[] }` object and returns
* a stream of tokens of the AI chat response. Defaults to `/api/chat`.
*/
api?: string
/**
* A unique identifier for the chat. If not provided, a random one will be
* generated. When provided, the `useChat` hook with the same `id` will
* have shared states across components.
*/
id?: string
/**
* Initial messages of the chat. Useful to load an existing chat history.
*/
initialMessages?: Message[]
/**
* Initial input of the chat.
*/
initialInput?: string
/**
* Callback function to be called when a function call is received.
* If the function returns a `ChatRequest` object, the request will be sent
* automatically to the API and will be used to update the chat.
*/
experimental_onFunctionCall?: FunctionCallHandler
/**
* Callback function to be called when a tool call is received.
* If the function returns a `ChatRequest` object, the request will be sent
* automatically to the API and will be used to update the chat.
*/
experimental_onToolCall?: ToolCallHandler
/**
* Callback function to be called when the API response is received.
*/
onResponse?: (response: Response) => void | Promise<void>
/**
* Callback function to be called when the chat is finished streaming.
*/
onFinish?: (message: Message) => void
/**
* Callback function to be called when an error is encountered.
*/
onError?: (error: Error) => void
/**
* A way to provide a function that is going to be used for ids for messages.
* If not provided nanoid is used by default.
*/
generateId?: IdGenerator
/**
* The credentials mode to be used for the fetch request.
* Possible values are: 'omit', 'same-origin', 'include'.
* Defaults to 'same-origin'.
*/
credentials?: RequestCredentials
/**
* HTTP headers to be sent with the API request.
*/
headers?: Record<string, string> | Headers
/**
* Extra body object to be sent with the API request.
* @example
* Send a `sessionId` to the API along with the messages.
* ```js
* useChat({
* body: {
* sessionId: '123',
* }
* })
* ```
*/
body?: object
/**
* Whether to send extra message fields such as `message.id` and `message.createdAt` to the API.
* Defaults to `false`. When set to `true`, the API endpoint might need to
* handle the extra fields before forwarding the request to the AI service.
*/
sendExtraMessageFields?: boolean
/** Stream mode (default to "stream-data") */
streamMode?: 'stream-data' | 'text'
}
export type UseCompletionOptions = {
/**
* The API endpoint that accepts a `{ prompt: string }` object and returns
* a stream of tokens of the AI completion response. Defaults to `/api/completion`.
*/
api?: string
/**
* An unique identifier for the chat. If not provided, a random one will be
* generated. When provided, the `useChat` hook with the same `id` will
* have shared states across components.
*/
id?: string
/**
* Initial prompt input of the completion.
*/
initialInput?: string
/**
* Initial completion result. Useful to load an existing history.
*/
initialCompletion?: string
/**
* Callback function to be called when the API response is received.
*/
onResponse?: (response: Response) => void | Promise<void>
/**
* Callback function to be called when the completion is finished streaming.
*/
onFinish?: (prompt: string, completion: string) => void
/**
* Callback function to be called when an error is encountered.
*/
onError?: (error: Error) => void
/**
* The credentials mode to be used for the fetch request.
* Possible values are: 'omit', 'same-origin', 'include'.
* Defaults to 'same-origin'.
*/
credentials?: RequestCredentials
/**
* HTTP headers to be sent with the API request.
*/
headers?: Record<string, string> | Headers
/**
* Extra body object to be sent with the API request.
* @example
* Send a `sessionId` to the API along with the prompt.
* ```js
* useChat({
* body: {
* sessionId: '123',
* }
* })
* ```
*/
body?: object
/** Stream mode (default to "stream-data") */
streamMode?: 'stream-data' | 'text'
}
export type JSONValue =
| null
| string
| number
| boolean
| { [x: string]: JSONValue }
| Array<JSONValue>
export type AssistantMessage = {
id: string
role: 'assistant'
content: Array<{
type: 'text'
text: {
value: string
}
}>
}
/*
* A data message is an application-specific message from the assistant
* that should be shown in order with the other messages.
*
* It can trigger other operations on the frontend, such as annotating
* a map.
*/
export type DataMessage = {
id?: string // optional id, implement if needed (e.g. for persistance)
role: 'data'
data: JSONValue // application-specific data
}

View File

@ -1,10 +1,20 @@
import { createAction, option } from '@typebot.io/forge'
import { auth } from '../auth'
import { Anthropic } from '@anthropic-ai/sdk'
import { AnthropicStream } from 'ai'
import { anthropicModels, defaultAnthropicOptions } from '../constants'
import { parseChatMessages } from '../helpers/parseChatMessages'
import {
anthropicLegacyModels,
anthropicModelLabels,
anthropicModels,
defaultAnthropicOptions,
maxToolRoundtrips,
} from '../constants'
import { isDefined } from '@typebot.io/lib'
import { createAnthropic } from '@ai-sdk/anthropic'
import { generateText } from 'ai'
import { runChatCompletionStream } from '../helpers/runChatCompletionStream'
import { toolsSchema } from '@typebot.io/ai/schemas'
import { parseTools } from '@typebot.io/ai/parseTools'
import { parseChatCompletionMessages } from '@typebot.io/ai/parseChatCompletionMessages'
import { isModelCompatibleWithVision } from '../helpers/isModelCompatibleWithVision'
const nativeMessageContentSchema = {
content: option.string.layout({
@ -40,7 +50,11 @@ const dialogueMessageItemSchema = option.object({
export const options = option.object({
model: option.enum(anthropicModels).layout({
defaultValue: defaultAnthropicOptions.model,
toLabels: (val) =>
val
? anthropicModelLabels[val as (typeof anthropicModels)[number]]
: undefined,
hiddenItems: anthropicLegacyModels,
}),
messages: option
.array(
@ -51,6 +65,7 @@ export const options = option.object({
])
)
.layout({ accordion: 'Messages', itemLabel: 'message', isOrdered: true }),
tools: toolsSchema,
systemMessage: option.string.layout({
accordion: 'Advanced Settings',
label: 'System prompt',
@ -76,8 +91,12 @@ export const options = option.object({
}),
})
const transformToChatCompletionOptions = (options: any) => ({
const transformToChatCompletionOptions = (
options: any,
resetModel = false
) => ({
...options,
model: resetModel ? undefined : options.model,
action: 'Create chat completion',
responseMapping: options.responseMapping?.map((res: any) =>
res.item === 'Message Content' ? { ...res, item: 'Message content' } : res
@ -91,11 +110,11 @@ export const createChatMessage = createAction({
turnableInto: [
{
blockId: 'mistral',
transform: transformToChatCompletionOptions,
transform: (opts) => transformToChatCompletionOptions(opts, true),
},
{
blockId: 'openai',
transform: transformToChatCompletionOptions,
transform: (opts) => transformToChatCompletionOptions(opts, true),
},
{ blockId: 'open-router', transform: transformToChatCompletionOptions },
{ blockId: 'together-ai', transform: transformToChatCompletionOptions },
@ -104,72 +123,43 @@ export const createChatMessage = createAction({
responseMapping?.map((res) => res.variableId).filter(isDefined) ?? [],
run: {
server: async ({ credentials: { apiKey }, options, variables, logs }) => {
const client = new Anthropic({
apiKey: apiKey,
const modelName = options.model ?? defaultAnthropicOptions.model
const model = createAnthropic({
apiKey,
})(modelName)
const { text } = await generateText({
model,
temperature: options.temperature
? Number(options.temperature)
: undefined,
messages: await parseChatCompletionMessages({
messages: options.messages,
isVisionEnabled: isModelCompatibleWithVision(modelName),
shouldDownloadImages: true,
variables,
}),
tools: parseTools({ tools: options.tools, variables }),
maxToolRoundtrips: maxToolRoundtrips,
})
const messages = await parseChatMessages({ options, variables })
try {
const reply = await client.messages.create({
messages,
model: options.model ?? defaultAnthropicOptions.model,
system: options.systemMessage,
temperature: options.temperature
? Number(options.temperature)
: undefined,
max_tokens: options.maxTokens
? Number(options.maxTokens)
: defaultAnthropicOptions.maxTokens,
})
messages.push(reply)
options.responseMapping?.forEach((mapping) => {
if (!mapping.variableId) return
if (!mapping.item || mapping.item === 'Message Content')
variables.set(mapping.variableId, reply.content[0].text)
})
} catch (error) {
if (error instanceof Anthropic.APIError) {
logs.add({
status: 'error',
description: `${error.status} ${error.name}`,
details: error.message,
})
} else {
throw error
}
}
options.responseMapping?.forEach((mapping) => {
if (!mapping.variableId) return
if (!mapping.item || mapping.item === 'Message Content')
variables.set(mapping.variableId, text)
})
},
stream: {
getStreamVariableId: (options) =>
options.responseMapping?.find(
(res) => res.item === 'Message Content' || !res.item
)?.variableId,
run: async ({ credentials: { apiKey }, options, variables }) => {
const client = new Anthropic({
apiKey: apiKey,
})
const messages = await parseChatMessages({ options, variables })
const response = await client.messages.create({
messages,
model: options.model ?? defaultAnthropicOptions.model,
system: options.systemMessage,
temperature: options.temperature
? Number(options.temperature)
: undefined,
max_tokens: options.maxTokens
? Number(options.maxTokens)
: defaultAnthropicOptions.maxTokens,
stream: true,
})
return { stream: AnthropicStream(response) }
},
run: async ({ credentials: { apiKey }, options, variables }) =>
runChatCompletionStream({
credentials: { apiKey },
options,
variables,
}),
},
},
})

View File

@ -1,4 +1,5 @@
export const anthropicModels = [
'claude-3-5-sonnet-20240620',
'claude-3-opus-20240229',
'claude-3-sonnet-20240229',
'claude-3-haiku-20240307',
@ -7,8 +8,24 @@ export const anthropicModels = [
'claude-instant-1.2',
] as const
export const anthropicLegacyModels = [
'claude-2.1',
'claude-2.0',
'claude-instant-1.2',
]
export const anthropicModelLabels = {
'claude-3-5-sonnet-20240620': 'Claude 3.5 Sonnet',
'claude-3-opus-20240229': 'Claude 3.0 Opus',
'claude-3-sonnet-20240229': 'Claude 3.0 Sonnet',
'claude-3-haiku-20240307': 'Claude 3.0 Haiku',
'claude-2.1': 'Claude 2.1',
'claude-2.0': 'Claude 2.0',
'claude-instant-1.2': 'Claude Instant 1.2',
} satisfies Record<(typeof anthropicModels)[number], string>
export const defaultAnthropicOptions = {
model: anthropicModels[0],
model: 'claude-3-opus-20240229',
temperature: 1,
maxTokens: 1024,
} as const
@ -21,3 +38,5 @@ export const supportedImageTypes = [
'image/gif',
'image/webp',
] as const
export const maxToolRoundtrips = 10

View File

@ -0,0 +1,5 @@
import { wildcardMatch } from '@typebot.io/lib/wildcardMatch'
import { modelsWithImageUrlSupport } from '../constants'
export const isModelCompatibleWithVision = (model: string | undefined) =>
model ? wildcardMatch(modelsWithImageUrlSupport)(model) : false

View File

@ -1,148 +0,0 @@
import { Anthropic } from '@anthropic-ai/sdk'
import { options as createMessageOptions } from '../actions/createChatMessage'
import { VariableStore } from '@typebot.io/forge'
import { isDefined, isEmpty } from '@typebot.io/lib'
import { z } from '@typebot.io/forge/zod'
import ky, { HTTPError } from 'ky'
import {
defaultAnthropicOptions,
modelsWithImageUrlSupport,
supportedImageTypes,
} from '../constants'
import { wildcardMatch } from '@typebot.io/lib/wildcardMatch'
const isModelCompatibleWithImageUrls = (model: string | undefined) =>
model ? wildcardMatch(modelsWithImageUrlSupport)(model) : false
export const parseChatMessages = async ({
options: { messages, model },
variables,
}: {
options: Pick<z.infer<typeof createMessageOptions>, 'messages' | 'model'>
variables: VariableStore
}): Promise<Anthropic.Messages.MessageParam[]> => {
if (!messages) return []
const isVisionEnabled = isModelCompatibleWithImageUrls(
model ?? defaultAnthropicOptions.model
)
const parsedMessages = (
await Promise.all(
messages.map(async (message) => {
if (!message.role) return
if (message.role === 'Dialogue') {
if (!message.dialogueVariableId) return
const dialogue = variables.get(message.dialogueVariableId) ?? []
const dialogueArr = Array.isArray(dialogue) ? dialogue : [dialogue]
return Promise.all(
dialogueArr.map(async (dialogueItem, index) => {
if (index === 0 && message.startsBy === 'assistant')
return {
role: 'assistant',
content: dialogueItem,
}
if (index % (message.startsBy === 'assistant' ? 1 : 2) === 0) {
return {
role: 'user',
content: isVisionEnabled
? await splitUserTextMessageIntoBlocks(dialogueItem ?? '')
: dialogueItem,
}
}
return {
role: 'assistant',
content: dialogueItem,
}
})
)
}
if (!message.content) return
const content = variables.parse(message.content)
if (isEmpty(content)) return
if (message.role === 'user')
return {
role: 'user',
content: isVisionEnabled
? await splitUserTextMessageIntoBlocks(content)
: content,
}
return {
role: message.role,
content,
}
})
)
)
.flat()
.filter((message) => {
return isDefined(message?.role) && isDefined(message.content)
}) as Anthropic.Messages.MessageParam[]
return parsedMessages
}
const splitUserTextMessageIntoBlocks = async (
input: string
): Promise<
| string
| (Anthropic.Messages.TextBlockParam | Anthropic.Messages.ImageBlockParam)[]
> => {
const urlRegex = /(^|\n\n)(https?:\/\/[^\s]+)(\n\n|$)/g
const match = input.match(urlRegex)
if (!match) return input
const parts: (
| Anthropic.Messages.TextBlockParam
| Anthropic.Messages.ImageBlockParam
)[] = []
let processedInput = input
for (const url of match) {
const textBeforeUrl = processedInput.slice(0, processedInput.indexOf(url))
if (textBeforeUrl.trim().length > 0) {
parts.push({ type: 'text', text: textBeforeUrl })
}
const cleanUrl = url.trim()
try {
const response = await ky.get(cleanUrl)
if (
!response.ok ||
!supportedImageTypes.includes(
response.headers.get('content-type') as any
)
) {
parts.push({ type: 'text', text: cleanUrl })
} else {
parts.push({
type: 'image',
source: {
data: Buffer.from(await response.arrayBuffer()).toString('base64'),
type: 'base64',
media_type: response.headers.get('content-type') as any,
},
})
}
} catch (err) {
if (err instanceof HTTPError) {
console.log(err.response.status, await err.response.text())
} else {
console.error(err)
}
}
processedInput = processedInput.slice(
processedInput.indexOf(url) + url.length
)
}
if (processedInput.trim().length > 0) {
parts.push({ type: 'text', text: processedInput })
}
return parts
}

View File

@ -0,0 +1,110 @@
import { createAnthropic } from '@ai-sdk/anthropic'
import { defaultAnthropicOptions, maxToolRoundtrips } from '../constants'
import { APICallError, streamText, ToolCallPart, ToolResultPart } from 'ai'
import { isModelCompatibleWithVision } from './isModelCompatibleWithVision'
import { VariableStore } from '@typebot.io/forge'
import { ChatCompletionOptions } from '@typebot.io/openai-block/shared/parseChatCompletionOptions'
import { parseChatCompletionMessages } from '@typebot.io/ai/parseChatCompletionMessages'
import { parseTools } from '@typebot.io/ai/parseTools'
import { pumpStreamUntilDone } from '@typebot.io/ai/pumpStreamUntilDone'
import { appendToolResultsToMessages } from '@typebot.io/ai/appendToolResultsToMessages'
type Props = {
credentials: { apiKey?: string }
options: {
model?: string
temperature?: ChatCompletionOptions['temperature']
messages?: ChatCompletionOptions['messages']
tools?: ChatCompletionOptions['tools']
}
variables: VariableStore
}
export const runChatCompletionStream = async ({
credentials: { apiKey },
options,
variables,
}: Props): Promise<{
stream?: ReadableStream<any>
httpError?: { status: number; message: string }
}> => {
if (!apiKey) return { httpError: { status: 401, message: 'API key missing' } }
const modelName = options.model?.trim() ?? defaultAnthropicOptions.model
if (!modelName)
return { httpError: { status: 400, message: 'model not found' } }
const model = createAnthropic({
apiKey,
})(modelName)
try {
const streamConfig = {
model,
temperature: options.temperature
? Number(options.temperature)
: undefined,
tools: parseTools({ tools: options.tools, variables }),
messages: await parseChatCompletionMessages({
messages: options.messages,
isVisionEnabled: isModelCompatibleWithVision(modelName),
shouldDownloadImages: false,
variables,
}),
}
const response = await streamText(streamConfig)
let totalToolCalls = 0
let toolCalls: ToolCallPart[] = []
let toolResults: ToolResultPart[] = []
return {
stream: new ReadableStream({
async start(controller) {
const reader = response.toAIStream().getReader()
await pumpStreamUntilDone(controller, reader)
toolCalls = await response.toolCalls
if (toolCalls.length > 0)
toolResults = (await response.toolResults) as ToolResultPart[]
while (
toolCalls &&
toolCalls.length > 0 &&
totalToolCalls < maxToolRoundtrips
) {
totalToolCalls += 1
const newResponse = await streamText({
...streamConfig,
messages: appendToolResultsToMessages({
messages: streamConfig.messages,
toolCalls,
toolResults,
}),
})
const reader = newResponse.toAIStream().getReader()
await pumpStreamUntilDone(controller, reader)
toolCalls = await newResponse.toolCalls
if (toolCalls.length > 0)
toolResults = (await newResponse.toolResults) as ToolResultPart[]
}
controller.close()
},
}),
}
} catch (err) {
if (err instanceof APICallError) {
return {
httpError: { status: err.statusCode ?? 500, message: err.message },
}
}
return {
httpError: {
status: 500,
message: 'An error occurred while generating the stream',
},
}
}
}
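
The runner above streams a first `streamText` call, then, while the model keeps returning tool calls and `maxToolRoundtrips` is not exceeded, re-invokes `streamText` with the tool results appended and pipes each response into the same `ReadableStream`. `pumpStreamUntilDone` comes from the new `@typebot.io/ai` package and is not included in this diff; presumably it just forwards chunks from the reader into the controller, roughly like this sketch:

// Hedged sketch of what pumpStreamUntilDone likely does; the real implementation
// lives in @typebot.io/ai and is not part of this diff.
const pumpStreamUntilDone = async <T>(
  controller: ReadableStreamDefaultController<T>,
  reader: ReadableStreamDefaultReader<T>
): Promise<void> => {
  const { done, value } = await reader.read()
  if (done) return
  if (value !== undefined) controller.enqueue(value)
  return pumpStreamUntilDone(controller, reader)
}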

View File

@ -15,10 +15,10 @@
"typescript": "5.4.5"
},
"dependencies": {
"@anthropic-ai/sdk": "0.20.6",
"@ai-sdk/anthropic": "0.0.21",
"@ai-sdk/anthropic": "0.0.30",
"@typebot.io/openai-block": "workspace:*",
"ai": "3.2.1",
"ai": "3.2.22",
"@typebot.io/ai": "workspace:*",
"ky": "1.2.4"
}
}

View File

@ -14,6 +14,6 @@
"typescript": "5.4.5"
},
"dependencies": {
"ai": "3.2.1"
"ai": "3.2.22"
}
}

View File

@ -24,7 +24,6 @@ export const convertTextToSpeech = createAction({
fetcher: 'fetchModels',
label: 'Model',
placeholder: 'Select a model',
defaultValue: 'eleven_monolingual_v1',
}),
saveUrlInVariableId: option.string.layout({
label: 'Save audio URL in variable',

View File

@ -5,6 +5,11 @@ import { parseMessages } from '../helpers/parseMessages'
import { createMistral } from '@ai-sdk/mistral'
import { generateText, streamText } from 'ai'
import { fetchModels } from '../helpers/fetchModels'
import { toolsSchema } from '@typebot.io/ai/schemas'
import { parseTools } from '@typebot.io/ai/parseTools'
import { maxToolRoundtrips } from '../constants'
import { parseChatCompletionMessages } from '@typebot.io/ai/parseChatCompletionMessages'
import { runChatCompletionStream } from '../helpers/runChatCompletionStream'
const nativeMessageContentSchema = {
content: option.string.layout({
@ -59,6 +64,7 @@ export const options = option.object({
])
)
.layout({ accordion: 'Messages', itemLabel: 'message', isOrdered: true }),
tools: toolsSchema,
responseMapping: option.saveResponseArray(['Message content']).layout({
accordion: 'Save response',
}),
@ -71,6 +77,10 @@ export const createChatCompletion = createAction({
turnableInto: [
{
blockId: 'openai',
transform: (opts) => ({
...opts,
model: undefined,
}),
},
{
blockId: 'together-ai',
@ -110,8 +120,14 @@ export const createChatCompletion = createAction({
const { text } = await generateText({
model,
messages: parseMessages({ options, variables }),
tools: {},
messages: await parseChatCompletionMessages({
messages: options.messages,
variables,
isVisionEnabled: false,
shouldDownloadImages: false,
}),
tools: parseTools({ tools: options.tools, variables }),
maxToolRoundtrips: maxToolRoundtrips,
})
options.responseMapping?.forEach((mapping) => {
@ -125,19 +141,12 @@ export const createChatCompletion = createAction({
options.responseMapping?.find(
(res) => res.item === 'Message content' || !res.item
)?.variableId,
run: async ({ credentials: { apiKey }, options, variables }) => {
if (!options.model) return {}
const model = createMistral({
apiKey,
})(options.model)
const response = await streamText({
model,
messages: parseMessages({ options, variables }),
})
return { stream: response.toAIStream() }
},
run: async ({ credentials: { apiKey }, options, variables }) =>
runChatCompletionStream({
credentials: { apiKey },
options,
variables,
}),
},
},
})

View File

@ -1 +1,3 @@
export const apiBaseUrl = 'https://api.mistral.ai'
export const maxToolRoundtrips = 10

View File

@ -0,0 +1,105 @@
import { createMistral } from '@ai-sdk/mistral'
import { APICallError, streamText, ToolCallPart, ToolResultPart } from 'ai'
import { VariableStore } from '@typebot.io/forge'
import { ChatCompletionOptions } from '@typebot.io/openai-block/shared/parseChatCompletionOptions'
import { parseChatCompletionMessages } from '@typebot.io/ai/parseChatCompletionMessages'
import { parseTools } from '@typebot.io/ai/parseTools'
import { maxToolRoundtrips } from '../constants'
import { pumpStreamUntilDone } from '@typebot.io/ai/pumpStreamUntilDone'
import { appendToolResultsToMessages } from '@typebot.io/ai/appendToolResultsToMessages'
type Props = {
credentials: { apiKey?: string }
options: {
model?: string
temperature?: ChatCompletionOptions['temperature']
messages?: ChatCompletionOptions['messages']
tools?: ChatCompletionOptions['tools']
}
variables: VariableStore
}
export const runChatCompletionStream = async ({
credentials: { apiKey },
options,
variables,
}: Props): Promise<{
stream?: ReadableStream<any>
httpError?: { status: number; message: string }
}> => {
if (!apiKey) return { httpError: { status: 401, message: 'API key missing' } }
const modelName = options.model?.trim()
if (!modelName)
return { httpError: { status: 400, message: 'model not found' } }
const streamConfig = {
model: createMistral({
apiKey,
})(modelName),
messages: await parseChatCompletionMessages({
messages: options.messages,
isVisionEnabled: false,
shouldDownloadImages: false,
variables,
}),
temperature: options.temperature ? Number(options.temperature) : undefined,
tools: parseTools({ tools: options.tools, variables }),
}
try {
const response = await streamText(streamConfig)
let totalToolCalls = 0
let toolCalls: ToolCallPart[] = []
let toolResults: ToolResultPart[] = []
return {
stream: new ReadableStream({
async start(controller) {
const reader = response.toAIStream().getReader()
await pumpStreamUntilDone(controller, reader)
toolCalls = await response.toolCalls
if (toolCalls.length > 0)
toolResults = (await response.toolResults) as ToolResultPart[]
while (
toolCalls &&
toolCalls.length > 0 &&
totalToolCalls < maxToolRoundtrips
) {
totalToolCalls += 1
const newResponse = await streamText({
...streamConfig,
messages: appendToolResultsToMessages({
messages: streamConfig.messages,
toolCalls,
toolResults,
}),
})
const reader = newResponse.toAIStream().getReader()
await pumpStreamUntilDone(controller, reader)
toolCalls = await newResponse.toolCalls
if (toolCalls.length > 0)
toolResults = (await newResponse.toolResults) as ToolResultPart[]
}
controller.close()
},
}),
}
} catch (err) {
if (err instanceof APICallError) {
return {
httpError: { status: err.statusCode ?? 500, message: err.message },
}
}
return {
httpError: {
status: 500,
message: 'An error occurred while generating the stream',
},
}
}
}
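
The Mistral runner mirrors the Anthropic one. `appendToolResultsToMessages`, also from `@typebot.io/ai` and not shown in this diff, conceptually appends one assistant message carrying the tool calls and one tool message carrying their results, so the follow-up `streamText` call sees the completed roundtrip. A hedged sketch of that shape (field layout assumed, types from the `ai` package):

// Hedged sketch only; the real helper lives in @typebot.io/ai and may differ in detail.
import { CoreMessage, ToolCallPart, ToolResultPart } from 'ai'

export const appendToolResultsToMessages = ({
  messages,
  toolCalls,
  toolResults,
}: {
  messages: CoreMessage[]
  toolCalls: ToolCallPart[]
  toolResults: ToolResultPart[]
}): CoreMessage[] => [
  ...messages,
  { role: 'assistant', content: toolCalls },
  { role: 'tool', content: toolResults },
]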

View File

@ -14,9 +14,10 @@
"typescript": "5.4.5"
},
"dependencies": {
"@ai-sdk/mistral": "0.0.18",
"@ai-sdk/mistral": "0.0.22",
"@typebot.io/openai-block": "workspace:*",
"ai": "3.2.1",
"ky": "1.2.4"
"ai": "3.2.22",
"ky": "1.2.4",
"@typebot.io/ai": "workspace:*"
}
}

View File

@ -3,8 +3,8 @@ import { auth } from '../auth'
import { parseChatCompletionOptions } from '@typebot.io/openai-block/shared/parseChatCompletionOptions'
import { getChatCompletionSetVarIds } from '@typebot.io/openai-block/shared/getChatCompletionSetVarIds'
import { getChatCompletionStreamVarId } from '@typebot.io/openai-block/shared/getChatCompletionStreamVarId'
import { runChatCompletion } from '@typebot.io/openai-block/shared/runChatCompletion'
import { runChatCompletionStream } from '@typebot.io/openai-block/shared/runChatCompletionStream'
import { runOpenAIChatCompletion } from '@typebot.io/openai-block/shared/runOpenAIChatCompletion'
import { runOpenAIChatCompletionStream } from '@typebot.io/openai-block/shared/runOpenAIChatCompletionStream'
import { defaultOpenRouterOptions } from '../constants'
import ky from 'ky'
import { ModelsResponse } from '../types'
@ -24,7 +24,6 @@ export const createChatCompletion = createAction({
blockId: 'anthropic',
transform: (options) => ({
...options,
model: undefined,
action: 'Create Chat Message',
responseMapping: options.responseMapping?.map((res: any) =>
res.item === 'Message content'
@ -36,6 +35,7 @@ export const createChatCompletion = createAction({
],
options: parseChatCompletionOptions({
modelFetchId: 'fetchModels',
defaultTemperature: defaultOpenRouterOptions.temperature,
}),
getSetVariableIds: getChatCompletionSetVarIds,
fetchers: [
@ -56,18 +56,19 @@ export const createChatCompletion = createAction({
],
run: {
server: (params) =>
runChatCompletion({
runOpenAIChatCompletion({
...params,
config: { baseUrl: defaultOpenRouterOptions.baseUrl },
}),
stream: {
getStreamVariableId: getChatCompletionStreamVarId,
run: async (params) => ({
stream: await runChatCompletionStream({
run: async (params) =>
runOpenAIChatCompletionStream({
...params,
config: { baseUrl: defaultOpenRouterOptions.baseUrl },
config: {
baseUrl: defaultOpenRouterOptions.baseUrl,
},
}),
}),
},
},
})

View File

@ -1,3 +1,4 @@
export const defaultOpenRouterOptions = {
baseUrl: 'https://openrouter.ai/api/v1',
temperature: 1,
} as const

View File

@ -11,9 +11,9 @@ import { baseOptions } from '../baseOptions'
import { executeFunction } from '@typebot.io/variables/executeFunction'
import { readDataStream } from 'ai'
import { deprecatedAskAssistantOptions } from '../deprecated'
import { OpenAIAssistantStream } from '../helpers/OpenAIAssistantStream'
import { AssistantStream } from '../helpers/AssistantStream'
import { isModelCompatibleWithVision } from '../helpers/isModelCompatibleWithVision'
import { splitUserTextMessageIntoBlocks } from '../helpers/splitUserTextMessageIntoBlocks'
import { splitUserTextMessageIntoOpenAIBlocks } from '../helpers/splitUserTextMessageIntoOpenAIBlocks'
export const askAssistant = createAction({
auth,
@ -294,19 +294,16 @@ const createAssistantStream = async ({
{
role: 'user',
content: isModelCompatibleWithVision(assistant.model)
? await splitUserTextMessageIntoBlocks(message)
? await splitUserTextMessageIntoOpenAIBlocks(message)
: message,
}
)
return OpenAIAssistantStream(
return AssistantStream(
{ threadId: currentThreadId, messageId: createdMessage.id },
async ({ forwardStream }) => {
const runStream = openai.beta.threads.runs.createAndStream(
currentThreadId,
{
assistant_id: assistantId,
}
)
const runStream = openai.beta.threads.runs.stream(currentThreadId, {
assistant_id: assistantId,
})
let runResult = await forwardStream(runStream)

View File

@ -4,8 +4,8 @@ import { auth } from '../auth'
import { baseOptions } from '../baseOptions'
import { parseChatCompletionOptions } from '../shared/parseChatCompletionOptions'
import { getChatCompletionSetVarIds } from '../shared/getChatCompletionSetVarIds'
import { runChatCompletion } from '../shared/runChatCompletion'
import { runChatCompletionStream } from '../shared/runChatCompletionStream'
import { runOpenAIChatCompletion } from '../shared/runOpenAIChatCompletion'
import { runOpenAIChatCompletionStream } from '../shared/runOpenAIChatCompletionStream'
import { getChatCompletionStreamVarId } from '../shared/getChatCompletionStreamVarId'
import { fetchGPTModels } from '../helpers/fetchModels'
@ -14,7 +14,6 @@ export const createChatCompletion = createAction({
auth,
baseOptions,
options: parseChatCompletionOptions({
defaultModel: defaultOpenAIOptions.model,
defaultTemperature: defaultOpenAIOptions.temperature,
modelFetchId: 'fetchModels',
}),
@ -55,24 +54,25 @@ export const createChatCompletion = createAction({
],
run: {
server: (params) =>
runChatCompletion({
runOpenAIChatCompletion({
...params,
config: {
baseUrl: defaultOpenAIOptions.baseUrl,
defaultModel: defaultOpenAIOptions.model,
},
compatibility: 'strict',
}),
stream: {
getStreamVariableId: getChatCompletionStreamVarId,
run: async (params) => ({
stream: await runChatCompletionStream({
run: async (params) =>
runOpenAIChatCompletionStream({
...params,
config: {
baseUrl: defaultOpenAIOptions.baseUrl,
defaultModel: defaultOpenAIOptions.model,
},
compatibility: 'strict',
}),
}),
},
},
})

View File

@ -1,5 +1,7 @@
// Copied from https://github.com/vercel/ai/blob/f9db8fd6543202a8404a7a1a40f938d6270b08ef/packages/core/streams/assistant-response.ts
// Because the stream is not exported from the package
import { AssistantMessage, DataMessage, formatStreamPart } from 'ai'
import { AssistantStream } from 'openai/lib/AssistantStream'
import { AssistantStream as AssistantStreamType } from 'openai/lib/AssistantStream'
import { Run } from 'openai/resources/beta/threads/runs/runs'
/**
@ -44,14 +46,19 @@ Send a data message to the client. You can use this to provide information for r
/**
Forwards the assistant response stream to the client. Returns the `Run` object after it completes, or when it requires an action.
*/
forwardStream: (stream: AssistantStream) => Promise<Run | undefined>
forwardStream: (stream: AssistantStreamType) => Promise<Run | undefined>
}) => Promise<void>
export const OpenAIAssistantStream = (
/**
The `AssistantStream` helper allows you to send a stream of assistant updates to `useAssistant`.
It is designed to facilitate streaming assistant responses to the `useAssistant` hook.
It receives an assistant thread and a current message, and can send messages and data messages to the client.
*/
export function AssistantStream(
{ threadId, messageId }: AssistantResponseSettings,
process: AssistantResponseCallback
) =>
new ReadableStream({
) {
return new ReadableStream({
async start(controller) {
const textEncoder = new TextEncoder()
@ -73,7 +80,7 @@ export const OpenAIAssistantStream = (
)
}
const forwardStream = async (stream: AssistantStream) => {
const forwardStream = async (stream: AssistantStreamType) => {
let result: Run | undefined = undefined
for await (const value of stream) {
@ -143,3 +150,4 @@ export const OpenAIAssistantStream = (
pull(controller) {},
cancel() {},
})
}
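
`AssistantStream` encodes its parts with `formatStreamPart`, so its output can be consumed with `readDataStream` from `ai`, which `askAssistant` already imports elsewhere in this commit. An illustrative consumer (the function name and the text accumulation are for demonstration only, not part of this diff):

import { readDataStream } from 'ai'

// Illustrative only: read the ReadableStream returned by AssistantStream and
// accumulate the incremental text parts.
const readAssistantText = async (stream: ReadableStream<Uint8Array>) => {
  let text = ''
  for await (const { type, value } of readDataStream(stream.getReader())) {
    if (type === 'text') text += value
  }
  return text
}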

View File

@ -1,81 +0,0 @@
import type { OpenAI } from 'openai'
import { VariableStore } from '@typebot.io/forge'
import { isDefined, isEmpty } from '@typebot.io/lib'
import { ChatCompletionOptions } from '../shared/parseChatCompletionOptions'
import ky, { HTTPError } from 'ky'
import { defaultOpenAIOptions, modelsWithImageUrlSupport } from '../constants'
import { isModelCompatibleWithVision } from './isModelCompatibleWithVision'
import { splitUserTextMessageIntoBlocks } from './splitUserTextMessageIntoBlocks'
export const parseChatCompletionMessages = async ({
options: { messages, model },
variables,
}: {
options: ChatCompletionOptions
variables: VariableStore
}): Promise<OpenAI.Chat.ChatCompletionMessageParam[]> => {
if (!messages) return []
const isVisionEnabled = isModelCompatibleWithVision(
model ?? defaultOpenAIOptions.model
)
const parsedMessages = (
await Promise.all(
messages.map(async (message) => {
if (!message.role) return
if (message.role === 'Dialogue') {
if (!message.dialogueVariableId) return
const dialogue = variables.get(message.dialogueVariableId) ?? []
const dialogueArr = Array.isArray(dialogue) ? dialogue : [dialogue]
return Promise.all(
dialogueArr.map(async (dialogueItem, index) => {
if (index === 0 && message.startsBy === 'assistant')
return {
role: 'assistant',
content: dialogueItem,
}
if (index % (message.startsBy === 'assistant' ? 1 : 2) === 0) {
return {
role: 'user',
content: isVisionEnabled
? await splitUserTextMessageIntoBlocks(dialogueItem ?? '')
: dialogueItem,
}
}
return {
role: 'assistant',
content: dialogueItem,
}
})
)
}
if (!message.content) return
const content = variables.parse(message.content)
if (isEmpty(content)) return
if (message.role === 'user')
return {
role: 'user',
content: isVisionEnabled
? await splitUserTextMessageIntoBlocks(content)
: content,
}
return {
role: message.role,
content,
}
})
)
)
.flat()
.filter((message) => {
return isDefined(message?.role) && isDefined(message.content)
}) as OpenAI.Chat.ChatCompletionMessageParam[]
return parsedMessages
}

View File

@ -1,23 +0,0 @@
import type { OpenAI } from 'openai'
import { toolParametersSchema } from '../shared/parseChatCompletionOptions'
import { z } from '@typebot.io/forge/zod'
export const parseToolParameters = (
parameters: z.infer<typeof toolParametersSchema>
): OpenAI.FunctionParameters => ({
type: 'object',
properties: parameters?.reduce<{
[x: string]: unknown
}>((acc, param) => {
if (!param.name) return acc
acc[param.name] = {
type: param.type === 'enum' ? 'string' : param.type,
enum: param.type === 'enum' ? param.values : undefined,
description: param.description,
}
return acc
}, {}),
required:
parameters?.filter((param) => param.required).map((param) => param.name) ??
[],
})

View File

@ -1,7 +1,7 @@
import ky, { HTTPError } from 'ky'
import OpenAI from 'openai'
export const splitUserTextMessageIntoBlocks = async (
export const splitUserTextMessageIntoOpenAIBlocks = async (
input: string
): Promise<string | OpenAI.Chat.ChatCompletionContentPart[]> => {
const urlRegex = /(^|\n\n)(https?:\/\/[^\s]+)(\n\n|$)/g

View File

@ -7,9 +7,10 @@
"author": "Baptiste Arnaud",
"license": "AGPL-3.0-or-later",
"dependencies": {
"@ai-sdk/openai": "0.0.31",
"ai": "3.2.1",
"openai": "4.47.1"
"@ai-sdk/openai": "0.0.36",
"ai": "3.2.22",
"openai": "4.52.7",
"@typebot.io/ai": "workspace:*"
},
"devDependencies": {
"@typebot.io/forge": "workspace:*",

View File

@ -1,6 +1,7 @@
import { option } from '@typebot.io/forge'
import { z } from '@typebot.io/forge/zod'
import { baseOptions } from '../baseOptions'
import { toolsSchema } from '@typebot.io/ai/schemas'
const nativeMessageContentSchema = {
content: option.string.layout({
@ -27,77 +28,6 @@ const assistantMessageItemSchema = option
})
.extend(nativeMessageContentSchema)
const parameterBase = {
name: option.string.layout({
label: 'Name',
placeholder: 'myVariable',
withVariableButton: false,
}),
description: option.string.layout({
label: 'Description',
withVariableButton: false,
}),
required: option.boolean.layout({
label: 'Is required?',
}),
}
export const toolParametersSchema = option
.array(
option.discriminatedUnion('type', [
option
.object({
type: option.literal('string'),
})
.extend(parameterBase),
option
.object({
type: option.literal('number'),
})
.extend(parameterBase),
option
.object({
type: option.literal('boolean'),
})
.extend(parameterBase),
option
.object({
type: option.literal('enum'),
values: option
.array(option.string)
.layout({ itemLabel: 'possible value' }),
})
.extend(parameterBase),
])
)
.layout({
accordion: 'Parameters',
itemLabel: 'parameter',
})
const functionToolItemSchema = option.object({
type: option.literal('function'),
name: option.string.layout({
label: 'Name',
placeholder: 'myFunctionName',
withVariableButton: false,
}),
description: option.string.layout({
label: 'Description',
placeholder: 'A brief description of what this function does.',
withVariableButton: false,
}),
parameters: toolParametersSchema,
code: option.string.layout({
inputType: 'code',
label: 'Code',
lang: 'javascript',
moreInfoTooltip:
'A javascript code snippet that can use the defined parameters. It should return a value.',
withVariableButton: false,
}),
})
const dialogueMessageItemSchema = option.object({
role: option.literal('Dialogue'),
dialogueVariableId: option.string.layout({
@ -112,23 +42,20 @@ const dialogueMessageItemSchema = option.object({
})
type Props = {
defaultModel?: string
defaultTemperature?: number
defaultTemperature: number
modelFetchId?: string
modelHelperText?: string
}
export const parseChatCompletionOptions = ({
defaultModel,
defaultTemperature,
modelFetchId,
modelHelperText,
}: Props = {}) =>
}: Props) =>
option.object({
model: option.string.layout({
placeholder: modelFetchId ? 'Select a model' : undefined,
label: modelFetchId ? undefined : 'Model',
defaultValue: defaultModel,
fetcher: modelFetchId,
helperText: modelHelperText,
}),
@ -142,9 +69,7 @@ export const parseChatCompletionOptions = ({
])
)
.layout({ accordion: 'Messages', itemLabel: 'message', isOrdered: true }),
tools: option
.array(option.discriminatedUnion('type', [functionToolItemSchema]))
.layout({ accordion: 'Tools', itemLabel: 'tool' }),
tools: toolsSchema,
temperature: option.number.layout({
accordion: 'Advanced settings',
label: 'Temperature',

View File

@ -69,14 +69,12 @@ export const parseGenerateVariablesOptions = ({
? option.string.layout({
placeholder: 'Select a model',
label: 'Model',
defaultValue: defaultModel,
fetcher: modelFetch,
helperText: modelHelperText,
})
: option.enum(modelFetch).layout({
placeholder: 'Select a model',
label: 'Model',
defaultValue: defaultModel,
helperText: modelHelperText,
}),
prompt: option.string.layout({

View File

@ -1,125 +0,0 @@
import OpenAI, { ClientOptions } from 'openai'
import { parseToolParameters } from '../helpers/parseToolParameters'
import { executeFunction } from '@typebot.io/variables/executeFunction'
import { ChatCompletionTool, ChatCompletionMessage } from 'openai/resources'
import { maxToolCalls } from '../constants'
import { parseChatCompletionMessages } from '../helpers/parseChatCompletionMessages'
import { ChatCompletionOptions } from './parseChatCompletionOptions'
import { LogsStore, VariableStore } from '@typebot.io/forge/types'
type OpenAIConfig = {
baseUrl: string
defaultModel?: string
}
type Props = {
credentials: {
apiKey?: string
}
options: ChatCompletionOptions
variables: VariableStore
logs: LogsStore
config: OpenAIConfig
}
export const runChatCompletion = async ({
credentials: { apiKey },
options,
variables,
config: openAIConfig,
logs,
}: Props) => {
const model = options.model?.trim() ?? openAIConfig.defaultModel
if (!model) return logs.add('No model provided')
const config = {
apiKey,
baseURL: openAIConfig.baseUrl ?? options.baseUrl,
defaultHeaders: options.baseUrl
? {
'api-key': apiKey,
}
: undefined,
defaultQuery: options.apiVersion
? {
'api-version': options.apiVersion,
}
: undefined,
} satisfies ClientOptions
const openai = new OpenAI(config)
const tools = options.tools
?.filter((t) => t.name && t.parameters)
.map((t) => ({
type: 'function',
function: {
name: t.name as string,
description: t.description,
parameters: parseToolParameters(t.parameters!),
},
})) satisfies ChatCompletionTool[] | undefined
const messages = await parseChatCompletionMessages({ options, variables })
const body = {
model,
temperature: options.temperature ? Number(options.temperature) : undefined,
messages,
tools: (tools?.length ?? 0) > 0 ? tools : undefined,
}
let totalTokens = 0
let message: ChatCompletionMessage
for (let i = 0; i < maxToolCalls; i++) {
const response = await openai.chat.completions.create(body)
message = response.choices[0].message
totalTokens += response.usage?.total_tokens || 0
if (!message.tool_calls) break
messages.push(message)
for (const toolCall of message.tool_calls) {
const name = toolCall.function?.name
if (!name) continue
const toolDefinition = options.tools?.find((t) => t.name === name)
if (!toolDefinition?.code || !toolDefinition.parameters) {
messages.push({
tool_call_id: toolCall.id,
role: 'tool',
content: 'Function not found',
})
continue
}
const toolParams = Object.fromEntries(
toolDefinition.parameters.map(({ name }) => [name, null])
)
const toolArgs = toolCall.function?.arguments
? JSON.parse(toolCall.function?.arguments)
: undefined
if (!toolArgs) continue
const { output, newVariables } = await executeFunction({
variables: variables.list(),
args: { ...toolParams, ...toolArgs },
body: toolDefinition.code,
})
newVariables?.forEach((v) => variables.set(v.id, v.value))
messages.push({
tool_call_id: toolCall.id,
role: 'tool',
content: output,
})
}
}
options.responseMapping?.forEach((mapping) => {
if (!mapping.variableId) return
if (!mapping.item || mapping.item === 'Message content')
variables.set(mapping.variableId, message.content)
if (mapping.item === 'Total tokens')
variables.set(mapping.variableId, totalTokens)
})
}

View File

@ -1,107 +0,0 @@
import { VariableStore } from '@typebot.io/forge/types'
import { ChatCompletionOptions } from './parseChatCompletionOptions'
import { executeFunction } from '@typebot.io/variables/executeFunction'
import { OpenAIStream, ToolCallPayload } from 'ai'
import OpenAI, { ClientOptions } from 'openai'
import { ChatCompletionTool } from 'openai/resources'
import { parseChatCompletionMessages } from '../helpers/parseChatCompletionMessages'
import { parseToolParameters } from '../helpers/parseToolParameters'
type Props = {
credentials: { apiKey?: string }
options: ChatCompletionOptions
variables: VariableStore
config: { baseUrl: string; defaultModel?: string }
}
export const runChatCompletionStream = async ({
credentials: { apiKey },
options,
variables,
config: openAIConfig,
}: Props) => {
const model = options.model?.trim() ?? openAIConfig.defaultModel
if (!model) return
const config = {
apiKey,
baseURL: openAIConfig.baseUrl ?? options.baseUrl,
defaultHeaders: {
'api-key': apiKey,
},
defaultQuery: options.apiVersion
? {
'api-version': options.apiVersion,
}
: undefined,
} satisfies ClientOptions
const openai = new OpenAI(config)
const tools = options.tools
?.filter((t) => t.name && t.parameters)
.map((t) => ({
type: 'function',
function: {
name: t.name as string,
description: t.description,
parameters: parseToolParameters(t.parameters!),
},
})) satisfies ChatCompletionTool[] | undefined
const messages = await parseChatCompletionMessages({ options, variables })
const response = await openai.chat.completions.create({
model,
temperature: options.temperature ? Number(options.temperature) : undefined,
stream: true,
messages,
tools: (tools?.length ?? 0) > 0 ? tools : undefined,
})
return OpenAIStream(response, {
experimental_onToolCall: async (
call: ToolCallPayload,
appendToolCallMessage
) => {
for (const toolCall of call.tools) {
const name = toolCall.func?.name
if (!name) continue
const toolDefinition = options.tools?.find((t) => t.name === name)
if (!toolDefinition?.code || !toolDefinition.parameters) {
messages.push({
tool_call_id: toolCall.id,
role: 'tool',
content: 'Function not found',
})
continue
}
const { output, newVariables } = await executeFunction({
variables: variables.list(),
args:
typeof toolCall.func.arguments === 'string'
? JSON.parse(toolCall.func.arguments)
: toolCall.func.arguments,
body: toolDefinition.code,
})
newVariables?.forEach((v) => variables.set(v.id, v.value))
const newMessages = appendToolCallMessage({
tool_call_id: toolCall.id,
function_name: toolCall.func.name,
tool_call_result: output,
})
return openai.chat.completions.create({
messages: [
...messages,
...newMessages,
] as OpenAI.Chat.Completions.ChatCompletionMessageParam[],
model,
stream: true,
tools,
})
}
},
})
}

View File

@ -0,0 +1,87 @@
import { maxToolCalls } from '../constants'
import { ChatCompletionOptions } from './parseChatCompletionOptions'
import { LogsStore, VariableStore } from '@typebot.io/forge/types'
import { createOpenAI } from '@ai-sdk/openai'
import { APICallError, generateText } from 'ai'
import { isModelCompatibleWithVision } from '../helpers/isModelCompatibleWithVision'
import { parseTools } from '@typebot.io/ai/parseTools'
import { parseChatCompletionMessages } from '@typebot.io/ai/parseChatCompletionMessages'
type OpenAIConfig = {
baseUrl: string
defaultModel?: string
}
type Props = {
credentials: {
apiKey?: string
}
options: ChatCompletionOptions
variables: VariableStore
logs: LogsStore
config: OpenAIConfig
compatibility?: 'strict' | 'compatible'
}
export const runOpenAIChatCompletion = async ({
credentials: { apiKey },
options,
variables,
config: openAIConfig,
logs,
compatibility,
}: Props) => {
if (!apiKey) return logs.add('No API key provided')
const modelName = options.model?.trim() ?? openAIConfig.defaultModel
if (!modelName) return logs.add('No model provided')
const model = createOpenAI({
baseURL: openAIConfig.baseUrl ?? options.baseUrl,
headers: options.baseUrl
? {
'api-key': apiKey,
}
: undefined,
apiKey,
compatibility,
})(modelName)
try {
const { text, usage } = await generateText({
model,
temperature: options.temperature
? Number(options.temperature)
: undefined,
messages: await parseChatCompletionMessages({
messages: options.messages,
variables,
isVisionEnabled: isModelCompatibleWithVision(modelName),
shouldDownloadImages: false,
}),
tools: parseTools({ tools: options.tools, variables }),
maxToolRoundtrips: maxToolCalls,
})
options.responseMapping?.forEach((mapping) => {
if (!mapping.variableId) return
if (!mapping.item || mapping.item === 'Message content')
variables.set(mapping.variableId, text)
if (mapping.item === 'Total tokens')
variables.set(mapping.variableId, usage.totalTokens)
})
} catch (err) {
if (err instanceof APICallError) {
logs.add({
status: 'error',
description: 'An API call error occurred while generating the response',
details: err.message,
})
return
}
logs.add({
status: 'error',
description: 'An unknown error occurred while generating the response',
details: err,
})
}
}

View File

@ -0,0 +1,114 @@
import { VariableStore } from '@typebot.io/forge/types'
import { ChatCompletionOptions } from './parseChatCompletionOptions'
import { APICallError, streamText, ToolCallPart, ToolResultPart } from 'ai'
import { createOpenAI } from '@ai-sdk/openai'
import { maxToolCalls } from '../constants'
import { isModelCompatibleWithVision } from '../helpers/isModelCompatibleWithVision'
import { parseChatCompletionMessages } from '@typebot.io/ai/parseChatCompletionMessages'
import { parseTools } from '@typebot.io/ai/parseTools'
import { pumpStreamUntilDone } from '@typebot.io/ai/pumpStreamUntilDone'
import { appendToolResultsToMessages } from '@typebot.io/ai/appendToolResultsToMessages'
type Props = {
credentials: { apiKey?: string }
options: ChatCompletionOptions
variables: VariableStore
config: { baseUrl: string; defaultModel?: string }
compatibility?: 'strict' | 'compatible'
}
export const runOpenAIChatCompletionStream = async ({
credentials: { apiKey },
options,
variables,
config: openAIConfig,
compatibility,
}: Props): Promise<{
stream?: ReadableStream<any>
httpError?: { status: number; message: string }
}> => {
if (!apiKey) return { httpError: { status: 401, message: 'API key missing' } }
const modelName = options.model?.trim() ?? openAIConfig.defaultModel
if (!modelName)
return { httpError: { status: 400, message: 'model not found' } }
const model = createOpenAI({
baseURL: openAIConfig.baseUrl ?? options.baseUrl,
headers: options.baseUrl
? {
'api-key': apiKey,
}
: undefined,
apiKey,
compatibility,
})(modelName)
const streamConfig = {
model,
messages: await parseChatCompletionMessages({
messages: options.messages,
isVisionEnabled: isModelCompatibleWithVision(modelName),
shouldDownloadImages: false,
variables,
}),
temperature: options.temperature ? Number(options.temperature) : undefined,
tools: parseTools({ tools: options.tools, variables }),
}
try {
const response = await streamText(streamConfig)
let totalToolCalls = 0
let toolCalls: ToolCallPart[] = []
let toolResults: ToolResultPart[] = []
return {
stream: new ReadableStream({
async start(controller) {
const reader = response.toAIStream().getReader()
await pumpStreamUntilDone(controller, reader)
toolCalls = await response.toolCalls
if (toolCalls.length > 0)
toolResults = (await response.toolResults) as ToolResultPart[]
while (
toolCalls &&
toolCalls.length > 0 &&
totalToolCalls < maxToolCalls
) {
totalToolCalls += 1
const newResponse = await streamText({
...streamConfig,
messages: appendToolResultsToMessages({
messages: streamConfig.messages,
toolCalls,
toolResults,
}),
})
const reader = newResponse.toAIStream().getReader()
await pumpStreamUntilDone(controller, reader)
toolCalls = await newResponse.toolCalls
if (toolCalls.length > 0)
toolResults = (await newResponse.toolResults) as ToolResultPart[]
}
controller.close()
},
}),
}
} catch (err) {
if (err instanceof APICallError) {
return {
httpError: { status: err.statusCode ?? 500, message: err.message },
}
}
return {
httpError: {
status: 500,
message: 'An unknown error occurred while generating the response',
},
}
}
}

View File

@ -3,8 +3,8 @@ import { auth } from '../auth'
import { parseChatCompletionOptions } from '@typebot.io/openai-block/shared/parseChatCompletionOptions'
import { getChatCompletionSetVarIds } from '@typebot.io/openai-block/shared/getChatCompletionSetVarIds'
import { getChatCompletionStreamVarId } from '@typebot.io/openai-block/shared/getChatCompletionStreamVarId'
import { runChatCompletion } from '@typebot.io/openai-block/shared/runChatCompletion'
import { runChatCompletionStream } from '@typebot.io/openai-block/shared/runChatCompletionStream'
import { runOpenAIChatCompletion } from '@typebot.io/openai-block/shared/runOpenAIChatCompletion'
import { runOpenAIChatCompletionStream } from '@typebot.io/openai-block/shared/runOpenAIChatCompletionStream'
import { defaultTogetherOptions } from '../constants'
export const createChatCompletion = createAction({
@ -13,6 +13,7 @@ export const createChatCompletion = createAction({
options: parseChatCompletionOptions({
modelHelperText:
'You can find the list of all the models available [here](https://docs.together.ai/docs/inference-models#chat-models). Copy the model string for API.',
defaultTemperature: defaultTogetherOptions.temperature,
}),
turnableInto: [
{
@ -26,7 +27,6 @@ export const createChatCompletion = createAction({
blockId: 'anthropic',
transform: (options) => ({
...options,
model: undefined,
action: 'Create Chat Message',
responseMapping: options.responseMapping?.map((res: any) =>
res.item === 'Message content'
@ -39,18 +39,19 @@ export const createChatCompletion = createAction({
getSetVariableIds: getChatCompletionSetVarIds,
run: {
server: (params) =>
runChatCompletion({
runOpenAIChatCompletion({
...params,
config: { baseUrl: defaultTogetherOptions.baseUrl },
}),
stream: {
getStreamVariableId: getChatCompletionStreamVarId,
run: async (params) => ({
stream: await runChatCompletionStream({
run: async (params) =>
runOpenAIChatCompletionStream({
...params,
config: { baseUrl: defaultTogetherOptions.baseUrl },
config: {
baseUrl: defaultTogetherOptions.baseUrl,
},
}),
}),
},
},
})

View File

@ -1,3 +1,4 @@
export const defaultTogetherOptions = {
baseUrl: 'https://api.together.xyz/v1',
temperature: 1,
} as const

View File

@ -24,6 +24,7 @@ export interface ZodLayoutMetadata<
isDebounceDisabled?: boolean
hiddenItems?: string[]
mergeWithLastField?: boolean
toLabels?: (val?: string) => string | undefined
}
declare module 'zod' {

View File

@ -1,5 +1,5 @@
import { env } from '@typebot.io/env'
import Redis from 'ioredis'
import { Redis } from 'ioredis'
declare const global: { redis: Redis | undefined }
let redis: Redis | undefined

pnpm-lock.yaml (generated, 446 changes)
View File

@ -84,7 +84,7 @@ importers:
version: 2.2.1
'@sentry/nextjs':
specifier: 7.77.0
version: 7.77.0(next@14.1.0(@babel/core@7.24.0)(react-dom@18.2.0(react@18.2.0))(react@18.2.0))(react@18.2.0)(webpack@5.90.3)
version: 7.77.0(next@14.1.0(@babel/core@7.24.0)(@opentelemetry/api@1.9.0)(react-dom@18.2.0(react@18.2.0))(react@18.2.0))(react@18.2.0)(webpack@5.90.3)
'@tanstack/react-query':
specifier: 4.29.19
version: 4.29.19(react-dom@18.2.0(react@18.2.0))(react@18.2.0)
@ -102,7 +102,7 @@ importers:
version: 10.40.0(@trpc/server@10.40.0)
'@trpc/next':
specifier: 10.40.0
version: 10.40.0(@tanstack/react-query@4.29.19(react-dom@18.2.0(react@18.2.0))(react@18.2.0))(@trpc/client@10.40.0(@trpc/server@10.40.0))(@trpc/react-query@10.40.0(@tanstack/react-query@4.29.19(react-dom@18.2.0(react@18.2.0))(react@18.2.0))(@trpc/client@10.40.0(@trpc/server@10.40.0))(@trpc/server@10.40.0)(react-dom@18.2.0(react@18.2.0))(react@18.2.0))(@trpc/server@10.40.0)(next@14.1.0(@babel/core@7.24.0)(react-dom@18.2.0(react@18.2.0))(react@18.2.0))(react-dom@18.2.0(react@18.2.0))(react@18.2.0)
version: 10.40.0(@tanstack/react-query@4.29.19(react-dom@18.2.0(react@18.2.0))(react@18.2.0))(@trpc/client@10.40.0(@trpc/server@10.40.0))(@trpc/react-query@10.40.0(@tanstack/react-query@4.29.19(react-dom@18.2.0(react@18.2.0))(react@18.2.0))(@trpc/client@10.40.0(@trpc/server@10.40.0))(@trpc/server@10.40.0)(react-dom@18.2.0(react@18.2.0))(react@18.2.0))(@trpc/server@10.40.0)(next@14.1.0(@babel/core@7.24.0)(@opentelemetry/api@1.9.0)(react-dom@18.2.0(react@18.2.0))(react@18.2.0))(react-dom@18.2.0(react@18.2.0))(react@18.2.0)
'@trpc/react-query':
specifier: 10.40.0
version: 10.40.0(@tanstack/react-query@4.29.19(react-dom@18.2.0(react@18.2.0))(react@18.2.0))(@trpc/client@10.40.0(@trpc/server@10.40.0))(@trpc/server@10.40.0)(react-dom@18.2.0(react@18.2.0))(react@18.2.0)
@ -219,13 +219,13 @@ importers:
version: 0.1.1
next:
specifier: 14.1.0
version: 14.1.0(@babel/core@7.24.0)(react-dom@18.2.0(react@18.2.0))(react@18.2.0)
version: 14.1.0(@babel/core@7.24.0)(@opentelemetry/api@1.9.0)(react-dom@18.2.0(react@18.2.0))(react@18.2.0)
next-auth:
specifier: 4.22.1
version: 4.22.1(next@14.1.0(@babel/core@7.24.0)(react-dom@18.2.0(react@18.2.0))(react@18.2.0))(nodemailer@6.9.8)(react-dom@18.2.0(react@18.2.0))(react@18.2.0)
version: 4.22.1(next@14.1.0(@babel/core@7.24.0)(@opentelemetry/api@1.9.0)(react-dom@18.2.0(react@18.2.0))(react@18.2.0))(nodemailer@6.9.8)(react-dom@18.2.0(react@18.2.0))(react@18.2.0)
nextjs-cors:
specifier: 2.1.2
version: 2.1.2(next@14.1.0(@babel/core@7.24.0)(react-dom@18.2.0(react@18.2.0))(react@18.2.0))
version: 2.1.2(next@14.1.0(@babel/core@7.24.0)(@opentelemetry/api@1.9.0)(react-dom@18.2.0(react@18.2.0))(react@18.2.0))
nodemailer:
specifier: 6.9.8
version: 6.9.8
@ -409,7 +409,7 @@ importers:
version: 1.8.0
'@sentry/nextjs':
specifier: 7.77.0
version: 7.77.0(next@14.1.0(react-dom@18.2.0(react@18.2.0))(react@18.2.0))(react@18.2.0)(webpack@5.90.3)
version: 7.77.0(next@14.1.0(@opentelemetry/api@1.9.0)(react-dom@18.2.0(react@18.2.0))(react@18.2.0))(react@18.2.0)(webpack@5.90.3)
'@trpc/server':
specifier: 10.40.0
version: 10.40.0
@ -426,8 +426,8 @@ importers:
specifier: workspace:*
version: link:../../packages/prisma
ai:
specifier: 3.2.1
version: 3.2.1(openai@4.47.1)(react@18.2.0)(solid-js@1.7.8)(svelte@4.2.12)(vue@3.4.21(typescript@5.4.5))(zod@3.22.4)
specifier: 3.2.22
version: 3.2.22(openai@4.47.1)(react@18.2.0)(solid-js@1.7.8)(svelte@4.2.12)(vue@3.4.21(typescript@5.4.5))(zod@3.22.4)
bot-engine:
specifier: workspace:*
version: link:../../packages/deprecated/bot-engine
@ -448,10 +448,10 @@ importers:
version: 1.2.4
next:
specifier: 14.1.0
version: 14.1.0(@babel/core@7.22.9)(react-dom@18.2.0(react@18.2.0))(react@18.2.0)
version: 14.1.0(@babel/core@7.22.9)(@opentelemetry/api@1.9.0)(react-dom@18.2.0(react@18.2.0))(react@18.2.0)
nextjs-cors:
specifier: 2.1.2
version: 2.1.2(next@14.1.0(react-dom@18.2.0(react@18.2.0))(react@18.2.0))
version: 2.1.2(next@14.1.0(@opentelemetry/api@1.9.0)(react-dom@18.2.0(react@18.2.0))(react@18.2.0))
nodemailer:
specifier: 6.9.8
version: 6.9.8
@ -566,7 +566,7 @@ importers:
version: 3.2.0(@chakra-ui/system@2.6.2(@emotion/react@11.11.4(@types/react@18.2.15)(react@18.2.0))(@emotion/styled@11.11.5(@emotion/react@11.11.4(@types/react@18.2.15)(react@18.2.0))(@types/react@18.2.15)(react@18.2.0))(react@18.2.0))(react@18.2.0)
'@chakra-ui/next-js':
specifier: 2.2.0
version: 2.2.0(@chakra-ui/react@2.8.2(@emotion/react@11.11.4(@types/react@18.2.15)(react@18.2.0))(@emotion/styled@11.11.5(@emotion/react@11.11.4(@types/react@18.2.15)(react@18.2.0))(@types/react@18.2.15)(react@18.2.0))(@types/react@18.2.15)(framer-motion@11.1.7(@emotion/is-prop-valid@1.2.2)(react-dom@18.2.0(react@18.2.0))(react@18.2.0))(react-dom@18.2.0(react@18.2.0))(react@18.2.0))(@emotion/react@11.11.4(@types/react@18.2.15)(react@18.2.0))(next@14.1.0(@babel/core@7.22.9)(react-dom@18.2.0(react@18.2.0))(react@18.2.0))(react@18.2.0)
version: 2.2.0(@chakra-ui/react@2.8.2(@emotion/react@11.11.4(@types/react@18.2.15)(react@18.2.0))(@emotion/styled@11.11.5(@emotion/react@11.11.4(@types/react@18.2.15)(react@18.2.0))(@types/react@18.2.15)(react@18.2.0))(@types/react@18.2.15)(framer-motion@11.1.7(@emotion/is-prop-valid@1.2.2)(react-dom@18.2.0(react@18.2.0))(react@18.2.0))(react-dom@18.2.0(react@18.2.0))(react@18.2.0))(@emotion/react@11.11.4(@types/react@18.2.15)(react@18.2.0))(next@14.1.0(@babel/core@7.22.9)(@opentelemetry/api@1.9.0)(react-dom@18.2.0(react@18.2.0))(react@18.2.0))(react@18.2.0)
'@chakra-ui/react':
specifier: 2.8.2
version: 2.8.2(@emotion/react@11.11.4(@types/react@18.2.15)(react@18.2.0))(@emotion/styled@11.11.5(@emotion/react@11.11.4(@types/react@18.2.15)(react@18.2.0))(@types/react@18.2.15)(react@18.2.0))(@types/react@18.2.15)(framer-motion@11.1.7(@emotion/is-prop-valid@1.2.2)(react-dom@18.2.0(react@18.2.0))(react@18.2.0))(react-dom@18.2.0(react@18.2.0))(react@18.2.0)
@ -602,7 +602,7 @@ importers:
version: 11.1.7(@emotion/is-prop-valid@1.2.2)(react-dom@18.2.0(react@18.2.0))(react@18.2.0)
next:
specifier: 14.1.0
version: 14.1.0(@babel/core@7.22.9)(react-dom@18.2.0(react@18.2.0))(react@18.2.0)
version: 14.1.0(@babel/core@7.22.9)(@opentelemetry/api@1.9.0)(react-dom@18.2.0(react@18.2.0))(react@18.2.0)
next-mdx-remote:
specifier: 4.4.1
version: 4.4.1(react-dom@18.2.0(react@18.2.0))(react@18.2.0)
@ -705,7 +705,7 @@ importers:
version: 10.0.1
next:
specifier: 14.1.0
version: 14.1.0(@babel/core@7.22.9)(react-dom@18.2.0(react@18.2.0))(react@18.2.0)
version: 14.1.0(@babel/core@7.22.9)(@opentelemetry/api@1.9.0)(react-dom@18.2.0(react@18.2.0))(react@18.2.0)
stripe:
specifier: 12.13.0
version: 12.13.0
@ -714,6 +714,27 @@ importers:
specifier: workspace:*
version: link:../../../packages/tsconfig
packages/ai:
dependencies:
'@typebot.io/forge':
specifier: workspace:*
version: link:../forge/core
'@typebot.io/lib':
specifier: workspace:*
version: link:../lib
'@typebot.io/tsconfig':
specifier: workspace:*
version: link:../tsconfig
'@typebot.io/variables':
specifier: workspace:*
version: link:../variables
ai:
specifier: 3.2.22
version: 3.2.22(openai@4.52.7)(react@18.2.0)(solid-js@1.7.8)(svelte@4.2.12)(vue@3.4.21(typescript@5.4.5))(zod@3.22.4)
ky:
specifier: 1.2.4
version: 1.2.4
packages/bot-engine:
dependencies:
'@paralleldrive/cuid2':
@ -724,7 +745,7 @@ importers:
version: 1.8.0
'@sentry/nextjs':
specifier: 7.77.0
version: 7.77.0(next@14.1.0(react-dom@18.2.0(react@18.2.0))(react@18.2.0))(react@18.2.0)(webpack@5.90.3)
version: 7.77.0(next@14.1.0(@opentelemetry/api@1.9.0)(react-dom@18.2.0(react@18.2.0))(react@18.2.0))(react@18.2.0)(webpack@5.90.3)
'@trpc/server':
specifier: 10.40.0
version: 10.40.0
@ -759,8 +780,8 @@ importers:
specifier: 30.4.5
version: 30.4.5(@types/react@18.2.15)(immer@10.0.2)(react-dom@18.2.0(react@18.2.0))(react@18.2.0)(scheduler@0.23.0)(slate-history@0.100.0(slate@0.102.0))(slate-hyperscript@0.100.0(slate@0.102.0))(slate-react@0.102.0(react-dom@18.2.0(react@18.2.0))(react@18.2.0)(slate@0.102.0))(slate@0.102.0)
ai:
specifier: 3.2.1
version: 3.2.1(openai@4.47.1)(react@18.2.0)(solid-js@1.7.8)(svelte@4.2.12)(vue@3.4.21(typescript@5.4.5))(zod@3.22.4)
specifier: 3.2.22
version: 3.2.22(openai@4.47.1)(react@18.2.0)(solid-js@1.7.8)(svelte@4.2.12)(vue@3.4.21(typescript@5.4.5))(zod@3.22.4)
chrono-node:
specifier: 2.7.6
version: 2.7.6
@ -1003,6 +1024,9 @@ importers:
packages/embeds/js:
dependencies:
'@ai-sdk/ui-utils':
specifier: 0.0.12
version: 0.0.12(zod@3.22.4)
'@ark-ui/solid':
specifier: 3.3.0
version: 3.3.0(@internationalized/date@3.5.4)(solid-js@1.7.8)
@ -1034,6 +1058,9 @@ importers:
'@rollup/plugin-babel':
specifier: 6.0.3
version: 6.0.3(@babel/core@7.24.0)(@types/babel__core@7.20.5)(rollup@3.26.2)
'@rollup/plugin-commonjs':
specifier: 26.0.1
version: 26.0.1(rollup@3.26.2)
'@rollup/plugin-node-resolve':
specifier: 15.1.0
version: 15.1.0(rollup@3.26.2)
@ -1111,7 +1138,7 @@ importers:
dependencies:
next:
specifier: 12.x || 13.x || 14.x
version: 14.1.0(@babel/core@7.24.0)(react-dom@18.2.0(react@18.2.0))(react@18.2.0)
version: 14.1.0(@babel/core@7.24.0)(@opentelemetry/api@1.9.0)(react-dom@18.2.0(react@18.2.0))(react@18.2.0)
devDependencies:
'@babel/preset-react':
specifier: 7.22.5
@ -1296,17 +1323,17 @@ importers:
packages/forge/blocks/anthropic:
dependencies:
'@ai-sdk/anthropic':
specifier: 0.0.21
version: 0.0.21(zod@3.22.4)
'@anthropic-ai/sdk':
specifier: 0.20.6
version: 0.20.6
specifier: 0.0.30
version: 0.0.30(zod@3.22.4)
'@typebot.io/ai':
specifier: workspace:*
version: link:../../../ai
'@typebot.io/openai-block':
specifier: workspace:*
version: link:../openai
ai:
specifier: 3.2.1
version: 3.2.1(openai@4.47.1)(react@18.2.0)(solid-js@1.7.8)(svelte@4.2.12)(vue@3.4.21(typescript@5.4.5))(zod@3.22.4)
specifier: 3.2.22
version: 3.2.22(openai@4.52.7)(react@18.2.0)(solid-js@1.7.8)(svelte@4.2.12)(vue@3.4.21(typescript@5.4.5))(zod@3.22.4)
ky:
specifier: 1.2.4
version: 1.2.4
@ -1372,8 +1399,8 @@ importers:
packages/forge/blocks/difyAi:
dependencies:
ai:
specifier: 3.2.1
version: 3.2.1(openai@4.47.1)(react@18.2.0)(solid-js@1.7.8)(svelte@4.2.12)(vue@3.4.21(typescript@5.4.5))(zod@3.22.4)
specifier: 3.2.22
version: 3.2.22(openai@4.52.7)(react@18.2.0)(solid-js@1.7.8)(svelte@4.2.12)(vue@3.4.21(typescript@5.4.5))(zod@3.22.4)
devDependencies:
'@typebot.io/forge':
specifier: workspace:*
@ -1419,14 +1446,17 @@ importers:
packages/forge/blocks/mistral:
dependencies:
'@ai-sdk/mistral':
specifier: 0.0.18
version: 0.0.18(zod@3.22.4)
specifier: 0.0.22
version: 0.0.22(zod@3.22.4)
'@typebot.io/ai':
specifier: workspace:*
version: link:../../../ai
'@typebot.io/openai-block':
specifier: workspace:*
version: link:../openai
ai:
specifier: 3.2.1
version: 3.2.1(openai@4.47.1)(react@18.2.0)(solid-js@1.7.8)(svelte@4.2.12)(vue@3.4.21(typescript@5.4.5))(zod@3.22.4)
specifier: 3.2.22
version: 3.2.22(openai@4.52.7)(react@18.2.0)(solid-js@1.7.8)(svelte@4.2.12)(vue@3.4.21(typescript@5.4.5))(zod@3.22.4)
ky:
specifier: 1.2.4
version: 1.2.4
@ -1499,14 +1529,17 @@ importers:
packages/forge/blocks/openai:
dependencies:
'@ai-sdk/openai':
specifier: 0.0.31
version: 0.0.31(zod@3.22.4)
specifier: 0.0.36
version: 0.0.36(zod@3.22.4)
'@typebot.io/ai':
specifier: workspace:*
version: link:../../../ai
ai:
specifier: 3.2.1
version: 3.2.1(openai@4.47.1)(react@18.2.0)(solid-js@1.7.8)(svelte@4.2.12)(vue@3.4.21(typescript@5.4.5))(zod@3.22.4)
specifier: 3.2.22
version: 3.2.22(openai@4.52.7)(react@18.2.0)(solid-js@1.7.8)(svelte@4.2.12)(vue@3.4.21(typescript@5.4.5))(zod@3.22.4)
openai:
specifier: 4.47.1
version: 4.47.1
specifier: 4.52.7
version: 4.52.7
devDependencies:
'@typebot.io/forge':
specifier: workspace:*
@ -1677,7 +1710,7 @@ importers:
dependencies:
'@sentry/nextjs':
specifier: 7.77.0
version: 7.77.0(next@14.1.0(react-dom@18.2.0(react@18.2.0))(react@18.2.0))(react@18.2.0)(webpack@5.90.3)
version: 7.77.0(next@14.1.0(@opentelemetry/api@1.9.0)(react-dom@18.2.0(react@18.2.0))(react@18.2.0))(react@18.2.0)(webpack@5.90.3)
'@trpc/server':
specifier: 10.40.0
version: 10.40.0
@ -1774,7 +1807,7 @@ importers:
version: 13.11.9
next:
specifier: 14.1.0
version: 14.1.0(@babel/core@7.22.9)(react-dom@18.2.0(react@18.2.0))(react@18.2.0)
version: 14.1.0(@babel/core@7.22.9)(@opentelemetry/api@1.9.0)(react-dom@18.2.0(react@18.2.0))(react@18.2.0)
nodemailer:
specifier: 6.9.8
version: 6.9.8
@ -2041,7 +2074,7 @@ importers:
version: link:../env
react-email:
specifier: 2.0.0
version: 2.0.0(@swc/helpers@0.5.10)(eslint@8.44.0)
version: 2.0.0(@opentelemetry/api@1.9.0)(@swc/helpers@0.5.10)(eslint@8.44.0)
devDependencies:
dotenv-cli:
specifier: 7.4.1
@ -2067,26 +2100,35 @@ packages:
resolution: {integrity: sha512-1Yjs2SvM8TflER/OD3cOjhWWOZb58A2t7wpE2S9XfBYTiIl+XFhQG2bjy4Pu1I+EAlCNUzRDYDdFwFYUKvXcIA==}
engines: {node: '>=0.10.0'}
'@ai-sdk/anthropic@0.0.21':
resolution: {integrity: sha512-QjVnTbfbAmfMjDqLbcZFC4pKBvp4RqzrZJQF3mzulSkeXWqNZo9G9oV7W1PDbMA3o+DJdxRFyTK43aKUBCP31Q==}
'@ai-sdk/anthropic@0.0.30':
resolution: {integrity: sha512-iPJjKtIH8yk2cf5BNXLN6sn6TTghOh8puWothX4pPVBM/OKC4RWVjYTEELwUv2VDPIw918KBg2j/T0RfTgu+bw==}
engines: {node: '>=18'}
peerDependencies:
zod: ^3.0.0
'@ai-sdk/mistral@0.0.18':
resolution: {integrity: sha512-aNbdyINZU2Kmv6+uLEEbvQJxHChYf1RofIETYAmCZcOk3wU1gReWSjZK7eP9BzehXg1TkeF1UpT60bnzl0++Pg==}
'@ai-sdk/mistral@0.0.22':
resolution: {integrity: sha512-pSWuq3us+X2mMATiRyCfjOKoW7/CIsmKZsb5DrC9ZdxwjdIaQLl9yvmWu17QYJkOqjY77RwcCi9WGfrAEZG/QA==}
engines: {node: '>=18'}
peerDependencies:
zod: ^3.0.0
'@ai-sdk/openai@0.0.31':
resolution: {integrity: sha512-7ehX2N0NzCdxUOYXutwYgu6gdWO+zS/v8pWEd7VW8QpNq3equ0VZ0j+pDUNv4f3GJ449QwySb6+V+DHM9W/pLg==}
'@ai-sdk/openai@0.0.36':
resolution: {integrity: sha512-6IcvR35UMuuQEQPkVjzUtqDAuz6vy+PMCEL0PAS2ufHXdPPm81OTKVetqjgOPjebsikhVP0soK1pKPEe2cztAQ==}
engines: {node: '>=18'}
peerDependencies:
zod: ^3.0.0
'@ai-sdk/provider-utils@0.0.15':
resolution: {integrity: sha512-eTkIaZc/Ud96DYG40lLuKWJvZ2GoW/wT4KH9r1f3wGUhj5wgQN+bzgdI57z60VOEDuMmDVuILVnTLFe0HNT5Iw==}
'@ai-sdk/provider-utils@0.0.14':
resolution: {integrity: sha512-PCQFN3MlC6DShS/81IFU9NVvt9OekQGiZTEowRc2AwAwWrDsv7er3UkcMswFAL/Z7xZKjgu0dZTNH1z9oUlo7A==}
engines: {node: '>=18'}
peerDependencies:
zod: ^3.0.0
peerDependenciesMeta:
zod:
optional: true
'@ai-sdk/provider-utils@1.0.2':
resolution: {integrity: sha512-57f6O4OFVNEpI8Z8o+K40tIB3YQiTw+VCql/qrAO9Utq7Ti1o6+X9tvm177DlZJL7ft0Rwzvgy48S9YhrEKgmA==}
engines: {node: '>=18'}
peerDependencies:
zod: ^3.0.0
@ -2098,8 +2140,12 @@ packages:
resolution: {integrity: sha512-NzkrtREQpHID1cTqY/C4CI30PVOaXWKYytDR2EcytmFgnP7Z6+CrGIA/YCnNhYAuUm6Nx+nGpRL/Hmyrv7NYzg==}
engines: {node: '>=18'}
'@ai-sdk/react@0.0.4':
resolution: {integrity: sha512-YPvp81onTxNlnOWolyjvappS5y9pMkZwWKMxrqwMimaJI4NWquPrAeHCYqzaVAb/+RKaveEGSvyYs/SD8AO6ig==}
'@ai-sdk/provider@0.0.12':
resolution: {integrity: sha512-oOwPQD8i2Ynpn22cur4sk26FW3mSy6t6/X/K1Ay2yGBKYiSpRyLfObhOrZEGsXDx+3euKy4nEZ193R36NM+tpQ==}
engines: {node: '>=18'}
'@ai-sdk/react@0.0.20':
resolution: {integrity: sha512-L/PFqvT+rZd/aYZekEWXuVW3zooJEZAf2O5wL5JeKi71slBEuKZGGZP/7GzyX+8Uhl3JOg4wOdJTO/dogWzbAQ==}
engines: {node: '>=18'}
peerDependencies:
react: ^18 || ^19
@ -2110,8 +2156,8 @@ packages:
zod:
optional: true
'@ai-sdk/solid@0.0.4':
resolution: {integrity: sha512-1X/vauXG+V0Hsb2P8kZFKaDrderTtB/7XdHZ/UkSMzTk8k0twx9OEXgztW8Rggh51t6sdI7mUoqAY5Khvjf01w==}
'@ai-sdk/solid@0.0.14':
resolution: {integrity: sha512-9esGkm7/jocNELfGstrd3TYgWycXLP0OG6LXGGaEXd7v75eEp067avoLgQuPdWmzjnJD2U7N8u4wXa0lLd0WQQ==}
engines: {node: '>=18'}
peerDependencies:
solid-js: ^1.7.7
@ -2119,8 +2165,8 @@ packages:
solid-js:
optional: true
'@ai-sdk/svelte@0.0.4':
resolution: {integrity: sha512-LVxg9/60ARX8AQIswyDx53HQlQQH91yUOThhUA0x9s2BcxgpDgDN37imynnoZbU7lvA5M9NvwlinkmUdJzUVTA==}
'@ai-sdk/svelte@0.0.15':
resolution: {integrity: sha512-k4WwNgAddrQhumC6ogjZ/MPEk9kn3xEcD4CLX4CURX7y+641ktDIcZr5KeS+4o9U/jTrjSbYBJVr5HjoWm+Ixg==}
engines: {node: '>=18'}
peerDependencies:
svelte: ^3.0.0 || ^4.0.0
@ -2128,8 +2174,8 @@ packages:
svelte:
optional: true
'@ai-sdk/ui-utils@0.0.4':
resolution: {integrity: sha512-vUfuqVOZV3MyFokAduQyJsnDP00qzyZut6mizFscXlCOmiiW3FAnu/XEnMEwCmf7yUG7O4v7Xa2zd4X1tsN5pg==}
'@ai-sdk/ui-utils@0.0.12':
resolution: {integrity: sha512-ivveEuneZPOUKqcIqZRCr2NUD+LJC8mYfL7jJRWaCr+JZqdYZ+5uR/nc8GKCly2TcC9/qoF3zxQuZEn0c5805g==}
engines: {node: '>=18'}
peerDependencies:
zod: ^3.0.0
@ -2137,8 +2183,8 @@ packages:
zod:
optional: true
'@ai-sdk/vue@0.0.4':
resolution: {integrity: sha512-gWyvenqPi1FC8tvczKhla4pCDTVMXvXHpiIJaBn7fRNq2vO7gDSAr9O//SCSPGY3l1aUCKLgKJbbeoXiTRSGBQ==}
'@ai-sdk/vue@0.0.15':
resolution: {integrity: sha512-e8JBjZWV7MYdGcgiZCNp2qso/HdqJ2hSRD54oEELfiHgVf2y3FLnnRnc4M1MwyvX6WaVYvAd6+pdDgwVjU7h1Q==}
engines: {node: '>=18'}
peerDependencies:
vue: ^3.3.4
@ -2154,9 +2200,6 @@ packages:
resolution: {integrity: sha512-30iZtAPgz+LTIYoeivqYo853f02jBYSd5uGnGpkFV0M3xOt9aN73erkgYAmZU43x4VfqcnLxW9Kpg3R5LC4YYw==}
engines: {node: '>=6.0.0'}
'@anthropic-ai/sdk@0.20.6':
resolution: {integrity: sha512-vpVWAol+Ih1UkZGUj8DYPuqWDGxBp6M/JYz4nvq2HBT0zKdvi24Z9oznA7tr+HDed78JZrw+nbxs2I8JbTAIiQ==}
'@apidevtools/json-schema-ref-parser@9.0.6':
resolution: {integrity: sha512-M3YgsLjI0lZxvrpeGVk9Ap032W6TPQkH6pRAZz81Ac3WUNF79VQooAFnp8umjvVzUmD93NkogxEwbSce7qMsUg==}
@ -4626,6 +4669,10 @@ packages:
'@one-ini/wasm@0.1.1':
resolution: {integrity: sha512-XuySG1E38YScSJoMlqovLru4KTUNSjgVTIjyh7qMX6aNN5HY5Ct5LhRJdxO79JtTzKfzV/bnWpz+zquYrISsvw==}
'@opentelemetry/api@1.9.0':
resolution: {integrity: sha512-3giAOQvZiH5F9bMlMiv8+GSPMeqg0dbaeo58/0SlA9sxSqZhnUtxzX9/2FzyhS9sWQf5S0GJE0AKBrFqjpeYcg==}
engines: {node: '>=8.0.0'}
'@panva/hkdf@1.1.1':
resolution: {integrity: sha512-dhPeilub1NuIG0X5Kvhh9lH4iW3ZsHlnzwgwbOlgwQ2wG1IqFzsgHqmKPk3WzsdWAeaxKJxgM0+W433RmN45GA==}
@ -5164,6 +5211,15 @@ packages:
rollup:
optional: true
'@rollup/plugin-commonjs@26.0.1':
resolution: {integrity: sha512-UnsKoZK6/aGIH6AdkptXhNvhaqftcjq3zZdT+LY5Ftms6JR06nADcDsYp5hTU9E2lbJUEOhdlY5J4DNTneM+jQ==}
engines: {node: '>=16.0.0 || 14 >= 14.17'}
peerDependencies:
rollup: ^2.68.0||^3.0.0||^4.0.0
peerDependenciesMeta:
rollup:
optional: true
'@rollup/plugin-node-resolve@15.1.0':
resolution: {integrity: sha512-xeZHCgsiZ9pzYVgAo9580eCGqwh/XCEUM9q6iQfGNocjgkufHAqC3exA+45URvhiYV8sBF9RlBai650eNs7AsA==}
engines: {node: '>=14.0.0'}
@ -6459,8 +6515,8 @@ packages:
resolution: {integrity: sha512-0poP0T7el6Vq3rstR8Mn4V/IQrpBLO6POkUSrN7RhyY+GF/InCFShQzsQ39T25gkHhLgSLByyAz+Kjb+c2L98w==}
engines: {node: '>=12'}
ai@3.2.1:
resolution: {integrity: sha512-6C2rGQLeZmhbjPBOZy2IU8aGg2c9btL8QKWS+dT2Pyxik2ue28FbEsOWQ2O1DOG/5NLX6VM6yNXMlBem3N59Cg==}
ai@3.2.22:
resolution: {integrity: sha512-2u2YT6cf/bTRexUtSiSDco/3/z/xlQ9iiW3y2aH05RwDlj9Q6rpALsTdjRNcglI+OBPaXUEORB/bD1dRwxob6Q==}
engines: {node: '>=18'}
peerDependencies:
openai: ^4.42.0
@ -8451,11 +8507,16 @@ packages:
engines: {node: '>=16 || 14 >=14.17'}
hasBin: true
glob@10.4.5:
resolution: {integrity: sha512-7Bv8RF0k6xjo7d4A/PxYLbUCfb6c+Vpd2/mB2yRDlew7Jb5hEXiCD9ibfO7wpk8i4sevK6DFny9h7EYbM3/sHg==}
hasBin: true
glob@7.1.7:
resolution: {integrity: sha512-OvD9ENzPLbegENnYP5UUfJIirTg4+XwMWGaQfQTY0JenxNvvIKP3U3/tAQSPIu/lHxXYSZmpXlUHeqAIdKzBLQ==}
glob@7.2.3:
resolution: {integrity: sha512-nFR0zLpU2YCaRxwoCJvL6UvCH2JFyFVIvwTLsIf21AuHlMskA1hhTdk+LlYJtOlYt9v6dvszD2BGRqBL+iQK9Q==}
deprecated: Glob versions prior to v9 are no longer supported
glob@8.1.0:
resolution: {integrity: sha512-r8hpEjiQEYlF2QU0df3dS+nxxSIreXQS1qRhMJM0Q5NDdR386C7jb7Hwwod8Fgiuex+k0GFjgft18yvxm5XoCQ==}
@ -9084,6 +9145,9 @@ packages:
resolution: {integrity: sha512-N3yCS/NegsOBokc8GAdM8UcmfsKiSS8cipheD/nivzr700H+nsMOxJjQnvwOcRYVuFkdH0wGUvW2WbXGmrZGbQ==}
engines: {node: '>=14'}
jackspeak@3.4.3:
resolution: {integrity: sha512-OGlZQpz2yfahA/Rd1Y8Cd9SIEsqvXkLVoSw/cgwhnhFMDbsQFeZYoJJ7bIZBS9BcamUW96asq/npPWugM+RQBw==}
jest-changed-files@29.7.0:
resolution: {integrity: sha512-fEArFiwf1BpQ+4bXSprcDc3/x4HSzL4al2tozwVpDFpsxALjLYdyiIK4e5Vz66GQJIbXJ82+35PtysofptNX2w==}
engines: {node: ^14.15.0 || ^16.10.0 || >=18.0.0}
@ -9968,6 +10032,10 @@ packages:
resolution: {integrity: sha512-RHiac9mvaRw0x3AYRgDC1CxAP7HTcNrrECeA8YYJeWnpo+2Q5CegtZjaotWTWxDG3UeGA1coE05iH1mPjT/2mg==}
engines: {node: '>=16 || 14 >=14.17'}
minimatch@9.0.5:
resolution: {integrity: sha512-G6T0ZX48xgozx7587koeX9Ys2NYy6Gmv//P89sEte9V9whIapMNF4idKxnW2QtCcLiTWlb/wfCabAtAFWhhBow==}
engines: {node: '>=16 || 14 >=14.17'}
minimist-options@4.1.0:
resolution: {integrity: sha512-Q4r8ghd80yhO/0j1O3B2BjweX3fiHg9cdOwjJd2J76Q135c+NDxGCqdYKQ1SKBuFfgWbAUzBfvYjPUEeNgqN1A==}
engines: {node: '>= 6'}
@ -9991,6 +10059,10 @@ packages:
resolution: {integrity: sha512-jYofLM5Dam9279rdkWzqHozUo4ybjdZmCsDHePy5V/PbBcVMiSZR97gmAy45aqi8CK1lG2ECd356FU86avfwUQ==}
engines: {node: '>=16 || 14 >=14.17'}
minipass@7.1.2:
resolution: {integrity: sha512-qOOzS1cBTWYF4BH8fVePDBOO9iptMnGUEZwNc/cMWnTV2nVLZ7VoNWEPHkYczZA0pdoA7dl6e7FL659nX9S2aw==}
engines: {node: '>=16 || 14 >=14.17'}
minizlib@2.1.2:
resolution: {integrity: sha512-bAxsR8BVfj60DWXHE3u30oHzfl4G7khkSuPW+qvpd7jFRHm7dLxOjUk1EHACJ/hxLY8phGJ0YhYHZo7jil7Qdg==}
engines: {node: '>= 8'}
@ -10380,6 +10452,10 @@ packages:
resolution: {integrity: sha512-WWSxhC/69ZhYWxH/OBsLEirIjUcfpQ5+ihkXKp06hmeYXgBBIUCa9IptMzYx6NdkiOCsSGYCnTIsxaic3AjRCQ==}
hasBin: true
openai@4.52.7:
resolution: {integrity: sha512-dgxA6UZHary6NXUHEDj5TWt8ogv0+ibH+b4pT5RrWMjiRZVylNwLcw/2ubDrX5n0oUmHX/ZgudMJeemxzOvz7A==}
hasBin: true
openapi-types@12.1.3:
resolution: {integrity: sha512-N4YtSYJqghVu4iek2ZUvcN/0aqH1kRDuNqzcycDxhOUpg7GdvLa2F3DgS6yBNhInhv2r/6I0Flkn7CqL8+nIcw==}
@ -10460,6 +10536,9 @@ packages:
resolution: {integrity: sha512-R4nPAVTAU0B9D35/Gk3uJf/7XYbQcyohSKdvAxIRSNghFl4e71hVoGnBNQz9cWaXxO2I10KTC+3jMdvvoKw6dQ==}
engines: {node: '>=6'}
package-json-from-dist@1.0.0:
resolution: {integrity: sha512-dATvCeZN/8wQsGywez1mzHtTlP22H8OEfPrVMLNr4/eGa+ijtLn/6M5f0dY8UKNrC2O9UCU6SSoG3qRKnt7STw==}
pad@2.3.0:
resolution: {integrity: sha512-lxrgnOG5AXmzMRT1O5urWtYFxHnFSE+QntgTHij1nvS4W+ubhQLmQRHmZXDeEvk9I00itAixLqU9Q6fE0gW3sw==}
engines: {node: '>= 4.0.0'}
@ -10517,6 +10596,10 @@ packages:
resolution: {integrity: sha512-MkhCqzzBEpPvxxQ71Md0b1Kk51W01lrYvlMzSUaIzNsODdd7mqhiimSZlr+VegAz5Z6Vzt9Xg2ttE//XBhH3EQ==}
engines: {node: '>=16 || 14 >=14.17'}
path-scurry@1.11.1:
resolution: {integrity: sha512-Xa4Nw17FS9ApQFJ9umLiJS4orGjm7ZzwUrwamcGQuHSzDyth9boKDaycYdDcZDuqYATXw4HFXgaqWTctW/v1HA==}
engines: {node: '>=16 || 14 >=14.18'}
path-to-regexp@0.1.7:
resolution: {integrity: sha512-5DFkuoqlv1uYQKxy8omFBeJPQcdoE07Kv2sferDCrAq1ohOU+MSDswDIbnx3YAM60qIOnYa53wBhXW0EbMonrQ==}
@ -11628,13 +11711,6 @@ packages:
solid-js@1.7.8:
resolution: {integrity: sha512-XHBWk1FvFd0JMKljko7FfhefJMTSgYEuVKcQ2a8hzRXfiuSJAGsrPPafqEo+f6l+e8Oe3cROSpIL6kbzjC1fjQ==}
solid-swr-store@0.10.7:
resolution: {integrity: sha512-A6d68aJmRP471aWqKKPE2tpgOiR5fH4qXQNfKIec+Vap+MGQm3tvXlT8n0I8UgJSlNAsSAUuw2VTviH2h3Vv5g==}
engines: {node: '>=10'}
peerDependencies:
solid-js: ^1.2
swr-store: ^0.10
sonner@1.3.1:
resolution: {integrity: sha512-+rOAO56b2eI3q5BtgljERSn2umRk63KFIvgb2ohbZ5X+Eb5u+a/7/0ZgswYqgBMg8dyl7n6OXd9KasA8QF9ToA==}
peerDependencies:
@ -11929,10 +12005,6 @@ packages:
engines: {node: '>=10.13.0'}
hasBin: true
swr-store@0.10.6:
resolution: {integrity: sha512-xPjB1hARSiRaNNlUQvWSVrG5SirCjk2TmaUyzzvk69SZQan9hCJqw/5rG9iL7xElHU784GxRPISClq4488/XVw==}
engines: {node: '>=10'}
swr@2.2.0:
resolution: {integrity: sha512-AjqHOv2lAhkuUdIiBu9xbuettzAzWXmCEcLONNKJRba87WAefz8Ca9d6ds/SzrPc235n1IxWYdhJ2zF3MNUaoQ==}
peerDependencies:
@ -12886,25 +12958,25 @@ snapshots:
'@aashutoshrathi/word-wrap@1.2.6': {}
'@ai-sdk/anthropic@0.0.21(zod@3.22.4)':
'@ai-sdk/anthropic@0.0.30(zod@3.22.4)':
dependencies:
'@ai-sdk/provider': 0.0.10
'@ai-sdk/provider-utils': 0.0.15(zod@3.22.4)
'@ai-sdk/provider': 0.0.12
'@ai-sdk/provider-utils': 1.0.2(zod@3.22.4)
zod: 3.22.4
'@ai-sdk/mistral@0.0.18(zod@3.22.4)':
'@ai-sdk/mistral@0.0.22(zod@3.22.4)':
dependencies:
'@ai-sdk/provider': 0.0.10
'@ai-sdk/provider-utils': 0.0.15(zod@3.22.4)
'@ai-sdk/provider': 0.0.12
'@ai-sdk/provider-utils': 1.0.2(zod@3.22.4)
zod: 3.22.4
'@ai-sdk/openai@0.0.31(zod@3.22.4)':
'@ai-sdk/openai@0.0.36(zod@3.22.4)':
dependencies:
'@ai-sdk/provider': 0.0.10
'@ai-sdk/provider-utils': 0.0.15(zod@3.22.4)
'@ai-sdk/provider': 0.0.12
'@ai-sdk/provider-utils': 1.0.2(zod@3.22.4)
zod: 3.22.4
'@ai-sdk/provider-utils@0.0.15(zod@3.22.4)':
'@ai-sdk/provider-utils@0.0.14(zod@3.22.4)':
dependencies:
'@ai-sdk/provider': 0.0.10
eventsource-parser: 1.1.2
@ -12913,49 +12985,61 @@ snapshots:
optionalDependencies:
zod: 3.22.4
'@ai-sdk/provider-utils@1.0.2(zod@3.22.4)':
dependencies:
'@ai-sdk/provider': 0.0.12
eventsource-parser: 1.1.2
nanoid: 3.3.6
secure-json-parse: 2.7.0
optionalDependencies:
zod: 3.22.4
'@ai-sdk/provider@0.0.10':
dependencies:
json-schema: 0.4.0
'@ai-sdk/react@0.0.4(react@18.2.0)(zod@3.22.4)':
'@ai-sdk/provider@0.0.12':
dependencies:
'@ai-sdk/provider-utils': 0.0.15(zod@3.22.4)
'@ai-sdk/ui-utils': 0.0.4(zod@3.22.4)
json-schema: 0.4.0
'@ai-sdk/react@0.0.20(react@18.2.0)(zod@3.22.4)':
dependencies:
'@ai-sdk/provider-utils': 1.0.2(zod@3.22.4)
'@ai-sdk/ui-utils': 0.0.12(zod@3.22.4)
swr: 2.2.0(react@18.2.0)
optionalDependencies:
react: 18.2.0
zod: 3.22.4
'@ai-sdk/solid@0.0.4(solid-js@1.7.8)(zod@3.22.4)':
'@ai-sdk/solid@0.0.14(solid-js@1.7.8)(zod@3.22.4)':
dependencies:
'@ai-sdk/ui-utils': 0.0.4(zod@3.22.4)
solid-swr-store: 0.10.7(solid-js@1.7.8)(swr-store@0.10.6)
swr-store: 0.10.6
'@ai-sdk/ui-utils': 0.0.12(zod@3.22.4)
optionalDependencies:
solid-js: 1.7.8
transitivePeerDependencies:
- zod
'@ai-sdk/svelte@0.0.4(svelte@4.2.12)(zod@3.22.4)':
'@ai-sdk/svelte@0.0.15(svelte@4.2.12)(zod@3.22.4)':
dependencies:
'@ai-sdk/provider-utils': 0.0.15(zod@3.22.4)
'@ai-sdk/ui-utils': 0.0.4(zod@3.22.4)
'@ai-sdk/provider-utils': 1.0.2(zod@3.22.4)
'@ai-sdk/ui-utils': 0.0.12(zod@3.22.4)
sswr: 2.1.0(svelte@4.2.12)
optionalDependencies:
svelte: 4.2.12
transitivePeerDependencies:
- zod
'@ai-sdk/ui-utils@0.0.4(zod@3.22.4)':
'@ai-sdk/ui-utils@0.0.12(zod@3.22.4)':
dependencies:
'@ai-sdk/provider-utils': 0.0.15(zod@3.22.4)
'@ai-sdk/provider-utils': 1.0.2(zod@3.22.4)
secure-json-parse: 2.7.0
optionalDependencies:
zod: 3.22.4
'@ai-sdk/vue@0.0.4(vue@3.4.21(typescript@5.4.5))(zod@3.22.4)':
'@ai-sdk/vue@0.0.15(vue@3.4.21(typescript@5.4.5))(zod@3.22.4)':
dependencies:
'@ai-sdk/ui-utils': 0.0.4(zod@3.22.4)
'@ai-sdk/provider-utils': 0.0.14(zod@3.22.4)
'@ai-sdk/ui-utils': 0.0.12(zod@3.22.4)
swrv: 1.0.4(vue@3.4.21(typescript@5.4.5))
optionalDependencies:
vue: 3.4.21(typescript@5.4.5)
@ -12969,19 +13053,6 @@ snapshots:
'@jridgewell/gen-mapping': 0.3.5
'@jridgewell/trace-mapping': 0.3.25
'@anthropic-ai/sdk@0.20.6':
dependencies:
'@types/node': 18.11.18
'@types/node-fetch': 2.6.11
abort-controller: 3.0.0
agentkeepalive: 4.5.0
form-data-encoder: 1.7.2
formdata-node: 4.4.1
node-fetch: 2.7.0
web-streams-polyfill: 3.3.3
transitivePeerDependencies:
- encoding
'@apidevtools/json-schema-ref-parser@9.0.6':
dependencies:
'@jsdevtools/ono': 7.1.3
@ -14260,12 +14331,12 @@ snapshots:
transitivePeerDependencies:
- '@types/react'
'@chakra-ui/next-js@2.2.0(@chakra-ui/react@2.8.2(@emotion/react@11.11.4(@types/react@18.2.15)(react@18.2.0))(@emotion/styled@11.11.5(@emotion/react@11.11.4(@types/react@18.2.15)(react@18.2.0))(@types/react@18.2.15)(react@18.2.0))(@types/react@18.2.15)(framer-motion@11.1.7(@emotion/is-prop-valid@1.2.2)(react-dom@18.2.0(react@18.2.0))(react@18.2.0))(react-dom@18.2.0(react@18.2.0))(react@18.2.0))(@emotion/react@11.11.4(@types/react@18.2.15)(react@18.2.0))(next@14.1.0(@babel/core@7.22.9)(react-dom@18.2.0(react@18.2.0))(react@18.2.0))(react@18.2.0)':
'@chakra-ui/next-js@2.2.0(@chakra-ui/react@2.8.2(@emotion/react@11.11.4(@types/react@18.2.15)(react@18.2.0))(@emotion/styled@11.11.5(@emotion/react@11.11.4(@types/react@18.2.15)(react@18.2.0))(@types/react@18.2.15)(react@18.2.0))(@types/react@18.2.15)(framer-motion@11.1.7(@emotion/is-prop-valid@1.2.2)(react-dom@18.2.0(react@18.2.0))(react@18.2.0))(react-dom@18.2.0(react@18.2.0))(react@18.2.0))(@emotion/react@11.11.4(@types/react@18.2.15)(react@18.2.0))(next@14.1.0(@babel/core@7.22.9)(@opentelemetry/api@1.9.0)(react-dom@18.2.0(react@18.2.0))(react@18.2.0))(react@18.2.0)':
dependencies:
'@chakra-ui/react': 2.8.2(@emotion/react@11.11.4(@types/react@18.2.15)(react@18.2.0))(@emotion/styled@11.11.5(@emotion/react@11.11.4(@types/react@18.2.15)(react@18.2.0))(@types/react@18.2.15)(react@18.2.0))(@types/react@18.2.15)(framer-motion@11.1.7(@emotion/is-prop-valid@1.2.2)(react-dom@18.2.0(react@18.2.0))(react@18.2.0))(react-dom@18.2.0(react@18.2.0))(react@18.2.0)
'@emotion/cache': 11.11.0
'@emotion/react': 11.11.4(@types/react@18.2.15)(react@18.2.0)
next: 14.1.0(@babel/core@7.22.9)(react-dom@18.2.0(react@18.2.0))(react@18.2.0)
next: 14.1.0(@babel/core@7.22.9)(@opentelemetry/api@1.9.0)(react-dom@18.2.0(react@18.2.0))(react@18.2.0)
react: 18.2.0
'@chakra-ui/number-input@2.1.2(@chakra-ui/system@2.6.2(@emotion/react@11.11.4(@types/react@18.2.15)(react@18.2.0))(@emotion/styled@11.11.5(@emotion/react@11.11.4(@types/react@18.2.15)(react@18.2.0))(@types/react@18.2.15)(react@18.2.0))(react@18.2.0))(react@18.2.0)':
@ -16273,6 +16344,8 @@ snapshots:
'@one-ini/wasm@0.1.1': {}
'@opentelemetry/api@1.9.0': {}
'@panva/hkdf@1.1.1': {}
'@paralleldrive/cuid2@2.2.1':
@ -16808,6 +16881,17 @@ snapshots:
optionalDependencies:
rollup: 2.78.0
'@rollup/plugin-commonjs@26.0.1(rollup@3.26.2)':
dependencies:
'@rollup/pluginutils': 5.1.0(rollup@3.26.2)
commondir: 1.0.1
estree-walker: 2.0.2
glob: 10.4.5
is-reference: 1.2.1
magic-string: 0.30.8
optionalDependencies:
rollup: 3.26.2
'@rollup/plugin-node-resolve@15.1.0(rollup@3.26.2)':
dependencies:
'@rollup/pluginutils': 5.1.0(rollup@3.26.2)
@ -16904,7 +16988,7 @@ snapshots:
'@sentry/utils': 7.77.0
localforage: 1.10.0
'@sentry/nextjs@7.77.0(next@14.1.0(@babel/core@7.24.0)(react-dom@18.2.0(react@18.2.0))(react@18.2.0))(react@18.2.0)(webpack@5.90.3)':
'@sentry/nextjs@7.77.0(next@14.1.0(@babel/core@7.24.0)(@opentelemetry/api@1.9.0)(react-dom@18.2.0(react@18.2.0))(react@18.2.0))(react@18.2.0)(webpack@5.90.3)':
dependencies:
'@rollup/plugin-commonjs': 24.0.0(rollup@2.78.0)
'@sentry/core': 7.77.0
@ -16916,7 +17000,7 @@ snapshots:
'@sentry/vercel-edge': 7.77.0
'@sentry/webpack-plugin': 1.20.0
chalk: 3.0.0
next: 14.1.0(@babel/core@7.24.0)(react-dom@18.2.0(react@18.2.0))(react@18.2.0)
next: 14.1.0(@babel/core@7.24.0)(@opentelemetry/api@1.9.0)(react-dom@18.2.0(react@18.2.0))(react@18.2.0)
react: 18.2.0
resolve: 1.22.8
rollup: 2.78.0
@ -16927,7 +17011,7 @@ snapshots:
- encoding
- supports-color
'@sentry/nextjs@7.77.0(next@14.1.0(react-dom@18.2.0(react@18.2.0))(react@18.2.0))(react@18.2.0)(webpack@5.90.3)':
'@sentry/nextjs@7.77.0(next@14.1.0(@opentelemetry/api@1.9.0)(react-dom@18.2.0(react@18.2.0))(react@18.2.0))(react@18.2.0)(webpack@5.90.3)':
dependencies:
'@rollup/plugin-commonjs': 24.0.0(rollup@2.78.0)
'@sentry/core': 7.77.0
@ -16939,7 +17023,7 @@ snapshots:
'@sentry/vercel-edge': 7.77.0
'@sentry/webpack-plugin': 1.20.0
chalk: 3.0.0
next: 14.1.0(@babel/core@7.22.9)(react-dom@18.2.0(react@18.2.0))(react@18.2.0)
next: 14.1.0(@babel/core@7.22.9)(@opentelemetry/api@1.9.0)(react-dom@18.2.0(react@18.2.0))(react@18.2.0)
react: 18.2.0
resolve: 1.22.8
rollup: 2.78.0
@ -17156,13 +17240,13 @@ snapshots:
dependencies:
'@trpc/server': 10.40.0
'@trpc/next@10.40.0(@tanstack/react-query@4.29.19(react-dom@18.2.0(react@18.2.0))(react@18.2.0))(@trpc/client@10.40.0(@trpc/server@10.40.0))(@trpc/react-query@10.40.0(@tanstack/react-query@4.29.19(react-dom@18.2.0(react@18.2.0))(react@18.2.0))(@trpc/client@10.40.0(@trpc/server@10.40.0))(@trpc/server@10.40.0)(react-dom@18.2.0(react@18.2.0))(react@18.2.0))(@trpc/server@10.40.0)(next@14.1.0(@babel/core@7.24.0)(react-dom@18.2.0(react@18.2.0))(react@18.2.0))(react-dom@18.2.0(react@18.2.0))(react@18.2.0)':
'@trpc/next@10.40.0(@tanstack/react-query@4.29.19(react-dom@18.2.0(react@18.2.0))(react@18.2.0))(@trpc/client@10.40.0(@trpc/server@10.40.0))(@trpc/react-query@10.40.0(@tanstack/react-query@4.29.19(react-dom@18.2.0(react@18.2.0))(react@18.2.0))(@trpc/client@10.40.0(@trpc/server@10.40.0))(@trpc/server@10.40.0)(react-dom@18.2.0(react@18.2.0))(react@18.2.0))(@trpc/server@10.40.0)(next@14.1.0(@babel/core@7.24.0)(@opentelemetry/api@1.9.0)(react-dom@18.2.0(react@18.2.0))(react@18.2.0))(react-dom@18.2.0(react@18.2.0))(react@18.2.0)':
dependencies:
'@tanstack/react-query': 4.29.19(react-dom@18.2.0(react@18.2.0))(react@18.2.0)
'@trpc/client': 10.40.0(@trpc/server@10.40.0)
'@trpc/react-query': 10.40.0(@tanstack/react-query@4.29.19(react-dom@18.2.0(react@18.2.0))(react@18.2.0))(@trpc/client@10.40.0(@trpc/server@10.40.0))(@trpc/server@10.40.0)(react-dom@18.2.0(react@18.2.0))(react@18.2.0)
'@trpc/server': 10.40.0
next: 14.1.0(@babel/core@7.24.0)(react-dom@18.2.0(react@18.2.0))(react@18.2.0)
next: 14.1.0(@babel/core@7.24.0)(@opentelemetry/api@1.9.0)(react-dom@18.2.0(react@18.2.0))(react@18.2.0)
react: 18.2.0
react-dom: 18.2.0(react@18.2.0)
react-ssr-prepass: 1.5.0(react@18.2.0)
@ -18635,15 +18719,16 @@ snapshots:
clean-stack: 4.2.0
indent-string: 5.0.0
ai@3.2.1(openai@4.47.1)(react@18.2.0)(solid-js@1.7.8)(svelte@4.2.12)(vue@3.4.21(typescript@5.4.5))(zod@3.22.4):
ai@3.2.22(openai@4.47.1)(react@18.2.0)(solid-js@1.7.8)(svelte@4.2.12)(vue@3.4.21(typescript@5.4.5))(zod@3.22.4):
dependencies:
'@ai-sdk/provider': 0.0.10
'@ai-sdk/provider-utils': 0.0.15(zod@3.22.4)
'@ai-sdk/react': 0.0.4(react@18.2.0)(zod@3.22.4)
'@ai-sdk/solid': 0.0.4(solid-js@1.7.8)(zod@3.22.4)
'@ai-sdk/svelte': 0.0.4(svelte@4.2.12)(zod@3.22.4)
'@ai-sdk/ui-utils': 0.0.4(zod@3.22.4)
'@ai-sdk/vue': 0.0.4(vue@3.4.21(typescript@5.4.5))(zod@3.22.4)
'@ai-sdk/provider': 0.0.12
'@ai-sdk/provider-utils': 1.0.2(zod@3.22.4)
'@ai-sdk/react': 0.0.20(react@18.2.0)(zod@3.22.4)
'@ai-sdk/solid': 0.0.14(solid-js@1.7.8)(zod@3.22.4)
'@ai-sdk/svelte': 0.0.15(svelte@4.2.12)(zod@3.22.4)
'@ai-sdk/ui-utils': 0.0.12(zod@3.22.4)
'@ai-sdk/vue': 0.0.15(vue@3.4.21(typescript@5.4.5))(zod@3.22.4)
'@opentelemetry/api': 1.9.0
eventsource-parser: 1.1.2
json-schema: 0.4.0
jsondiffpatch: 0.6.0
@ -18660,6 +18745,32 @@ snapshots:
- solid-js
- vue
ai@3.2.22(openai@4.52.7)(react@18.2.0)(solid-js@1.7.8)(svelte@4.2.12)(vue@3.4.21(typescript@5.4.5))(zod@3.22.4):
dependencies:
'@ai-sdk/provider': 0.0.12
'@ai-sdk/provider-utils': 1.0.2(zod@3.22.4)
'@ai-sdk/react': 0.0.20(react@18.2.0)(zod@3.22.4)
'@ai-sdk/solid': 0.0.14(solid-js@1.7.8)(zod@3.22.4)
'@ai-sdk/svelte': 0.0.15(svelte@4.2.12)(zod@3.22.4)
'@ai-sdk/ui-utils': 0.0.12(zod@3.22.4)
'@ai-sdk/vue': 0.0.15(vue@3.4.21(typescript@5.4.5))(zod@3.22.4)
'@opentelemetry/api': 1.9.0
eventsource-parser: 1.1.2
json-schema: 0.4.0
jsondiffpatch: 0.6.0
nanoid: 3.3.6
secure-json-parse: 2.7.0
sswr: 2.1.0(svelte@4.2.12)
zod-to-json-schema: 3.22.5(zod@3.22.4)
optionalDependencies:
openai: 4.52.7
react: 18.2.0
svelte: 4.2.12
zod: 3.22.4
transitivePeerDependencies:
- solid-js
- vue
ajv-draft-04@1.0.0(ajv@8.12.0):
optionalDependencies:
ajv: 8.12.0
@ -20967,6 +21078,15 @@ snapshots:
minipass: 7.0.4
path-scurry: 1.10.1
glob@10.4.5:
dependencies:
foreground-child: 3.1.1
jackspeak: 3.4.3
minimatch: 9.0.5
minipass: 7.1.2
package-json-from-dist: 1.0.0
path-scurry: 1.11.1
glob@7.1.7:
dependencies:
fs.realpath: 1.0.0
@ -21753,6 +21873,12 @@ snapshots:
optionalDependencies:
'@pkgjs/parseargs': 0.11.0
jackspeak@3.4.3:
dependencies:
'@isaacs/cliui': 8.0.2
optionalDependencies:
'@pkgjs/parseargs': 0.11.0
jest-changed-files@29.7.0:
dependencies:
execa: 5.1.1
@ -22109,7 +22235,7 @@ snapshots:
dependencies:
config-chain: 1.1.13
editorconfig: 1.0.4
glob: 10.3.10
glob: 10.4.5
js-cookie: 3.0.5
nopt: 7.2.0
@ -23175,6 +23301,10 @@ snapshots:
dependencies:
brace-expansion: 2.0.1
minimatch@9.0.5:
dependencies:
brace-expansion: 2.0.1
minimist-options@4.1.0:
dependencies:
arrify: 1.0.1
@ -23208,6 +23338,8 @@ snapshots:
minipass@7.0.4: {}
minipass@7.1.2: {}
minizlib@2.1.2:
dependencies:
minipass: 3.3.6
@ -23261,11 +23393,11 @@ snapshots:
dependencies:
'@babel/runtime': 7.24.0
chokidar: 3.6.0
glob: 10.3.10
glob: 10.4.5
html-minifier: 4.0.0
js-beautify: 1.15.1
lodash: 4.17.21
minimatch: 9.0.3
minimatch: 9.0.5
mjml-core: 4.15.3
mjml-migrate: 4.15.3
mjml-parser-xml: 4.15.3
@ -23573,13 +23705,13 @@ snapshots:
neo-async@2.6.2: {}
next-auth@4.22.1(next@14.1.0(@babel/core@7.24.0)(react-dom@18.2.0(react@18.2.0))(react@18.2.0))(nodemailer@6.9.8)(react-dom@18.2.0(react@18.2.0))(react@18.2.0):
next-auth@4.22.1(next@14.1.0(@babel/core@7.24.0)(@opentelemetry/api@1.9.0)(react-dom@18.2.0(react@18.2.0))(react@18.2.0))(nodemailer@6.9.8)(react-dom@18.2.0(react@18.2.0))(react@18.2.0):
dependencies:
'@babel/runtime': 7.24.0
'@panva/hkdf': 1.1.1
cookie: 0.5.0
jose: 4.15.5
next: 14.1.0(@babel/core@7.24.0)(react-dom@18.2.0(react@18.2.0))(react@18.2.0)
next: 14.1.0(@babel/core@7.24.0)(@opentelemetry/api@1.9.0)(react-dom@18.2.0(react@18.2.0))(react@18.2.0)
oauth: 0.9.15
openid-client: 5.6.5
preact: 10.19.6
@ -23609,7 +23741,7 @@ snapshots:
dependencies:
enhanced-resolve: 5.16.0
next@14.0.5-canary.46(react-dom@18.2.0(react@18.2.0))(react@18.2.0):
next@14.0.5-canary.46(@opentelemetry/api@1.9.0)(react-dom@18.2.0(react@18.2.0))(react@18.2.0):
dependencies:
'@next/env': 14.0.5-canary.46
'@swc/helpers': 0.5.2
@ -23630,11 +23762,12 @@ snapshots:
'@next/swc-win32-arm64-msvc': 14.0.5-canary.46
'@next/swc-win32-ia32-msvc': 14.0.5-canary.46
'@next/swc-win32-x64-msvc': 14.0.5-canary.46
'@opentelemetry/api': 1.9.0
transitivePeerDependencies:
- '@babel/core'
- babel-plugin-macros
next@14.1.0(@babel/core@7.22.9)(react-dom@18.2.0(react@18.2.0))(react@18.2.0):
next@14.1.0(@babel/core@7.22.9)(@opentelemetry/api@1.9.0)(react-dom@18.2.0(react@18.2.0))(react@18.2.0):
dependencies:
'@next/env': 14.1.0
'@swc/helpers': 0.5.2
@ -23655,11 +23788,12 @@ snapshots:
'@next/swc-win32-arm64-msvc': 14.1.0
'@next/swc-win32-ia32-msvc': 14.1.0
'@next/swc-win32-x64-msvc': 14.1.0
'@opentelemetry/api': 1.9.0
transitivePeerDependencies:
- '@babel/core'
- babel-plugin-macros
next@14.1.0(@babel/core@7.24.0)(react-dom@18.2.0(react@18.2.0))(react@18.2.0):
next@14.1.0(@babel/core@7.24.0)(@opentelemetry/api@1.9.0)(react-dom@18.2.0(react@18.2.0))(react@18.2.0):
dependencies:
'@next/env': 14.1.0
'@swc/helpers': 0.5.2
@ -23680,19 +23814,20 @@ snapshots:
'@next/swc-win32-arm64-msvc': 14.1.0
'@next/swc-win32-ia32-msvc': 14.1.0
'@next/swc-win32-x64-msvc': 14.1.0
'@opentelemetry/api': 1.9.0
transitivePeerDependencies:
- '@babel/core'
- babel-plugin-macros
nextjs-cors@2.1.2(next@14.1.0(@babel/core@7.24.0)(react-dom@18.2.0(react@18.2.0))(react@18.2.0)):
nextjs-cors@2.1.2(next@14.1.0(@babel/core@7.24.0)(@opentelemetry/api@1.9.0)(react-dom@18.2.0(react@18.2.0))(react@18.2.0)):
dependencies:
cors: 2.8.5
next: 14.1.0(@babel/core@7.24.0)(react-dom@18.2.0(react@18.2.0))(react@18.2.0)
next: 14.1.0(@babel/core@7.24.0)(@opentelemetry/api@1.9.0)(react-dom@18.2.0(react@18.2.0))(react@18.2.0)
nextjs-cors@2.1.2(next@14.1.0(react-dom@18.2.0(react@18.2.0))(react@18.2.0)):
nextjs-cors@2.1.2(next@14.1.0(@opentelemetry/api@1.9.0)(react-dom@18.2.0(react@18.2.0))(react@18.2.0)):
dependencies:
cors: 2.8.5
next: 14.1.0(@babel/core@7.22.9)(react-dom@18.2.0(react@18.2.0))(react@18.2.0)
next: 14.1.0(@babel/core@7.22.9)(@opentelemetry/api@1.9.0)(react-dom@18.2.0(react@18.2.0))(react@18.2.0)
no-case@2.3.2:
dependencies:
@ -23862,6 +23997,19 @@ snapshots:
transitivePeerDependencies:
- encoding
openai@4.52.7:
dependencies:
'@types/node': 18.11.18
'@types/node-fetch': 2.6.11
abort-controller: 3.0.0
agentkeepalive: 4.5.0
form-data-encoder: 1.7.2
formdata-node: 4.4.1
node-fetch: 2.7.0
web-streams-polyfill: 3.3.3
transitivePeerDependencies:
- encoding
openapi-types@12.1.3: {}
openapi3-ts@4.2.2:
@ -23957,6 +24105,8 @@ snapshots:
p-try@2.2.0: {}
package-json-from-dist@1.0.0: {}
pad@2.3.0:
dependencies:
wcwidth: 1.0.1
@ -24018,7 +24168,12 @@ snapshots:
path-scurry@1.10.1:
dependencies:
lru-cache: 10.2.0
minipass: 7.0.4
minipass: 7.1.2
path-scurry@1.11.1:
dependencies:
lru-cache: 10.2.0
minipass: 7.1.2
path-to-regexp@0.1.7: {}
@ -24517,7 +24672,7 @@ snapshots:
react: 18.2.0
scheduler: 0.23.0
react-email@2.0.0(@swc/helpers@0.5.10)(eslint@8.44.0):
react-email@2.0.0(@opentelemetry/api@1.9.0)(@swc/helpers@0.5.10)(eslint@8.44.0):
dependencies:
'@radix-ui/colors': 1.0.1
'@radix-ui/react-collapsible': 1.0.3(@types/react-dom@18.2.15)(@types/react@18.2.15)(react-dom@18.2.0(react@18.2.0))(react@18.2.0)
@ -24544,7 +24699,7 @@ snapshots:
glob: 10.3.4
log-symbols: 4.1.0
mime-types: 2.1.35
next: 14.0.5-canary.46(react-dom@18.2.0(react@18.2.0))(react@18.2.0)
next: 14.0.5-canary.46(@opentelemetry/api@1.9.0)(react-dom@18.2.0(react@18.2.0))(react@18.2.0)
normalize-path: 3.0.0
ora: 5.4.1
postcss: 8.4.32
@ -25336,11 +25491,6 @@ snapshots:
csstype: 3.1.3
seroval: 0.5.1
solid-swr-store@0.10.7(solid-js@1.7.8)(swr-store@0.10.6):
dependencies:
solid-js: 1.7.8
swr-store: 0.10.6
sonner@1.3.1(react-dom@18.2.0(react@18.2.0))(react@18.2.0):
dependencies:
react: 18.2.0
@ -25647,10 +25797,6 @@ snapshots:
picocolors: 1.0.0
stable: 0.1.8
swr-store@0.10.6:
dependencies:
dequal: 2.0.3
swr@2.2.0(react@18.2.0):
dependencies:
react: 18.2.0