From 957eaf33dd76bb8fba1741d2a42e3ff28560770a Mon Sep 17 00:00:00 2001
From: Baptiste Arnaud
Date: Fri, 8 Dec 2023 13:43:58 +0000
Subject: [PATCH] :zap: Add dynamic timeout to bot engine api

---
 .../src/features/chat/api/continueChat.ts    |  6 ++-
 .../openai/audio/createSpeechOpenAI.ts       |  1 +
 .../openai/createChatCompletionOpenAI.ts     | 16 +++---
 .../webhook/executeWebhookBlock.ts           | 49 +++++++++++++++----
 .../zemanticAi/executeZemanticAiBlock.ts     |  7 ++-
 packages/bot-engine/continueBotFlow.ts       | 12 ++++-
 packages/bot-engine/executeGroup.ts          | 35 +++++++++++--
 packages/bot-engine/startBotFlow.ts          |  4 ++
 packages/bot-engine/startSession.ts          |  1 +
 packages/bot-engine/types.ts                 |  1 +
 packages/embeds/js/package.json              |  2 +-
 .../ConversationContainer.tsx                | 11 ++++-
 .../js/src/components/InputChatBlock.tsx     |  5 +-
 packages/embeds/nextjs/package.json          |  2 +-
 packages/embeds/react/package.json           |  2 +-
 packages/env/env.ts                          |  1 +
 16 files changed, 124 insertions(+), 31 deletions(-)

diff --git a/apps/viewer/src/features/chat/api/continueChat.ts b/apps/viewer/src/features/chat/api/continueChat.ts
index 35a0bb768..e71cd24c6 100644
--- a/apps/viewer/src/features/chat/api/continueChat.ts
+++ b/apps/viewer/src/features/chat/api/continueChat.ts
@@ -54,7 +54,11 @@ export const continueChat = publicProcedure
       logs,
       lastMessageNewFormat,
       visitedEdges,
-    } = await continueBotFlow(message, { version: 2, state: session.state })
+    } = await continueBotFlow(message, {
+      version: 2,
+      state: session.state,
+      startTime: Date.now(),
+    })
 
     if (newSessionState)
       await saveStateToDatabase({
diff --git a/packages/bot-engine/blocks/integrations/openai/audio/createSpeechOpenAI.ts b/packages/bot-engine/blocks/integrations/openai/audio/createSpeechOpenAI.ts
index 9f57ce7b8..4ba966c83 100644
--- a/packages/bot-engine/blocks/integrations/openai/audio/createSpeechOpenAI.ts
+++ b/packages/bot-engine/blocks/integrations/openai/audio/createSpeechOpenAI.ts
@@ -115,6 +115,7 @@ export const createSpeechOpenAI = async (
   ])
 
   return {
+    startTimeShouldBeUpdated: true,
     outgoingEdgeId,
     newSessionState,
   }
diff --git a/packages/bot-engine/blocks/integrations/openai/createChatCompletionOpenAI.ts b/packages/bot-engine/blocks/integrations/openai/createChatCompletionOpenAI.ts
index 12fd584f0..a9bfabb42 100644
--- a/packages/bot-engine/blocks/integrations/openai/createChatCompletionOpenAI.ts
+++ b/packages/bot-engine/blocks/integrations/openai/createChatCompletionOpenAI.ts
@@ -120,6 +120,7 @@ export const createChatCompletionOpenAI = async (
   })
   if (!chatCompletion)
     return {
+      startTimeShouldBeUpdated: true,
       outgoingEdgeId,
       logs,
     }
@@ -127,13 +128,16 @@
   const totalTokens = chatCompletion.usage?.total_tokens
   if (isEmpty(messageContent)) {
     console.error('OpenAI block returned empty message', chatCompletion.choices)
-    return { outgoingEdgeId, newSessionState }
+    return { outgoingEdgeId, newSessionState, startTimeShouldBeUpdated: true }
+  }
+  return {
+    ...(await resumeChatCompletion(newSessionState, {
+      options,
+      outgoingEdgeId,
+      logs,
+    })(messageContent, totalTokens)),
+    startTimeShouldBeUpdated: true,
   }
-  return resumeChatCompletion(newSessionState, {
-    options,
-    outgoingEdgeId,
-    logs,
-  })(messageContent, totalTokens)
 }
 
 const isNextBubbleMessageWithAssistantMessage =
diff --git a/packages/bot-engine/blocks/integrations/webhook/executeWebhookBlock.ts b/packages/bot-engine/blocks/integrations/webhook/executeWebhookBlock.ts
index 60dbed542..68c6cb851 100644
--- a/packages/bot-engine/blocks/integrations/webhook/executeWebhookBlock.ts
+++ b/packages/bot-engine/blocks/integrations/webhook/executeWebhookBlock.ts
@@ -30,6 +30,15 @@ type ParsedWebhook = ExecutableWebhook & {
   isJson: boolean
 }
 
+export const responseDefaultTimeout = 10000
+export const longRequestTimeout = 120000
+
+const longReqTimeoutWhitelist = [
+  'https://api.openai.com',
+  'https://retune.so',
+  'https://www.chatbase.co',
+]
+
 export const executeWebhookBlock = async (
   state: SessionState,
   block: WebhookBlock | ZapierBlock | MakeComBlock | PabblyConnectBlock
@@ -64,14 +73,21 @@ export const executeWebhookBlock = async (
         },
       ],
     }
-  const { response: webhookResponse, logs: executeWebhookLogs } =
-    await executeWebhook(parsedWebhook)
-  return resumeWebhookExecution({
-    state,
-    block,
-    logs: executeWebhookLogs,
+  const {
     response: webhookResponse,
-  })
+    logs: executeWebhookLogs,
+    startTimeShouldBeUpdated,
+  } = await executeWebhook(parsedWebhook)
+
+  return {
+    ...resumeWebhookExecution({
+      state,
+      block,
+      logs: executeWebhookLogs,
+      response: webhookResponse,
+    }),
+    startTimeShouldBeUpdated,
+  }
 }
 
 const checkIfBodyIsAVariable = (body: string) => /^{{.+}}$/.test(body)
@@ -142,11 +158,19 @@ const parseWebhookAttributes =
 
 export const executeWebhook = async (
   webhook: ParsedWebhook
-): Promise<{ response: WebhookResponse; logs?: ChatLog[] }> => {
+): Promise<{
+  response: WebhookResponse
+  logs?: ChatLog[]
+  startTimeShouldBeUpdated?: boolean
+}> => {
   const logs: ChatLog[] = []
   const { headers, url, method, basicAuth, body, isJson } = webhook
   const contentType = headers ? headers['Content-Type'] : undefined
 
+  const isLongRequest = longReqTimeoutWhitelist.some((whiteListedUrl) =>
+    url?.includes(whiteListedUrl)
+  )
+
   const request = {
     url,
     method: method as Method,
@@ -159,7 +183,11 @@
     form:
       contentType?.includes('x-www-form-urlencoded') && body ? body : undefined,
     body: body && !isJson ? (body as string) : undefined,
+    timeout: {
+      response: isLongRequest ? longRequestTimeout : responseDefaultTimeout,
+    },
   } satisfies OptionsInit
+
   try {
     const response = await got(request.url, omit(request, 'url'))
     logs.push({
@@ -177,6 +205,7 @@ export const executeWebhook = async (
         data: safeJsonParse(response.body).data,
       },
       logs,
+      startTimeShouldBeUpdated: isLongRequest,
     }
   } catch (error) {
     if (error instanceof HTTPError) {
@@ -193,7 +222,7 @@ export const executeWebhook = async (
           response,
         },
       })
-      return { response, logs }
+      return { response, logs, startTimeShouldBeUpdated: isLongRequest }
     }
     const response = {
       statusCode: 500,
@@ -208,7 +237,7 @@ export const executeWebhook = async (
         response,
       },
     })
-    return { response, logs }
+    return { response, logs, startTimeShouldBeUpdated: isLongRequest }
   }
 }
 
diff --git a/packages/bot-engine/blocks/integrations/zemanticAi/executeZemanticAiBlock.ts b/packages/bot-engine/blocks/integrations/zemanticAi/executeZemanticAiBlock.ts
index 963625330..69a9fa54c 100644
--- a/packages/bot-engine/blocks/integrations/zemanticAi/executeZemanticAiBlock.ts
+++ b/packages/bot-engine/blocks/integrations/zemanticAi/executeZemanticAiBlock.ts
@@ -108,6 +108,7 @@ export const executeZemanticAiBlock = async (
   } catch (e) {
     console.error(e)
     return {
+      startTimeShouldBeUpdated: true,
      outgoingEdgeId: block.outgoingEdgeId,
       logs: [
         {
@@ -118,7 +119,11 @@
     }
   }
 
-  return { outgoingEdgeId: block.outgoingEdgeId, newSessionState }
+  return {
+    outgoingEdgeId: block.outgoingEdgeId,
+    newSessionState,
+    startTimeShouldBeUpdated: true,
+  }
 }
 
 const replaceTemplateVars = (
diff --git a/packages/bot-engine/continueBotFlow.ts b/packages/bot-engine/continueBotFlow.ts
index 0b39989e5..c3bf016eb 100644
--- a/packages/bot-engine/continueBotFlow.ts
+++ b/packages/bot-engine/continueBotFlow.ts
@@ -41,10 +41,11 @@ import { getBlockById } from '@typebot.io/lib/getBlockById'
 type Params = {
   version: 1 | 2
   state: SessionState
+  startTime?: number
 }
 export const continueBotFlow = async (
   reply: string | undefined,
-  { state, version }: Params
+  { state, version, startTime }: Params
 ): Promise<
   ContinueChatResponse & {
     newSessionState: SessionState
@@ -127,7 +128,13 @@ export const continueBotFlow = async (
       ...group,
       blocks: group.blocks.slice(blockIndex + 1),
     } as Group,
-    { version, state: newSessionState, visitedEdges, firstBubbleWasStreamed }
+    {
+      version,
+      state: newSessionState,
+      visitedEdges,
+      firstBubbleWasStreamed,
+      startTime,
+    }
   )
   return {
     ...chatReply,
@@ -165,6 +172,7 @@ export const continueBotFlow = async (
     state: newSessionState,
     firstBubbleWasStreamed,
     visitedEdges,
+    startTime,
   })
 
   return {
diff --git a/packages/bot-engine/executeGroup.ts b/packages/bot-engine/executeGroup.ts
index 14e67c5c0..f74a43bbf 100644
--- a/packages/bot-engine/executeGroup.ts
+++ b/packages/bot-engine/executeGroup.ts
@@ -27,6 +27,9 @@ import {
 } from './parseBubbleBlock'
 import { InputBlockType } from '@typebot.io/schemas/features/blocks/inputs/constants'
 import { VisitedEdge } from '@typebot.io/prisma'
+import { env } from '@typebot.io/env'
+import { TRPCError } from '@trpc/server'
+import { ExecuteIntegrationResponse, ExecuteLogicResponse } from './types'
 
 type ContextProps = {
   version: 1 | 2
@@ -35,6 +38,7 @@
   currentLastBubbleId?: string
   firstBubbleWasStreamed?: boolean
   visitedEdges: VisitedEdge[]
+  startTime?: number
 }
 
 export const executeGroup = async (
@@ -46,6 +50,7 @@ export const executeGroup = async (
     currentReply,
     currentLastBubbleId,
     firstBubbleWasStreamed,
+    startTime,
   }: ContextProps
 ): Promise<
   ContinueChatResponse & {
@@ -53,6 +58,7 @@
     visitedEdges: VisitedEdge[]
   }
 > => {
+  let newStartTime = startTime
   const messages: ContinueChatResponse['messages'] =
     currentReply?.messages ?? []
   let clientSideActions: ContinueChatResponse['clientSideActions'] =
@@ -65,6 +71,17 @@
   let index = -1
 
   for (const block of group.blocks) {
+    if (
+      newStartTime &&
+      env.CHAT_API_TIMEOUT &&
+      Date.now() - newStartTime > env.CHAT_API_TIMEOUT
+    ) {
+      throw new TRPCError({
+        code: 'TIMEOUT',
+        message: `${env.CHAT_API_TIMEOUT / 1000} seconds timeout reached`,
+      })
+    }
+
     index++
 
     nextEdgeId = block.outgoingEdgeId
@@ -93,13 +110,20 @@
       logs,
       visitedEdges,
     }
-    const executionResponse = isLogicBlock(block)
-      ? await executeLogic(newSessionState)(block)
-      : isIntegrationBlock(block)
-      ? await executeIntegration(newSessionState)(block)
-      : null
+    const executionResponse = (
+      isLogicBlock(block)
+        ? await executeLogic(newSessionState)(block)
+        : isIntegrationBlock(block)
+        ? await executeIntegration(newSessionState)(block)
+        : null
+    ) as ExecuteLogicResponse | ExecuteIntegrationResponse | null
 
     if (!executionResponse) continue
+    if (
+      'startTimeShouldBeUpdated' in executionResponse &&
+      executionResponse.startTimeShouldBeUpdated
+    )
+      newStartTime = Date.now()
     if (executionResponse.logs)
       logs = [...(logs ?? []), ...executionResponse.logs]
     if (executionResponse.newSessionState)
@@ -162,6 +186,7 @@
       logs,
     },
     currentLastBubbleId: lastBubbleBlockId,
+    startTime: newStartTime,
   })
 }
 
diff --git a/packages/bot-engine/startBotFlow.ts b/packages/bot-engine/startBotFlow.ts
index 88b841f8c..155e030b6 100644
--- a/packages/bot-engine/startBotFlow.ts
+++ b/packages/bot-engine/startBotFlow.ts
@@ -12,12 +12,14 @@ type Props = {
   version: 1 | 2
   state: SessionState
   startFrom?: StartFrom
+  startTime?: number
 }
 
 export const startBotFlow = async ({
   version,
   state,
   startFrom,
+  startTime,
 }: Props): Promise<
   ContinueChatResponse & {
     newSessionState: SessionState
@@ -39,6 +41,7 @@
       version,
       state: newSessionState,
       visitedEdges,
+      startTime,
     })
   }
   const firstEdgeId = getFirstEdgeId({
@@ -54,6 +57,7 @@
     version,
     state: newSessionState,
     visitedEdges,
+    startTime,
   })
 }
 
diff --git a/packages/bot-engine/startSession.ts b/packages/bot-engine/startSession.ts
index 8bce2c4e4..c20be32f1 100644
--- a/packages/bot-engine/startSession.ts
+++ b/packages/bot-engine/startSession.ts
@@ -157,6 +157,7 @@ export const startSession = async ({
       state: initialState,
       startFrom:
         startParams.type === 'preview' ? startParams.startFrom : undefined,
+      startTime: Date.now(),
     })
 
   // If params has message and first block is an input block, we can directly continue the bot flow
diff --git a/packages/bot-engine/types.ts b/packages/bot-engine/types.ts
index bee7f215e..60648ee43 100644
--- a/packages/bot-engine/types.ts
+++ b/packages/bot-engine/types.ts
@@ -10,6 +10,7 @@ export type ExecuteLogicResponse = {
 export type ExecuteIntegrationResponse = {
   outgoingEdgeId: EdgeId | undefined
   newSessionState?: SessionState
+  startTimeShouldBeUpdated?: boolean
 } & Pick
 
 export type ParsedReply =
diff --git a/packages/embeds/js/package.json b/packages/embeds/js/package.json
index 9477d2bce..9ed0fcbc0 100644
--- a/packages/embeds/js/package.json
+++ b/packages/embeds/js/package.json
@@ -1,6 +1,6 @@
 {
   "name": "@typebot.io/js",
-  "version": "0.2.25",
+  "version": "0.2.26",
   "description": "Javascript library to display typebots on your website",
   "type": "module",
   "main": "dist/index.js",
diff --git a/packages/embeds/js/src/components/ConversationContainer/ConversationContainer.tsx b/packages/embeds/js/src/components/ConversationContainer/ConversationContainer.tsx
index cb040f733..7934fbd4e 100644
--- a/packages/embeds/js/src/components/ConversationContainer/ConversationContainer.tsx
+++ b/packages/embeds/js/src/components/ConversationContainer/ConversationContainer.tsx
@@ -170,13 +170,20 @@ export const ConversationContainer = (props: Props) => {
     setIsSending(false)
     if (error) {
       setHasError(true)
-      props.onNewLogs?.([
+      const errorLogs = [
         {
           description: 'Failed to send the reply',
           details: error,
           status: 'error',
         },
-      ])
+      ]
+      await saveClientLogsQuery({
+        apiHost: props.context.apiHost,
+        sessionId: props.initialChatReply.sessionId,
+        clientLogs: errorLogs,
+      })
+      props.onNewLogs?.(errorLogs)
+      return
     }
     if (!data) return
     if (data.lastMessageNewFormat) {
diff --git a/packages/embeds/js/src/components/InputChatBlock.tsx b/packages/embeds/js/src/components/InputChatBlock.tsx
index 3982e6eae..a5b2154bb 100644
--- a/packages/embeds/js/src/components/InputChatBlock.tsx
+++ b/packages/embeds/js/src/components/InputChatBlock.tsx
@@ -103,6 +103,7 @@ export const InputChatBlock = (props: Props) => {
         block={props.block}
         inputIndex={props.inputIndex}
         isInputPrefillEnabled={props.isInputPrefillEnabled}
+        existingAnswer={props.hasError ? answer() : undefined}
         onTransitionEnd={props.onTransitionEnd}
         onSubmit={handleSubmit}
         onSkip={handleSkip}
@@ -118,6 +119,7 @@ const Input = (props: {
   block: NonNullable
   inputIndex: number
   isInputPrefillEnabled: boolean
+  existingAnswer?: string
   onTransitionEnd: () => void
   onSubmit: (answer: InputSubmitContent) => void
   onSkip: (label: string) => void
@@ -125,7 +127,8 @@ const Input = (props: {
   const onSubmit = (answer: InputSubmitContent) => props.onSubmit(answer)
 
   const getPrefilledValue = () =>
-    props.isInputPrefillEnabled ? props.block.prefilledValue : undefined
+    props.existingAnswer ??
+    (props.isInputPrefillEnabled ? props.block.prefilledValue : undefined)
 
   const submitPaymentSuccess = () =>
     props.onSubmit({
diff --git a/packages/embeds/nextjs/package.json b/packages/embeds/nextjs/package.json
index f285b90a0..c20eba617 100644
--- a/packages/embeds/nextjs/package.json
+++ b/packages/embeds/nextjs/package.json
@@ -1,6 +1,6 @@
 {
   "name": "@typebot.io/nextjs",
-  "version": "0.2.25",
+  "version": "0.2.26",
   "description": "Convenient library to display typebots on your Next.js website",
   "main": "dist/index.js",
   "types": "dist/index.d.ts",
diff --git a/packages/embeds/react/package.json b/packages/embeds/react/package.json
index 1d24fecce..3b90f2bdf 100644
--- a/packages/embeds/react/package.json
+++ b/packages/embeds/react/package.json
@@ -1,6 +1,6 @@
 {
   "name": "@typebot.io/react",
-  "version": "0.2.25",
+  "version": "0.2.26",
   "description": "Convenient library to display typebots on your React app",
   "main": "dist/index.js",
   "types": "dist/index.d.ts",
diff --git a/packages/env/env.ts b/packages/env/env.ts
index ffcce97b3..99b6bb15a 100644
--- a/packages/env/env.ts
+++ b/packages/env/env.ts
@@ -65,6 +65,7 @@ const baseEnv = {
       )
       .default('FREE'),
     DEBUG: boolean.optional().default('false'),
+    CHAT_API_TIMEOUT: z.coerce.number().optional(),
   },
   client: {
     NEXT_PUBLIC_E2E_TEST: boolean.optional(),
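
Note (not part of the patch): the core behaviour introduced here is the per-block timeout guard in executeGroup.ts. Blocks that are expected to run long (OpenAI, Zemantic AI, and webhooks to the whitelisted hosts api.openai.com, retune.so, chatbase.co, which also get the 120s instead of the 10s got response timeout) return startTimeShouldBeUpdated so the deadline is pushed forward. A minimal standalone TypeScript sketch of that control flow, with Block and ExecutionResponse as simplified stand-ins for the real bot-engine types:

// Sketch of the timeout guard added in executeGroup.ts.
// Block and ExecutionResponse are hypothetical stand-ins, not the real types.
type ExecutionResponse = { startTimeShouldBeUpdated?: boolean }
type Block = { execute: () => Promise<ExecutionResponse> }

export const runBlocks = async (
  blocks: Block[],
  timeoutMs: number | undefined, // plays the role of env.CHAT_API_TIMEOUT
  startTime = Date.now()
): Promise<void> => {
  let newStartTime = startTime
  for (const block of blocks) {
    // Abort once the time elapsed since the last reset exceeds the budget.
    if (timeoutMs && Date.now() - newStartTime > timeoutMs)
      throw new Error(`${timeoutMs / 1000} seconds timeout reached`)

    const response = await block.execute()

    // Long-running blocks reset the clock so the next block gets a fresh budget.
    if (response.startTimeShouldBeUpdated) newStartTime = Date.now()
  }
}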