2
0

Add dynamic timeout to bot engine API

This commit is contained in:
Baptiste Arnaud
2023-12-08 13:43:58 +00:00
parent 8819e9e567
commit 957eaf33dd
16 changed files with 124 additions and 31 deletions

View File

@@ -54,7 +54,11 @@ export const continueChat = publicProcedure
logs, logs,
lastMessageNewFormat, lastMessageNewFormat,
visitedEdges, visitedEdges,
} = await continueBotFlow(message, { version: 2, state: session.state }) } = await continueBotFlow(message, {
version: 2,
state: session.state,
startTime: Date.now(),
})
if (newSessionState) if (newSessionState)
await saveStateToDatabase({ await saveStateToDatabase({

View File

@@ -115,6 +115,7 @@ export const createSpeechOpenAI = async (
]) ])
return { return {
startTimeShouldBeUpdated: true,
outgoingEdgeId, outgoingEdgeId,
newSessionState, newSessionState,
} }

View File

@@ -120,6 +120,7 @@ export const createChatCompletionOpenAI = async (
}) })
if (!chatCompletion) if (!chatCompletion)
return { return {
startTimeShouldBeUpdated: true,
outgoingEdgeId, outgoingEdgeId,
logs, logs,
} }
@@ -127,13 +128,16 @@ export const createChatCompletionOpenAI = async (
const totalTokens = chatCompletion.usage?.total_tokens const totalTokens = chatCompletion.usage?.total_tokens
if (isEmpty(messageContent)) { if (isEmpty(messageContent)) {
console.error('OpenAI block returned empty message', chatCompletion.choices) console.error('OpenAI block returned empty message', chatCompletion.choices)
return { outgoingEdgeId, newSessionState } return { outgoingEdgeId, newSessionState, startTimeShouldBeUpdated: true }
}
return {
...(await resumeChatCompletion(newSessionState, {
options,
outgoingEdgeId,
logs,
})(messageContent, totalTokens)),
startTimeShouldBeUpdated: true,
} }
return resumeChatCompletion(newSessionState, {
options,
outgoingEdgeId,
logs,
})(messageContent, totalTokens)
} }
const isNextBubbleMessageWithAssistantMessage = const isNextBubbleMessageWithAssistantMessage =

View File

@@ -30,6 +30,15 @@ type ParsedWebhook = ExecutableWebhook & {
isJson: boolean isJson: boolean
} }
export const responseDefaultTimeout = 10000
export const longRequestTimeout = 120000
const longReqTimeoutWhitelist = [
'https://api.openai.com',
'https://retune.so',
'https://www.chatbase.co',
]
export const executeWebhookBlock = async ( export const executeWebhookBlock = async (
state: SessionState, state: SessionState,
block: WebhookBlock | ZapierBlock | MakeComBlock | PabblyConnectBlock block: WebhookBlock | ZapierBlock | MakeComBlock | PabblyConnectBlock
@@ -64,14 +73,21 @@ export const executeWebhookBlock = async (
}, },
], ],
} }
const { response: webhookResponse, logs: executeWebhookLogs } = const {
await executeWebhook(parsedWebhook)
return resumeWebhookExecution({
state,
block,
logs: executeWebhookLogs,
response: webhookResponse, response: webhookResponse,
}) logs: executeWebhookLogs,
startTimeShouldBeUpdated,
} = await executeWebhook(parsedWebhook)
return {
...resumeWebhookExecution({
state,
block,
logs: executeWebhookLogs,
response: webhookResponse,
}),
startTimeShouldBeUpdated,
}
} }
const checkIfBodyIsAVariable = (body: string) => /^{{.+}}$/.test(body) const checkIfBodyIsAVariable = (body: string) => /^{{.+}}$/.test(body)
@@ -142,11 +158,19 @@ const parseWebhookAttributes =
export const executeWebhook = async ( export const executeWebhook = async (
webhook: ParsedWebhook webhook: ParsedWebhook
): Promise<{ response: WebhookResponse; logs?: ChatLog[] }> => { ): Promise<{
response: WebhookResponse
logs?: ChatLog[]
startTimeShouldBeUpdated?: boolean
}> => {
const logs: ChatLog[] = [] const logs: ChatLog[] = []
const { headers, url, method, basicAuth, body, isJson } = webhook const { headers, url, method, basicAuth, body, isJson } = webhook
const contentType = headers ? headers['Content-Type'] : undefined const contentType = headers ? headers['Content-Type'] : undefined
const isLongRequest = longReqTimeoutWhitelist.some((whiteListedUrl) =>
url?.includes(whiteListedUrl)
)
const request = { const request = {
url, url,
method: method as Method, method: method as Method,
@@ -159,7 +183,11 @@ export const executeWebhook = async (
form: form:
contentType?.includes('x-www-form-urlencoded') && body ? body : undefined, contentType?.includes('x-www-form-urlencoded') && body ? body : undefined,
body: body && !isJson ? (body as string) : undefined, body: body && !isJson ? (body as string) : undefined,
timeout: {
response: isLongRequest ? longRequestTimeout : responseDefaultTimeout,
},
} satisfies OptionsInit } satisfies OptionsInit
try { try {
const response = await got(request.url, omit(request, 'url')) const response = await got(request.url, omit(request, 'url'))
logs.push({ logs.push({
@@ -177,6 +205,7 @@ export const executeWebhook = async (
data: safeJsonParse(response.body).data, data: safeJsonParse(response.body).data,
}, },
logs, logs,
startTimeShouldBeUpdated: isLongRequest,
} }
} catch (error) { } catch (error) {
if (error instanceof HTTPError) { if (error instanceof HTTPError) {
@@ -193,7 +222,7 @@ export const executeWebhook = async (
response, response,
}, },
}) })
return { response, logs } return { response, logs, startTimeShouldBeUpdated: isLongRequest }
} }
const response = { const response = {
statusCode: 500, statusCode: 500,
@@ -208,7 +237,7 @@ export const executeWebhook = async (
response, response,
}, },
}) })
return { response, logs } return { response, logs, startTimeShouldBeUpdated: isLongRequest }
} }
} }

View File

@@ -108,6 +108,7 @@ export const executeZemanticAiBlock = async (
} catch (e) { } catch (e) {
console.error(e) console.error(e)
return { return {
startTimeShouldBeUpdated: true,
outgoingEdgeId: block.outgoingEdgeId, outgoingEdgeId: block.outgoingEdgeId,
logs: [ logs: [
{ {
@@ -118,7 +119,11 @@ export const executeZemanticAiBlock = async (
} }
} }
return { outgoingEdgeId: block.outgoingEdgeId, newSessionState } return {
outgoingEdgeId: block.outgoingEdgeId,
newSessionState,
startTimeShouldBeUpdated: true,
}
} }
const replaceTemplateVars = ( const replaceTemplateVars = (

View File

@@ -41,10 +41,11 @@ import { getBlockById } from '@typebot.io/lib/getBlockById'
type Params = { type Params = {
version: 1 | 2 version: 1 | 2
state: SessionState state: SessionState
startTime?: number
} }
export const continueBotFlow = async ( export const continueBotFlow = async (
reply: string | undefined, reply: string | undefined,
{ state, version }: Params { state, version, startTime }: Params
): Promise< ): Promise<
ContinueChatResponse & { ContinueChatResponse & {
newSessionState: SessionState newSessionState: SessionState
@@ -127,7 +128,13 @@ export const continueBotFlow = async (
...group, ...group,
blocks: group.blocks.slice(blockIndex + 1), blocks: group.blocks.slice(blockIndex + 1),
} as Group, } as Group,
{ version, state: newSessionState, visitedEdges, firstBubbleWasStreamed } {
version,
state: newSessionState,
visitedEdges,
firstBubbleWasStreamed,
startTime,
}
) )
return { return {
...chatReply, ...chatReply,
@@ -165,6 +172,7 @@ export const continueBotFlow = async (
state: newSessionState, state: newSessionState,
firstBubbleWasStreamed, firstBubbleWasStreamed,
visitedEdges, visitedEdges,
startTime,
}) })
return { return {

View File

@@ -27,6 +27,9 @@ import {
} from './parseBubbleBlock' } from './parseBubbleBlock'
import { InputBlockType } from '@typebot.io/schemas/features/blocks/inputs/constants' import { InputBlockType } from '@typebot.io/schemas/features/blocks/inputs/constants'
import { VisitedEdge } from '@typebot.io/prisma' import { VisitedEdge } from '@typebot.io/prisma'
import { env } from '@typebot.io/env'
import { TRPCError } from '@trpc/server'
import { ExecuteIntegrationResponse, ExecuteLogicResponse } from './types'
type ContextProps = { type ContextProps = {
version: 1 | 2 version: 1 | 2
@@ -35,6 +38,7 @@ type ContextProps = {
currentLastBubbleId?: string currentLastBubbleId?: string
firstBubbleWasStreamed?: boolean firstBubbleWasStreamed?: boolean
visitedEdges: VisitedEdge[] visitedEdges: VisitedEdge[]
startTime?: number
} }
export const executeGroup = async ( export const executeGroup = async (
@@ -46,6 +50,7 @@ export const executeGroup = async (
currentReply, currentReply,
currentLastBubbleId, currentLastBubbleId,
firstBubbleWasStreamed, firstBubbleWasStreamed,
startTime,
}: ContextProps }: ContextProps
): Promise< ): Promise<
ContinueChatResponse & { ContinueChatResponse & {
@@ -53,6 +58,7 @@ export const executeGroup = async (
visitedEdges: VisitedEdge[] visitedEdges: VisitedEdge[]
} }
> => { > => {
let newStartTime = startTime
const messages: ContinueChatResponse['messages'] = const messages: ContinueChatResponse['messages'] =
currentReply?.messages ?? [] currentReply?.messages ?? []
let clientSideActions: ContinueChatResponse['clientSideActions'] = let clientSideActions: ContinueChatResponse['clientSideActions'] =
@@ -65,6 +71,17 @@ export const executeGroup = async (
let index = -1 let index = -1
for (const block of group.blocks) { for (const block of group.blocks) {
if (
newStartTime &&
env.CHAT_API_TIMEOUT &&
Date.now() - newStartTime > env.CHAT_API_TIMEOUT
) {
throw new TRPCError({
code: 'TIMEOUT',
message: `${env.CHAT_API_TIMEOUT / 1000} seconds timeout reached`,
})
}
index++ index++
nextEdgeId = block.outgoingEdgeId nextEdgeId = block.outgoingEdgeId
@@ -93,13 +110,20 @@ export const executeGroup = async (
logs, logs,
visitedEdges, visitedEdges,
} }
const executionResponse = isLogicBlock(block) const executionResponse = (
? await executeLogic(newSessionState)(block) isLogicBlock(block)
: isIntegrationBlock(block) ? await executeLogic(newSessionState)(block)
? await executeIntegration(newSessionState)(block) : isIntegrationBlock(block)
: null ? await executeIntegration(newSessionState)(block)
: null
) as ExecuteLogicResponse | ExecuteIntegrationResponse | null
if (!executionResponse) continue if (!executionResponse) continue
if (
'startTimeShouldBeUpdated' in executionResponse &&
executionResponse.startTimeShouldBeUpdated
)
newStartTime = Date.now()
if (executionResponse.logs) if (executionResponse.logs)
logs = [...(logs ?? []), ...executionResponse.logs] logs = [...(logs ?? []), ...executionResponse.logs]
if (executionResponse.newSessionState) if (executionResponse.newSessionState)
@@ -162,6 +186,7 @@ export const executeGroup = async (
logs, logs,
}, },
currentLastBubbleId: lastBubbleBlockId, currentLastBubbleId: lastBubbleBlockId,
startTime: newStartTime,
}) })
} }

View File

@@ -12,12 +12,14 @@ type Props = {
version: 1 | 2 version: 1 | 2
state: SessionState state: SessionState
startFrom?: StartFrom startFrom?: StartFrom
startTime?: number
} }
export const startBotFlow = async ({ export const startBotFlow = async ({
version, version,
state, state,
startFrom, startFrom,
startTime,
}: Props): Promise< }: Props): Promise<
ContinueChatResponse & { ContinueChatResponse & {
newSessionState: SessionState newSessionState: SessionState
@@ -39,6 +41,7 @@ export const startBotFlow = async ({
version, version,
state: newSessionState, state: newSessionState,
visitedEdges, visitedEdges,
startTime,
}) })
} }
const firstEdgeId = getFirstEdgeId({ const firstEdgeId = getFirstEdgeId({
@@ -54,6 +57,7 @@ export const startBotFlow = async ({
version, version,
state: newSessionState, state: newSessionState,
visitedEdges, visitedEdges,
startTime,
}) })
} }

View File

@@ -157,6 +157,7 @@ export const startSession = async ({
state: initialState, state: initialState,
startFrom: startFrom:
startParams.type === 'preview' ? startParams.startFrom : undefined, startParams.type === 'preview' ? startParams.startFrom : undefined,
startTime: Date.now(),
}) })
// If params has message and first block is an input block, we can directly continue the bot flow // If params has message and first block is an input block, we can directly continue the bot flow

View File

@@ -10,6 +10,7 @@ export type ExecuteLogicResponse = {
export type ExecuteIntegrationResponse = { export type ExecuteIntegrationResponse = {
outgoingEdgeId: EdgeId | undefined outgoingEdgeId: EdgeId | undefined
newSessionState?: SessionState newSessionState?: SessionState
startTimeShouldBeUpdated?: boolean
} & Pick<ContinueChatResponse, 'clientSideActions' | 'logs'> } & Pick<ContinueChatResponse, 'clientSideActions' | 'logs'>
export type ParsedReply = export type ParsedReply =

View File

@@ -1,6 +1,6 @@
{ {
"name": "@typebot.io/js", "name": "@typebot.io/js",
"version": "0.2.25", "version": "0.2.26",
"description": "Javascript library to display typebots on your website", "description": "Javascript library to display typebots on your website",
"type": "module", "type": "module",
"main": "dist/index.js", "main": "dist/index.js",

View File

@@ -170,13 +170,20 @@ export const ConversationContainer = (props: Props) => {
setIsSending(false) setIsSending(false)
if (error) { if (error) {
setHasError(true) setHasError(true)
props.onNewLogs?.([ const errorLogs = [
{ {
description: 'Failed to send the reply', description: 'Failed to send the reply',
details: error, details: error,
status: 'error', status: 'error',
}, },
]) ]
await saveClientLogsQuery({
apiHost: props.context.apiHost,
sessionId: props.initialChatReply.sessionId,
clientLogs: errorLogs,
})
props.onNewLogs?.(errorLogs)
return
} }
if (!data) return if (!data) return
if (data.lastMessageNewFormat) { if (data.lastMessageNewFormat) {

View File

@@ -103,6 +103,7 @@ export const InputChatBlock = (props: Props) => {
block={props.block} block={props.block}
inputIndex={props.inputIndex} inputIndex={props.inputIndex}
isInputPrefillEnabled={props.isInputPrefillEnabled} isInputPrefillEnabled={props.isInputPrefillEnabled}
existingAnswer={props.hasError ? answer() : undefined}
onTransitionEnd={props.onTransitionEnd} onTransitionEnd={props.onTransitionEnd}
onSubmit={handleSubmit} onSubmit={handleSubmit}
onSkip={handleSkip} onSkip={handleSkip}
@@ -118,6 +119,7 @@ const Input = (props: {
block: NonNullable<ContinueChatResponse['input']> block: NonNullable<ContinueChatResponse['input']>
inputIndex: number inputIndex: number
isInputPrefillEnabled: boolean isInputPrefillEnabled: boolean
existingAnswer?: string
onTransitionEnd: () => void onTransitionEnd: () => void
onSubmit: (answer: InputSubmitContent) => void onSubmit: (answer: InputSubmitContent) => void
onSkip: (label: string) => void onSkip: (label: string) => void
@@ -125,7 +127,8 @@ const Input = (props: {
const onSubmit = (answer: InputSubmitContent) => props.onSubmit(answer) const onSubmit = (answer: InputSubmitContent) => props.onSubmit(answer)
const getPrefilledValue = () => const getPrefilledValue = () =>
props.isInputPrefillEnabled ? props.block.prefilledValue : undefined props.existingAnswer ??
(props.isInputPrefillEnabled ? props.block.prefilledValue : undefined)
const submitPaymentSuccess = () => const submitPaymentSuccess = () =>
props.onSubmit({ props.onSubmit({

View File

@@ -1,6 +1,6 @@
{ {
"name": "@typebot.io/nextjs", "name": "@typebot.io/nextjs",
"version": "0.2.25", "version": "0.2.26",
"description": "Convenient library to display typebots on your Next.js website", "description": "Convenient library to display typebots on your Next.js website",
"main": "dist/index.js", "main": "dist/index.js",
"types": "dist/index.d.ts", "types": "dist/index.d.ts",

View File

@@ -1,6 +1,6 @@
{ {
"name": "@typebot.io/react", "name": "@typebot.io/react",
"version": "0.2.25", "version": "0.2.26",
"description": "Convenient library to display typebots on your React app", "description": "Convenient library to display typebots on your React app",
"main": "dist/index.js", "main": "dist/index.js",
"types": "dist/index.d.ts", "types": "dist/index.d.ts",

1
packages/env/env.ts vendored
View File

@@ -65,6 +65,7 @@ const baseEnv = {
) )
.default('FREE'), .default('FREE'),
DEBUG: boolean.optional().default('false'), DEBUG: boolean.optional().default('false'),
CHAT_API_TIMEOUT: z.coerce.number().optional(),
}, },
client: { client: {
NEXT_PUBLIC_E2E_TEST: boolean.optional(), NEXT_PUBLIC_E2E_TEST: boolean.optional(),