
Add dynamic timeout to bot engine API

Baptiste Arnaud
2023-12-08 13:43:58 +00:00
parent 8819e9e567
commit 957eaf33dd
16 changed files with 124 additions and 31 deletions
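In short: startSession now records a start time, executeGroup checks the elapsed time against an optional CHAT_API_TIMEOUT environment variable before running each block, and blocks that may run long (OpenAI, Zemantic AI, whitelisted webhooks) report startTimeShouldBeUpdated so the clock restarts after they finish. A minimal standalone sketch of that control flow, with hypothetical runBlock/BlockResult stand-ins for the engine's real types:

// Simplified sketch of the dynamic timeout loop (not the engine's actual code).
// `BlockResult` and `runBlock` are illustrative stand-ins.
type BlockResult = {
  // Long-running blocks set this so the timeout clock restarts after they complete.
  startTimeShouldBeUpdated?: boolean
}

const CHAT_API_TIMEOUT = Number(process.env.CHAT_API_TIMEOUT ?? 0) // ms; 0 disables the check

export const executeBlocks = async (
  blocks: Array<() => Promise<BlockResult>>,
  startTime = Date.now()
) => {
  let newStartTime = startTime
  for (const runBlock of blocks) {
    // Abort if too much time has passed since the start (or since the last long-running block).
    if (CHAT_API_TIMEOUT && Date.now() - newStartTime > CHAT_API_TIMEOUT)
      throw new Error(`${CHAT_API_TIMEOUT / 1000} seconds timeout reached`)
    const result = await runBlock()
    if (result.startTimeShouldBeUpdated) newStartTime = Date.now()
  }
}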

View File

@ -115,6 +115,7 @@ export const createSpeechOpenAI = async (
   ])
   return {
+    startTimeShouldBeUpdated: true,
     outgoingEdgeId,
     newSessionState,
   }

View File

@ -120,6 +120,7 @@ export const createChatCompletionOpenAI = async (
   })
   if (!chatCompletion)
     return {
+      startTimeShouldBeUpdated: true,
       outgoingEdgeId,
       logs,
     }
@ -127,13 +128,16 @@ export const createChatCompletionOpenAI = async (
   const totalTokens = chatCompletion.usage?.total_tokens
   if (isEmpty(messageContent)) {
     console.error('OpenAI block returned empty message', chatCompletion.choices)
-    return { outgoingEdgeId, newSessionState }
+    return { outgoingEdgeId, newSessionState, startTimeShouldBeUpdated: true }
   }
-  return resumeChatCompletion(newSessionState, {
-    options,
-    outgoingEdgeId,
-    logs,
-  })(messageContent, totalTokens)
+  return {
+    ...(await resumeChatCompletion(newSessionState, {
+      options,
+      outgoingEdgeId,
+      logs,
+    })(messageContent, totalTokens)),
+    startTimeShouldBeUpdated: true,
+  }
 }
 const isNextBubbleMessageWithAssistantMessage =

View File

@ -30,6 +30,15 @@ type ParsedWebhook = ExecutableWebhook & {
   isJson: boolean
 }
+export const responseDefaultTimeout = 10000
+export const longRequestTimeout = 120000
+const longReqTimeoutWhitelist = [
+  'https://api.openai.com',
+  'https://retune.so',
+  'https://www.chatbase.co',
+]
 export const executeWebhookBlock = async (
   state: SessionState,
   block: WebhookBlock | ZapierBlock | MakeComBlock | PabblyConnectBlock
@ -64,14 +73,21 @@ export const executeWebhookBlock = async (
       },
     ],
   }
-  const { response: webhookResponse, logs: executeWebhookLogs } =
-    await executeWebhook(parsedWebhook)
-  return resumeWebhookExecution({
-    state,
-    block,
-    logs: executeWebhookLogs,
-    response: webhookResponse,
-  })
+  const {
+    response: webhookResponse,
+    logs: executeWebhookLogs,
+    startTimeShouldBeUpdated,
+  } = await executeWebhook(parsedWebhook)
+  return {
+    ...resumeWebhookExecution({
+      state,
+      block,
+      logs: executeWebhookLogs,
+      response: webhookResponse,
+    }),
+    startTimeShouldBeUpdated,
+  }
 }
 const checkIfBodyIsAVariable = (body: string) => /^{{.+}}$/.test(body)
@ -142,11 +158,19 @@ const parseWebhookAttributes =
 export const executeWebhook = async (
   webhook: ParsedWebhook
-): Promise<{ response: WebhookResponse; logs?: ChatLog[] }> => {
+): Promise<{
+  response: WebhookResponse
+  logs?: ChatLog[]
+  startTimeShouldBeUpdated?: boolean
+}> => {
   const logs: ChatLog[] = []
   const { headers, url, method, basicAuth, body, isJson } = webhook
   const contentType = headers ? headers['Content-Type'] : undefined
+  const isLongRequest = longReqTimeoutWhitelist.some((whiteListedUrl) =>
+    url?.includes(whiteListedUrl)
+  )
   const request = {
     url,
     method: method as Method,
@ -159,7 +183,11 @@ export const executeWebhook = async (
     form:
       contentType?.includes('x-www-form-urlencoded') && body ? body : undefined,
     body: body && !isJson ? (body as string) : undefined,
+    timeout: {
+      response: isLongRequest ? longRequestTimeout : responseDefaultTimeout,
+    },
   } satisfies OptionsInit
   try {
     const response = await got(request.url, omit(request, 'url'))
     logs.push({
@ -177,6 +205,7 @@ export const executeWebhook = async (
         data: safeJsonParse(response.body).data,
       },
       logs,
+      startTimeShouldBeUpdated: isLongRequest,
     }
   } catch (error) {
     if (error instanceof HTTPError) {
@ -193,7 +222,7 @@ export const executeWebhook = async (
           response,
         },
       })
-      return { response, logs }
+      return { response, logs, startTimeShouldBeUpdated: isLongRequest }
     }
     const response = {
       statusCode: 500,
@ -208,7 +237,7 @@ export const executeWebhook = async (
         response,
       },
     })
-    return { response, logs }
+    return { response, logs, startTimeShouldBeUpdated: isLongRequest }
   }
 }
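Only hosts on the whitelist above get the relaxed 120 s response timeout; every other webhook keeps the 10 s default. A hedged sketch of that selection logic in isolation (the fetchWebhook helper is illustrative; the timeout shape follows got's OptionsInit as used above):

import got, { type OptionsInit } from 'got'

// Illustrative constants mirroring the ones introduced in this file.
const responseDefaultTimeout = 10000
const longRequestTimeout = 120000
const longReqTimeoutWhitelist = ['https://api.openai.com']

// Hypothetical helper: pick the response timeout based on the target host.
export const fetchWebhook = async (url: string) => {
  const isLongRequest = longReqTimeoutWhitelist.some((whiteListedUrl) =>
    url.includes(whiteListedUrl)
  )
  const options = {
    timeout: {
      response: isLongRequest ? longRequestTimeout : responseDefaultTimeout,
    },
  } satisfies OptionsInit
  return got(url, options)
}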

View File

@ -108,6 +108,7 @@ export const executeZemanticAiBlock = async (
   } catch (e) {
     console.error(e)
     return {
+      startTimeShouldBeUpdated: true,
       outgoingEdgeId: block.outgoingEdgeId,
       logs: [
         {
@ -118,7 +119,11 @@ export const executeZemanticAiBlock = async (
     }
   }
-  return { outgoingEdgeId: block.outgoingEdgeId, newSessionState }
+  return {
+    outgoingEdgeId: block.outgoingEdgeId,
+    newSessionState,
+    startTimeShouldBeUpdated: true,
+  }
 }
 const replaceTemplateVars = (

View File

@ -41,10 +41,11 @@ import { getBlockById } from '@typebot.io/lib/getBlockById'
 type Params = {
   version: 1 | 2
   state: SessionState
+  startTime?: number
 }
 export const continueBotFlow = async (
   reply: string | undefined,
-  { state, version }: Params
+  { state, version, startTime }: Params
 ): Promise<
   ContinueChatResponse & {
     newSessionState: SessionState
@ -127,7 +128,13 @@ export const continueBotFlow = async (
       ...group,
       blocks: group.blocks.slice(blockIndex + 1),
     } as Group,
-    { version, state: newSessionState, visitedEdges, firstBubbleWasStreamed }
+    {
+      version,
+      state: newSessionState,
+      visitedEdges,
+      firstBubbleWasStreamed,
+      startTime,
+    }
   )
   return {
     ...chatReply,
@ -165,6 +172,7 @@ export const continueBotFlow = async (
     state: newSessionState,
     firstBubbleWasStreamed,
     visitedEdges,
+    startTime,
   })
   return {

View File

@ -27,6 +27,9 @@ import {
 } from './parseBubbleBlock'
 import { InputBlockType } from '@typebot.io/schemas/features/blocks/inputs/constants'
 import { VisitedEdge } from '@typebot.io/prisma'
+import { env } from '@typebot.io/env'
+import { TRPCError } from '@trpc/server'
+import { ExecuteIntegrationResponse, ExecuteLogicResponse } from './types'
 type ContextProps = {
   version: 1 | 2
@ -35,6 +38,7 @@ type ContextProps = {
   currentLastBubbleId?: string
   firstBubbleWasStreamed?: boolean
   visitedEdges: VisitedEdge[]
+  startTime?: number
 }
 export const executeGroup = async (
@ -46,6 +50,7 @@ export const executeGroup = async (
     currentReply,
     currentLastBubbleId,
     firstBubbleWasStreamed,
+    startTime,
   }: ContextProps
 ): Promise<
   ContinueChatResponse & {
@ -53,6 +58,7 @@ export const executeGroup = async (
     visitedEdges: VisitedEdge[]
   }
 > => {
+  let newStartTime = startTime
   const messages: ContinueChatResponse['messages'] =
     currentReply?.messages ?? []
   let clientSideActions: ContinueChatResponse['clientSideActions'] =
@ -65,6 +71,17 @@ export const executeGroup = async (
   let index = -1
   for (const block of group.blocks) {
+    if (
+      newStartTime &&
+      env.CHAT_API_TIMEOUT &&
+      Date.now() - newStartTime > env.CHAT_API_TIMEOUT
+    ) {
+      throw new TRPCError({
+        code: 'TIMEOUT',
+        message: `${env.CHAT_API_TIMEOUT / 1000} seconds timeout reached`,
+      })
+    }
     index++
     nextEdgeId = block.outgoingEdgeId
@ -93,13 +110,20 @@ export const executeGroup = async (
         logs,
         visitedEdges,
       }
-    const executionResponse = isLogicBlock(block)
-      ? await executeLogic(newSessionState)(block)
-      : isIntegrationBlock(block)
-      ? await executeIntegration(newSessionState)(block)
-      : null
+    const executionResponse = (
+      isLogicBlock(block)
+        ? await executeLogic(newSessionState)(block)
+        : isIntegrationBlock(block)
+        ? await executeIntegration(newSessionState)(block)
+        : null
+    ) as ExecuteLogicResponse | ExecuteIntegrationResponse | null
     if (!executionResponse) continue
+    if (
+      'startTimeShouldBeUpdated' in executionResponse &&
+      executionResponse.startTimeShouldBeUpdated
+    )
+      newStartTime = Date.now()
     if (executionResponse.logs)
       logs = [...(logs ?? []), ...executionResponse.logs]
     if (executionResponse.newSessionState)
@ -162,6 +186,7 @@ export const executeGroup = async (
       logs,
     },
     currentLastBubbleId: lastBubbleBlockId,
+    startTime: newStartTime,
   })
 }

View File

@ -12,12 +12,14 @@ type Props = {
   version: 1 | 2
   state: SessionState
   startFrom?: StartFrom
+  startTime?: number
 }
 export const startBotFlow = async ({
   version,
   state,
   startFrom,
+  startTime,
 }: Props): Promise<
   ContinueChatResponse & {
     newSessionState: SessionState
@ -39,6 +41,7 @@ export const startBotFlow = async ({
       version,
       state: newSessionState,
       visitedEdges,
+      startTime,
     })
   }
   const firstEdgeId = getFirstEdgeId({
@ -54,6 +57,7 @@ export const startBotFlow = async ({
     version,
     state: newSessionState,
     visitedEdges,
+    startTime,
   })
 }

View File

@ -157,6 +157,7 @@ export const startSession = async ({
     state: initialState,
     startFrom:
       startParams.type === 'preview' ? startParams.startFrom : undefined,
+    startTime: Date.now(),
   })
   // If params has message and first block is an input block, we can directly continue the bot flow

View File

@ -10,6 +10,7 @@ export type ExecuteLogicResponse = {
 export type ExecuteIntegrationResponse = {
   outgoingEdgeId: EdgeId | undefined
   newSessionState?: SessionState
+  startTimeShouldBeUpdated?: boolean
 } & Pick<ContinueChatResponse, 'clientSideActions' | 'logs'>
 export type ParsedReply =

View File

@ -1,6 +1,6 @@
 {
   "name": "@typebot.io/js",
-  "version": "0.2.25",
+  "version": "0.2.26",
   "description": "Javascript library to display typebots on your website",
   "type": "module",
   "main": "dist/index.js",

View File

@ -170,13 +170,20 @@ export const ConversationContainer = (props: Props) => {
     setIsSending(false)
     if (error) {
       setHasError(true)
-      props.onNewLogs?.([
+      const errorLogs = [
         {
           description: 'Failed to send the reply',
           details: error,
           status: 'error',
         },
-      ])
+      ]
+      await saveClientLogsQuery({
+        apiHost: props.context.apiHost,
+        sessionId: props.initialChatReply.sessionId,
+        clientLogs: errorLogs,
+      })
+      props.onNewLogs?.(errorLogs)
       return
     }
     if (!data) return
     if (data.lastMessageNewFormat) {

View File

@ -103,6 +103,7 @@ export const InputChatBlock = (props: Props) => {
       block={props.block}
       inputIndex={props.inputIndex}
       isInputPrefillEnabled={props.isInputPrefillEnabled}
+      existingAnswer={props.hasError ? answer() : undefined}
       onTransitionEnd={props.onTransitionEnd}
       onSubmit={handleSubmit}
       onSkip={handleSkip}
@ -118,6 +119,7 @@ const Input = (props: {
   block: NonNullable<ContinueChatResponse['input']>
   inputIndex: number
   isInputPrefillEnabled: boolean
+  existingAnswer?: string
   onTransitionEnd: () => void
   onSubmit: (answer: InputSubmitContent) => void
   onSkip: (label: string) => void
@ -125,7 +127,8 @@ const Input = (props: {
   const onSubmit = (answer: InputSubmitContent) => props.onSubmit(answer)
   const getPrefilledValue = () =>
-    props.isInputPrefillEnabled ? props.block.prefilledValue : undefined
+    props.existingAnswer ??
+    (props.isInputPrefillEnabled ? props.block.prefilledValue : undefined)
   const submitPaymentSuccess = () =>
     props.onSubmit({

View File

@ -1,6 +1,6 @@
 {
   "name": "@typebot.io/nextjs",
-  "version": "0.2.25",
+  "version": "0.2.26",
   "description": "Convenient library to display typebots on your Next.js website",
   "main": "dist/index.js",
   "types": "dist/index.d.ts",

View File

@ -1,6 +1,6 @@
 {
   "name": "@typebot.io/react",
-  "version": "0.2.25",
+  "version": "0.2.26",
   "description": "Convenient library to display typebots on your React app",
   "main": "dist/index.js",
   "types": "dist/index.d.ts",

packages/env/env.ts
View File

@ -65,6 +65,7 @@ const baseEnv = {
     )
     .default('FREE'),
   DEBUG: boolean.optional().default('false'),
+  CHAT_API_TIMEOUT: z.coerce.number().optional(),
 },
 client: {
   NEXT_PUBLIC_E2E_TEST: boolean.optional(),
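For reference, a minimal sketch of how an optional z.coerce.number() entry like this behaves; the value is read as milliseconds, matching the Date.now() comparison in executeGroup:

import { z } from 'zod'

// Same shape as the CHAT_API_TIMEOUT entry above: unset means no global timeout.
const schema = z.object({
  CHAT_API_TIMEOUT: z.coerce.number().optional(),
})

const env = schema.parse(process.env)
// CHAT_API_TIMEOUT=120000 -> env.CHAT_API_TIMEOUT === 120000 (a number)
// unset                   -> env.CHAT_API_TIMEOUT === undefined
if (env.CHAT_API_TIMEOUT)
  console.log(`Bot flows abort after ${env.CHAT_API_TIMEOUT / 1000}s`)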