⚡ (openai) Stream chat completion to avoid serverless timeout (#526)
Closes #520
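
How the pieces fit together: when the viewer runs on Vercel with PlanetScale and the OpenAI credentials use the newer encryption (isCredentialsV2), createChatCompletionOpenAI no longer waits for the full completion inside the serverless function. It returns a streamOpenAiChatCompletion client-side action instead; the client then streams the completion from an endpoint backed by the new getChatCompletionStream helper and, once the text is complete, presumably sends it back as the reply so continueBotFlow can hand it to resumeChatCompletion, which maps it into the configured variables. A minimal sketch of an Edge API route that could expose the helper (the route path, request shape, and DATABASE_URL variable are assumptions, not part of this commit):

// pages/api/integrations/openai/streamer.ts (hypothetical path)
import { connect } from '@planetscale/database'
import { getChatCompletionStream } from '@/features/blocks/integrations/openai/getChatCompletionStream'

export const config = { runtime: 'edge' }

export default async function handler(req: Request) {
  // Assumed request body: session state, block options and messages gathered by the client
  const { state, options, messages } = await req.json()
  const conn = connect({ url: process.env.DATABASE_URL })
  const stream = await getChatCompletionStream(conn)(state, options, messages)
  if (!stream) return new Response('Could not create stream', { status: 400 })
  // Streaming the body back keeps the request within Edge limits instead of
  // holding a serverless function open until the completion finishes
  return new Response(stream, {
    headers: { 'Content-Type': 'text/plain; charset=utf-8' },
  })
}
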
@@ -88,7 +88,10 @@ const getStripeInfo = async (
     where: { id: credentialsId },
   })
   if (!credentials) return
-  return decrypt(credentials.data, credentials.iv) as StripeCredentials['data']
+  return (await decrypt(
+    credentials.data,
+    credentials.iv
+  )) as StripeCredentials['data']
 }
 
 // https://stripe.com/docs/currencies#zero-decimal

@@ -5,7 +5,6 @@ import {
   ChatReply,
   SessionState,
   Variable,
-  VariableWithUnknowValue,
   VariableWithValue,
 } from '@typebot.io/schemas'
 import {
@@ -13,17 +12,25 @@ import {
   OpenAICredentials,
   modelLimit,
 } from '@typebot.io/schemas/features/blocks/integrations/openai'
-import { OpenAIApi, Configuration, ChatCompletionRequestMessage } from 'openai'
-import { isDefined, byId, isNotEmpty, isEmpty } from '@typebot.io/lib'
-import { decrypt } from '@typebot.io/lib/api/encryption'
+import type {
+  ChatCompletionRequestMessage,
+  CreateChatCompletionRequest,
+  CreateChatCompletionResponse,
+} from 'openai'
+import { byId, isNotEmpty, isEmpty } from '@typebot.io/lib'
+import { decrypt, isCredentialsV2 } from '@typebot.io/lib/api/encryption'
 import { saveErrorLog } from '@/features/logs/saveErrorLog'
 import { updateVariables } from '@/features/variables/updateVariables'
 import { parseVariables } from '@/features/variables/parseVariables'
-import { saveSuccessLog } from '@/features/logs/saveSuccessLog'
 import { parseVariableNumber } from '@/features/variables/parseVariableNumber'
 import { encoding_for_model } from '@dqbd/tiktoken'
+import got from 'got'
+import { resumeChatCompletion } from './resumeChatCompletion'
+import { isPlaneteScale } from '@/helpers/api/isPlanetScale'
+import { isVercel } from '@/helpers/api/isVercel'
 
 const minTokenCompletion = 200
+const createChatEndpoint = 'https://api.openai.com/v1/chat/completions'
 
 export const createChatCompletionOpenAI = async (
   state: SessionState,
@@ -52,13 +59,10 @@ export const createChatCompletionOpenAI = async (
     console.error('Could not find credentials in database')
     return { outgoingEdgeId, logs: [noCredentialsError] }
   }
-  const { apiKey } = decrypt(
+  const { apiKey } = (await decrypt(
     credentials.data,
     credentials.iv
-  ) as OpenAICredentials['data']
-  const configuration = new Configuration({
-    apiKey,
-  })
+  )) as OpenAICredentials['data']
   const { variablesTransformedToList, messages } = parseMessages(
     newSessionState.typebot.variables,
     options.model
@@ -71,52 +75,39 @@ export const createChatCompletionOpenAI = async (
   )
 
   try {
-    const openai = new OpenAIApi(configuration)
-    const response = await openai.createChatCompletion({
-      model: options.model,
-      messages,
-      temperature,
-    })
-    const messageContent = response.data.choices.at(0)?.message?.content
-    const totalTokens = response.data.usage?.total_tokens
+    if (
+      isPlaneteScale() &&
+      isVercel() &&
+      isCredentialsV2(credentials) &&
+      newSessionState.isStreamEnabled
+    )
+      return {
+        clientSideActions: [{ streamOpenAiChatCompletion: { messages } }],
+        outgoingEdgeId,
+        newSessionState,
+      }
+    const response = await got
+      .post(createChatEndpoint, {
+        headers: {
+          Authorization: `Bearer ${apiKey}`,
+        },
+        json: {
+          model: options.model,
+          messages,
+          temperature,
+        } satisfies CreateChatCompletionRequest,
+      })
+      .json<CreateChatCompletionResponse>()
+    const messageContent = response.choices.at(0)?.message?.content
+    const totalTokens = response.usage?.total_tokens
     if (isEmpty(messageContent)) {
      console.error('OpenAI block returned empty message', response)
       return { outgoingEdgeId, newSessionState }
     }
-    const newVariables = options.responseMapping.reduce<
-      VariableWithUnknowValue[]
-    >((newVariables, mapping) => {
-      const existingVariable = newSessionState.typebot.variables.find(
-        byId(mapping.variableId)
-      )
-      if (!existingVariable) return newVariables
-      if (mapping.valueToExtract === 'Message content') {
-        newVariables.push({
-          ...existingVariable,
-          value: Array.isArray(existingVariable.value)
-            ? existingVariable.value.concat(messageContent)
-            : messageContent,
-        })
-      }
-      if (mapping.valueToExtract === 'Total tokens' && isDefined(totalTokens)) {
-        newVariables.push({
-          ...existingVariable,
-          value: totalTokens,
-        })
-      }
-      return newVariables
-    }, [])
-    if (newVariables.length > 0)
-      newSessionState = await updateVariables(newSessionState)(newVariables)
-    state.result &&
-      (await saveSuccessLog({
-        resultId: state.result.id,
-        message: 'OpenAI block successfully executed',
-      }))
-    return {
+    return resumeChatCompletion(newSessionState, {
+      options,
       outgoingEdgeId,
-      newSessionState,
-    }
+    })(messageContent, totalTokens)
   } catch (err) {
     const log: NonNullable<ChatReply['logs']>[number] = {
       status: 'error',

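The streaming branch above only hands the client a streamOpenAiChatCompletion action with the prepared messages; the fallback path still calls the completions endpoint directly through got. A rough sketch of how an embed client might consume the action, assuming a hypothetical streaming endpoint (URL, payload, and helper names are illustrative, not part of this commit):

type StreamAction = { streamOpenAiChatCompletion: { messages: unknown[] } }

const executeStreamAction = async (
  sessionId: string,
  action: StreamAction
): Promise<string> => {
  const res = await fetch('/api/integrations/openai/streamer', {
    method: 'POST',
    headers: { 'Content-Type': 'application/json' },
    body: JSON.stringify({
      sessionId,
      messages: action.streamOpenAiChatCompletion.messages,
    }),
  })
  if (!res.body) throw new Error('The streaming endpoint returned no body')
  const reader = res.body.getReader()
  const decoder = new TextDecoder()
  let message = ''
  for (;;) {
    const { value, done } = await reader.read()
    if (done) break
    // Each chunk is a decoded piece of the assistant message (see getChatCompletionStream below)
    message += decoder.decode(value)
  }
  // The accumulated text would then be sent back as the reply so that
  // continueBotFlow can resume the block with it
  return message
}
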
@@ -0,0 +1,103 @@
+import { parseVariableNumber } from '@/features/variables/parseVariableNumber'
+import { Connection } from '@planetscale/database'
+import { decrypt } from '@typebot.io/lib/api/encryption'
+import {
+  ChatCompletionOpenAIOptions,
+  OpenAICredentials,
+} from '@typebot.io/schemas/features/blocks/integrations/openai'
+import { SessionState } from '@typebot.io/schemas/features/chat'
+import {
+  ParsedEvent,
+  ReconnectInterval,
+  createParser,
+} from 'eventsource-parser'
+import type {
+  ChatCompletionRequestMessage,
+  CreateChatCompletionRequest,
+} from 'openai'
+
+export const getChatCompletionStream =
+  (conn: Connection) =>
+  async (
+    state: SessionState,
+    options: ChatCompletionOpenAIOptions,
+    messages: ChatCompletionRequestMessage[]
+  ) => {
+    if (!options.credentialsId) return
+    const credentials = (
+      await conn.execute('select data, iv from Credentials where id=?', [
+        options.credentialsId,
+      ])
+    ).rows.at(0) as { data: string; iv: string } | undefined
+    if (!credentials) {
+      console.error('Could not find credentials in database')
+      return
+    }
+    const { apiKey } = (await decrypt(
+      credentials.data,
+      credentials.iv
+    )) as OpenAICredentials['data']
+
+    const temperature = parseVariableNumber(state.typebot.variables)(
+      options.advancedSettings?.temperature
+    )
+
+    const encoder = new TextEncoder()
+    const decoder = new TextDecoder()
+
+    let counter = 0
+
+    const res = await fetch('https://api.openai.com/v1/chat/completions', {
+      headers: {
+        'Content-Type': 'application/json',
+        Authorization: `Bearer ${apiKey}`,
+      },
+      method: 'POST',
+      body: JSON.stringify({
+        messages,
+        model: options.model,
+        temperature,
+        stream: true,
+      } satisfies CreateChatCompletionRequest),
+    })
+
+    const stream = new ReadableStream({
+      async start(controller) {
+        function onParse(event: ParsedEvent | ReconnectInterval) {
+          if (event.type === 'event') {
+            const data = event.data
+            if (data === '[DONE]') {
+              controller.close()
+              return
+            }
+            try {
+              const json = JSON.parse(data) as {
+                choices: { delta: { content: string } }[]
+              }
+              const text = json.choices.at(0)?.delta.content
+              if (counter < 2 && (text?.match(/\n/) || []).length) {
+                return
+              }
+              const queue = encoder.encode(text)
+              controller.enqueue(queue)
+              counter++
+            } catch (e) {
+              controller.error(e)
+            }
+          }
+        }
+
+        // stream response (SSE) from OpenAI may be fragmented into multiple chunks
+        // this ensures we properly read chunks & invoke an event for each SSE event stream
+        const parser = createParser(onParse)
+
+        // https://web.dev/streams/#asynchronous-iteration
+        // eslint-disable-next-line @typescript-eslint/no-explicit-any
+        for await (const chunk of res.body as any) {
+          parser.feed(decoder.decode(chunk))
+        }
+      },
+    })
+
+    return stream
+  }

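The helper talks to the completions endpoint with plain fetch and eventsource-parser rather than the openai SDK, presumably because the SDK's HTTP client is not usable in the Edge runtime; it re-emits only the delta text of each chunk. For reference, an example of a single SSE data payload in the stream and the text the onParse handler above extracts from it (payload abbreviated and illustrative):

const exampleSsePayload =
  '{"id":"chatcmpl-abc","object":"chat.completion.chunk","choices":[{"index":0,"delta":{"content":"Hello"},"finish_reason":null}]}'
const chunkText = (
  JSON.parse(exampleSsePayload) as {
    choices: { delta: { content: string } }[]
  }
).choices.at(0)?.delta.content // 'Hello' — this is what gets encoded and enqueued
// The stream ends with a literal "data: [DONE]" event, which closes the controller
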
@@ -0,0 +1,52 @@
+import { saveSuccessLog } from '@/features/logs/saveSuccessLog'
+import { updateVariables } from '@/features/variables/updateVariables'
+import { byId, isDefined } from '@typebot.io/lib'
+import { SessionState } from '@typebot.io/schemas'
+import { ChatCompletionOpenAIOptions } from '@typebot.io/schemas/features/blocks/integrations/openai'
+import { VariableWithUnknowValue } from '@typebot.io/schemas/features/typebot/variable'
+
+export const resumeChatCompletion =
+  (
+    state: SessionState,
+    {
+      outgoingEdgeId,
+      options,
+    }: { outgoingEdgeId?: string; options: ChatCompletionOpenAIOptions }
+  ) =>
+  async (message: string, totalTokens?: number) => {
+    let newSessionState = state
+    const newVariables = options.responseMapping.reduce<
+      VariableWithUnknowValue[]
+    >((newVariables, mapping) => {
+      const existingVariable = newSessionState.typebot.variables.find(
+        byId(mapping.variableId)
+      )
+      if (!existingVariable) return newVariables
+      if (mapping.valueToExtract === 'Message content') {
+        newVariables.push({
+          ...existingVariable,
+          value: Array.isArray(existingVariable.value)
+            ? existingVariable.value.concat(message)
+            : message,
+        })
+      }
+      if (mapping.valueToExtract === 'Total tokens' && isDefined(totalTokens)) {
+        newVariables.push({
+          ...existingVariable,
+          value: totalTokens,
+        })
+      }
+      return newVariables
+    }, [])
+    if (newVariables.length > 0)
+      newSessionState = await updateVariables(newSessionState)(newVariables)
+    state.result &&
+      (await saveSuccessLog({
+        resultId: state.result.id,
+        message: 'OpenAI block successfully executed',
+      }))
+    return {
+      outgoingEdgeId,
+      newSessionState,
+    }
+  }

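Both execution paths funnel into this helper: the non-streaming path above calls it with the got response, and continueBotFlow (further down) calls it with the streamed text once the client sends it back as the reply. Illustrative call, mirroring the diff (streamedText stands for the accumulated completion and is not a name from this commit):

const { outgoingEdgeId, newSessionState } = await resumeChatCompletion(state, {
  options: block.options,
  outgoingEdgeId: block.outgoingEdgeId,
})(streamedText)
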
@@ -193,7 +193,10 @@ const getEmailInfo = async (
     where: { id: credentialsId },
   })
   if (!credentials) return
-  return decrypt(credentials.data, credentials.iv) as SmtpCredentials['data']
+  return (await decrypt(
+    credentials.data,
+    credentials.iv
+  )) as SmtpCredentials['data']
 }
 
 const getEmailBody = async ({

@@ -61,6 +61,11 @@ const evaluateSetVariableExpression
     const evaluating = parseVariables(variables, { fieldToParse: 'id' })(
       str.includes('return ') ? str : `return ${str}`
     )
+    console.log(
+      variables.map((v) => v.id),
+      ...variables.map((v) => parseGuessedValueType(v.value)),
+      evaluating
+    )
     try {
       const func = Function(...variables.map((v) => v.id), evaluating)
       return func(...variables.map((v) => parseGuessedValueType(v.value)))

@@ -154,6 +154,7 @@ const startSession = async (startParams?: StartParams, userId?: string) => {
     },
     currentTypebotId: typebot.id,
     dynamicTheme: parseDynamicThemeInState(typebot.theme),
+    isStreamEnabled: startParams.isStreamEnabled,
   }
 
   const { messages, input, clientSideActions, newSessionState, logs } =

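isStreamEnabled comes from the client's start request, so only clients that know how to handle the streamOpenAiChatCompletion action opt in. A hypothetical start call from an embed (the endpoint and payload wrapper are assumptions about the viewer's chat API, not part of this diff):

const response = await fetch('/api/v1/sendMessage', {
  method: 'POST',
  headers: { 'Content-Type': 'application/json' },
  body: JSON.stringify({
    startParams: {
      typebot: 'my-typebot-id', // hypothetical public typebot id
      isStreamEnabled: true, // the new flag picked up by startSession above
    },
  }),
})
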
@@ -9,12 +9,13 @@ import {
   ChatReply,
   InputBlock,
   InputBlockType,
+  IntegrationBlockType,
   LogicBlockType,
   ResultInSession,
   SessionState,
   SetVariableBlock,
 } from '@typebot.io/schemas'
-import { isInputBlock, isNotDefined, byId } from '@typebot.io/lib'
+import { isInputBlock, isNotDefined, byId, isDefined } from '@typebot.io/lib'
 import { executeGroup } from './executeGroup'
 import { getNextGroup } from './getNextGroup'
 import { validateEmail } from '@/features/blocks/inputs/email/validateEmail'
@@ -23,6 +24,8 @@ import { validatePhoneNumber } from '@/features/blocks/inputs/phone/validatePhon
 import { validateUrl } from '@/features/blocks/inputs/url/validateUrl'
 import { updateVariables } from '@/features/variables/updateVariables'
 import { parseVariables } from '@/features/variables/parseVariables'
+import { OpenAIBlock } from '@typebot.io/schemas/features/blocks/integrations/openai'
+import { resumeChatCompletion } from '@/features/blocks/integrations/openai/resumeChatCompletion'
 
 export const continueBotFlow =
   (state: SessionState) =>
@@ -57,6 +60,16 @@ export const continueBotFlow =
         }
         newSessionState = await updateVariables(state)([newVariable])
       }
+    } else if (
+      isDefined(reply) &&
+      block.type === IntegrationBlockType.OPEN_AI &&
+      block.options.task === 'Create chat completion'
+    ) {
+      const result = await resumeChatCompletion(state, {
+        options: block.options,
+        outgoingEdgeId: block.outgoingEdgeId,
+      })(reply)
+      newSessionState = result.newSessionState
     } else if (!isInputBlock(block))
       throw new TRPCError({
         code: 'INTERNAL_SERVER_ERROR',
@@ -236,7 +249,10 @@ const computeStorageUsed = async (reply: string) => {
 
 const getOutgoingEdgeId =
   ({ typebot: { variables } }: Pick<SessionState, 'typebot'>) =>
-  (block: InputBlock | SetVariableBlock, reply: string | null) => {
+  (
+    block: InputBlock | SetVariableBlock | OpenAIBlock,
+    reply: string | null
+  ) => {
     if (
       block.type === InputBlockType.CHOICE &&
       !block.options.isMultipleChoice &&

@@ -73,6 +73,10 @@ export const executeGroup =
           : null
 
     if (!executionResponse) continue
+    if (executionResponse.logs)
+      logs = [...(logs ?? []), ...executionResponse.logs]
+    if (executionResponse.newSessionState)
+      newSessionState = executionResponse.newSessionState
     if (
       'clientSideActions' in executionResponse &&
       executionResponse.clientSideActions
@@ -83,7 +87,8 @@ export const executeGroup =
       ]
     if (
       executionResponse.clientSideActions?.find(
-        (action) => 'setVariable' in action
+        (action) =>
+          'setVariable' in action || 'streamOpenAiChatCompletion' in action
       )
     ) {
       return {
@@ -101,10 +106,6 @@ export const executeGroup =
       }
     }
 
-    if (executionResponse.logs)
-      logs = [...(logs ?? []), ...executionResponse.logs]
-    if (executionResponse.newSessionState)
-      newSessionState = executionResponse.newSessionState
     if (executionResponse.outgoingEdgeId) {
       nextEdgeId = executionResponse.outgoingEdgeId
       break