2023-03-09 08:46:36 +01:00
|
|
|
import { ExecuteIntegrationResponse } from '@/features/chat/types'
|
2023-03-13 16:28:08 +01:00
|
|
|
import { transformStringVariablesToList } from '@/features/variables/transformVariablesToList'
|
2023-03-09 08:46:36 +01:00
|
|
|
import prisma from '@/lib/prisma'
|
2023-03-13 16:28:08 +01:00
|
|
|
import {
|
2023-04-27 11:21:32 +02:00
|
|
|
ChatReply,
|
2023-03-13 16:28:08 +01:00
|
|
|
SessionState,
|
|
|
|
Variable,
|
|
|
|
VariableWithValue,
|
2023-03-15 08:35:16 +01:00
|
|
|
} from '@typebot.io/schemas'
|
2023-03-09 08:46:36 +01:00
|
|
|
import {
|
|
|
|
ChatCompletionOpenAIOptions,
|
|
|
|
OpenAICredentials,
|
2023-05-02 13:37:02 -04:00
|
|
|
modelLimit,
|
2023-03-15 08:35:16 +01:00
|
|
|
} from '@typebot.io/schemas/features/blocks/integrations/openai'
|
2023-05-25 10:32:35 +02:00
|
|
|
import type {
|
|
|
|
ChatCompletionRequestMessage,
|
|
|
|
CreateChatCompletionRequest,
|
|
|
|
CreateChatCompletionResponse,
|
|
|
|
} from 'openai'
|
|
|
|
import { byId, isNotEmpty, isEmpty } from '@typebot.io/lib'
|
|
|
|
import { decrypt, isCredentialsV2 } from '@typebot.io/lib/api/encryption'
|
2023-03-15 12:21:52 +01:00
|
|
|
import { saveErrorLog } from '@/features/logs/saveErrorLog'
|
|
|
|
import { updateVariables } from '@/features/variables/updateVariables'
|
2023-03-15 15:04:53 +01:00
|
|
|
import { parseVariables } from '@/features/variables/parseVariables'
|
2023-03-20 17:26:21 +01:00
|
|
|
import { parseVariableNumber } from '@/features/variables/parseVariableNumber'
|
2023-05-02 13:37:02 -04:00
|
|
|
import { encoding_for_model } from '@dqbd/tiktoken'
|
2023-05-25 10:32:35 +02:00
|
|
|
import got from 'got'
|
|
|
|
import { resumeChatCompletion } from './resumeChatCompletion'
|
|
|
|
import { isPlaneteScale } from '@/helpers/api/isPlanetScale'
|
|
|
|
import { isVercel } from '@/helpers/api/isVercel'
|
2023-05-02 13:37:02 -04:00
|
|
|
|
|
|
|
// Number of tokens reserved for the model's completion when trimming the
// prompt history to fit within the model's context window (see parseMessages).
const minTokenCompletion = 200

// OpenAI REST endpoint for the Chat Completions API.
const createChatEndpoint = 'https://api.openai.com/v1/chat/completions'
|
2023-03-09 08:46:36 +01:00
|
|
|
|
|
|
|
export const createChatCompletionOpenAI = async (
|
|
|
|
state: SessionState,
|
|
|
|
{
|
|
|
|
outgoingEdgeId,
|
|
|
|
options,
|
|
|
|
}: { outgoingEdgeId?: string; options: ChatCompletionOpenAIOptions }
|
|
|
|
): Promise<ExecuteIntegrationResponse> => {
|
2023-03-13 16:28:08 +01:00
|
|
|
let newSessionState = state
|
2023-04-27 11:21:32 +02:00
|
|
|
const noCredentialsError = {
|
|
|
|
status: 'error',
|
|
|
|
description: 'Make sure to select an OpenAI account',
|
|
|
|
}
|
2023-03-15 17:47:05 +01:00
|
|
|
if (!options.credentialsId) {
|
2023-04-27 11:21:32 +02:00
|
|
|
return {
|
|
|
|
outgoingEdgeId,
|
|
|
|
logs: [noCredentialsError],
|
|
|
|
}
|
2023-03-15 17:47:05 +01:00
|
|
|
}
|
2023-03-09 08:46:36 +01:00
|
|
|
const credentials = await prisma.credentials.findUnique({
|
|
|
|
where: {
|
|
|
|
id: options.credentialsId,
|
|
|
|
},
|
|
|
|
})
|
2023-03-15 17:47:05 +01:00
|
|
|
if (!credentials) {
|
|
|
|
console.error('Could not find credentials in database')
|
2023-04-27 11:21:32 +02:00
|
|
|
return { outgoingEdgeId, logs: [noCredentialsError] }
|
2023-03-15 17:47:05 +01:00
|
|
|
}
|
2023-05-25 10:32:35 +02:00
|
|
|
const { apiKey } = (await decrypt(
|
2023-03-09 08:46:36 +01:00
|
|
|
credentials.data,
|
|
|
|
credentials.iv
|
2023-05-25 10:32:35 +02:00
|
|
|
)) as OpenAICredentials['data']
|
2023-03-15 17:47:05 +01:00
|
|
|
const { variablesTransformedToList, messages } = parseMessages(
|
2023-05-02 13:37:02 -04:00
|
|
|
newSessionState.typebot.variables,
|
|
|
|
options.model
|
2023-03-15 17:47:05 +01:00
|
|
|
)(options.messages)
|
2023-03-13 16:28:08 +01:00
|
|
|
if (variablesTransformedToList.length > 0)
|
|
|
|
newSessionState = await updateVariables(state)(variablesTransformedToList)
|
2023-03-15 17:47:05 +01:00
|
|
|
|
2023-03-20 17:26:21 +01:00
|
|
|
const temperature = parseVariableNumber(newSessionState.typebot.variables)(
|
|
|
|
options.advancedSettings?.temperature
|
|
|
|
)
|
|
|
|
|
2023-03-13 16:28:08 +01:00
|
|
|
try {
|
2023-05-25 10:32:35 +02:00
|
|
|
if (
|
|
|
|
isPlaneteScale() &&
|
|
|
|
isVercel() &&
|
|
|
|
isCredentialsV2(credentials) &&
|
|
|
|
newSessionState.isStreamEnabled
|
|
|
|
)
|
|
|
|
return {
|
|
|
|
clientSideActions: [{ streamOpenAiChatCompletion: { messages } }],
|
|
|
|
outgoingEdgeId,
|
|
|
|
newSessionState,
|
|
|
|
}
|
|
|
|
const response = await got
|
|
|
|
.post(createChatEndpoint, {
|
|
|
|
headers: {
|
|
|
|
Authorization: `Bearer ${apiKey}`,
|
|
|
|
},
|
|
|
|
json: {
|
|
|
|
model: options.model,
|
|
|
|
messages,
|
|
|
|
temperature,
|
|
|
|
} satisfies CreateChatCompletionRequest,
|
|
|
|
})
|
|
|
|
.json<CreateChatCompletionResponse>()
|
|
|
|
const messageContent = response.choices.at(0)?.message?.content
|
|
|
|
const totalTokens = response.usage?.total_tokens
|
2023-03-15 17:47:05 +01:00
|
|
|
if (isEmpty(messageContent)) {
|
|
|
|
console.error('OpenAI block returned empty message', response)
|
2023-03-13 16:28:08 +01:00
|
|
|
return { outgoingEdgeId, newSessionState }
|
2023-03-09 08:46:36 +01:00
|
|
|
}
|
2023-05-25 10:32:35 +02:00
|
|
|
return resumeChatCompletion(newSessionState, {
|
|
|
|
options,
|
2023-03-09 08:46:36 +01:00
|
|
|
outgoingEdgeId,
|
2023-05-25 10:32:35 +02:00
|
|
|
})(messageContent, totalTokens)
|
2023-03-13 16:28:08 +01:00
|
|
|
} catch (err) {
|
2023-04-27 11:21:32 +02:00
|
|
|
const log: NonNullable<ChatReply['logs']>[number] = {
|
2023-04-03 17:12:11 +02:00
|
|
|
status: 'error',
|
|
|
|
description: 'OpenAI block returned error',
|
|
|
|
}
|
|
|
|
|
2023-04-27 11:21:32 +02:00
|
|
|
if (err && typeof err === 'object') {
|
|
|
|
if ('response' in err) {
|
|
|
|
const { status, data } = err.response as {
|
|
|
|
status: string
|
|
|
|
data: string
|
|
|
|
}
|
|
|
|
log.details = {
|
|
|
|
status,
|
|
|
|
data,
|
|
|
|
}
|
|
|
|
} else if ('message' in err) {
|
|
|
|
log.details = err.message
|
|
|
|
}
|
2023-03-20 17:26:21 +01:00
|
|
|
}
|
|
|
|
|
2023-03-13 16:28:08 +01:00
|
|
|
state.result &&
|
|
|
|
(await saveErrorLog({
|
|
|
|
resultId: state.result.id,
|
|
|
|
message: log.description,
|
|
|
|
details: log.details,
|
|
|
|
}))
|
|
|
|
return {
|
|
|
|
outgoingEdgeId,
|
|
|
|
logs: [log],
|
|
|
|
newSessionState,
|
|
|
|
}
|
2023-03-09 08:46:36 +01:00
|
|
|
}
|
|
|
|
}
|
2023-03-13 16:28:08 +01:00
|
|
|
|
|
|
|
/**
 * Curried helper that turns the block's configured messages into OpenAI
 * `ChatCompletionRequestMessage`s.
 *
 * Regular messages get their variables interpolated via `parseVariables`.
 * A "Messages sequence ✨" entry is expanded into an interleaved user /
 * assistant history built from two list variables; the backing string
 * variables converted to lists along the way are collected and returned in
 * `variablesTransformedToList` so the caller can persist the conversion.
 * Only the FIRST sequence occurrence is trimmed to fit the model's token
 * limit (minus `minTokenCompletion` reserved for the reply).
 */
const parseMessages =
  (variables: Variable[], model: ChatCompletionOpenAIOptions['model']) =>
  (
    messages: ChatCompletionOpenAIOptions['messages']
  ): {
    variablesTransformedToList: VariableWithValue[]
    messages: ChatCompletionRequestMessage[]
  } => {
    // Mutated from inside the flatMap below when a sequence is expanded.
    const variablesTransformedToList: VariableWithValue[] = []
    const firstMessagesSequenceIndex = messages.findIndex(
      (message) => message.role === 'Messages sequence ✨'
    )
    const parsedMessages = messages
      .flatMap((message, index) => {
        if (!message.role) return
        if (message.role === 'Messages sequence ✨') {
          // A sequence needs both backing variables; otherwise skip it.
          if (
            !message.content?.assistantMessagesVariableId ||
            !message.content?.userMessagesVariableId
          )
            return
          variablesTransformedToList.push(
            ...transformStringVariablesToList(variables)([
              message.content.assistantMessagesVariableId,
              message.content.userMessagesVariableId,
            ])
          )
          // Read through the freshly transformed (list) versions when present.
          const updatedVariables = variables.map((variable) => {
            const variableTransformedToList = variablesTransformedToList.find(
              byId(variable.id)
            )
            if (variableTransformedToList) return variableTransformedToList
            return variable
          })

          const userMessages = (updatedVariables.find(
            (variable) =>
              variable.id === message.content?.userMessagesVariableId
          )?.value ?? []) as string[]

          const assistantMessages = (updatedVariables.find(
            (variable) =>
              variable.id === message.content?.assistantMessagesVariableId
          )?.value ?? []) as string[]

          let allMessages: ChatCompletionRequestMessage[] = []

          // Interleave the two histories, leading with whichever list is
          // longer and padding the shorter one with empty strings.
          if (userMessages.length > assistantMessages.length)
            allMessages = userMessages.flatMap((userMessage, index) => [
              {
                role: 'user',
                content: userMessage,
              },
              { role: 'assistant', content: assistantMessages.at(index) ?? '' },
            ]) satisfies ChatCompletionRequestMessage[]
          else {
            allMessages = assistantMessages.flatMap(
              (assistantMessage, index) => [
                { role: 'assistant', content: assistantMessage },
                {
                  role: 'user',
                  content: userMessages.at(index) ?? '',
                },
              ]
            ) satisfies ChatCompletionRequestMessage[]
          }

          // Token-budget trimming is only applied to the first sequence;
          // any later sequence is forwarded untrimmed.
          if (index !== firstMessagesSequenceIndex) return allMessages

          const encoder = encoding_for_model(model)
          let messagesToSend: ChatCompletionRequestMessage[] = []
          let tokenCount = 0

          // Walk the history from newest to oldest, keeping messages while
          // they fit in the model limit minus the reserved completion budget.
          for (let i = allMessages.length - 1; i >= 0; i--) {
            const message = allMessages[i]
            const tokens = encoder.encode(message.content)

            if (
              tokenCount + tokens.length - minTokenCompletion >
              modelLimit[model]
            ) {
              break
            }
            tokenCount += tokens.length
            messagesToSend = [message, ...messagesToSend]
          }

          // Free the WASM-backed tokenizer to avoid leaking its memory.
          encoder.free()

          return messagesToSend
        }
        return {
          role: message.role,
          content: parseVariables(variables)(message.content),
        } satisfies ChatCompletionRequestMessage
      })
      // Drop skipped entries (undefined) and messages missing role/content.
      .filter(
        (message) => isNotEmpty(message?.role) && isNotEmpty(message?.content)
      ) as ChatCompletionRequestMessage[]

    return {
      variablesTransformedToList,
      messages: parsedMessages,
    }
  }
|