🐛 Fix streaming text selection (#1444)

Author: Baptiste Arnaud
Date: 2024-04-12 11:02:28 +02:00
Committed by: GitHub
Parent: d608a30e47
Commit: 3f367800df

10 changed files with 52 additions and 25 deletions

View File

@@ -13,7 +13,6 @@ import { decrypt } from '@typebot.io/lib/api/encryption/decrypt'
 import { resumeChatCompletion } from './resumeChatCompletion'
 import { parseChatCompletionMessages } from './parseChatCompletionMessages'
 import { executeChatCompletionOpenAIRequest } from './executeChatCompletionOpenAIRequest'
-import { isPlaneteScale } from '@typebot.io/lib/isPlanetScale'
 import prisma from '@typebot.io/lib/prisma'
 import { ExecuteIntegrationResponse } from '../../../../types'
 import { parseVariableNumber } from '@typebot.io/variables/parseVariableNumber'
@@ -23,6 +22,7 @@ import {
   defaultOpenAIOptions,
 } from '@typebot.io/schemas/features/blocks/integrations/openai/constants'
 import { BubbleBlockType } from '@typebot.io/schemas/features/blocks/bubbles/constants'
+import { isPlaneteScale } from '@typebot.io/lib/isPlanetScale'

 export const createChatCompletionOpenAI = async (
   state: SessionState,
@@ -90,7 +90,8 @@ export const createChatCompletionOpenAI = async (
       blockId,
       assistantMessageVariableName
     ) &&
-    !process.env.VERCEL_ENV
+    (!process.env.VERCEL_ENV ||
+      (isPlaneteScale() && credentials && isCredentialsV2(credentials)))
   ) {
     return {
       clientSideActions: [
@@ -101,6 +102,7 @@ export const createChatCompletionOpenAI = async (
               content?: string
               role: (typeof chatCompletionMessageRoles)[number]
             }[],
+            runtime: process.env.VERCEL_ENV ? 'edge' : 'nodejs',
          },
          expectsDedicatedReply: true,
        },
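
The sketch below is not part of the commit; it restates the new guard and runtime choice as standalone helpers to make the decision easier to follow. The names canStreamOnClient and pickRuntime are illustrative; VERCEL_ENV, isPlaneteScale() and isCredentialsV2() are the signals the diff actually checks.

type StreamRuntime = 'edge' | 'nodejs'

// Client-side streaming is allowed when either:
//  - the bot is not deployed on Vercel (self-hosted -> Node.js API route), or
//  - it runs on Vercel with PlanetScale and v2 credentials (edge route).
const canStreamOnClient = (signals: {
  isDeployedOnVercel: boolean // stands in for Boolean(process.env.VERCEL_ENV)
  isPlanetScale: boolean // stands in for isPlaneteScale()
  hasV2Credentials: boolean // stands in for credentials && isCredentialsV2(credentials)
}): boolean =>
  !signals.isDeployedOnVercel ||
  (signals.isPlanetScale && signals.hasV2Credentials)

const pickRuntime = (isDeployedOnVercel: boolean): StreamRuntime =>
  isDeployedOnVercel ? 'edge' : 'nodejs'

// Example: a self-hosted instance can stream, and does so via the Node.js runtime.
console.log(
  canStreamOnClient({
    isDeployedOnVercel: false,
    isPlanetScale: false,
    hasV2Credentials: false,
  })
) // true
console.log(pickRuntime(false)) // 'nodejs'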

View File

@@ -59,11 +59,8 @@ export const executeForgedBlock = async (
     ) &&
     state.isStreamEnabled &&
     !state.whatsApp &&
-    // TODO: Enable once chat api is rolling
-    isPlaneteScale() &&
-    credentials &&
-    isCredentialsV2(credentials)
-    // !process.env.VERCEL_ENV
+    (!process.env.VERCEL_ENV ||
+      (isPlaneteScale() && credentials && isCredentialsV2(credentials)))
   ) {
     return {
       outgoingEdgeId: block.outgoingEdgeId,
@@ -72,6 +69,7 @@ export const executeForgedBlock = async (
           type: 'stream',
           expectsDedicatedReply: true,
           stream: true,
+          runtime: process.env.VERCEL_ENV ? 'edge' : 'nodejs',
         },
       ],
     }
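
For reference, a minimal sketch (assumed, simplified shape) of the 'stream' client-side action the forged-block executor now emits; the authoritative definition is the schema change at the end of this commit.

type StreamClientSideAction = {
  type: 'stream'
  expectsDedicatedReply: true
  stream: true
  runtime: 'edge' | 'nodejs'
}

const exampleAction: StreamClientSideAction = {
  type: 'stream',
  expectsDedicatedReply: true,
  stream: true,
  runtime: 'nodejs', // 'edge' when process.env.VERCEL_ENV is set, per the diff
}

console.log(exampleAction.runtime) // 'nodejs'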

View File

@@ -1,6 +1,6 @@
 {
   "name": "@typebot.io/js",
-  "version": "0.2.70",
+  "version": "0.2.71",
   "description": "Javascript library to display typebots on your website",
   "type": "module",
   "main": "dist/index.js",

View File

@@ -154,6 +154,7 @@ export const ConversationContainer = (props: Props) => {
     const longRequest = setTimeout(() => {
       setIsSending(true)
     }, 1000)
+    autoScrollToBottom()
     const { data, error } = await continueChatQuery({
       apiHost: props.context.apiHost,
       sessionId: props.initialChatReply.sessionId,
@@ -205,6 +206,11 @@ export const ConversationContainer = (props: Props) => {
         isNotDefined(action.lastBubbleBlockId)
       )
       await processClientSideActions(actionsBeforeFirstBubble)
+      if (
+        data.clientSideActions.length === 1 &&
+        data.clientSideActions[0].type === 'stream'
+      )
+        return
     }
     setChatChunks((displayedChunks) => [
       ...displayedChunks,
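
The added early return can be read as the following standalone predicate (illustrative only; the action objects are reduced to the one field that matters here).

const isStreamOnlyReply = (clientSideActions: { type: string }[]): boolean =>
  clientSideActions.length === 1 && clientSideActions[0].type === 'stream'

// When the reply's only client-side action is a stream, the container returns
// early: the streaming bubble renders the text itself, so no regular chat
// chunk is appended for it.
console.log(isStreamOnlyReply([{ type: 'stream' }])) // true
console.log(isStreamOnlyReply([{ type: 'stream' }, { type: 'redirect' }])) // false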

View File

@@ -1,5 +1,5 @@
 import { streamingMessage } from '@/utils/streamingMessageSignal'
-import { createEffect, createSignal } from 'solid-js'
+import { For, createEffect, createSignal } from 'solid-js'
 import { marked } from 'marked'
 import domPurify from 'dompurify'
@@ -8,7 +8,7 @@ type Props = {
 }

 export const StreamingBubble = (props: Props) => {
-  const [content, setContent] = createSignal<string>('')
+  const [content, setContent] = createSignal<string[]>([])

   marked.use({
     renderer: {
@@ -19,12 +19,16 @@ export const StreamingBubble = (props: Props) => {
   })

   createEffect(() => {
-    if (streamingMessage()?.id === props.streamingMessageId)
-      setContent(
-        domPurify.sanitize(marked.parse(streamingMessage()?.content ?? ''), {
-          ADD_ATTR: ['target'],
-        })
-      )
+    if (streamingMessage()?.id !== props.streamingMessageId) return []
+    setContent(
+      streamingMessage()
+        ?.content.split('\n\n')
+        .map((line) =>
+          domPurify.sanitize(marked.parse(line), {
+            ADD_ATTR: ['target'],
+          })
+        ) ?? []
+    )
   })

   return (
@@ -43,8 +47,9 @@ export const StreamingBubble = (props: Props) => {
           class={
             'flex flex-col overflow-hidden text-fade-in mx-4 my-2 relative text-ellipsis h-full gap-6'
           }
-          innerHTML={content()}
-        />
+        >
+          <For each={content()}>{(line) => <span innerHTML={line} />}</For>
+        </div>
       </div>
     </div>
   </div>
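
A condensed sketch of the new rendering strategy, assuming the same marked and dompurify packages the component already imports (toSanitizedParagraphs is an illustrative name, not an export of the component). Splitting the streamed markdown on blank lines and rendering each paragraph in its own <span> via <For> keeps already-rendered DOM nodes stable while new paragraphs stream in, instead of replacing a single innerHTML blob on every update, which is what was wiping the user's text selection.

import { marked } from 'marked'
import domPurify from 'dompurify'

// Illustrative helper: one sanitized HTML string per markdown paragraph.
export const toSanitizedParagraphs = (streamedMarkdown: string): string[] =>
  streamedMarkdown.split('\n\n').map((paragraph) =>
    domPurify.sanitize(marked.parse(paragraph) as string, {
      ADD_ATTR: ['target'], // keep target attributes (e.g. target="_blank") after sanitizing
    })
  )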

View File

@@ -7,16 +7,22 @@ let abortController: AbortController | null = null
 const secondsToWaitBeforeRetries = 3
 const maxRetryAttempts = 3

+const edgeRuntimePath = '/api/integrations/openai/streamer'
+const nodejsRuntimePath = (sessionId: string) =>
+  `/api/v1/sessions/${sessionId}/streamMessage`
+
 export const streamChat =
   (context: ClientSideActionContext & { retryAttempt?: number }) =>
   async ({
     messages,
+    runtime,
     onMessageStream,
   }: {
     messages?: {
       content?: string | undefined
       role?: 'system' | 'user' | 'assistant' | undefined
     }[]
+    runtime: 'edge' | 'nodejs'
     onMessageStream?: (props: { id: string; message: string }) => void
   }): Promise<{ message?: string; error?: object }> => {
     try {
@@ -25,9 +31,12 @@ export const streamChat =
       const apiHost = context.apiHost

       const res = await fetch(
-        `${
-          isNotEmpty(apiHost) ? apiHost : guessApiHost()
-        }/api/integrations/openai/streamer`,
+        isNotEmpty(apiHost)
+          ? apiHost
+          : guessApiHost() +
+            (runtime === 'edge'
+              ? edgeRuntimePath
+              : nodejsRuntimePath(context.sessionId)),
         {
           method: 'POST',
           headers: {
@@ -35,7 +44,7 @@ export const streamChat =
           },
           body: JSON.stringify({
             messages,
-            sessionId: context.sessionId,
+            sessionId: runtime === 'edge' ? context.sessionId : undefined,
           }),
           signal: abortController.signal,
         }
@@ -52,7 +61,7 @@ export const streamChat =
         return streamChat({
           ...context,
           retryAttempt: (context.retryAttempt ?? 0) + 1,
-        })({ messages, onMessageStream })
+        })({ messages, onMessageStream, runtime })
       }
       return {
         error: (await res.json()) || 'Failed to fetch the chat response.',
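
A sketch of the endpoint and body selection performed above (buildStreamRequest is an illustrative helper, not exported by the library; the two paths are copied from the diff).

const edgeRuntimePath = '/api/integrations/openai/streamer'
const nodejsRuntimePath = (sessionId: string) =>
  `/api/v1/sessions/${sessionId}/streamMessage`

export const buildStreamRequest = (params: {
  apiHost: string
  runtime: 'edge' | 'nodejs'
  sessionId: string
  messages?: { content?: string; role?: 'system' | 'user' | 'assistant' }[]
}) => ({
  url:
    params.apiHost +
    (params.runtime === 'edge'
      ? edgeRuntimePath
      : nodejsRuntimePath(params.sessionId)),
  body: {
    messages: params.messages,
    // The Node.js route already carries the session id in its URL, so the
    // body only needs it for the edge streamer.
    sessionId: params.runtime === 'edge' ? params.sessionId : undefined,
  },
})

// buildStreamRequest({ apiHost: 'https://typebot.io', runtime: 'nodejs', sessionId: 'abc' }).url
//   -> 'https://typebot.io/api/v1/sessions/abc/streamMessage'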

View File

@@ -54,12 +54,17 @@ export const executeClientSideAction = async ({
     'streamOpenAiChatCompletion' in clientSideAction ||
     'stream' in clientSideAction
   ) {
+    const runtime =
+      'streamOpenAiChatCompletion' in clientSideAction
+        ? clientSideAction.streamOpenAiChatCompletion.runtime
+        : clientSideAction.runtime
     const { error, message } = await streamChat(context)({
       messages:
         'streamOpenAiChatCompletion' in clientSideAction
           ? clientSideAction.streamOpenAiChatCompletion?.messages
           : undefined,
       onMessageStream,
+      runtime,
     })
     if (error)
       return {
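
The runtime resolution added above, as a standalone sketch with the two action shapes reduced to the fields involved (illustrative types, not the real schema).

type StreamingAction =
  | { streamOpenAiChatCompletion: { runtime: 'edge' | 'nodejs' } }
  | { type: 'stream'; runtime: 'edge' | 'nodejs' }

const resolveRuntime = (action: StreamingAction): 'edge' | 'nodejs' =>
  'streamOpenAiChatCompletion' in action
    ? action.streamOpenAiChatCompletion.runtime
    : action.runtime

console.log(resolveRuntime({ type: 'stream', runtime: 'edge' })) // 'edge'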

View File

@@ -1,6 +1,6 @@
 {
   "name": "@typebot.io/nextjs",
-  "version": "0.2.70",
+  "version": "0.2.71",
   "description": "Convenient library to display typebots on your Next.js website",
   "main": "dist/index.js",
   "types": "dist/index.d.ts",

View File

@@ -1,6 +1,6 @@
 {
   "name": "@typebot.io/react",
-  "version": "0.2.70",
+  "version": "0.2.71",
   "description": "Convenient library to display typebots on your React app",
   "main": "dist/index.js",
   "types": "dist/index.d.ts",

View File

@@ -110,6 +110,7 @@ export const clientSideActionSchema = z.discriminatedUnion('type', [
         messages: z.array(
           nativeMessageSchema.pick({ content: true, role: true })
         ),
+        runtime: z.enum(['edge', 'nodejs']),
       }),
     })
     .merge(clientSideActionBaseSchema)
@@ -151,6 +152,7 @@ export const clientSideActionSchema = z.discriminatedUnion('type', [
       .object({
         type: z.literal('stream'),
         stream: z.literal(true),
+        runtime: z.enum(['edge', 'nodejs']),
       })
       .merge(clientSideActionBaseSchema)
       .openapi({
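
A minimal zod sketch of the new field (illustrative; the real clientSideActionSchema is a much larger discriminated union): runtime is a required enum on both streaming variants, so the embed always knows which endpoint to call.

import { z } from 'zod'

const streamActionSchema = z.object({
  type: z.literal('stream'),
  stream: z.literal(true),
  runtime: z.enum(['edge', 'nodejs']),
})

type StreamAction = z.infer<typeof streamActionSchema>
// -> { type: 'stream'; stream: true; runtime: 'edge' | 'nodejs' }

streamActionSchema.parse({ type: 'stream', stream: true, runtime: 'nodejs' }) // ok
// streamActionSchema.parse({ type: 'stream', stream: true }) // throws: runtime is required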