
(dify) Make sure Dify also works with agent assistants

Baptiste Arnaud
2024-02-14 08:59:05 +01:00
parent cd488c2384
commit d171aa600c
3 changed files with 73 additions and 11 deletions

View File

@@ -2,8 +2,8 @@ import { createAction, option } from '@typebot.io/forge'
 import { isDefined, isEmpty } from '@typebot.io/lib'
 import { HTTPError, got } from 'got'
 import { auth } from '../auth'
-import { DifyResponse } from '../types'
 import { defaultBaseUrl } from '../constants'
+import { Chunk } from '../types'
 
 export const createChatMessage = createAction({
   auth,
@@ -29,7 +29,7 @@ export const createChatMessage = createAction({
       accordion: 'Inputs',
     }),
     responseMapping: option
-      .saveResponseArray(['Answer', 'Conversation ID', 'Total Tokens'])
+      .saveResponseArray(['Answer', 'Conversation ID', 'Total Tokens'] as const)
       .layout({
         accordion: 'Save response',
       }),
@@ -44,8 +44,9 @@ export const createChatMessage = createAction({
     logs,
   }) => {
     try {
-      const res: DifyResponse = await got
-        .post((apiEndpoint ?? defaultBaseUrl) + '/v1/chat-messages', {
+      const stream = got.post(
+        (apiEndpoint ?? defaultBaseUrl) + '/v1/chat-messages',
+        {
           headers: {
             Authorization: `Bearer ${apiKey}`,
           },
@@ -59,25 +60,74 @@ export const createChatMessage = createAction({
                 }
               }, {}) ?? {},
             query,
-            response_mode: 'blocking',
+            response_mode: 'streaming',
             conversation_id,
             user,
             files: [],
           },
+          isStream: true,
+        }
+      )
+      const { answer, conversationId, totalTokens } = await new Promise<{
+        answer: string
+        conversationId: string | undefined
+        totalTokens: number | undefined
+      }>((resolve, reject) => {
+        let jsonChunk = ''
+        let answer = ''
+        let conversationId: string | undefined
+        let totalTokens: number | undefined
+        stream.on('data', (chunk) => {
+          const lines = chunk.toString().split('\n') as string[]
+          lines
+            .filter((line) => line.length > 0)
+            .forEach((line) => {
+              try {
+                const data = JSON.parse(
+                  (jsonChunk.length > 0 ? jsonChunk : line).replace(
+                    /^data: /,
+                    ''
+                  )
+                ) as Chunk
+                jsonChunk = ''
+                if (
+                  data.event === 'message' ||
+                  data.event === 'agent_message'
+                ) {
+                  answer += data.answer
+                }
+                if (data.event === 'message_end') {
+                  totalTokens = data.metadata.usage.total_tokens
+                  conversationId = data.conversation_id
+                }
+              } catch (error) {
+                jsonChunk += line
+              }
+            })
         })
-        .json()
+        stream.on('end', () => {
+          resolve({ answer, conversationId, totalTokens })
+        })
+        stream.on('error', (error) => {
+          reject(error)
+        })
+      })
       responseMapping?.forEach((mapping) => {
         if (!mapping.variableId) return
         const item = mapping.item ?? 'Answer'
-        if (item === 'Answer') variables.set(mapping.variableId, res.answer)
+        if (item === 'Answer') variables.set(mapping.variableId, answer)
         if (item === 'Conversation ID')
-          variables.set(mapping.variableId, res.conversation_id)
+          variables.set(mapping.variableId, conversationId)
         if (item === 'Total Tokens')
-          variables.set(mapping.variableId, res.metadata.usage.total_tokens)
+          variables.set(mapping.variableId, totalTokens)
       })
     } catch (error) {
       if (error instanceof HTTPError)
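
Note: switching response_mode from 'blocking' to 'streaming' means Dify now answers with server-sent-event style lines of the form "data: {...}", and the action accumulates 'message' and 'agent_message' chunks until a 'message_end' event arrives. Below is a minimal standalone sketch of that accumulation logic, run against a hard-coded payload rather than a live Dify stream; the parseDifyStream helper, the inlined Chunk type, and the sample events are illustrative, not part of the commit.

// Sketch only: mirrors the chunk handling above, without got or a network call.
type Chunk =
  | { event: 'message'; answer: string }
  | { event: 'agent_message'; answer: string }
  | {
      event: 'message_end'
      conversation_id: string
      metadata: { usage: { total_tokens: number } }
    }

const parseDifyStream = (rawLines: string[]) => {
  let answer = ''
  let conversationId: string | undefined
  let totalTokens: number | undefined
  for (const line of rawLines) {
    if (line.length === 0) continue
    let data: Chunk
    try {
      // Each SSE line looks like `data: {...}`; strip the prefix before parsing.
      data = JSON.parse(line.replace(/^data: /, '')) as Chunk
    } catch {
      continue // This simplified sketch just skips partial or non-JSON lines.
    }
    // Agent assistants emit 'agent_message' events instead of 'message',
    // so both event types contribute to the final answer.
    if (data.event === 'message' || data.event === 'agent_message')
      answer += data.answer
    if (data.event === 'message_end') {
      conversationId = data.conversation_id
      totalTokens = data.metadata.usage.total_tokens
    }
  }
  return { answer, conversationId, totalTokens }
}

// Example payload an agent assistant might send (illustrative values):
console.log(
  parseDifyStream([
    'data: {"event":"agent_message","answer":"Hello"}',
    'data: {"event":"agent_message","answer":" there!"}',
    'data: {"event":"message_end","conversation_id":"conv_1","metadata":{"usage":{"total_tokens":42}}}',
  ])
)
// -> { answer: 'Hello there!', conversationId: 'conv_1', totalTokens: 42 }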

View File

@@ -1,5 +1,15 @@
-export type DifyResponse = {
+type MessageChunk = {
+  event: 'message'
   answer: string
+}
+
+type AgentMessageChunk = {
+  event: 'agent_message'
+  answer: string
+}
+
+type MessageEndChunk = {
+  event: 'message_end'
   metadata: {
     usage: {
       total_tokens: number
@@ -7,3 +17,5 @@ export type DifyResponse = {
   }
   conversation_id: string
 }
+
+export type Chunk = MessageChunk | AgentMessageChunk | MessageEndChunk
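
Note: Chunk is a discriminated union keyed on the event field, so checking data.event narrows the type and the message_end-only fields become accessible without casts. A small sketch, assuming the file sits next to this types.ts the way createChatMessage does; the describeChunk helper is illustrative.

import { Chunk } from '../types'

// TypeScript narrows `chunk` inside each case of the exhaustive switch.
const describeChunk = (chunk: Chunk): string => {
  switch (chunk.event) {
    case 'message':
    case 'agent_message':
      return `answer fragment: ${chunk.answer}`
    case 'message_end':
      return `done, ${chunk.metadata.usage.total_tokens} tokens used`
  }
}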

View File

@@ -148,7 +148,7 @@ export const options = option.object({
     defaultValue: defaultOpenAIOptions.temperature,
   }),
   responseMapping: option
-    .saveResponseArray(['Message content', 'Total tokens'])
+    .saveResponseArray(['Message content', 'Total tokens'] as const)
    .layout({
      accordion: 'Save response',
    }),
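
Note: the `as const` added to both saveResponseArray calls keeps the item list as a readonly tuple of string literals instead of letting it widen to string[], presumably so the literal item names flow through to the response-mapping type. A rough illustration of that TypeScript behavior, outside the forge API; the names below are hypothetical.

// Without `as const` the array widens to string[]:
const loose = ['Answer', 'Conversation ID', 'Total Tokens']
type LooseItem = (typeof loose)[number] // string

// With `as const` it stays a readonly tuple of literal types:
const strict = ['Answer', 'Conversation ID', 'Total Tokens'] as const
type StrictItem = (typeof strict)[number] // 'Answer' | 'Conversation ID' | 'Total Tokens'

// A mapping item typed from the `as const` array can then be matched against
// the known literals, as the action's responseMapping loop does:
const handle = (item: StrictItem) => {
  if (item === 'Answer') return 'set answer variable'
  if (item === 'Conversation ID') return 'set conversation variable'
  return 'set token count variable'
}

console.log(handle('Answer'))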