Hello @baptisteArno,

As discussed in issue #1315, this PR adds a basic implementation of an Anthropic Claude AI block. The block is based on the OpenAI block and shares a similar structure. The most notable changes in this PR are:

- Added the Claude AI block.
- Added documentation for the new block.
- Formatted some other source files so they pass the Git pre-commit hook checks.

A few notes:

- There is currently no way to fetch the available model versions dynamically, since the SDK provides no endpoint for that, so all pre-version-3 Claude models are hard-coded as constants (a sketch of these constants follows this description). We have opened an issue about this on the SDK repository [here](https://github.com/anthropics/anthropic-sdk-typescript/issues/313).
- Claude's new [Vision system](https://docs.anthropic.com/claude/docs/vision), which allows image analysis and understanding, could be implemented in a follow-up PR, provided you agree.

<!-- This is an auto-generated comment: release notes by coderabbit.ai -->

## Summary by CodeRabbit

- **New Features**
  - Introduced the Anthropic block for creating chat messages with Claude AI in Typebot.
  - Added functionality to create chat messages using the Anthropic AI SDK with configurable options.
  - Implemented encrypted credentials for Anthropic account integration.
  - Added constants and helpers for better handling of chat messages with Anthropic models.
  - Included the Anthropic block in the list of enabled and forged blocks for broader access.

<!-- end of auto-generated comment: release notes by coderabbit.ai -->

---------

Co-authored-by: Retr0-01 <contact@retr0.dev>
Co-authored-by: Baptiste Arnaud <baptiste.arnaud95@gmail.com>
Co-authored-by: Baptiste Arnaud <contact@baptiste-arnaud.fr>
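Because the SDK exposes no model-listing endpoint, the block's model dropdown is fed from a hard-coded list. The following is a rough sketch of what the `../constants` module imported by the action could look like under this approach; the exact model identifiers and default values shipped in the PR may differ:

```ts
// constants.ts — sketch only; illustrative model list and defaults,
// not necessarily the exact ones included in this PR.
// Model identifiers are hard-coded because the Anthropic SDK provides
// no endpoint to list available models (see anthropic-sdk-typescript#313).
export const anthropicModels = [
  'claude-3-opus-20240229',
  'claude-3-sonnet-20240229',
  'claude-2.1',
  'claude-2.0',
  'claude-instant-1.2',
] as const

export const defaultAnthropicOptions = {
  model: 'claude-3-opus-20240229',
  temperature: 1,
  maxTokens: 1024,
} as const
```

Whenever Anthropic releases new model versions, this list has to be updated manually until the SDK offers a way to fetch them.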
```ts
import { createAction, option } from '@typebot.io/forge'
import { auth } from '../auth'
import { Anthropic } from '@anthropic-ai/sdk'
import { AnthropicStream } from 'ai'
import { anthropicModels, defaultAnthropicOptions } from '../constants'
import { parseChatMessages } from '../helpers/parseChatMessages'
import { isDefined } from '@typebot.io/lib'

const nativeMessageContentSchema = {
  content: option.string.layout({
    inputType: 'textarea',
    placeholder: 'Content',
  }),
}

const userMessageItemSchema = option
  .object({
    role: option.literal('user'),
  })
  .extend(nativeMessageContentSchema)

const assistantMessageItemSchema = option
  .object({
    role: option.literal('assistant'),
  })
  .extend(nativeMessageContentSchema)

const dialogueMessageItemSchema = option.object({
  role: option.literal('Dialogue'),
  dialogueVariableId: option.string.layout({
    inputType: 'variableDropdown',
    placeholder: 'Dialogue variable',
  }),
  startsBy: option.enum(['user', 'assistant']).layout({
    label: 'starts by',
    direction: 'row',
    defaultValue: 'user',
  }),
})

export const options = option.object({
  model: option.enum(anthropicModels).layout({
    defaultValue: defaultAnthropicOptions.model,
  }),
  messages: option
    .array(
      option.discriminatedUnion('role', [
        userMessageItemSchema,
        assistantMessageItemSchema,
        dialogueMessageItemSchema,
      ])
    )
    .layout({ accordion: 'Messages', itemLabel: 'message', isOrdered: true }),
  systemMessage: option.string.layout({
    accordion: 'Advanced Settings',
    label: 'System prompt',
    direction: 'row',
    inputType: 'textarea',
  }),
  temperature: option.number.layout({
    accordion: 'Advanced Settings',
    label: 'Temperature',
    direction: 'row',
    defaultValue: defaultAnthropicOptions.temperature,
  }),
  maxTokens: option.number.layout({
    accordion: 'Advanced Settings',
    label: 'Max Tokens',
    direction: 'row',
    defaultValue: defaultAnthropicOptions.maxTokens,
  }),
  responseMapping: option
    .saveResponseArray(['Message Content'] as const)
    .layout({
      accordion: 'Save Response',
    }),
})

export const createChatMessage = createAction({
  name: 'Create Chat Message',
  auth,
  options,
  turnableInto: [
    {
      blockType: 'mistral',
    },
    {
      blockType: 'openai',
    },
    { blockType: 'open-router' },
    { blockType: 'together-ai' },
  ],
  getSetVariableIds: ({ responseMapping }) =>
    responseMapping?.map((res) => res.variableId).filter(isDefined) ?? [],
  run: {
    server: async ({ credentials: { apiKey }, options, variables, logs }) => {
      const client = new Anthropic({
        apiKey: apiKey,
      })

      const messages = parseChatMessages({ options, variables })

      try {
        const reply = await client.messages.create({
          messages,
          model: options.model ?? defaultAnthropicOptions.model,
          system: options.systemMessage,
          temperature: options.temperature
            ? Number(options.temperature)
            : undefined,
          max_tokens: options.maxTokens
            ? Number(options.maxTokens)
            : defaultAnthropicOptions.maxTokens,
        })

        messages.push(reply)

        options.responseMapping?.forEach((mapping) => {
          if (!mapping.variableId) return

          if (!mapping.item || mapping.item === 'Message Content')
            variables.set(mapping.variableId, reply.content[0].text)
        })
      } catch (error) {
        if (error instanceof Anthropic.APIError) {
          logs.add({
            status: 'error',
            description: `${error.status} ${error.name}`,
            details: error.message,
          })
        } else {
          throw error
        }
      }
    },
    stream: {
      getStreamVariableId: (options) =>
        options.responseMapping?.find(
          (res) => res.item === 'Message Content' || !res.item
        )?.variableId,
      run: async ({ credentials: { apiKey }, options, variables }) => {
        const client = new Anthropic({
          apiKey: apiKey,
        })

        const messages = parseChatMessages({ options, variables })

        const response = await client.messages.create({
          messages,
          model: options.model ?? defaultAnthropicOptions.model,
          system: options.systemMessage,
          temperature: options.temperature
            ? Number(options.temperature)
            : undefined,
          max_tokens: options.maxTokens
            ? Number(options.maxTokens)
            : defaultAnthropicOptions.maxTokens,
          stream: true,
        })

        return AnthropicStream(response)
      },
    },
  },
})
```
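For reviewers who do not want to open the helper file: `parseChatMessages` is referenced above but not part of this excerpt. Below is a minimal sketch of the behaviour the action relies on, not the actual helper from the PR; the `getVariable` lookup and the `ParsedMessage` shape are hypothetical stand-ins for the forge's `options`/`variables` API, and the real helper may differ:

```ts
import { Anthropic } from '@anthropic-ai/sdk'

// Hypothetical input shape mirroring the options schema above.
type ParsedMessage = {
  role?: 'user' | 'assistant' | 'Dialogue'
  content?: string
  dialogueVariableId?: string
  startsBy?: 'user' | 'assistant'
}

// Sketch only: flattens the block's message list into Anthropic message params,
// expanding a "Dialogue" item into alternating user/assistant turns taken from
// the referenced dialogue variable.
export const parseChatMessagesSketch = ({
  messages,
  getVariable,
}: {
  messages: ParsedMessage[]
  getVariable: (id: string) => string | (string | null)[] | null | undefined
}): Anthropic.Messages.MessageParam[] =>
  messages.flatMap((message) => {
    if (message.role === 'Dialogue') {
      if (!message.dialogueVariableId) return []
      const dialogue = getVariable(message.dialogueVariableId)
      const turns = Array.isArray(dialogue) ? dialogue : [dialogue]
      const startsBy = message.startsBy ?? 'user'
      return turns.flatMap((content, index) => {
        if (typeof content !== 'string') return []
        // Even turns take the starting role, odd turns take the other one.
        const role =
          index % 2 === 0
            ? startsBy
            : startsBy === 'user'
            ? 'assistant'
            : 'user'
        return [{ role, content }]
      })
    }
    if (!message.role || !message.content) return []
    return [{ role: message.role, content: message.content }]
  })
```

The dialogue expansion is what makes the block usable in a loop: a single saved conversation variable can be replayed as a full message history instead of wiring each turn by hand.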