
Add Together AI block (#1304)

Closes #1253
Author: Baptiste Arnaud (committed via GitHub)
Date: 2024-03-01 15:33:22 +01:00
Parent: 275ba1b1c4
Commit: 648ec08a10
27 changed files with 623 additions and 361 deletions

View File

@@ -61,6 +61,8 @@ export const ZodFieldLayout = ({
const innerSchema = getZodInnerSchema(schema)
const layout = innerSchema._def.layout
if (layout?.isHidden) return null
switch (innerSchema._def.typeName) {
case 'ZodObject':
return (

View File

@@ -9,6 +9,7 @@ import {
Text,
} from '@chakra-ui/react'
import { z } from '@typebot.io/forge/zod'
import React from 'react'
import { ZodLayoutMetadata } from '@typebot.io/forge/zod'
import { ReactNode } from 'react'
import { ZodTypeAny } from 'zod'
@@ -32,7 +33,9 @@ export const ZodObjectLayout = ({
blockDef?: ForgedBlockDefinition
blockOptions?: ForgedBlock['options']
onDataChange: (value: any) => void
}) => {
}): ReactNode[] => {
const layout = getZodInnerSchema(schema)._def.layout
if (layout?.isHidden) return []
return Object.keys(schema.shape).reduce<{
nodes: ReactNode[]
accordionsCreated: string[]
@@ -43,6 +46,8 @@ export const ZodObjectLayout = ({
const layout = keySchema._def.layout as
| ZodLayoutMetadata<ZodTypeAny>
| undefined
if (layout?.isHidden) return nodes
if (
layout &&
layout.accordion &&

View File

@@ -18792,7 +18792,8 @@
"qr-code",
"dify-ai",
"mistral",
"elevenlabs"
"elevenlabs",
"together-ai"
]
},
"options": {}

View File

@@ -9413,7 +9413,8 @@
"qr-code",
"dify-ai",
"mistral",
"elevenlabs"
"elevenlabs",
"together-ai"
]
},
"options": {}

View File

@@ -1,167 +1,24 @@
import { option, createAction } from '@typebot.io/forge'
import { createAction } from '@typebot.io/forge'
import OpenAI, { ClientOptions } from 'openai'
import { defaultOpenAIOptions, maxToolCalls } from '../constants'
import { OpenAIStream, ToolCallPayload } from 'ai'
import { parseChatCompletionMessages } from '../helpers/parseChatCompletionMessages'
import { isDefined } from '@typebot.io/lib'
import { defaultOpenAIOptions } from '../constants'
import { auth } from '../auth'
import { baseOptions } from '../baseOptions'
import {
ChatCompletionMessage,
ChatCompletionTool,
} from 'openai/resources/chat/completions'
import { parseToolParameters } from '../helpers/parseToolParameters'
import { executeFunction } from '@typebot.io/variables/executeFunction'
const nativeMessageContentSchema = {
content: option.string.layout({
inputType: 'textarea',
placeholder: 'Content',
}),
}
const systemMessageItemSchema = option
.object({
role: option.literal('system'),
})
.extend(nativeMessageContentSchema)
const userMessageItemSchema = option
.object({
role: option.literal('user'),
})
.extend(nativeMessageContentSchema)
const assistantMessageItemSchema = option
.object({
role: option.literal('assistant'),
})
.extend(nativeMessageContentSchema)
const parameterBase = {
name: option.string.layout({
label: 'Name',
placeholder: 'myVariable',
withVariableButton: false,
}),
description: option.string.layout({
label: 'Description',
withVariableButton: false,
}),
required: option.boolean.layout({
label: 'Is required?',
}),
}
export const toolParametersSchema = option
.array(
option.discriminatedUnion('type', [
option
.object({
type: option.literal('string'),
})
.extend(parameterBase),
option
.object({
type: option.literal('number'),
})
.extend(parameterBase),
option
.object({
type: option.literal('boolean'),
})
.extend(parameterBase),
option
.object({
type: option.literal('enum'),
values: option
.array(option.string)
.layout({ itemLabel: 'possible value' }),
})
.extend(parameterBase),
])
)
.layout({
accordion: 'Parameters',
itemLabel: 'parameter',
})
const functionToolItemSchema = option.object({
type: option.literal('function'),
name: option.string.layout({
label: 'Name',
placeholder: 'myFunctionName',
withVariableButton: false,
}),
description: option.string.layout({
label: 'Description',
placeholder: 'A brief description of what this function does.',
withVariableButton: false,
}),
parameters: toolParametersSchema,
code: option.string.layout({
inputType: 'code',
label: 'Code',
lang: 'javascript',
moreInfoTooltip:
'A javascript code snippet that can use the defined parameters. It should return a value.',
withVariableButton: false,
}),
})
const dialogueMessageItemSchema = option.object({
role: option.literal('Dialogue'),
dialogueVariableId: option.string.layout({
inputType: 'variableDropdown',
placeholder: 'Dialogue variable',
}),
startsBy: option.enum(['user', 'assistant']).layout({
label: 'starts by',
direction: 'row',
defaultValue: 'user',
}),
})
export const options = option.object({
model: option.string.layout({
placeholder: 'Select a model',
defaultValue: defaultOpenAIOptions.model,
fetcher: 'fetchModels',
}),
messages: option
.array(
option.discriminatedUnion('role', [
systemMessageItemSchema,
userMessageItemSchema,
assistantMessageItemSchema,
dialogueMessageItemSchema,
])
)
.layout({ accordion: 'Messages', itemLabel: 'message', isOrdered: true }),
tools: option
.array(option.discriminatedUnion('type', [functionToolItemSchema]))
.layout({ accordion: 'Tools', itemLabel: 'tool' }),
temperature: option.number.layout({
accordion: 'Advanced settings',
label: 'Temperature',
direction: 'row',
defaultValue: defaultOpenAIOptions.temperature,
}),
responseMapping: option
.saveResponseArray(['Message content', 'Total tokens'] as const)
.layout({
accordion: 'Save response',
}),
})
import { parseChatCompletionOptions } from '../shared/parseChatCompletionOptions'
import { getChatCompletionSetVarIds } from '../shared/getChatCompletionSetVarIds'
import { runChatCompletion } from '../shared/runChatCompletion'
import { runChatCompletionStream } from '../shared/runChatCompletionStream'
import { getChatCompletionStreamVarId } from '../shared/getChatCompletionStreamVarId'
export const createChatCompletion = createAction({
name: 'Create chat completion',
auth,
baseOptions,
options,
getSetVariableIds: (options) =>
options.responseMapping?.map((res) => res.variableId).filter(isDefined) ??
[],
options: parseChatCompletionOptions({
defaultModel: defaultOpenAIOptions.model,
defaultTemperature: defaultOpenAIOptions.temperature,
modelFetchId: 'fetchModels',
}),
getSetVariableIds: getChatCompletionSetVarIds,
fetchers: [
{
id: 'fetchModels',
@@ -195,192 +52,24 @@ export const createChatCompletion = createAction({
},
],
run: {
server: async ({ credentials: { apiKey }, options, variables }) => {
const config = {
apiKey,
baseURL: options.baseUrl,
defaultHeaders: {
'api-key': apiKey,
server: (params) =>
runChatCompletion({
...params,
config: {
baseUrl: defaultOpenAIOptions.baseUrl,
defaultModel: defaultOpenAIOptions.model,
},
defaultQuery: options.apiVersion
? {
'api-version': options.apiVersion,
}
: undefined,
} satisfies ClientOptions
const openai = new OpenAI(config)
const tools = options.tools
?.filter((t) => t.name && t.parameters)
.map((t) => ({
type: 'function',
function: {
name: t.name as string,
description: t.description,
parameters: parseToolParameters(t.parameters!),
},
})) satisfies ChatCompletionTool[] | undefined
const messages = parseChatCompletionMessages({ options, variables })
const body = {
model: options.model ?? defaultOpenAIOptions.model,
temperature: options.temperature
? Number(options.temperature)
: undefined,
messages,
tools: (tools?.length ?? 0) > 0 ? tools : undefined,
}
let totalTokens = 0
let message: ChatCompletionMessage
for (let i = 0; i < maxToolCalls; i++) {
const response = await openai.chat.completions.create(body)
message = response.choices[0].message
totalTokens += response.usage?.total_tokens || 0
if (!message.tool_calls) break
messages.push(message)
for (const toolCall of message.tool_calls) {
const name = toolCall.function?.name
if (!name) continue
const toolDefinition = options.tools?.find((t) => t.name === name)
if (!toolDefinition?.code || !toolDefinition.parameters) {
messages.push({
tool_call_id: toolCall.id,
role: 'tool',
content: 'Function not found',
})
continue
}
const toolParams = Object.fromEntries(
toolDefinition.parameters.map(({ name }) => [name, null])
)
const toolArgs = toolCall.function?.arguments
? JSON.parse(toolCall.function?.arguments)
: undefined
if (!toolArgs) continue
const { output, newVariables } = await executeFunction({
variables: variables.list(),
args: { ...toolParams, ...toolArgs },
body: toolDefinition.code,
})
newVariables?.forEach((v) => variables.set(v.id, v.value))
messages.push({
tool_call_id: toolCall.id,
role: 'tool',
content: output,
})
}
}
options.responseMapping?.forEach((mapping) => {
if (!mapping.variableId) return
if (!mapping.item || mapping.item === 'Message content')
variables.set(mapping.variableId, message.content)
if (mapping.item === 'Total tokens')
variables.set(mapping.variableId, totalTokens)
})
},
}),
stream: {
getStreamVariableId: (options) =>
options.responseMapping?.find(
(res) => res.item === 'Message content' || !res.item
)?.variableId,
run: async ({ credentials: { apiKey }, options, variables }) => {
const config = {
apiKey,
baseURL: options.baseUrl,
defaultHeaders: {
'api-key': apiKey,
getStreamVariableId: getChatCompletionStreamVarId,
run: (params) =>
runChatCompletionStream({
...params,
config: {
baseUrl: defaultOpenAIOptions.baseUrl,
defaultModel: defaultOpenAIOptions.model,
},
defaultQuery: options.apiVersion
? {
'api-version': options.apiVersion,
}
: undefined,
} satisfies ClientOptions
const openai = new OpenAI(config)
const tools = options.tools
?.filter((t) => t.name && t.parameters)
.map((t) => ({
type: 'function',
function: {
name: t.name as string,
description: t.description,
parameters: parseToolParameters(t.parameters!),
},
})) satisfies ChatCompletionTool[] | undefined
const messages = parseChatCompletionMessages({ options, variables })
const response = await openai.chat.completions.create({
model: options.model ?? defaultOpenAIOptions.model,
temperature: options.temperature
? Number(options.temperature)
: undefined,
stream: true,
messages,
tools: (tools?.length ?? 0) > 0 ? tools : undefined,
})
return OpenAIStream(response, {
experimental_onToolCall: async (
call: ToolCallPayload,
appendToolCallMessage
) => {
for (const toolCall of call.tools) {
const name = toolCall.func?.name
if (!name) continue
const toolDefinition = options.tools?.find((t) => t.name === name)
if (!toolDefinition?.code || !toolDefinition.parameters) {
messages.push({
tool_call_id: toolCall.id,
role: 'tool',
content: 'Function not found',
})
continue
}
const { output } = await executeFunction({
variables: variables.list(),
args:
typeof toolCall.func.arguments === 'string'
? JSON.parse(toolCall.func.arguments)
: toolCall.func.arguments,
body: toolDefinition.code,
})
// TO-DO: enable once we're out of edge runtime.
// newVariables?.forEach((v) => variables.set(v.id, v.value))
const newMessages = appendToolCallMessage({
tool_call_id: toolCall.id,
function_name: toolCall.func.name,
tool_call_result: output,
})
return openai.chat.completions.create({
messages: [
...messages,
...newMessages,
] as OpenAI.Chat.Completions.ChatCompletionMessageParam[],
model: options.model ?? defaultOpenAIOptions.model,
stream: true,
tools,
})
}
},
})
},
}),
},
},
})

View File

@@ -8,6 +8,7 @@ export const auth = createAuth({
isRequired: true,
label: 'API key',
placeholder: 'sk-...',
inputType: 'password',
helperText:
'You can generate an API key [here](https://platform.openai.com/account/api-keys)',
withVariableButton: false,

View File

@@ -1,14 +1,19 @@
import { option } from '@typebot.io/forge'
import { defaultOpenAIOptions } from './constants'
export const baseOptions = option.object({
baseUrl: option.string.layout({
accordion: 'Customize provider',
label: 'Base URL',
defaultValue: defaultOpenAIOptions.baseUrl,
}),
apiVersion: option.string.layout({
accordion: 'Customize provider',
label: 'API version',
}),
})
export const baseOptions = option
.object({
baseUrl: option.string.layout({
accordion: 'Customize provider',
label: 'Base URL',
defaultValue: defaultOpenAIOptions.baseUrl,
}),
apiVersion: option.string.layout({
accordion: 'Customize provider',
label: 'API version',
}),
})
.layout({
isHidden: true,
})
.describe('Deprecated, use other dedicated OpenAI compatible blocks instead')
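The new isHidden layout flag (consumed by ZodFieldLayout and ZodObjectLayout above, and declared on ZodLayoutMetadata later in this commit) lets a schema like baseOptions stay in place for existing bots while the editor stops rendering it. A minimal sketch, not part of the commit; the field names below are illustrative:

import { option } from '@typebot.io/forge'

// Editor's sketch only: a hidden field (or a whole hidden object, as with
// baseOptions) keeps validating saved data but is skipped by the forge settings UI.
export const sketchOptions = option.object({
  model: option.string.layout({ label: 'Model' }),
  legacyBaseUrl: option.string.layout({ isHidden: true }), // hypothetical deprecated field
})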

View File

@@ -1,14 +1,13 @@
import type { OpenAI } from 'openai'
import { options as createChatCompletionOption } from '../actions/createChatCompletion'
import { ReadOnlyVariableStore } from '@typebot.io/forge'
import { isNotEmpty } from '@typebot.io/lib'
import { z } from '@typebot.io/forge/zod'
import { ChatCompletionOptions } from '../shared/parseChatCompletionOptions'
export const parseChatCompletionMessages = ({
options: { messages },
variables,
}: {
options: Pick<z.infer<typeof createChatCompletionOption>, 'messages'>
options: ChatCompletionOptions
variables: ReadOnlyVariableStore
}): OpenAI.Chat.ChatCompletionMessageParam[] => {
const parsedMessages = messages

View File

@@ -1,5 +1,5 @@
import type { OpenAI } from 'openai'
import { toolParametersSchema } from '../actions/createChatCompletion'
import { toolParametersSchema } from '../shared/parseChatCompletionOptions'
import { z } from '@typebot.io/forge/zod'
export const parseToolParameters = (

View File

@@ -0,0 +1,5 @@
import { isDefined } from '@typebot.io/lib'
import { ChatCompletionOptions } from './parseChatCompletionOptions'
export const getChatCompletionSetVarIds = (options: ChatCompletionOptions) =>
options.responseMapping?.map((res) => res.variableId).filter(isDefined) ?? []

View File

@@ -0,0 +1,6 @@
import { ChatCompletionOptions } from './parseChatCompletionOptions'
export const getChatCompletionStreamVarId = (options: ChatCompletionOptions) =>
options.responseMapping?.find(
(res) => res.item === 'Message content' || !res.item
)?.variableId

View File

@@ -0,0 +1,164 @@
import { option } from '@typebot.io/forge'
import { z } from '@typebot.io/forge/zod'
import { baseOptions } from '../baseOptions'
const nativeMessageContentSchema = {
content: option.string.layout({
inputType: 'textarea',
placeholder: 'Content',
}),
}
const systemMessageItemSchema = option
.object({
role: option.literal('system'),
})
.extend(nativeMessageContentSchema)
const userMessageItemSchema = option
.object({
role: option.literal('user'),
})
.extend(nativeMessageContentSchema)
const assistantMessageItemSchema = option
.object({
role: option.literal('assistant'),
})
.extend(nativeMessageContentSchema)
const parameterBase = {
name: option.string.layout({
label: 'Name',
placeholder: 'myVariable',
withVariableButton: false,
}),
description: option.string.layout({
label: 'Description',
withVariableButton: false,
}),
required: option.boolean.layout({
label: 'Is required?',
}),
}
export const toolParametersSchema = option
.array(
option.discriminatedUnion('type', [
option
.object({
type: option.literal('string'),
})
.extend(parameterBase),
option
.object({
type: option.literal('number'),
})
.extend(parameterBase),
option
.object({
type: option.literal('boolean'),
})
.extend(parameterBase),
option
.object({
type: option.literal('enum'),
values: option
.array(option.string)
.layout({ itemLabel: 'possible value' }),
})
.extend(parameterBase),
])
)
.layout({
accordion: 'Parameters',
itemLabel: 'parameter',
})
const functionToolItemSchema = option.object({
type: option.literal('function'),
name: option.string.layout({
label: 'Name',
placeholder: 'myFunctionName',
withVariableButton: false,
}),
description: option.string.layout({
label: 'Description',
placeholder: 'A brief description of what this function does.',
withVariableButton: false,
}),
parameters: toolParametersSchema,
code: option.string.layout({
inputType: 'code',
label: 'Code',
lang: 'javascript',
moreInfoTooltip:
'A javascript code snippet that can use the defined parameters. It should return a value.',
withVariableButton: false,
}),
})
const dialogueMessageItemSchema = option.object({
role: option.literal('Dialogue'),
dialogueVariableId: option.string.layout({
inputType: 'variableDropdown',
placeholder: 'Dialogue variable',
}),
startsBy: option.enum(['user', 'assistant']).layout({
label: 'starts by',
direction: 'row',
defaultValue: 'user',
}),
})
type Props = {
defaultModel?: string
defaultTemperature?: number
modelFetchId?: string
modelHelperText?: string
}
export const parseChatCompletionOptions = ({
defaultModel,
defaultTemperature,
modelFetchId,
modelHelperText,
}: Props = {}) =>
option.object({
model: option.string.layout({
placeholder: modelFetchId ? 'Select a model' : undefined,
label: modelFetchId ? undefined : 'Model',
defaultValue: defaultModel,
fetcher: modelFetchId,
helperText: modelHelperText,
}),
messages: option
.array(
option.discriminatedUnion('role', [
systemMessageItemSchema,
userMessageItemSchema,
assistantMessageItemSchema,
dialogueMessageItemSchema,
])
)
.layout({ accordion: 'Messages', itemLabel: 'message', isOrdered: true }),
tools: option
.array(option.discriminatedUnion('type', [functionToolItemSchema]))
.layout({ accordion: 'Tools', itemLabel: 'tool' }),
temperature: option.number.layout({
accordion: 'Advanced settings',
label: 'Temperature',
direction: 'row',
defaultValue: defaultTemperature,
}),
responseMapping: option
.saveResponseArray(['Message content', 'Total tokens'] as const)
.layout({
accordion: 'Save response',
}),
})
export type ChatCompletionOptions = z.infer<
ReturnType<typeof parseChatCompletionOptions>
> &
z.infer<typeof baseOptions>
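As a rough illustration (not from the commit), the factory's parameters decide whether the model field renders as a fetcher-backed dropdown or a plain labelled text input: the OpenAI block passes defaults plus a fetcher id, while the Together block only passes helper text. The default values below are placeholders, not the real defaultOpenAIOptions:

import { parseChatCompletionOptions } from './parseChatCompletionOptions'

// With a fetcher id: 'Select a model' placeholder, models come from the block's fetcher.
const withDropdown = parseChatCompletionOptions({
  defaultModel: 'gpt-3.5-turbo', // assumed placeholder; see defaultOpenAIOptions in constants.ts
  defaultTemperature: 1, // assumed placeholder
  modelFetchId: 'fetchModels',
})

// Without a fetcher id: a plain 'Model' text input plus helper text.
const asTextInput = parseChatCompletionOptions({
  modelHelperText: 'Copy the model string from your provider docs.',
})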

View File

@@ -0,0 +1,125 @@
import OpenAI, { ClientOptions } from 'openai'
import { parseToolParameters } from '../helpers/parseToolParameters'
import { executeFunction } from '@typebot.io/variables/executeFunction'
import { ChatCompletionTool, ChatCompletionMessage } from 'openai/resources'
import { maxToolCalls } from '../constants'
import { parseChatCompletionMessages } from '../helpers/parseChatCompletionMessages'
import { ChatCompletionOptions } from './parseChatCompletionOptions'
import { LogsStore, VariableStore } from '@typebot.io/forge/types'
type OpenAIConfig = {
baseUrl: string
defaultModel?: string
}
type Props = {
credentials: {
apiKey?: string
}
options: ChatCompletionOptions
variables: VariableStore
logs: LogsStore
config: OpenAIConfig
}
export const runChatCompletion = async ({
credentials: { apiKey },
options,
variables,
config: openAIConfig,
logs,
}: Props) => {
const model = options.model?.trim() ?? openAIConfig.defaultModel
if (!model) return logs.add('No model provided')
const config = {
apiKey,
baseURL: openAIConfig.baseUrl ?? options.baseUrl,
defaultHeaders: options.baseUrl
? {
'api-key': apiKey,
}
: undefined,
defaultQuery: options.apiVersion
? {
'api-version': options.apiVersion,
}
: undefined,
} satisfies ClientOptions
const openai = new OpenAI(config)
const tools = options.tools
?.filter((t) => t.name && t.parameters)
.map((t) => ({
type: 'function',
function: {
name: t.name as string,
description: t.description,
parameters: parseToolParameters(t.parameters!),
},
})) satisfies ChatCompletionTool[] | undefined
const messages = parseChatCompletionMessages({ options, variables })
const body = {
model,
temperature: options.temperature ? Number(options.temperature) : undefined,
messages,
tools: (tools?.length ?? 0) > 0 ? tools : undefined,
}
let totalTokens = 0
let message: ChatCompletionMessage
for (let i = 0; i < maxToolCalls; i++) {
const response = await openai.chat.completions.create(body)
message = response.choices[0].message
totalTokens += response.usage?.total_tokens || 0
if (!message.tool_calls) break
messages.push(message)
for (const toolCall of message.tool_calls) {
const name = toolCall.function?.name
if (!name) continue
const toolDefinition = options.tools?.find((t) => t.name === name)
if (!toolDefinition?.code || !toolDefinition.parameters) {
messages.push({
tool_call_id: toolCall.id,
role: 'tool',
content: 'Function not found',
})
continue
}
const toolParams = Object.fromEntries(
toolDefinition.parameters.map(({ name }) => [name, null])
)
const toolArgs = toolCall.function?.arguments
? JSON.parse(toolCall.function?.arguments)
: undefined
if (!toolArgs) continue
const { output, newVariables } = await executeFunction({
variables: variables.list(),
args: { ...toolParams, ...toolArgs },
body: toolDefinition.code,
})
newVariables?.forEach((v) => variables.set(v.id, v.value))
messages.push({
tool_call_id: toolCall.id,
role: 'tool',
content: output,
})
}
}
options.responseMapping?.forEach((mapping) => {
if (!mapping.variableId) return
if (!mapping.item || mapping.item === 'Message content')
variables.set(mapping.variableId, message.content)
if (mapping.item === 'Total tokens')
variables.set(mapping.variableId, totalTokens)
})
}
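For reference, an options.tools entry that the tool-call loop above would execute follows functionToolItemSchema: a name, an optional description, typed parameters, and a JavaScript snippet run through executeFunction. The values below are made up for illustration, not part of the commit:

// Editor's sketch of a tool definition the loop would pick up.
const exampleTools = [
  {
    type: 'function' as const,
    name: 'getWeather',
    description: 'Returns the current temperature for a city.',
    parameters: [
      { type: 'string' as const, name: 'city', description: 'City name', required: true },
    ],
    code: 'return city === "Paris" ? "18°C" : "unknown"',
  },
]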

View File

@@ -0,0 +1,108 @@
import { LogsStore, ReadOnlyVariableStore } from '@typebot.io/forge/types'
import { ChatCompletionOptions } from './parseChatCompletionOptions'
import { executeFunction } from '@typebot.io/variables/executeFunction'
import { OpenAIStream, ToolCallPayload } from 'ai'
import OpenAI, { ClientOptions } from 'openai'
import { ChatCompletionTool } from 'openai/resources'
import { parseChatCompletionMessages } from '../helpers/parseChatCompletionMessages'
import { parseToolParameters } from '../helpers/parseToolParameters'
type Props = {
credentials: { apiKey?: string }
options: ChatCompletionOptions
variables: ReadOnlyVariableStore
config: { baseUrl: string; defaultModel?: string }
}
export const runChatCompletionStream = async ({
credentials: { apiKey },
options,
variables,
config: openAIConfig,
}: Props) => {
const model = options.model?.trim() ?? openAIConfig.defaultModel
if (!model) return
const config = {
apiKey,
baseURL: openAIConfig.baseUrl ?? options.baseUrl,
defaultHeaders: {
'api-key': apiKey,
},
defaultQuery: options.apiVersion
? {
'api-version': options.apiVersion,
}
: undefined,
} satisfies ClientOptions
const openai = new OpenAI(config)
const tools = options.tools
?.filter((t) => t.name && t.parameters)
.map((t) => ({
type: 'function',
function: {
name: t.name as string,
description: t.description,
parameters: parseToolParameters(t.parameters!),
},
})) satisfies ChatCompletionTool[] | undefined
const messages = parseChatCompletionMessages({ options, variables })
const response = await openai.chat.completions.create({
model,
temperature: options.temperature ? Number(options.temperature) : undefined,
stream: true,
messages,
tools: (tools?.length ?? 0) > 0 ? tools : undefined,
})
return OpenAIStream(response, {
experimental_onToolCall: async (
call: ToolCallPayload,
appendToolCallMessage
) => {
for (const toolCall of call.tools) {
const name = toolCall.func?.name
if (!name) continue
const toolDefinition = options.tools?.find((t) => t.name === name)
if (!toolDefinition?.code || !toolDefinition.parameters) {
messages.push({
tool_call_id: toolCall.id,
role: 'tool',
content: 'Function not found',
})
continue
}
const { output } = await executeFunction({
variables: variables.list(),
args:
typeof toolCall.func.arguments === 'string'
? JSON.parse(toolCall.func.arguments)
: toolCall.func.arguments,
body: toolDefinition.code,
})
// TO-DO: enable once we're out of edge runtime.
// newVariables?.forEach((v) => variables.set(v.id, v.value))
const newMessages = appendToolCallMessage({
tool_call_id: toolCall.id,
function_name: toolCall.func.name,
tool_call_result: output,
})
return openai.chat.completions.create({
messages: [
...messages,
...newMessages,
] as OpenAI.Chat.Completions.ChatCompletionMessageParam[],
model,
stream: true,
tools,
})
}
},
})
}
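runChatCompletionStream returns the ReadableStream produced by OpenAIStream (or undefined when no model is set). A hedged sketch of how a caller could wrap that stream in an HTTP response using the Vercel AI SDK's StreamingTextResponse; the handler shape is an assumption, not Typebot's actual wiring:

import { StreamingTextResponse } from 'ai'
import { runChatCompletionStream } from './runChatCompletionStream'

// Sketch only: `params` stands in for whatever the runtime already passes to a
// block's stream runner (credentials, parsed options, a read-only variable store).
export const toStreamingResponse = async (
  params: Parameters<typeof runChatCompletionStream>[0]
) => {
  const stream = await runChatCompletionStream(params)
  if (!stream) return new Response('No model configured', { status: 400 })
  return new StreamingTextResponse(stream)
}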

View File

@@ -0,0 +1,33 @@
import { createAction } from '@typebot.io/forge'
import { auth } from '../auth'
import { parseChatCompletionOptions } from '@typebot.io/openai-block/shared/parseChatCompletionOptions'
import { getChatCompletionSetVarIds } from '@typebot.io/openai-block/shared/getChatCompletionSetVarIds'
import { getChatCompletionStreamVarId } from '@typebot.io/openai-block/shared/getChatCompletionStreamVarId'
import { runChatCompletion } from '@typebot.io/openai-block/shared/runChatCompletion'
import { runChatCompletionStream } from '@typebot.io/openai-block/shared/runChatCompletionStream'
import { defaultTogetherOptions } from '../constants'
export const createChatCompletion = createAction({
name: 'Create chat completion',
auth,
options: parseChatCompletionOptions({
modelHelperText:
'You can find the list of all the models available [here](https://docs.together.ai/docs/inference-models#chat-models). Copy the model string for API.',
}),
getSetVariableIds: getChatCompletionSetVarIds,
run: {
server: (params) =>
runChatCompletion({
...params,
config: { baseUrl: defaultTogetherOptions.baseUrl },
}),
stream: {
getStreamVariableId: getChatCompletionStreamVarId,
run: (params) =>
runChatCompletionStream({
...params,
config: { baseUrl: defaultTogetherOptions.baseUrl },
}),
},
},
})

View File

@@ -0,0 +1,15 @@
import { option, AuthDefinition } from '@typebot.io/forge'
export const auth = {
type: 'encryptedCredentials',
name: 'Together account',
schema: option.object({
apiKey: option.string.layout({
label: 'API key',
isRequired: true,
inputType: 'password',
helperText:
'You can get your API key [here](https://api.together.xyz/settings/api-keys).',
}),
}),
} satisfies AuthDefinition

View File

@@ -0,0 +1,3 @@
export const defaultTogetherOptions = {
baseUrl: 'https://api.together.xyz/v1',
} as const
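Because Together exposes an OpenAI-compatible endpoint, this base URL is all the shared runners need. Outside of Typebot the same idea looks like the sketch below (the env var name and model string are illustrative, not part of the commit):

import OpenAI from 'openai'
import { defaultTogetherOptions } from './constants'

// Editor's sketch: the stock OpenAI SDK pointed at Together's API.
const client = new OpenAI({
  apiKey: process.env.TOGETHER_API_KEY, // hypothetical env var
  baseURL: defaultTogetherOptions.baseUrl, // https://api.together.xyz/v1
})

const reply = await client.chat.completions.create({
  model: 'mistralai/Mixtral-8x7B-Instruct-v0.1', // example model from Together's catalog
  messages: [{ role: 'user', content: 'Hello!' }],
})
console.log(reply.choices[0].message.content)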

View File

@@ -0,0 +1,14 @@
import { createBlock } from '@typebot.io/forge'
import { TogetherAiLogo } from './logo'
import { auth } from './auth'
import { createChatCompletion } from './actions/createChatCompletion'
export const togetherAi = createBlock({
id: 'together-ai',
name: 'Together',
fullName: 'Together AI',
tags: ['ai', 'openai', 'chat', 'completion'],
LightLogo: TogetherAiLogo,
auth,
actions: [createChatCompletion],
})

View File

@@ -0,0 +1,18 @@
import React from 'react'
export const TogetherAiLogo = (props: React.SVGProps<SVGSVGElement>) => (
<svg viewBox="0 0 32 32" {...props}>
<g clipPath="url(#clip0_542_18748)">
<rect width="32" height="32" rx="5.64706" fill="#F1EFED" />
<circle cx="22.8233" cy="9.64706" r="5.64706" fill="#D3D1D1" />
<circle cx="22.8233" cy="22.8238" r="5.64706" fill="#D3D1D1" />
<circle cx="9.64706" cy="22.8238" r="5.64706" fill="#D3D1D1" />
<circle cx="9.64706" cy="9.64706" r="5.64706" fill="#0F6FFF" />
</g>
<defs>
<clipPath id="clip0_542_18748">
<rect width="32" height="32" fill="white" />
</clipPath>
</defs>
</svg>
)

View File

@@ -0,0 +1,17 @@
{
"name": "@typebot.io/together-ai-block",
"version": "1.0.0",
"description": "",
"main": "index.ts",
"keywords": [],
"license": "ISC",
"devDependencies": {
"@typebot.io/forge": "workspace:*",
"@typebot.io/lib": "workspace:*",
"@typebot.io/tsconfig": "workspace:*",
"@typebot.io/variables": "workspace:*",
"@typebot.io/openai-block": "workspace:*",
"@types/react": "18.2.15",
"typescript": "5.3.2"
}
}

View File

@@ -0,0 +1,10 @@
{
"extends": "@typebot.io/tsconfig/base.json",
"include": ["**/*.ts", "**/*.tsx"],
"exclude": ["node_modules"],
"compilerOptions": {
"lib": ["ESNext", "DOM"],
"noEmit": true,
"jsx": "react"
}
}

View File

@@ -10,12 +10,21 @@ export const createAuth = <A extends AuthDefinition>(authDefinition: A) =
authDefinition
export const createBlock = <
I extends string,
Id extends string,
A extends AuthDefinition,
O extends z.ZodObject<any>
>(
blockDefinition: BlockDefinition<I, A, O>
): BlockDefinition<I, A, O> => blockDefinition
blockDefinition: BlockDefinition<Id, A, O>
): BlockDefinition<Id, A, O> => blockDefinition
export const createVersionedBlock = <
Blocks extends Record<
string,
BlockDefinition<string, AuthDefinition, z.ZodObject<any>>
>
>(
blocks: Blocks
): Blocks => blocks
export const createAction = <
A extends AuthDefinition,
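createVersionedBlock is a typed identity over a record of block definitions; the commit does not use it yet, so the sketch below only guesses at the intended shape (the version keys, the re-export from '@typebot.io/forge', and the second entry are assumptions):

import { createVersionedBlock } from '@typebot.io/forge'
import { togetherAi } from '@typebot.io/together-ai-block'

// Hypothetical: two versions of the same block registered under explicit keys.
export const togetherAiVersions = createVersionedBlock({
  v1: togetherAi,
  v2: togetherAi, // a future revision of the block would replace this entry
})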

View File

@@ -20,6 +20,7 @@ export interface ZodLayoutMetadata<
itemLabel?: T extends OptionableZodType<ZodArray<any>> ? string : never
isOrdered?: T extends OptionableZodType<ZodArray<any>> ? boolean : never
moreInfoTooltip?: string
isHidden?: boolean
}
declare module 'zod' {

View File

@@ -8,4 +8,5 @@ export const enabledBlocks = [
'dify-ai',
'mistral',
'elevenlabs',
'together-ai',
] as const

View File

@@ -1,4 +1,5 @@
// Do not edit this file manually
import { togetherAi } from '@typebot.io/together-ai-block'
import { elevenlabs } from '@typebot.io/elevenlabs-block'
import { difyAi } from '@typebot.io/dify-ai-block'
import { mistral } from '@typebot.io/mistral-block'
@@ -24,6 +25,7 @@ export const forgedBlocks = [
difyAi,
mistral,
elevenlabs,
togetherAi,
] as BlockDefinition<(typeof enabledBlocks)[number], any, any>[]
export type ForgedBlockDefinition = (typeof forgedBlocks)[number]

View File

@@ -16,6 +16,7 @@
"@typebot.io/qrcode-block": "workspace:*",
"@typebot.io/dify-ai-block": "workspace:*",
"@typebot.io/mistral-block": "workspace:*",
"@typebot.io/elevenlabs-block": "workspace:*"
"@typebot.io/elevenlabs-block": "workspace:*",
"@typebot.io/together-ai-block": "workspace:*"
}
}

pnpm-lock.yaml generated
View File

@@ -1321,6 +1321,30 @@ importers:
specifier: 5.3.2
version: 5.3.2
packages/forge/blocks/togetherAi:
devDependencies:
'@typebot.io/forge':
specifier: workspace:*
version: link:../../core
'@typebot.io/lib':
specifier: workspace:*
version: link:../../../lib
'@typebot.io/openai-block':
specifier: workspace:*
version: link:../openai
'@typebot.io/tsconfig':
specifier: workspace:*
version: link:../../../tsconfig
'@typebot.io/variables':
specifier: workspace:*
version: link:../../../variables
'@types/react':
specifier: 18.2.15
version: 18.2.15
typescript:
specifier: 5.3.2
version: 5.3.2
packages/forge/blocks/zemanticAi:
devDependencies:
'@typebot.io/forge':
@@ -1404,6 +1428,9 @@ importers:
'@typebot.io/qrcode-block':
specifier: workspace:*
version: link:../blocks/qrcode
'@typebot.io/together-ai-block':
specifier: workspace:*
version: link:../blocks/togetherAi
'@typebot.io/zemantic-ai-block':
specifier: workspace:*
version: link:../blocks/zemanticAi