
(openai) Add custom provider and custom models

Closes #532
Baptiste Arnaud
2023-09-01 16:19:59 +02:00
parent 436fa251f8
commit 27a5f4eb74
21 changed files with 684 additions and 278 deletions
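Most of the diff is generated output (openapi.json, pnpm-lock.yaml); the functional change is that the OpenAI block's options gain an optional baseUrl (defaulting to https://api.openai.com/v1) and apiVersion, which both the builder and the viewer forward to openai-edge. A minimal sketch of that wiring, mirroring the Configuration blocks added below (the Azure-style endpoint, deployment name, and environment variable are placeholder assumptions, not values from this repo):

import { Configuration, OpenAIApi, ResponseTypes } from 'openai-edge'

// Placeholder custom provider (e.g. an Azure OpenAI deployment); the real code
// paths read options.baseUrl / options.apiVersion from the block instead.
const config = new Configuration({
  apiKey: process.env.OPENAI_API_KEY as string,
  basePath: 'https://my-resource.openai.azure.com/openai/deployments/my-deployment',
  baseOptions: {
    // Azure-style providers authenticate with an api-key header
    headers: { 'api-key': process.env.OPENAI_API_KEY as string },
  },
  // Only set when the provider expects an api-version query param
  defaultQueryParams: new URLSearchParams({ 'api-version': '2023-07-01-preview' }),
})

const openai = new OpenAIApi(config)
const response = await openai.listModels()
const models = ((await response.json()) as ResponseTypes['listModels']).data.map(
  (model) => model.id
)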

View File

@ -75,6 +75,7 @@
"nextjs-cors": "^2.1.2",
"nodemailer": "6.9.3",
"nprogress": "0.2.0",
"openai-edge": "1.2.2",
"papaparse": "5.4.1",
"posthog-js": "^1.77.1",
"posthog-node": "3.1.1",

View File

@ -24,7 +24,7 @@ import { MoreInfoTooltip } from '../MoreInfoTooltip'
import { env } from '@typebot.io/env'
type Props = {
items: string[]
items: string[] | undefined
value?: string
defaultValue?: string
debounceTimeout?: number
@ -77,9 +77,9 @@ export const AutocompleteInput = ({
const filteredItems = (
inputValue === ''
? items
? items ?? []
: [
...items.filter(
...(items ?? []).filter(
(item) =>
item.toLowerCase().startsWith((inputValue ?? '').toLowerCase()) &&
item.toLowerCase() !== inputValue.toLowerCase()
@ -186,7 +186,8 @@ export const AutocompleteInput = ({
onFocus={onOpen}
onBlur={updateCarretPosition}
onKeyDown={updateFocusedDropdownItem}
placeholder={placeholder}
placeholder={!items ? 'Loading...' : placeholder}
isDisabled={!items}
/>
</PopoverAnchor>
{filteredItems.length > 0 && (

View File

@ -35,7 +35,7 @@ type Item =
type Props<T extends Item> = {
isPopoverMatchingInputWidth?: boolean
selectedItem?: string
items: readonly T[]
items: readonly T[] | undefined
placeholder?: string
onSelect?: (value: string | undefined, item?: T) => void
}
@ -53,7 +53,7 @@ export const Select = <T extends Item>({
const { onOpen, onClose, isOpen } = useDisclosure()
const [inputValue, setInputValue] = useState(
getItemLabel(
items.find((item) =>
items?.find((item) =>
typeof item === 'string'
? selectedItem === item
: selectedItem === item.value
@ -72,13 +72,13 @@ export const Select = <T extends Item>({
const filteredItems = (
isTouched
? [
...items.filter((item) =>
...(items ?? []).filter((item) =>
getItemLabel(item)
.toLowerCase()
.includes((inputValue ?? '').toLowerCase())
),
]
: items
: items ?? []
).slice(0, 50)
const closeDropdown = () => {
@ -181,12 +181,17 @@ export const Select = <T extends Item>({
className="select-input"
value={isTouched ? inputValue : ''}
placeholder={
!isTouched && inputValue !== '' ? undefined : placeholder
!items
? 'Loading...'
: !isTouched && inputValue !== ''
? undefined
: placeholder
}
onChange={updateInputValue}
onFocus={onOpen}
onKeyDown={updateFocusedDropdownItem}
pr={selectedItem ? 16 : undefined}
isDisabled={!items}
/>
<InputRightElement

View File

@ -0,0 +1,134 @@
import prisma from '@/lib/prisma'
import { authenticatedProcedure } from '@/helpers/server/trpc'
import { TRPCError } from '@trpc/server'
import { z } from 'zod'
import { isReadWorkspaceFobidden } from '@/features/workspace/helpers/isReadWorkspaceFobidden'
import { Configuration, OpenAIApi, ResponseTypes } from 'openai-edge'
import { decrypt } from '@typebot.io/lib/api'
import { OpenAICredentials } from '@typebot.io/schemas/features/blocks/integrations/openai'
import { IntegrationBlockType, typebotSchema } from '@typebot.io/schemas'
import { isNotEmpty } from '@typebot.io/lib/utils'
export const listModels = authenticatedProcedure
.meta({
openapi: {
method: 'GET',
path: '/typebots/{typebotId}/blocks/{blockId}/openai/models',
protect: true,
summary: 'List OpenAI models',
tags: ['OpenAI'],
},
})
.input(
z.object({
typebotId: z.string(),
blockId: z.string(),
credentialsId: z.string(),
workspaceId: z.string(),
})
)
.output(
z.object({
models: z.array(z.string()),
})
)
.query(
async ({
input: { credentialsId, workspaceId, typebotId, blockId },
ctx: { user },
}) => {
const workspace = await prisma.workspace.findFirst({
where: { id: workspaceId },
select: {
members: {
select: {
userId: true,
},
},
typebots: {
where: {
id: typebotId,
},
select: {
groups: true,
},
},
credentials: {
where: {
id: credentialsId,
},
select: {
id: true,
data: true,
iv: true,
},
},
},
})
if (!workspace || isReadWorkspaceFobidden(workspace, user))
throw new TRPCError({
code: 'NOT_FOUND',
message: 'No workspace found',
})
const credentials = workspace.credentials.at(0)
if (!credentials)
throw new TRPCError({
code: 'NOT_FOUND',
message: 'No credentials found',
})
const typebot = workspace.typebots.at(0)
if (!typebot)
throw new TRPCError({
code: 'NOT_FOUND',
message: 'Typebot not found',
})
const block = typebotSchema._def.schema.shape.groups
.parse(workspace.typebots.at(0)?.groups)
.flatMap((group) => group.blocks)
.find((block) => block.id === blockId)
if (!block || block.type !== IntegrationBlockType.OPEN_AI)
throw new TRPCError({
code: 'NOT_FOUND',
message: 'OpenAI block not found',
})
const data = (await decrypt(
credentials.data,
credentials.iv
)) as OpenAICredentials['data']
const config = new Configuration({
apiKey: data.apiKey,
basePath: block.options.baseUrl,
baseOptions: {
headers: {
'api-key': data.apiKey,
},
},
defaultQueryParams: isNotEmpty(block.options.apiVersion)
? new URLSearchParams({
'api-version': block.options.apiVersion,
})
: undefined,
})
const openai = new OpenAIApi(config)
const response = await openai.listModels()
const modelsData = (await response.json()) as ResponseTypes['listModels']
return {
models: modelsData.data
.sort((a, b) => b.created - a.created)
.map((model) => model.id),
}
}
)
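The procedure is registered on the tRPC router as openAI.listModels (wired further down) and, through the openapi meta above, also exposed on the builder's REST surface. A usage sketch of the REST form, assuming a placeholder host, API path prefix, token, and ids:

const res = await fetch(
  'https://builder.example.com/api/typebots/TYPEBOT_ID/blocks/BLOCK_ID/openai/models' +
    '?credentialsId=CREDENTIALS_ID&workspaceId=WORKSPACE_ID',
  { headers: { Authorization: 'Bearer API_TOKEN' } } // placeholder token
)
const { models } = (await res.json()) as { models: string[] }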

View File

@ -0,0 +1,6 @@
import { router } from '@/helpers/server/trpc'
import { listModels } from './listModels'
export const openAIRouter = router({
listModels,
})

View File

@ -1,9 +1,19 @@
import { Stack, useDisclosure } from '@chakra-ui/react'
import {
Accordion,
AccordionButton,
AccordionIcon,
AccordionItem,
AccordionPanel,
Stack,
useDisclosure,
Text,
} from '@chakra-ui/react'
import React from 'react'
import { CredentialsDropdown } from '@/features/credentials/components/CredentialsDropdown'
import {
ChatCompletionOpenAIOptions,
CreateImageOpenAIOptions,
defaultBaseUrl,
defaultChatCompletionOptions,
OpenAIBlock,
openAITasks,
@ -13,15 +23,19 @@ import { useWorkspace } from '@/features/workspace/WorkspaceProvider'
import { DropdownList } from '@/components/DropdownList'
import { OpenAIChatCompletionSettings } from './createChatCompletion/OpenAIChatCompletionSettings'
import { createId } from '@paralleldrive/cuid2'
import { TextInput } from '@/components/inputs'
type OpenAITask = (typeof openAITasks)[number]
type Props = {
options: OpenAIBlock['options']
block: OpenAIBlock
onOptionsChange: (options: OpenAIBlock['options']) => void
}
export const OpenAISettings = ({ options, onOptionsChange }: Props) => {
export const OpenAISettings = ({
block: { options, id },
onOptionsChange,
}: Props) => {
const { workspace } = useWorkspace()
const { isOpen, onOpen, onClose } = useDisclosure()
@ -44,6 +58,20 @@ export const OpenAISettings = ({ options, onOptionsChange }: Props) => {
}
}
const updateBaseUrl = (baseUrl: string) => {
onOptionsChange({
...options,
baseUrl,
})
}
const updateApiVersion = (apiVersion: string) => {
onOptionsChange({
...options,
apiVersion,
})
}
return (
<Stack>
{workspace && (
@ -56,22 +84,51 @@ export const OpenAISettings = ({ options, onOptionsChange }: Props) => {
credentialsName="OpenAI account"
/>
)}
<OpenAICredentialsModal
isOpen={isOpen}
onClose={onClose}
onNewCredentials={updateCredentialsId}
/>
<DropdownList
currentItem={options.task}
items={openAITasks.slice(0, -1)}
onItemSelect={updateTask}
placeholder="Select task"
/>
{options.task && (
<OpenAITaskSettings
options={options}
onOptionsChange={onOptionsChange}
/>
{options.credentialsId && (
<>
<Accordion allowToggle>
<AccordionItem>
<AccordionButton>
<Text w="full" textAlign="left">
Customize provider
</Text>
<AccordionIcon />
</AccordionButton>
<AccordionPanel as={Stack} spacing={4}>
<TextInput
label="Base URL"
defaultValue={options.baseUrl}
onChange={updateBaseUrl}
/>
{options.baseUrl !== defaultBaseUrl && (
<TextInput
label="API version"
defaultValue={options.apiVersion}
onChange={updateApiVersion}
/>
)}
</AccordionPanel>
</AccordionItem>
</Accordion>
<OpenAICredentialsModal
isOpen={isOpen}
onClose={onClose}
onNewCredentials={updateCredentialsId}
/>
<DropdownList
currentItem={options.task}
items={openAITasks.slice(0, -1)}
onItemSelect={updateTask}
placeholder="Select task"
/>
{options.task && (
<OpenAITaskSettings
blockId={id}
options={options}
onOptionsChange={onOptionsChange}
/>
)}
</>
)}
</Stack>
)
@ -80,14 +137,17 @@ export const OpenAISettings = ({ options, onOptionsChange }: Props) => {
const OpenAITaskSettings = ({
options,
onOptionsChange,
blockId,
}: {
options: ChatCompletionOpenAIOptions | CreateImageOpenAIOptions
blockId: string
onOptionsChange: (options: OpenAIBlock['options']) => void
}) => {
switch (options.task) {
case 'Create chat completion': {
return (
<OpenAIChatCompletionSettings
blockId={blockId}
options={options}
onOptionsChange={onOptionsChange}
/>

View File

@ -0,0 +1,42 @@
import { Select } from '@/components/inputs/Select'
import { useTypebot } from '@/features/editor/providers/TypebotProvider'
import { useWorkspace } from '@/features/workspace/WorkspaceProvider'
import { trpc } from '@/lib/trpc'
type Props = {
credentialsId: string
blockId: string
defaultValue: string
onChange: (model: string | undefined) => void
}
export const ModelsDropdown = ({
defaultValue,
onChange,
credentialsId,
blockId,
}: Props) => {
const { typebot } = useTypebot()
const { workspace } = useWorkspace()
const { data } = trpc.openAI.listModels.useQuery(
{
credentialsId,
blockId,
typebotId: typebot?.id as string,
workspaceId: workspace?.id as string,
},
{
enabled: !!typebot && !!workspace,
}
)
return (
<Select
items={data?.models}
selectedItem={defaultValue}
onSelect={onChange}
placeholder="Select a model"
/>
)
}

View File

@ -1,9 +1,5 @@
import { TableList } from '@/components/TableList'
import {
chatCompletionModels,
ChatCompletionOpenAIOptions,
deprecatedCompletionModels,
} from '@typebot.io/schemas/features/blocks/integrations/openai'
import { ChatCompletionOpenAIOptions } from '@typebot.io/schemas/features/blocks/integrations/openai'
import { ChatCompletionMessageItem } from './ChatCompletionMessageItem'
import {
Accordion,
@ -17,24 +13,23 @@ import {
import { TextLink } from '@/components/TextLink'
import { ChatCompletionResponseItem } from './ChatCompletionResponseItem'
import { NumberInput } from '@/components/inputs'
import { Select } from '@/components/inputs/Select'
import { ModelsDropdown } from './ModelsDropdown'
const apiReferenceUrl =
'https://platform.openai.com/docs/api-reference/chat/create'
type Props = {
blockId: string
options: ChatCompletionOpenAIOptions
onOptionsChange: (options: ChatCompletionOpenAIOptions) => void
}
export const OpenAIChatCompletionSettings = ({
blockId,
options,
onOptionsChange,
}: Props) => {
const updateModel = (
_: string | undefined,
model: (typeof chatCompletionModels)[number] | undefined
) => {
const updateModel = (model: string | undefined) => {
if (!model) return
onOptionsChange({
...options,
@ -79,68 +74,71 @@ export const OpenAIChatCompletionSettings = ({
</TextLink>{' '}
to better understand the available options.
</Text>
<Select
selectedItem={options.model}
items={chatCompletionModels.filter(
(model) => deprecatedCompletionModels.indexOf(model) === -1
)}
onSelect={updateModel}
/>
<Accordion allowMultiple>
<AccordionItem>
<AccordionButton>
<Text w="full" textAlign="left">
Messages
</Text>
<AccordionIcon />
</AccordionButton>
{options.credentialsId && (
<>
<ModelsDropdown
credentialsId={options.credentialsId}
defaultValue={options.model}
onChange={updateModel}
blockId={blockId}
/>
<Accordion allowMultiple>
<AccordionItem>
<AccordionButton>
<Text w="full" textAlign="left">
Messages
</Text>
<AccordionIcon />
</AccordionButton>
<AccordionPanel pt="4">
<TableList
initialItems={options.messages}
Item={ChatCompletionMessageItem}
onItemsChange={updateMessages}
isOrdered
addLabel="Add message"
/>
</AccordionPanel>
</AccordionItem>
<AccordionItem>
<AccordionButton>
<Text w="full" textAlign="left">
Advanced settings
</Text>
<AccordionIcon />
</AccordionButton>
<AccordionPanel>
<NumberInput
label="Temperature"
placeholder="1"
max={2}
min={0}
step={0.1}
defaultValue={options.advancedSettings?.temperature}
onValueChange={updateTemperature}
/>
</AccordionPanel>
</AccordionItem>
<AccordionItem>
<AccordionButton>
<Text w="full" textAlign="left">
Save answer
</Text>
<AccordionIcon />
</AccordionButton>
<AccordionPanel pt="4">
<TableList
initialItems={options.responseMapping}
Item={ChatCompletionResponseItem}
onItemsChange={updateResponseMapping}
newItemDefaultProps={{ valueToExtract: 'Message content' }}
/>
</AccordionPanel>
</AccordionItem>
</Accordion>
<AccordionPanel pt="4">
<TableList
initialItems={options.messages}
Item={ChatCompletionMessageItem}
onItemsChange={updateMessages}
isOrdered
addLabel="Add message"
/>
</AccordionPanel>
</AccordionItem>
<AccordionItem>
<AccordionButton>
<Text w="full" textAlign="left">
Advanced settings
</Text>
<AccordionIcon />
</AccordionButton>
<AccordionPanel>
<NumberInput
label="Temperature"
placeholder="1"
max={2}
min={0}
step={0.1}
defaultValue={options.advancedSettings?.temperature}
onValueChange={updateTemperature}
/>
</AccordionPanel>
</AccordionItem>
<AccordionItem>
<AccordionButton>
<Text w="full" textAlign="left">
Save answer
</Text>
<AccordionIcon />
</AccordionButton>
<AccordionPanel pt="4">
<TableList
initialItems={options.responseMapping}
Item={ChatCompletionResponseItem}
onItemsChange={updateResponseMapping}
newItemDefaultProps={{ valueToExtract: 'Message content' }}
/>
</AccordionPanel>
</AccordionItem>
</Accordion>
</>
)}
</Stack>
)
}

View File

@ -3,6 +3,7 @@ import { createTypebots } from '@typebot.io/lib/playwright/databaseActions'
import { createId } from '@paralleldrive/cuid2'
import { IntegrationBlockType } from '@typebot.io/schemas'
import { parseDefaultGroupWithBlock } from '@typebot.io/lib/playwright/databaseHelpers'
import { defaultBaseUrl } from '@typebot.io/schemas/features/blocks/integrations/openai'
const typebotId = createId()
@ -12,7 +13,9 @@ test('should be configurable', async ({ page }) => {
id: typebotId,
...parseDefaultGroupWithBlock({
type: IntegrationBlockType.OPEN_AI,
options: {},
options: {
baseUrl: defaultBaseUrl,
},
}),
},
])

View File

@ -69,8 +69,7 @@ export const SettingsPopoverContent = ({ onExpandClick, ...props }: Props) => {
<PopoverContent onMouseDown={handleMouseDown} pos="relative">
<PopoverArrow bgColor={arrowColor} />
<PopoverBody
pt="3"
pb="6"
py="3"
overflowY="scroll"
maxH="400px"
ref={ref}
@ -305,12 +304,7 @@ export const BlockSettings = ({
)
}
case IntegrationBlockType.OPEN_AI: {
return (
<OpenAISettings
options={block.options}
onOptionsChange={updateOptions}
/>
)
return <OpenAISettings block={block} onOptionsChange={updateOptions} />
}
case IntegrationBlockType.PIXEL: {
return (

View File

@ -14,6 +14,7 @@ import { analyticsRouter } from '@/features/analytics/api/router'
import { collaboratorsRouter } from '@/features/collaboration/api/router'
import { customDomainsRouter } from '@/features/customDomains/api/router'
import { whatsAppRouter } from '@/features/whatsapp/router'
import { openAIRouter } from '@/features/blocks/integrations/openai/api/router'
export const trpcRouter = router({
getAppVersionProcedure,
@ -31,6 +32,7 @@ export const trpcRouter = router({
collaborators: collaboratorsRouter,
customDomains: customDomainsRouter,
whatsApp: whatsAppRouter,
openAI: openAIRouter,
})
export type AppRouter = typeof trpcRouter

View File

@ -2923,6 +2923,13 @@
},
"credentialsId": {
"type": "string"
},
"baseUrl": {
"type": "string",
"default": "https://api.openai.com/v1"
},
"apiVersion": {
"type": "string"
}
},
"additionalProperties": false
@ -2937,20 +2944,7 @@
]
},
"model": {
"type": "string",
"enum": [
"gpt-3.5-turbo",
"gpt-3.5-turbo-0613",
"gpt-3.5-turbo-16k",
"gpt-3.5-turbo-16k-0613",
"gpt-3.5-turbo-0301",
"gpt-4",
"gpt-4-0613",
"gpt-4-32k",
"gpt-4-32k-0613",
"gpt-4-32k-0314",
"gpt-4-0314"
]
"type": "string"
},
"messages": {
"type": "array",
@ -3057,6 +3051,13 @@
},
"credentialsId": {
"type": "string"
},
"baseUrl": {
"type": "string",
"default": "https://api.openai.com/v1"
},
"apiVersion": {
"type": "string"
}
},
"required": [
@ -3120,6 +3121,13 @@
},
"credentialsId": {
"type": "string"
},
"baseUrl": {
"type": "string",
"default": "https://api.openai.com/v1"
},
"apiVersion": {
"type": "string"
}
},
"required": [
@ -7208,6 +7216,13 @@
},
"credentialsId": {
"type": "string"
},
"baseUrl": {
"type": "string",
"default": "https://api.openai.com/v1"
},
"apiVersion": {
"type": "string"
}
},
"additionalProperties": false
@ -7222,20 +7237,7 @@
]
},
"model": {
"type": "string",
"enum": [
"gpt-3.5-turbo",
"gpt-3.5-turbo-0613",
"gpt-3.5-turbo-16k",
"gpt-3.5-turbo-16k-0613",
"gpt-3.5-turbo-0301",
"gpt-4",
"gpt-4-0613",
"gpt-4-32k",
"gpt-4-32k-0613",
"gpt-4-32k-0314",
"gpt-4-0314"
]
"type": "string"
},
"messages": {
"type": "array",
@ -7342,6 +7344,13 @@
},
"credentialsId": {
"type": "string"
},
"baseUrl": {
"type": "string",
"default": "https://api.openai.com/v1"
},
"apiVersion": {
"type": "string"
}
},
"required": [
@ -7405,6 +7414,13 @@
},
"credentialsId": {
"type": "string"
},
"baseUrl": {
"type": "string",
"default": "https://api.openai.com/v1"
},
"apiVersion": {
"type": "string"
}
},
"required": [
@ -11128,6 +11144,13 @@
},
"credentialsId": {
"type": "string"
},
"baseUrl": {
"type": "string",
"default": "https://api.openai.com/v1"
},
"apiVersion": {
"type": "string"
}
},
"additionalProperties": false
@ -11142,20 +11165,7 @@
]
},
"model": {
"type": "string",
"enum": [
"gpt-3.5-turbo",
"gpt-3.5-turbo-0613",
"gpt-3.5-turbo-16k",
"gpt-3.5-turbo-16k-0613",
"gpt-3.5-turbo-0301",
"gpt-4",
"gpt-4-0613",
"gpt-4-32k",
"gpt-4-32k-0613",
"gpt-4-32k-0314",
"gpt-4-0314"
]
"type": "string"
},
"messages": {
"type": "array",
@ -11262,6 +11272,13 @@
},
"credentialsId": {
"type": "string"
},
"baseUrl": {
"type": "string",
"default": "https://api.openai.com/v1"
},
"apiVersion": {
"type": "string"
}
},
"required": [
@ -11325,6 +11342,13 @@
},
"credentialsId": {
"type": "string"
},
"baseUrl": {
"type": "string",
"default": "https://api.openai.com/v1"
},
"apiVersion": {
"type": "string"
}
},
"required": [
@ -15188,6 +15212,13 @@
},
"credentialsId": {
"type": "string"
},
"baseUrl": {
"type": "string",
"default": "https://api.openai.com/v1"
},
"apiVersion": {
"type": "string"
}
},
"additionalProperties": false
@ -15202,20 +15233,7 @@
]
},
"model": {
"type": "string",
"enum": [
"gpt-3.5-turbo",
"gpt-3.5-turbo-0613",
"gpt-3.5-turbo-16k",
"gpt-3.5-turbo-16k-0613",
"gpt-3.5-turbo-0301",
"gpt-4",
"gpt-4-0613",
"gpt-4-32k",
"gpt-4-32k-0613",
"gpt-4-32k-0314",
"gpt-4-0314"
]
"type": "string"
},
"messages": {
"type": "array",
@ -15322,6 +15340,13 @@
},
"credentialsId": {
"type": "string"
},
"baseUrl": {
"type": "string",
"default": "https://api.openai.com/v1"
},
"apiVersion": {
"type": "string"
}
},
"required": [
@ -15385,6 +15410,13 @@
},
"credentialsId": {
"type": "string"
},
"baseUrl": {
"type": "string",
"default": "https://api.openai.com/v1"
},
"apiVersion": {
"type": "string"
}
},
"required": [
@ -19128,6 +19160,13 @@
},
"credentialsId": {
"type": "string"
},
"baseUrl": {
"type": "string",
"default": "https://api.openai.com/v1"
},
"apiVersion": {
"type": "string"
}
},
"additionalProperties": false
@ -19142,20 +19181,7 @@
]
},
"model": {
"type": "string",
"enum": [
"gpt-3.5-turbo",
"gpt-3.5-turbo-0613",
"gpt-3.5-turbo-16k",
"gpt-3.5-turbo-16k-0613",
"gpt-3.5-turbo-0301",
"gpt-4",
"gpt-4-0613",
"gpt-4-32k",
"gpt-4-32k-0613",
"gpt-4-32k-0314",
"gpt-4-0314"
]
"type": "string"
},
"messages": {
"type": "array",
@ -19262,6 +19288,13 @@
},
"credentialsId": {
"type": "string"
},
"baseUrl": {
"type": "string",
"default": "https://api.openai.com/v1"
},
"apiVersion": {
"type": "string"
}
},
"required": [
@ -19325,6 +19358,13 @@
},
"credentialsId": {
"type": "string"
},
"baseUrl": {
"type": "string",
"default": "https://api.openai.com/v1"
},
"apiVersion": {
"type": "string"
}
},
"required": [
@ -23123,6 +23163,13 @@
},
"credentialsId": {
"type": "string"
},
"baseUrl": {
"type": "string",
"default": "https://api.openai.com/v1"
},
"apiVersion": {
"type": "string"
}
},
"additionalProperties": false
@ -23137,20 +23184,7 @@
]
},
"model": {
"type": "string",
"enum": [
"gpt-3.5-turbo",
"gpt-3.5-turbo-0613",
"gpt-3.5-turbo-16k",
"gpt-3.5-turbo-16k-0613",
"gpt-3.5-turbo-0301",
"gpt-4",
"gpt-4-0613",
"gpt-4-32k",
"gpt-4-32k-0613",
"gpt-4-32k-0314",
"gpt-4-0314"
]
"type": "string"
},
"messages": {
"type": "array",
@ -23257,6 +23291,13 @@
},
"credentialsId": {
"type": "string"
},
"baseUrl": {
"type": "string",
"default": "https://api.openai.com/v1"
},
"apiVersion": {
"type": "string"
}
},
"required": [
@ -23320,6 +23361,13 @@
},
"credentialsId": {
"type": "string"
},
"baseUrl": {
"type": "string",
"default": "https://api.openai.com/v1"
},
"apiVersion": {
"type": "string"
}
},
"required": [
@ -27181,6 +27229,13 @@
},
"credentialsId": {
"type": "string"
},
"baseUrl": {
"type": "string",
"default": "https://api.openai.com/v1"
},
"apiVersion": {
"type": "string"
}
},
"additionalProperties": false
@ -27195,20 +27250,7 @@
]
},
"model": {
"type": "string",
"enum": [
"gpt-3.5-turbo",
"gpt-3.5-turbo-0613",
"gpt-3.5-turbo-16k",
"gpt-3.5-turbo-16k-0613",
"gpt-3.5-turbo-0301",
"gpt-4",
"gpt-4-0613",
"gpt-4-32k",
"gpt-4-32k-0613",
"gpt-4-32k-0314",
"gpt-4-0314"
]
"type": "string"
},
"messages": {
"type": "array",
@ -27315,6 +27357,13 @@
},
"credentialsId": {
"type": "string"
},
"baseUrl": {
"type": "string",
"default": "https://api.openai.com/v1"
},
"apiVersion": {
"type": "string"
}
},
"required": [
@ -27378,6 +27427,13 @@
},
"credentialsId": {
"type": "string"
},
"baseUrl": {
"type": "string",
"default": "https://api.openai.com/v1"
},
"apiVersion": {
"type": "string"
}
},
"required": [
@ -31999,6 +32055,81 @@
}
}
}
},
"/typebots/{typebotId}/blocks/{blockId}/openai/models": {
"get": {
"operationId": "openAI-listModels",
"summary": "List OpenAI models",
"tags": [
"OpenAI"
],
"security": [
{
"Authorization": []
}
],
"parameters": [
{
"name": "typebotId",
"in": "path",
"required": true,
"schema": {
"type": "string"
}
},
{
"name": "blockId",
"in": "path",
"required": true,
"schema": {
"type": "string"
}
},
{
"name": "credentialsId",
"in": "query",
"required": true,
"schema": {
"type": "string"
}
},
{
"name": "workspaceId",
"in": "query",
"required": true,
"schema": {
"type": "string"
}
}
],
"responses": {
"200": {
"description": "Successful response",
"content": {
"application/json": {
"schema": {
"type": "object",
"properties": {
"models": {
"type": "array",
"items": {
"type": "string"
}
}
},
"required": [
"models"
],
"additionalProperties": false
}
}
}
},
"default": {
"$ref": "#/components/responses/error"
}
}
}
}
},
"components": {

View File

@ -2506,6 +2506,13 @@
},
"credentialsId": {
"type": "string"
},
"baseUrl": {
"type": "string",
"default": "https://api.openai.com/v1"
},
"apiVersion": {
"type": "string"
}
},
"additionalProperties": false
@ -2520,20 +2527,7 @@
]
},
"model": {
"type": "string",
"enum": [
"gpt-3.5-turbo",
"gpt-3.5-turbo-0613",
"gpt-3.5-turbo-16k",
"gpt-3.5-turbo-16k-0613",
"gpt-3.5-turbo-0301",
"gpt-4",
"gpt-4-0613",
"gpt-4-32k",
"gpt-4-32k-0613",
"gpt-4-32k-0314",
"gpt-4-0314"
]
"type": "string"
},
"messages": {
"type": "array",
@ -2640,6 +2634,13 @@
},
"credentialsId": {
"type": "string"
},
"baseUrl": {
"type": "string",
"default": "https://api.openai.com/v1"
},
"apiVersion": {
"type": "string"
}
},
"required": [
@ -2703,6 +2704,13 @@
},
"credentialsId": {
"type": "string"
},
"baseUrl": {
"type": "string",
"default": "https://api.openai.com/v1"
},
"apiVersion": {
"type": "string"
}
},
"required": [

View File

@ -29,8 +29,7 @@
"nextjs-cors": "2.1.2",
"node-html-parser": "^6.1.5",
"nodemailer": "6.9.3",
"openai": "3.3.0",
"openai-edge": "^1.2.0",
"openai-edge": "1.2.2",
"qs": "6.11.2",
"react": "18.2.0",
"react-dom": "18.2.0",

View File

@ -107,6 +107,8 @@ export const createChatCompletionOpenAI = async (
messages,
model: options.model,
temperature,
baseUrl: options.baseUrl,
apiVersion: options.apiVersion,
})
if (!response)
return {

View File

@ -1,24 +1,30 @@
import { isNotEmpty } from '@typebot.io/lib/utils'
import { ChatReply } from '@typebot.io/schemas'
import got, { HTTPError } from 'got'
import type {
CreateChatCompletionRequest,
CreateChatCompletionResponse,
} from 'openai'
const createChatEndpoint = 'https://api.openai.com/v1/chat/completions'
import { OpenAIBlock } from '@typebot.io/schemas/features/blocks/integrations/openai'
import { HTTPError } from 'got'
import {
Configuration,
OpenAIApi,
type CreateChatCompletionRequest,
type CreateChatCompletionResponse,
ResponseTypes,
} from 'openai-edge'
type Props = Pick<CreateChatCompletionRequest, 'messages' | 'model'> & {
apiKey: string
temperature: number | undefined
currentLogs?: ChatReply['logs']
isRetrying?: boolean
}
} & Pick<OpenAIBlock['options'], 'apiVersion' | 'baseUrl'>
export const executeChatCompletionOpenAIRequest = async ({
apiKey,
model,
messages,
temperature,
baseUrl,
apiVersion,
isRetrying,
currentLogs = [],
}: Props): Promise<{
response?: CreateChatCompletionResponse
@ -27,22 +33,40 @@ export const executeChatCompletionOpenAIRequest = async ({
const logs: ChatReply['logs'] = currentLogs
if (messages.length === 0) return { logs }
try {
const response = await got
.post(createChatEndpoint, {
const config = new Configuration({
apiKey,
basePath: baseUrl,
baseOptions: {
headers: {
Authorization: `Bearer ${apiKey}`,
'api-key': apiKey,
},
json: {
model,
messages,
temperature,
} satisfies CreateChatCompletionRequest,
})
.json<CreateChatCompletionResponse>()
return { response, logs }
},
defaultQueryParams: isNotEmpty(apiVersion)
? new URLSearchParams({
'api-version': apiVersion,
})
: undefined,
})
const openai = new OpenAIApi(config)
const response = await openai.createChatCompletion({
model,
messages,
temperature,
})
const completion =
(await response.json()) as ResponseTypes['createChatCompletion']
return { response: completion, logs }
} catch (error) {
if (error instanceof HTTPError) {
if (error.response.statusCode === 503) {
if (
(error.response.statusCode === 503 ||
error.response.statusCode === 500 ||
error.response.statusCode === 403) &&
!isRetrying
) {
console.log('OpenAI API error - 503, retrying in 3 seconds')
await new Promise((resolve) => setTimeout(resolve, 3000))
return executeChatCompletionOpenAIRequest({
@ -51,6 +75,9 @@ export const executeChatCompletionOpenAIRequest = async ({
messages,
temperature,
currentLogs: logs,
baseUrl,
apiVersion,
isRetrying: true,
})
}
if (error.response.statusCode === 400) {
@ -67,6 +94,8 @@ export const executeChatCompletionOpenAIRequest = async ({
messages: messages.slice(1),
temperature,
currentLogs: logs,
baseUrl,
apiVersion,
})
}
logs.push({

View File

@ -1,6 +1,7 @@
import { parseVariableNumber } from '@/features/variables/parseVariableNumber'
import { Connection } from '@planetscale/database'
import { decrypt } from '@typebot.io/lib/api/encryption'
import { isNotEmpty } from '@typebot.io/lib/utils'
import {
ChatCompletionOpenAIOptions,
OpenAICredentials,
@ -42,6 +43,17 @@ export const getChatCompletionStream =
const config = new Configuration({
apiKey,
basePath: options.baseUrl,
baseOptions: {
headers: {
'api-key': apiKey,
},
},
defaultQueryParams: isNotEmpty(options.apiVersion)
? new URLSearchParams({
'api-version': options.apiVersion,
})
: undefined,
})
const openai = new OpenAIApi(config)

View File

@ -3,7 +3,7 @@ import { transformStringVariablesToList } from '@/features/variables/transformVa
import { byId, isNotEmpty } from '@typebot.io/lib'
import { Variable, VariableWithValue } from '@typebot.io/schemas'
import { ChatCompletionOpenAIOptions } from '@typebot.io/schemas/features/blocks/integrations/openai'
import type { ChatCompletionRequestMessage } from 'openai'
import type { ChatCompletionRequestMessage } from 'openai-edge'
export const parseChatCompletionMessages =
(variables: Variable[]) =>

View File

@ -3,7 +3,7 @@ import { connect } from '@planetscale/database'
import { env } from '@typebot.io/env'
import { IntegrationBlockType, SessionState } from '@typebot.io/schemas'
import { StreamingTextResponse } from 'ai'
import { ChatCompletionRequestMessage } from 'openai'
import { ChatCompletionRequestMessage } from 'openai-edge'
export const config = {
runtime: 'edge',

View File

@ -5,23 +5,6 @@ import { IntegrationBlockType } from './enums'
export const openAITasks = ['Create chat completion', 'Create image'] as const
export const chatCompletionModels = [
'gpt-3.5-turbo',
'gpt-3.5-turbo-0613',
'gpt-3.5-turbo-16k',
'gpt-3.5-turbo-16k-0613',
'gpt-3.5-turbo-0301',
'gpt-4',
'gpt-4-0613',
'gpt-4-32k',
'gpt-4-32k-0613',
'gpt-4-32k-0314',
'gpt-4-0314',
] as const
export const deprecatedCompletionModels: (typeof chatCompletionModels)[number][] =
['gpt-3.5-turbo-0301', 'gpt-4-32k-0314', 'gpt-4-0314']
export const chatCompletionMessageRoles = [
'system',
'user',
@ -37,8 +20,12 @@ export const chatCompletionResponseValues = [
'Total tokens',
] as const
export const defaultBaseUrl = 'https://api.openai.com/v1'
const openAIBaseOptionsSchema = z.object({
credentialsId: z.string().optional(),
baseUrl: z.string().default(defaultBaseUrl),
apiVersion: z.string().optional(),
})
const initialOptionsSchema = z
@ -68,7 +55,7 @@ const chatCompletionCustomMessageSchema = z.object({
const chatCompletionOptionsSchema = z
.object({
task: z.literal(openAITasks[0]),
model: z.enum(chatCompletionModels),
model: z.string(),
messages: z.array(
z.union([chatCompletionMessageSchema, chatCompletionCustomMessageSchema])
),
@ -130,6 +117,7 @@ export const openAICredentialsSchema = z
export const defaultChatCompletionOptions = (
createId: () => string
): ChatCompletionOpenAIOptions => ({
baseUrl: defaultBaseUrl,
task: 'Create chat completion',
messages: [
{
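On the schema side, the hard-coded chatCompletionModels enum and its deprecated list are dropped in favor of a plain string, while the shared base options gain baseUrl with a default plus an optional apiVersion. A minimal sketch of how the new base options schema behaves; the credentials id is illustrative:

import { z } from 'zod'

const defaultBaseUrl = 'https://api.openai.com/v1'

const openAIBaseOptionsSchema = z.object({
  credentialsId: z.string().optional(),
  baseUrl: z.string().default(defaultBaseUrl),
  apiVersion: z.string().optional(),
})

// baseUrl falls back to the default when omitted
const parsed = openAIBaseOptionsSchema.parse({ credentialsId: 'cred_123' })
console.log(parsed.baseUrl) // 'https://api.openai.com/v1'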

pnpm-lock.yaml generated
View File

@ -218,6 +218,9 @@ importers:
nprogress:
specifier: 0.2.0
version: 0.2.0
openai-edge:
specifier: 1.2.2
version: 1.2.2
papaparse:
specifier: 5.4.1
version: 5.4.1
@ -569,12 +572,9 @@ importers:
nodemailer:
specifier: 6.9.3
version: 6.9.3
openai:
specifier: 3.3.0
version: 3.3.0
openai-edge:
specifier: ^1.2.0
version: 1.2.0
specifier: 1.2.2
version: 1.2.2
qs:
specifier: 6.11.2
version: 6.11.2
@ -17593,20 +17593,11 @@ packages:
is-wsl: 2.2.0
dev: false
/openai-edge@1.2.0:
resolution: {integrity: sha512-eaQs+O/1k6OZMUibNlBzWPXdHFxpUNLMy4BwhtXCFDub5iz7ve/PxOJTL8GBG3/1S1j6LIL93xjdlzCPQpbdgQ==}
/openai-edge@1.2.2:
resolution: {integrity: sha512-C3/Ao9Hkx5uBPv9YFBpX/x59XMPgPUU4dyGg/0J2sOJ7O9D98kD+lfdOc7v/60oYo5xzMGct80uFkYLH+X2qgw==}
engines: {node: '>=18'}
dev: false
/openai@3.3.0:
resolution: {integrity: sha512-uqxI/Au+aPRnsaQRe8CojU0eCR7I0mBiKjD3sNMzY6DaC1ZVrc85u98mtJW6voDug8fgGN+DIZmTDxTthxb7dQ==}
dependencies:
axios: 0.26.1
form-data: 4.0.0
transitivePeerDependencies:
- debug
dev: false
/openapi-to-postmanv2@1.2.7:
resolution: {integrity: sha512-oG3PZfAAljy5ebot8DZGLFDNNmDZ/qWqI/dboWlgg5hRj6dSSrXeiyXL6VQpcGDalxVX4jSChufOq2eDsFXp4w==}
engines: {node: '>=4'}