2
0

(openai) Add custom provider and custom models

Closes #532
This commit is contained in:
Baptiste Arnaud
2023-09-01 16:19:59 +02:00
parent 436fa251f8
commit 27a5f4eb74
21 changed files with 684 additions and 278 deletions

View File

@@ -75,6 +75,7 @@
"nextjs-cors": "^2.1.2", "nextjs-cors": "^2.1.2",
"nodemailer": "6.9.3", "nodemailer": "6.9.3",
"nprogress": "0.2.0", "nprogress": "0.2.0",
"openai-edge": "1.2.2",
"papaparse": "5.4.1", "papaparse": "5.4.1",
"posthog-js": "^1.77.1", "posthog-js": "^1.77.1",
"posthog-node": "3.1.1", "posthog-node": "3.1.1",

View File

@@ -24,7 +24,7 @@ import { MoreInfoTooltip } from '../MoreInfoTooltip'
import { env } from '@typebot.io/env' import { env } from '@typebot.io/env'
type Props = { type Props = {
items: string[] items: string[] | undefined
value?: string value?: string
defaultValue?: string defaultValue?: string
debounceTimeout?: number debounceTimeout?: number
@@ -77,9 +77,9 @@ export const AutocompleteInput = ({
const filteredItems = ( const filteredItems = (
inputValue === '' inputValue === ''
? items ? items ?? []
: [ : [
...items.filter( ...(items ?? []).filter(
(item) => (item) =>
item.toLowerCase().startsWith((inputValue ?? '').toLowerCase()) && item.toLowerCase().startsWith((inputValue ?? '').toLowerCase()) &&
item.toLowerCase() !== inputValue.toLowerCase() item.toLowerCase() !== inputValue.toLowerCase()
@@ -186,7 +186,8 @@ export const AutocompleteInput = ({
onFocus={onOpen} onFocus={onOpen}
onBlur={updateCarretPosition} onBlur={updateCarretPosition}
onKeyDown={updateFocusedDropdownItem} onKeyDown={updateFocusedDropdownItem}
placeholder={placeholder} placeholder={!items ? 'Loading...' : placeholder}
isDisabled={!items}
/> />
</PopoverAnchor> </PopoverAnchor>
{filteredItems.length > 0 && ( {filteredItems.length > 0 && (

View File

@@ -35,7 +35,7 @@ type Item =
type Props<T extends Item> = { type Props<T extends Item> = {
isPopoverMatchingInputWidth?: boolean isPopoverMatchingInputWidth?: boolean
selectedItem?: string selectedItem?: string
items: readonly T[] items: readonly T[] | undefined
placeholder?: string placeholder?: string
onSelect?: (value: string | undefined, item?: T) => void onSelect?: (value: string | undefined, item?: T) => void
} }
@@ -53,7 +53,7 @@ export const Select = <T extends Item>({
const { onOpen, onClose, isOpen } = useDisclosure() const { onOpen, onClose, isOpen } = useDisclosure()
const [inputValue, setInputValue] = useState( const [inputValue, setInputValue] = useState(
getItemLabel( getItemLabel(
items.find((item) => items?.find((item) =>
typeof item === 'string' typeof item === 'string'
? selectedItem === item ? selectedItem === item
: selectedItem === item.value : selectedItem === item.value
@@ -72,13 +72,13 @@ export const Select = <T extends Item>({
const filteredItems = ( const filteredItems = (
isTouched isTouched
? [ ? [
...items.filter((item) => ...(items ?? []).filter((item) =>
getItemLabel(item) getItemLabel(item)
.toLowerCase() .toLowerCase()
.includes((inputValue ?? '').toLowerCase()) .includes((inputValue ?? '').toLowerCase())
), ),
] ]
: items : items ?? []
).slice(0, 50) ).slice(0, 50)
const closeDropdown = () => { const closeDropdown = () => {
@@ -181,12 +181,17 @@ export const Select = <T extends Item>({
className="select-input" className="select-input"
value={isTouched ? inputValue : ''} value={isTouched ? inputValue : ''}
placeholder={ placeholder={
!isTouched && inputValue !== '' ? undefined : placeholder !items
? 'Loading...'
: !isTouched && inputValue !== ''
? undefined
: placeholder
} }
onChange={updateInputValue} onChange={updateInputValue}
onFocus={onOpen} onFocus={onOpen}
onKeyDown={updateFocusedDropdownItem} onKeyDown={updateFocusedDropdownItem}
pr={selectedItem ? 16 : undefined} pr={selectedItem ? 16 : undefined}
isDisabled={!items}
/> />
<InputRightElement <InputRightElement

View File

@@ -0,0 +1,134 @@
import prisma from '@/lib/prisma'
import { authenticatedProcedure } from '@/helpers/server/trpc'
import { TRPCError } from '@trpc/server'
import { z } from 'zod'
import { isReadWorkspaceFobidden } from '@/features/workspace/helpers/isReadWorkspaceFobidden'
import { Configuration, OpenAIApi, ResponseTypes } from 'openai-edge'
import { decrypt } from '@typebot.io/lib/api'
import { OpenAICredentials } from '@typebot.io/schemas/features/blocks/integrations/openai'
import { IntegrationBlockType, typebotSchema } from '@typebot.io/schemas'
import { isNotEmpty } from '@typebot.io/lib/utils'
/**
 * Lists the model ids available on the OpenAI-compatible API configured on an
 * OpenAI block. Exposed over REST as
 * GET /typebots/{typebotId}/blocks/{blockId}/openai/models.
 *
 * Authorization: the requesting user must be allowed to read the workspace,
 * and both the credentials and the typebot must belong to that workspace.
 *
 * @returns `{ models }` — model ids sorted by creation date, newest first.
 * @throws TRPCError NOT_FOUND when the workspace, credentials, typebot or
 *         OpenAI block cannot be resolved for this user.
 */
export const listModels = authenticatedProcedure
  .meta({
    openapi: {
      method: 'GET',
      path: '/typebots/{typebotId}/blocks/{blockId}/openai/models',
      protect: true,
      summary: 'List OpenAI models',
      tags: ['OpenAI'],
    },
  })
  .input(
    z.object({
      typebotId: z.string(),
      blockId: z.string(),
      credentialsId: z.string(),
      workspaceId: z.string(),
    })
  )
  .output(
    z.object({
      models: z.array(z.string()),
    })
  )
  .query(
    async ({
      input: { credentialsId, workspaceId, typebotId, blockId },
      ctx: { user },
    }) => {
      // One query fetches everything needed: members (for the access check),
      // the target typebot's groups (to locate the block) and the credentials.
      const workspace = await prisma.workspace.findFirst({
        where: { id: workspaceId },
        select: {
          members: {
            select: {
              userId: true,
            },
          },
          typebots: {
            where: {
              id: typebotId,
            },
            select: {
              groups: true,
            },
          },
          credentials: {
            where: {
              id: credentialsId,
            },
            select: {
              id: true,
              data: true,
              iv: true,
            },
          },
        },
      })
      if (!workspace || isReadWorkspaceFobidden(workspace, user))
        throw new TRPCError({
          code: 'NOT_FOUND',
          message: 'No workspace found',
        })

      const credentials = workspace.credentials.at(0)
      if (!credentials)
        throw new TRPCError({
          code: 'NOT_FOUND',
          message: 'No credentials found',
        })

      const typebot = workspace.typebots.at(0)
      if (!typebot)
        throw new TRPCError({
          code: 'NOT_FOUND',
          message: 'Typebot not found',
        })

      // Reuse `typebot` resolved above (already null-checked) rather than
      // re-reading workspace.typebots.at(0) with an optional chain.
      const block = typebotSchema._def.schema.shape.groups
        .parse(typebot.groups)
        .flatMap((group) => group.blocks)
        .find((block) => block.id === blockId)

      if (!block || block.type !== IntegrationBlockType.OPEN_AI)
        throw new TRPCError({
          code: 'NOT_FOUND',
          message: 'OpenAI block not found',
        })

      // Credentials are stored encrypted; decrypt to get the raw API key.
      const data = (await decrypt(
        credentials.data,
        credentials.iv
      )) as OpenAICredentials['data']

      const config = new Configuration({
        apiKey: data.apiKey,
        basePath: block.options.baseUrl,
        baseOptions: {
          headers: {
            // Some OpenAI-compatible providers (e.g. Azure OpenAI) expect the
            // key in an `api-key` header instead of the Authorization header,
            // so the key is sent both ways.
            'api-key': data.apiKey,
          },
        },
        // `api-version` query param is only set when the block configures one
        // (required by Azure OpenAI deployments).
        defaultQueryParams: isNotEmpty(block.options.apiVersion)
          ? new URLSearchParams({
              'api-version': block.options.apiVersion,
            })
          : undefined,
      })
      const openai = new OpenAIApi(config)

      const response = await openai.listModels()
      const modelsData = (await response.json()) as ResponseTypes['listModels']

      return {
        models: modelsData.data
          .sort((a, b) => b.created - a.created)
          .map((model) => model.id),
      }
    }
  )

View File

@@ -0,0 +1,6 @@
import { router } from '@/helpers/server/trpc'
import { listModels } from './listModels'
// Sub-router grouping the OpenAI block procedures; mounted on the app's
// tRPC router under the `openAI` namespace.
export const openAIRouter = router({
  listModels,
})

View File

@@ -1,9 +1,19 @@
import { Stack, useDisclosure } from '@chakra-ui/react' import {
Accordion,
AccordionButton,
AccordionIcon,
AccordionItem,
AccordionPanel,
Stack,
useDisclosure,
Text,
} from '@chakra-ui/react'
import React from 'react' import React from 'react'
import { CredentialsDropdown } from '@/features/credentials/components/CredentialsDropdown' import { CredentialsDropdown } from '@/features/credentials/components/CredentialsDropdown'
import { import {
ChatCompletionOpenAIOptions, ChatCompletionOpenAIOptions,
CreateImageOpenAIOptions, CreateImageOpenAIOptions,
defaultBaseUrl,
defaultChatCompletionOptions, defaultChatCompletionOptions,
OpenAIBlock, OpenAIBlock,
openAITasks, openAITasks,
@@ -13,15 +23,19 @@ import { useWorkspace } from '@/features/workspace/WorkspaceProvider'
import { DropdownList } from '@/components/DropdownList' import { DropdownList } from '@/components/DropdownList'
import { OpenAIChatCompletionSettings } from './createChatCompletion/OpenAIChatCompletionSettings' import { OpenAIChatCompletionSettings } from './createChatCompletion/OpenAIChatCompletionSettings'
import { createId } from '@paralleldrive/cuid2' import { createId } from '@paralleldrive/cuid2'
import { TextInput } from '@/components/inputs'
type OpenAITask = (typeof openAITasks)[number] type OpenAITask = (typeof openAITasks)[number]
type Props = { type Props = {
options: OpenAIBlock['options'] block: OpenAIBlock
onOptionsChange: (options: OpenAIBlock['options']) => void onOptionsChange: (options: OpenAIBlock['options']) => void
} }
export const OpenAISettings = ({ options, onOptionsChange }: Props) => { export const OpenAISettings = ({
block: { options, id },
onOptionsChange,
}: Props) => {
const { workspace } = useWorkspace() const { workspace } = useWorkspace()
const { isOpen, onOpen, onClose } = useDisclosure() const { isOpen, onOpen, onClose } = useDisclosure()
@@ -44,6 +58,20 @@ export const OpenAISettings = ({ options, onOptionsChange }: Props) => {
} }
} }
const updateBaseUrl = (baseUrl: string) => {
onOptionsChange({
...options,
baseUrl,
})
}
const updateApiVersion = (apiVersion: string) => {
onOptionsChange({
...options,
apiVersion,
})
}
return ( return (
<Stack> <Stack>
{workspace && ( {workspace && (
@@ -56,22 +84,51 @@ export const OpenAISettings = ({ options, onOptionsChange }: Props) => {
credentialsName="OpenAI account" credentialsName="OpenAI account"
/> />
)} )}
<OpenAICredentialsModal {options.credentialsId && (
isOpen={isOpen} <>
onClose={onClose} <Accordion allowToggle>
onNewCredentials={updateCredentialsId} <AccordionItem>
/> <AccordionButton>
<DropdownList <Text w="full" textAlign="left">
currentItem={options.task} Customize provider
items={openAITasks.slice(0, -1)} </Text>
onItemSelect={updateTask} <AccordionIcon />
placeholder="Select task" </AccordionButton>
/> <AccordionPanel as={Stack} spacing={4}>
{options.task && ( <TextInput
<OpenAITaskSettings label="Base URL"
options={options} defaultValue={options.baseUrl}
onOptionsChange={onOptionsChange} onChange={updateBaseUrl}
/> />
{options.baseUrl !== defaultBaseUrl && (
<TextInput
label="API version"
defaultValue={options.apiVersion}
onChange={updateApiVersion}
/>
)}
</AccordionPanel>
</AccordionItem>
</Accordion>
<OpenAICredentialsModal
isOpen={isOpen}
onClose={onClose}
onNewCredentials={updateCredentialsId}
/>
<DropdownList
currentItem={options.task}
items={openAITasks.slice(0, -1)}
onItemSelect={updateTask}
placeholder="Select task"
/>
{options.task && (
<OpenAITaskSettings
blockId={id}
options={options}
onOptionsChange={onOptionsChange}
/>
)}
</>
)} )}
</Stack> </Stack>
) )
@@ -80,14 +137,17 @@ export const OpenAISettings = ({ options, onOptionsChange }: Props) => {
const OpenAITaskSettings = ({ const OpenAITaskSettings = ({
options, options,
onOptionsChange, onOptionsChange,
blockId,
}: { }: {
options: ChatCompletionOpenAIOptions | CreateImageOpenAIOptions options: ChatCompletionOpenAIOptions | CreateImageOpenAIOptions
blockId: string
onOptionsChange: (options: OpenAIBlock['options']) => void onOptionsChange: (options: OpenAIBlock['options']) => void
}) => { }) => {
switch (options.task) { switch (options.task) {
case 'Create chat completion': { case 'Create chat completion': {
return ( return (
<OpenAIChatCompletionSettings <OpenAIChatCompletionSettings
blockId={blockId}
options={options} options={options}
onOptionsChange={onOptionsChange} onOptionsChange={onOptionsChange}
/> />

View File

@@ -0,0 +1,42 @@
import { Select } from '@/components/inputs/Select'
import { useTypebot } from '@/features/editor/providers/TypebotProvider'
import { useWorkspace } from '@/features/workspace/WorkspaceProvider'
import { trpc } from '@/lib/trpc'
type Props = {
credentialsId: string
blockId: string
defaultValue: string
onChange: (model: string | undefined) => void
}
/**
 * Dropdown listing the models exposed by the OpenAI provider configured on a
 * block. Models are fetched through the `openAI.listModels` tRPC query once
 * the current typebot and workspace are available; until then the underlying
 * <Select /> receives `undefined` items (its loading/disabled state).
 */
export const ModelsDropdown = ({
  credentialsId,
  blockId,
  defaultValue,
  onChange,
}: Props) => {
  const { typebot } = useTypebot()
  const { workspace } = useWorkspace()

  // Only fire the query once both ids are known; the `as string` casts are
  // safe because `enabled` guards against undefined values.
  const canFetchModels = !!typebot && !!workspace
  const { data: modelsData } = trpc.openAI.listModels.useQuery(
    {
      credentialsId,
      blockId,
      typebotId: typebot?.id as string,
      workspaceId: workspace?.id as string,
    },
    { enabled: canFetchModels }
  )

  return (
    <Select
      items={modelsData?.models}
      selectedItem={defaultValue}
      onSelect={onChange}
      placeholder="Select a model"
    />
  )
}

View File

@@ -1,9 +1,5 @@
import { TableList } from '@/components/TableList' import { TableList } from '@/components/TableList'
import { import { ChatCompletionOpenAIOptions } from '@typebot.io/schemas/features/blocks/integrations/openai'
chatCompletionModels,
ChatCompletionOpenAIOptions,
deprecatedCompletionModels,
} from '@typebot.io/schemas/features/blocks/integrations/openai'
import { ChatCompletionMessageItem } from './ChatCompletionMessageItem' import { ChatCompletionMessageItem } from './ChatCompletionMessageItem'
import { import {
Accordion, Accordion,
@@ -17,24 +13,23 @@ import {
import { TextLink } from '@/components/TextLink' import { TextLink } from '@/components/TextLink'
import { ChatCompletionResponseItem } from './ChatCompletionResponseItem' import { ChatCompletionResponseItem } from './ChatCompletionResponseItem'
import { NumberInput } from '@/components/inputs' import { NumberInput } from '@/components/inputs'
import { Select } from '@/components/inputs/Select' import { ModelsDropdown } from './ModelsDropdown'
const apiReferenceUrl = const apiReferenceUrl =
'https://platform.openai.com/docs/api-reference/chat/create' 'https://platform.openai.com/docs/api-reference/chat/create'
type Props = { type Props = {
blockId: string
options: ChatCompletionOpenAIOptions options: ChatCompletionOpenAIOptions
onOptionsChange: (options: ChatCompletionOpenAIOptions) => void onOptionsChange: (options: ChatCompletionOpenAIOptions) => void
} }
export const OpenAIChatCompletionSettings = ({ export const OpenAIChatCompletionSettings = ({
blockId,
options, options,
onOptionsChange, onOptionsChange,
}: Props) => { }: Props) => {
const updateModel = ( const updateModel = (model: string | undefined) => {
_: string | undefined,
model: (typeof chatCompletionModels)[number] | undefined
) => {
if (!model) return if (!model) return
onOptionsChange({ onOptionsChange({
...options, ...options,
@@ -79,68 +74,71 @@ export const OpenAIChatCompletionSettings = ({
</TextLink>{' '} </TextLink>{' '}
to better understand the available options. to better understand the available options.
</Text> </Text>
<Select {options.credentialsId && (
selectedItem={options.model} <>
items={chatCompletionModels.filter( <ModelsDropdown
(model) => deprecatedCompletionModels.indexOf(model) === -1 credentialsId={options.credentialsId}
)} defaultValue={options.model}
onSelect={updateModel} onChange={updateModel}
/> blockId={blockId}
<Accordion allowMultiple> />
<AccordionItem> <Accordion allowMultiple>
<AccordionButton> <AccordionItem>
<Text w="full" textAlign="left"> <AccordionButton>
Messages <Text w="full" textAlign="left">
</Text> Messages
<AccordionIcon /> </Text>
</AccordionButton> <AccordionIcon />
</AccordionButton>
<AccordionPanel pt="4"> <AccordionPanel pt="4">
<TableList <TableList
initialItems={options.messages} initialItems={options.messages}
Item={ChatCompletionMessageItem} Item={ChatCompletionMessageItem}
onItemsChange={updateMessages} onItemsChange={updateMessages}
isOrdered isOrdered
addLabel="Add message" addLabel="Add message"
/> />
</AccordionPanel> </AccordionPanel>
</AccordionItem> </AccordionItem>
<AccordionItem> <AccordionItem>
<AccordionButton> <AccordionButton>
<Text w="full" textAlign="left"> <Text w="full" textAlign="left">
Advanced settings Advanced settings
</Text> </Text>
<AccordionIcon /> <AccordionIcon />
</AccordionButton> </AccordionButton>
<AccordionPanel> <AccordionPanel>
<NumberInput <NumberInput
label="Temperature" label="Temperature"
placeholder="1" placeholder="1"
max={2} max={2}
min={0} min={0}
step={0.1} step={0.1}
defaultValue={options.advancedSettings?.temperature} defaultValue={options.advancedSettings?.temperature}
onValueChange={updateTemperature} onValueChange={updateTemperature}
/> />
</AccordionPanel> </AccordionPanel>
</AccordionItem> </AccordionItem>
<AccordionItem> <AccordionItem>
<AccordionButton> <AccordionButton>
<Text w="full" textAlign="left"> <Text w="full" textAlign="left">
Save answer Save answer
</Text> </Text>
<AccordionIcon /> <AccordionIcon />
</AccordionButton> </AccordionButton>
<AccordionPanel pt="4"> <AccordionPanel pt="4">
<TableList <TableList
initialItems={options.responseMapping} initialItems={options.responseMapping}
Item={ChatCompletionResponseItem} Item={ChatCompletionResponseItem}
onItemsChange={updateResponseMapping} onItemsChange={updateResponseMapping}
newItemDefaultProps={{ valueToExtract: 'Message content' }} newItemDefaultProps={{ valueToExtract: 'Message content' }}
/> />
</AccordionPanel> </AccordionPanel>
</AccordionItem> </AccordionItem>
</Accordion> </Accordion>
</>
)}
</Stack> </Stack>
) )
} }

View File

@@ -3,6 +3,7 @@ import { createTypebots } from '@typebot.io/lib/playwright/databaseActions'
import { createId } from '@paralleldrive/cuid2' import { createId } from '@paralleldrive/cuid2'
import { IntegrationBlockType } from '@typebot.io/schemas' import { IntegrationBlockType } from '@typebot.io/schemas'
import { parseDefaultGroupWithBlock } from '@typebot.io/lib/playwright/databaseHelpers' import { parseDefaultGroupWithBlock } from '@typebot.io/lib/playwright/databaseHelpers'
import { defaultBaseUrl } from '@typebot.io/schemas/features/blocks/integrations/openai'
const typebotId = createId() const typebotId = createId()
@@ -12,7 +13,9 @@ test('should be configurable', async ({ page }) => {
id: typebotId, id: typebotId,
...parseDefaultGroupWithBlock({ ...parseDefaultGroupWithBlock({
type: IntegrationBlockType.OPEN_AI, type: IntegrationBlockType.OPEN_AI,
options: {}, options: {
baseUrl: defaultBaseUrl,
},
}), }),
}, },
]) ])

View File

@@ -69,8 +69,7 @@ export const SettingsPopoverContent = ({ onExpandClick, ...props }: Props) => {
<PopoverContent onMouseDown={handleMouseDown} pos="relative"> <PopoverContent onMouseDown={handleMouseDown} pos="relative">
<PopoverArrow bgColor={arrowColor} /> <PopoverArrow bgColor={arrowColor} />
<PopoverBody <PopoverBody
pt="3" py="3"
pb="6"
overflowY="scroll" overflowY="scroll"
maxH="400px" maxH="400px"
ref={ref} ref={ref}
@@ -305,12 +304,7 @@ export const BlockSettings = ({
) )
} }
case IntegrationBlockType.OPEN_AI: { case IntegrationBlockType.OPEN_AI: {
return ( return <OpenAISettings block={block} onOptionsChange={updateOptions} />
<OpenAISettings
options={block.options}
onOptionsChange={updateOptions}
/>
)
} }
case IntegrationBlockType.PIXEL: { case IntegrationBlockType.PIXEL: {
return ( return (

View File

@@ -14,6 +14,7 @@ import { analyticsRouter } from '@/features/analytics/api/router'
import { collaboratorsRouter } from '@/features/collaboration/api/router' import { collaboratorsRouter } from '@/features/collaboration/api/router'
import { customDomainsRouter } from '@/features/customDomains/api/router' import { customDomainsRouter } from '@/features/customDomains/api/router'
import { whatsAppRouter } from '@/features/whatsapp/router' import { whatsAppRouter } from '@/features/whatsapp/router'
import { openAIRouter } from '@/features/blocks/integrations/openai/api/router'
export const trpcRouter = router({ export const trpcRouter = router({
getAppVersionProcedure, getAppVersionProcedure,
@@ -31,6 +32,7 @@ export const trpcRouter = router({
collaborators: collaboratorsRouter, collaborators: collaboratorsRouter,
customDomains: customDomainsRouter, customDomains: customDomainsRouter,
whatsApp: whatsAppRouter, whatsApp: whatsAppRouter,
openAI: openAIRouter,
}) })
export type AppRouter = typeof trpcRouter export type AppRouter = typeof trpcRouter

View File

@@ -2923,6 +2923,13 @@
}, },
"credentialsId": { "credentialsId": {
"type": "string" "type": "string"
},
"baseUrl": {
"type": "string",
"default": "https://api.openai.com/v1"
},
"apiVersion": {
"type": "string"
} }
}, },
"additionalProperties": false "additionalProperties": false
@@ -2937,20 +2944,7 @@
] ]
}, },
"model": { "model": {
"type": "string", "type": "string"
"enum": [
"gpt-3.5-turbo",
"gpt-3.5-turbo-0613",
"gpt-3.5-turbo-16k",
"gpt-3.5-turbo-16k-0613",
"gpt-3.5-turbo-0301",
"gpt-4",
"gpt-4-0613",
"gpt-4-32k",
"gpt-4-32k-0613",
"gpt-4-32k-0314",
"gpt-4-0314"
]
}, },
"messages": { "messages": {
"type": "array", "type": "array",
@@ -3057,6 +3051,13 @@
}, },
"credentialsId": { "credentialsId": {
"type": "string" "type": "string"
},
"baseUrl": {
"type": "string",
"default": "https://api.openai.com/v1"
},
"apiVersion": {
"type": "string"
} }
}, },
"required": [ "required": [
@@ -3120,6 +3121,13 @@
}, },
"credentialsId": { "credentialsId": {
"type": "string" "type": "string"
},
"baseUrl": {
"type": "string",
"default": "https://api.openai.com/v1"
},
"apiVersion": {
"type": "string"
} }
}, },
"required": [ "required": [
@@ -7208,6 +7216,13 @@
}, },
"credentialsId": { "credentialsId": {
"type": "string" "type": "string"
},
"baseUrl": {
"type": "string",
"default": "https://api.openai.com/v1"
},
"apiVersion": {
"type": "string"
} }
}, },
"additionalProperties": false "additionalProperties": false
@@ -7222,20 +7237,7 @@
] ]
}, },
"model": { "model": {
"type": "string", "type": "string"
"enum": [
"gpt-3.5-turbo",
"gpt-3.5-turbo-0613",
"gpt-3.5-turbo-16k",
"gpt-3.5-turbo-16k-0613",
"gpt-3.5-turbo-0301",
"gpt-4",
"gpt-4-0613",
"gpt-4-32k",
"gpt-4-32k-0613",
"gpt-4-32k-0314",
"gpt-4-0314"
]
}, },
"messages": { "messages": {
"type": "array", "type": "array",
@@ -7342,6 +7344,13 @@
}, },
"credentialsId": { "credentialsId": {
"type": "string" "type": "string"
},
"baseUrl": {
"type": "string",
"default": "https://api.openai.com/v1"
},
"apiVersion": {
"type": "string"
} }
}, },
"required": [ "required": [
@@ -7405,6 +7414,13 @@
}, },
"credentialsId": { "credentialsId": {
"type": "string" "type": "string"
},
"baseUrl": {
"type": "string",
"default": "https://api.openai.com/v1"
},
"apiVersion": {
"type": "string"
} }
}, },
"required": [ "required": [
@@ -11128,6 +11144,13 @@
}, },
"credentialsId": { "credentialsId": {
"type": "string" "type": "string"
},
"baseUrl": {
"type": "string",
"default": "https://api.openai.com/v1"
},
"apiVersion": {
"type": "string"
} }
}, },
"additionalProperties": false "additionalProperties": false
@@ -11142,20 +11165,7 @@
] ]
}, },
"model": { "model": {
"type": "string", "type": "string"
"enum": [
"gpt-3.5-turbo",
"gpt-3.5-turbo-0613",
"gpt-3.5-turbo-16k",
"gpt-3.5-turbo-16k-0613",
"gpt-3.5-turbo-0301",
"gpt-4",
"gpt-4-0613",
"gpt-4-32k",
"gpt-4-32k-0613",
"gpt-4-32k-0314",
"gpt-4-0314"
]
}, },
"messages": { "messages": {
"type": "array", "type": "array",
@@ -11262,6 +11272,13 @@
}, },
"credentialsId": { "credentialsId": {
"type": "string" "type": "string"
},
"baseUrl": {
"type": "string",
"default": "https://api.openai.com/v1"
},
"apiVersion": {
"type": "string"
} }
}, },
"required": [ "required": [
@@ -11325,6 +11342,13 @@
}, },
"credentialsId": { "credentialsId": {
"type": "string" "type": "string"
},
"baseUrl": {
"type": "string",
"default": "https://api.openai.com/v1"
},
"apiVersion": {
"type": "string"
} }
}, },
"required": [ "required": [
@@ -15188,6 +15212,13 @@
}, },
"credentialsId": { "credentialsId": {
"type": "string" "type": "string"
},
"baseUrl": {
"type": "string",
"default": "https://api.openai.com/v1"
},
"apiVersion": {
"type": "string"
} }
}, },
"additionalProperties": false "additionalProperties": false
@@ -15202,20 +15233,7 @@
] ]
}, },
"model": { "model": {
"type": "string", "type": "string"
"enum": [
"gpt-3.5-turbo",
"gpt-3.5-turbo-0613",
"gpt-3.5-turbo-16k",
"gpt-3.5-turbo-16k-0613",
"gpt-3.5-turbo-0301",
"gpt-4",
"gpt-4-0613",
"gpt-4-32k",
"gpt-4-32k-0613",
"gpt-4-32k-0314",
"gpt-4-0314"
]
}, },
"messages": { "messages": {
"type": "array", "type": "array",
@@ -15322,6 +15340,13 @@
}, },
"credentialsId": { "credentialsId": {
"type": "string" "type": "string"
},
"baseUrl": {
"type": "string",
"default": "https://api.openai.com/v1"
},
"apiVersion": {
"type": "string"
} }
}, },
"required": [ "required": [
@@ -15385,6 +15410,13 @@
}, },
"credentialsId": { "credentialsId": {
"type": "string" "type": "string"
},
"baseUrl": {
"type": "string",
"default": "https://api.openai.com/v1"
},
"apiVersion": {
"type": "string"
} }
}, },
"required": [ "required": [
@@ -19128,6 +19160,13 @@
}, },
"credentialsId": { "credentialsId": {
"type": "string" "type": "string"
},
"baseUrl": {
"type": "string",
"default": "https://api.openai.com/v1"
},
"apiVersion": {
"type": "string"
} }
}, },
"additionalProperties": false "additionalProperties": false
@@ -19142,20 +19181,7 @@
] ]
}, },
"model": { "model": {
"type": "string", "type": "string"
"enum": [
"gpt-3.5-turbo",
"gpt-3.5-turbo-0613",
"gpt-3.5-turbo-16k",
"gpt-3.5-turbo-16k-0613",
"gpt-3.5-turbo-0301",
"gpt-4",
"gpt-4-0613",
"gpt-4-32k",
"gpt-4-32k-0613",
"gpt-4-32k-0314",
"gpt-4-0314"
]
}, },
"messages": { "messages": {
"type": "array", "type": "array",
@@ -19262,6 +19288,13 @@
}, },
"credentialsId": { "credentialsId": {
"type": "string" "type": "string"
},
"baseUrl": {
"type": "string",
"default": "https://api.openai.com/v1"
},
"apiVersion": {
"type": "string"
} }
}, },
"required": [ "required": [
@@ -19325,6 +19358,13 @@
}, },
"credentialsId": { "credentialsId": {
"type": "string" "type": "string"
},
"baseUrl": {
"type": "string",
"default": "https://api.openai.com/v1"
},
"apiVersion": {
"type": "string"
} }
}, },
"required": [ "required": [
@@ -23123,6 +23163,13 @@
}, },
"credentialsId": { "credentialsId": {
"type": "string" "type": "string"
},
"baseUrl": {
"type": "string",
"default": "https://api.openai.com/v1"
},
"apiVersion": {
"type": "string"
} }
}, },
"additionalProperties": false "additionalProperties": false
@@ -23137,20 +23184,7 @@
] ]
}, },
"model": { "model": {
"type": "string", "type": "string"
"enum": [
"gpt-3.5-turbo",
"gpt-3.5-turbo-0613",
"gpt-3.5-turbo-16k",
"gpt-3.5-turbo-16k-0613",
"gpt-3.5-turbo-0301",
"gpt-4",
"gpt-4-0613",
"gpt-4-32k",
"gpt-4-32k-0613",
"gpt-4-32k-0314",
"gpt-4-0314"
]
}, },
"messages": { "messages": {
"type": "array", "type": "array",
@@ -23257,6 +23291,13 @@
}, },
"credentialsId": { "credentialsId": {
"type": "string" "type": "string"
},
"baseUrl": {
"type": "string",
"default": "https://api.openai.com/v1"
},
"apiVersion": {
"type": "string"
} }
}, },
"required": [ "required": [
@@ -23320,6 +23361,13 @@
}, },
"credentialsId": { "credentialsId": {
"type": "string" "type": "string"
},
"baseUrl": {
"type": "string",
"default": "https://api.openai.com/v1"
},
"apiVersion": {
"type": "string"
} }
}, },
"required": [ "required": [
@@ -27181,6 +27229,13 @@
}, },
"credentialsId": { "credentialsId": {
"type": "string" "type": "string"
},
"baseUrl": {
"type": "string",
"default": "https://api.openai.com/v1"
},
"apiVersion": {
"type": "string"
} }
}, },
"additionalProperties": false "additionalProperties": false
@@ -27195,20 +27250,7 @@
] ]
}, },
"model": { "model": {
"type": "string", "type": "string"
"enum": [
"gpt-3.5-turbo",
"gpt-3.5-turbo-0613",
"gpt-3.5-turbo-16k",
"gpt-3.5-turbo-16k-0613",
"gpt-3.5-turbo-0301",
"gpt-4",
"gpt-4-0613",
"gpt-4-32k",
"gpt-4-32k-0613",
"gpt-4-32k-0314",
"gpt-4-0314"
]
}, },
"messages": { "messages": {
"type": "array", "type": "array",
@@ -27315,6 +27357,13 @@
}, },
"credentialsId": { "credentialsId": {
"type": "string" "type": "string"
},
"baseUrl": {
"type": "string",
"default": "https://api.openai.com/v1"
},
"apiVersion": {
"type": "string"
} }
}, },
"required": [ "required": [
@@ -27378,6 +27427,13 @@
}, },
"credentialsId": { "credentialsId": {
"type": "string" "type": "string"
},
"baseUrl": {
"type": "string",
"default": "https://api.openai.com/v1"
},
"apiVersion": {
"type": "string"
} }
}, },
"required": [ "required": [
@@ -31999,6 +32055,81 @@
} }
} }
} }
},
"/typebots/{typebotId}/blocks/{blockId}/openai/models": {
"get": {
"operationId": "openAI-listModels",
"summary": "List OpenAI models",
"tags": [
"OpenAI"
],
"security": [
{
"Authorization": []
}
],
"parameters": [
{
"name": "typebotId",
"in": "path",
"required": true,
"schema": {
"type": "string"
}
},
{
"name": "blockId",
"in": "path",
"required": true,
"schema": {
"type": "string"
}
},
{
"name": "credentialsId",
"in": "query",
"required": true,
"schema": {
"type": "string"
}
},
{
"name": "workspaceId",
"in": "query",
"required": true,
"schema": {
"type": "string"
}
}
],
"responses": {
"200": {
"description": "Successful response",
"content": {
"application/json": {
"schema": {
"type": "object",
"properties": {
"models": {
"type": "array",
"items": {
"type": "string"
}
}
},
"required": [
"models"
],
"additionalProperties": false
}
}
}
},
"default": {
"$ref": "#/components/responses/error"
}
}
}
} }
}, },
"components": { "components": {

View File

@@ -2506,6 +2506,13 @@
}, },
"credentialsId": { "credentialsId": {
"type": "string" "type": "string"
},
"baseUrl": {
"type": "string",
"default": "https://api.openai.com/v1"
},
"apiVersion": {
"type": "string"
} }
}, },
"additionalProperties": false "additionalProperties": false
@@ -2520,20 +2527,7 @@
] ]
}, },
"model": { "model": {
"type": "string", "type": "string"
"enum": [
"gpt-3.5-turbo",
"gpt-3.5-turbo-0613",
"gpt-3.5-turbo-16k",
"gpt-3.5-turbo-16k-0613",
"gpt-3.5-turbo-0301",
"gpt-4",
"gpt-4-0613",
"gpt-4-32k",
"gpt-4-32k-0613",
"gpt-4-32k-0314",
"gpt-4-0314"
]
}, },
"messages": { "messages": {
"type": "array", "type": "array",
@@ -2640,6 +2634,13 @@
}, },
"credentialsId": { "credentialsId": {
"type": "string" "type": "string"
},
"baseUrl": {
"type": "string",
"default": "https://api.openai.com/v1"
},
"apiVersion": {
"type": "string"
} }
}, },
"required": [ "required": [
@@ -2703,6 +2704,13 @@
}, },
"credentialsId": { "credentialsId": {
"type": "string" "type": "string"
},
"baseUrl": {
"type": "string",
"default": "https://api.openai.com/v1"
},
"apiVersion": {
"type": "string"
} }
}, },
"required": [ "required": [

View File

@@ -29,8 +29,7 @@
"nextjs-cors": "2.1.2", "nextjs-cors": "2.1.2",
"node-html-parser": "^6.1.5", "node-html-parser": "^6.1.5",
"nodemailer": "6.9.3", "nodemailer": "6.9.3",
"openai": "3.3.0", "openai-edge": "1.2.2",
"openai-edge": "^1.2.0",
"qs": "6.11.2", "qs": "6.11.2",
"react": "18.2.0", "react": "18.2.0",
"react-dom": "18.2.0", "react-dom": "18.2.0",

View File

@@ -107,6 +107,8 @@ export const createChatCompletionOpenAI = async (
messages, messages,
model: options.model, model: options.model,
temperature, temperature,
baseUrl: options.baseUrl,
apiVersion: options.apiVersion,
}) })
if (!response) if (!response)
return { return {

View File

@@ -1,24 +1,30 @@
import { isNotEmpty } from '@typebot.io/lib/utils'
import { ChatReply } from '@typebot.io/schemas' import { ChatReply } from '@typebot.io/schemas'
import got, { HTTPError } from 'got' import { OpenAIBlock } from '@typebot.io/schemas/features/blocks/integrations/openai'
import type { import { HTTPError } from 'got'
CreateChatCompletionRequest, import {
CreateChatCompletionResponse, Configuration,
} from 'openai' OpenAIApi,
type CreateChatCompletionRequest,
const createChatEndpoint = 'https://api.openai.com/v1/chat/completions' type CreateChatCompletionResponse,
ResponseTypes,
} from 'openai-edge'
type Props = Pick<CreateChatCompletionRequest, 'messages' | 'model'> & { type Props = Pick<CreateChatCompletionRequest, 'messages' | 'model'> & {
apiKey: string apiKey: string
temperature: number | undefined temperature: number | undefined
currentLogs?: ChatReply['logs'] currentLogs?: ChatReply['logs']
isRetrying?: boolean isRetrying?: boolean
} } & Pick<OpenAIBlock['options'], 'apiVersion' | 'baseUrl'>
export const executeChatCompletionOpenAIRequest = async ({ export const executeChatCompletionOpenAIRequest = async ({
apiKey, apiKey,
model, model,
messages, messages,
temperature, temperature,
baseUrl,
apiVersion,
isRetrying,
currentLogs = [], currentLogs = [],
}: Props): Promise<{ }: Props): Promise<{
response?: CreateChatCompletionResponse response?: CreateChatCompletionResponse
@@ -27,22 +33,40 @@ export const executeChatCompletionOpenAIRequest = async ({
const logs: ChatReply['logs'] = currentLogs const logs: ChatReply['logs'] = currentLogs
if (messages.length === 0) return { logs } if (messages.length === 0) return { logs }
try { try {
const response = await got const config = new Configuration({
.post(createChatEndpoint, { apiKey,
basePath: baseUrl,
baseOptions: {
headers: { headers: {
Authorization: `Bearer ${apiKey}`, 'api-key': apiKey,
}, },
json: { },
model, defaultQueryParams: isNotEmpty(apiVersion)
messages, ? new URLSearchParams({
temperature, 'api-version': apiVersion,
} satisfies CreateChatCompletionRequest, })
}) : undefined,
.json<CreateChatCompletionResponse>() })
return { response, logs }
const openai = new OpenAIApi(config)
const response = await openai.createChatCompletion({
model,
messages,
temperature,
})
const completion =
(await response.json()) as ResponseTypes['createChatCompletion']
return { response: completion, logs }
} catch (error) { } catch (error) {
if (error instanceof HTTPError) { if (error instanceof HTTPError) {
if (error.response.statusCode === 503) { if (
(error.response.statusCode === 503 ||
error.response.statusCode === 500 ||
error.response.statusCode === 403) &&
!isRetrying
) {
console.log('OpenAI API error - 503, retrying in 3 seconds') console.log('OpenAI API error - 503, retrying in 3 seconds')
await new Promise((resolve) => setTimeout(resolve, 3000)) await new Promise((resolve) => setTimeout(resolve, 3000))
return executeChatCompletionOpenAIRequest({ return executeChatCompletionOpenAIRequest({
@@ -51,6 +75,9 @@ export const executeChatCompletionOpenAIRequest = async ({
messages, messages,
temperature, temperature,
currentLogs: logs, currentLogs: logs,
baseUrl,
apiVersion,
isRetrying: true,
}) })
} }
if (error.response.statusCode === 400) { if (error.response.statusCode === 400) {
@@ -67,6 +94,8 @@ export const executeChatCompletionOpenAIRequest = async ({
messages: messages.slice(1), messages: messages.slice(1),
temperature, temperature,
currentLogs: logs, currentLogs: logs,
baseUrl,
apiVersion,
}) })
} }
logs.push({ logs.push({

View File

@@ -1,6 +1,7 @@
import { parseVariableNumber } from '@/features/variables/parseVariableNumber' import { parseVariableNumber } from '@/features/variables/parseVariableNumber'
import { Connection } from '@planetscale/database' import { Connection } from '@planetscale/database'
import { decrypt } from '@typebot.io/lib/api/encryption' import { decrypt } from '@typebot.io/lib/api/encryption'
import { isNotEmpty } from '@typebot.io/lib/utils'
import { import {
ChatCompletionOpenAIOptions, ChatCompletionOpenAIOptions,
OpenAICredentials, OpenAICredentials,
@@ -42,6 +43,17 @@ export const getChatCompletionStream =
const config = new Configuration({ const config = new Configuration({
apiKey, apiKey,
basePath: options.baseUrl,
baseOptions: {
headers: {
'api-key': apiKey,
},
},
defaultQueryParams: isNotEmpty(options.apiVersion)
? new URLSearchParams({
'api-version': options.apiVersion,
})
: undefined,
}) })
const openai = new OpenAIApi(config) const openai = new OpenAIApi(config)

View File

@@ -3,7 +3,7 @@ import { transformStringVariablesToList } from '@/features/variables/transformVa
import { byId, isNotEmpty } from '@typebot.io/lib' import { byId, isNotEmpty } from '@typebot.io/lib'
import { Variable, VariableWithValue } from '@typebot.io/schemas' import { Variable, VariableWithValue } from '@typebot.io/schemas'
import { ChatCompletionOpenAIOptions } from '@typebot.io/schemas/features/blocks/integrations/openai' import { ChatCompletionOpenAIOptions } from '@typebot.io/schemas/features/blocks/integrations/openai'
import type { ChatCompletionRequestMessage } from 'openai' import type { ChatCompletionRequestMessage } from 'openai-edge'
export const parseChatCompletionMessages = export const parseChatCompletionMessages =
(variables: Variable[]) => (variables: Variable[]) =>

View File

@@ -3,7 +3,7 @@ import { connect } from '@planetscale/database'
import { env } from '@typebot.io/env' import { env } from '@typebot.io/env'
import { IntegrationBlockType, SessionState } from '@typebot.io/schemas' import { IntegrationBlockType, SessionState } from '@typebot.io/schemas'
import { StreamingTextResponse } from 'ai' import { StreamingTextResponse } from 'ai'
import { ChatCompletionRequestMessage } from 'openai' import { ChatCompletionRequestMessage } from 'openai-edge'
export const config = { export const config = {
runtime: 'edge', runtime: 'edge',

View File

@@ -5,23 +5,6 @@ import { IntegrationBlockType } from './enums'
export const openAITasks = ['Create chat completion', 'Create image'] as const export const openAITasks = ['Create chat completion', 'Create image'] as const
export const chatCompletionModels = [
'gpt-3.5-turbo',
'gpt-3.5-turbo-0613',
'gpt-3.5-turbo-16k',
'gpt-3.5-turbo-16k-0613',
'gpt-3.5-turbo-0301',
'gpt-4',
'gpt-4-0613',
'gpt-4-32k',
'gpt-4-32k-0613',
'gpt-4-32k-0314',
'gpt-4-0314',
] as const
export const deprecatedCompletionModels: (typeof chatCompletionModels)[number][] =
['gpt-3.5-turbo-0301', 'gpt-4-32k-0314', 'gpt-4-0314']
export const chatCompletionMessageRoles = [ export const chatCompletionMessageRoles = [
'system', 'system',
'user', 'user',
@@ -37,8 +20,12 @@ export const chatCompletionResponseValues = [
'Total tokens', 'Total tokens',
] as const ] as const
export const defaultBaseUrl = 'https://api.openai.com/v1'
const openAIBaseOptionsSchema = z.object({ const openAIBaseOptionsSchema = z.object({
credentialsId: z.string().optional(), credentialsId: z.string().optional(),
baseUrl: z.string().default(defaultBaseUrl),
apiVersion: z.string().optional(),
}) })
const initialOptionsSchema = z const initialOptionsSchema = z
@@ -68,7 +55,7 @@ const chatCompletionCustomMessageSchema = z.object({
const chatCompletionOptionsSchema = z const chatCompletionOptionsSchema = z
.object({ .object({
task: z.literal(openAITasks[0]), task: z.literal(openAITasks[0]),
model: z.enum(chatCompletionModels), model: z.string(),
messages: z.array( messages: z.array(
z.union([chatCompletionMessageSchema, chatCompletionCustomMessageSchema]) z.union([chatCompletionMessageSchema, chatCompletionCustomMessageSchema])
), ),
@@ -130,6 +117,7 @@ export const openAICredentialsSchema = z
export const defaultChatCompletionOptions = ( export const defaultChatCompletionOptions = (
createId: () => string createId: () => string
): ChatCompletionOpenAIOptions => ({ ): ChatCompletionOpenAIOptions => ({
baseUrl: defaultBaseUrl,
task: 'Create chat completion', task: 'Create chat completion',
messages: [ messages: [
{ {

23
pnpm-lock.yaml generated
View File

@@ -218,6 +218,9 @@ importers:
nprogress: nprogress:
specifier: 0.2.0 specifier: 0.2.0
version: 0.2.0 version: 0.2.0
openai-edge:
specifier: 1.2.2
version: 1.2.2
papaparse: papaparse:
specifier: 5.4.1 specifier: 5.4.1
version: 5.4.1 version: 5.4.1
@@ -569,12 +572,9 @@ importers:
nodemailer: nodemailer:
specifier: 6.9.3 specifier: 6.9.3
version: 6.9.3 version: 6.9.3
openai:
specifier: 3.3.0
version: 3.3.0
openai-edge: openai-edge:
specifier: ^1.2.0 specifier: 1.2.2
version: 1.2.0 version: 1.2.2
qs: qs:
specifier: 6.11.2 specifier: 6.11.2
version: 6.11.2 version: 6.11.2
@@ -17593,20 +17593,11 @@ packages:
is-wsl: 2.2.0 is-wsl: 2.2.0
dev: false dev: false
/openai-edge@1.2.0: /openai-edge@1.2.2:
resolution: {integrity: sha512-eaQs+O/1k6OZMUibNlBzWPXdHFxpUNLMy4BwhtXCFDub5iz7ve/PxOJTL8GBG3/1S1j6LIL93xjdlzCPQpbdgQ==} resolution: {integrity: sha512-C3/Ao9Hkx5uBPv9YFBpX/x59XMPgPUU4dyGg/0J2sOJ7O9D98kD+lfdOc7v/60oYo5xzMGct80uFkYLH+X2qgw==}
engines: {node: '>=18'} engines: {node: '>=18'}
dev: false dev: false
/openai@3.3.0:
resolution: {integrity: sha512-uqxI/Au+aPRnsaQRe8CojU0eCR7I0mBiKjD3sNMzY6DaC1ZVrc85u98mtJW6voDug8fgGN+DIZmTDxTthxb7dQ==}
dependencies:
axios: 0.26.1
form-data: 4.0.0
transitivePeerDependencies:
- debug
dev: false
/openapi-to-postmanv2@1.2.7: /openapi-to-postmanv2@1.2.7:
resolution: {integrity: sha512-oG3PZfAAljy5ebot8DZGLFDNNmDZ/qWqI/dboWlgg5hRj6dSSrXeiyXL6VQpcGDalxVX4jSChufOq2eDsFXp4w==} resolution: {integrity: sha512-oG3PZfAAljy5ebot8DZGLFDNNmDZ/qWqI/dboWlgg5hRj6dSSrXeiyXL6VQpcGDalxVX4jSChufOq2eDsFXp4w==}
engines: {node: '>=4'} engines: {node: '>=4'}