2
0

⬆️ (openai) Replace openai-edge with openai and upgrade next

This commit is contained in:
Baptiste Arnaud
2023-10-06 14:22:38 +02:00
parent dfcaa0f1d0
commit 225dfed313
15 changed files with 415 additions and 178 deletions

View File

@@ -33,6 +33,7 @@
"@trpc/next": "10.34.0",
"@trpc/react-query": "10.34.0",
"@trpc/server": "10.34.0",
"@typebot.io/bot-engine": "workspace:*",
"@typebot.io/emails": "workspace:*",
"@typebot.io/env": "workspace:*",
"@typebot.io/nextjs": "workspace:*",
@@ -65,13 +66,13 @@
"libphonenumber-js": "1.10.37",
"micro": "10.0.1",
"micro-cors": "0.1.1",
"next": "13.4.3",
"next": "13.5.4",
"next-auth": "4.22.1",
"next-international": "0.9.5",
"nextjs-cors": "^2.1.2",
"nodemailer": "6.9.3",
"nprogress": "0.2.0",
"openai-edge": "1.2.2",
"openai": "^4.11.1",
"papaparse": "5.4.1",
"posthog-js": "^1.77.1",
"posthog-node": "3.1.1",
@@ -89,8 +90,7 @@
"tinycolor2": "1.6.0",
"trpc-openapi": "1.2.0",
"unsplash-js": "^7.0.18",
"use-debounce": "9.0.4",
"@typebot.io/bot-engine": "workspace:*"
"use-debounce": "9.0.4"
},
"devDependencies": {
"@chakra-ui/styled-system": "2.9.1",

View File

@@ -3,13 +3,13 @@ import { authenticatedProcedure } from '@/helpers/server/trpc'
import { TRPCError } from '@trpc/server'
import { z } from 'zod'
import { isReadWorkspaceFobidden } from '@/features/workspace/helpers/isReadWorkspaceFobidden'
import { Configuration, OpenAIApi, ResponseTypes } from 'openai-edge'
import { decrypt } from '@typebot.io/lib/api'
import {
OpenAICredentials,
defaultBaseUrl,
} from '@typebot.io/schemas/features/blocks/integrations/openai'
import { isNotEmpty } from '@typebot.io/lib/utils'
import { OpenAI, ClientOptions } from 'openai'
export const listModels = authenticatedProcedure
.meta({
@@ -79,41 +79,26 @@ export const listModels = authenticatedProcedure
credentials.iv
)) as OpenAICredentials['data']
const config = new Configuration({
const config = {
apiKey: data.apiKey,
basePath: baseUrl,
baseOptions: {
headers: {
'api-key': data.apiKey,
},
baseURL: baseUrl,
defaultHeaders: {
'api-key': data.apiKey,
},
defaultQueryParams: isNotEmpty(apiVersion)
? new URLSearchParams({
defaultQuery: isNotEmpty(apiVersion)
? {
'api-version': apiVersion,
})
}
: undefined,
})
} satisfies ClientOptions
const openai = new OpenAIApi(config)
const openai = new OpenAI(config)
const response = await openai.listModels()
const modelsData = (await response.json()) as
| ResponseTypes['listModels']
| {
error: unknown
}
if ('error' in modelsData)
throw new TRPCError({
code: 'INTERNAL_SERVER_ERROR',
message: 'Could not list models',
cause: modelsData.error,
})
const models = await openai.models.list()
return {
models:
modelsData.data
models.data
.sort((a, b) => b.created - a.created)
.map((model) => model.id) ?? [],
}

View File

@@ -20,7 +20,7 @@
"aos": "2.3.4",
"focus-visible": "5.2.0",
"framer-motion": "10.12.20",
"next": "13.4.3",
"next": "13.5.4",
"react": "18.2.0",
"react-dom": "18.2.0"
},

View File

@@ -14,23 +14,23 @@
"@planetscale/database": "^1.8.0",
"@sentry/nextjs": "7.66.0",
"@trpc/server": "10.34.0",
"@typebot.io/bot-engine": "workspace:*",
"@typebot.io/nextjs": "workspace:*",
"@typebot.io/prisma": "workspace:*",
"ai": "2.1.32",
"ai": "2.2.14",
"bot-engine": "workspace:*",
"cors": "2.8.5",
"google-spreadsheet": "4.0.2",
"got": "12.6.0",
"next": "13.4.3",
"next": "13.5.4",
"nextjs-cors": "2.1.2",
"nodemailer": "6.9.3",
"openai-edge": "1.2.2",
"openai": "^4.11.1",
"qs": "6.11.2",
"react": "18.2.0",
"react-dom": "18.2.0",
"stripe": "12.13.0",
"trpc-openapi": "1.2.0",
"@typebot.io/bot-engine": "workspace:*"
"trpc-openapi": "1.2.0"
},
"devDependencies": {
"@faire/mjml-react": "3.3.0",

View File

@@ -2,14 +2,19 @@ import { connect } from '@planetscale/database'
import { env } from '@typebot.io/env'
import { IntegrationBlockType, SessionState } from '@typebot.io/schemas'
import { StreamingTextResponse } from 'ai'
import { ChatCompletionRequestMessage } from 'openai-edge'
import { getChatCompletionStream } from '@typebot.io/bot-engine/blocks/integrations/openai/getChatCompletionStream'
import OpenAI from 'openai'
import { NextResponse } from 'next/dist/server/web/spec-extension/response'
export const config = {
runtime: 'edge',
regions: ['lhr1'],
}
const responseHeaders = {
'Access-Control-Allow-Origin': '*',
}
const handler = async (req: Request) => {
if (req.method === 'OPTIONS') {
return new Response('ok', {
@@ -23,12 +28,20 @@ const handler = async (req: Request) => {
}
const { sessionId, messages } = (await req.json()) as {
sessionId: string
messages: ChatCompletionRequestMessage[]
messages: OpenAI.Chat.ChatCompletionMessage[]
}
if (!sessionId) return new Response('No session ID provided', { status: 400 })
if (!sessionId)
return NextResponse.json(
{ message: 'No session ID provided' },
{ status: 400, headers: responseHeaders }
)
if (!messages) return new Response('No messages provided', { status: 400 })
if (!messages)
return NextResponse.json(
{ message: 'No messages provided' },
{ status: 400, headers: responseHeaders }
)
const conn = connect({ url: env.DATABASE_URL })
@@ -40,7 +53,11 @@ const handler = async (req: Request) => {
const state = (chatSession.rows.at(0) as { state: SessionState } | undefined)
?.state
if (!state) return new Response('No state found', { status: 400 })
if (!state)
return NextResponse.json(
{ message: 'No state found' },
{ status: 400, headers: responseHeaders }
)
const group = state.typebotsQueue[0].typebot.groups.find(
(group) => group.id === state.currentBlock?.groupId
@@ -53,36 +70,46 @@
const block = blockIndex >= 0 ? group?.blocks[blockIndex ?? 0] : null
if (!block || !group)
return new Response('Current block not found', { status: 400 })
return NextResponse.json(
{ message: 'Current block not found' },
{ status: 400, headers: responseHeaders }
)
if (
block.type !== IntegrationBlockType.OPEN_AI ||
block.options.task !== 'Create chat completion'
)
return new Response('Current block is not an OpenAI block', { status: 400 })
return NextResponse.json(
{ message: 'Current block is not an OpenAI block' },
{ status: 400, headers: responseHeaders }
)
const streamOrResponse = await getChatCompletionStream(conn)(
state,
block.options,
messages
)
try {
const stream = await getChatCompletionStream(conn)(
state,
block.options,
messages
)
if (!stream)
return NextResponse.json(
{ message: 'Could not create stream' },
{ status: 400, headers: responseHeaders }
)
if (!streamOrResponse)
return new Response('Could not create stream', { status: 400 })
if ('ok' in streamOrResponse)
return new Response(streamOrResponse.body, {
status: streamOrResponse.status,
headers: {
'Access-Control-Allow-Origin': '*',
},
return new StreamingTextResponse(stream, {
headers: responseHeaders,
})
return new StreamingTextResponse(streamOrResponse, {
headers: {
'Access-Control-Allow-Origin': '*',
},
})
} catch (error) {
if (error instanceof OpenAI.APIError) {
const { name, status, message } = error
return NextResponse.json(
{ name, status, message },
{ status, headers: responseHeaders }
)
} else {
throw error
}
}
}
export default handler