(openai) Truncate messages sequence automatically if reaching token limit

Baptiste Arnaud
2023-05-02 13:37:02 -04:00
parent 94735638a6
commit e58016e43a
6 changed files with 73 additions and 18 deletions
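The core of this commit is truncating the OpenAI messages sequence once it approaches the model's token limit. Below is a minimal TypeScript sketch of that idea, not the commit's actual code; the names (ChatMessage, estimateTokens, MAX_TOKENS) and the character-based token estimate are illustrative assumptions.

type ChatMessage = { role: 'system' | 'user' | 'assistant'; content: string }

// Assumed context limit; the real value depends on the selected model.
const MAX_TOKENS = 4096

// Rough estimate (~4 characters per token). A real implementation would
// use a proper tokenizer (e.g. tiktoken) for accurate counts.
const estimateTokens = (messages: ChatMessage[]): number =>
  messages.reduce((total, m) => total + Math.ceil(m.content.length / 4), 0)

// Drop the oldest non-system messages until the sequence fits the limit.
export const truncateMessages = (messages: ChatMessage[]): ChatMessage[] => {
  const truncated = [...messages]
  while (truncated.length > 1 && estimateTokens(truncated) > MAX_TOKENS) {
    const removeIndex = truncated[0]?.role === 'system' ? 1 : 0
    truncated.splice(removeIndex, 1)
  }
  return truncated
}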


@@ -77,6 +77,8 @@ export const TemplatesModal = ({ isOpen, onClose, onTypebotChoose }: Props) => {
borderRightWidth={1}
justify="space-between"
flexShrink={0}
+ overflowY="scroll"
+ className="hide-scrollbar"
>
<Stack spacing={5}>
<Stack spacing={2}>