From 02bd2acbd3dfee3d2ef9fbdce2a39066ef1aff64 Mon Sep 17 00:00:00 2001
From: Marco Guaspari Worms
Date: Tue, 5 Sep 2023 14:28:57 -0300
Subject: [PATCH 1/2] let length be the maximum automatically if set to -1

---
 src/components/playground/ConfigSidebar.tsx | 8 ++++++--
 1 file changed, 6 insertions(+), 2 deletions(-)

diff --git a/src/components/playground/ConfigSidebar.tsx b/src/components/playground/ConfigSidebar.tsx
index f09d77f0..d850a982 100644
--- a/src/components/playground/ConfigSidebar.tsx
+++ b/src/components/playground/ConfigSidebar.tsx
@@ -43,11 +43,15 @@ export default function ConfigSidebar({}: Props) {
       />
       <Slider
         range={[-1, OpenAIChatModels[config.model].maxLimit || 8192]}
         step={1}
         value={config.max_tokens as number}
         onChange={(value: OpenAIConfig["max_tokens"]) =>
-          handleUpdateConfig("max_tokens", value)
+          if (value === -1) {
+            handleUpdateConfig("max_tokens", undefined)
+          } else {
+            handleUpdateConfig("max_tokens", value)
+          }
         }
       />

From: Marco Guaspari Worms
Date: Tue, 5 Sep 2023 14:34:21 -0300
Subject: [PATCH 2/2] fix code

---
 src/components/playground/ConfigSidebar.tsx | 4 ++--
 1 file changed, 2 insertions(+), 2 deletions(-)

diff --git a/src/components/playground/ConfigSidebar.tsx b/src/components/playground/ConfigSidebar.tsx
index d850a982..28eddbae 100644
--- a/src/components/playground/ConfigSidebar.tsx
+++ b/src/components/playground/ConfigSidebar.tsx
@@ -46,13 +46,13 @@ export default function ConfigSidebar({}: Props) {
         range={[-1, OpenAIChatModels[config.model].maxLimit || 8192]}
         step={1}
         value={config.max_tokens as number}
-        onChange={(value: OpenAIConfig["max_tokens"]) =>
+        onChange={(value: OpenAIConfig["max_tokens"]) => {
           if (value === -1) {
             handleUpdateConfig("max_tokens", undefined)
           } else {
             handleUpdateConfig("max_tokens", value)
           }
-        }
+        }}
       />
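
Why the follow-up commit was needed: PATCH 1/2 placed an `if` statement directly as the
concise body of the arrow function inside the JSX expression, which is a syntax error
(a concise arrow body must be an expression, not a statement). PATCH 2/2 wraps the body
in braces, with the final `}}` closing both the function block and the JSX expression.
A minimal TypeScript sketch of the resulting behavior, using a hypothetical `update`
helper in place of the repo's `handleUpdateConfig`:

    // Hypothetical stand-in for handleUpdateConfig, for illustration only.
    const update = (key: "max_tokens", value: number | undefined) =>
      console.log(key, "->", value);

    // Invalid (as in PATCH 1/2): a concise arrow body must be an expression.
    // const onChange = (value: number) => if (value === -1) { ... }

    // Valid (as in PATCH 2/2): a block body allows statements.
    const onChange = (value: number) => {
      if (value === -1) {
        update("max_tokens", undefined); // -1 means "let the model's maximum apply"
      } else {
        update("max_tokens", value);
      }
    };

    onChange(-1);  // max_tokens -> undefined
    onChange(256); // max_tokens -> 256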