From fd2f441e02b1eecfd2139942fcb911b32ee3c1e4 Mon Sep 17 00:00:00 2001
From: Yifei Zhang
Date: Thu, 9 Nov 2023 20:45:25 +0800
Subject: [PATCH] feat: won't send max_tokens

---
 app/client/platforms/openai.ts | 3 ++-
 1 file changed, 2 insertions(+), 1 deletion(-)

diff --git a/app/client/platforms/openai.ts b/app/client/platforms/openai.ts
index 97392a004c9..4a5ddce7de6 100644
--- a/app/client/platforms/openai.ts
+++ b/app/client/platforms/openai.ts
@@ -70,7 +70,8 @@ export class ChatGPTApi implements LLMApi {
       presence_penalty: modelConfig.presence_penalty,
       frequency_penalty: modelConfig.frequency_penalty,
       top_p: modelConfig.top_p,
-      max_tokens: Math.max(modelConfig.max_tokens, 1024),
+      // max_tokens: Math.max(modelConfig.max_tokens, 1024),
+      // max_tokens is intentionally not sent; the API then falls back to its own default limit.
     };

     console.log("[Request] openai payload: ", requestPayload);
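
Note: after this patch the chat request payload is built without a max_tokens field, so the OpenAI API applies its own default output limit. The following is a minimal sketch of the resulting payload construction inside ChatGPTApi.chat(); only the fields visible in the diff are taken from the source, and anything else (messages, model, temperature) is an assumption about the surrounding method, not part of this change.

    // Sketch only, not the full chat() body.
    // `messages` and `modelConfig` are assumed to be in scope in the enclosing method.
    const requestPayload = {
      messages,
      model: modelConfig.model,
      temperature: modelConfig.temperature,
      presence_penalty: modelConfig.presence_penalty,
      frequency_penalty: modelConfig.frequency_penalty,
      top_p: modelConfig.top_p,
      // max_tokens is omitted: the API decides how many tokens it may generate.
    };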