|
@@ -70,7 +70,8 @@ export class ChatGPTApi implements LLMApi {
|
|
|
presence_penalty: modelConfig.presence_penalty,
|
|
|
frequency_penalty: modelConfig.frequency_penalty,
|
|
|
top_p: modelConfig.top_p,
|
|
|
- max_tokens: Math.max(modelConfig.max_tokens, 1024),
|
|
|
+ // max_tokens: Math.max(modelConfig.max_tokens, 1024),
|
|
|
+ // Intentionally omit max_tokens: the previous Math.max(..., 1024) floor could cap or inflate responses unexpectedly, and the upstream API applies a sensible default when the field is absent.
|
|
|
};
|
|
|
|
|
|
console.log("[Request] openai payload: ", requestPayload);
|