
fix: #3192 use smaller max_tokens as default

Yifei Zhang committed 1 year ago
commit 87e3d663a2
1 changed file with 1 addition and 1 deletion
  1. app/store/config.ts (+1, -1)

app/store/config.ts (+1, -1)

@@ -49,7 +49,7 @@ export const DEFAULT_CONFIG = {
     model: "gpt-3.5-turbo" as ModelType,
     temperature: 0.5,
     top_p: 1,
-    max_tokens: 8192,
+    max_tokens: 4000,
     presence_penalty: 0,
     frequency_penalty: 0,
     sendMemory: true,
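
For reference, a minimal sketch of how the affected block of app/store/config.ts reads after this commit. It is reconstructed only from the hunk above; the modelConfig nesting, the ModelType placeholder, and the omitted surrounding fields are assumptions for illustration, not the file's full contents.

    // Sketch (not the full file): the defaults touched by this commit.
    type ModelType = string; // placeholder for the project's real ModelType

    export const DEFAULT_CONFIG = {
      // ...unrelated app settings omitted (assumed)...
      modelConfig: {
        model: "gpt-3.5-turbo" as ModelType,
        temperature: 0.5,
        top_p: 1,
        max_tokens: 4000, // lowered from 8192 so the default fits common model limits (#3192)
        presence_penalty: 0,
        frequency_penalty: 0,
        sendMemory: true,
      },
    };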