
feat: close #2580 only use 3.5 to summarize when not using custom models

Yidadaa · 1 year ago
commit 3bd76b9156
2 changed files with 14 additions and 2 deletions
  1. app/constant.ts (+2 -0)
  2. app/store/chat.ts (+12 -2)

app/constant.ts  (+2 -0)

@@ -63,6 +63,8 @@ Knowledge cutoff: 2021-09
 Current model: {{model}}
 Current time: {{time}}`;
 
+export const SUMMARIZE_MODEL = "gpt-3.5-turbo";
+
 export const DEFAULT_MODELS = [
   {
     name: "gpt-4",

app/store/chat.ts  (+12 -2)

@@ -11,6 +11,7 @@ import {
   DEFAULT_INPUT_TEMPLATE,
   DEFAULT_SYSTEM_TEMPLATE,
   StoreKey,
+  SUMMARIZE_MODEL,
 } from "../constant";
 import { api, RequestMessage } from "../client/api";
 import { ChatControllerPool } from "../client/controller";
@@ -80,6 +81,11 @@ function createEmptySession(): ChatSession {
   };
 }
 
+function getSummarizeModel(currentModel: string) {
+  // if it is using gpt-* models, force to use 3.5 to summarize
+  return currentModel.startsWith("gpt") ? SUMMARIZE_MODEL : currentModel;
+}
+
 interface ChatStore {
   sessions: ChatSession[];
   currentSessionIndex: number;
@@ -501,7 +507,7 @@ export const useChatStore = create<ChatStore>()(
           api.llm.chat({
             messages: topicMessages,
             config: {
-              model: "gpt-3.5-turbo",
+              model: getSummarizeModel(session.mask.modelConfig.model),
             },
             onFinish(message) {
               get().updateCurrentSession(
@@ -555,7 +561,11 @@ export const useChatStore = create<ChatStore>()(
                 date: "",
               }),
             ),
-            config: { ...modelConfig, stream: true, model: "gpt-3.5-turbo" },
+            config: {
+              ...modelConfig,
+              stream: true,
+              model: getSummarizeModel(session.mask.modelConfig.model),
+            },
             onUpdate(message) {
               session.memoryPrompt = message;
             },