
feat: add top p config

Yidadaa 1 year ago
Parent
Commit 823032617d
4 changed files with 35 additions and 3 deletions
  1. app/components/model-config.tsx (+19 -0)
  2. app/locales/cn.ts (+4 -0)
  3. app/locales/en.ts (+5 -1)
  4. app/store/config.ts (+7 -2)

+ 19 - 0
app/components/model-config.tsx

@@ -48,6 +48,25 @@ export function ModelConfigList(props: {
           }}
         ></InputRange>
       </ListItem>
+      <ListItem
+        title={Locale.Settings.TopP.Title}
+        subTitle={Locale.Settings.TopP.SubTitle}
+      >
+        <InputRange
+          value={(props.modelConfig.top_p ?? 1).toFixed(1)}
+          min="0"
+          max="1"
+          step="0.1"
+          onChange={(e) => {
+            props.updateConfig(
+              (config) =>
+                (config.top_p = ModalConfigValidator.top_p(
+                  e.currentTarget.valueAsNumber,
+                )),
+            );
+          }}
+        ></InputRange>
+      </ListItem>
       <ListItem
         title={Locale.Settings.MaxTokens.Title}
         subTitle={Locale.Settings.MaxTokens.SubTitle}
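
This commit only surfaces top_p in the settings UI and persists it in the config store; forwarding the value to the actual chat completion request is outside this diff. As a rough sketch of where the setting ultimately lands, assuming a request builder that reads modelConfig (the makeRequestBody helper below is illustrative, not code from this repository), the OpenAI chat completions API accepts top_p alongside temperature:

```ts
// Illustrative sketch only -- not part of the commit shown above.
// Assumes a modelConfig shaped like DEFAULT_CONFIG.modelConfig and shows
// where a clamped top_p value would typically appear in an OpenAI-style
// chat completion request body.
interface ModelConfig {
  model: string;
  temperature: number;
  top_p: number;
  max_tokens: number;
  presence_penalty: number;
  frequency_penalty: number;
}

interface ChatMessage {
  role: "system" | "user" | "assistant";
  content: string;
}

function makeRequestBody(messages: ChatMessage[], modelConfig: ModelConfig) {
  return {
    messages,
    model: modelConfig.model,
    temperature: modelConfig.temperature,
    top_p: modelConfig.top_p, // new field introduced by this commit's config
    max_tokens: modelConfig.max_tokens,
    presence_penalty: modelConfig.presence_penalty,
    frequency_penalty: modelConfig.frequency_penalty,
  };
}
```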

+ 4 - 0
app/locales/cn.ts

@@ -214,6 +214,10 @@ const cn = {
       Title: "随机性 (temperature)",
       SubTitle: "值越大,回复越随机",
     },
+    TopP: {
+      Title: "核采样 (top_p)",
+      SubTitle: "与随机性类似,但不要和随机性一起更改",
+    },
     MaxTokens: {
       Title: "单次回复限制 (max_tokens)",
       SubTitle: "单次交互所用的最大 Token 数",

+ 5 - 1
app/locales/en.ts

@@ -215,6 +215,10 @@ const en: LocaleType = {
       Title: "Temperature",
      SubTitle: "A larger value makes the output more random",
     },
+    TopP: {
+      Title: "Top P",
+      SubTitle: "Do not alter this value together with temperature",
+    },
     MaxTokens: {
       Title: "Max Tokens",
       SubTitle: "Maximum length of input tokens and generated tokens",
@@ -249,7 +253,7 @@ const en: LocaleType = {
   },
   Context: {
     Toast: (x: any) => `With ${x} contextual prompts`,
-    Edit: "Contextual and Memory Prompts",
+    Edit: "Current Chat Settings",
     Add: "Add a Prompt",
     Clear: "Context Cleared",
     Revert: "Revert",

+ 7 - 2
app/store/config.ts

@@ -33,6 +33,7 @@ export const DEFAULT_CONFIG = {
   modelConfig: {
     model: "gpt-3.5-turbo" as ModelType,
     temperature: 0.5,
+    top_p: 1,
     max_tokens: 2000,
     presence_penalty: 0,
     frequency_penalty: 0,
@@ -158,6 +159,9 @@ export const ModalConfigValidator = {
   temperature(x: number) {
     return limitNumber(x, 0, 1, 1);
   },
+  top_p(x: number) {
+    return limitNumber(x, 0, 1, 1);
+  },
 };
 
 export const useAppConfig = create<ChatConfigStore>()(
@@ -177,15 +181,16 @@ export const useAppConfig = create<ChatConfigStore>()(
     }),
     {
       name: StoreKey.Config,
-      version: 3.2,
+      version: 3.3,
       migrate(persistedState, version) {
-        if (version === 3.2) return persistedState as any;
+        if (version === 3.3) return persistedState as any;
 
         const state = persistedState as ChatConfig;
         state.modelConfig.sendMemory = true;
         state.modelConfig.historyMessageCount = 4;
         state.modelConfig.compressMessageLengthThreshold = 1000;
         state.modelConfig.frequency_penalty = 0;
+        state.modelConfig.top_p = 1;
         state.modelConfig.template = DEFAULT_INPUT_TEMPLATE;
         state.dontShowMaskSplashScreen = false;
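
ModalConfigValidator.top_p delegates to limitNumber, which is defined elsewhere in app/store/config.ts and is not shown in this diff. Judging only from the call sites limitNumber(x, 0, 1, 1), it clamps a value into a range and falls back to a default for unusable input; a minimal sketch of that assumed behavior:

```ts
// Assumed behavior of limitNumber, inferred from its call sites in this diff;
// the real implementation in app/store/config.ts is not reproduced here.
function limitNumber(
  x: number,
  min: number,
  max: number,
  defaultValue: number,
): number {
  // Fall back to the default when the input is not a usable number.
  if (typeof x !== "number" || isNaN(x)) {
    return defaultValue;
  }
  // Otherwise clamp into [min, max].
  return Math.min(max, Math.max(min, x));
}

// With this behavior, out-of-range slider or migrated values are clamped:
// limitNumber(1.5, 0, 1, 1)  -> 1
// limitNumber(-0.2, 0, 1, 1) -> 0
```

The store version also moves from 3.2 to 3.3 so that previously persisted configs pass through migrate, which backfills top_p: 1 (the OpenAI default) on older state.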