// app/store/config.ts — global app configuration store (zustand + persist)
  1. import { create } from "zustand";
  2. import { persist } from "zustand/middleware";
  3. import { LLMModel } from "../client/api";
  4. import { getClientConfig } from "../config/client";
  5. import { DEFAULT_INPUT_TEMPLATE, DEFAULT_MODELS, StoreKey } from "../constant";
// Union of the model names declared in DEFAULT_MODELS.
export type ModelType = (typeof DEFAULT_MODELS)[number]["name"];

// Key (combination) that submits the chat input box.
export enum SubmitKey {
  Enter = "Enter",
  CtrlEnter = "Ctrl + Enter",
  ShiftEnter = "Shift + Enter",
  AltEnter = "Alt + Enter",
  MetaEnter = "Meta + Enter",
}

// UI theme choice; Auto presumably tracks the OS setting — confirm at the consumer.
export enum Theme {
  Auto = "auto",
  Dark = "dark",
  Light = "light",
}
// Compile-time defaults for the whole app config. ChatConfig is derived
// from this object's shape, so adding a field here adds it to the store.
export const DEFAULT_CONFIG = {
  submitKey: SubmitKey.CtrlEnter as SubmitKey,
  avatar: "1f603", // emoji codepoint used as the user avatar
  fontSize: 14,
  theme: Theme.Auto as Theme,
  tightBorder: !!getClientConfig()?.isApp, // borderless look in the desktop app
  sendPreviewBubble: true,
  enableAutoGenerateTitle: true,
  sidebarWidth: 300,
  disablePromptHint: false,
  dontShowMaskSplashScreen: false, // don't show splash screen when creating a chat
  hideBuiltinMasks: false, // don't add builtin masks
  customModels: "", // user-supplied, comma-separated model names
  models: DEFAULT_MODELS as any as LLMModel[],
  // Per-request model settings; validated by ModalConfigValidator on edit.
  modelConfig: {
    model: "gpt-3.5-turbo" as ModelType,
    temperature: 0.5,
    top_p: 1,
    max_tokens: 2000,
    presence_penalty: 0,
    frequency_penalty: 0,
    sendMemory: true, // include the compressed memory prompt in requests
    historyMessageCount: 4, // number of recent messages sent as context
    compressMessageLengthThreshold: 1000,
    enableInjectSystemPrompts: true,
    template: DEFAULT_INPUT_TEMPLATE,
  },
};
// Shape of the persisted config, derived from the defaults object.
export type ChatConfig = typeof DEFAULT_CONFIG;

// Config state plus the store's actions.
export type ChatConfigStore = ChatConfig & {
  reset: () => void;
  update: (updater: (config: ChatConfig) => void) => void;
  mergeModels: (newModels: LLMModel[]) => void;
  allModels: () => LLMModel[];
};

// Convenience alias for the nested per-request model settings.
export type ModelConfig = ChatConfig["modelConfig"];
  55. export function limitNumber(
  56. x: number,
  57. min: number,
  58. max: number,
  59. defaultValue: number,
  60. ) {
  61. if (typeof x !== "number" || isNaN(x)) {
  62. return defaultValue;
  63. }
  64. return Math.min(max, Math.max(min, x));
  65. }
  66. export const ModalConfigValidator = {
  67. model(x: string) {
  68. return x as ModelType;
  69. },
  70. max_tokens(x: number) {
  71. return limitNumber(x, 0, 100000, 2000);
  72. },
  73. presence_penalty(x: number) {
  74. return limitNumber(x, -2, 2, 0);
  75. },
  76. frequency_penalty(x: number) {
  77. return limitNumber(x, -2, 2, 0);
  78. },
  79. temperature(x: number) {
  80. return limitNumber(x, 0, 1, 1);
  81. },
  82. top_p(x: number) {
  83. return limitNumber(x, 0, 1, 1);
  84. },
  85. };
// Global config store, persisted to storage under StoreKey.Config.
export const useAppConfig = create<ChatConfigStore>()(
  persist(
    (set, get) => ({
      ...DEFAULT_CONFIG,

      // Restore every field to its compile-time default.
      reset() {
        set(() => ({ ...DEFAULT_CONFIG }));
      },

      // Apply `updater` to a copy of the current state, then commit the
      // copy wholesale. NOTE(review): the copy is shallow, so updaters
      // that write nested fields (e.g. modelConfig.*) mutate objects
      // shared with the previous snapshot — confirm this is intended.
      update(updater) {
        const config = { ...get() };
        updater(config);
        set(() => config);
      },

      // Merge server-provided models over the stored list, keyed by name.
      // Old entries are kept but flagged unavailable; incoming entries
      // overwrite them and are flagged available. No-op on empty input.
      // NOTE(review): `available` is mutated in place on the existing
      // model objects before set() replaces the array.
      mergeModels(newModels) {
        if (!newModels || newModels.length === 0) {
          return;
        }
        const oldModels = get().models;
        const modelMap: Record<string, LLMModel> = {};
        for (const model of oldModels) {
          model.available = false;
          modelMap[model.name] = model;
        }
        for (const model of newModels) {
          model.available = true;
          modelMap[model.name] = model;
        }
        set(() => ({
          models: Object.values(modelMap),
        }));
      },

      // Stored models plus the user's custom models (comma-separated
      // string). Empty segments are dropped; names are not trimmed or
      // de-duplicated against the stored list.
      allModels() {
        const customModels = get()
          .customModels.split(",")
          .filter((v) => !!v && v.length > 0)
          .map((m) => ({ name: m, available: true }));
        const models = get().models.concat(customModels);
        return models;
      },
    }),
    {
      name: StoreKey.Config,
      version: 3.7,

      // Backfill fields added after the persisted snapshot's version.
      // Versions are floats compared with `<`; each branch sets the
      // fields introduced at that version to their current defaults.
      migrate(persistedState, version) {
        const state = persistedState as ChatConfig;
        if (version < 3.4) {
          state.modelConfig.sendMemory = true;
          state.modelConfig.historyMessageCount = 4;
          state.modelConfig.compressMessageLengthThreshold = 1000;
          state.modelConfig.frequency_penalty = 0;
          state.modelConfig.top_p = 1;
          state.modelConfig.template = DEFAULT_INPUT_TEMPLATE;
          state.dontShowMaskSplashScreen = false;
          state.hideBuiltinMasks = false;
        }
        if (version < 3.5) {
          state.customModels = "claude,claude-100k";
        }
        if (version < 3.6) {
          state.modelConfig.enableInjectSystemPrompts = true;
        }
        if (version < 3.7) {
          state.enableAutoGenerateTitle = true;
        }
        return state as any;
      },
    },
  ),
);