// config.ts — persisted application configuration store (defaults, validators, migrations)
  1. import { LLMModel } from "../client/api";
  2. import { DalleSize } from "../typing";
  3. import { getClientConfig } from "../config/client";
  4. import {
  5. DEFAULT_INPUT_TEMPLATE,
  6. DEFAULT_MODELS,
  7. DEFAULT_SIDEBAR_WIDTH,
  8. StoreKey,
  9. ServiceProvider,
  10. } from "../constant";
  11. import { createPersistStore } from "../utils/store";
// Union of every model name listed in DEFAULT_MODELS (e.g. "gpt-3.5-turbo").
export type ModelType = (typeof DEFAULT_MODELS)[number]["name"];

// Keyboard chord that submits the chat input (shown verbatim in the settings UI).
export enum SubmitKey {
  Enter = "Enter",
  CtrlEnter = "Ctrl + Enter",
  ShiftEnter = "Shift + Enter",
  AltEnter = "Alt + Enter",
  MetaEnter = "Meta + Enter",
}

// Color-scheme choice; "auto" presumably defers to the OS preference — confirm in theme code.
export enum Theme {
  Auto = "auto",
  Dark = "dark",
  Light = "light",
}
// Client/build environment info (app vs. web build, optional template override).
const config = getClientConfig();

// Factory-default settings; also the shape of the persisted config state.
export const DEFAULT_CONFIG = {
  lastUpdate: Date.now(), // timestamp, to merge state
  submitKey: SubmitKey.Enter,
  avatar: "1f603", // emoji codepoint used as the default user avatar
  fontSize: 14, // chat font size in px
  theme: Theme.Auto as Theme,
  tightBorder: !!config?.isApp, // borderless layout only when running as a desktop app
  sendPreviewBubble: true,
  enableAutoGenerateTitle: true,
  sidebarWidth: DEFAULT_SIDEBAR_WIDTH,
  disablePromptHint: false,
  dontShowMaskSplashScreen: false, // dont show splash screen when create chat
  hideBuiltinMasks: false, // dont add builtin masks
  customModels: "", // comma-separated user-added model names — format assumed, confirm against parser
  models: DEFAULT_MODELS as any as LLMModel[],
  modelConfig: {
    model: "gpt-3.5-turbo" as ModelType,
    providerName: "OpenAI" as ServiceProvider,
    temperature: 0.5,
    top_p: 1,
    max_tokens: 4000,
    presence_penalty: 0,
    frequency_penalty: 0,
    sendMemory: true, // include the compressed memory prompt in requests
    historyMessageCount: 4, // how many past messages are attached to each request
    compressMessageLengthThreshold: 1000,
    enableInjectSystemPrompts: true,
    template: config?.template ?? DEFAULT_INPUT_TEMPLATE, // build-time template wins over the shipped default
    size: "1024x1024" as DalleSize, // image size for DALL·E requests
  },
};

// Full config state and its nested per-model section, derived from the defaults.
export type ChatConfig = typeof DEFAULT_CONFIG;
export type ModelConfig = ChatConfig["modelConfig"];
  59. export function limitNumber(
  60. x: number,
  61. min: number,
  62. max: number,
  63. defaultValue: number,
  64. ) {
  65. if (isNaN(x)) {
  66. return defaultValue;
  67. }
  68. return Math.min(max, Math.max(min, x));
  69. }
  70. export const ModalConfigValidator = {
  71. model(x: string) {
  72. return x as ModelType;
  73. },
  74. max_tokens(x: number) {
  75. return limitNumber(x, 0, 512000, 1024);
  76. },
  77. presence_penalty(x: number) {
  78. return limitNumber(x, -2, 2, 0);
  79. },
  80. frequency_penalty(x: number) {
  81. return limitNumber(x, -2, 2, 0);
  82. },
  83. temperature(x: number) {
  84. return limitNumber(x, 0, 2, 1);
  85. },
  86. top_p(x: number) {
  87. return limitNumber(x, 0, 1, 1);
  88. },
  89. };
  90. export const useAppConfig = createPersistStore(
  91. { ...DEFAULT_CONFIG },
  92. (set, get) => ({
  93. reset() {
  94. set(() => ({ ...DEFAULT_CONFIG }));
  95. },
  96. mergeModels(newModels: LLMModel[]) {
  97. if (!newModels || newModels.length === 0) {
  98. return;
  99. }
  100. const oldModels = get().models;
  101. const modelMap: Record<string, LLMModel> = {};
  102. for (const model of oldModels) {
  103. model.available = false;
  104. modelMap[`${model.name}@${model?.provider?.id}`] = model;
  105. }
  106. for (const model of newModels) {
  107. model.available = true;
  108. modelMap[`${model.name}@${model?.provider?.id}`] = model;
  109. }
  110. set(() => ({
  111. models: Object.values(modelMap),
  112. }));
  113. },
  114. allModels() {},
  115. }),
  116. {
  117. name: StoreKey.Config,
  118. version: 3.9,
  119. migrate(persistedState, version) {
  120. const state = persistedState as ChatConfig;
  121. if (version < 3.4) {
  122. state.modelConfig.sendMemory = true;
  123. state.modelConfig.historyMessageCount = 4;
  124. state.modelConfig.compressMessageLengthThreshold = 1000;
  125. state.modelConfig.frequency_penalty = 0;
  126. state.modelConfig.top_p = 1;
  127. state.modelConfig.template = DEFAULT_INPUT_TEMPLATE;
  128. state.dontShowMaskSplashScreen = false;
  129. state.hideBuiltinMasks = false;
  130. }
  131. if (version < 3.5) {
  132. state.customModels = "claude,claude-100k";
  133. }
  134. if (version < 3.6) {
  135. state.modelConfig.enableInjectSystemPrompts = true;
  136. }
  137. if (version < 3.7) {
  138. state.enableAutoGenerateTitle = true;
  139. }
  140. if (version < 3.8) {
  141. state.lastUpdate = Date.now();
  142. }
  143. if (version < 3.9) {
  144. state.modelConfig.template =
  145. state.modelConfig.template !== DEFAULT_INPUT_TEMPLATE
  146. ? state.modelConfig.template
  147. : config?.template ?? DEFAULT_INPUT_TEMPLATE;
  148. }
  149. return state as any;
  150. },
  151. },
  152. );