Browse Source

Merge pull request #3205 from H0llyW00dzZ/summarizelogic

Refactor Summarize Logic
DeanYao 1 year ago
parent
commit
e38b527ac2
1 changed file with 6 additions and 1 deletion
  1. 6 1
      app/store/chat.ts

+ 6 - 1
app/store/chat.ts

@@ -557,6 +557,7 @@ export const useChatStore = createPersistStore(
            messages: topicMessages,
            config: {
              model: getSummarizeModel(session.mask.modelConfig.model),
+              stream: false,
            },
            onFinish(message) {
              get().updateCurrentSession(
@@ -600,6 +601,10 @@ export const useChatStore = createPersistStore(
          historyMsgLength > modelConfig.compressMessageLengthThreshold &&
          modelConfig.sendMemory
        ) {
+          /** Destruct max_tokens while summarizing
+           * this param is just shit
+           **/
+          const { max_tokens, ...modelcfg } = modelConfig;
          api.llm.chat({
            messages: toBeSummarizedMsgs.concat(
              createMessage({
@@ -609,7 +614,7 @@ export const useChatStore = createPersistStore(
              }),
            ),
            config: {
-              ...modelConfig,
+              ...modelcfg,
              stream: true,
              model: getSummarizeModel(session.mask.modelConfig.model),
            },