@@ -557,6 +557,7 @@ export const useChatStore = createPersistStore(
       messages: topicMessages,
       config: {
         model: getSummarizeModel(session.mask.modelConfig.model),
+        stream: false,
       },
       onFinish(message) {
         get().updateCurrentSession(
@@ -600,6 +601,10 @@ export const useChatStore = createPersistStore(
       historyMsgLength > modelConfig.compressMessageLengthThreshold &&
       modelConfig.sendMemory
     ) {
+      /** Destruct max_tokens while summarizing
+       * this param is just shit
+       **/
+      const { max_tokens, ...modelcfg } = modelConfig;
       api.llm.chat({
         messages: toBeSummarizedMsgs.concat(
           createMessage({
@@ -609,7 +614,7 @@ export const useChatStore = createPersistStore(
           }),
         ),
         config: {
-          ...modelConfig,
+          ...modelcfg,
           stream: true,
           model: getSummarizeModel(session.mask.modelConfig.model),
         },