Fred Liang 1 year ago
Parent
Commit
199f29e63c
2 changed files with 30 additions and 26 deletions
  1. +15 -9
      app/client/platforms/google.ts
  2. +15 -17
      app/store/chat.ts

+ 15 - 9
app/client/platforms/google.ts

@@ -21,10 +21,24 @@ export class GeminiProApi implements LLMApi {
   }
   async chat(options: ChatOptions): Promise<void> {
     const messages = options.messages.map((v) => ({
-      role: v.role.replace("assistant", "model").replace("system", "model"),
+      role: v.role.replace("assistant", "model").replace("system", "user"),
       parts: [{ text: v.content }],
     }));
 
+    // Google requires that the roles of neighboring messages not be the same
+    for (let i = 0; i < messages.length - 1; ) {
+      // Check whether the current and next message have the same role
+      if (messages[i].role === messages[i + 1].role) {
+        // Concatenate the 'parts' of the current and next item
+        messages[i].parts = messages[i].parts.concat(messages[i + 1].parts);
+        // Remove the next item
+        messages.splice(i + 1, 1);
+      } else {
+        // Move to the next item
+        i++;
+      }
+    }
+
     const modelConfig = {
       ...useAppConfig.getState().modelConfig,
       ...useChatStore.getState().currentSession().mask.modelConfig,
@@ -43,14 +57,6 @@ export class GeminiProApi implements LLMApi {
         topP: modelConfig.top_p,
         // "topK": modelConfig.top_k,
       },
-      // stream: options.config.stream,
-      // model: modelConfig.model,
-      // temperature: modelConfig.temperature,
-      // presence_penalty: modelConfig.presence_penalty,
-      // frequency_penalty: modelConfig.frequency_penalty,
-      // top_p: modelConfig.top_p,
-      // max_tokens: Math.max(modelConfig.max_tokens, 1024),
-      // Please do not ask me why not send max_tokens, no reason, this param is just shit, I dont want to explain anymore.
     };
 
     console.log("[Request] google payload: ", requestPayload);
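
For context, the merge loop added above collapses adjacent messages that share a role, since (per the comment in the commit) Gemini rejects histories where neighboring turns repeat a role. A minimal standalone sketch of the same technique follows; the GeminiMessage type and the sample data are illustrative assumptions, not the project's exports:

// Illustrative sketch of the adjacent-role merge added in chat() above.
type GeminiMessage = { role: string; parts: { text: string }[] };

function mergeAdjacentRoles(messages: GeminiMessage[]): GeminiMessage[] {
  for (let i = 0; i < messages.length - 1; ) {
    if (messages[i].role === messages[i + 1].role) {
      // Fold the next message's parts into the current one, then drop it.
      messages[i].parts = messages[i].parts.concat(messages[i + 1].parts);
      messages.splice(i + 1, 1);
    } else {
      i++;
    }
  }
  return messages;
}

// With "system" now mapped to "user", a leading system prompt merges into
// the first user turn instead of producing two consecutive "user" messages.
console.log(
  mergeAdjacentRoles([
    { role: "user", parts: [{ text: "You are a helpful assistant." }] },
    { role: "user", parts: [{ text: "Hello" }] },
    { role: "model", parts: [{ text: "Hi there!" }] },
  ]),
);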

+ 15 - 17
app/store/chat.ts

@@ -389,24 +389,22 @@ export const useChatStore = createPersistStore(
         const shouldInjectSystemPrompts = modelConfig.enableInjectSystemPrompts;
 
         var systemPrompts: ChatMessage[] = [];
-        if (modelConfig.model !== "gemini-pro") {
-          systemPrompts = shouldInjectSystemPrompts
-            ? [
-                createMessage({
-                  role: "system",
-                  content: fillTemplateWith("", {
-                    ...modelConfig,
-                    template: DEFAULT_SYSTEM_TEMPLATE,
-                  }),
+        systemPrompts = shouldInjectSystemPrompts
+          ? [
+              createMessage({
+                role: "system",
+                content: fillTemplateWith("", {
+                  ...modelConfig,
+                  template: DEFAULT_SYSTEM_TEMPLATE,
                 }),
-              ]
-            : [];
-          if (shouldInjectSystemPrompts) {
-            console.log(
-              "[Global System Prompt] ",
-              systemPrompts.at(0)?.content ?? "empty",
-            );
-          }
+              }),
+            ]
+          : [];
+        if (shouldInjectSystemPrompts) {
+          console.log(
+            "[Global System Prompt] ",
+            systemPrompts.at(0)?.content ?? "empty",
+          );
         }
 
         // long term memory
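
Note: the chat.ts change above drops the earlier modelConfig.model !== "gemini-pro" guard, so the global system prompt is now injected for every model. This only works together with the google.ts change: the "system" role is mapped to "user" there, and the merge loop folds it into the next turn. A small sketch of that flow; the ChatMsg type and helper name are assumptions for demonstration, not the project's exports:

// Illustrative sketch; mirrors the replace() chain in google.ts after this commit.
type ChatMsg = { role: "system" | "user" | "assistant"; content: string };

function toGeminiRole(role: ChatMsg["role"]): string {
  return role.replace("assistant", "model").replace("system", "user");
}

const history: ChatMsg[] = [
  { role: "system", content: "You are a helpful assistant." }, // injected prompt
  { role: "user", content: "Hello" },
];

// Both map to "user", so the merge loop in google.ts collapses them into a
// single user turn, satisfying Gemini's alternating-role requirement.
console.log(history.map((m) => toGeminiRole(m.role))); // ["user", "user"]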