```diff
@@ -91,8 +91,9 @@ export class HunyuanApi implements LLMApi {
   async chat(options: ChatOptions) {
     const visionModel = isVisionModel(options.config.model);
-    const messages = options.messages.map((v) => ({
-      role: v.role,
+    const messages = options.messages.map((v, index) => ({
+      // "Messages 中 system 角色必须位于列表的最开始"
+      role: index !== 0 && v.role === "system" ? "user" : v.role,
       content: visionModel ? v.content : getMessageTextContent(v),
     }));
```
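The quoted comment is the Hunyuan API requirement that, in `Messages`, the `system` role must appear only at the very beginning of the list; any later `system` entry is therefore downgraded to `user`. Below is a minimal standalone sketch of that role normalization, using a simplified message shape rather than the project's `ChatOptions` messages and omitting the vision-model / `getMessageTextContent` content handling:

```ts
// Simplified stand-ins for the real message types (assumption for this sketch).
type Role = "system" | "user" | "assistant";
interface Message {
  role: Role;
  content: string;
}

// Hunyuan only accepts a system role at the start of the message list,
// so any later "system" entry is remapped to "user".
function normalizeRoles(messages: Message[]): Message[] {
  return messages.map((v, index) => ({
    role: index !== 0 && v.role === "system" ? "user" : v.role,
    content: v.content,
  }));
}

// Example: a mid-conversation system message becomes a user message.
const normalized = normalizeRoles([
  { role: "system", content: "You are a helpful assistant." },
  { role: "user", content: "Hi" },
  { role: "system", content: "Stay concise." }, // not first → "user"
]);
console.log(normalized.map((m) => m.role)); // ["system", "user", "user"]
```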