api.ts

import { getClientConfig } from "../config/client";
import {
  ACCESS_CODE_PREFIX,
  Azure,
  ModelProvider,
  ServiceProvider,
} from "../constant";
import { ChatMessage, ModelType, useAccessStore, useChatStore } from "../store";
import { ChatGPTApi } from "./platforms/openai";
import { GeminiProApi } from "./platforms/google";
import { ClaudeApi } from "./platforms/anthropic";
import { HunyuanApi } from "./platforms/tencent";

export const ROLES = ["system", "user", "assistant"] as const;
export type MessageRole = (typeof ROLES)[number];

export const Models = ["gpt-3.5-turbo", "gpt-4"] as const;
export type ChatModel = ModelType;

export interface MultimodalContent {
  type: "text" | "image_url";
  text?: string;
  image_url?: {
    url: string;
  };
}

export interface RequestMessage {
  role: MessageRole;
  content: string | MultimodalContent[];
}

export interface LLMConfig {
  model: string;
  providerName?: string;
  temperature?: number;
  top_p?: number;
  stream?: boolean;
  presence_penalty?: number;
  frequency_penalty?: number;
}

export interface ChatOptions {
  messages: RequestMessage[];
  config: LLMConfig;

  onUpdate?: (message: string, chunk: string) => void;
  onFinish: (message: string) => void;
  onError?: (err: Error) => void;
  onController?: (controller: AbortController) => void;
}
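
// Illustrative note (an assumption, not asserted by this file): with stream
// enabled, onUpdate is typically called with the accumulated reply so far plus
// the newest chunk, while onFinish receives the complete reply exactly once.
// A minimal options object, using a placeholder model and prompt:
//
//   const exampleOptions: ChatOptions = {
//     messages: [{ role: "user", content: "Hello" }],
//     config: { model: "gpt-3.5-turbo", stream: true },
//     onUpdate: (message, chunk) => console.log("[partial]", chunk),
//     onFinish: (message) => console.log("[done]", message),
//     onError: (err) => console.error(err),
//   };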

export interface LLMUsage {
  used: number;
  total: number;
}

export interface LLMModel {
  name: string;
  displayName?: string;
  available: boolean;
  provider: LLMModelProvider;
}

export interface LLMModelProvider {
  id: string;
  providerName: string;
  providerType: string;
}

export abstract class LLMApi {
  abstract chat(options: ChatOptions): Promise<void>;
  abstract usage(): Promise<LLMUsage>;
  abstract models(): Promise<LLMModel[]>;
}
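
// The platform clients imported above (ChatGPTApi, GeminiProApi, ClaudeApi,
// HunyuanApi) are assigned to ClientApi.llm below, so each is expected to
// satisfy this LLMApi contract. A hypothetical minimal implementation sketch:
//
//   class EchoApi extends LLMApi {
//     async chat(options: ChatOptions): Promise<void> {
//       // echo the last user message back through the finish callback
//       const last = options.messages[options.messages.length - 1];
//       options.onFinish(typeof last?.content === "string" ? last.content : "");
//     }
//     async usage(): Promise<LLMUsage> {
//       return { used: 0, total: 0 };
//     }
//     async models(): Promise<LLMModel[]> {
//       return [];
//     }
//   }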

type ProviderName = "openai" | "azure" | "claude" | "palm";

interface Model {
  name: string;
  provider: ProviderName;
  ctxlen: number;
}

interface ChatProvider {
  name: ProviderName;
  apiConfig: {
    baseUrl: string;
    apiKey: string;
    summaryModel: Model;
  };
  models: Model[];

  chat: () => void;
  usage: () => void;
}

export class ClientApi {
  public llm: LLMApi;

  constructor(provider: ModelProvider = ModelProvider.GPT) {
    switch (provider) {
      case ModelProvider.GeminiPro:
        this.llm = new GeminiProApi();
        break;
      case ModelProvider.Claude:
        this.llm = new ClaudeApi();
        break;
      case ModelProvider.Hunyuan:
        this.llm = new HunyuanApi();
        break;
      default:
        this.llm = new ChatGPTApi();
    }
  }

  config() {}

  prompts() {}

  masks() {}

  async share(messages: ChatMessage[], avatarUrl: string | null = null) {
    const msgs = messages
      .map((m) => ({
        from: m.role === "user" ? "human" : "gpt",
        value: m.content,
      }))
      .concat([
        {
          from: "human",
          value:
            "Share from [NextChat]: https://github.com/Yidadaa/ChatGPT-Next-Web",
        },
      ]);
    // Notice to developers of downstream forks: for the sake of open-source LLM
    // development, please do not modify the message above; it is used for later
    // data cleaning.
    // Please do not modify this message
    console.log("[Share]", messages, msgs);
    const clientConfig = getClientConfig();
    const proxyUrl = "/sharegpt";
    const rawUrl = "https://sharegpt.com/api/conversations";
    const shareUrl = clientConfig?.isApp ? rawUrl : proxyUrl;
    const res = await fetch(shareUrl, {
      body: JSON.stringify({
        avatarUrl,
        items: msgs,
      }),
      headers: {
        "Content-Type": "application/json",
      },
      method: "POST",
    });

    const resJson = await res.json();
    console.log("[Share]", resJson);
    if (resJson.id) {
      return `https://shareg.pt/${resJson.id}`;
    }
  }
}
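
// Sharing sketch (illustrative; assumes the current session exposes its
// ChatMessage[] as `messages`, in line with the stores used in this file):
//
//   const api = new ClientApi(ModelProvider.GPT);
//   const link = await api.share(useChatStore.getState().currentSession().messages);
//   if (link) console.log("[Share] link:", link); // e.g. https://shareg.pt/<id>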

// Build the JSON headers plus the provider-appropriate auth header for the
// current session's model config.
export function getHeaders() {
  const accessStore = useAccessStore.getState();
  const chatStore = useChatStore.getState();
  const headers: Record<string, string> = {
    "Content-Type": "application/json",
    Accept: "application/json",
  };

  const clientConfig = getClientConfig();

  function getConfig() {
    const modelConfig = chatStore.currentSession().mask.modelConfig;
    const isGoogle = modelConfig.providerName === ServiceProvider.Google;
    const isAzure = modelConfig.providerName === ServiceProvider.Azure;
    const isAnthropic = modelConfig.providerName === ServiceProvider.Anthropic;
    const isEnabledAccessControl = accessStore.enabledAccessControl();
    const apiKey = isGoogle
      ? accessStore.googleApiKey
      : isAzure
      ? accessStore.azureApiKey
      : isAnthropic
      ? accessStore.anthropicApiKey
      : accessStore.openaiApiKey;
    return { isGoogle, isAzure, isAnthropic, apiKey, isEnabledAccessControl };
  }

  function getAuthHeader(): string {
    return isAzure ? "api-key" : isAnthropic ? "x-api-key" : "Authorization";
  }

  function getBearerToken(apiKey: string, noBearer: boolean = false): string {
    return validString(apiKey)
      ? `${noBearer ? "" : "Bearer "}${apiKey.trim()}`
      : "";
  }

  function validString(x: string): boolean {
    return x?.length > 0;
  }

  const { isGoogle, isAzure, isAnthropic, apiKey, isEnabledAccessControl } =
    getConfig();

  // when using the Google API in the app, do not set an auth header
  if (isGoogle && clientConfig?.isApp) return headers;

  const authHeader = getAuthHeader();
  const bearerToken = getBearerToken(apiKey, isAzure || isAnthropic);

  if (bearerToken) {
    headers[authHeader] = bearerToken;
  } else if (isEnabledAccessControl && validString(accessStore.accessCode)) {
    headers["Authorization"] = getBearerToken(
      ACCESS_CODE_PREFIX + accessStore.accessCode,
    );
  }

  return headers;
}
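
// Resulting auth header, as derived from getAuthHeader/getBearerToken above:
// Azure -> "api-key": <key>; Anthropic -> "x-api-key": <key>; other providers
// -> "Authorization": "Bearer <key>". If no key is configured but access
// control is enabled, "Authorization" carries
// "Bearer " + ACCESS_CODE_PREFIX + accessCode instead.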

export function getClientApi(provider: ServiceProvider): ClientApi {
  switch (provider) {
    case ServiceProvider.Google:
      return new ClientApi(ModelProvider.GeminiPro);
    case ServiceProvider.Anthropic:
      return new ClientApi(ModelProvider.Claude);
    case ServiceProvider.Tencent:
      return new ClientApi(ModelProvider.Hunyuan);
    default:
      return new ClientApi(ModelProvider.GPT);
  }
}
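
// Resolution sketch (illustrative; ServiceProvider.OpenAI is assumed to exist
// in ../constant alongside the members referenced above): getClientApi maps a
// ServiceProvider onto the matching ModelProvider-backed client, and
// getHeaders() supplies the headers for hand-rolled requests.
//
//   const api = getClientApi(ServiceProvider.OpenAI); // falls through to ChatGPTApi
//   const headers = getHeaders();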