api.ts

import { getClientConfig } from "../config/client";
import {
  ACCESS_CODE_PREFIX,
  Azure,
  ModelProvider,
  ServiceProvider,
} from "../constant";
import { ChatMessage, ModelType, useAccessStore, useChatStore } from "../store";
import { ChatGPTApi } from "./platforms/openai";
import { GeminiProApi } from "./platforms/google";
import { ClaudeApi } from "./platforms/anthropic";
import { ErnieApi } from "./platforms/baidu";
import { DoubaoApi } from "./platforms/bytedance";

export const ROLES = ["system", "user", "assistant"] as const;
export type MessageRole = (typeof ROLES)[number];

export const Models = ["gpt-3.5-turbo", "gpt-4"] as const;
export type ChatModel = ModelType;

export interface MultimodalContent {
  type: "text" | "image_url";
  text?: string;
  image_url?: {
    url: string;
  };
}

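/**
 * A single chat message. `content` is either a plain string or, for
 * image-capable models, an array of text/image parts (see MultimodalContent).
 *
 * @example
 * // Illustrative only; the image URL is a placeholder.
 * const msg: RequestMessage = {
 *   role: "user",
 *   content: [
 *     { type: "text", text: "What is in this picture?" },
 *     { type: "image_url", image_url: { url: "https://example.com/photo.png" } },
 *   ],
 * };
 */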
export interface RequestMessage {
  role: MessageRole;
  content: string | MultimodalContent[];
}

export interface LLMConfig {
  model: string;
  providerName?: string;
  temperature?: number;
  top_p?: number;
  stream?: boolean;
  presence_penalty?: number;
  frequency_penalty?: number;
}

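/**
 * Options passed to LLMApi.chat(). `onUpdate` fires while a response streams
 * in (presumably with the text so far plus the newest chunk, going by its
 * parameter names), `onFinish` receives the final message, and `onController`
 * exposes the AbortController so callers can cancel the request.
 */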
export interface ChatOptions {
  messages: RequestMessage[];
  config: LLMConfig;
  onUpdate?: (message: string, chunk: string) => void;
  onFinish: (message: string) => void;
  onError?: (err: Error) => void;
  onController?: (controller: AbortController) => void;
}

export interface LLMUsage {
  used: number;
  total: number;
}

export interface LLMModel {
  name: string;
  displayName?: string;
  available: boolean;
  provider: LLMModelProvider;
}

export interface LLMModelProvider {
  id: string;
  providerName: string;
  providerType: string;
}

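/**
 * Common contract implemented by every platform client under ./platforms
 * (OpenAI, Google, Anthropic, Baidu, ByteDance): streaming chat, usage
 * statistics, and model listing.
 */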
export abstract class LLMApi {
  abstract chat(options: ChatOptions): Promise<void>;
  abstract usage(): Promise<LLMUsage>;
  abstract models(): Promise<LLMModel[]>;
}

type ProviderName = "openai" | "azure" | "claude" | "palm";

interface Model {
  name: string;
  provider: ProviderName;
  ctxlen: number;
}

interface ChatProvider {
  name: ProviderName;
  apiConfig: {
    baseUrl: string;
    apiKey: string;
    summaryModel: Model;
  };
  models: Model[];
  chat: () => void;
  usage: () => void;
}

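/**
 * Thin facade over the per-provider clients. The constructor picks the
 * concrete LLMApi implementation for the given ModelProvider and falls back
 * to ChatGPTApi for any provider it does not recognize.
 */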
export class ClientApi {
  public llm: LLMApi;

  constructor(provider: ModelProvider = ModelProvider.GPT) {
    switch (provider) {
      case ModelProvider.GeminiPro:
        this.llm = new GeminiProApi();
        break;
      case ModelProvider.Claude:
        this.llm = new ClaudeApi();
        break;
      case ModelProvider.Ernie:
        this.llm = new ErnieApi();
        break;
      case ModelProvider.Doubao:
        this.llm = new DoubaoApi();
        break;
      default:
        this.llm = new ChatGPTApi();
    }
  }

  config() {}

  prompts() {}

  masks() {}

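  /**
   * Exports a conversation to ShareGPT. Messages are converted to the
   * human/gpt format, a NextChat attribution message is appended, and the
   * payload is POSTed either to sharegpt.com directly (app build) or through
   * the /sharegpt proxy (web build). Returns a shareg.pt link when the API
   * responds with an id, otherwise undefined.
   */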
  async share(messages: ChatMessage[], avatarUrl: string | null = null) {
    const msgs = messages
      .map((m) => ({
        from: m.role === "user" ? "human" : "gpt",
        value: m.content,
      }))
      .concat([
        {
          from: "human",
          value:
            "Share from [NextChat]: https://github.com/Yidadaa/ChatGPT-Next-Web",
        },
      ]);
    // Notice to fork developers: for the sake of open-source LLM development,
    // please do not modify the message above; it is used for later data cleaning.
    console.log("[Share]", messages, msgs);
    const clientConfig = getClientConfig();
    const proxyUrl = "/sharegpt";
    const rawUrl = "https://sharegpt.com/api/conversations";
    const shareUrl = clientConfig?.isApp ? rawUrl : proxyUrl;
    const res = await fetch(shareUrl, {
      body: JSON.stringify({
        avatarUrl,
        items: msgs,
      }),
      headers: {
        "Content-Type": "application/json",
      },
      method: "POST",
    });
    const resJson = await res.json();
    console.log("[Share]", resJson);
    if (resJson.id) {
      return `https://shareg.pt/${resJson.id}`;
    }
  }
}

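/**
 * Builds the auth headers for the current session's provider: Azure uses
 * "api-key", Anthropic uses "x-api-key", everything else uses
 * "Authorization: Bearer <key>". When no API key is configured and access
 * control is enabled, the access code (prefixed with ACCESS_CODE_PREFIX) is
 * sent instead. Google requests made from the app build get no auth header.
 *
 * @example
 * // Sketch only: `chatPath` and `payload` are hypothetical placeholders.
 * const res = await fetch(chatPath, {
 *   method: "POST",
 *   headers: getHeaders(),
 *   body: JSON.stringify(payload),
 * });
 */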
export function getHeaders() {
  const accessStore = useAccessStore.getState();
  const chatStore = useChatStore.getState();
  const headers: Record<string, string> = {
    "Content-Type": "application/json",
    Accept: "application/json",
  };
  const clientConfig = getClientConfig();

  function getConfig() {
    const modelConfig = chatStore.currentSession().mask.modelConfig;
    const isGoogle = modelConfig.providerName === ServiceProvider.Google;
    const isAzure = modelConfig.providerName === ServiceProvider.Azure;
    const isAnthropic = modelConfig.providerName === ServiceProvider.Anthropic;
    const isEnabledAccessControl = accessStore.enabledAccessControl();
    const apiKey = isGoogle
      ? accessStore.googleApiKey
      : isAzure
        ? accessStore.azureApiKey
        : isAnthropic
          ? accessStore.anthropicApiKey
          : accessStore.openaiApiKey;
    return { isGoogle, isAzure, isAnthropic, apiKey, isEnabledAccessControl };
  }

  function getAuthHeader(): string {
    return isAzure ? "api-key" : isAnthropic ? "x-api-key" : "Authorization";
  }

  function getBearerToken(apiKey: string, noBearer: boolean = false): string {
    return validString(apiKey)
      ? `${noBearer ? "" : "Bearer "}${apiKey.trim()}`
      : "";
  }

  function validString(x: string): boolean {
    return x?.length > 0;
  }

  const { isGoogle, isAzure, isAnthropic, apiKey, isEnabledAccessControl } =
    getConfig();

  // When using the Google API in the app build, do not set an auth header.
  if (isGoogle && clientConfig?.isApp) return headers;

  const authHeader = getAuthHeader();
  const bearerToken = getBearerToken(apiKey, isAzure || isAnthropic);

  if (bearerToken) {
    headers[authHeader] = bearerToken;
  } else if (isEnabledAccessControl && validString(accessStore.accessCode)) {
    headers["Authorization"] = getBearerToken(
      ACCESS_CODE_PREFIX + accessStore.accessCode,
    );
  }

  return headers;
}

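/**
 * Maps a ServiceProvider to the matching ClientApi. Providers without a
 * dedicated branch fall through to the OpenAI-compatible default.
 *
 * @example
 * const api = getClientApi(ServiceProvider.Anthropic); // backed by ClaudeApi
 */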
export function getClientApi(provider: ServiceProvider): ClientApi {
  switch (provider) {
    case ServiceProvider.Google:
      return new ClientApi(ModelProvider.GeminiPro);
    case ServiceProvider.Anthropic:
      return new ClientApi(ModelProvider.Claude);
    case ServiceProvider.Baidu:
      return new ClientApi(ModelProvider.Ernie);
    case ServiceProvider.ByteDance:
      return new ClientApi(ModelProvider.Doubao);
    default:
      return new ClientApi(ModelProvider.GPT);
  }
}
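
// Usage sketch (not part of the original file): how a caller might stream a
// completion through this module. `ServiceProvider.OpenAI` is assumed to exist
// in ../constant; any provider without a dedicated branch routes to ChatGPTApi.
//
//   const api = getClientApi(ServiceProvider.OpenAI);
//   await api.llm.chat({
//     messages: [{ role: "user", content: "Hello!" }],
//     config: { model: "gpt-3.5-turbo", stream: true },
//     onUpdate: (message, chunk) => console.log("partial:", chunk),
//     onFinish: (message) => console.log("done:", message),
//     onError: (err) => console.error(err),
//   });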