api.ts

import { getClientConfig } from "../config/client";
import {
  ACCESS_CODE_PREFIX,
  Azure,
  ModelProvider,
  ServiceProvider,
} from "../constant";
import {
  ChatMessage,
  ModelType,
  useAccessStore,
  useChatStore,
} from "../store";
import { ChatGPTApi } from "./platforms/openai";
import { GeminiProApi } from "./platforms/google";
import { ClaudeApi } from "./platforms/anthropic";

export const ROLES = ["system", "user", "assistant"] as const;
export type MessageRole = (typeof ROLES)[number];

export const Models = ["gpt-3.5-turbo", "gpt-4"] as const;
export type ChatModel = ModelType;

export interface MultimodalContent {
  type: "text" | "image_url";
  text?: string;
  image_url?: {
    url: string;
  };
}

export interface RequestMessage {
  role: MessageRole;
  content: string | MultimodalContent[];
}
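
// Illustrative sketch (not part of the original file): what RequestMessage
// values might look like for a plain text turn and a multimodal image turn.
// The URL is a placeholder.
//
//   const textMsg: RequestMessage = {
//     role: "user",
//     content: "Describe this picture.",
//   };
//
//   const imageMsg: RequestMessage = {
//     role: "user",
//     content: [
//       { type: "text", text: "Describe this picture." },
//       { type: "image_url", image_url: { url: "https://example.com/cat.png" } },
//     ],
//   };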

export interface LLMConfig {
  model: string;
  providerName?: string;
  temperature?: number;
  top_p?: number;
  stream?: boolean;
  presence_penalty?: number;
  frequency_penalty?: number;
}

export interface ChatOptions {
  messages: RequestMessage[];
  config: LLMConfig;

  onUpdate?: (message: string, chunk: string) => void;
  onFinish: (message: string) => void;
  onError?: (err: Error) => void;
  onController?: (controller: AbortController) => void;
}
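
// Hedged usage sketch: a ChatOptions value for a streaming request. The model
// name and handler bodies are examples only, not values required by this file.
//
//   const options: ChatOptions = {
//     messages: [{ role: "user", content: "Hello!" }],
//     config: { model: "gpt-3.5-turbo", temperature: 0.7, stream: true },
//     onUpdate: (full, chunk) => console.log("[chunk]", chunk),
//     onFinish: (full) => console.log("[done]", full),
//     onError: (err) => console.error(err),
//     onController: (controller) => {
//       // keep the AbortController so the caller can cancel the request
//     },
//   };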

export interface LLMUsage {
  used: number;
  total: number;
}

export interface LLMModel {
  name: string;
  displayName?: string;
  available: boolean;
  provider: LLMModelProvider;
}

export interface LLMModelProvider {
  id: string;
  providerName: string;
  providerType: string;
}

export abstract class LLMApi {
  abstract chat(options: ChatOptions): Promise<void>;
  abstract usage(): Promise<LLMUsage>;
  abstract models(): Promise<LLMModel[]>;
}
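
// Illustrative sketch of the surface a platform adapter extending LLMApi must
// provide. EchoApi is hypothetical; the real adapters live in
// ./platforms/openai, ./platforms/google and ./platforms/anthropic.
//
//   class EchoApi extends LLMApi {
//     async chat(options: ChatOptions): Promise<void> {
//       const last = options.messages[options.messages.length - 1];
//       options.onFinish(typeof last.content === "string" ? last.content : "");
//     }
//     async usage(): Promise<LLMUsage> {
//       return { used: 0, total: 0 };
//     }
//     async models(): Promise<LLMModel[]> {
//       return [];
//     }
//   }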

type ProviderName = "openai" | "azure" | "claude" | "palm";

interface Model {
  name: string;
  provider: ProviderName;
  ctxlen: number;
}

interface ChatProvider {
  name: ProviderName;
  apiConfig: {
    baseUrl: string;
    apiKey: string;
    summaryModel: Model;
  };
  models: Model[];

  chat: () => void;
  usage: () => void;
}

export class ClientApi {
  public llm: LLMApi;

  constructor(provider: ModelProvider = ModelProvider.GPT) {
    switch (provider) {
      case ModelProvider.GeminiPro:
        this.llm = new GeminiProApi();
        break;
      case ModelProvider.Claude:
        this.llm = new ClaudeApi();
        break;
      default:
        this.llm = new ChatGPTApi();
    }
  }

  config() {}

  prompts() {}

  masks() {}

  async share(messages: ChatMessage[], avatarUrl: string | null = null) {
    const msgs = messages
      .map((m) => ({
        from: m.role === "user" ? "human" : "gpt",
        value: m.content,
      }))
      .concat([
        {
          from: "human",
          value:
            "Share from [NextChat]: https://github.com/Yidadaa/ChatGPT-Next-Web",
        },
      ]);
    // Notice to fork developers: for the sake of open-source LLM development,
    // please do not modify the message above; it is used for later data cleaning.
    // Please do not modify this message
    console.log("[Share]", messages, msgs);
    const clientConfig = getClientConfig();
    const proxyUrl = "/sharegpt";
    const rawUrl = "https://sharegpt.com/api/conversations";
    const shareUrl = clientConfig?.isApp ? rawUrl : proxyUrl;
    const res = await fetch(shareUrl, {
      body: JSON.stringify({
        avatarUrl,
        items: msgs,
      }),
      headers: {
        "Content-Type": "application/json",
      },
      method: "POST",
    });

    const resJson = await res.json();
    console.log("[Share]", resJson);
    if (resJson.id) {
      return `https://shareg.pt/${resJson.id}`;
    }
  }
}
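
// Hedged usage sketch: constructing a client for a specific provider and
// sharing the current conversation, assuming the current session exposes its
// messages as ChatMessage[].
//
//   const api = new ClientApi(ModelProvider.Claude);
//   const session = useChatStore.getState().currentSession();
//   const url = await api.share(session.messages);
//   if (url) console.log("shared at", url); // e.g. https://shareg.pt/<id>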

export function getHeaders() {
  const accessStore = useAccessStore.getState();
  const chatStore = useChatStore.getState();
  const headers: Record<string, string> = {
    "Content-Type": "application/json",
    Accept: "application/json",
  };

  const clientConfig = getClientConfig();

  function getConfig() {
    const modelConfig = chatStore.currentSession().mask.modelConfig;
    const isGoogle = modelConfig.providerName === ServiceProvider.Google;
    const isAzure = modelConfig.providerName === ServiceProvider.Azure;
    const isAnthropic = modelConfig.providerName === ServiceProvider.Anthropic;
    const isEnabledAccessControl = accessStore.enabledAccessControl();
    const apiKey = isGoogle
      ? accessStore.googleApiKey
      : isAzure
        ? accessStore.azureApiKey
        : isAnthropic
          ? accessStore.anthropicApiKey
          : accessStore.openaiApiKey;
    return { isGoogle, isAzure, isAnthropic, apiKey, isEnabledAccessControl };
  }

  function getAuthHeader(): string {
    return isAzure ? "api-key" : isAnthropic ? "x-api-key" : "Authorization";
  }

  function getBearerToken(apiKey: string, noBearer: boolean = false): string {
    return validString(apiKey)
      ? `${noBearer ? "" : "Bearer "}${apiKey.trim()}`
      : "";
  }

  function validString(x: string): boolean {
    return x?.length > 0;
  }

  const { isGoogle, isAzure, isAnthropic, apiKey, isEnabledAccessControl } =
    getConfig();

  // when using the Google API in the app, do not set an auth header
  if (isGoogle && clientConfig?.isApp) return headers;

  const authHeader = getAuthHeader();
  const bearerToken = getBearerToken(apiKey, isAzure || isAnthropic);

  if (bearerToken) {
    headers[authHeader] = bearerToken;
  } else if (isEnabledAccessControl && validString(accessStore.accessCode)) {
    headers["Authorization"] = getBearerToken(
      ACCESS_CODE_PREFIX + accessStore.accessCode,
    );
  }

  return headers;
}
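
// Hedged usage sketch: platform adapters typically attach getHeaders() to
// their fetch calls so the right auth header (Bearer token, api-key, x-api-key,
// or access code) is sent. The URL and payload below are placeholders.
//
//   const res = await fetch("/api/openai/v1/chat/completions", {
//     method: "POST",
//     headers: getHeaders(),
//     body: JSON.stringify({ model: "gpt-3.5-turbo", messages: [] }),
//   });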

export function getClientApi(provider: ServiceProvider): ClientApi {
  switch (provider) {
    case ServiceProvider.Google:
      return new ClientApi(ModelProvider.GeminiPro);
    case ServiceProvider.Anthropic:
      return new ClientApi(ModelProvider.Claude);
    default:
      return new ClientApi(ModelProvider.GPT);
  }
}
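
// Hedged usage sketch: resolving the client from a ServiceProvider and issuing
// a chat call through the underlying LLMApi. The model name is a placeholder.
//
//   const api = getClientApi(ServiceProvider.Anthropic);
//   api.llm.chat({
//     messages: [{ role: "user", content: "Hello" }],
//     config: { model: "claude-3-opus", stream: true },
//     onFinish: (message) => console.log(message),
//   });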