anthropic.ts
import { ACCESS_CODE_PREFIX, Anthropic, ApiPath } from "@/app/constant";
import {
  ChatOptions,
  getHeaders,
  LLMApi,
  MultimodalContent,
  SpeechOptions,
  TranscriptionOptions,
} from "../api";
import {
  useAccessStore,
  useAppConfig,
  useChatStore,
  usePluginStore,
  ChatMessageTool,
} from "@/app/store";
import { getClientConfig } from "@/app/config/client";
import { DEFAULT_API_HOST } from "@/app/constant";
import {
  EventStreamContentType,
  fetchEventSource,
} from "@fortaine/fetch-event-source";
import Locale from "../../locales";
import { prettyObject } from "@/app/utils/format";
import { getMessageTextContent, isVisionModel } from "@/app/utils";
import { preProcessImageContent, stream } from "@/app/utils/chat";
import { cloudflareAIGatewayUrl } from "@/app/utils/cloudflare";
import { RequestPayload } from "./openai";
export type MultiBlockContent = {
  type: "image" | "text";
  source?: {
    type: string;
    media_type: string;
    data: string;
  };
  text?: string;
};

export type AnthropicMessage = {
  role: (typeof ClaudeMapper)[keyof typeof ClaudeMapper];
  content: string | MultiBlockContent[];
};
export interface AnthropicChatRequest {
  model: string; // The model that will complete your prompt.
  messages: AnthropicMessage[]; // The prompt that you want Claude to complete.
  max_tokens: number; // The maximum number of tokens to generate before stopping.
  stop_sequences?: string[]; // Sequences that will cause the model to stop generating completion text.
  temperature?: number; // Amount of randomness injected into the response.
  top_p?: number; // Use nucleus sampling.
  top_k?: number; // Only sample from the top K options for each subsequent token.
  metadata?: object; // An object describing metadata about the request.
  stream?: boolean; // Whether to incrementally stream the response using server-sent events.
}
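
// Legacy Text Completions request shape (prompt + max_tokens_to_sample);
// it is not referenced by ClaudeApi below.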
export interface ChatRequest {
  model: string; // The model that will complete your prompt.
  prompt: string; // The prompt that you want Claude to complete.
  max_tokens_to_sample: number; // The maximum number of tokens to generate before stopping.
  stop_sequences?: string[]; // Sequences that will cause the model to stop generating completion text.
  temperature?: number; // Amount of randomness injected into the response.
  top_p?: number; // Use nucleus sampling.
  top_k?: number; // Only sample from the top K options for each subsequent token.
  metadata?: object; // An object describing metadata about the request.
  stream?: boolean; // Whether to incrementally stream the response using server-sent events.
}
export interface ChatResponse {
  completion: string;
  stop_reason: "stop_sequence" | "max_tokens";
  model: string;
}

export type ChatStreamResponse = ChatResponse & {
  stop?: string;
  log_id: string;
};
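
// Map internal chat roles onto the roles the Anthropic Messages API accepts;
// there is no separate "system" role in this mapping, so system messages are sent as user turns.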
const ClaudeMapper = {
  assistant: "assistant",
  user: "user",
  system: "user",
} as const;

const keys = ["claude-2", "claude-instant-1"];
export class ClaudeApi implements LLMApi {
  speech(options: SpeechOptions): Promise<ArrayBuffer> {
    throw new Error("Method not implemented.");
  }

  transcription(options: TranscriptionOptions): Promise<string> {
    throw new Error("Method not implemented.");
  }
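
  // Extract the assistant text from the first content block of a non-streaming response.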
  extractMessage(res: any) {
    console.log("[Response] claude response: ", res);

    return res?.content?.[0]?.text;
  }
  async chat(options: ChatOptions): Promise<void> {
    const visionModel = isVisionModel(options.config.model);
    const accessStore = useAccessStore.getState();
    const shouldStream = !!options.config.stream;

    const modelConfig = {
      ...useAppConfig.getState().modelConfig,
      ...useChatStore.getState().currentSession().mask.modelConfig,
      ...{
        model: options.config.model,
      },
    };

    // try to load base64 image data from the locally cached image_url
    const messages: ChatOptions["messages"] = [];
    for (const v of options.messages) {
      const content = await preProcessImageContent(v.content);
      messages.push({ role: v.role, content });
    }
    const keys = ["system", "user"];

    // roles must alternate between "user" and "assistant" in Claude, so add a
    // fake assistant message between two consecutive user/system messages
    for (let i = 0; i < messages.length - 1; i++) {
      const message = messages[i];
      const nextMessage = messages[i + 1];

      if (keys.includes(message.role) && keys.includes(nextMessage.role)) {
        messages[i] = [
          message,
          {
            role: "assistant",
            content: ";",
          },
        ] as any;
      }
    }
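
    // Flatten the patched list, drop empty messages, and convert each entry into
    // Anthropic's message shape (plain text, or multimodal blocks for vision models).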
    const prompt = messages
      .flat()
      .filter((v) => {
        if (!v.content) return false;
        if (typeof v.content === "string" && !v.content.trim()) return false;
        return true;
      })
      .map((v) => {
        const { role, content } = v;
        const insideRole = ClaudeMapper[role] ?? "user";

        if (!visionModel || typeof content === "string") {
          return {
            role: insideRole,
            content: getMessageTextContent(v),
          };
        }
        return {
          role: insideRole,
          content: content
            .filter((v) => v.image_url || v.text)
            .map(({ type, text, image_url }) => {
              if (type === "text") {
                return {
                  type,
                  text: text!,
                };
              }
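
              // image_url is a base64 data URL of the form "data:<media_type>;<encoding>,<data>";
              // split it into the pieces Anthropic's image source block expects.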
              const { url = "" } = image_url || {};
              const colonIndex = url.indexOf(":");
              const semicolonIndex = url.indexOf(";");
              const comma = url.indexOf(",");

              const mimeType = url.slice(colonIndex + 1, semicolonIndex);
              const encodeType = url.slice(semicolonIndex + 1, comma);
              const data = url.slice(comma + 1);

              return {
                type: "image" as const,
                source: {
                  type: encodeType,
                  media_type: mimeType,
                  data,
                },
              };
            }),
        };
      });
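
    // The Messages API expects the conversation to start with a user turn;
    // prepend a placeholder user message if the first entry is from the assistant.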
    if (prompt[0]?.role === "assistant") {
      prompt.unshift({
        role: "user",
        content: ";",
      });
    }

    const requestBody: AnthropicChatRequest = {
      messages: prompt,
      stream: shouldStream,

      model: modelConfig.model,
      max_tokens: modelConfig.max_tokens,
      temperature: modelConfig.temperature,
      top_p: modelConfig.top_p,
      // top_k: modelConfig.top_k,
      top_k: 5,
    };
    const path = this.path(Anthropic.ChatPath);

    const controller = new AbortController();
    options.onController?.(controller);

    if (shouldStream) {
      let index = -1;
      const [tools, funcs] = usePluginStore
        .getState()
        .getAsTools(
          useChatStore.getState().currentSession().mask?.plugin || [],
        );
      return stream(
        path,
        requestBody,
        {
          ...getHeaders(),
          "anthropic-version": accessStore.anthropicApiVersion,
        },
        // @ts-ignore
        tools.map((tool) => ({
          name: tool?.function?.name,
          description: tool?.function?.description,
          input_schema: tool?.function?.parameters,
        })),
        funcs,
        controller,
        // parseSSE
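        // Each SSE chunk is one JSON event: "text_delta" deltas carry assistant text,
        // "input_json_delta" deltas stream partial JSON arguments for a tool call, and a
        // content_block of type "tool_use" starts a new tool call.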
        (text: string, runTools: ChatMessageTool[]) => {
          // console.log("parseSSE", text, runTools);
          let chunkJson:
            | undefined
            | {
                type: "content_block_delta" | "content_block_stop";
                content_block?: {
                  type: "tool_use";
                  id: string;
                  name: string;
                };
                delta?: {
                  type: "text_delta" | "input_json_delta";
                  text?: string;
                  partial_json?: string;
                };
                index: number;
              };
          chunkJson = JSON.parse(text);

          if (chunkJson?.content_block?.type == "tool_use") {
            index += 1;
            const id = chunkJson?.content_block.id;
            const name = chunkJson?.content_block.name;
            runTools.push({
              id,
              type: "function",
              function: {
                name,
                arguments: "",
              },
            });
          }

          if (
            chunkJson?.delta?.type == "input_json_delta" &&
            chunkJson?.delta?.partial_json
          ) {
            // @ts-ignore
            runTools[index]["function"]["arguments"] +=
              chunkJson?.delta?.partial_json;
          }
          return chunkJson?.delta?.text;
        },
        // processToolMessage: append the assistant tool_use message and the
        // corresponding tool_result messages to the request payload
        (
          requestPayload: RequestPayload,
          toolCallMessage: any,
          toolCallResult: any[],
        ) => {
          // reset index value
          index = -1;
          // @ts-ignore
          requestPayload?.messages?.splice(
            // @ts-ignore
            requestPayload?.messages?.length,
            0,
            {
              role: "assistant",
              content: toolCallMessage.tool_calls.map(
                (tool: ChatMessageTool) => ({
                  type: "tool_use",
                  id: tool.id,
                  name: tool?.function?.name,
                  input: tool?.function?.arguments
                    ? JSON.parse(tool?.function?.arguments)
                    : {},
                }),
              ),
            },
            // @ts-ignore
            ...toolCallResult.map((result) => ({
              role: "user",
              content: [
                {
                  type: "tool_result",
                  tool_use_id: result.tool_call_id,
                  content: result.content,
                },
              ],
            })),
          );
        },
        options,
      );
    } else {
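      // Non-streaming path: issue a single POST and read the full response body.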
      const payload = {
        method: "POST",
        body: JSON.stringify(requestBody),
        signal: controller.signal,
        headers: {
          ...getHeaders(), // get common headers
          "anthropic-version": accessStore.anthropicApiVersion,
          // do not send `anthropicApiKey` in browser!!!
          // Authorization: getAuthKey(accessStore.anthropicApiKey),
        },
      };

      try {
        controller.signal.onabort = () => options.onFinish("");

        const res = await fetch(path, payload);
        const resJson = await res.json();

        const message = this.extractMessage(resJson);
        options.onFinish(message);
      } catch (e) {
        console.error("failed to chat", e);
        options.onError?.(e as Error);
      }
    }
  }
  async usage() {
    return {
      used: 0,
      total: 0,
    };
  }
  async models() {
    // const provider = {
    //   id: "anthropic",
    //   providerName: "Anthropic",
    //   providerType: "anthropic",
    // };

    return [
      // {
      //   name: "claude-instant-1.2",
      //   available: true,
      //   provider,
      // },
      // {
      //   name: "claude-2.0",
      //   available: true,
      //   provider,
      // },
      // {
      //   name: "claude-2.1",
      //   available: true,
      //   provider,
      // },
      // {
      //   name: "claude-3-opus-20240229",
      //   available: true,
      //   provider,
      // },
      // {
      //   name: "claude-3-sonnet-20240229",
      //   available: true,
      //   provider,
      // },
      // {
      //   name: "claude-3-haiku-20240307",
      //   available: true,
      //   provider,
      // },
    ];
  }
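
  // Resolve the request URL: prefer a user-configured Anthropic endpoint, otherwise
  // fall back to the desktop-app proxy or the built-in /api path, then optionally
  // rewrite the URL for the Cloudflare AI Gateway.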
  path(path: string): string {
    const accessStore = useAccessStore.getState();

    let baseUrl: string = "";
    if (accessStore.useCustomConfig) {
      baseUrl = accessStore.anthropicUrl;
    }

    // if the endpoint is empty, use the default endpoint
    if (baseUrl.trim().length === 0) {
      const isApp = !!getClientConfig()?.isApp;

      baseUrl = isApp
        ? DEFAULT_API_HOST + "/api/proxy/anthropic"
        : ApiPath.Anthropic;
    }

    if (!baseUrl.startsWith("http") && !baseUrl.startsWith("/api")) {
      baseUrl = "https://" + baseUrl;
    }

    baseUrl = trimEnd(baseUrl, "/");

    // rebuild the url when the Cloudflare AI Gateway is configured on the client
    return cloudflareAIGatewayUrl(`${baseUrl}/${path}`);
  }
}
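
// Strip any trailing occurrences of `end` from `s`; used above to normalize the base URL.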
function trimEnd(s: string, end = " ") {
  if (end.length === 0) return s;

  while (s.endsWith(end)) {
    s = s.slice(0, -end.length);
  }

  return s;
}