anthropic.ts
import { Anthropic, ApiPath } from "@/app/constant";
import { ChatOptions, getHeaders, LLMApi, SpeechOptions } from "../api";
import {
  useAccessStore,
  useAppConfig,
  useChatStore,
  usePluginStore,
  ChatMessageTool,
} from "@/app/store";
import { getClientConfig } from "@/app/config/client";
import { ANTHROPIC_BASE_URL } from "@/app/constant";
import { getMessageTextContent, isVisionModel } from "@/app/utils";
import { preProcessImageContent, stream } from "@/app/utils/chat";
import { cloudflareAIGatewayUrl } from "@/app/utils/cloudflare";
import { RequestPayload } from "./openai";
import { fetch } from "@/app/utils/stream";
export type MultiBlockContent = {
  type: "image" | "text";
  source?: {
    type: string;
    media_type: string;
    data: string;
  };
  text?: string;
};

export type AnthropicMessage = {
  role: (typeof ClaudeMapper)[keyof typeof ClaudeMapper];
  content: string | MultiBlockContent[];
};
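// Illustrative shape of a vision message built from these types
// (placeholder values, not produced verbatim anywhere in this module):
//   {
//     role: "user",
//     content: [
//       { type: "text", text: "What is in this image?" },
//       {
//         type: "image",
//         source: { type: "base64", media_type: "image/png", data: "iVBORw0..." },
//       },
//     ],
//   }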
export interface AnthropicChatRequest {
  model: string; // The model that will complete your prompt.
  messages: AnthropicMessage[]; // The prompt that you want Claude to complete.
  max_tokens: number; // The maximum number of tokens to generate before stopping.
  stop_sequences?: string[]; // Sequences that will cause the model to stop generating completion text.
  temperature?: number; // Amount of randomness injected into the response.
  top_p?: number; // Use nucleus sampling.
  top_k?: number; // Only sample from the top K options for each subsequent token.
  metadata?: object; // An object describing metadata about the request.
  stream?: boolean; // Whether to incrementally stream the response using server-sent events.
}

export interface ChatRequest {
  model: string; // The model that will complete your prompt.
  prompt: string; // The prompt that you want Claude to complete.
  max_tokens_to_sample: number; // The maximum number of tokens to generate before stopping.
  stop_sequences?: string[]; // Sequences that will cause the model to stop generating completion text.
  temperature?: number; // Amount of randomness injected into the response.
  top_p?: number; // Use nucleus sampling.
  top_k?: number; // Only sample from the top K options for each subsequent token.
  metadata?: object; // An object describing metadata about the request.
  stream?: boolean; // Whether to incrementally stream the response using server-sent events.
}

export interface ChatResponse {
  completion: string;
  stop_reason: "stop_sequence" | "max_tokens";
  model: string;
}

export type ChatStreamResponse = ChatResponse & {
  stop?: string;
  log_id: string;
};
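// Note: ChatRequest, ChatResponse, and ChatStreamResponse above describe a
// prompt/completion-style payload (max_tokens_to_sample, completion). They are
// not referenced by the chat() implementation below, which builds an
// AnthropicChatRequest instead.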
const ClaudeMapper = {
  assistant: "assistant",
  user: "user",
  system: "user",
} as const;

const keys = ["claude-2", "claude-instant-1"];
export class ClaudeApi implements LLMApi {
  speech(options: SpeechOptions): Promise<ArrayBuffer> {
    throw new Error("Method not implemented.");
  }

  extractMessage(res: any) {
    console.log("[Response] claude response: ", res);
    return res?.content?.[0]?.text;
  }
  async chat(options: ChatOptions): Promise<void> {
    const visionModel = isVisionModel(options.config.model);
    const accessStore = useAccessStore.getState();

    const shouldStream = !!options.config.stream;

    const modelConfig = {
      ...useAppConfig.getState().modelConfig,
      ...useChatStore.getState().currentSession().mask.modelConfig,
      ...{
        model: options.config.model,
      },
    };

    // try to get base64 image data from the local cache for image_url content
    const messages: ChatOptions["messages"] = [];
    for (const v of options.messages) {
      const content = await preProcessImageContent(v.content);
      messages.push({ role: v.role, content });
    }
    const keys = ["system", "user"];
    // roles must alternate between "user" and "assistant" in claude, so add a fake assistant message between two user messages
    for (let i = 0; i < messages.length - 1; i++) {
      const message = messages[i];
      const nextMessage = messages[i + 1];

      if (keys.includes(message.role) && keys.includes(nextMessage.role)) {
        messages[i] = [
          message,
          {
            role: "assistant",
            content: ";",
          },
        ] as any;
      }
    }
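    // Illustrative effect of the loop above: two consecutive user messages
    // [user, user] become [[user, { role: "assistant", content: ";" }], user],
    // and the .flat() below restores a single alternating list:
    // user, assistant(";"), user.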
    const prompt = messages
      .flat()
      .filter((v) => {
        if (!v.content) return false;
        if (typeof v.content === "string" && !v.content.trim()) return false;
        return true;
      })
      .map((v) => {
        const { role, content } = v;
        const insideRole = ClaudeMapper[role] ?? "user";

        if (!visionModel || typeof content === "string") {
          return {
            role: insideRole,
            content: getMessageTextContent(v),
          };
        }
        return {
          role: insideRole,
          content: content
            .filter((v) => v.image_url || v.text)
            .map(({ type, text, image_url }) => {
              if (type === "text") {
                return {
                  type,
                  text: text!,
                };
              }
              const { url = "" } = image_url || {};
              const colonIndex = url.indexOf(":");
              const semicolonIndex = url.indexOf(";");
              const comma = url.indexOf(",");

              const mimeType = url.slice(colonIndex + 1, semicolonIndex);
              const encodeType = url.slice(semicolonIndex + 1, comma);
              const data = url.slice(comma + 1);

              return {
                type: "image" as const,
                source: {
                  type: encodeType,
                  media_type: mimeType,
                  data,
                },
              };
            }),
        };
      });
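    // Note on the image mapping above: image_url values are expected to be
    // data URLs, e.g. "data:image/png;base64,iVBORw0..." (placeholder data).
    // The index-based slicing then yields media_type = "image/png",
    // encodeType = "base64", and data = the raw base64 payload.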
    if (prompt[0]?.role === "assistant") {
      prompt.unshift({
        role: "user",
        content: ";",
      });
    }

    const requestBody: AnthropicChatRequest = {
      messages: prompt,
      stream: shouldStream,

      model: modelConfig.model,
      max_tokens: modelConfig.max_tokens,
      temperature: modelConfig.temperature,
      top_p: modelConfig.top_p,
      // top_k: modelConfig.top_k,
      top_k: 5,
    };
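    // Roughly what requestBody serializes to (illustrative values only):
    // {
    //   "messages": [{ "role": "user", "content": "Hello" }],
    //   "stream": true,
    //   "model": "claude-3-haiku-20240307",
    //   "max_tokens": 4096,
    //   "temperature": 0.5,
    //   "top_p": 1,
    //   "top_k": 5
    // }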
    const path = this.path(Anthropic.ChatPath);

    const controller = new AbortController();
    options.onController?.(controller);

    if (shouldStream) {
      let index = -1;
      const [tools, funcs] = usePluginStore
        .getState()
        .getAsTools(
          useChatStore.getState().currentSession().mask?.plugin || [],
        );
      return stream(
        path,
        requestBody,
        {
          ...getHeaders(),
          "anthropic-version": accessStore.anthropicApiVersion,
        },
        // @ts-ignore
        tools.map((tool) => ({
          name: tool?.function?.name,
          description: tool?.function?.description,
          input_schema: tool?.function?.parameters,
        })),
        funcs,
        controller,
        // parseSSE
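        // For orientation, the streamed chunks parsed below look roughly like
        // this (illustrative, trimmed to the fields the handler reads):
        //   { "type": "content_block_delta", "index": 0,
        //     "delta": { "type": "text_delta", "text": "Hello" } }
        //   { "type": "content_block_delta", "index": 1,
        //     "delta": { "type": "input_json_delta", "partial_json": "{\"city\":" } }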
        (text: string, runTools: ChatMessageTool[]) => {
          // console.log("parseSSE", text, runTools);
          let chunkJson:
            | undefined
            | {
                type:
                  | "content_block_delta"
                  | "content_block_stop"
                  | "message_delta"
                  | "message_stop";
                content_block?: {
                  type: "tool_use";
                  id: string;
                  name: string;
                };
                delta?: {
                  type: "text_delta" | "input_json_delta";
                  text?: string;
                  partial_json?: string;
                  stop_reason?: string;
                };
                index: number;
              };
          chunkJson = JSON.parse(text);

          // Handle refusal stop reason in message_delta
          if (chunkJson?.delta?.stop_reason === "refusal") {
            // Return a message to display to the user
            const refusalMessage =
              "\n\n[Assistant refused to respond. Please modify your request and try again.]";
            options.onError?.(
              new Error("Content policy violation: " + refusalMessage),
            );
            return refusalMessage;
          }

          if (chunkJson?.content_block?.type == "tool_use") {
            index += 1;
            const id = chunkJson?.content_block.id;
            const name = chunkJson?.content_block.name;
            runTools.push({
              id,
              type: "function",
              function: {
                name,
                arguments: "",
              },
            });
          }

          if (
            chunkJson?.delta?.type == "input_json_delta" &&
            chunkJson?.delta?.partial_json
          ) {
            // @ts-ignore
            runTools[index]["function"]["arguments"] +=
              chunkJson?.delta?.partial_json;
          }
          return chunkJson?.delta?.text;
        },
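        // The next callback converts OpenAI-style tool calls into
        // Anthropic-style blocks, roughly (illustrative ids and names):
        //   assistant: [{ type: "tool_use", id: "toolu_01...", name: "get_weather", input: {...} }]
        //   user:      [{ type: "tool_result", tool_use_id: "toolu_01...", content: "..." }]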
        // processToolMessage, include tool_calls message and tool call results
        (
          requestPayload: RequestPayload,
          toolCallMessage: any,
          toolCallResult: any[],
        ) => {
          // reset index value
          index = -1;
          // @ts-ignore
          requestPayload?.messages?.splice(
            // @ts-ignore
            requestPayload?.messages?.length,
            0,
            {
              role: "assistant",
              content: toolCallMessage.tool_calls.map(
                (tool: ChatMessageTool) => ({
                  type: "tool_use",
                  id: tool.id,
                  name: tool?.function?.name,
                  input: tool?.function?.arguments
                    ? JSON.parse(tool?.function?.arguments)
                    : {},
                }),
              ),
            },
            // @ts-ignore
            ...toolCallResult.map((result) => ({
              role: "user",
              content: [
                {
                  type: "tool_result",
                  tool_use_id: result.tool_call_id,
                  content: result.content,
                },
              ],
            })),
          );
        },
        options,
      );
    } else {
      const payload = {
        method: "POST",
        body: JSON.stringify(requestBody),
        signal: controller.signal,
        headers: {
          ...getHeaders(), // get common headers
          "anthropic-version": accessStore.anthropicApiVersion,
          // do not send `anthropicApiKey` in browser!!!
          // Authorization: getAuthKey(accessStore.anthropicApiKey),
        },
      };

      try {
        controller.signal.onabort = () =>
          options.onFinish("", new Response(null, { status: 400 }));

        const res = await fetch(path, payload);
        const resJson = await res.json();

        const message = this.extractMessage(resJson);
        options.onFinish(message, res);
      } catch (e) {
        console.error("failed to chat", e);
        options.onError?.(e as Error);
      }
    }
  }
  async usage() {
    return {
      used: 0,
      total: 0,
    };
  }
  async models() {
    // const provider = {
    //   id: "anthropic",
    //   providerName: "Anthropic",
    //   providerType: "anthropic",
    // };

    return [
      // {
      //   name: "claude-instant-1.2",
      //   available: true,
      //   provider,
      // },
      // {
      //   name: "claude-2.0",
      //   available: true,
      //   provider,
      // },
      // {
      //   name: "claude-2.1",
      //   available: true,
      //   provider,
      // },
      // {
      //   name: "claude-3-opus-20240229",
      //   available: true,
      //   provider,
      // },
      // {
      //   name: "claude-3-sonnet-20240229",
      //   available: true,
      //   provider,
      // },
      // {
      //   name: "claude-3-haiku-20240307",
      //   available: true,
      //   provider,
      // },
    ];
  }
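  // path() resolves the request base URL in this order: the user-configured
  // anthropicUrl, otherwise ANTHROPIC_BASE_URL when running as the desktop
  // app, otherwise the ApiPath.Anthropic proxy route in the browser. A
  // trailing "/" is trimmed before the chat path is appended, e.g. a base of
  // "https://example.com/" becomes "https://example.com/<Anthropic.ChatPath>"
  // (placeholder host for illustration).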
  path(path: string): string {
    const accessStore = useAccessStore.getState();

    let baseUrl: string = "";
    if (accessStore.useCustomConfig) {
      baseUrl = accessStore.anthropicUrl;
    }

    // if endpoint is empty, use default endpoint
    if (baseUrl.trim().length === 0) {
      const isApp = !!getClientConfig()?.isApp;
      baseUrl = isApp ? ANTHROPIC_BASE_URL : ApiPath.Anthropic;
    }

    if (!baseUrl.startsWith("http") && !baseUrl.startsWith("/api")) {
      baseUrl = "https://" + baseUrl;
    }

    baseUrl = trimEnd(baseUrl, "/");

    // try to rebuild the url when using Cloudflare AI Gateway in the client
    return cloudflareAIGatewayUrl(`${baseUrl}/${path}`);
  }
}
function trimEnd(s: string, end = " ") {
  if (end.length === 0) return s;

  while (s.endsWith(end)) {
    s = s.slice(0, -end.length);
  }

  return s;
}
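// Usage sketch (illustrative only; the callback and config fields shown here
// are assumed to match the ChatOptions shape imported from "../api"):
//   const api = new ClaudeApi();
//   await api.chat({
//     messages: [{ role: "user", content: "Hello" }],
//     config: { model: "claude-3-haiku-20240307", stream: true },
//     onUpdate: (message) => console.log(message),
//     onFinish: (message, res) => console.log("done", message),
//     onError: (e) => console.error(e),
//   });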