// openai.ts
import {
  DEFAULT_API_HOST,
  DEFAULT_MODELS,
  OpenaiPath,
  REQUEST_TIMEOUT_MS,
} from "@/app/constant";
import { useAccessStore, useAppConfig, useChatStore } from "@/app/store";
import { ChatOptions, getHeaders, LLMApi, LLMModel, LLMUsage } from "../api";
import Locale from "../../locales";
import {
  EventStreamContentType,
  fetchEventSource,
} from "@fortaine/fetch-event-source";
import { prettyObject } from "@/app/utils/format";
import { getClientConfig } from "@/app/config/client";
  16. export interface OpenAIListModelResponse {
  17. object: string;
  18. data: Array<{
  19. id: string;
  20. object: string;
  21. root: string;
  22. }>;
  23. }
  24. export class ChatGPTApi implements LLMApi {
  25. private disableListModels = true;
  26. path(path: string): string {
  27. let openaiUrl = useAccessStore.getState().openaiUrl;
  28. const apiPath = "/api/openai";
  29. if (openaiUrl.length === 0) {
  30. const isApp = !!getClientConfig()?.isApp;
  31. openaiUrl = isApp ? DEFAULT_API_HOST : apiPath;
  32. }
  33. if (openaiUrl.endsWith("/")) {
  34. openaiUrl = openaiUrl.slice(0, openaiUrl.length - 1);
  35. }
  36. if (!openaiUrl.startsWith("http") && !openaiUrl.startsWith(apiPath)) {
  37. openaiUrl = "https://" + openaiUrl;
  38. }
  39. return [openaiUrl, path].join("/");
  40. }
  41. extractMessage(res: any) {
  42. return res.choices?.at(0)?.message?.content ?? "";
  43. }
  44. async chat(options: ChatOptions) {
  45. const messages = options.messages.map((v) => ({
  46. role: v.role,
  47. content: v.content,
  48. }));
  49. const modelConfig = {
  50. ...useAppConfig.getState().modelConfig,
  51. ...useChatStore.getState().currentSession().mask.modelConfig,
  52. ...{
  53. model: options.config.model,
  54. },
  55. };
  56. const requestPayload = {
  57. messages,
  58. stream: options.config.stream,
  59. model: modelConfig.model,
  60. temperature: modelConfig.temperature,
  61. presence_penalty: modelConfig.presence_penalty,
  62. frequency_penalty: modelConfig.frequency_penalty,
  63. top_p: modelConfig.top_p,
  64. max_tokens: Math.max(modelConfig.max_tokens, 1024),
  65. };
  66. console.log("[Request] openai payload: ", requestPayload);
  67. const shouldStream = !!options.config.stream;
  68. const controller = new AbortController();
  69. options.onController?.(controller);
  70. try {
  71. const chatPath = this.path(OpenaiPath.ChatPath);
  72. const chatPayload = {
  73. method: "POST",
  74. body: JSON.stringify(requestPayload),
  75. signal: controller.signal,
  76. headers: getHeaders(),
  77. };
  78. // make a fetch request
  79. const requestTimeoutId = setTimeout(
  80. () => controller.abort(),
  81. REQUEST_TIMEOUT_MS,
  82. );
  83. if (shouldStream) {
  84. let responseText = "";
  85. let finished = false;
  86. const finish = () => {
  87. if (!finished) {
  88. options.onFinish(responseText);
  89. finished = true;
  90. }
  91. };
  92. controller.signal.onabort = finish;
  93. fetchEventSource(chatPath, {
  94. ...chatPayload,
  95. async onopen(res) {
  96. clearTimeout(requestTimeoutId);
  97. const contentType = res.headers.get("content-type");
  98. console.log(
  99. "[OpenAI] request response content type: ",
  100. contentType,
  101. );
  102. if (contentType?.startsWith("text/plain")) {
  103. responseText = await res.clone().text();
  104. return finish();
  105. }
  106. if (
  107. !res.ok ||
  108. !res.headers
  109. .get("content-type")
  110. ?.startsWith(EventStreamContentType) ||
  111. res.status !== 200
  112. ) {
  113. const responseTexts = [responseText];
  114. let extraInfo = await res.clone().text();
  115. try {
  116. const resJson = await res.clone().json();
  117. extraInfo = prettyObject(resJson);
  118. } catch {}
  119. if (res.status === 401) {
  120. responseTexts.push(Locale.Error.Unauthorized);
  121. }
  122. if (extraInfo) {
  123. responseTexts.push(extraInfo);
  124. }
  125. responseText = responseTexts.join("\n\n");
  126. return finish();
  127. }
  128. },
  129. onmessage(msg) {
  130. if (msg.data === "[DONE]" || finished) {
  131. return finish();
  132. }
  133. const text = msg.data;
  134. try {
  135. const json = JSON.parse(text);
  136. const delta = json.choices[0].delta.content;
  137. if (delta) {
  138. responseText += delta;
  139. options.onUpdate?.(responseText, delta);
  140. }
  141. } catch (e) {
  142. console.error("[Request] parse error", text, msg);
  143. }
  144. },
  145. onclose() {
  146. finish();
  147. },
  148. onerror(e) {
  149. options.onError?.(e);
  150. throw e;
  151. },
  152. openWhenHidden: true,
  153. });
  154. } else {
  155. const res = await fetch(chatPath, chatPayload);
  156. clearTimeout(requestTimeoutId);
  157. const resJson = await res.json();
  158. const message = this.extractMessage(resJson);
  159. options.onFinish(message);
  160. }
  161. } catch (e) {
  162. console.log("[Request] failed to make a chat request", e);
  163. options.onError?.(e as Error);
  164. }
  165. }
  166. async usage() {
  167. const formatDate = (d: Date) =>
  168. `${d.getFullYear()}-${(d.getMonth() + 1).toString().padStart(2, "0")}-${d
  169. .getDate()
  170. .toString()
  171. .padStart(2, "0")}`;
  172. const ONE_DAY = 1 * 24 * 60 * 60 * 1000;
  173. const now = new Date();
  174. const startOfMonth = new Date(now.getFullYear(), now.getMonth(), 1);
  175. const startDate = formatDate(startOfMonth);
  176. const endDate = formatDate(new Date(Date.now() + ONE_DAY));
  177. const [used, subs] = await Promise.all([
  178. fetch(
  179. this.path(
  180. `${OpenaiPath.UsagePath}?start_date=${startDate}&end_date=${endDate}`,
  181. ),
  182. {
  183. method: "GET",
  184. headers: getHeaders(),
  185. },
  186. ),
  187. fetch(this.path(OpenaiPath.SubsPath), {
  188. method: "GET",
  189. headers: getHeaders(),
  190. }),
  191. ]);
  192. if (used.status === 401) {
  193. throw new Error(Locale.Error.Unauthorized);
  194. }
  195. if (!used.ok || !subs.ok) {
  196. throw new Error("Failed to query usage from openai");
  197. }
  198. const response = (await used.json()) as {
  199. total_usage?: number;
  200. error?: {
  201. type: string;
  202. message: string;
  203. };
  204. };
  205. const total = (await subs.json()) as {
  206. hard_limit_usd?: number;
  207. };
  208. if (response.error && response.error.type) {
  209. throw Error(response.error.message);
  210. }
  211. if (response.total_usage) {
  212. response.total_usage = Math.round(response.total_usage) / 100;
  213. }
  214. if (total.hard_limit_usd) {
  215. total.hard_limit_usd = Math.round(total.hard_limit_usd * 100) / 100;
  216. }
  217. return {
  218. used: response.total_usage,
  219. total: total.hard_limit_usd,
  220. } as LLMUsage;
  221. }
  222. async models(): Promise<LLMModel[]> {
  223. if (this.disableListModels) {
  224. return DEFAULT_MODELS.slice();
  225. }
  226. const res = await fetch(this.path(OpenaiPath.ListModelPath), {
  227. method: "GET",
  228. headers: {
  229. ...getHeaders(),
  230. },
  231. });
  232. const resJson = (await res.json()) as OpenAIListModelResponse;
  233. const chatModels = resJson.data?.filter((m) => m.id.startsWith("gpt-"));
  234. console.log("[Models]", chatModels);
  235. if (!chatModels) {
  236. return [];
  237. }
  238. return chatModels.map((m) => ({
  239. name: m.id,
  240. available: true,
  241. }));
  242. }
  243. }
  244. export { OpenaiPath };