openai.ts

import {
  DEFAULT_API_HOST,
  DEFAULT_MODELS,
  OpenaiPath,
  REQUEST_TIMEOUT_MS,
} from "@/app/constant";
import { useAccessStore, useAppConfig, useChatStore } from "@/app/store";

import { ChatOptions, getHeaders, LLMApi, LLMModel, LLMUsage } from "../api";
import Locale from "../../locales";
import {
  EventStreamContentType,
  fetchEventSource,
} from "@fortaine/fetch-event-source";
import { prettyObject } from "@/app/utils/format";
import { getClientConfig } from "@/app/config/client";
export interface OpenAIListModelResponse {
  object: string;
  data: Array<{
    id: string;
    object: string;
    root: string;
  }>;
}
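
// For reference, one entry in the list-models response looks roughly like
// this (illustrative values; only the fields declared above are consumed):
//
//   { "id": "gpt-3.5-turbo", "object": "model", "root": "gpt-3.5-turbo" }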
export class ChatGPTApi implements LLMApi {
  private disableListModels = true;

  // Resolve a full request URL: prefer the user-configured endpoint;
  // otherwise fall back to the official host (desktop app) or the
  // built-in /api/openai proxy route (web).
  path(path: string): string {
    let openaiUrl = useAccessStore.getState().openaiUrl;
    const apiPath = "/api/openai";

    if (openaiUrl.length === 0) {
      const isApp = !!getClientConfig()?.isApp;
      openaiUrl = isApp ? DEFAULT_API_HOST : apiPath;
    }
    if (openaiUrl.endsWith("/")) {
      openaiUrl = openaiUrl.slice(0, openaiUrl.length - 1);
    }
    // bare hostnames get an https:// prefix
    if (!openaiUrl.startsWith("http") && !openaiUrl.startsWith(apiPath)) {
      openaiUrl = "https://" + openaiUrl;
    }
    return [openaiUrl, path].join("/");
  }
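
  // For example, in the web build with no endpoint configured,
  // this.path(OpenaiPath.ChatPath) yields "/api/openai/v1/chat/completions"
  // (assuming OpenaiPath.ChatPath is "v1/chat/completions"; the actual
  // constant lives in @/app/constant).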
  // Pull the assistant message text out of a non-streaming completion response.
  extractMessage(res: any) {
    return res.choices?.at(0)?.message?.content ?? "";
  }
  async chat(options: ChatOptions) {
    const messages = options.messages.map((v) => ({
      role: v.role,
      content: v.content,
    }));

    // merge global model config, per-session mask config, and the
    // per-request model override (later spreads win)
    const modelConfig = {
      ...useAppConfig.getState().modelConfig,
      ...useChatStore.getState().currentSession().mask.modelConfig,
      ...{
        model: options.config.model,
      },
    };

    const requestPayload = {
      messages,
      stream: options.config.stream,
      model: modelConfig.model,
      temperature: modelConfig.temperature,
      presence_penalty: modelConfig.presence_penalty,
      frequency_penalty: modelConfig.frequency_penalty,
      top_p: modelConfig.top_p,
    };

    console.log("[Request] openai payload: ", requestPayload);

    const shouldStream = !!options.config.stream;
    const controller = new AbortController();
    options.onController?.(controller);

    try {
      const chatPath = this.path(OpenaiPath.ChatPath);
      const chatPayload = {
        method: "POST",
        body: JSON.stringify(requestPayload),
        signal: controller.signal,
        headers: getHeaders(),
      };

      // abort the request if it takes longer than REQUEST_TIMEOUT_MS
      const requestTimeoutId = setTimeout(
        () => controller.abort(),
        REQUEST_TIMEOUT_MS,
      );

      if (shouldStream) {
        let responseText = "";
        let finished = false;

        // deliver the final text exactly once, whichever callback
        // (onabort, onopen, onmessage, onclose) triggers completion
        const finish = () => {
          if (!finished) {
            options.onFinish(responseText);
            finished = true;
          }
        };

        controller.signal.onabort = finish;

        fetchEventSource(chatPath, {
          ...chatPayload,
          async onopen(res) {
            clearTimeout(requestTimeoutId);
            const contentType = res.headers.get("content-type");
            console.log(
              "[OpenAI] request response content type: ",
              contentType,
            );

            // a plain-text response is treated as the whole answer
            if (contentType?.startsWith("text/plain")) {
              responseText = await res.clone().text();
              return finish();
            }

            // anything other than a 200 SSE stream is surfaced as an error message
            if (
              !res.ok ||
              !res.headers
                .get("content-type")
                ?.startsWith(EventStreamContentType) ||
              res.status !== 200
            ) {
              const responseTexts = [responseText];
              let extraInfo = await res.clone().text();
              try {
                const resJson = await res.clone().json();
                extraInfo = prettyObject(resJson);
              } catch {}

              if (res.status === 401) {
                responseTexts.push(Locale.Error.Unauthorized);
              }

              if (extraInfo) {
                responseTexts.push(extraInfo);
              }

              responseText = responseTexts.join("\n\n");

              return finish();
            }
          },
          onmessage(msg) {
            if (msg.data === "[DONE]" || finished) {
              return finish();
            }
            const text = msg.data;
            try {
              const json = JSON.parse(text);
              const delta = json.choices[0].delta.content;
              if (delta) {
                responseText += delta;
                options.onUpdate?.(responseText, delta);
              }
            } catch (e) {
              console.error("[Request] parse error", text, msg);
            }
          },
          onclose() {
            finish();
          },
          onerror(e) {
            options.onError?.(e);
            throw e;
          },
          openWhenHidden: true,
        });
      } else {
        const res = await fetch(chatPath, chatPayload);
        clearTimeout(requestTimeoutId);

        const resJson = await res.json();
        const message = this.extractMessage(resJson);
        options.onFinish(message);
      }
    } catch (e) {
      console.log("[Request] failed to make a chat request", e);
      options.onError?.(e as Error);
    }
  }
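
  // For reference, a streamed completion arrives as SSE events whose data
  // payloads look roughly like this (illustrative, simplified):
  //
  //   data: {"choices":[{"delta":{"content":"Hel"}}]}
  //   data: {"choices":[{"delta":{"content":"lo"}}]}
  //   data: [DONE]
  //
  // onmessage() above appends each delta.content to responseText until the
  // [DONE] sentinel triggers finish().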
  async usage() {
    // format a Date as YYYY-MM-DD for the billing API query string
    const formatDate = (d: Date) =>
      `${d.getFullYear()}-${(d.getMonth() + 1).toString().padStart(2, "0")}-${d
        .getDate()
        .toString()
        .padStart(2, "0")}`;
    const ONE_DAY = 1 * 24 * 60 * 60 * 1000;
    const now = new Date();
    const startOfMonth = new Date(now.getFullYear(), now.getMonth(), 1);
    const startDate = formatDate(startOfMonth);
    const endDate = formatDate(new Date(Date.now() + ONE_DAY));

    // query usage (start of current month through tomorrow) and the
    // subscription hard limit in parallel
    const [used, subs] = await Promise.all([
      fetch(
        this.path(
          `${OpenaiPath.UsagePath}?start_date=${startDate}&end_date=${endDate}`,
        ),
        {
          method: "GET",
          headers: getHeaders(),
        },
      ),
      fetch(this.path(OpenaiPath.SubsPath), {
        method: "GET",
        headers: getHeaders(),
      }),
    ]);

    if (used.status === 401) {
      throw new Error(Locale.Error.Unauthorized);
    }

    if (!used.ok || !subs.ok) {
      throw new Error("Failed to query usage from openai");
    }

    const response = (await used.json()) as {
      total_usage?: number;
      error?: {
        type: string;
        message: string;
      };
    };

    const total = (await subs.json()) as {
      hard_limit_usd?: number;
    };

    if (response.error && response.error.type) {
      throw Error(response.error.message);
    }

    // total_usage is reported in cents; convert to dollars
    if (response.total_usage) {
      response.total_usage = Math.round(response.total_usage) / 100;
    }

    if (total.hard_limit_usd) {
      total.hard_limit_usd = Math.round(total.hard_limit_usd * 100) / 100;
    }

    return {
      used: response.total_usage,
      total: total.hard_limit_usd,
    } as LLMUsage;
  }
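
  // Worked example: a total_usage of 1234.56 (reported in cents) becomes
  // Math.round(1234.56) / 100 = 12.35 dollars.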
  async models(): Promise<LLMModel[]> {
    // listing models is disabled by default; fall back to the built-in list
    if (this.disableListModels) {
      return DEFAULT_MODELS.slice();
    }

    const res = await fetch(this.path(OpenaiPath.ListModelPath), {
      method: "GET",
      headers: {
        ...getHeaders(),
      },
    });

    const resJson = (await res.json()) as OpenAIListModelResponse;
    // keep only chat-capable models
    const chatModels = resJson.data?.filter((m) => m.id.startsWith("gpt-"));
    console.log("[Models]", chatModels);

    if (!chatModels) {
      return [];
    }

    return chatModels.map((m) => ({
      name: m.id,
      available: true,
    }));
  }
}

export { OpenaiPath };
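
/*
 * Example usage (an illustrative sketch, not part of this module). The
 * callback names come from the ChatOptions interface in ../api; the exact
 * message and config shapes are assumed from how chat() reads
 * options.messages and options.config.
 *
 *   const api = new ChatGPTApi();
 *   await api.chat({
 *     messages: [{ role: "user", content: "Hello!" }],
 *     config: { model: "gpt-3.5-turbo", stream: true },
 *     onUpdate(full, delta) {
 *       console.log("partial delta:", delta);
 *     },
 *     onFinish(message) {
 *       console.log("final message:", message);
 *     },
 *     onError(err) {
 *       console.error("chat failed:", err);
 *     },
 *   });
 */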