openai.ts

import { REQUEST_TIMEOUT_MS } from "@/app/constant";
import { useAccessStore, useAppConfig, useChatStore } from "@/app/store";
import { ChatOptions, getHeaders, LLMApi, LLMUsage } from "../api";
import Locale from "../../locales";

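/**
 * OpenAI-compatible chat client implementing the `LLMApi` interface.
 * The relative paths below are joined onto the user-configured base URL
 * by `path()`, so the same class works against proxies and mirrors.
 *
 * Usage sketch (the `ChatOptions` shape is assumed from how it is used in
 * this file; the model name is illustrative only):
 *
 *   const api = new ChatGPTApi();
 *   api.chat({
 *     messages: [{ role: "user", content: "Hello" }],
 *     config: { model: "gpt-3.5-turbo", stream: true },
 *     onUpdate: (full, delta) => console.log(delta),
 *     onFinish: (full) => console.log("done:", full),
 *     onError: (e) => console.error(e),
 *   });
 */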
export class ChatGPTApi implements LLMApi {
  public ChatPath = "v1/chat/completions";
  public UsagePath = "dashboard/billing/usage";
  public SubsPath = "dashboard/billing/subscription";

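  /**
   * Joins a relative API path onto the configured OpenAI base URL,
   * stripping a trailing slash from the base so no "//" appears.
   */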
  path(path: string): string {
    let openaiUrl = useAccessStore.getState().openaiUrl;
    if (openaiUrl.endsWith("/")) {
      openaiUrl = openaiUrl.slice(0, openaiUrl.length - 1);
    }
    return [openaiUrl, path].join("/");
  }

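  /** Pulls the assistant message text out of a non-streaming completion response. */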
  extractMessage(res: any) {
    return res.choices?.at(0)?.message?.content ?? "";
  }

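  /**
   * Sends a chat completion request. When `options.config.stream` is set,
   * the response body is consumed as server-sent events and partial text is
   * reported through `onUpdate`; otherwise the full JSON response is parsed
   * once and delivered through `onFinish`.
   */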
  async chat(options: ChatOptions) {
    const messages = options.messages.map((v) => ({
      role: v.role,
      content: v.content,
    }));

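    // Merge config layers: app defaults, the current session mask's overrides,
    // then the model explicitly chosen for this request (later entries win).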
    const modelConfig = {
      ...useAppConfig.getState().modelConfig,
      ...useChatStore.getState().currentSession().mask.modelConfig,
      ...{
        model: options.config.model,
      },
    };

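    // Request body for the chat completions endpoint; only the options this
    // client sets are forwarded (model, temperature, presence_penalty).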
    const requestPayload = {
      messages,
      stream: options.config.stream,
      model: modelConfig.model,
      temperature: modelConfig.temperature,
      presence_penalty: modelConfig.presence_penalty,
    };

    console.log("[Request] openai payload: ", requestPayload);

    const shouldStream = !!options.config.stream;
    const controller = new AbortController();
    // Hand the controller to the caller so the in-flight request can be cancelled.
    options.onController?.(controller);

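    // Build and send the request; the same controller serves both the timeout
    // below and caller-initiated cancellation.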
    try {
      const chatPath = this.path(this.ChatPath);
      const chatPayload = {
        method: "POST",
        body: JSON.stringify(requestPayload),
        signal: controller.signal,
        headers: getHeaders(),
      };

      // Abort the fetch if it runs past REQUEST_TIMEOUT_MS.
      const requestTimeoutId = setTimeout(
        () => controller.abort(),
        REQUEST_TIMEOUT_MS,
      );

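      // For streaming responses, read the SSE body incrementally; otherwise
      // parse the JSON reply once.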
      if (shouldStream) {
        let responseText = "";

        const finish = () => {
          options.onFinish(responseText);
        };

        const res = await fetch(chatPath, chatPayload);
        clearTimeout(requestTimeoutId);

        if (res.status === 401) {
          responseText += "\n\n" + Locale.Error.Unauthorized;
          return finish();
        }

        if (
          !res.ok ||
          !res.headers.get("Content-Type")?.includes("stream") ||
          !res.body
        ) {
          return options.onError?.(new Error());
        }

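        // Each decoded chunk may carry several "data:" events; split them out
        // and append each event's JSON delta to the accumulated response text.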
        const reader = res.body.getReader();
        const decoder = new TextDecoder("utf-8");

        while (true) {
          const { done, value } = await reader.read();
          if (done) {
            return finish();
          }

          const chunk = decoder.decode(value, { stream: true });
          const lines = chunk.split("data: ");

          for (const line of lines) {
            const text = line.trim();
            if (line.startsWith("[DONE]")) {
              return finish();
            }
            if (text.length === 0) continue;
            try {
              const json = JSON.parse(text);
              const delta = json.choices[0].delta.content;
              if (delta) {
                responseText += delta;
                options.onUpdate?.(responseText, delta);
              }
            } catch (e) {
              console.error("[Request] parse error", text, chunk);
            }
          }
        }
      } else {
        // Non-streaming: wait for the full JSON response and hand it back in one call.
        const res = await fetch(chatPath, chatPayload);
        clearTimeout(requestTimeoutId);

        const resJson = await res.json();
        const message = this.extractMessage(resJson);
        options.onFinish(message);
      }
    } catch (e) {
      console.log("[Request] failed to make a chat request", e);
      options.onError?.(e as Error);
    }
  }

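  /**
   * Fetches month-to-date usage and the account's hard spending limit from
   * the OpenAI billing endpoints, both returned in USD.
   */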
  async usage() {
    const formatDate = (d: Date) =>
      `${d.getFullYear()}-${(d.getMonth() + 1).toString().padStart(2, "0")}-${d
        .getDate()
        .toString()
        .padStart(2, "0")}`;
    const ONE_DAY = 1 * 24 * 60 * 60 * 1000;
    const now = new Date();
    const startOfMonth = new Date(now.getFullYear(), now.getMonth(), 1);
    const startDate = formatDate(startOfMonth);
    const endDate = formatDate(new Date(Date.now() + ONE_DAY));

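    // Query usage for the current billing window and the subscription limit in parallel.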
    const [used, subs] = await Promise.all([
      fetch(
        this.path(
          `${this.UsagePath}?start_date=${startDate}&end_date=${endDate}`,
        ),
        {
          method: "GET",
          headers: getHeaders(),
        },
      ),
      fetch(this.path(this.SubsPath), {
        method: "GET",
        headers: getHeaders(),
      }),
    ]);

    if (!used.ok || !subs.ok || used.status === 401) {
      throw new Error(Locale.Error.Unauthorized);
    }

    const response = (await used.json()) as {
      total_usage?: number;
      error?: {
        type: string;
        message: string;
      };
    };

    const total = (await subs.json()) as {
      hard_limit_usd?: number;
    };

    if (response.error && response.error.type) {
      throw Error(response.error.message);
    }

    if (response.total_usage) {
      // total_usage is reported in cents; round it, then convert to dollars.
      response.total_usage = Math.round(response.total_usage) / 100;
    }

    if (total.hard_limit_usd) {
      // Round the dollar limit to two decimal places.
      total.hard_limit_usd = Math.round(total.hard_limit_usd * 100) / 100;
    }

    return {
      used: response.total_usage,
      total: total.hard_limit_usd,
    } as LLMUsage;
  }
}