// requests.ts
  1. import type { ChatRequest, ChatResponse } from "./api/openai/typing";
  2. import { Message, ModelConfig, useAccessStore, useChatStore } from "./store";
  3. import { showToast } from "./components/ui-lib";
// Shared timeout in milliseconds: used both for the initial request
// and for each individual stream-chunk read in requestChatStream.
const TIME_OUT_MS = 60000;
  5. const makeRequestParam = (
  6. messages: Message[],
  7. options?: {
  8. filterBot?: boolean;
  9. stream?: boolean;
  10. },
  11. ): ChatRequest => {
  12. let sendMessages = messages.map((v) => ({
  13. role: v.role,
  14. content: v.content,
  15. }));
  16. if (options?.filterBot) {
  17. sendMessages = sendMessages.filter((m) => m.role !== "assistant");
  18. }
  19. const modelConfig = { ...useChatStore.getState().config.modelConfig };
  20. // @yidadaa: wont send max_tokens, because it is nonsense for Muggles
  21. // @ts-expect-error
  22. delete modelConfig.max_tokens;
  23. return {
  24. messages: sendMessages,
  25. stream: options?.stream,
  26. ...modelConfig,
  27. };
  28. };
  29. function getHeaders() {
  30. const accessStore = useAccessStore.getState();
  31. let headers: Record<string, string> = {};
  32. if (accessStore.enabledAccessControl()) {
  33. headers["access-code"] = accessStore.accessCode;
  34. }
  35. if (accessStore.token && accessStore.token.length > 0) {
  36. headers["token"] = accessStore.token;
  37. }
  38. return headers;
  39. }
  40. export function requestOpenaiClient(path: string) {
  41. return (body: any, method = "POST") =>
  42. fetch("/api/openai?_vercel_no_cache=1", {
  43. method,
  44. headers: {
  45. "Content-Type": "application/json",
  46. path,
  47. ...getHeaders(),
  48. },
  49. body: body && JSON.stringify(body),
  50. });
  51. }
  52. export async function requestChat(messages: Message[]) {
  53. const req: ChatRequest = makeRequestParam(messages, { filterBot: true });
  54. const res = await requestOpenaiClient("v1/chat/completions")(req);
  55. try {
  56. const response = (await res.json()) as ChatResponse;
  57. return response;
  58. } catch (error) {
  59. console.error("[Request Chat] ", error, res.body);
  60. }
  61. }
  62. export async function requestUsage() {
  63. const formatDate = (d: Date) =>
  64. `${d.getFullYear()}-${(d.getMonth() + 1).toString().padStart(2, "0")}-${d
  65. .getDate()
  66. .toString()
  67. .padStart(2, "0")}`;
  68. const ONE_DAY = 2 * 24 * 60 * 60 * 1000;
  69. const now = new Date(Date.now() + ONE_DAY);
  70. const startOfMonth = new Date(now.getFullYear(), now.getMonth(), 1);
  71. const startDate = formatDate(startOfMonth);
  72. const endDate = formatDate(now);
  73. const [used, subs] = await Promise.all([
  74. requestOpenaiClient(
  75. `dashboard/billing/usage?start_date=${startDate}&end_date=${endDate}`,
  76. )(null, "GET"),
  77. requestOpenaiClient("dashboard/billing/subscription")(null, "GET"),
  78. ]);
  79. const response = (await used.json()) as {
  80. total_usage?: number;
  81. error?: {
  82. type: string;
  83. message: string;
  84. };
  85. };
  86. const total = (await subs.json()) as {
  87. hard_limit_usd?: number;
  88. };
  89. if (response.error && response.error.type) {
  90. showToast(response.error.message);
  91. return;
  92. }
  93. if (response.total_usage) {
  94. response.total_usage = Math.round(response.total_usage) / 100;
  95. }
  96. if (total.hard_limit_usd) {
  97. total.hard_limit_usd = Math.round(total.hard_limit_usd * 100) / 100;
  98. }
  99. return {
  100. used: response.total_usage,
  101. subscription: total.hard_limit_usd,
  102. };
  103. }
/**
 * Stream a chat completion via the local `/api/chat-stream` proxy.
 *
 * Partial text is pushed to `options.onMessage(text, false)` as chunks
 * arrive; a final `onMessage(text, true)` signals completion. HTTP and
 * network failures are routed to `options.onError`.
 */
export async function requestChatStream(
  messages: Message[],
  options?: {
    filterBot?: boolean;
    modelConfig?: ModelConfig;
    onMessage: (message: string, done: boolean) => void;
    onError: (error: Error, statusCode?: number) => void;
    onController?: (controller: AbortController) => void;
  },
) {
  // Build a streaming payload, optionally dropping assistant turns.
  const req = makeRequestParam(messages, {
    stream: true,
    filterBot: options?.filterBot,
  });

  console.log("[Request] ", req);

  const controller = new AbortController();
  // Abort the whole request if the server never answers within TIME_OUT_MS.
  const reqTimeoutId = setTimeout(() => controller.abort(), TIME_OUT_MS);

  try {
    const res = await fetch("/api/chat-stream", {
      method: "POST",
      headers: {
        "Content-Type": "application/json",
        path: "v1/chat/completions",
        ...getHeaders(),
      },
      body: JSON.stringify(req),
      signal: controller.signal,
    });
    // Response headers arrived; the connect timeout no longer applies.
    clearTimeout(reqTimeoutId);

    let responseText = "";

    // Deliver the final accumulated text and tear the connection down.
    const finish = () => {
      options?.onMessage(responseText, true);
      controller.abort();
    };

    if (res.ok) {
      const reader = res.body?.getReader();
      const decoder = new TextDecoder();

      // Hand the controller to the caller so it can cancel mid-stream.
      options?.onController?.(controller);

      while (true) {
        // Per-chunk timeout: finish with whatever we have if the stream stalls.
        const resTimeoutId = setTimeout(() => finish(), TIME_OUT_MS);
        const content = await reader?.read();
        clearTimeout(resTimeoutId);
        // No reader or an empty read (typically the end-of-stream signal).
        if (!content || !content.value) {
          break;
        }

        // stream:true keeps multi-byte characters split across chunks intact.
        const text = decoder.decode(content.value, { stream: true });
        responseText += text;

        const done = content.done;
        // Surface the partial transcript to the caller.
        options?.onMessage(responseText, false);

        if (done) {
          break;
        }
      }

      finish();
    } else if (res.status === 401) {
      console.error("Unauthorized");
      options?.onError(new Error("Unauthorized"), res.status);
    } else {
      console.error("Stream Error", res.body);
      options?.onError(new Error("Stream Error"), res.status);
    }
  } catch (err) {
    console.error("NetWork Error", err);
    options?.onError(err as Error);
  }
}
  170. export async function requestWithPrompt(messages: Message[], prompt: string) {
  171. messages = messages.concat([
  172. {
  173. role: "user",
  174. content: prompt,
  175. date: new Date().toLocaleString(),
  176. },
  177. ]);
  178. const res = await requestChat(messages);
  179. return res?.choices?.at(0)?.message?.content ?? "";
  180. }
  181. // To store message streaming controller
  182. export const ControllerPool = {
  183. controllers: {} as Record<string, AbortController>,
  184. addController(
  185. sessionIndex: number,
  186. messageId: number,
  187. controller: AbortController,
  188. ) {
  189. const key = this.key(sessionIndex, messageId);
  190. this.controllers[key] = controller;
  191. return key;
  192. },
  193. stop(sessionIndex: number, messageId: number) {
  194. const key = this.key(sessionIndex, messageId);
  195. const controller = this.controllers[key];
  196. controller?.abort();
  197. },
  198. stopAll() {
  199. Object.values(this.controllers).forEach((v) => v.abort());
  200. },
  201. hasPending() {
  202. return Object.values(this.controllers).length > 0;
  203. },
  204. remove(sessionIndex: number, messageId: number) {
  205. const key = this.key(sessionIndex, messageId);
  206. delete this.controllers[key];
  207. },
  208. key(sessionIndex: number, messageIndex: number) {
  209. return `${sessionIndex},${messageIndex}`;
  210. },
  211. };