// requests.ts
  1. import type { ChatRequest, ChatReponse } from "./api/openai/typing";
  2. import { Message, ModelConfig, useAccessStore, useChatStore } from "./store";
  3. import { showToast } from "./components/ui-lib";
// 30s timeout — used both for the initial request and for stream-stall detection
const TIME_OUT_MS = 30000;
  5. const makeRequestParam = (
  6. messages: Message[],
  7. options?: {
  8. filterBot?: boolean;
  9. stream?: boolean;
  10. },
  11. ): ChatRequest => {
  12. let sendMessages = messages.map((v) => ({
  13. role: v.role,
  14. content: v.content,
  15. }));
  16. if (options?.filterBot) {
  17. sendMessages = sendMessages.filter((m) => m.role !== "assistant");
  18. }
  19. const modelConfig = { ...useChatStore.getState().config.modelConfig };
  20. // @yidadaa: wont send max_tokens, because it is nonsense for Muggles
  21. // @ts-expect-error
  22. delete modelConfig.max_tokens;
  23. return {
  24. messages: sendMessages,
  25. stream: options?.stream,
  26. ...modelConfig,
  27. };
  28. };
  29. function getHeaders() {
  30. const accessStore = useAccessStore.getState();
  31. let headers: Record<string, string> = {};
  32. if (accessStore.enabledAccessControl()) {
  33. headers["access-code"] = accessStore.accessCode;
  34. }
  35. if (accessStore.token && accessStore.token.length > 0) {
  36. headers["token"] = accessStore.token;
  37. }
  38. return headers;
  39. }
  40. export function requestOpenaiClient(path: string) {
  41. return (body: any, method = "POST") =>
  42. fetch("/api/openai?_vercel_no_cache=1", {
  43. method,
  44. headers: {
  45. "Content-Type": "application/json",
  46. path,
  47. ...getHeaders(),
  48. },
  49. body: body && JSON.stringify(body),
  50. });
  51. }
  52. export async function requestChat(messages: Message[]) {
  53. const req: ChatRequest = makeRequestParam(messages, { filterBot: true });
  54. const res = await requestOpenaiClient("v1/chat/completions")(req);
  55. try {
  56. const response = (await res.json()) as ChatReponse;
  57. return response;
  58. } catch (error) {
  59. console.error("[Request Chat] ", error, res.body);
  60. }
  61. }
  62. export async function requestUsage() {
  63. const formatDate = (d: Date) =>
  64. `${d.getFullYear()}-${(d.getMonth() + 1).toString().padStart(2, "0")}-${d
  65. .getDate()
  66. .toString()
  67. .padStart(2, "0")}`;
  68. const ONE_DAY = 2 * 24 * 60 * 60 * 1000;
  69. const now = new Date(Date.now() + ONE_DAY);
  70. const startOfMonth = new Date(now.getFullYear(), now.getMonth(), 1);
  71. const startDate = formatDate(startOfMonth);
  72. const endDate = formatDate(now);
  73. const [used, subs] = await Promise.all([
  74. requestOpenaiClient(
  75. `dashboard/billing/usage?start_date=${startDate}&end_date=${endDate}`,
  76. )(null, "GET"),
  77. requestOpenaiClient("dashboard/billing/subscription")(null, "GET"),
  78. ]);
  79. const response = (await used.json()) as {
  80. total_usage?: number;
  81. error?: {
  82. type: string;
  83. message: string;
  84. };
  85. };
  86. const total = (await subs.json()) as {
  87. hard_limit_usd?: number;
  88. };
  89. if (response.error && response.error.type) {
  90. showToast(response.error.message);
  91. return;
  92. }
  93. if (response.total_usage) {
  94. response.total_usage = Math.round(response.total_usage) / 100;
  95. }
  96. return {
  97. used: response.total_usage,
  98. subscription: total.hard_limit_usd,
  99. };
  100. }
  101. export async function requestChatStream(
  102. messages: Message[],
  103. options?: {
  104. filterBot?: boolean;
  105. modelConfig?: ModelConfig;
  106. onMessage: (message: string, done: boolean) => void;
  107. onError: (error: Error, statusCode?: number) => void;
  108. onController?: (controller: AbortController) => void;
  109. },
  110. ) {
  111. const req = makeRequestParam(messages, {
  112. stream: true,
  113. filterBot: options?.filterBot,
  114. });
  115. console.log("[Request] ", req);
  116. const controller = new AbortController();
  117. const reqTimeoutId = setTimeout(() => controller.abort(), TIME_OUT_MS);
  118. try {
  119. const res = await fetch("/api/chat-stream", {
  120. method: "POST",
  121. headers: {
  122. "Content-Type": "application/json",
  123. path: "v1/chat/completions",
  124. ...getHeaders(),
  125. },
  126. body: JSON.stringify(req),
  127. signal: controller.signal,
  128. });
  129. clearTimeout(reqTimeoutId);
  130. let responseText = "";
  131. const finish = () => {
  132. options?.onMessage(responseText, true);
  133. controller.abort();
  134. };
  135. if (res.ok) {
  136. const reader = res.body?.getReader();
  137. const decoder = new TextDecoder();
  138. options?.onController?.(controller);
  139. while (true) {
  140. // handle time out, will stop if no response in 10 secs
  141. const resTimeoutId = setTimeout(() => finish(), TIME_OUT_MS);
  142. const content = await reader?.read();
  143. clearTimeout(resTimeoutId);
  144. const text = decoder.decode(content?.value);
  145. responseText += text;
  146. const done = !content || content.done;
  147. options?.onMessage(responseText, false);
  148. if (done) {
  149. break;
  150. }
  151. }
  152. finish();
  153. } else if (res.status === 401) {
  154. console.error("Anauthorized");
  155. options?.onError(new Error("Anauthorized"), res.status);
  156. } else {
  157. console.error("Stream Error", res.body);
  158. options?.onError(new Error("Stream Error"), res.status);
  159. }
  160. } catch (err) {
  161. console.error("NetWork Error", err);
  162. options?.onError(err as Error);
  163. }
  164. }
  165. export async function requestWithPrompt(messages: Message[], prompt: string) {
  166. messages = messages.concat([
  167. {
  168. role: "user",
  169. content: prompt,
  170. date: new Date().toLocaleString(),
  171. },
  172. ]);
  173. const res = await requestChat(messages);
  174. return res?.choices?.at(0)?.message?.content ?? "";
  175. }
  176. // To store message streaming controller
  177. export const ControllerPool = {
  178. controllers: {} as Record<string, AbortController>,
  179. addController(
  180. sessionIndex: number,
  181. messageId: number,
  182. controller: AbortController,
  183. ) {
  184. const key = this.key(sessionIndex, messageId);
  185. this.controllers[key] = controller;
  186. return key;
  187. },
  188. stop(sessionIndex: number, messageId: number) {
  189. const key = this.key(sessionIndex, messageId);
  190. const controller = this.controllers[key];
  191. controller?.abort();
  192. },
  193. remove(sessionIndex: number, messageId: number) {
  194. const key = this.key(sessionIndex, messageId);
  195. delete this.controllers[key];
  196. },
  197. key(sessionIndex: number, messageIndex: number) {
  198. return `${sessionIndex},${messageIndex}`;
  199. },
  200. };