// requests.ts — OpenAI request helpers (chat, streaming, usage/billing).
  1. import type { ChatRequest, ChatResponse } from "./api/openai/typing";
  2. import { Message, ModelConfig, useAccessStore, useChatStore } from "./store";
  3. import { showToast } from "./components/ui-lib";
// Shared timeout in milliseconds: used both for the initial request
// and for each per-chunk read while streaming.
const TIME_OUT_MS = 30000;
  5. const makeRequestParam = (
  6. messages: Message[],
  7. options?: {
  8. filterBot?: boolean;
  9. stream?: boolean;
  10. },
  11. ): ChatRequest => {
  12. let sendMessages = messages.map((v) => ({
  13. role: v.role,
  14. content: v.content,
  15. }));
  16. if (options?.filterBot) {
  17. sendMessages = sendMessages.filter((m) => m.role !== "assistant");
  18. }
  19. const modelConfig = { ...useChatStore.getState().config.modelConfig };
  20. // @yidadaa: wont send max_tokens, because it is nonsense for Muggles
  21. // @ts-expect-error
  22. delete modelConfig.max_tokens;
  23. return {
  24. messages: sendMessages,
  25. stream: options?.stream,
  26. ...modelConfig,
  27. };
  28. };
  29. function getHeaders() {
  30. const accessStore = useAccessStore.getState();
  31. let headers: Record<string, string> = {};
  32. if (accessStore.enabledAccessControl()) {
  33. headers["access-code"] = accessStore.accessCode;
  34. }
  35. if (accessStore.token && accessStore.token.length > 0) {
  36. headers["token"] = accessStore.token;
  37. }
  38. return headers;
  39. }
  40. export function requestOpenaiClient(path: string) {
  41. return (body: any, method = "POST") =>
  42. fetch("/api/openai?_vercel_no_cache=1", {
  43. method,
  44. headers: {
  45. "Content-Type": "application/json",
  46. path,
  47. ...getHeaders(),
  48. },
  49. body: body && JSON.stringify(body),
  50. });
  51. }
  52. export async function requestChat(messages: Message[]) {
  53. const req: ChatRequest = makeRequestParam(messages, { filterBot: true });
  54. const res = await requestOpenaiClient("v1/chat/completions")(req);
  55. try {
  56. const response = (await res.json()) as ChatResponse;
  57. return response;
  58. } catch (error) {
  59. console.error("[Request Chat] ", error, res.body);
  60. }
  61. }
  62. export async function requestUsage() {
  63. const formatDate = (d: Date) =>
  64. `${d.getFullYear()}-${(d.getMonth() + 1).toString().padStart(2, "0")}-${d
  65. .getDate()
  66. .toString()
  67. .padStart(2, "0")}`;
  68. const ONE_DAY = 2 * 24 * 60 * 60 * 1000;
  69. const now = new Date(Date.now() + ONE_DAY);
  70. const startOfMonth = new Date(now.getFullYear(), now.getMonth(), 1);
  71. const startDate = formatDate(startOfMonth);
  72. const endDate = formatDate(now);
  73. const [used, subs] = await Promise.all([
  74. requestOpenaiClient(
  75. `dashboard/billing/usage?start_date=${startDate}&end_date=${endDate}`,
  76. )(null, "GET"),
  77. requestOpenaiClient("dashboard/billing/subscription")(null, "GET"),
  78. ]);
  79. const response = (await used.json()) as {
  80. total_usage?: number;
  81. error?: {
  82. type: string;
  83. message: string;
  84. };
  85. };
  86. const total = (await subs.json()) as {
  87. hard_limit_usd?: number;
  88. };
  89. if (response.error && response.error.type) {
  90. showToast(response.error.message);
  91. return;
  92. }
  93. if (response.total_usage) {
  94. response.total_usage = Math.round(response.total_usage) / 100;
  95. }
  96. return {
  97. used: response.total_usage,
  98. subscription: total.hard_limit_usd,
  99. };
  100. }
/**
 * Streaming chat completion via the /api/chat-stream route.
 *
 * Accumulated text is pushed to options.onMessage after every chunk
 * (done=false), then once more with done=true when the stream ends.
 * HTTP errors and network failures are reported via options.onError.
 * The AbortController is exposed through options.onController so the
 * UI can cancel an in-flight stream.
 *
 * NOTE(review): options.modelConfig is accepted but never read here —
 * the model config comes from the chat store inside makeRequestParam.
 */
export async function requestChatStream(
  messages: Message[],
  options?: {
    filterBot?: boolean;
    modelConfig?: ModelConfig;
    onMessage: (message: string, done: boolean) => void;
    onError: (error: Error, statusCode?: number) => void;
    onController?: (controller: AbortController) => void;
  },
) {
  const req = makeRequestParam(messages, {
    stream: true,
    filterBot: options?.filterBot,
  });

  console.log("[Request] ", req);

  const controller = new AbortController();
  // Abort the whole request if no response arrives within TIME_OUT_MS.
  const reqTimeoutId = setTimeout(() => controller.abort(), TIME_OUT_MS);

  try {
    const res = await fetch("/api/chat-stream", {
      method: "POST",
      headers: {
        "Content-Type": "application/json",
        path: "v1/chat/completions",
        ...getHeaders(),
      },
      body: JSON.stringify(req),
      signal: controller.signal,
    });
    clearTimeout(reqTimeoutId);

    let responseText = "";

    // Deliver the final accumulated text and abort the underlying fetch.
    const finish = () => {
      options?.onMessage(responseText, true);
      controller.abort();
    };

    if (res.ok) {
      const reader = res.body?.getReader();
      const decoder = new TextDecoder();

      options?.onController?.(controller);

      while (true) {
        // Per-chunk timeout: finish early if no data arrives within
        // TIME_OUT_MS (30 s; an earlier comment here said 10 secs).
        const resTimeoutId = setTimeout(() => finish(), TIME_OUT_MS);
        const content = await reader?.read();
        clearTimeout(resTimeoutId);
        if (!content || !content.value) {
          break;
        }

        // stream:true keeps multi-byte chars split across chunks intact.
        const text = decoder.decode(content.value, { stream: true });
        responseText += text;

        const done = content.done;
        options?.onMessage(responseText, false);

        if (done) {
          break;
        }
      }

      finish();
    } else if (res.status === 401) {
      console.error("Unauthorized");
      options?.onError(new Error("Unauthorized"), res.status);
    } else {
      console.error("Stream Error", res.body);
      options?.onError(new Error("Stream Error"), res.status);
    }
  } catch (err) {
    console.error("NetWork Error", err);
    options?.onError(err as Error);
  }
}
  168. export async function requestWithPrompt(messages: Message[], prompt: string) {
  169. messages = messages.concat([
  170. {
  171. role: "user",
  172. content: prompt,
  173. date: new Date().toLocaleString(),
  174. },
  175. ]);
  176. const res = await requestChat(messages);
  177. return res?.choices?.at(0)?.message?.content ?? "";
  178. }
  179. // To store message streaming controller
  180. export const ControllerPool = {
  181. controllers: {} as Record<string, AbortController>,
  182. addController(
  183. sessionIndex: number,
  184. messageId: number,
  185. controller: AbortController,
  186. ) {
  187. const key = this.key(sessionIndex, messageId);
  188. this.controllers[key] = controller;
  189. return key;
  190. },
  191. stop(sessionIndex: number, messageId: number) {
  192. const key = this.key(sessionIndex, messageId);
  193. const controller = this.controllers[key];
  194. controller?.abort();
  195. },
  196. remove(sessionIndex: number, messageId: number) {
  197. const key = this.key(sessionIndex, messageId);
  198. delete this.controllers[key];
  199. },
  200. key(sessionIndex: number, messageIndex: number) {
  201. return `${sessionIndex},${messageIndex}`;
  202. },
  203. };