requests.ts 6.6 KB

123456789101112131415161718192021222324252627282930313233343536373839404142434445464748495051525354555657585960616263646566676869707172737475767778798081828384858687888990919293949596979899100101102103104105106107108109110111112113114115116117118119120121122123124125126127128129130131132133134135136137138139140141142143144145146147148149150151152153154155156157158159160161162163164165166167168169170171172173174175176177178179180181182183184185186187188189190191192193194195196197198199200201202203204205206207208209210211212213214215216217218219220221222223224225226227228229230231232233234235236237238239240241242243244245246247248249250251252253254255256257258259260261262263264265266267268269270271272273274275276277278279
  1. import type { ChatRequest, ChatResponse } from "./api/openai/typing";
  2. import {
  3. Message,
  4. ModelConfig,
  5. ModelType,
  6. useAccessStore,
  7. useAppConfig,
  8. useChatStore,
  9. } from "./store";
  10. import { showToast } from "./components/ui-lib";
// Shared timeout in milliseconds: used for the initial request abort and for
// each individual read of the streaming response (see requestChatStream).
const TIME_OUT_MS = 60000;
  12. const makeRequestParam = (
  13. messages: Message[],
  14. options?: {
  15. stream?: boolean;
  16. overrideModel?: ModelType;
  17. },
  18. ): ChatRequest => {
  19. let sendMessages = messages.map((v) => ({
  20. role: v.role,
  21. content: v.content,
  22. }));
  23. const modelConfig = {
  24. ...useAppConfig.getState().modelConfig,
  25. ...useChatStore.getState().currentSession().mask.modelConfig,
  26. };
  27. // override model config
  28. if (options?.overrideModel) {
  29. modelConfig.model = options.overrideModel;
  30. }
  31. return {
  32. messages: sendMessages,
  33. stream: options?.stream,
  34. model: modelConfig.model,
  35. temperature: modelConfig.temperature,
  36. presence_penalty: modelConfig.presence_penalty,
  37. };
  38. };
  39. function getHeaders() {
  40. const accessStore = useAccessStore.getState();
  41. let headers: Record<string, string> = {};
  42. if (accessStore.enabledAccessControl()) {
  43. headers["access-code"] = accessStore.accessCode;
  44. }
  45. if (accessStore.token && accessStore.token.length > 0) {
  46. headers["token"] = accessStore.token;
  47. }
  48. return headers;
  49. }
  50. export function requestOpenaiClient(path: string) {
  51. return (body: any, method = "POST") =>
  52. fetch("/api/openai", {
  53. method,
  54. headers: {
  55. "Content-Type": "application/json",
  56. path,
  57. ...getHeaders(),
  58. },
  59. body: body && JSON.stringify(body),
  60. });
  61. }
  62. export async function requestChat(
  63. messages: Message[],
  64. options?: {
  65. model?: ModelType;
  66. },
  67. ) {
  68. const req: ChatRequest = makeRequestParam(messages, {
  69. overrideModel: options?.model,
  70. });
  71. const res = await requestOpenaiClient("v1/chat/completions")(req);
  72. try {
  73. const response = (await res.json()) as ChatResponse;
  74. return response;
  75. } catch (error) {
  76. console.error("[Request Chat] ", error, res.body);
  77. }
  78. }
  79. export async function requestUsage() {
  80. const formatDate = (d: Date) =>
  81. `${d.getFullYear()}-${(d.getMonth() + 1).toString().padStart(2, "0")}-${d
  82. .getDate()
  83. .toString()
  84. .padStart(2, "0")}`;
  85. const ONE_DAY = 1 * 24 * 60 * 60 * 1000;
  86. const now = new Date();
  87. const startOfMonth = new Date(now.getFullYear(), now.getMonth(), 1);
  88. const startDate = formatDate(startOfMonth);
  89. const endDate = formatDate(new Date(Date.now() + ONE_DAY));
  90. const [used, subs] = await Promise.all([
  91. requestOpenaiClient(
  92. `dashboard/billing/usage?start_date=${startDate}&end_date=${endDate}`,
  93. )(null, "GET"),
  94. requestOpenaiClient("dashboard/billing/subscription")(null, "GET"),
  95. ]);
  96. const response = (await used.json()) as {
  97. total_usage?: number;
  98. error?: {
  99. type: string;
  100. message: string;
  101. };
  102. };
  103. const total = (await subs.json()) as {
  104. hard_limit_usd?: number;
  105. };
  106. if (response.error && response.error.type) {
  107. showToast(response.error.message);
  108. return;
  109. }
  110. if (response.total_usage) {
  111. response.total_usage = Math.round(response.total_usage) / 100;
  112. }
  113. if (total.hard_limit_usd) {
  114. total.hard_limit_usd = Math.round(total.hard_limit_usd * 100) / 100;
  115. }
  116. return {
  117. used: response.total_usage,
  118. subscription: total.hard_limit_usd,
  119. };
  120. }
// POST the chat request to the streaming endpoint and forward the
// accumulated text to options.onMessage after every chunk, then once more
// with done=true. TIME_OUT_MS bounds both the initial fetch and each
// individual read from the response stream.
// NOTE(review): options.modelConfig is accepted but never read here —
// the model config comes from makeRequestParam's store lookup instead.
export async function requestChatStream(
  messages: Message[],
  options?: {
    modelConfig?: ModelConfig;
    overrideModel?: ModelType;
    onMessage: (message: string, done: boolean) => void;
    onError: (error: Error, statusCode?: number) => void;
    onController?: (controller: AbortController) => void;
  },
) {
  const req = makeRequestParam(messages, {
    stream: true,
    overrideModel: options?.overrideModel,
  });

  console.log("[Request] ", req);

  // Abort the fetch if no response headers arrive within the timeout.
  const controller = new AbortController();
  const reqTimeoutId = setTimeout(() => controller.abort(), TIME_OUT_MS);

  try {
    const res = await fetch("/api/chat-stream", {
      method: "POST",
      headers: {
        "Content-Type": "application/json",
        path: "v1/chat/completions",
        ...getHeaders(),
      },
      body: JSON.stringify(req),
      signal: controller.signal,
    });
    // Headers received: the request-level timeout no longer applies.
    clearTimeout(reqTimeoutId);

    let responseText = "";

    // Deliver the final text and abort to tear down the connection.
    const finish = () => {
      options?.onMessage(responseText, true);
      controller.abort();
    };

    if (res.ok) {
      const reader = res.body?.getReader();
      const decoder = new TextDecoder();

      // Expose the controller so the UI can cancel this stream.
      options?.onController?.(controller);

      while (true) {
        // Per-read timeout: if a chunk stalls, finish with what we have.
        const resTimeoutId = setTimeout(() => finish(), TIME_OUT_MS);
        const content = await reader?.read();
        clearTimeout(resTimeoutId);

        // No reader, or a final read with no value: stream is exhausted.
        if (!content || !content.value) {
          break;
        }

        // stream: true keeps multi-byte characters split across chunks intact.
        const text = decoder.decode(content.value, { stream: true });
        responseText += text;

        const done = content.done;
        options?.onMessage(responseText, false);

        if (done) {
          break;
        }
      }

      finish();
    } else if (res.status === 401) {
      console.error("Unauthorized");
      options?.onError(new Error("Unauthorized"), res.status);
    } else {
      console.error("Stream Error", res.body);
      options?.onError(new Error("Stream Error"), res.status);
    }
  } catch (err) {
    // Network failure or abort before headers arrived.
    console.error("NetWork Error", err);
    options?.onError(err as Error);
  }
}
  187. export async function requestWithPrompt(
  188. messages: Message[],
  189. prompt: string,
  190. options?: {
  191. model?: ModelType;
  192. },
  193. ) {
  194. messages = messages.concat([
  195. {
  196. role: "user",
  197. content: prompt,
  198. date: new Date().toLocaleString(),
  199. },
  200. ]);
  201. const res = await requestChat(messages, options);
  202. return res?.choices?.at(0)?.message?.content ?? "";
  203. }
  204. // To store message streaming controller
  205. export const ControllerPool = {
  206. controllers: {} as Record<string, AbortController>,
  207. addController(
  208. sessionIndex: number,
  209. messageId: number,
  210. controller: AbortController,
  211. ) {
  212. const key = this.key(sessionIndex, messageId);
  213. this.controllers[key] = controller;
  214. return key;
  215. },
  216. stop(sessionIndex: number, messageId: number) {
  217. const key = this.key(sessionIndex, messageId);
  218. const controller = this.controllers[key];
  219. controller?.abort();
  220. },
  221. stopAll() {
  222. Object.values(this.controllers).forEach((v) => v.abort());
  223. },
  224. hasPending() {
  225. return Object.values(this.controllers).length > 0;
  226. },
  227. remove(sessionIndex: number, messageId: number) {
  228. const key = this.key(sessionIndex, messageId);
  229. delete this.controllers[key];
  230. },
  231. key(sessionIndex: number, messageIndex: number) {
  232. return `${sessionIndex},${messageIndex}`;
  233. },
  234. };