requests.ts

import type { ChatRequest, ChatReponse } from "./api/chat/typing";
import { filterConfig, Message, ModelConfig } from "./store";

const TIME_OUT_MS = 30000;

const makeRequestParam = (
  messages: Message[],
  options?: {
    filterBot?: boolean;
    stream?: boolean;
  }
): ChatRequest => {
  let sendMessages = messages.map((v) => ({
    role: v.role,
    content: v.content,
  }));

  // Optionally strip assistant replies so only user/system messages are sent.
  if (options?.filterBot) {
    sendMessages = sendMessages.filter((m) => m.role !== "assistant");
  }

  return {
    model: "gpt-3.5-turbo",
    messages: sendMessages,
    stream: options?.stream,
  };
};
// Non-streaming chat: send the whole conversation and wait for the full reply.
export async function requestChat(messages: Message[]) {
  const req: ChatRequest = makeRequestParam(messages, { filterBot: true });

  const res = await fetch("/api/chat", {
    method: "POST",
    headers: {
      "Content-Type": "application/json",
    },
    body: JSON.stringify(req),
  });

  return (await res.json()) as ChatReponse;
}
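
// Example usage (illustrative sketch; the exampleChatUsage name and message
// values are hypothetical): requestChat resolves with the parsed ChatReponse,
// whose first choice holds the assistant reply.
async function exampleChatUsage() {
  const history: Message[] = [
    { role: "user", content: "Hello!", date: new Date().toLocaleString() },
  ];
  const res = await requestChat(history);
  console.log(res.choices.at(0)?.message?.content ?? "");
}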
// Streaming chat: POST the conversation and report incremental text through
// the onMessage callback until the stream ends or times out.
export async function requestChatStream(
  messages: Message[],
  options?: {
    filterBot?: boolean;
    modelConfig?: ModelConfig;
    onMessage: (message: string, done: boolean) => void;
    onError: (error: Error) => void;
  }
) {
  const req = makeRequestParam(messages, {
    stream: true,
    filterBot: options?.filterBot,
  });

  // Validate and merge model config overrides into the request.
  if (options?.modelConfig) {
    Object.assign(req, filterConfig(options.modelConfig));
  }

  console.log("[Request] ", req);

  const controller = new AbortController();
  const reqTimeoutId = setTimeout(() => controller.abort(), TIME_OUT_MS);

  try {
    const res = await fetch("/api/chat-stream", {
      method: "POST",
      headers: {
        "Content-Type": "application/json",
      },
      body: JSON.stringify(req),
      signal: controller.signal,
    });
    clearTimeout(reqTimeoutId);

    let responseText = "";

    const finish = () => {
      options?.onMessage(responseText, true);
      controller.abort();
    };

    if (res.ok) {
      const reader = res.body?.getReader();
      const decoder = new TextDecoder();

      while (true) {
        // Per-chunk timeout: finish the stream if no new data arrives within TIME_OUT_MS.
        const resTimeoutId = setTimeout(() => finish(), TIME_OUT_MS);
        const content = await reader?.read();
        clearTimeout(resTimeoutId);

        // { stream: true } keeps multi-byte characters intact across chunk boundaries.
        const text = decoder.decode(content?.value, { stream: true });
        responseText += text;

        const done = !content || content.done;
        options?.onMessage(responseText, false);

        if (done) {
          break;
        }
      }

      finish();
    } else {
      console.error("Stream Error");
      options?.onError(new Error("Stream Error"));
    }
  } catch (err) {
    console.error("Network Error", err);
    options?.onError(new Error("Network Error"));
  }
}
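
// Example usage (illustrative sketch; the exampleStreamUsage name and message
// values are hypothetical): requestChatStream reports progress through
// onMessage, passing the accumulated text so far and done=true on the final call.
async function exampleStreamUsage() {
  const history: Message[] = [
    { role: "user", content: "Write a haiku about the sea.", date: new Date().toLocaleString() },
  ];
  await requestChatStream(history, {
    filterBot: true,
    onMessage: (message, done) => {
      // Render the partial reply; `done` marks the final, complete text.
      console.log(done ? "[done]" : "[partial]", message);
    },
    onError: (error) => console.error("[stream error]", error),
  });
}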
export async function requestWithPrompt(messages: Message[], prompt: string) {
  messages = messages.concat([
    {
      role: "user",
      content: prompt,
      date: new Date().toLocaleString(),
    },
  ]);

  const res = await requestChat(messages);

  return res.choices.at(0)?.message?.content ?? "";
}
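
// Example usage (illustrative sketch; the examplePromptUsage name and prompt
// text are hypothetical): requestWithPrompt appends a user message to the
// history and resolves with the first completion's content, or "" if none.
async function examplePromptUsage() {
  const reply = await requestWithPrompt([], "Summarize the conversation so far.");
  console.log(reply);
}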