requests.ts

import type { ChatRequest, ChatReponse } from "./api/chat/typing";
import { Message } from "./store";

const TIME_OUT_MS = 30000;
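
// Build the ChatRequest body from the chat history; optionally drop
// assistant replies (filterBot) and ask the backend for a streamed response.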
const makeRequestParam = (
  messages: Message[],
  options?: {
    filterBot?: boolean;
    stream?: boolean;
  }
): ChatRequest => {
  let sendMessages = messages.map((v) => ({
    role: v.role,
    content: v.content,
  }));

  if (options?.filterBot) {
    sendMessages = sendMessages.filter((m) => m.role !== "assistant");
  }

  return {
    model: "gpt-3.5-turbo",
    messages: sendMessages,
    stream: options?.stream,
  };
};
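
// One-shot (non-streaming) request; bot replies are filtered out of the
// history before sending.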
export async function requestChat(messages: Message[]) {
  const req: ChatRequest = makeRequestParam(messages, { filterBot: true });

  const res = await fetch("/api/chat", {
    method: "POST",
    headers: {
      "Content-Type": "application/json",
    },
    body: JSON.stringify(req),
  });

  return (await res.json()) as ChatReponse;
}
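
// Streamed variant: reads the response body incrementally and reports the
// accumulated text through options.onMessage; done === true marks the end.
// The request is aborted if nothing arrives within TIME_OUT_MS.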
export async function requestChatStream(
  messages: Message[],
  options?: {
    filterBot?: boolean;
    onMessage: (message: string, done: boolean) => void;
    onError: (error: Error) => void;
  }
) {
  const req = makeRequestParam(messages, {
    stream: true,
    filterBot: options?.filterBot,
  });

  // Abort the request itself if the server does not answer within TIME_OUT_MS.
  const controller = new AbortController();
  const reqTimeoutId = setTimeout(() => controller.abort(), TIME_OUT_MS);

  try {
    const res = await fetch("/api/chat-stream", {
      method: "POST",
      headers: {
        "Content-Type": "application/json",
      },
      body: JSON.stringify(req),
      signal: controller.signal,
    });
    clearTimeout(reqTimeoutId);

    let responseText = "";

    const finish = () => {
      options?.onMessage(responseText, true);
      controller.abort();
    };

    if (res.ok) {
      const reader = res.body?.getReader();
      const decoder = new TextDecoder();

      while (true) {
        // Stop reading if no new chunk arrives within TIME_OUT_MS (30s).
        const resTimeoutId = setTimeout(() => finish(), TIME_OUT_MS);
        const content = await reader?.read();
        clearTimeout(resTimeoutId);

        const text = decoder.decode(content?.value);
        responseText += text;

        const done = !content || content.done;
        options?.onMessage(responseText, false);

        if (done) {
          break;
        }
      }

      finish();
    } else {
      console.error("Stream Error");
      options?.onError(new Error("Stream Error"));
    }
  } catch (err) {
    console.error("Network Error");
    options?.onError(new Error("Network Error"));
  }
}
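
// Append `prompt` as a user message, send a non-streaming request, and
// return the content of the first choice (empty string if none).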
export async function requestWithPrompt(messages: Message[], prompt: string) {
  messages = messages.concat([
    {
      role: "user",
      content: prompt,
      date: new Date().toLocaleString(),
    },
  ]);

  const res = await requestChat(messages);

  return res.choices.at(0)?.message?.content ?? "";
}
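
/*
 * Example usage (a minimal sketch; the Message shape beyond role/content/date
 * and any surrounding UI wiring are assumptions, not part of this module):
 *
 *   const history: Message[] = [
 *     { role: "user", content: "Hello", date: new Date().toLocaleString() },
 *   ];
 *
 *   requestChatStream(history, {
 *     filterBot: true,
 *     onMessage: (text, done) => console.log(done ? "[done]" : "[chunk]", text),
 *     onError: (err) => console.error(err),
 *   });
 */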