route.ts

import {
  createParser,
  ParsedEvent,
  ReconnectInterval,
} from "eventsource-parser";
import { NextRequest } from "next/server";

import { requestOpenai } from "../common";

async function createStream(req: NextRequest) {
  const encoder = new TextEncoder();
  const decoder = new TextDecoder();

  // Forward the incoming request to the OpenAI API.
  const res = await requestOpenai(req);

  // If the upstream response is not an event stream, it is an error payload:
  // mask the API key echoed in the error message and return it as JSON text.
  const contentType = res.headers.get("Content-Type") ?? "";
  if (!contentType.includes("stream")) {
    const content = (await res.text()).replace(
      /provided:.*. You/,
      "provided: ***. You",
    );
    console.log("[Stream] error ", content);
    return "```json\n" + content + "\n```";
  }

  const stream = new ReadableStream({
    async start(controller) {
      function onParse(event: ParsedEvent | ReconnectInterval) {
        if (event.type === "event") {
          const data = event.data;
          // https://beta.openai.com/docs/api-reference/completions/create#completions/create-stream
          if (data === "[DONE]") {
            controller.close();
            return;
          }
          try {
            const json = JSON.parse(data);
            // Role-only deltas carry no content, so fall back to an empty string.
            const text = json.choices[0].delta?.content ?? "";
            const queue = encoder.encode(text);
            controller.enqueue(queue);
          } catch (e) {
            controller.error(e);
          }
        }
      }

      // Feed the upstream SSE body into the parser chunk by chunk;
      // the parser invokes onParse for every complete event.
      const parser = createParser(onParse);
      for await (const chunk of res.body as any) {
        parser.feed(decoder.decode(chunk, { stream: true }));
      }
    },
  });
  return stream;
}
export async function POST(req: NextRequest) {
  try {
    const stream = await createStream(req);
    return new Response(stream);
  } catch (error) {
    console.error("[Chat Stream]", error);
    // Surface the failure to the client as a fenced JSON block.
    return new Response(
      ["```json\n", JSON.stringify(error, null, " "), "\n```"].join(""),
    );
  }
}

export const runtime = "edge";
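
// Usage sketch (illustrative only, not part of this route): a client could
// consume the streamed response incrementally via fetch and a stream reader.
// The path "/api/chat-stream" and the request payload are assumptions here;
// the actual payload shape depends on what requestOpenai() expects.
//
// const res = await fetch("/api/chat-stream", { method: "POST", body });
// const reader = res.body!.getReader();
// const dec = new TextDecoder();
// let answer = "";
// for (;;) {
//   const { done, value } = await reader.read();
//   if (done) break;
//   answer += dec.decode(value, { stream: true });
// }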