route.ts

import type { ChatRequest } from "../chat/typing";
import { createParser } from "eventsource-parser";
import { NextRequest } from "next/server";

const apiKey = process.env.OPENAI_API_KEY;

async function createStream(payload: ReadableStream<Uint8Array>) {
  const encoder = new TextEncoder();
  const decoder = new TextDecoder();

  // Forward the incoming request body straight to the OpenAI chat completions endpoint.
  const res = await fetch("https://api.openai.com/v1/chat/completions", {
    headers: {
      "Content-Type": "application/json",
      Authorization: `Bearer ${apiKey}`,
    },
    method: "POST",
    body: payload,
  });

  // Re-emit the SSE response as a plain text stream containing only the content deltas.
  const stream = new ReadableStream({
    async start(controller) {
      function onParse(event: any) {
        if (event.type === "event") {
          const data = event.data;
          // https://beta.openai.com/docs/api-reference/completions/create#completions/create-stream
          if (data === "[DONE]") {
            controller.close();
            return;
          }
          try {
            const json = JSON.parse(data);
            const text = json.choices[0].delta.content;
            const queue = encoder.encode(text);
            controller.enqueue(queue);
          } catch (e) {
            controller.error(e);
          }
        }
      }

      const parser = createParser(onParse);
      for await (const chunk of res.body as any) {
        parser.feed(decoder.decode(chunk));
      }
    },
  });
  return stream;
}

export async function POST(req: NextRequest) {
  try {
    const stream = await createStream(req.body!);
    return new Response(stream);
  } catch (error) {
    console.error("[Chat Stream]", error);
    // Return an explicit error response so the handler always yields a Response.
    return new Response("Internal Server Error", { status: 500 });
  }
}

export const config = {
  runtime: "edge",
};
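
For reference, a minimal client-side sketch of how a route like this could be consumed with the standard fetch streaming API. The "/api/chat" path, the model name, and the request body shape are assumptions for illustration and are not defined in this file.

// Hypothetical client usage: POST a chat payload and read the streamed text deltas.
async function readChatStream(messages: { role: string; content: string }[]) {
  const res = await fetch("/api/chat", {
    method: "POST",
    headers: { "Content-Type": "application/json" },
    // Assumed payload shape; the route forwards the body verbatim to OpenAI.
    body: JSON.stringify({ model: "gpt-3.5-turbo", messages, stream: true }),
  });
  if (!res.ok || !res.body) throw new Error(`Request failed: ${res.status}`);

  const reader = res.body.getReader();
  const decoder = new TextDecoder();
  let text = "";
  for (;;) {
    const { done, value } = await reader.read();
    if (done) break;
    // Each chunk is a piece of the assistant's reply; append (or render) incrementally.
    text += decoder.decode(value, { stream: true });
  }
  return text;
}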