common.ts

import { NextRequest, NextResponse } from "next/server";

import { getServerSideConfig } from "../config/server";
import { DEFAULT_MODELS, OPENAI_BASE_URL } from "../constant";
import { collectModelTable, collectModels } from "../utils/model";

const serverConfig = getServerSideConfig();

export async function requestOpenai(req: NextRequest) {
  const controller = new AbortController();

  const authValue = req.headers.get("Authorization") ?? "";

  // strip the local "/api/openai/" prefix so only the upstream path (plus query string) remains
  const openaiPath = `${req.nextUrl.pathname}${req.nextUrl.search}`.replaceAll(
    "/api/openai/",
    "",
  );

  let baseUrl = serverConfig.baseUrl ?? OPENAI_BASE_URL;

  if (!baseUrl.startsWith("http")) {
    baseUrl = `https://${baseUrl}`;
  }

  if (baseUrl.endsWith("/")) {
    baseUrl = baseUrl.slice(0, -1);
  }

  console.log("[Proxy] ", openaiPath);
  console.log("[Base Url]", baseUrl);
  console.log("[Org ID]", serverConfig.openaiOrgId);

  // abort the upstream request if it takes longer than 10 minutes
  const timeoutId = setTimeout(
    () => {
      controller.abort();
    },
    10 * 60 * 1000,
  );

  const fetchUrl = `${baseUrl}/${openaiPath}`;
  const fetchOptions: RequestInit = {
    headers: {
      "Content-Type": "application/json",
      "Cache-Control": "no-store",
      Authorization: authValue,
      ...(process.env.OPENAI_ORG_ID && {
        "OpenAI-Organization": process.env.OPENAI_ORG_ID,
      }),
    },
    method: req.method,
    body: req.body,
    // to fix #2485: https://stackoverflow.com/questions/55920957/cloudflare-worker-typeerror-one-time-use-body
    redirect: "manual",
    // @ts-ignore
    duplex: "half",
    signal: controller.signal,
  };

  // #1815 try to refuse gpt4 request
  if (serverConfig.customModels && req.body) {
    try {
      const modelTable = collectModelTable(
        DEFAULT_MODELS,
        serverConfig.customModels,
      );

      // req.text() consumes the request body stream, so reuse the cloned string as the outgoing body
      const clonedBody = await req.text();
      fetchOptions.body = clonedBody;

      const jsonBody = JSON.parse(clonedBody) as { model?: string };

      // not undefined and is false
      if (modelTable[jsonBody?.model ?? ""] === false) {
        return NextResponse.json(
          {
            error: true,
            message: `you are not allowed to use ${jsonBody?.model} model`,
          },
          {
            status: 403,
          },
        );
      }
    } catch (e) {
      console.error("[OpenAI] gpt4 filter", e);
    }
  }

  try {
    const res = await fetch(fetchUrl, fetchOptions);

    // to prevent browser prompt for credentials
    const newHeaders = new Headers(res.headers);
    newHeaders.delete("www-authenticate");

    // to disable nginx buffering
    newHeaders.set("X-Accel-Buffering", "no");

    return new Response(res.body, {
      status: res.status,
      statusText: res.statusText,
      headers: newHeaders,
    });
  } finally {
    clearTimeout(timeoutId);
  }
}
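
Usage (illustrative sketch, not confirmed by this file): a Next.js App Router catch-all route can forward every "/api/openai/*" request through requestOpenai. The route file location, import path, and handler names below are assumptions made for the example.

// app/api/openai/[...path]/route.ts (hypothetical location for this sketch)
import { NextRequest } from "next/server";

import { requestOpenai } from "../../common";

async function handle(req: NextRequest) {
  // hand the incoming request to the shared proxy helper above
  return await requestOpenai(req);
}

// expose the same handler for the methods the proxy should forward
export const GET = handle;
export const POST = handle;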