api.ts

import { getClientConfig } from "../config/client";
import { ACCESS_CODE_PREFIX, Azure, ServiceProvider } from "../constant";
import { ChatMessage, ModelType, useAccessStore } from "../store";
import { ChatGPTApi } from "./platforms/openai";
import { OauthUserApi } from "@/app/client/platforms/user";

export const ROLES = ["system", "user", "assistant"] as const;
export type MessageRole = (typeof ROLES)[number];

export const Models = ["gpt-3.5-turbo", "gpt-4"] as const;
export type ChatModel = ModelType;

export interface RequestMessage {
  role: MessageRole;
  content: string;
}

export interface LLMConfig {
  model: string;
  temperature?: number;
  top_p?: number;
  stream?: boolean;
  presence_penalty?: number;
  frequency_penalty?: number;
}
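
// Options passed to LLMApi.chat(): the message history, the per-request model
// config, and lifecycle callbacks (onUpdate for each streamed chunk, onFinish
// with the final reply, onError on failure, onController to expose the
// AbortController so the request can be cancelled).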
export interface ChatOptions {
  messages: RequestMessage[];
  config: LLMConfig;

  onUpdate?: (message: string, chunk: string) => void;
  onFinish: (message: string) => void;
  onError?: (err: Error) => void;
  onController?: (controller: AbortController) => void;
}

export interface LLMUsage {
  used: number;
  total: number;
}

export interface LLMModel {
  name: string;
  available: boolean;
}
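
// Abstract interface every chat backend implements; ChatGPTApi (imported
// above) is the concrete implementation wired into ClientApi below.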
export abstract class LLMApi {
  abstract chat(options: ChatOptions): Promise<void>;
  abstract usage(): Promise<LLMUsage>;
  abstract models(): Promise<LLMModel[]>;
}

export abstract class UserApi {
  abstract userinfo(): Promise<Response | undefined>;
}
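
// Module-private provider metadata: the backend name, its models, and the
// per-provider API configuration (base URL, key, summarization model).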
type ProviderName = "openai" | "azure" | "claude" | "palm";

interface Model {
  name: string;
  provider: ProviderName;
  ctxlen: number;
}

interface ChatProvider {
  name: ProviderName;
  apiConfig: {
    baseUrl: string;
    apiKey: string;
    summaryModel: Model;
  };
  models: Model[];

  chat: () => void;
  usage: () => void;
}
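
// Facade exposed to the rest of the app: bundles the LLM client and the
// OAuth user client, plus the ShareGPT export helper below.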
export class ClientApi {
  public llm: LLMApi;
  public user: UserApi;

  constructor() {
    this.llm = new ChatGPTApi();
    this.user = new OauthUserApi();
  }

  config() {}

  prompts() {}

  masks() {}
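
  // Converts the chat history into ShareGPT's { from, value } format, appends
  // an attribution message, and POSTs the payload to ShareGPT (directly in
  // the desktop app, via the local /sharegpt proxy in the browser). Returns
  // the short share link when the API responds with an id.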
  async share(messages: ChatMessage[], avatarUrl: string | null = null) {
    const msgs = messages
      .map((m) => ({
        from: m.role === "user" ? "human" : "gpt",
        value: m.content,
      }))
      .concat([
        {
          from: "human",
          value:
            "Share from [ChatGPT Next Web]: https://github.com/Yidadaa/ChatGPT-Next-Web",
        },
      ]);
    // Notice to developers of forks: for the sake of open-source LLM
    // development, please do not modify the message above; it is used for
    // later data cleaning.
    // Please do not modify this message
    console.log("[Share]", messages, msgs);

    const clientConfig = getClientConfig();
    const proxyUrl = "/sharegpt";
    const rawUrl = "https://sharegpt.com/api/conversations";
    const shareUrl = clientConfig?.isApp ? rawUrl : proxyUrl;
    const res = await fetch(shareUrl, {
      body: JSON.stringify({
        avatarUrl,
        items: msgs,
      }),
      headers: {
        "Content-Type": "application/json",
      },
      method: "POST",
    });

    const resJson = await res.json();
    console.log("[Share]", resJson);
    if (resJson.id) {
      return `https://shareg.pt/${resJson.id}`;
    }
  }
}

export const api = new ClientApi();
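
// Builds the auth headers for outgoing requests. Azure uses the "api-key"
// header with the raw key; every other provider gets "Authorization: Bearer".
// Credential priority: the user's own API key, then the OAuth access token
// (when the Oauth provider is selected), then the server access code.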
export function getHeaders() {
  const accessStore = useAccessStore.getState();
  const headers: Record<string, string> = {
    "Content-Type": "application/json",
    "x-requested-with": "XMLHttpRequest",
  };

  const isAzure = accessStore.provider === ServiceProvider.Azure;
  const authHeader = isAzure ? "api-key" : "Authorization";
  const apiKey = isAzure ? accessStore.azureApiKey : accessStore.openaiApiKey;

  const makeBearer = (s: string) => `${isAzure ? "" : "Bearer "}${s.trim()}`;
  const validString = (x: string) => x && x.length > 0;

  // console.log('ServiceProvider', accessStore.provider)
  // use user's api key first
  if (validString(apiKey)) {
    headers[authHeader] = makeBearer(apiKey);
  } else if (accessStore.provider === ServiceProvider.Oauth) {
    headers[authHeader] = makeBearer(accessStore.accessToken);
  } else if (
    accessStore.enabledAccessControl() &&
    validString(accessStore.accessCode)
  ) {
    headers[authHeader] = makeBearer(
      ACCESS_CODE_PREFIX + accessStore.accessCode,
    );
  }

  return headers;
}
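
// Illustrative usage sketch (not part of this module); the message and config
// values below are placeholders:
//
//   await api.llm.chat({
//     messages: [{ role: "user", content: "Hello" }],
//     config: { model: "gpt-3.5-turbo", stream: true },
//     onUpdate: (_message, chunk) => console.log(chunk),
//     onFinish: (message) => console.log("[Done]", message),
//     onError: (err) => console.error(err),
//   });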