123456789101112131415161718192021222324252627282930313233343536373839404142434445464748495051525354555657585960616263646566676869707172737475767778798081828384858687888990919293949596979899100101102103104105106107108109110111112113114115116117118119120121122123124125126127128129130131132133134135136137138139140141142143144145146147148149150151152153154155156157158159160161162163164165 |
- import { getClientConfig } from "../config/client";
- import { ACCESS_CODE_PREFIX, Azure, ServiceProvider } from "../constant";
- import { ChatMessage, ModelType, useAccessStore } from "../store";
- import { ChatGPTApi } from "./platforms/openai";
- import { OauthUserApi } from "@/app/client/platforms/user";
// Chat roles accepted by OpenAI-style completion payloads.
export const ROLES = ["system", "user", "assistant"] as const;
export type MessageRole = (typeof ROLES)[number];

// Known model names; ChatModel aliases the store's ModelType as the canonical model type.
export const Models = ["gpt-3.5-turbo", "gpt-4"] as const;
export type ChatModel = ModelType;
// A single message in a chat request payload.
export interface RequestMessage {
  role: MessageRole;
  content: string;
}
// Sampling/request options forwarded to the model backend.
// Optional fields presumably fall back to provider defaults — TODO confirm against platform impl.
export interface LLMConfig {
  model: string;
  temperature?: number;
  top_p?: number;
  stream?: boolean;
  presence_penalty?: number;
  frequency_penalty?: number;
}
// Arguments for LLMApi.chat().
export interface ChatOptions {
  messages: RequestMessage[];
  config: LLMConfig;
  // Streaming callback — NOTE(review): looks like (accumulated text, newest chunk); confirm in platform impl.
  onUpdate?: (message: string, chunk: string) => void;
  // Invoked with the final message text when the request completes.
  onFinish: (message: string) => void;
  onError?: (err: Error) => void;
  // Hands the AbortController to the caller so the request can be cancelled.
  onController?: (controller: AbortController) => void;
}
// Usage report: amount consumed vs. total quota (units depend on the provider).
export interface LLMUsage {
  used: number;
  total: number;
}
// A model entry as listed by the backing provider.
export interface LLMModel {
  name: string;
  available: boolean;
}
// Contract implemented by concrete chat backends (e.g. ChatGPTApi).
export abstract class LLMApi {
  // Runs one chat request; results are delivered via the ChatOptions callbacks.
  abstract chat(options: ChatOptions): Promise<void>;
  abstract usage(): Promise<LLMUsage>;
  abstract models(): Promise<LLMModel[]>;
}
// Contract for fetching the signed-in user's info (implemented by OauthUserApi).
export abstract class UserApi {
  abstract userinfo(): Promise<Response | undefined>;
}
// NOTE(review): the three declarations below are not referenced anywhere in this
// file — they appear to be scaffolding for multi-provider support; verify usage
// elsewhere before removing.
type ProviderName = "openai" | "azure" | "claude" | "palm";

interface Model {
  name: string;
  provider: ProviderName;
  // Context window size — presumably in tokens; TODO confirm.
  ctxlen: number;
}

interface ChatProvider {
  name: ProviderName;
  apiConfig: {
    baseUrl: string;
    apiKey: string;
    // Model used for conversation summarization.
    summaryModel: Model;
  };
  models: Model[];
  chat: () => void;
  usage: () => void;
}
- export class ClientApi {
- public llm: LLMApi;
- public user: UserApi;
- constructor() {
- this.llm = new ChatGPTApi();
- this.user = new OauthUserApi();
- }
- config() {}
- prompts() {}
- masks() {}
- async share(messages: ChatMessage[], avatarUrl: string | null = null) {
- const msgs = messages
- .map((m) => ({
- from: m.role === "user" ? "human" : "gpt",
- value: m.content,
- }))
- .concat([
- {
- from: "human",
- value:
- "Share from [ChatGPT Next Web]: https://github.com/Yidadaa/ChatGPT-Next-Web",
- },
- ]);
- // 敬告二开开发者们,为了开源大模型的发展,请不要修改上述消息,此消息用于后续数据清洗使用
- // Please do not modify this message
- console.log("[Share]", messages, msgs);
- const clientConfig = getClientConfig();
- const proxyUrl = "/sharegpt";
- const rawUrl = "https://sharegpt.com/api/conversations";
- const shareUrl = clientConfig?.isApp ? rawUrl : proxyUrl;
- const res = await fetch(shareUrl, {
- body: JSON.stringify({
- avatarUrl,
- items: msgs,
- }),
- headers: {
- "Content-Type": "application/json",
- },
- method: "POST",
- });
- const resJson = await res.json();
- console.log("[Share]", resJson);
- if (resJson.id) {
- return `https://shareg.pt/${resJson.id}`;
- }
- }
- }
// Shared singleton; the app uses this instead of constructing ClientApi per call site.
export const api = new ClientApi();
- export function getHeaders() {
- const accessStore = useAccessStore.getState();
- const headers: Record<string, string> = {
- "Content-Type": "application/json",
- "x-requested-with": "XMLHttpRequest",
- };
- const isAzure = accessStore.provider === ServiceProvider.Azure;
- const authHeader = isAzure ? "api-key" : "Authorization";
- const apiKey = isAzure ? accessStore.azureApiKey : accessStore.openaiApiKey;
- const makeBearer = (s: string) => `${isAzure ? "" : "Bearer "}${s.trim()}`;
- const validString = (x: string) => x && x.length > 0;
- // console.log('ServiceProvider', accessStore.provider)
- // use user's api key first
- if (validString(apiKey)) {
- headers[authHeader] = makeBearer(apiKey);
- } else if (accessStore.provider === ServiceProvider.Oauth) {
- headers[authHeader] = makeBearer(accessStore.accessToken);
- } else if (
- accessStore.enabledAccessControl() &&
- validString(accessStore.accessCode)
- ) {
- headers[authHeader] = makeBearer(
- ACCESS_CODE_PREFIX + accessStore.accessCode,
- );
- }
- return headers;
- }
|