mirror of https://github.com/ChatGPTNextWeb/ChatGPT-Next-Web.git
synced 2025-11-14 13:03:49 +08:00
alpha version
@@ -7,7 +7,7 @@ import {
} from "../constant";
import { ChatMessage, ModelType, useAccessStore, useChatStore } from "../store";
import { ChatGPTApi } from "./platforms/openai";
import { FileApi } from "./platforms/utils";
import { FileApi, FileInfo } from "./platforms/utils";
import { GeminiProApi } from "./platforms/google";
export const ROLES = ["system", "user", "assistant"] as const;
export type MessageRole = (typeof ROLES)[number];
@@ -27,6 +27,7 @@ export interface MultimodalContent {
export interface RequestMessage {
  role: MessageRole;
  content: string | MultimodalContent[];
  fileInfos?: FileInfo[];
}

export interface LLMConfig {
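For illustration, the new optional fileInfos field lets a chat message carry metadata about attached files. A minimal sketch follows (FileInfo's exact fields live in ./platforms/utils and are not visible in this diff, so the entry below is a placeholder, hence the cast):

// Sketch only: attaching file metadata to a RequestMessage.
// FileInfo's real shape is not shown in this commit; this object is illustrative.
const userMessage: RequestMessage = {
  role: "user",
  content: "Summarize the attached report.",
  fileInfos: [{ originalFilename: "report.pdf" } as unknown as FileInfo],
};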
@@ -74,6 +75,7 @@ export interface ChatOptions {
}

export interface AgentChatOptions {
  chatSessionId?: string;
  messages: RequestMessage[];
  config: LLMConfig;
  agentConfig: LLMAgentConfig;
@@ -84,6 +86,13 @@ export interface AgentChatOptions {
  onController?: (controller: AbortController) => void;
}

export interface CreateRAGStoreOptions {
  chatSessionId: string;
  fileInfos: FileInfo[];
  onError?: (err: Error) => void;
  onController?: (controller: AbortController) => void;
}

export interface LLMUsage {
  used: number;
  total: number;
@@ -106,6 +115,7 @@ export abstract class LLMApi {
  abstract speech(options: SpeechOptions): Promise<ArrayBuffer>;
  abstract transcription(options: TranscriptionOptions): Promise<string>;
  abstract toolAgentChat(options: AgentChatOptions): Promise<void>;
  abstract createRAGSore(options: CreateRAGStoreOptions): Promise<void>;
  abstract usage(): Promise<LLMUsage>;
  abstract models(): Promise<LLMModel[]>;
}
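Taken together, CreateRAGStoreOptions and the new abstract createRAGSore method imply a call pattern roughly like the sketch below (the api instance and uploadedFiles variable are assumptions, not code from this commit; the method name keeps the spelling used here):

// Sketch: asking a concrete LLMApi (e.g. ChatGPTApi) to build a RAG store
// for the current chat session from previously uploaded files.
async function buildRagStore(
  api: LLMApi,
  sessionId: string,
  uploadedFiles: FileInfo[],
) {
  const options: CreateRAGStoreOptions = {
    chatSessionId: sessionId,
    fileInfos: uploadedFiles,
    onError: (err) => console.error("[RAG] store creation failed", err),
    onController: (controller) => {
      // keep a reference here if the UI needs to cancel the request later
    },
  };
  await api.createRAGSore(options);
}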
@@ -213,8 +223,8 @@ export function getHeaders(ignoreHeaders?: boolean) {
  const apiKey = isGoogle
    ? accessStore.googleApiKey
    : isAzure
    ? accessStore.azureApiKey
    : accessStore.openaiApiKey;
    ? accessStore.azureApiKey
    : accessStore.openaiApiKey;

  const makeBearer = (s: string) =>
    `${isGoogle || isAzure ? "" : "Bearer "}${s.trim()}`;
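The makeBearer helper only adds the "Bearer " prefix on the OpenAI path; Google and Azure keys are sent bare. A standalone restatement of that rule (the explicit flag parameters are added here purely for illustration):

// Sketch of the prefixing rule used by getHeaders():
const bearerValue = (key: string, isGoogle: boolean, isAzure: boolean) =>
  `${isGoogle || isAzure ? "" : "Bearer "}${key.trim()}`;

bearerValue(" sk-123 ", false, false); // "Bearer sk-123"  (OpenAI)
bearerValue(" azure-key ", false, true); // "azure-key"    (Azure)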
@@ -2,6 +2,7 @@ import { Google, REQUEST_TIMEOUT_MS } from "@/app/constant";
|
||||
import {
|
||||
AgentChatOptions,
|
||||
ChatOptions,
|
||||
CreateRAGStoreOptions,
|
||||
getHeaders,
|
||||
LLMApi,
|
||||
LLMModel,
|
||||
@@ -19,6 +20,9 @@ import {
} from "@/app/utils";

export class GeminiProApi implements LLMApi {
  createRAGSore(options: CreateRAGStoreOptions): Promise<void> {
    throw new Error("Method not implemented.");
  }
  transcription(options: TranscriptionOptions): Promise<string> {
    throw new Error("Method not implemented.");
  }
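Since GeminiProApi only stubs createRAGSore (and transcription), any code path that works across providers has to tolerate the throw. A sketch, not code from this commit:

// Sketch: RAG store creation is only implemented by the OpenAI client here;
// GeminiProApi.createRAGSore throws "Method not implemented.".
async function tryCreateRagStore(api: LLMApi, options: CreateRAGStoreOptions) {
  try {
    await api.createRAGSore(options);
  } catch (e) {
    console.warn("[RAG] provider does not support RAG store creation", e);
  }
}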
@@ -12,6 +12,7 @@ import { useAccessStore, useAppConfig, useChatStore } from "@/app/store";
|
||||
import {
|
||||
AgentChatOptions,
|
||||
ChatOptions,
|
||||
CreateRAGStoreOptions,
|
||||
getHeaders,
|
||||
LLMApi,
|
||||
LLMModel,
|
||||
@@ -362,6 +363,34 @@ export class ChatGPTApi implements LLMApi {
    }
  }

  async createRAGSore(options: CreateRAGStoreOptions): Promise<void> {
    try {
      const accessStore = useAccessStore.getState();
      const isAzure = accessStore.provider === ServiceProvider.Azure;
      let baseUrl = isAzure ? accessStore.azureUrl : accessStore.openaiUrl;
      const requestPayload = {
        sessionId: options.chatSessionId,
        fileInfos: options.fileInfos,
        baseUrl: baseUrl,
      };
      console.log("[Request] openai payload: ", requestPayload);
      const controller = new AbortController();
      options.onController?.(controller);
      let path = "/api/langchain/rag/store";
      const chatPayload = {
        method: "POST",
        body: JSON.stringify(requestPayload),
        signal: controller.signal,
        headers: getHeaders(),
      };
      const res = await fetch(path, chatPayload);
      if (res.status !== 200) throw new Error(await res.text());
    } catch (e) {
      console.log("[Request] failed to make a chat reqeust", e);
      options.onError?.(e as Error);
    }
  }

  async toolAgentChat(options: AgentChatOptions) {
    const messages = options.messages.map((v) => ({
      role: v.role,
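Because createRAGSore hands its AbortController to onController, the UI can cancel an in-flight store build: aborting makes fetch reject, which lands in the catch block and triggers onError. A sketch, assuming a session object and an uploadedFiles list that are not part of this commit:

// Sketch: cancelling an in-flight RAG store build.
let ragController: AbortController | null = null;

void api.createRAGSore({
  chatSessionId: session.id, // hypothetical session object
  fileInfos: uploadedFiles, // from a prior FileApi upload step
  onController: (c) => (ragController = c),
  onError: (err) => console.error("[RAG] build failed", err),
});

// Later, e.g. when the user clicks "Stop":
ragController?.abort();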
@@ -379,6 +408,7 @@ export class ChatGPTApi implements LLMApi {
    const isAzure = accessStore.provider === ServiceProvider.Azure;
    let baseUrl = isAzure ? accessStore.azureUrl : accessStore.openaiUrl;
    const requestPayload = {
      chatSessionId: options.chatSessionId,
      messages,
      isAzure,
      azureApiVersion: accessStore.azureApiVersion,