Merge remote

Hk-Gosuto
2023-12-25 12:40:09 +08:00
parent da29a94714
commit fa2e046285
16 changed files with 47 additions and 31 deletions

View File

@@ -9,15 +9,14 @@ const serverConfig = getServerSideConfig();
 export async function requestOpenai(req: NextRequest) {
   const controller = new AbortController();

+  let authValue = req.headers.get("Authorization") ?? "";
   if (serverConfig.isAzure) {
-    const authValue =
+    authValue =
       req.headers
         .get("Authorization")
         ?.trim()
         .replaceAll("Bearer ", "")
         .trim() ?? "";
-  } else {
-    const authValue = req.headers.get("Authorization") ?? "";
   }

   const authHeaderName = serverConfig.isAzure ? "api-key" : "Authorization";
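
Note on this hunk: the merge keeps a single authValue that defaults to the incoming Authorization header and is only rewritten for Azure, where the key must be sent as a bare value in an "api-key" header rather than as "Bearer <key>". A minimal sketch of the equivalent behaviour, using a hypothetical getAuthHeader helper that is not part of this repository:

    import { NextRequest } from "next/server";

    // Sketch only: derive the upstream auth header name and value from the incoming request.
    // Azure OpenAI expects the bare key in an "api-key" header, so any "Bearer " prefix is
    // stripped; the OpenAI API keeps the standard "Authorization" header untouched.
    function getAuthHeader(req: NextRequest, isAzure: boolean): [name: string, value: string] {
      const raw = req.headers.get("Authorization") ?? "";
      const value = isAzure ? raw.trim().replaceAll("Bearer ", "").trim() : raw;
      return [isAzure ? "api-key" : "Authorization", value];
    }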

View File

@@ -1,13 +1,14 @@
 import { NextRequest, NextResponse } from "next/server";
 import { auth } from "../../auth";
 import S3FileStorage from "../../../utils/s3_file_storage";
+import { ModelProvider } from "@/app/constant";

 async function handle(req: NextRequest) {
   if (req.method === "OPTIONS") {
     return NextResponse.json({ body: "OK" }, { status: 200 });
   }

-  const authResult = auth(req);
+  const authResult = auth(req, ModelProvider.GPT);
   if (authResult.error) {
     return NextResponse.json(authResult, {
       status: 401,
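
Note on this hunk: auth() now takes the model provider as a second argument so the server-side check can validate the key for the backend that is actually being called. A rough sketch of the shape such a helper could have (the enum values and return shape are assumptions for illustration, not taken from this repository):

    import { NextRequest } from "next/server";

    // Assumed shapes for illustration only.
    enum ModelProvider {
      GPT = "GPT",
      GeminiPro = "GeminiPro",
    }

    interface AuthResult {
      error: boolean;
      msg?: string;
    }

    function auth(req: NextRequest, provider: ModelProvider): AuthResult {
      const token = req.headers.get("Authorization")?.replace("Bearer ", "").trim() ?? "";
      if (!token) {
        return { error: true, msg: "empty api key" };
      }
      // A real implementation would branch on provider here, e.g. checking an
      // OpenAI-style key for GPT and a Google API key for GeminiPro.
      return { error: false };
    }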

View File

@@ -1,7 +1,7 @@
 import { NextRequest, NextResponse } from "next/server";
 import { auth } from "../../auth";
 import { getServerSideConfig } from "@/app/config/server";
-import { GEMINI_BASE_URL, Google, ModelProvider } from "@/app/constant";
+import { GEMINI_BASE_URL, ModelProvider } from "@/app/constant";

 async function handle(
   req: NextRequest,
@@ -17,7 +17,7 @@ async function handle(

   const serverConfig = getServerSideConfig();

-  let baseUrl = serverConfig.googleUrl || GEMINI_BASE_URL;
+  let baseUrl = serverConfig.googleBaseUrl || GEMINI_BASE_URL;

   if (!baseUrl.startsWith("http")) {
     baseUrl = `https://${baseUrl}`;
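
Note on this hunk: the resolved code reads the Gemini base URL from a googleBaseUrl config field instead of googleUrl. A minimal sketch of how a server config getter could expose such a field, assuming a GOOGLE_BASE_URL environment variable (the actual field and variable names in this repository may differ):

    // Hypothetical config getter; the environment variable name is an assumption.
    const GEMINI_BASE_URL = "https://generativelanguage.googleapis.com/";

    export function getServerSideConfig() {
      return {
        googleBaseUrl: process.env.GOOGLE_BASE_URL ?? "",
        isAzure: !!process.env.AZURE_URL,
      };
    }

    // Usage mirrors the diff: fall back to the default endpoint when no override is set.
    const baseUrl = getServerSideConfig().googleBaseUrl || GEMINI_BASE_URL;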
@@ -63,7 +63,7 @@ async function handle(
     );
   }

-  const fetchUrl = `${baseUrl}/${path}?key=${key}`;
+  const fetchUrl = `${baseUrl}/${path}?key=${key}&alt=sse`;
   const fetchOptions: RequestInit = {
     headers: {
       "Content-Type": "application/json",

View File

@@ -4,13 +4,14 @@ import { auth } from "@/app/api/auth";
 import { EdgeTool } from "../../../../langchain-tools/edge_tools";
 import { OpenAI } from "langchain/llms/openai";
 import { OpenAIEmbeddings } from "langchain/embeddings/openai";
+import { ModelProvider } from "@/app/constant";

 async function handle(req: NextRequest) {
   if (req.method === "OPTIONS") {
     return NextResponse.json({ body: "OK" }, { status: 200 });
   }
   try {
-    const authResult = auth(req);
+    const authResult = auth(req, ModelProvider.GPT);
     if (authResult.error) {
       return NextResponse.json(authResult, {
         status: 401,

View File

@@ -5,13 +5,14 @@ import { EdgeTool } from "../../../../langchain-tools/edge_tools";
 import { OpenAI } from "langchain/llms/openai";
 import { OpenAIEmbeddings } from "langchain/embeddings/openai";
 import { NodeJSTool } from "@/app/api/langchain-tools/nodejs_tools";
+import { ModelProvider } from "@/app/constant";

 async function handle(req: NextRequest) {
   if (req.method === "OPTIONS") {
     return NextResponse.json({ body: "OK" }, { status: 200 });
   }
   try {
-    const authResult = auth(req);
+    const authResult = auth(req, ModelProvider.GPT);
     if (authResult.error) {
       return NextResponse.json(authResult, {
         status: 401,