Initial commit

Created from https://vercel.com/new
Author: Dakai
Date: 2023-04-02 14:05:05 +00:00
Commit: a6c598c017
84 changed files with 11839 additions and 0 deletions

app/api/access.ts (new file, 17 lines)

@@ -0,0 +1,17 @@
import md5 from "spark-md5";
// Read the comma-separated access codes from the CODE env var and keep
// only their MD5 hashes, so plain-text codes never sit in memory.
export function getAccessCodes(): Set<string> {
  const code = process.env.CODE;

  try {
    const codes = (code?.split(",") ?? [])
      .filter((v) => !!v)
      .map((v) => md5.hash(v.trim()));
    return new Set(codes);
  } catch (e) {
    // If anything goes wrong, behave as if no access codes were configured.
    return new Set();
  }
}
export const ACCESS_CODES = getAccessCodes();
export const IS_IN_DOCKER = process.env.DOCKER;
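
The exported ACCESS_CODES set therefore holds MD5 hashes of whatever codes are configured in CODE. A minimal sketch of how a caller could check a user-supplied code against it follows; the isValidAccessCode helper is illustrative and not part of this commit:

import md5 from "spark-md5";
import { ACCESS_CODES } from "./access";

// Illustrative helper (not in this commit): hash the user-supplied code the
// same way getAccessCodes() hashes the CODE env var, then test membership.
export function isValidAccessCode(userCode: string): boolean {
  // An empty set means no access codes were configured, so allow everyone.
  if (ACCESS_CODES.size === 0) return true;
  return ACCESS_CODES.has(md5.hash(userCode.trim()));
}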


@@ -0,0 +1,52 @@
import { createParser } from "eventsource-parser";
import { NextRequest } from "next/server";
import { requestOpenai } from "../common";
// Convert the upstream SSE response into a plain text stream of delta content.
async function createStream(req: NextRequest) {
  const encoder = new TextEncoder();
  const decoder = new TextDecoder();

  const res = await requestOpenai(req);

  const stream = new ReadableStream({
    async start(controller) {
      function onParse(event: any) {
        if (event.type === "event") {
          const data = event.data;
          // https://beta.openai.com/docs/api-reference/completions/create#completions/create-stream
          if (data === "[DONE]") {
            controller.close();
            return;
          }
          try {
            const json = JSON.parse(data);
            const text = json.choices[0].delta.content;
            const queue = encoder.encode(text);
            controller.enqueue(queue);
          } catch (e) {
            controller.error(e);
          }
        }
      }

      const parser = createParser(onParse);
      for await (const chunk of res.body as any) {
        parser.feed(decoder.decode(chunk));
      }
    },
  });
  return stream;
}
export async function POST(req: NextRequest) {
  try {
    const stream = await createStream(req);
    return new Response(stream);
  } catch (error) {
    console.error("[Chat Stream]", error);
  }
}

export const config = {
  runtime: "edge",
};
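
The POST handler returns the ReadableStream produced above directly, so the client receives plain text chunks rather than raw SSE frames. A rough sketch of a browser-side consumer, assuming this route is reachable at /api/chat-stream (the file path is not shown in this excerpt) and ignoring the token/path headers that requestOpenai() in common.ts also expects:

// Sketch of a client reading the plain-text stream this route emits.
async function readChatStream(body: unknown, onText: (text: string) => void) {
  // "/api/chat-stream" is an assumed mount point for the route above.
  const res = await fetch("/api/chat-stream", {
    method: "POST",
    headers: { "Content-Type": "application/json" },
    body: JSON.stringify(body),
  });

  const reader = res.body!.getReader();
  const decoder = new TextDecoder();

  // Each chunk is already decoded delta text, so appending chunks in order
  // reconstructs the assistant's full reply.
  while (true) {
    const { done, value } = await reader.read();
    if (done) break;
    onText(decoder.decode(value, { stream: true }));
  }
}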

app/api/common.ts (new file, 22 lines)

@@ -0,0 +1,22 @@
import { NextRequest } from "next/server";
const OPENAI_URL = "api.openai.com";
const DEFAULT_PROTOCOL = "https";
const PROTOCOL = process.env.PROTOCOL ?? DEFAULT_PROTOCOL;
const BASE_URL = process.env.BASE_URL ?? OPENAI_URL;
export async function requestOpenai(req: NextRequest) {
  const apiKey = req.headers.get("token");
  const openaiPath = req.headers.get("path");

  console.log("[Proxy] ", openaiPath);

  return fetch(`${PROTOCOL}://${BASE_URL}/${openaiPath}`, {
    headers: {
      "Content-Type": "application/json",
      Authorization: `Bearer ${apiKey}`,
    },
    method: req.method,
    body: req.body,
  });
}
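
requestOpenai() builds the upstream URL from PROTOCOL, BASE_URL, and the incoming path header, and forwards the token header as a Bearer key. A hedged sketch of what a caller has to send for the proxy to work; the endpoint path, model, and key are illustrative values, not taken from this commit:

// Illustrative request to the proxy route in app/api/openai/route.ts.
// The "path" header picks the upstream OpenAI endpoint; "token" carries
// the API key that ends up as "Authorization: Bearer <token>".
async function proxyChatCompletion(apiKey: string) {
  return fetch("/api/openai", {
    method: "POST",
    headers: {
      "Content-Type": "application/json",
      path: "v1/chat/completions", // appended to `${PROTOCOL}://${BASE_URL}/`
      token: apiKey,
    },
    body: JSON.stringify({
      model: "gpt-3.5-turbo",
      messages: [{ role: "user", content: "Hello!" }],
    }),
  });
}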

app/api/openai/route.ts (new file, 30 lines)

@@ -0,0 +1,30 @@
import { NextRequest, NextResponse } from "next/server";
import { requestOpenai } from "../common";
async function makeRequest(req: NextRequest) {
  try {
    const api = await requestOpenai(req);
    const res = new NextResponse(api.body);
    res.headers.set("Content-Type", "application/json");
    return res;
  } catch (e) {
    console.error("[OpenAI] ", req.body, e);
    return NextResponse.json(
      {
        error: true,
        msg: JSON.stringify(e),
      },
      {
        status: 500,
      },
    );
  }
}

export async function POST(req: NextRequest) {
  return makeRequest(req);
}

export async function GET(req: NextRequest) {
  return makeRequest(req);
}
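
On failure this route answers with HTTP 500 and a JSON body of the shape { error: true, msg: "..." }; on success it relays the upstream body unchanged. A small sketch of how a caller might tell the two apart (the helper name is illustrative):

// Illustrative handling of the proxy's { error, msg } error envelope.
async function callOpenaiProxy(init: RequestInit) {
  const res = await fetch("/api/openai", init);
  const data = await res.json();

  if (!res.ok || data?.error) {
    // msg is JSON.stringify of whatever makeRequest() caught.
    throw new Error(`[OpenAI proxy] ${data?.msg ?? res.statusText}`);
  }
  return data;
}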

app/api/openai/typing.ts (new file, 7 lines)

@@ -0,0 +1,7 @@
import type {
  CreateChatCompletionRequest,
  CreateChatCompletionResponse,
} from "openai";

export type ChatRequest = CreateChatCompletionRequest;
export type ChatReponse = CreateChatCompletionResponse;
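
These aliases simply re-export the request/response types from the openai package, so route code can stay decoupled from that dependency. A minimal value of type ChatRequest might look like this (the model name is illustrative):

import type { ChatRequest } from "./typing";

// Minimal chat completion request typed with the alias above.
const exampleRequest: ChatRequest = {
  model: "gpt-3.5-turbo",
  messages: [{ role: "user", content: "Hello!" }],
};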