Merge remote-tracking branch 'upstream/main'

DirkSchlossmacher committed 2024-07-16 07:43:08 +02:00
53 changed files with 2916 additions and 569 deletions

app/utils/baidu.ts Normal file

@@ -0,0 +1,23 @@
import { BAIDU_OATUH_URL } from "../constant";
/**
* Generate an authentication signature (Access Token) using the AK/SK pair
* @return access token information
*/
export async function getAccessToken(
clientId: string,
clientSecret: string,
): Promise<{
access_token: string;
expires_in: number;
error?: number;
}> {
const res = await fetch(
`${BAIDU_OATUH_URL}?grant_type=client_credentials&client_id=${clientId}&client_secret=${clientSecret}`,
{
method: "POST",
mode: "cors",
},
);
const resJson = await res.json();
return resJson;
}
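
For orientation, a minimal caller sketch for the new helper. The credentials are placeholders, and caching the token client-side for roughly expires_in seconds is an assumption about typical usage, not something this commit prescribes.

// Hypothetical caller: fetch a Baidu access token and reuse it until shortly before it expires.
let cachedBaiduToken: { value: string; expiresAt: number } | undefined;

async function ensureBaiduAccessToken(clientId: string, clientSecret: string) {
  const now = Date.now();
  if (cachedBaiduToken && now < cachedBaiduToken.expiresAt) {
    return cachedBaiduToken.value;
  }
  const { access_token, expires_in, error } = await getAccessToken(clientId, clientSecret);
  if (error) {
    throw new Error(`Baidu OAuth request failed with error code ${error}`);
  }
  // expires_in is in seconds; refresh a minute early to avoid sending a stale token.
  cachedBaiduToken = { value: access_token, expiresAt: now + (expires_in - 60) * 1000 };
  return access_token;
}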

(deleted file)

@@ -1,21 +0,0 @@
import { useAccessStore } from "../store/access";
import { useAppConfig } from "../store/config";
import { collectModels } from "./model";
export function identifyDefaultClaudeModel(modelName: string) {
const accessStore = useAccessStore.getState();
const configStore = useAppConfig.getState();
const allModals = collectModels(
configStore.models,
[configStore.customModels, accessStore.customModels].join(","),
);
const modelMeta = allModals.find((m) => m.name === modelName);
return (
modelName.startsWith("claude") &&
modelMeta &&
modelMeta.provider?.providerType === "anthropic"
);
}

app/utils/cloudflare.ts Normal file

@@ -0,0 +1,26 @@
export function cloudflareAIGatewayUrl(fetchUrl: string) {
// rebuild fetchUrl, if using cloudflare ai gateway
// document: https://developers.cloudflare.com/ai-gateway/providers/openai/
const paths = fetchUrl.split("/");
if ("gateway.ai.cloudflare.com" == paths[2]) {
// is cloudflare.com ai gateway
// https://gateway.ai.cloudflare.com/v1/{account_id}/{gateway_id}/azure-openai/{resource_name}/{deployment_name}/chat/completions?api-version=2023-05-15
if ("azure-openai" == paths[6]) {
// is azure gateway
return paths.slice(0, 8).concat(paths.slice(-3)).join("/"); // rebuild ai gateway azure_url
}
// https://gateway.ai.cloudflare.com/v1/{account_id}/{gateway_id}/openai/chat/completions
if ("openai" == paths[6]) {
// is openai gateway
return paths.slice(0, 7).concat(paths.slice(-2)).join("/"); // rebuild ai gateway openai_url
}
// https://gateway.ai.cloudflare.com/v1/{account_id}/{gateway_id}/anthropic/v1/messages
if ("anthropic" == paths[6]) {
// is anthropic gateway
return paths.slice(0, 7).concat(paths.slice(-2)).join("/"); // rebuild ai gateway anthropic_url
}
// TODO: Amazon Bedrock, Groq, HuggingFace...
}
return fetchUrl;
}
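
A worked example of the rewrite (the account and gateway ids are made up). When the client appends its usual OpenAI path to a gateway base URL, the duplicated "v1" segment is stripped; URLs that do not point at the gateway host pass through unchanged.

// Gateway URL with a stray "v1" segment left over from the normal OpenAI path:
cloudflareAIGatewayUrl(
  "https://gateway.ai.cloudflare.com/v1/acct123/gw1/openai/v1/chat/completions",
);
// -> "https://gateway.ai.cloudflare.com/v1/acct123/gw1/openai/chat/completions"

// Non-gateway URLs are returned as-is:
cloudflareAIGatewayUrl("https://api.openai.com/v1/chat/completions");
// -> "https://api.openai.com/v1/chat/completions"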

(modified file)

@@ -11,7 +11,12 @@ export function useAllModels() {
[configStore.customModels, accessStore.customModels].join(","),
accessStore.defaultModel,
);
}, [accessStore.customModels, configStore.customModels, configStore.models]);
}, [
accessStore.customModels,
accessStore.defaultModel,
configStore.customModels,
configStore.models,
]);
return models;
}
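
Read in full, the hook after this hunk looks roughly like the sketch below; the store setup lines are reconstructed for context and are not part of the diff. The point of the change is that accessStore.defaultModel now appears in the dependency list, so the memoized model list is recomputed when the server-configured default model changes instead of going stale.

// (imports of useMemo, the stores, and collectModelsWithDefaultModel omitted)
export function useAllModels() {
  const accessStore = useAccessStore();
  const configStore = useAppConfig();
  const models = useMemo(() => {
    return collectModelsWithDefaultModel(
      configStore.models,
      [configStore.customModels, accessStore.customModels].join(","),
      accessStore.defaultModel,
    );
  }, [
    accessStore.customModels,
    accessStore.defaultModel,
    configStore.customModels,
    configStore.models,
  ]);

  return models;
}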

(modified file)

@@ -1,8 +1,9 @@
import { DEFAULT_MODELS } from "../constant";
import { LLMModel } from "../client/api";
const customProvider = (modelName: string) => ({
id: modelName,
providerName: "",
const customProvider = (providerName: string) => ({
id: providerName.toLowerCase(),
providerName: providerName,
providerType: "custom",
});
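
To make the signature change concrete: the helper is now keyed off a provider name rather than a model name (the value below is illustrative).

customProvider("ByteDance");
// -> { id: "bytedance", providerName: "ByteDance", providerType: "custom" }
// Previously the model name was used as the id and providerName was left empty.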
@@ -23,7 +24,8 @@ export function collectModelTable(
// default models
models.forEach((m) => {
modelTable[m.name] = {
// using <modelName>@<providerId> as fullName
modelTable[`${m.name}@${m?.provider?.id}`] = {
...m,
displayName: m.name, // 'provider' is copied over if it exists
};
@@ -37,7 +39,7 @@ export function collectModelTable(
const available = !m.startsWith("-");
const nameConfig =
m.startsWith("+") || m.startsWith("-") ? m.slice(1) : m;
const [name, displayName] = nameConfig.split("=");
let [name, displayName] = nameConfig.split("=");
// enable or disable all models
if (name === "all") {
@@ -45,12 +47,45 @@ export function collectModelTable(
(model) => (model.available = available),
);
} else {
modelTable[name] = {
name,
displayName: displayName || name,
available,
provider: modelTable[name]?.provider ?? customProvider(name), // Use optional chaining
};
// 1. find model by name, and set available value
const [customModelName, customProviderName] = name.split("@");
let count = 0;
for (const fullName in modelTable) {
const [modelName, providerName] = fullName.split("@");
if (
customModelName == modelName &&
(customProviderName === undefined ||
customProviderName === providerName)
) {
count += 1;
modelTable[fullName]["available"] = available;
// swap name and displayName for bytedance
if (providerName === "bytedance") {
[name, displayName] = [displayName, modelName];
modelTable[fullName]["name"] = name;
}
if (displayName) {
modelTable[fullName]["displayName"] = displayName;
}
}
}
// 2. if model not exists, create new model with available value
if (count === 0) {
let [customModelName, customProviderName] = name.split("@");
const provider = customProvider(
customProviderName || customModelName,
);
// swap name and displayName for bytedance
if (displayName && provider.providerName == "ByteDance") {
[customModelName, displayName] = [displayName, customModelName];
}
modelTable[`${customModelName}@${provider?.id}`] = {
name: customModelName,
displayName: displayName || customModelName,
available,
provider, // Use optional chaining
};
}
}
});
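
An illustration of how a customModels string is parsed with the new <modelName>@<providerId> keys. The entries are illustrative, and it assumes DEFAULT_MODELS contains a gpt-4 entry under the openai provider.

const table = collectModelTable(
  DEFAULT_MODELS,
  "-all,+gpt-4@openai,my-deployment=My Model",
);

// "-all" marks every default model unavailable, then "+gpt-4@openai" re-enables
// the matching default entry:
//   table["gpt-4@openai"].available === true
// "my-deployment=My Model" matches no default model, so a new entry is created
// under a custom provider derived from the name:
//   table["my-deployment@my-deployment"]
//     -> { name: "my-deployment", displayName: "My Model", available: true,
//          provider: { id: "my-deployment", providerName: "my-deployment", providerType: "custom" } }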
@@ -100,3 +135,13 @@ export function collectModelsWithDefaultModel(
const allModels = Object.values(modelTable);
return allModels;
}
export function isModelAvailableInServer(
customModels: string,
modelName: string,
providerName: string,
) {
const fullName = `${modelName}@${providerName}`;
const modelTable = collectModelTable(DEFAULT_MODELS, customModels);
return modelTable[fullName]?.available === false;
}
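
A note on semantics plus a hedged usage sketch (the values are placeholders): despite its name, the helper returns true only when the model is explicitly marked unavailable in the server's custom model list, so callers treat a true result as "blocked on the server". The lookup key is <modelName>@<providerId>, so the provider argument has to match the id used in the table keys (lowercase here).

const serverCustomModels = "-gpt-4@openai";

isModelAvailableInServer(serverCustomModels, "gpt-4", "openai");
// -> true: the model is explicitly disabled, so the request should be rejected.

isModelAvailableInServer(serverCustomModels, "gpt-3.5-turbo", "openai");
// -> false: not explicitly disabled (also false for models the table does not know about).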