Switch the GPT-3.5 model to Microsoft's (Azure) deployment to further cut costs

sijinhui 2024-04-01 18:27:06 +08:00
parent 8ed0fa7504
commit 070f143541
5 changed files with 25 additions and 22 deletions
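Context for the rename: Azure OpenAI exposes GPT-3.5 Turbo under the dot-free name "gpt-35-turbo", so every hard-coded "gpt-3.5-turbo-0125" below becomes "gpt-35-turbo-0125". A minimal sketch of that naming rule, using a hypothetical helper that is not part of this commit:

// Hypothetical helper (not in this repo): map an OpenAI model id to the
// Azure OpenAI naming convention, which drops the dot in "3.5".
export function toAzureModelName(openaiModel: string): string {
  return openaiModel.replace("gpt-3.5", "gpt-35");
}
// toAzureModelName("gpt-3.5-turbo-0125") === "gpt-35-turbo-0125"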

View File

@@ -107,7 +107,7 @@ Latex inline: $x^2$
Latex block: $$e=mc^2$$
`;
-export const SUMMARIZE_MODEL = "gpt-3.5-turbo-0125";
+export const SUMMARIZE_MODEL = "gpt-35-turbo-0125";
export const GEMINI_SUMMARIZE_MODEL = "gemini-pro";
export const KnowledgeCutOffDate: Record<string, string> = {
@@ -132,7 +132,7 @@ export const DEFAULT_MODELS = [
// available: false,
// },
{
-name: "gpt-3.5-turbo-0125",
+name: "gpt-35-turbo-0125",
describe: "GPT-3,最快,效果一般,最便宜",
available: true,
provider: {
@@ -238,7 +238,10 @@ export const DEFAULT_MODELS = [
},
] as const;
-export const AZURE_MODELS: string[] = ["gpt-4-0125-preview"];
+export const AZURE_MODELS: string[] = [
+  "gpt-35-turbo-0125",
+  "gpt-4-0125-preview",
+];
// export const AZURE_PATH = AZURE_MODELS.map((m) => { m: `openai/deployments/${m}/chat/completions`});
// export const AZURE_PATH = AZURE_MODELS.map((m) => ({ m: `openai/deployments/${m}/chat/completions`} ));
export const AZURE_PATH = AZURE_MODELS.reduce(
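The hunk ends before the body of the reduce, but judging from the commented-out attempts above, AZURE_PATH presumably maps each Azure deployment name to its chat-completions path. A sketch of that shape, assuming a Record result (the variable names here are illustrative, not the repo's actual code):

// Illustrative only: build { model -> "openai/deployments/<model>/chat/completions" }.
const models = ["gpt-35-turbo-0125", "gpt-4-0125-preview"];
const azurePath = models.reduce<Record<string, string>>(
  (acc, m) => ({ ...acc, [m]: `openai/deployments/${m}/chat/completions` }),
  {},
);
// azurePath["gpt-35-turbo-0125"] === "openai/deployments/gpt-35-turbo-0125/chat/completions"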

View File

@@ -58,7 +58,7 @@ export const CN_MASKS: BuiltinMask[] = [
},
],
modelConfig: {
-model: "gpt-3.5-turbo-0125",
+model: "gpt-35-turbo-0125",
temperature: 1,
max_tokens: 2000,
presence_penalty: 0,
@@ -84,7 +84,7 @@ export const CN_MASKS: BuiltinMask[] = [
},
],
modelConfig: {
-model: "gpt-3.5-turbo-0125",
+model: "gpt-35-turbo-0125",
temperature: 1,
max_tokens: 2000,
presence_penalty: 0,
@@ -110,7 +110,7 @@ export const CN_MASKS: BuiltinMask[] = [
},
],
modelConfig: {
-model: "gpt-3.5-turbo-0125",
+model: "gpt-35-turbo-0125",
temperature: 1,
max_tokens: 2000,
presence_penalty: 0,
@@ -136,7 +136,7 @@ export const CN_MASKS: BuiltinMask[] = [
},
],
modelConfig: {
-model: "gpt-3.5-turbo-0125",
+model: "gpt-35-turbo-0125",
temperature: 1,
max_tokens: 2000,
presence_penalty: 0,
@@ -162,7 +162,7 @@ export const CN_MASKS: BuiltinMask[] = [
},
],
modelConfig: {
-model: "gpt-3.5-turbo-0125",
+model: "gpt-35-turbo-0125",
temperature: 1,
max_tokens: 2000,
presence_penalty: 0,
@@ -188,7 +188,7 @@ export const CN_MASKS: BuiltinMask[] = [
},
],
modelConfig: {
-model: "gpt-3.5-turbo-0125",
+model: "gpt-35-turbo-0125",
temperature: 1,
max_tokens: 2000,
presence_penalty: 0,
@@ -214,7 +214,7 @@ export const CN_MASKS: BuiltinMask[] = [
},
],
modelConfig: {
-model: "gpt-3.5-turbo-0125",
+model: "gpt-35-turbo-0125",
temperature: 1,
max_tokens: 2000,
presence_penalty: 0,
@@ -240,7 +240,7 @@ export const CN_MASKS: BuiltinMask[] = [
},
],
modelConfig: {
-model: "gpt-3.5-turbo-0125",
+model: "gpt-35-turbo-0125",
temperature: 1,
max_tokens: 2000,
presence_penalty: 0,
@@ -272,7 +272,7 @@ export const CN_MASKS: BuiltinMask[] = [
},
],
modelConfig: {
-model: "gpt-3.5-turbo-0125",
+model: "gpt-35-turbo-0125",
temperature: 0.5,
max_tokens: 2000,
presence_penalty: 0,
@@ -298,7 +298,7 @@ export const CN_MASKS: BuiltinMask[] = [
},
],
modelConfig: {
-model: "gpt-3.5-turbo-0125",
+model: "gpt-35-turbo-0125",
temperature: 1,
max_tokens: 2000,
presence_penalty: 0,
@@ -331,7 +331,7 @@ export const CN_MASKS: BuiltinMask[] = [
},
],
modelConfig: {
-model: "gpt-3.5-turbo-0125",
+model: "gpt-35-turbo-0125",
temperature: 1,
max_tokens: 2000,
presence_penalty: 0,
@@ -364,7 +364,7 @@ export const CN_MASKS: BuiltinMask[] = [
},
],
modelConfig: {
-model: "gpt-3.5-turbo-0125",
+model: "gpt-35-turbo-0125",
temperature: 1,
max_tokens: 2000,
presence_penalty: 0,
@@ -422,7 +422,7 @@ export const CN_MASKS: BuiltinMask[] = [
},
],
modelConfig: {
-model: "gpt-3.5-turbo-0125",
+model: "gpt-35-turbo-0125",
temperature: 1,
max_tokens: 2000,
presence_penalty: 0,

View File

@@ -86,7 +86,7 @@ export const EN_MASKS: BuiltinMask[] = [
},
],
modelConfig: {
-model: "gpt-3.5-turbo-0125",
+model: "gpt-35-turbo-0125",
temperature: 0.5,
max_tokens: 2000,
presence_penalty: 0,

View File

@@ -51,7 +51,7 @@ export const DEFAULT_CONFIG = {
dontUseModel: DISABLE_MODELS,
modelConfig: {
-model: "gpt-3.5-turbo-0125" as ModelType,
+model: "gpt-35-turbo-0125" as ModelType,
temperature: 0.8,
top_p: 1,
max_tokens: 2000,
@@ -137,7 +137,7 @@ export const useAppConfig = createPersistStore(
}),
{
name: StoreKey.Config,
-version: 3.8992,
+version: 3.8993,
migrate(persistedState, version) {
const state = persistedState as ChatConfig;
@@ -168,7 +168,7 @@ export const useAppConfig = createPersistStore(
if (version < 3.8) {
state.lastUpdate = Date.now();
}
-if (version < 3.8992) {
+if (version < 3.8993) {
state.lastUpdate = Date.now();
return { ...DEFAULT_CONFIG };
}
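Bumping the store version from 3.8992 to 3.8993 is what pushes the new default model to existing users: a persisted config with an older version falls into the version < 3.8993 branch of migrate and is reset to DEFAULT_CONFIG. A simplified sketch of that versioned-migration pattern (not the repo's actual createPersistStore internals):

// Simplified illustration: run the migrate hook only when the stored version is older.
interface Persisted<T> {
  version: number;
  state: T;
}
function loadWithMigration<T>(
  stored: Persisted<T>,
  currentVersion: number,
  migrate: (state: T, version: number) => T,
): T {
  return stored.version < currentVersion
    ? migrate(stored.state, stored.version)
    : stored.state;
}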

View File

@@ -78,7 +78,7 @@ export const authOptions: NextAuthOptions = {
async authorize(credential, req) {
const username = cleanUpString(`${credential?.username}`);
// Validate the username
-console.log(credential, username, '==============3')
+// console.log(credential, username, '==============3')
// Check that the name format meets the requirements; reject it if not
if (username && isName(username)) {
// Any object returned will be saved in `user` property of the JWT
@@ -243,7 +243,7 @@ export async function insertUser(user: {[key: string]: string}) {
data: user
})
} else {
-console.log('user==========', existingUser)
+// console.log('user==========', existingUser)
return existingUser;
}
} catch (e) {