Mirror of https://github.com/songquanpeng/one-api.git, synced 2025-09-17 09:16:36 +08:00
- Add 50+ new AI models across 10 major providers
- Update OpenAI pricing structure for the GPT-5/4.1/o3/o4 series
- Add Claude 4/4.1 series support with accurate pricing
- Include Gemini 2.5 series models and pricing
- Add DeepSeek V3 and R1 models with the latest pricing
- Update completion ratios for accurate output-token billing (see the sketch after this list)
- Add AWS Bedrock channel-specific pricing support
- Enhance model organization with clear categorization
- Maintain backward compatibility for existing models

Providers updated: OpenAI, Anthropic, Google, DeepSeek, Groq, MiniMax, Mistral, Cohere, SiliconFlow, TogetherAI

Closes: sync-models-pricing feature request
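For the completion-ratio item above, the general billing idea is that output tokens are charged at the model's base (prompt) ratio multiplied by a per-model completion ratio. The snippet below is a minimal sketch of that arithmetic only; the map names, the quota function, and the numeric values are illustrative assumptions, not the repository's actual billing code or real prices.

package main

import "fmt"

// Illustrative per-model pricing tables; values are placeholders, not real prices.
var modelRatio = map[string]float64{
	"deepseek-ai/DeepSeek-R1": 0.5, // hypothetical base (prompt) ratio
}

var completionRatio = map[string]float64{
	"deepseek-ai/DeepSeek-R1": 4.0, // hypothetical output/input price multiplier
}

// quota shows the shape of the calculation: prompt tokens are billed at the
// base ratio, completion tokens at the base ratio scaled by the completion ratio.
func quota(model string, promptTokens, completionTokens int) float64 {
	r := modelRatio[model]
	cr := completionRatio[model]
	return float64(promptTokens)*r + float64(completionTokens)*r*cr
}

func main() {
	// 1000 prompt tokens and 500 completion tokens for a hypothetical model.
	fmt.Printf("%.2f\n", quota("deepseek-ai/DeepSeek-R1", 1000, 500))
}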
43 lines
1.2 KiB
Go
package siliconflow

// https://docs.siliconflow.cn/docs/getting-started

// ModelList enumerates the SiliconFlow-hosted chat and embedding models
// supported by this adaptor.
var ModelList = []string{
	"deepseek-ai/deepseek-llm-67b-chat",
	"Qwen/Qwen1.5-14B-Chat",
	"Qwen/Qwen1.5-7B-Chat",
	"Qwen/Qwen1.5-110B-Chat",
	"Qwen/Qwen1.5-32B-Chat",
	"01-ai/Yi-1.5-6B-Chat",
	"01-ai/Yi-1.5-9B-Chat-16K",
	"01-ai/Yi-1.5-34B-Chat-16K",
	"THUDM/chatglm3-6b",
	"deepseek-ai/DeepSeek-V2-Chat",
	"THUDM/glm-4-9b-chat",
	"Qwen/Qwen2-72B-Instruct",
	"Qwen/Qwen2-7B-Instruct",
	"Qwen/Qwen2-57B-A14B-Instruct",
	"deepseek-ai/DeepSeek-Coder-V2-Instruct",
	"Qwen/Qwen2-1.5B-Instruct",
	"internlm/internlm2_5-7b-chat",
	"BAAI/bge-large-en-v1.5",
	"BAAI/bge-large-zh-v1.5",
	"Pro/Qwen/Qwen2-7B-Instruct",
	"Pro/Qwen/Qwen2-1.5B-Instruct",
	"Pro/Qwen/Qwen1.5-7B-Chat",
	"Pro/THUDM/glm-4-9b-chat",
	"Pro/THUDM/chatglm3-6b",
	"Pro/01-ai/Yi-1.5-9B-Chat-16K",
	"Pro/01-ai/Yi-1.5-6B-Chat",
	"Pro/google/gemma-2-9b-it",
	"Pro/internlm/internlm2_5-7b-chat",
	"Pro/meta-llama/Meta-Llama-3-8B-Instruct",
	"Pro/mistralai/Mistral-7B-Instruct-v0.2",
	// Latest models
	"Qwen/Qwen2.5-72B-Instruct",
	"Qwen/Qwen2.5-7B-Instruct",
	"deepseek-ai/DeepSeek-V2.5",
	"deepseek-ai/DeepSeek-R1",
	"internlm/InternLM2.5-20B-Chat",
}
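For context, a list like ModelList is typically consulted when the relay decides whether a requested model belongs to this channel. The standalone sketch below shows that membership check only; the isSupported helper and the trimmed model list are illustrative assumptions, not the repository's actual adaptor interface.

package main

import "fmt"

// A trimmed, illustrative copy of the adaptor's model list.
var modelList = []string{
	"deepseek-ai/DeepSeek-R1",
	"Qwen/Qwen2.5-72B-Instruct",
	"BAAI/bge-large-zh-v1.5",
}

// isSupported reports whether the requested model name appears in the list.
func isSupported(model string) bool {
	for _, m := range modelList {
		if m == model {
			return true
		}
	}
	return false
}

func main() {
	fmt.Println(isSupported("deepseek-ai/DeepSeek-R1")) // true
	fmt.Println(isSupported("gpt-4o"))                  // false: not a SiliconFlow model
}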