Mirror of https://github.com/songquanpeng/one-api.git (synced 2026-02-12 17:14:27 +08:00)

Compare commits: 5 commits, v0.6.11-pr ... db83f7ae2d

| Author | SHA1 | Date |
|---|---|---|
| | db83f7ae2d | |
| | e5f5c9a4c7 | |
| | 8df4a2670b | |
| | 7ac553541b | |
| | a5c517c27a | |
@@ -72,7 +72,7 @@ _✨ Access all large models through the standard OpenAI API format, ready to use out of the box
+ [x] [Anthropic Claude series models](https://anthropic.com) (AWS Claude supported)
+ [x] [Google PaLM2/Gemini series models](https://developers.generativeai.google)
+ [x] [Mistral series models](https://mistral.ai/)
+ [x] [ByteDance Doubao large models](https://console.volcengine.com/ark/region:ark+cn-beijing/model)
+ [x] [ByteDance Doubao large models (Volcano Engine)](https://www.volcengine.com/experience/ark?utm_term=202502dsinvite&ac=DSASUQY5&rc=2QXCA1VI)
+ [x] [Baidu Wenxin Yiyan (ERNIE) series models](https://cloud.baidu.com/doc/WENXINWORKSHOP/index.html)
+ [x] [Alibaba Tongyi Qianwen (Qwen) series models](https://help.aliyun.com/document_detail/2400395.html)
+ [x] [iFlytek Spark cognitive models](https://www.xfyun.cn/doc/spark/Web.html)

@@ -2,6 +2,9 @@ package controller

import (
	"fmt"
	"net/http"
	"strconv"

	"github.com/gin-gonic/gin"
	"github.com/songquanpeng/one-api/common/config"
	"github.com/songquanpeng/one-api/common/ctxkey"
@@ -9,8 +12,6 @@ import (
	"github.com/songquanpeng/one-api/common/network"
	"github.com/songquanpeng/one-api/common/random"
	"github.com/songquanpeng/one-api/model"
	"net/http"
	"strconv"
)

func GetAllTokens(c *gin.Context) {
@@ -20,6 +21,14 @@ func GetAllTokens(c *gin.Context) {
		p = 0
	}

	myRole := c.GetInt(ctxkey.Role)
	if myRole == model.RoleRootUser {
		uId, _ := strconv.Atoi(c.Query("user_id"))
		if uId != 0 {
			userId = uId
		}
	}

	order := c.Query("order")
	tokens, err := model.GetAllUserTokens(userId, p*config.ItemsPerPage, config.ItemsPerPage, order)

@@ -41,6 +50,13 @@ func GetAllTokens(c *gin.Context) {
func SearchTokens(c *gin.Context) {
	userId := c.GetInt(ctxkey.Id)
	keyword := c.Query("keyword")
	myRole := c.GetInt(ctxkey.Role)
	if myRole == model.RoleRootUser {
		uId, _ := strconv.Atoi(c.Query("user_id"))
		if uId != 0 {
			userId = uId
		}
	}
	tokens, err := model.SearchUserTokens(userId, keyword)
	if err != nil {
		c.JSON(http.StatusOK, gin.H{
@@ -67,6 +83,13 @@ func GetToken(c *gin.Context) {
		})
		return
	}
	myRole := c.GetInt(ctxkey.Role)
	if myRole == model.RoleRootUser {
		uId, _ := strconv.Atoi(c.Query("user_id"))
		if uId != 0 {
			userId = uId
		}
	}
	token, err := model.GetTokenByIds(id, userId)
	if err != nil {
		c.JSON(http.StatusOK, gin.H{
@@ -151,6 +174,15 @@ func AddToken(c *gin.Context) {
		Models: token.Models,
		Subnet: token.Subnet,
	}
	// if the user is root and add the token for other user, set the user id
	myRole := c.GetInt(ctxkey.Role)
	if myRole == model.RoleRootUser {
		if token.UserId == 0 {
			cleanToken.UserId = c.GetInt(ctxkey.Id)
		} else {
			cleanToken.UserId = token.UserId
		}
	}
	err = cleanToken.Insert()
	if err != nil {
		c.JSON(http.StatusOK, gin.H{
@@ -170,6 +202,14 @@ func AddToken(c *gin.Context) {
func DeleteToken(c *gin.Context) {
	id, _ := strconv.Atoi(c.Param("id"))
	userId := c.GetInt(ctxkey.Id)
	myRole := c.GetInt(ctxkey.Role)
	// if the user is root and delete the token for other user, set the user id
	if myRole == model.RoleRootUser {
		uId, _ := strconv.Atoi(c.Query("user_id"))
		if uId != 0 {
			userId = uId
		}
	}
	err := model.DeleteTokenById(id, userId)
	if err != nil {
		c.JSON(http.StatusOK, gin.H{
@@ -205,6 +245,14 @@ func UpdateToken(c *gin.Context) {
		})
		return
	}
	// if the user is root and update the token for other user, set the user id
	myRole := c.GetInt(ctxkey.Role)
	if myRole == model.RoleRootUser {
		if token.UserId == 0 {
			userId = token.UserId
		}
	}

	cleanToken, err := model.GetTokenByIds(token.Id, userId)
	if err != nil {
		c.JSON(http.StatusOK, gin.H{
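The hunks above let a root user act on another user's tokens by passing a `user_id` query parameter (or, for AddToken, a `user_id` in the request body); for non-root callers the parameter is ignored and the session user id is kept. A minimal sketch of listing another user's tokens over HTTP — host, port, access token, and user id are placeholders:

```go
package main

import (
	"fmt"
	"io"
	"log"
	"net/http"
)

func main() {
	// GET /api/token/ lists tokens; with a root-user access token, the new
	// user_id parameter selects whose tokens are listed (21 is hypothetical).
	req, err := http.NewRequest(http.MethodGet,
		"http://127.0.0.1:3000/api/token/?p=0&order=&user_id=21", nil)
	if err != nil {
		log.Fatal(err)
	}
	req.Header.Set("Authorization", "Bearer <root access token>") // placeholder
	resp, err := http.DefaultClient.Do(req)
	if err != nil {
		log.Fatal(err)
	}
	defer resp.Body.Close()
	body, _ := io.ReadAll(resp.Body)
	fmt.Println(string(body))
}
```

The same parameter is honored by SearchTokens, GetToken, and DeleteToken, per the hunks above.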
@@ -14,10 +14,14 @@ var ModelList = []string{
	"qwen2-72b-instruct", "qwen2-57b-a14b-instruct", "qwen2-7b-instruct", "qwen2-1.5b-instruct", "qwen2-0.5b-instruct",
	"qwen1.5-110b-chat", "qwen1.5-72b-chat", "qwen1.5-32b-chat", "qwen1.5-14b-chat", "qwen1.5-7b-chat", "qwen1.5-1.8b-chat", "qwen1.5-0.5b-chat",
	"qwen-72b-chat", "qwen-14b-chat", "qwen-7b-chat", "qwen-1.8b-chat", "qwen-1.8b-longcontext-chat",
	"qvq-72b-preview",
	"qwen2.5-vl-72b-instruct", "qwen2.5-vl-7b-instruct", "qwen2.5-vl-2b-instruct", "qwen2.5-vl-1b-instruct", "qwen2.5-vl-0.5b-instruct",
	"qwen2-vl-7b-instruct", "qwen2-vl-2b-instruct", "qwen-vl-v1", "qwen-vl-chat-v1",
	"qwen2-audio-instruct", "qwen-audio-chat",
	"qwen2.5-math-72b-instruct", "qwen2.5-math-7b-instruct", "qwen2.5-math-1.5b-instruct", "qwen2-math-72b-instruct", "qwen2-math-7b-instruct", "qwen2-math-1.5b-instruct",
	"qwen2.5-coder-32b-instruct", "qwen2.5-coder-14b-instruct", "qwen2.5-coder-7b-instruct", "qwen2.5-coder-3b-instruct", "qwen2.5-coder-1.5b-instruct", "qwen2.5-coder-0.5b-instruct",
	"text-embedding-v1", "text-embedding-v3", "text-embedding-v2", "text-embedding-async-v2", "text-embedding-async-v1",
	"ali-stable-diffusion-xl", "ali-stable-diffusion-v1.5", "wanx-v1",
	"qwen-mt-plus", "qwen-mt-turbo",
	"deepseek-r1", "deepseek-v3", "deepseek-r1-distill-qwen-1.5b", "deepseek-r1-distill-qwen-7b", "deepseek-r1-distill-qwen-14b", "deepseek-r1-distill-qwen-32b", "deepseek-r1-distill-llama-8b", "deepseek-r1-distill-llama-70b",
}

@@ -1,20 +1,235 @@
|
||||
package openrouter
|
||||
|
||||
var ModelList = []string{
|
||||
"openai/gpt-3.5-turbo",
|
||||
"openai/chatgpt-4o-latest",
|
||||
"openai/o1",
|
||||
"openai/o1-preview",
|
||||
"openai/o1-mini",
|
||||
"openai/o3-mini",
|
||||
"google/gemini-2.0-flash-001",
|
||||
"google/gemini-2.0-flash-thinking-exp:free",
|
||||
"google/gemini-2.0-flash-lite-preview-02-05:free",
|
||||
"google/gemini-2.0-pro-exp-02-05:free",
|
||||
"google/gemini-flash-1.5-8b",
|
||||
"anthropic/claude-3.5-sonnet",
|
||||
"01-ai/yi-large",
|
||||
"aetherwiing/mn-starcannon-12b",
|
||||
"ai21/jamba-1-5-large",
|
||||
"ai21/jamba-1-5-mini",
|
||||
"ai21/jamba-instruct",
|
||||
"aion-labs/aion-1.0",
|
||||
"aion-labs/aion-1.0-mini",
|
||||
"aion-labs/aion-rp-llama-3.1-8b",
|
||||
"allenai/llama-3.1-tulu-3-405b",
|
||||
"alpindale/goliath-120b",
|
||||
"alpindale/magnum-72b",
|
||||
"amazon/nova-lite-v1",
|
||||
"amazon/nova-micro-v1",
|
||||
"amazon/nova-pro-v1",
|
||||
"anthracite-org/magnum-v2-72b",
|
||||
"anthracite-org/magnum-v4-72b",
|
||||
"anthropic/claude-2",
|
||||
"anthropic/claude-2.0",
|
||||
"anthropic/claude-2.0:beta",
|
||||
"anthropic/claude-2.1",
|
||||
"anthropic/claude-2.1:beta",
|
||||
"anthropic/claude-2:beta",
|
||||
"anthropic/claude-3-haiku",
|
||||
"anthropic/claude-3-haiku:beta",
|
||||
"anthropic/claude-3-opus",
|
||||
"anthropic/claude-3-opus:beta",
|
||||
"anthropic/claude-3-sonnet",
|
||||
"anthropic/claude-3-sonnet:beta",
|
||||
"anthropic/claude-3.5-haiku",
|
||||
"deepseek/deepseek-r1:free",
|
||||
"anthropic/claude-3.5-haiku-20241022",
|
||||
"anthropic/claude-3.5-haiku-20241022:beta",
|
||||
"anthropic/claude-3.5-haiku:beta",
|
||||
"anthropic/claude-3.5-sonnet",
|
||||
"anthropic/claude-3.5-sonnet-20240620",
|
||||
"anthropic/claude-3.5-sonnet-20240620:beta",
|
||||
"anthropic/claude-3.5-sonnet:beta",
|
||||
"cognitivecomputations/dolphin-mixtral-8x22b",
|
||||
"cognitivecomputations/dolphin-mixtral-8x7b",
|
||||
"cohere/command",
|
||||
"cohere/command-r",
|
||||
"cohere/command-r-03-2024",
|
||||
"cohere/command-r-08-2024",
|
||||
"cohere/command-r-plus",
|
||||
"cohere/command-r-plus-04-2024",
|
||||
"cohere/command-r-plus-08-2024",
|
||||
"cohere/command-r7b-12-2024",
|
||||
"databricks/dbrx-instruct",
|
||||
"deepseek/deepseek-chat",
|
||||
"deepseek/deepseek-chat-v2.5",
|
||||
"deepseek/deepseek-chat:free",
|
||||
"deepseek/deepseek-r1",
|
||||
"deepseek/deepseek-r1-distill-llama-70b",
|
||||
"deepseek/deepseek-r1-distill-llama-70b:free",
|
||||
"deepseek/deepseek-r1-distill-llama-8b",
|
||||
"deepseek/deepseek-r1-distill-qwen-1.5b",
|
||||
"deepseek/deepseek-r1-distill-qwen-14b",
|
||||
"deepseek/deepseek-r1-distill-qwen-32b",
|
||||
"deepseek/deepseek-r1:free",
|
||||
"eva-unit-01/eva-llama-3.33-70b",
|
||||
"eva-unit-01/eva-qwen-2.5-32b",
|
||||
"eva-unit-01/eva-qwen-2.5-72b",
|
||||
"google/gemini-2.0-flash-001",
|
||||
"google/gemini-2.0-flash-exp:free",
|
||||
"google/gemini-2.0-flash-lite-preview-02-05:free",
|
||||
"google/gemini-2.0-flash-thinking-exp-1219:free",
|
||||
"google/gemini-2.0-flash-thinking-exp:free",
|
||||
"google/gemini-2.0-pro-exp-02-05:free",
|
||||
"google/gemini-exp-1206:free",
|
||||
"google/gemini-flash-1.5",
|
||||
"google/gemini-flash-1.5-8b",
|
||||
"google/gemini-flash-1.5-8b-exp",
|
||||
"google/gemini-pro",
|
||||
"google/gemini-pro-1.5",
|
||||
"google/gemini-pro-vision",
|
||||
"google/gemma-2-27b-it",
|
||||
"google/gemma-2-9b-it",
|
||||
"google/gemma-2-9b-it:free",
|
||||
"google/gemma-7b-it",
|
||||
"google/learnlm-1.5-pro-experimental:free",
|
||||
"google/palm-2-chat-bison",
|
||||
"google/palm-2-chat-bison-32k",
|
||||
"google/palm-2-codechat-bison",
|
||||
"google/palm-2-codechat-bison-32k",
|
||||
"gryphe/mythomax-l2-13b",
|
||||
"gryphe/mythomax-l2-13b:free",
|
||||
"huggingfaceh4/zephyr-7b-beta:free",
|
||||
"infermatic/mn-inferor-12b",
|
||||
"inflection/inflection-3-pi",
|
||||
"inflection/inflection-3-productivity",
|
||||
"jondurbin/airoboros-l2-70b",
|
||||
"liquid/lfm-3b",
|
||||
"liquid/lfm-40b",
|
||||
"liquid/lfm-7b",
|
||||
"mancer/weaver",
|
||||
"meta-llama/llama-2-13b-chat",
|
||||
"meta-llama/llama-2-70b-chat",
|
||||
"meta-llama/llama-3-70b-instruct",
|
||||
"meta-llama/llama-3-8b-instruct",
|
||||
"meta-llama/llama-3-8b-instruct:free",
|
||||
"meta-llama/llama-3.1-405b",
|
||||
"meta-llama/llama-3.1-405b-instruct",
|
||||
"meta-llama/llama-3.1-70b-instruct",
|
||||
"meta-llama/llama-3.1-8b-instruct",
|
||||
"meta-llama/llama-3.2-11b-vision-instruct",
|
||||
"meta-llama/llama-3.2-11b-vision-instruct:free",
|
||||
"meta-llama/llama-3.2-1b-instruct",
|
||||
"meta-llama/llama-3.2-3b-instruct",
|
||||
"meta-llama/llama-3.2-90b-vision-instruct",
|
||||
"meta-llama/llama-3.3-70b-instruct",
|
||||
"meta-llama/llama-3.3-70b-instruct:free",
|
||||
"meta-llama/llama-guard-2-8b",
|
||||
"microsoft/phi-3-medium-128k-instruct",
|
||||
"microsoft/phi-3-medium-128k-instruct:free",
|
||||
"microsoft/phi-3-mini-128k-instruct",
|
||||
"microsoft/phi-3-mini-128k-instruct:free",
|
||||
"microsoft/phi-3.5-mini-128k-instruct",
|
||||
"microsoft/phi-4",
|
||||
"microsoft/wizardlm-2-7b",
|
||||
"microsoft/wizardlm-2-8x22b",
|
||||
"minimax/minimax-01",
|
||||
"mistralai/codestral-2501",
|
||||
"mistralai/codestral-mamba",
|
||||
"mistralai/ministral-3b",
|
||||
"mistralai/ministral-8b",
|
||||
"mistralai/mistral-7b-instruct",
|
||||
"mistralai/mistral-7b-instruct-v0.1",
|
||||
"mistralai/mistral-7b-instruct-v0.3",
|
||||
"mistralai/mistral-7b-instruct:free",
|
||||
"mistralai/mistral-large",
|
||||
"mistralai/mistral-large-2407",
|
||||
"mistralai/mistral-large-2411",
|
||||
"mistralai/mistral-medium",
|
||||
"mistralai/mistral-nemo",
|
||||
"mistralai/mistral-nemo:free",
|
||||
"mistralai/mistral-small",
|
||||
"mistralai/mistral-small-24b-instruct-2501",
|
||||
"mistralai/mistral-small-24b-instruct-2501:free",
|
||||
"mistralai/mistral-tiny",
|
||||
"mistralai/mixtral-8x22b-instruct",
|
||||
"mistralai/mixtral-8x7b",
|
||||
"mistralai/mixtral-8x7b-instruct",
|
||||
"mistralai/pixtral-12b",
|
||||
"mistralai/pixtral-large-2411",
|
||||
"neversleep/llama-3-lumimaid-70b",
|
||||
"neversleep/llama-3-lumimaid-8b",
|
||||
"neversleep/llama-3-lumimaid-8b:extended",
|
||||
"neversleep/llama-3.1-lumimaid-70b",
|
||||
"neversleep/llama-3.1-lumimaid-8b",
|
||||
"neversleep/noromaid-20b",
|
||||
"nothingiisreal/mn-celeste-12b",
|
||||
"nousresearch/hermes-2-pro-llama-3-8b",
|
||||
"nousresearch/hermes-3-llama-3.1-405b",
|
||||
"nousresearch/hermes-3-llama-3.1-70b",
|
||||
"nousresearch/nous-hermes-2-mixtral-8x7b-dpo",
|
||||
"nousresearch/nous-hermes-llama2-13b",
|
||||
"nvidia/llama-3.1-nemotron-70b-instruct",
|
||||
"nvidia/llama-3.1-nemotron-70b-instruct:free",
|
||||
"openai/chatgpt-4o-latest",
|
||||
"openai/gpt-3.5-turbo",
|
||||
"openai/gpt-3.5-turbo-0125",
|
||||
"openai/gpt-3.5-turbo-0613",
|
||||
"openai/gpt-3.5-turbo-1106",
|
||||
"openai/gpt-3.5-turbo-16k",
|
||||
"openai/gpt-3.5-turbo-instruct",
|
||||
"openai/gpt-4",
|
||||
"openai/gpt-4-0314",
|
||||
"openai/gpt-4-1106-preview",
|
||||
"openai/gpt-4-32k",
|
||||
"openai/gpt-4-32k-0314",
|
||||
"openai/gpt-4-turbo",
|
||||
"openai/gpt-4-turbo-preview",
|
||||
"openai/gpt-4o",
|
||||
"openai/gpt-4o-2024-05-13",
|
||||
"openai/gpt-4o-2024-08-06",
|
||||
"openai/gpt-4o-2024-11-20",
|
||||
"openai/gpt-4o-mini",
|
||||
"openai/gpt-4o-mini-2024-07-18",
|
||||
"openai/gpt-4o:extended",
|
||||
"openai/o1",
|
||||
"openai/o1-mini",
|
||||
"openai/o1-mini-2024-09-12",
|
||||
"openai/o1-preview",
|
||||
"openai/o1-preview-2024-09-12",
|
||||
"openai/o3-mini",
|
||||
"openai/o3-mini-high",
|
||||
"openchat/openchat-7b",
|
||||
"openchat/openchat-7b:free",
|
||||
"openrouter/auto",
|
||||
"perplexity/llama-3.1-sonar-huge-128k-online",
|
||||
"perplexity/llama-3.1-sonar-large-128k-chat",
|
||||
"perplexity/llama-3.1-sonar-large-128k-online",
|
||||
"perplexity/llama-3.1-sonar-small-128k-chat",
|
||||
"perplexity/llama-3.1-sonar-small-128k-online",
|
||||
"perplexity/sonar",
|
||||
"perplexity/sonar-reasoning",
|
||||
"pygmalionai/mythalion-13b",
|
||||
"qwen/qvq-72b-preview",
|
||||
"qwen/qwen-2-72b-instruct",
|
||||
"qwen/qwen-2-7b-instruct",
|
||||
"qwen/qwen-2-7b-instruct:free",
|
||||
"qwen/qwen-2-vl-72b-instruct",
|
||||
"qwen/qwen-2-vl-7b-instruct",
|
||||
"qwen/qwen-2.5-72b-instruct",
|
||||
"qwen/qwen-2.5-7b-instruct",
|
||||
"qwen/qwen-2.5-coder-32b-instruct",
|
||||
"qwen/qwen-max",
|
||||
"qwen/qwen-plus",
|
||||
"qwen/qwen-turbo",
|
||||
"qwen/qwen-vl-plus:free",
|
||||
"qwen/qwen2.5-vl-72b-instruct:free",
|
||||
"qwen/qwq-32b-preview",
|
||||
"raifle/sorcererlm-8x22b",
|
||||
"sao10k/fimbulvetr-11b-v2",
|
||||
"sao10k/l3-euryale-70b",
|
||||
"sao10k/l3-lunaris-8b",
|
||||
"sao10k/l3.1-70b-hanami-x1",
|
||||
"sao10k/l3.1-euryale-70b",
|
||||
"sao10k/l3.3-euryale-70b",
|
||||
"sophosympatheia/midnight-rose-70b",
|
||||
"sophosympatheia/rogue-rose-103b-v0.2:free",
|
||||
"teknium/openhermes-2.5-mistral-7b",
|
||||
"thedrummer/rocinante-12b",
|
||||
"thedrummer/unslopnemo-12b",
|
||||
"undi95/remm-slerp-l2-13b",
|
||||
"undi95/toppy-m-7b",
|
||||
"undi95/toppy-m-7b:free",
|
||||
"x-ai/grok-2-1212",
|
||||
"x-ai/grok-2-vision-1212",
|
||||
"x-ai/grok-beta",
|
||||
"x-ai/grok-vision-beta",
|
||||
"xwin-lm/xwin-lm-70b",
|
||||
}
|
||||
|
||||
@@ -59,6 +59,8 @@ var ModelRatio = map[string]float64{
|
||||
"o1-preview-2024-09-12": 7.5,
|
||||
"o1-mini": 1.5, // $3.00 / 1M input tokens
|
||||
"o1-mini-2024-09-12": 1.5,
|
||||
"o3-mini": 1.5, // $3.00 / 1M input tokens
|
||||
"o3-mini-2025-01-31": 1.5,
|
||||
"davinci-002": 1, // $0.002 / 1K tokens
|
||||
"babbage-002": 0.2, // $0.0004 / 1K tokens
|
||||
"text-ada-001": 0.2,
|
||||
@@ -159,91 +161,105 @@ var ModelRatio = map[string]float64{
|
||||
"embedding-2": 0.0005 * RMB,
|
||||
"embedding-3": 0.0005 * RMB,
|
||||
// https://help.aliyun.com/zh/dashscope/developer-reference/tongyi-thousand-questions-metering-and-billing
|
||||
"qwen-turbo": 1.4286, // ¥0.02 / 1k tokens
|
||||
"qwen-turbo-latest": 1.4286,
|
||||
"qwen-plus": 1.4286,
|
||||
"qwen-plus-latest": 1.4286,
|
||||
"qwen-max": 1.4286,
|
||||
"qwen-max-latest": 1.4286,
|
||||
"qwen-max-longcontext": 1.4286,
|
||||
"qwen-vl-max": 1.4286,
|
||||
"qwen-vl-max-latest": 1.4286,
|
||||
"qwen-vl-plus": 1.4286,
|
||||
"qwen-vl-plus-latest": 1.4286,
|
||||
"qwen-vl-ocr": 1.4286,
|
||||
"qwen-vl-ocr-latest": 1.4286,
|
||||
"qwen-audio-turbo": 1.4286,
|
||||
"qwen-math-plus": 1.4286,
|
||||
"qwen-math-plus-latest": 1.4286,
|
||||
"qwen-math-turbo": 1.4286,
|
||||
"qwen-math-turbo-latest": 1.4286,
|
||||
"qwen-coder-plus": 1.4286,
|
||||
"qwen-coder-plus-latest": 1.4286,
|
||||
"qwen-coder-turbo": 1.4286,
|
||||
"qwen-coder-turbo-latest": 1.4286,
|
||||
"qwq-32b-preview": 1.4286,
|
||||
"qwen2.5-72b-instruct": 1.4286,
|
||||
"qwen2.5-32b-instruct": 1.4286,
|
||||
"qwen2.5-14b-instruct": 1.4286,
|
||||
"qwen2.5-7b-instruct": 1.4286,
|
||||
"qwen2.5-3b-instruct": 1.4286,
|
||||
"qwen2.5-1.5b-instruct": 1.4286,
|
||||
"qwen2.5-0.5b-instruct": 1.4286,
|
||||
"qwen2-72b-instruct": 1.4286,
|
||||
"qwen2-57b-a14b-instruct": 1.4286,
|
||||
"qwen2-7b-instruct": 1.4286,
|
||||
"qwen2-1.5b-instruct": 1.4286,
|
||||
"qwen2-0.5b-instruct": 1.4286,
|
||||
"qwen1.5-110b-chat": 1.4286,
|
||||
"qwen1.5-72b-chat": 1.4286,
|
||||
"qwen1.5-32b-chat": 1.4286,
|
||||
"qwen1.5-14b-chat": 1.4286,
|
||||
"qwen1.5-7b-chat": 1.4286,
|
||||
"qwen1.5-1.8b-chat": 1.4286,
|
||||
"qwen1.5-0.5b-chat": 1.4286,
|
||||
"qwen-72b-chat": 1.4286,
|
||||
"qwen-14b-chat": 1.4286,
|
||||
"qwen-7b-chat": 1.4286,
|
||||
"qwen-1.8b-chat": 1.4286,
|
||||
"qwen-1.8b-longcontext-chat": 1.4286,
|
||||
"qwen2-vl-7b-instruct": 1.4286,
|
||||
"qwen2-vl-2b-instruct": 1.4286,
|
||||
"qwen-vl-v1": 1.4286,
|
||||
"qwen-vl-chat-v1": 1.4286,
|
||||
"qwen2-audio-instruct": 1.4286,
|
||||
"qwen-audio-chat": 1.4286,
|
||||
"qwen2.5-math-72b-instruct": 1.4286,
|
||||
"qwen2.5-math-7b-instruct": 1.4286,
|
||||
"qwen2.5-math-1.5b-instruct": 1.4286,
|
||||
"qwen2-math-72b-instruct": 1.4286,
|
||||
"qwen2-math-7b-instruct": 1.4286,
|
||||
"qwen2-math-1.5b-instruct": 1.4286,
|
||||
"qwen2.5-coder-32b-instruct": 1.4286,
|
||||
"qwen2.5-coder-14b-instruct": 1.4286,
|
||||
"qwen2.5-coder-7b-instruct": 1.4286,
|
||||
"qwen2.5-coder-3b-instruct": 1.4286,
|
||||
"qwen2.5-coder-1.5b-instruct": 1.4286,
|
||||
"qwen2.5-coder-0.5b-instruct": 1.4286,
|
||||
"text-embedding-v1": 0.05, // ¥0.0007 / 1k tokens
|
||||
"text-embedding-v3": 0.05,
|
||||
"text-embedding-v2": 0.05,
|
||||
"text-embedding-async-v2": 0.05,
|
||||
"text-embedding-async-v1": 0.05,
|
||||
"ali-stable-diffusion-xl": 8.00,
|
||||
"ali-stable-diffusion-v1.5": 8.00,
|
||||
"wanx-v1": 8.00,
|
||||
"SparkDesk": 1.2858, // ¥0.018 / 1k tokens
|
||||
"SparkDesk-v1.1": 1.2858, // ¥0.018 / 1k tokens
|
||||
"SparkDesk-v2.1": 1.2858, // ¥0.018 / 1k tokens
|
||||
"SparkDesk-v3.1": 1.2858, // ¥0.018 / 1k tokens
|
||||
"SparkDesk-v3.1-128K": 1.2858, // ¥0.018 / 1k tokens
|
||||
"SparkDesk-v3.5": 1.2858, // ¥0.018 / 1k tokens
|
||||
"SparkDesk-v3.5-32K": 1.2858, // ¥0.018 / 1k tokens
|
||||
"SparkDesk-v4.0": 1.2858, // ¥0.018 / 1k tokens
|
||||
"360GPT_S2_V9": 0.8572, // ¥0.012 / 1k tokens
|
||||
"embedding-bert-512-v1": 0.0715, // ¥0.001 / 1k tokens
|
||||
"embedding_s1_v1": 0.0715, // ¥0.001 / 1k tokens
|
||||
"semantic_similarity_s1_v1": 0.0715, // ¥0.001 / 1k tokens
|
||||
"qwen-turbo": 0.0003 * RMB,
|
||||
"qwen-turbo-latest": 0.0003 * RMB,
|
||||
"qwen-plus": 0.0008 * RMB,
|
||||
"qwen-plus-latest": 0.0008 * RMB,
|
||||
"qwen-max": 0.0024 * RMB,
|
||||
"qwen-max-latest": 0.0024 * RMB,
|
||||
"qwen-max-longcontext": 0.0005 * RMB,
|
||||
"qwen-vl-max": 0.003 * RMB,
|
||||
"qwen-vl-max-latest": 0.003 * RMB,
|
||||
"qwen-vl-plus": 0.0015 * RMB,
|
||||
"qwen-vl-plus-latest": 0.0015 * RMB,
|
||||
"qwen-vl-ocr": 0.005 * RMB,
|
||||
"qwen-vl-ocr-latest": 0.005 * RMB,
|
||||
"qwen-audio-turbo": 1.4286,
|
||||
"qwen-math-plus": 0.004 * RMB,
|
||||
"qwen-math-plus-latest": 0.004 * RMB,
|
||||
"qwen-math-turbo": 0.002 * RMB,
|
||||
"qwen-math-turbo-latest": 0.002 * RMB,
|
||||
"qwen-coder-plus": 0.0035 * RMB,
|
||||
"qwen-coder-plus-latest": 0.0035 * RMB,
|
||||
"qwen-coder-turbo": 0.002 * RMB,
|
||||
"qwen-coder-turbo-latest": 0.002 * RMB,
|
||||
"qwen-mt-plus": 0.015 * RMB,
|
||||
"qwen-mt-turbo": 0.001 * RMB,
|
||||
"qwq-32b-preview": 0.002 * RMB,
|
||||
"qwen2.5-72b-instruct": 0.004 * RMB,
|
||||
"qwen2.5-32b-instruct": 0.03 * RMB,
|
||||
"qwen2.5-14b-instruct": 0.001 * RMB,
|
||||
"qwen2.5-7b-instruct": 0.0005 * RMB,
|
||||
"qwen2.5-3b-instruct": 0.006 * RMB,
|
||||
"qwen2.5-1.5b-instruct": 0.0003 * RMB,
|
||||
"qwen2.5-0.5b-instruct": 0.0003 * RMB,
|
||||
"qwen2-72b-instruct": 0.004 * RMB,
|
||||
"qwen2-57b-a14b-instruct": 0.0035 * RMB,
|
||||
"qwen2-7b-instruct": 0.001 * RMB,
|
||||
"qwen2-1.5b-instruct": 0.001 * RMB,
|
||||
"qwen2-0.5b-instruct": 0.001 * RMB,
|
||||
"qwen1.5-110b-chat": 0.007 * RMB,
|
||||
"qwen1.5-72b-chat": 0.005 * RMB,
|
||||
"qwen1.5-32b-chat": 0.0035 * RMB,
|
||||
"qwen1.5-14b-chat": 0.002 * RMB,
|
||||
"qwen1.5-7b-chat": 0.001 * RMB,
|
||||
"qwen1.5-1.8b-chat": 0.001 * RMB,
|
||||
"qwen1.5-0.5b-chat": 0.001 * RMB,
|
||||
"qwen-72b-chat": 0.02 * RMB,
|
||||
"qwen-14b-chat": 0.008 * RMB,
|
||||
"qwen-7b-chat": 0.006 * RMB,
|
||||
"qwen-1.8b-chat": 0.006 * RMB,
|
||||
"qwen-1.8b-longcontext-chat": 0.006 * RMB,
|
||||
"qvq-72b-preview": 0.012 * RMB,
|
||||
"qwen2.5-vl-72b-instruct": 0.016 * RMB,
|
||||
"qwen2.5-vl-7b-instruct": 0.002 * RMB,
|
||||
"qwen2.5-vl-3b-instruct": 0.0012 * RMB,
|
||||
"qwen2-vl-7b-instruct": 0.016 * RMB,
|
||||
"qwen2-vl-2b-instruct": 0.002 * RMB,
|
||||
"qwen-vl-v1": 0.002 * RMB,
|
||||
"qwen-vl-chat-v1": 0.002 * RMB,
|
||||
"qwen2-audio-instruct": 0.002 * RMB,
|
||||
"qwen-audio-chat": 0.002 * RMB,
|
||||
"qwen2.5-math-72b-instruct": 0.004 * RMB,
|
||||
"qwen2.5-math-7b-instruct": 0.001 * RMB,
|
||||
"qwen2.5-math-1.5b-instruct": 0.001 * RMB,
|
||||
"qwen2-math-72b-instruct": 0.004 * RMB,
|
||||
"qwen2-math-7b-instruct": 0.001 * RMB,
|
||||
"qwen2-math-1.5b-instruct": 0.001 * RMB,
|
||||
"qwen2.5-coder-32b-instruct": 0.002 * RMB,
|
||||
"qwen2.5-coder-14b-instruct": 0.002 * RMB,
|
||||
"qwen2.5-coder-7b-instruct": 0.001 * RMB,
|
||||
"qwen2.5-coder-3b-instruct": 0.001 * RMB,
|
||||
"qwen2.5-coder-1.5b-instruct": 0.001 * RMB,
|
||||
"qwen2.5-coder-0.5b-instruct": 0.001 * RMB,
|
||||
"text-embedding-v1": 0.0007 * RMB, // ¥0.0007 / 1k tokens
|
||||
"text-embedding-v3": 0.0007 * RMB,
|
||||
"text-embedding-v2": 0.0007 * RMB,
|
||||
"text-embedding-async-v2": 0.0007 * RMB,
|
||||
"text-embedding-async-v1": 0.0007 * RMB,
|
||||
"ali-stable-diffusion-xl": 8.00,
|
||||
"ali-stable-diffusion-v1.5": 8.00,
|
||||
"wanx-v1": 8.00,
|
||||
"deepseek-r1": 0.002 * RMB,
|
||||
"deepseek-v3": 0.001 * RMB,
|
||||
"deepseek-r1-distill-qwen-1.5b": 0.001 * RMB,
|
||||
"deepseek-r1-distill-qwen-7b": 0.0005 * RMB,
|
||||
"deepseek-r1-distill-qwen-14b": 0.001 * RMB,
|
||||
"deepseek-r1-distill-qwen-32b": 0.002 * RMB,
|
||||
"deepseek-r1-distill-llama-8b": 0.0005 * RMB,
|
||||
"deepseek-r1-distill-llama-70b": 0.004 * RMB,
|
||||
"SparkDesk": 1.2858, // ¥0.018 / 1k tokens
|
||||
"SparkDesk-v1.1": 1.2858, // ¥0.018 / 1k tokens
|
||||
"SparkDesk-v2.1": 1.2858, // ¥0.018 / 1k tokens
|
||||
"SparkDesk-v3.1": 1.2858, // ¥0.018 / 1k tokens
|
||||
"SparkDesk-v3.1-128K": 1.2858, // ¥0.018 / 1k tokens
|
||||
"SparkDesk-v3.5": 1.2858, // ¥0.018 / 1k tokens
|
||||
"SparkDesk-v3.5-32K": 1.2858, // ¥0.018 / 1k tokens
|
||||
"SparkDesk-v4.0": 1.2858, // ¥0.018 / 1k tokens
|
||||
"360GPT_S2_V9": 0.8572, // ¥0.012 / 1k tokens
|
||||
"embedding-bert-512-v1": 0.0715, // ¥0.001 / 1k tokens
|
||||
"embedding_s1_v1": 0.0715, // ¥0.001 / 1k tokens
|
||||
"semantic_similarity_s1_v1": 0.0715, // ¥0.001 / 1k tokens
|
||||
// https://cloud.tencent.com/document/product/1729/97731#e0e6be58-60c8-469f-bdeb-6c264ce3b4d0
|
||||
"hunyuan-turbo": 0.015 * RMB,
|
||||
"hunyuan-large": 0.004 * RMB,
|
||||
@@ -371,6 +387,238 @@ var ModelRatio = map[string]float64{
|
||||
"mistralai/mistral-7b-instruct-v0.2": 0.050 * USD,
|
||||
"mistralai/mistral-7b-v0.1": 0.050 * USD,
|
||||
"mistralai/mixtral-8x7b-instruct-v0.1": 0.300 * USD,
|
||||
//https://openrouter.ai/models
|
||||
"01-ai/yi-large": 1.5,
|
||||
"aetherwiing/mn-starcannon-12b": 0.6,
|
||||
"ai21/jamba-1-5-large": 4.0,
|
||||
"ai21/jamba-1-5-mini": 0.2,
|
||||
"ai21/jamba-instruct": 0.35,
|
||||
"aion-labs/aion-1.0": 6.0,
|
||||
"aion-labs/aion-1.0-mini": 1.2,
|
||||
"aion-labs/aion-rp-llama-3.1-8b": 0.1,
|
||||
"allenai/llama-3.1-tulu-3-405b": 5.0,
|
||||
"alpindale/goliath-120b": 4.6875,
|
||||
"alpindale/magnum-72b": 1.125,
|
||||
"amazon/nova-lite-v1": 0.12,
|
||||
"amazon/nova-micro-v1": 0.07,
|
||||
"amazon/nova-pro-v1": 1.6,
|
||||
"anthracite-org/magnum-v2-72b": 1.5,
|
||||
"anthracite-org/magnum-v4-72b": 1.125,
|
||||
"anthropic/claude-2": 12.0,
|
||||
"anthropic/claude-2.0": 12.0,
|
||||
"anthropic/claude-2.0:beta": 12.0,
|
||||
"anthropic/claude-2.1": 12.0,
|
||||
"anthropic/claude-2.1:beta": 12.0,
|
||||
"anthropic/claude-2:beta": 12.0,
|
||||
"anthropic/claude-3-haiku": 0.625,
|
||||
"anthropic/claude-3-haiku:beta": 0.625,
|
||||
"anthropic/claude-3-opus": 37.5,
|
||||
"anthropic/claude-3-opus:beta": 37.5,
|
||||
"anthropic/claude-3-sonnet": 7.5,
|
||||
"anthropic/claude-3-sonnet:beta": 7.5,
|
||||
"anthropic/claude-3.5-haiku": 2.0,
|
||||
"anthropic/claude-3.5-haiku-20241022": 2.0,
|
||||
"anthropic/claude-3.5-haiku-20241022:beta": 2.0,
|
||||
"anthropic/claude-3.5-haiku:beta": 2.0,
|
||||
"anthropic/claude-3.5-sonnet": 7.5,
|
||||
"anthropic/claude-3.5-sonnet-20240620": 7.5,
|
||||
"anthropic/claude-3.5-sonnet-20240620:beta": 7.5,
|
||||
"anthropic/claude-3.5-sonnet:beta": 7.5,
|
||||
"cognitivecomputations/dolphin-mixtral-8x22b": 0.45,
|
||||
"cognitivecomputations/dolphin-mixtral-8x7b": 0.25,
|
||||
"cohere/command": 0.95,
|
||||
"cohere/command-r": 0.7125,
|
||||
"cohere/command-r-03-2024": 0.7125,
|
||||
"cohere/command-r-08-2024": 0.285,
|
||||
"cohere/command-r-plus": 7.125,
|
||||
"cohere/command-r-plus-04-2024": 7.125,
|
||||
"cohere/command-r-plus-08-2024": 4.75,
|
||||
"cohere/command-r7b-12-2024": 0.075,
|
||||
"databricks/dbrx-instruct": 0.6,
|
||||
"deepseek/deepseek-chat": 0.445,
|
||||
"deepseek/deepseek-chat-v2.5": 1.0,
|
||||
"deepseek/deepseek-chat:free": 0.0,
|
||||
"deepseek/deepseek-r1": 1.2,
|
||||
"deepseek/deepseek-r1-distill-llama-70b": 0.345,
|
||||
"deepseek/deepseek-r1-distill-llama-70b:free": 0.0,
|
||||
"deepseek/deepseek-r1-distill-llama-8b": 0.02,
|
||||
"deepseek/deepseek-r1-distill-qwen-1.5b": 0.09,
|
||||
"deepseek/deepseek-r1-distill-qwen-14b": 0.075,
|
||||
"deepseek/deepseek-r1-distill-qwen-32b": 0.09,
|
||||
"deepseek/deepseek-r1:free": 0.0,
|
||||
"eva-unit-01/eva-llama-3.33-70b": 3.0,
|
||||
"eva-unit-01/eva-qwen-2.5-32b": 1.7,
|
||||
"eva-unit-01/eva-qwen-2.5-72b": 3.0,
|
||||
"google/gemini-2.0-flash-001": 0.2,
|
||||
"google/gemini-2.0-flash-exp:free": 0.0,
|
||||
"google/gemini-2.0-flash-lite-preview-02-05:free": 0.0,
|
||||
"google/gemini-2.0-flash-thinking-exp-1219:free": 0.0,
|
||||
"google/gemini-2.0-flash-thinking-exp:free": 0.0,
|
||||
"google/gemini-2.0-pro-exp-02-05:free": 0.0,
|
||||
"google/gemini-exp-1206:free": 0.0,
|
||||
"google/gemini-flash-1.5": 0.15,
|
||||
"google/gemini-flash-1.5-8b": 0.075,
|
||||
"google/gemini-flash-1.5-8b-exp": 0.0,
|
||||
"google/gemini-pro": 0.75,
|
||||
"google/gemini-pro-1.5": 2.5,
|
||||
"google/gemini-pro-vision": 0.75,
|
||||
"google/gemma-2-27b-it": 0.135,
|
||||
"google/gemma-2-9b-it": 0.03,
|
||||
"google/gemma-2-9b-it:free": 0.0,
|
||||
"google/gemma-7b-it": 0.075,
|
||||
"google/learnlm-1.5-pro-experimental:free": 0.0,
|
||||
"google/palm-2-chat-bison": 1.0,
|
||||
"google/palm-2-chat-bison-32k": 1.0,
|
||||
"google/palm-2-codechat-bison": 1.0,
|
||||
"google/palm-2-codechat-bison-32k": 1.0,
|
||||
"gryphe/mythomax-l2-13b": 0.0325,
|
||||
"gryphe/mythomax-l2-13b:free": 0.0,
|
||||
"huggingfaceh4/zephyr-7b-beta:free": 0.0,
|
||||
"infermatic/mn-inferor-12b": 0.6,
|
||||
"inflection/inflection-3-pi": 5.0,
|
||||
"inflection/inflection-3-productivity": 5.0,
|
||||
"jondurbin/airoboros-l2-70b": 0.25,
|
||||
"liquid/lfm-3b": 0.01,
|
||||
"liquid/lfm-40b": 0.075,
|
||||
"liquid/lfm-7b": 0.005,
|
||||
"mancer/weaver": 1.125,
|
||||
"meta-llama/llama-2-13b-chat": 0.11,
|
||||
"meta-llama/llama-2-70b-chat": 0.45,
|
||||
"meta-llama/llama-3-70b-instruct": 0.2,
|
||||
"meta-llama/llama-3-8b-instruct": 0.03,
|
||||
"meta-llama/llama-3-8b-instruct:free": 0.0,
|
||||
"meta-llama/llama-3.1-405b": 1.0,
|
||||
"meta-llama/llama-3.1-405b-instruct": 0.4,
|
||||
"meta-llama/llama-3.1-70b-instruct": 0.15,
|
||||
"meta-llama/llama-3.1-8b-instruct": 0.025,
|
||||
"meta-llama/llama-3.2-11b-vision-instruct": 0.0275,
|
||||
"meta-llama/llama-3.2-11b-vision-instruct:free": 0.0,
|
||||
"meta-llama/llama-3.2-1b-instruct": 0.005,
|
||||
"meta-llama/llama-3.2-3b-instruct": 0.0125,
|
||||
"meta-llama/llama-3.2-90b-vision-instruct": 0.8,
|
||||
"meta-llama/llama-3.3-70b-instruct": 0.15,
|
||||
"meta-llama/llama-3.3-70b-instruct:free": 0.0,
|
||||
"meta-llama/llama-guard-2-8b": 0.1,
|
||||
"microsoft/phi-3-medium-128k-instruct": 0.5,
|
||||
"microsoft/phi-3-medium-128k-instruct:free": 0.0,
|
||||
"microsoft/phi-3-mini-128k-instruct": 0.05,
|
||||
"microsoft/phi-3-mini-128k-instruct:free": 0.0,
|
||||
"microsoft/phi-3.5-mini-128k-instruct": 0.05,
|
||||
"microsoft/phi-4": 0.07,
|
||||
"microsoft/wizardlm-2-7b": 0.035,
|
||||
"microsoft/wizardlm-2-8x22b": 0.25,
|
||||
"minimax/minimax-01": 0.55,
|
||||
"mistralai/codestral-2501": 0.45,
|
||||
"mistralai/codestral-mamba": 0.125,
|
||||
"mistralai/ministral-3b": 0.02,
|
||||
"mistralai/ministral-8b": 0.05,
|
||||
"mistralai/mistral-7b-instruct": 0.0275,
|
||||
"mistralai/mistral-7b-instruct-v0.1": 0.1,
|
||||
"mistralai/mistral-7b-instruct-v0.3": 0.0275,
|
||||
"mistralai/mistral-7b-instruct:free": 0.0,
|
||||
"mistralai/mistral-large": 3.0,
|
||||
"mistralai/mistral-large-2407": 3.0,
|
||||
"mistralai/mistral-large-2411": 3.0,
|
||||
"mistralai/mistral-medium": 4.05,
|
||||
"mistralai/mistral-nemo": 0.04,
|
||||
"mistralai/mistral-nemo:free": 0.0,
|
||||
"mistralai/mistral-small": 0.3,
|
||||
"mistralai/mistral-small-24b-instruct-2501": 0.07,
|
||||
"mistralai/mistral-small-24b-instruct-2501:free": 0.0,
|
||||
"mistralai/mistral-tiny": 0.125,
|
||||
"mistralai/mixtral-8x22b-instruct": 0.45,
|
||||
"mistralai/mixtral-8x7b": 0.3,
|
||||
"mistralai/mixtral-8x7b-instruct": 0.12,
|
||||
"mistralai/pixtral-12b": 0.05,
|
||||
"mistralai/pixtral-large-2411": 3.0,
|
||||
"neversleep/llama-3-lumimaid-70b": 2.25,
|
||||
"neversleep/llama-3-lumimaid-8b": 0.5625,
|
||||
"neversleep/llama-3-lumimaid-8b:extended": 0.5625,
|
||||
"neversleep/llama-3.1-lumimaid-70b": 2.25,
|
||||
"neversleep/llama-3.1-lumimaid-8b": 0.5625,
|
||||
"neversleep/noromaid-20b": 1.125,
|
||||
"nothingiisreal/mn-celeste-12b": 0.6,
|
||||
"nousresearch/hermes-2-pro-llama-3-8b": 0.02,
|
||||
"nousresearch/hermes-3-llama-3.1-405b": 0.4,
|
||||
"nousresearch/hermes-3-llama-3.1-70b": 0.15,
|
||||
"nousresearch/nous-hermes-2-mixtral-8x7b-dpo": 0.3,
|
||||
"nousresearch/nous-hermes-llama2-13b": 0.085,
|
||||
"nvidia/llama-3.1-nemotron-70b-instruct": 0.15,
|
||||
"nvidia/llama-3.1-nemotron-70b-instruct:free": 0.0,
|
||||
"openai/chatgpt-4o-latest": 7.5,
|
||||
"openai/gpt-3.5-turbo": 0.75,
|
||||
"openai/gpt-3.5-turbo-0125": 0.75,
|
||||
"openai/gpt-3.5-turbo-0613": 1.0,
|
||||
"openai/gpt-3.5-turbo-1106": 1.0,
|
||||
"openai/gpt-3.5-turbo-16k": 2.0,
|
||||
"openai/gpt-3.5-turbo-instruct": 1.0,
|
||||
"openai/gpt-4": 30.0,
|
||||
"openai/gpt-4-0314": 30.0,
|
||||
"openai/gpt-4-1106-preview": 15.0,
|
||||
"openai/gpt-4-32k": 60.0,
|
||||
"openai/gpt-4-32k-0314": 60.0,
|
||||
"openai/gpt-4-turbo": 15.0,
|
||||
"openai/gpt-4-turbo-preview": 15.0,
|
||||
"openai/gpt-4o": 5.0,
|
||||
"openai/gpt-4o-2024-05-13": 7.5,
|
||||
"openai/gpt-4o-2024-08-06": 5.0,
|
||||
"openai/gpt-4o-2024-11-20": 5.0,
|
||||
"openai/gpt-4o-mini": 0.3,
|
||||
"openai/gpt-4o-mini-2024-07-18": 0.3,
|
||||
"openai/gpt-4o:extended": 9.0,
|
||||
"openai/o1": 30.0,
|
||||
"openai/o1-mini": 2.2,
|
||||
"openai/o1-mini-2024-09-12": 2.2,
|
||||
"openai/o1-preview": 30.0,
|
||||
"openai/o1-preview-2024-09-12": 30.0,
|
||||
"openai/o3-mini": 2.2,
|
||||
"openai/o3-mini-high": 2.2,
|
||||
"openchat/openchat-7b": 0.0275,
|
||||
"openchat/openchat-7b:free": 0.0,
|
||||
"openrouter/auto": -500000.0,
|
||||
"perplexity/llama-3.1-sonar-huge-128k-online": 2.5,
|
||||
"perplexity/llama-3.1-sonar-large-128k-chat": 0.5,
|
||||
"perplexity/llama-3.1-sonar-large-128k-online": 0.5,
|
||||
"perplexity/llama-3.1-sonar-small-128k-chat": 0.1,
|
||||
"perplexity/llama-3.1-sonar-small-128k-online": 0.1,
|
||||
"perplexity/sonar": 0.5,
|
||||
"perplexity/sonar-reasoning": 2.5,
|
||||
"pygmalionai/mythalion-13b": 0.6,
|
||||
"qwen/qvq-72b-preview": 0.25,
|
||||
"qwen/qwen-2-72b-instruct": 0.45,
|
||||
"qwen/qwen-2-7b-instruct": 0.027,
|
||||
"qwen/qwen-2-7b-instruct:free": 0.0,
|
||||
"qwen/qwen-2-vl-72b-instruct": 0.2,
|
||||
"qwen/qwen-2-vl-7b-instruct": 0.05,
|
||||
"qwen/qwen-2.5-72b-instruct": 0.2,
|
||||
"qwen/qwen-2.5-7b-instruct": 0.025,
|
||||
"qwen/qwen-2.5-coder-32b-instruct": 0.08,
|
||||
"qwen/qwen-max": 3.2,
|
||||
"qwen/qwen-plus": 0.6,
|
||||
"qwen/qwen-turbo": 0.1,
|
||||
"qwen/qwen-vl-plus:free": 0.0,
|
||||
"qwen/qwen2.5-vl-72b-instruct:free": 0.0,
|
||||
"qwen/qwq-32b-preview": 0.09,
|
||||
"raifle/sorcererlm-8x22b": 2.25,
|
||||
"sao10k/fimbulvetr-11b-v2": 0.6,
|
||||
"sao10k/l3-euryale-70b": 0.4,
|
||||
"sao10k/l3-lunaris-8b": 0.03,
|
||||
"sao10k/l3.1-70b-hanami-x1": 1.5,
|
||||
"sao10k/l3.1-euryale-70b": 0.4,
|
||||
"sao10k/l3.3-euryale-70b": 0.4,
|
||||
"sophosympatheia/midnight-rose-70b": 0.4,
|
||||
"sophosympatheia/rogue-rose-103b-v0.2:free": 0.0,
|
||||
"teknium/openhermes-2.5-mistral-7b": 0.085,
|
||||
"thedrummer/rocinante-12b": 0.25,
|
||||
"thedrummer/unslopnemo-12b": 0.25,
|
||||
"undi95/remm-slerp-l2-13b": 0.6,
|
||||
"undi95/toppy-m-7b": 0.035,
|
||||
"undi95/toppy-m-7b:free": 0.0,
|
||||
"x-ai/grok-2-1212": 5.0,
|
||||
"x-ai/grok-2-vision-1212": 5.0,
|
||||
"x-ai/grok-beta": 7.5,
|
||||
"x-ai/grok-vision-beta": 7.5,
|
||||
"xwin-lm/xwin-lm-70b": 1.875,
|
||||
}
|
||||
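A hedged sketch of the pricing convention behind ModelRatio, inferred from the inline comments above (the constants are assumptions, not code from this PR): a ratio of 1 corresponds to $0.002 per 1K tokens, and an entry written as `X * RMB` prices a model at ¥X per 1K tokens, which is why ¥0.02/1K appears as 1.4286 at roughly ¥7 per $1.

```go
// Standalone sketch of the ratio arithmetic; values below are assumptions
// derived from the comments in the map ("davinci-002": 1 // $0.002 / 1K tokens,
// "o1-mini": 1.5 // $3.00 / 1M input tokens).
package ratio

const usdPer1KTokensAtRatio1 = 0.002 // assumption: ratio 1.0 ≡ $0.002 per 1K tokens

// PromptCostUSD estimates the input-token cost for a model with the given ratio.
func PromptCostUSD(modelRatio float64, promptTokens int) float64 {
	return modelRatio * usdPer1KTokensAtRatio1 * float64(promptTokens) / 1000.0
}

// Example: PromptCostUSD(1.5, 1_000_000) == 3.0, matching the
// `"o1-mini": 1.5` entry commented as $3.00 / 1M input tokens.
```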
|
||||
var CompletionRatio = map[string]float64{
|
||||
|
||||
sdk/api/channel.go (new file, 186 lines)
@@ -0,0 +1,186 @@
|
||||
package sdk
|
||||
|
||||
import (
|
||||
"encoding/json"
|
||||
"fmt"
|
||||
"strings"
|
||||
)
|
||||
|
||||
type Channel struct {
|
||||
ID int `json:"id"`
|
||||
Type int `json:"type"`
|
||||
Key string `json:"key"`
|
||||
Status int `json:"status"`
|
||||
Name string `json:"name"`
|
||||
Weight int `json:"weight"`
|
||||
CreatedTime int `json:"created_time"`
|
||||
TestTime int `json:"test_time"`
|
||||
ResponseTime int `json:"response_time"`
|
||||
BaseUrl string `json:"base_url"`
|
||||
Other string `json:"other"`
|
||||
Balance int `json:"balance"`
|
||||
BalanceUpdatedTime int `json:"balance_updated_time"`
|
||||
Models string `json:"models"`
|
||||
Group string `json:"group"`
|
||||
UsedQuota int `json:"used_quota"`
|
||||
ModelMapping string `json:"model_mapping"`
|
||||
Priority int `json:"priority"`
|
||||
Config string `json:"config"`
|
||||
SystemPrompt string `json:"system_prompt"`
|
||||
	ChannelConfig      ChannelConfig `json:"channel_config"`
|
||||
}
|
||||
|
||||
type ChannelConfig struct {
|
||||
Region string `json:"region"`
|
||||
Sk string `json:"sk"`
|
||||
Ak string `json:"ak"`
|
||||
UserId string `json:"user_id"`
|
||||
VertexAiProjectId string `json:"vertex_ai_project_id"`
|
||||
VertexAiAdc string `json:"vertex_ai_adc"`
|
||||
}
|
||||
type NewChannel struct {
|
||||
BaseUrl string `json:"base_url"`
|
||||
Config string `json:"config"`
|
||||
Group string `json:"group"`
|
||||
Groups []string `json:"groups"`
|
||||
Key string `json:"key"`
|
||||
ModelMapping string `json:"model_mapping"`
|
||||
Models string `json:"models"`
|
||||
Name string `json:"name"`
|
||||
Other string `json:"other"`
|
||||
SystemPrompt string `json:"system_prompt"`
|
||||
Type int `json:"type"`
|
||||
}
|
||||
|
||||
type Channels struct {
|
||||
Channels []*Channel
|
||||
Query map[string]string
|
||||
}
|
||||
|
||||
type ChannelRespData struct {
|
||||
Data interface{} `json:"data"`
|
||||
Message string `json:"message"`
|
||||
Success bool `json:"success"`
|
||||
}
|
||||
|
||||
type ChannelResp struct {
|
||||
Message string `json:"message"`
|
||||
ModelName string `json:"modelName"`
|
||||
Success bool `json:"success"`
|
||||
Time float64 `json:"time"`
|
||||
}
|
||||
|
||||
type ChannelImpl interface {
|
||||
Add(channel *Channel) error
|
||||
Get(id int) error
|
||||
Update(channel *Channel) error
|
||||
Delete(id int) error
|
||||
Test() error
|
||||
}
|
||||
|
||||
// list channel
|
||||
func (channels *Channels) List(client *OneClient) error {
|
||||
if channels.Query != nil {
|
||||
client.Url = "/api/channel/search?"
|
||||
for k, v := range channels.Query {
|
||||
client.Url += k + "=" + v + "&"
|
||||
}
|
||||
client.Url += "p=0&order="
|
||||
} else {
|
||||
client.Url = "/api/channel/?p=0&order="
|
||||
}
|
||||
resp, err := client.get()
|
||||
if err != nil {
|
||||
return err
|
||||
}
|
||||
defer resp.Body.Close()
|
||||
data := ChannelRespData{Data: []*Channel{}, Message: "", Success: false}
|
||||
if err := json.NewDecoder(resp.Body).Decode(&data); err != nil {
|
||||
return err
|
||||
}
|
||||
for _, v := range data.Data.([]interface{}) {
|
||||
channel := &Channel{}
|
||||
channelData, _ := json.Marshal(v)
|
||||
err = json.Unmarshal(channelData, channel)
|
||||
if err != nil {
|
||||
return err
|
||||
}
|
||||
channels.Channels = append(channels.Channels, channel)
|
||||
}
|
||||
return nil
|
||||
}
|
||||
|
||||
// add channel
|
||||
func (channel *Channel) Add(client *OneClient) error {
|
||||
client.Url = "/api/channel/"
|
||||
	channelConfigData, err := json.Marshal(channel.ChannelConfig)
	if err != nil {
		return err
	}
	newChannel := NewChannel{
|
||||
BaseUrl: channel.BaseUrl,
|
||||
Config: string(channelConfigData),
|
||||
Group: channel.Group,
|
||||
Groups: []string{channel.Group},
|
||||
Key: channel.Key,
|
||||
ModelMapping: channel.ModelMapping,
|
||||
Models: channel.Models,
|
||||
Name: channel.Name,
|
||||
Other: channel.Other,
|
||||
SystemPrompt: channel.SystemPrompt,
|
||||
Type: channel.Type,
|
||||
}
|
||||
data, err := json.Marshal(newChannel)
|
||||
if err != nil {
|
||||
return err
|
||||
}
|
||||
return client.post(data)
|
||||
}
|
||||
|
||||
// update channel
|
||||
func (channel *Channel) Update(client *OneClient) error {
|
||||
client.Url = "/api/channel/"
|
||||
data, err := json.Marshal(channel)
|
||||
if err != nil {
|
||||
return err
|
||||
}
|
||||
return client.put(data)
|
||||
}
|
||||
|
||||
// delete channel
|
||||
func (channel *Channel) Delete(client *OneClient) error {
|
||||
client.Url = "/api/channel/" + fmt.Sprintf("%d", channel.ID)
|
||||
return client.delete(nil)
|
||||
}
|
||||
|
||||
// get channel
|
||||
func (channel *Channel) Get(client *OneClient) error {
|
||||
client.Url = "/api/channel/" + fmt.Sprintf("%d", channel.ID) + "/"
|
||||
resp, err := client.get()
|
||||
if err != nil {
|
||||
return err
|
||||
}
|
||||
defer resp.Body.Close()
|
||||
data := ChannelRespData{Data: channel, Message: "", Success: false}
|
||||
if err := json.NewDecoder(resp.Body).Decode(&data); err != nil {
|
||||
return err
|
||||
}
|
||||
channel = data.Data.(*Channel)
|
||||
return nil
|
||||
}
|
||||
|
||||
// test channel
|
||||
func (channel *Channel) Test(client *OneClient) error {
|
||||
client.Url = "/api/channel/test/" + fmt.Sprintf("%d", channel.ID) + "/?model=" + strings.Split(channel.Models, ",")[0]
|
||||
resp, err := client.get()
|
||||
if err != nil {
|
||||
return err
|
||||
}
|
||||
defer resp.Body.Close()
|
||||
data := ChannelResp{Message: "", ModelName: "", Success: false, Time: 0}
|
||||
if err := json.NewDecoder(resp.Body).Decode(&data); err != nil {
|
||||
return err
|
||||
}
|
||||
if data.Success {
|
||||
return nil
|
||||
} else {
|
||||
return fmt.Errorf("test channel failed: %s", data.Message)
|
||||
}
|
||||
}
|
||||
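In `Add` above, the typed `ChannelConfig` is marshaled into the plain `Config` string that the channel API expects, so a caller only fills the struct. A hedged sketch of creating a channel this way, reusing the `onesdk` import alias and client setup from `sdk/main.go`; the channel name, type id, credentials, and model name are placeholders:

```go
package main

import (
	"log"

	onesdk "github.com/songquanpeng/one-api/sdk/api"
)

func main() {
	client := onesdk.OneClient{Config: &onesdk.Config{
		Host: "http://127.0.0.1", Port: 3000, Key: "<root access token>", // placeholders
	}}

	ch := onesdk.Channel{
		Name:   "aws-claude-demo",            // hypothetical channel
		Type:   1,                            // assumption: numeric adapter type id expected by /api/channel/
		Key:    "<upstream api key>",         // placeholder credential
		Group:  "default",
		Models: "claude-3-5-sonnet-20240620", // hypothetical model list
		ChannelConfig: onesdk.ChannelConfig{ // marshaled into the Config string by Add
			Region: "us-east-1",
			Ak:     "<access key>",
			Sk:     "<secret key>",
		},
	}
	if err := ch.Add(&client); err != nil {
		log.Fatal("add channel:", err)
	}
}
```

Since `Add` does not return the new channel's id, use `Channels.List` afterwards to look it up before calling `Get`, `Update`, `Delete`, or `Test`.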
sdk/api/client.go (new file, 116 lines)
@@ -0,0 +1,116 @@
|
||||
package sdk
|
||||
|
||||
import (
|
||||
"bytes"
|
||||
"encoding/json"
|
||||
"fmt"
|
||||
"net/http"
|
||||
"strconv"
|
||||
)
|
||||
|
||||
type Config struct {
|
||||
Host string `json:"host"`
|
||||
Port int `json:"port"`
|
||||
Key string `json:"key"`
|
||||
}
|
||||
|
||||
type OneClient struct {
|
||||
Client *http.Client
|
||||
Config *Config
|
||||
Url string
|
||||
}
|
||||
|
||||
type RespMessage struct {
|
||||
Message string `json:"message"`
|
||||
Success bool `json:"success"`
|
||||
}
|
||||
|
||||
// get
|
||||
func (OneClient *OneClient) get() (*http.Response, error) {
|
||||
OneClient.Client = &http.Client{}
|
||||
port := strconv.Itoa(OneClient.Config.Port)
|
||||
url := OneClient.Config.Host + ":" + port + OneClient.Url
|
||||
req, err := http.NewRequest("GET", url, nil)
|
||||
if err != nil {
|
||||
return nil, err
|
||||
}
|
||||
req.Header.Set("Content-Type", "application/json")
|
||||
req.Header.Set("Authorization", "Bearer "+OneClient.Config.Key)
|
||||
resp, err := OneClient.Client.Do(req)
|
||||
if err != nil {
|
||||
return nil, err
|
||||
}
|
||||
return resp, nil
|
||||
}
|
||||
|
||||
// post
|
||||
func (OneClient *OneClient) post(data []byte) error {
|
||||
OneClient.Client = &http.Client{}
|
||||
port := strconv.Itoa(OneClient.Config.Port)
|
||||
url := OneClient.Config.Host + ":" + port + OneClient.Url
|
||||
payload := bytes.NewBuffer(data)
|
||||
req, err := http.NewRequest("POST", url, payload)
|
||||
req.Header.Set("Content-Type", "application/json")
|
||||
req.Header.Set("Authorization", "Bearer "+OneClient.Config.Key)
|
||||
resp, err := OneClient.Client.Do(req)
|
||||
if err != nil {
|
||||
return err
|
||||
}
|
||||
defer resp.Body.Close()
|
||||
message := RespMessage{}
|
||||
if err := json.NewDecoder(resp.Body).Decode(&message); err != nil {
|
||||
return err
|
||||
}
|
||||
if message.Success {
|
||||
return nil
|
||||
}
|
||||
return fmt.Errorf("create user failed: %s", message.Message)
|
||||
}
|
||||
|
||||
// put
|
||||
func (OneClient *OneClient) put(data []byte) error {
|
||||
OneClient.Client = &http.Client{}
|
||||
port := strconv.Itoa(OneClient.Config.Port)
|
||||
url := OneClient.Config.Host + ":" + port + OneClient.Url
|
||||
payload := bytes.NewBuffer(data)
|
||||
req, err := http.NewRequest("PUT", url, payload)
|
||||
req.Header.Set("Content-Type", "application/json")
|
||||
req.Header.Set("Authorization", "Bearer "+OneClient.Config.Key)
|
||||
resp, err := OneClient.Client.Do(req)
|
||||
if err != nil {
|
||||
return err
|
||||
}
|
||||
defer resp.Body.Close()
|
||||
message := RespMessage{}
|
||||
if err := json.NewDecoder(resp.Body).Decode(&message); err != nil {
|
||||
return err
|
||||
}
|
||||
if message.Success {
|
||||
return nil
|
||||
}
|
||||
return fmt.Errorf("update user failed: %s", message.Message)
|
||||
}
|
||||
|
||||
// delete
|
||||
func (OneClient *OneClient) delete(data []byte) error {
|
||||
OneClient.Client = &http.Client{}
|
||||
port := strconv.Itoa(OneClient.Config.Port)
|
||||
url := OneClient.Config.Host + ":" + port + OneClient.Url
|
||||
payload := bytes.NewBuffer(data)
|
||||
req, err := http.NewRequest("DELETE", url, payload)
|
||||
req.Header.Set("Content-Type", "application/json")
|
||||
req.Header.Set("Authorization", "Bearer "+OneClient.Config.Key)
|
||||
resp, err := OneClient.Client.Do(req)
|
||||
if err != nil {
|
||||
return err
|
||||
}
|
||||
defer resp.Body.Close()
|
||||
message := RespMessage{}
|
||||
if err := json.NewDecoder(resp.Body).Decode(&message); err != nil {
|
||||
return err
|
||||
}
|
||||
if message.Success {
|
||||
return nil
|
||||
}
|
||||
return fmt.Errorf("delete user failed: %s", message.Message)
|
||||
}
|
||||
sdk/api/log.go (new file, 91 lines)
@@ -0,0 +1,91 @@
|
||||
package sdk
|
||||
|
||||
import (
|
||||
"encoding/json"
|
||||
"time"
|
||||
)
|
||||
|
||||
// get log url like http://172.18.2.63:8300/api/log/?p=0&type=0&username=&token_name=&model_name=&start_timestamp=0&end_timestamp=1745237472&channel=
|
||||
// define log struct :{
|
||||
// "id": 349,
|
||||
// "user_id": 21,
|
||||
// "created_at": 1745206602,
|
||||
// "type": 3,
|
||||
// "content": "管理员将用户额度从 $0.000000 额度修改为 $1000.000000 额度",
|
||||
// "username": "test1",
|
||||
// "token_name": "",
|
||||
// "model_name": "",
|
||||
// "quota": 0,
|
||||
// "prompt_tokens": 0,
|
||||
// "completion_tokens": 0,
|
||||
// "channel": 0,
|
||||
// "request_id": "2025042111364245599153931550114",
|
||||
// "elapsed_time": 0,
|
||||
// "is_stream": false,
|
||||
// "system_prompt_reset": false
|
||||
// },
|
||||
|
||||
type Log struct {
|
||||
ID int `json:"id"`
|
||||
UserID int `json:"user_id"`
|
||||
CreatedAt int `json:"created_at"`
|
||||
Type int `json:"type"`
|
||||
Content string `json:"content"`
|
||||
Username string `json:"username"`
|
||||
TokenName string `json:"token_name"`
|
||||
ModelName string `json:"model_name"`
|
||||
Quota int `json:"quota"`
|
||||
PromptTokens int `json:"prompt_tokens"`
|
||||
CompletionTokens int `json:"completion_tokens"`
|
||||
Channel int `json:"channel"`
|
||||
RequestID string `json:"request_id"`
|
||||
ElapsedTime int `json:"elapsed_time"`
|
||||
IsStream bool `json:"is_stream"`
|
||||
SystemPromptReset bool `json:"system_prompt_reset"`
|
||||
}
|
||||
|
||||
type Logs struct {
|
||||
Logs []*Log
|
||||
Query map[string]string
|
||||
}
|
||||
|
||||
type Logsimpl interface {
|
||||
Get(client *OneClient) error
|
||||
}
|
||||
|
||||
type LogRespData struct {
|
||||
Data interface{} `json:"data"`
|
||||
Message string `json:"message"`
|
||||
Success bool `json:"success"`
|
||||
}
|
||||
|
||||
// get log
|
||||
func (logs *Logs) Get(client *OneClient) error {
|
||||
client.Url = "/api/log/?"
|
||||
if logs.Query != nil {
|
||||
for k, v := range logs.Query {
|
||||
client.Url += k + "=" + v + "&"
|
||||
}
|
||||
} else {
|
||||
client.Url = "/api/log/?p=0&type=0&username=&token_name=&model_name=&start_timestamp=0&end_timestamp=" + time.Now().String() + "&channel="
|
||||
}
|
||||
resp, err := client.get()
|
||||
if err != nil {
|
||||
return err
|
||||
}
|
||||
defer resp.Body.Close()
|
||||
data := LogRespData{Data: []*Log{}, Message: "", Success: false}
|
||||
if err := json.NewDecoder(resp.Body).Decode(&data); err != nil {
|
||||
return err
|
||||
}
|
||||
for _, v := range data.Data.([]interface{}) {
|
||||
log := &Log{}
|
||||
logData, _ := json.Marshal(v)
|
||||
err = json.Unmarshal(logData, log)
|
||||
if err != nil {
|
||||
return err
|
||||
}
|
||||
logs.Logs = append(logs.Logs, log)
|
||||
}
|
||||
return nil
|
||||
}
|
||||
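A hedged usage sketch of the log query above (import alias and client setup as in `sdk/main.go`; the query keys mirror the documented URL, and the username filter is hypothetical):

```go
package main

import (
	"fmt"
	"log"
	"strconv"
	"time"

	onesdk "github.com/songquanpeng/one-api/sdk/api"
)

func main() {
	client := onesdk.OneClient{Config: &onesdk.Config{
		Host: "http://127.0.0.1", Port: 3000, Key: "<root access token>", // placeholders
	}}

	logs := onesdk.Logs{Query: map[string]string{
		"p":               "0",
		"type":            "0",
		"username":        "test1", // filter keys taken from the documented URL above
		"start_timestamp": "0",
		"end_timestamp":   strconv.FormatInt(time.Now().Unix(), 10),
	}}
	if err := logs.Get(&client); err != nil {
		log.Fatal(err)
	}
	for _, l := range logs.Logs {
		fmt.Println(l.CreatedAt, l.Username, l.ModelName, l.Quota)
	}
}
```

With a nil `Query`, `Get` falls back to a default window ending at the current time.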
sdk/api/token.go (new file, 131 lines)
@@ -0,0 +1,131 @@
|
||||
package sdk
|
||||
|
||||
import (
|
||||
"encoding/json"
|
||||
"strconv"
|
||||
)
|
||||
|
||||
type Token struct {
|
||||
ID int `json:"id"`
|
||||
UserID int `json:"user_id"`
|
||||
Key string `json:"key"`
|
||||
Status int `json:"status"`
|
||||
Name string `json:"name"`
|
||||
CreatedTime int `json:"created_time"`
|
||||
AccessedTime int `json:"accessed_time"`
|
||||
ExpiredTime int `json:"expired_time"`
|
||||
RemainQuota int `json:"remain_quota"`
|
||||
UnlimitedQuota bool `json:"unlimited_quota"`
|
||||
UsedQuota int `json:"used_quota"`
|
||||
Models string `json:"models"`
|
||||
Subnet string `json:"subnet"`
|
||||
}
|
||||
|
||||
// define add token function
|
||||
type Tokenimpl interface {
|
||||
Add(token *Token) error
|
||||
List(id int) error
|
||||
Update(token *Token) error
|
||||
Delete(id int) error
|
||||
}
|
||||
|
||||
type Tokens struct {
|
||||
Tokens []*Token
|
||||
UserID int
|
||||
Query map[string]string
|
||||
}
|
||||
|
||||
type TokensImpl interface {
|
||||
List(token *Token) error
|
||||
}
|
||||
|
||||
type TokenRespData struct {
|
||||
Data interface{} `json:"data"`
|
||||
}
|
||||
|
||||
// add token
|
||||
func (token *Token) Add(client *OneClient) error {
|
||||
client.Url = "/api/token/"
|
||||
data, err := json.Marshal(token)
|
||||
if err != nil {
|
||||
return err
|
||||
}
|
||||
return client.post(data)
|
||||
}
|
||||
|
||||
// update token
|
||||
func (token *Token) Update(client *OneClient) error {
|
||||
client.Url = "/api/token/"
|
||||
data, err := json.Marshal(token)
|
||||
if err != nil {
|
||||
return err
|
||||
}
|
||||
return client.put(data)
|
||||
}
|
||||
|
||||
// list token
|
||||
func (tokens *Tokens) List(client *OneClient) error {
|
||||
if tokens.UserID != 0 {
|
||||
if tokens.Query != nil {
|
||||
client.Url = "/api/token/search?user_id=" + strconv.Itoa(tokens.UserID)
|
||||
for k, v := range tokens.Query {
|
||||
client.Url += "&" + k + "=" + v
|
||||
}
|
||||
client.Url += "&p=0&order="
|
||||
} else {
|
||||
client.Url = "/api/token/?user_id=" + strconv.Itoa(tokens.UserID) + "&p=0&order="
|
||||
}
|
||||
} else {
|
||||
if tokens.Query != nil {
|
||||
client.Url = "/api/token/search?p=0"
|
||||
for k, v := range tokens.Query {
|
||||
client.Url += "&" + k + "=" + v
|
||||
}
|
||||
} else {
|
||||
client.Url = "/api/token/?p=0"
|
||||
}
|
||||
client.Url += "&order="
|
||||
}
|
||||
resp, err := client.get()
|
||||
if err != nil {
|
||||
return err
|
||||
}
|
||||
defer resp.Body.Close()
|
||||
data := TokenRespData{Data: []*Token{}}
|
||||
if err := json.NewDecoder(resp.Body).Decode(&data); err != nil {
|
||||
return err
|
||||
}
|
||||
for _, v := range data.Data.([]interface{}) {
|
||||
token := &Token{}
|
||||
tokenData, _ := json.Marshal(v)
|
||||
err = json.Unmarshal(tokenData, token)
|
||||
if err != nil {
|
||||
return err
|
||||
}
|
||||
tokens.Tokens = append(tokens.Tokens, token)
|
||||
}
|
||||
return nil
|
||||
}
|
||||
|
||||
// delete token
|
||||
func (token *Token) Delete(client *OneClient) error {
|
||||
client.Url = "/api/token/" + strconv.Itoa(token.ID) + "/" + "?user_id=" + strconv.Itoa(token.UserID)
|
||||
return client.delete(nil)
|
||||
}
|
||||
|
||||
// get token
|
||||
func (token *Token) Get(client *OneClient) error {
|
||||
client.Url = "/api/token/" + strconv.Itoa(token.ID) + "/" + "?user_id=" + strconv.Itoa(token.UserID)
|
||||
resp, err := client.get()
|
||||
if err != nil {
|
||||
return err
|
||||
}
|
||||
defer resp.Body.Close()
|
||||
data := TokenRespData{Data: token}
|
||||
if err := json.NewDecoder(resp.Body).Decode(&data); err != nil {
|
||||
return err
|
||||
}
|
||||
tokenData, _ := json.Marshal(data.Data)
|
||||
	err = json.Unmarshal(tokenData, token)
	if err != nil {
		return err
	}
	return nil
|
||||
}
|
||||
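`Tokens.List`, `Token.Get`, and `Token.Delete` append the same `user_id` query string handled by the controller change shown earlier, so a root key can manage another user's tokens end to end. A hedged sketch (alias and client setup as in `sdk/main.go`; the user id and expiry are placeholders):

```go
package main

import (
	"fmt"
	"log"

	onesdk "github.com/songquanpeng/one-api/sdk/api"
)

func main() {
	client := onesdk.OneClient{Config: &onesdk.Config{
		Host: "http://127.0.0.1", Port: 3000, Key: "<root access token>", // placeholders
	}}

	// Create a token for user 21 (root only; non-root callers get their own id).
	token := onesdk.Token{
		UserID:         21,
		Name:           "demo",
		UnlimitedQuota: true,
		ExpiredTime:    -1, // assumption: -1 means the token never expires
	}
	if err := token.Add(&client); err != nil {
		log.Fatal("add token:", err)
	}

	// List user 21's tokens via GET /api/token/?user_id=21&p=0&order=
	tokens := onesdk.Tokens{UserID: 21}
	if err := tokens.List(&client); err != nil {
		log.Fatal("list tokens:", err)
	}
	for _, t := range tokens.Tokens {
		fmt.Println(t.ID, t.Name, t.RemainQuota, t.UnlimitedQuota)
	}
}
```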
sdk/api/user.go (new file, 134 lines)
@@ -0,0 +1,134 @@
|
||||
|
||||
|
||||
// description: User API
|
||||
// use httprequest to get user list and user info
|
||||
package sdk
|
||||
|
||||
import (
|
||||
"encoding/json"
|
||||
"fmt"
|
||||
)
|
||||
|
||||
type User struct {
|
||||
ID int `json:"id"`
|
||||
Username string `json:"username"`
|
||||
Password string `json:"password"`
|
||||
DisplayName string `json:"display_name"`
|
||||
Role int `json:"role"`
|
||||
Status int `json:"status"`
|
||||
Email string `json:"email"`
|
||||
GithubID string `json:"github_id"`
|
||||
WechatID string `json:"wechat_id"`
|
||||
LarkID string `json:"lark_id"`
|
||||
OidcID string `json:"oidc_id"`
|
||||
VerificationCode string `json:"verification_code"`
|
||||
AccessToken string `json:"access_token"`
|
||||
Quota int `json:"quota"`
|
||||
UsedQuota int `json:"used_quota"`
|
||||
RequestCount int `json:"request_count"`
|
||||
Group string `json:"group"`
|
||||
AffCode string `json:"aff_code"`
|
||||
InviterID int `json:"inviter_id"`
|
||||
}
|
||||
|
||||
type UserRespData struct {
|
||||
Data interface{} `json:"data"`
|
||||
}
|
||||
|
||||
type UserImpl interface {
|
||||
Add(user *User) error
|
||||
Get(id int) error
|
||||
	Update(user *User) error
|
||||
Delete(id int) error
|
||||
}
|
||||
|
||||
type Users struct {
|
||||
Users []*User
|
||||
Query map[string]string
|
||||
}
|
||||
|
||||
// list user
|
||||
func (users *Users) List(client *OneClient) error {
|
||||
if users.Query != nil {
|
||||
client.Url = "/api/user/search?"
|
||||
for k, v := range users.Query {
|
||||
client.Url += k + "=" + v + "&"
|
||||
}
|
||||
client.Url += "p=0&order="
|
||||
} else {
|
||||
client.Url = "/api/user/?p=0&order="
|
||||
}
|
||||
resp, err := client.get()
|
||||
if err != nil {
|
||||
return err
|
||||
}
|
||||
defer resp.Body.Close()
|
||||
data := UserRespData{Data: []*User{}}
|
||||
if err := json.NewDecoder(resp.Body).Decode(&data); err != nil {
|
||||
return err
|
||||
}
|
||||
for _, v := range data.Data.([]interface{}) {
|
||||
user := &User{}
|
||||
userData, _ := json.Marshal(v)
|
||||
err = json.Unmarshal(userData, user)
|
||||
if err != nil {
|
||||
return err
|
||||
}
|
||||
users.Users = append(users.Users, user)
|
||||
}
|
||||
return nil
|
||||
}
|
||||
|
||||
// add user
|
||||
func (user *User) Add(client *OneClient) error {
|
||||
client.Url = "/api/user/"
|
||||
data, err := json.Marshal(user)
|
||||
if err != nil {
|
||||
return err
|
||||
}
|
||||
return client.post(data)
|
||||
}
|
||||
|
||||
// delete user
|
||||
func (user *User) Delete(client *OneClient) error {
|
||||
client.Url = "/api/user/manage"
|
||||
deleteData := map[string]interface{}{
|
||||
"username": user.Username,
|
||||
"action": "delete",
|
||||
}
|
||||
data, err := json.Marshal(deleteData)
|
||||
if err != nil {
|
||||
return err
|
||||
}
|
||||
return client.post(data)
|
||||
}
|
||||
|
||||
// update user
|
||||
func (user *User) Update(client *OneClient) error {
|
||||
client.Url = "/api/user"
|
||||
data, err := json.Marshal(user)
|
||||
if err != nil {
|
||||
return err
|
||||
}
|
||||
return client.put(data)
|
||||
}
|
||||
|
||||
// get user
|
||||
func (user *User) Get(client *OneClient) error {
|
||||
client.Url = "/api/user/" + fmt.Sprintf("%d", user.ID)
|
||||
resp, err := client.get()
|
||||
if err != nil {
|
||||
return err
|
||||
}
|
||||
defer resp.Body.Close()
|
||||
data := UserRespData{Data: user}
|
||||
if err := json.NewDecoder(resp.Body).Decode(&data); err != nil {
|
||||
return err
|
||||
}
|
||||
userData, _ := json.Marshal(data.Data)
|
||||
err = json.Unmarshal(userData, user)
|
||||
if err != nil {
|
||||
return err
|
||||
}
|
||||
return nil
|
||||
}
|
||||
sdk/chat/chat.go (new file, 126 lines)
@@ -0,0 +1,126 @@
package chat

import (
	"bufio"
	"bytes"
	"context"
	"encoding/json"
	"fmt"
	"io"
	"log"
	"net/http"
	"os"
	"strings"
)

// ChatCompletionRequest defines the request structure
type ChatCompletionRequest struct {
	Model    string    `json:"model"`
	Messages []Message `json:"messages"`
	Stream   bool      `json:"stream"`
}

// Message defines the message structure
type Message struct {
	Role    string `json:"role"`
	Content string `json:"content"`
}

// ChatCompletionChunk defines the chunk structure for streaming responses
type ChatCompletionChunk struct {
	Choices []Choice `json:"choices"`
}

// Choice defines the choice structure
type Choice struct {
	Delta struct {
		Content string `json:"content"`
	} `json:"delta"`
}

// Config defines the OpenAI-compatible API configuration
type Config struct {
	Host string `json:"host"`
	Port int    `json:"port"`
	Key  string `json:"key"`
}

// Chatting runs an interactive console chat loop: it reads prompts from stdin,
// streams each assistant reply, and keeps the conversation history in memory.
func Chatting(config Config) {
	apiKey := config.Key
	messages := []Message{}

	scanner := bufio.NewScanner(os.Stdin)
	for {
		fmt.Print("Enter your message: ")
		if !scanner.Scan() {
			break
		}
		userInput := scanner.Text()
		messages = append(messages, Message{Role: "user", Content: userInput})

		request := ChatCompletionRequest{
			Model:    "ERNIE-3.5-8K",
			Messages: messages,
			Stream:   true,
		}

		requestBody, err := json.Marshal(request)
		if err != nil {
			log.Fatalf("Error encoding JSON request body: %v", err)
		}

		url := fmt.Sprintf("%s:%d/v1/chat/completions", config.Host, config.Port)
		req, err := http.NewRequestWithContext(context.Background(), "POST", url, bytes.NewBuffer(requestBody))
		if err != nil {
			log.Fatalf("Error creating HTTP request: %v", err)
		}

		req.Header.Set("Content-Type", "application/json")
		req.Header.Set("Authorization", "Bearer "+apiKey)

		client := &http.Client{}
		resp, err := client.Do(req)
		if err != nil {
			log.Fatalf("Error sending HTTP request: %v", err)
		}

		if resp.StatusCode != http.StatusOK {
			body, _ := io.ReadAll(resp.Body)
			log.Fatalf("Request failed, status code: %d, response content: %s", resp.StatusCode, string(body))
		}

		reader := bufio.NewReader(resp.Body)
		totalResponse := ""
		for {
			line, err := reader.ReadBytes('\n')
			if err != nil {
				if err == io.EOF {
					break
				}
				log.Fatalf("Error reading response line: %v", err)
			}

			if len(line) > 6 && string(line[:6]) == "data: " {
				line = line[6:]
				if strings.Contains(string(line), "[DONE]") {
					break
				}

				var chunk ChatCompletionChunk
				err = json.Unmarshal(line, &chunk)
				if err != nil {
					log.Printf("Error parsing JSON chunk: %v, data: %s", err, string(line))
					continue
				}

				for _, choice := range chunk.Choices {
					fmt.Print(choice.Delta.Content)
					totalResponse += choice.Delta.Content
				}
			}
		}
		// Close the body explicitly here: a defer inside this loop would not run
		// until the whole function returns.
		resp.Body.Close()
		messages = append(messages, Message{Role: "assistant", Content: totalResponse})
		fmt.Println()
	}
}
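For reference, a minimal sketch of invoking this streaming console helper. It assumes the module path used by the other SDK files in this diff; the host, port, and key are placeholders for a reachable one-api deployment that can serve ERNIE-3.5-8K.

```
package main

import "github.com/songquanpeng/one-api/sdk/chat"

func main() {
	// Chatting reads prompts from stdin and streams assistant replies to stdout
	// until stdin is closed; all values below are placeholders.
	chat.Chatting(chat.Config{
		Host: "http://127.0.0.1",
		Port: 3000,
		Key:  "<api-token>",
	})
}
```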
259  sdk/main.go  Normal file
@@ -0,0 +1,259 @@
package main

import (
	"fmt"
	"log"
	"strconv"

	"github.com/songquanpeng/one-api/model"
	onesdk "github.com/songquanpeng/one-api/sdk/api"
)

func main() {
	// Test configuration; replace with a real host, port, and root system access token.
	config := onesdk.Config{
		Host: "http://127.0.0.1",
		Port: 3000,
		Key:  "123456789012345678901234567890",
	}
	client := onesdk.OneClient{
		Config: &config,
	}

	// User API usage test
	// Add a user
	user := onesdk.User{
		Username:    "user1",
		DisplayName: "user1",
		Password:    "user1@123_%6",
	}
	err := user.Add(&client)
	if err != nil {
		log.Fatal("add user err=", err)
	}
	fmt.Println("add user:", user, "success!")
	// Search users
	users := onesdk.Users{}
	// keyword is matched fuzzily against username, display name, email, and phone number
	users.Query = map[string]string{
		"keyword": "user1",
	}
	err = users.List(&client)
	if err != nil {
		log.Fatal("list user err=", err)
	}
	tmpUser := onesdk.User{}
	for i, u := range users.Users {
		// Skip deleted users
		if u.Status == model.UserStatusDeleted {
			continue
		}
		fmt.Println("user["+strconv.Itoa(i)+"]:", *u)
		if u.Username == "user1" {
			tmpUser = *u
		}
	}
	fmt.Println("list user success!")
	// Get a single user
	user = onesdk.User{}
	user.ID = tmpUser.ID
	err = user.Get(&client)
	if err != nil {
		log.Fatal("get user err=", err)
	}
	fmt.Println("get user:", user, "success!")
	// Update the user
	user.Quota = 500000000
	err = user.Update(&client)
	if err != nil {
		log.Fatal("update user err=", err)
	}
	fmt.Println("update user:", user, "success!\r\n")

	// Channel API usage test
	channel := onesdk.Channel{
		Name: "ch1",
		ChannelConfig: onesdk.ChannelConfig{
			Region: "",
			Sk:     "",
			Ak:     "",
		},
		Group:        "default",
		Models:       "moonshot-v1-8k,moonshot-v1-32k,moonshot-v1-128k",
		ModelMapping: "",
		Other:        "",
		SystemPrompt: "",
		Type:         25,
		Key:          "key",
	}
	err = channel.Add(&client)
	if err != nil {
		log.Fatal("add channel err=", err)
	}
	fmt.Println("add channel:", channel, "success!")
	// Search channels (set the fuzzy-match query before listing)
	channels := onesdk.Channels{}
	channels.Query = map[string]string{
		"keyword": "ch1",
	}
	err = channels.List(&client)
	if err != nil {
		log.Fatal("list channel err=", err)
	}
	tmpChannel := onesdk.Channel{}
	for i, c := range channels.Channels {
		fmt.Println("channel["+strconv.Itoa(i)+"]:", *c)
		if c.Name == "ch1" {
			tmpChannel = *c
		}
	}
	fmt.Println("list channel success!")
	// Update the channel
	updateChannel := tmpChannel
	updateChannel.Name = "ch1-updated"
	err = updateChannel.Update(&client)
	if err != nil {
		log.Fatal("update channel err=", err)
	}
	fmt.Println("update channel:", updateChannel, "success!")
	// Get a single channel
	channel = onesdk.Channel{}
	channel.ID = tmpChannel.ID
	err = channel.Get(&client)
	if err != nil {
		log.Fatal("get channel err=", err)
	}
	fmt.Println("get channel:", channel, "success!")
	// Test whether the channel (model) works
	err = channel.Test(&client)
	if err != nil {
		log.Fatal("test channel err=", err)
	}
	fmt.Println("test channel:", channel, "success!")
	// Delete the channel
	err = updateChannel.Delete(&client)
	if err != nil {
		log.Fatal("delete channel err=", err)
	}
	fmt.Println("delete channel:", updateChannel, "success!\r\n")

	// Token API usage test
	// Add a token
	token := onesdk.Token{
		Name:           "token1",
		UserID:         user.ID,
		Models:         "/data/DeepSeek-R1,ERNIE-3.5-8K",
		RemainQuota:    5000000000,
		UnlimitedQuota: false,
		ExpiredTime:    -1,
		Subnet:         "",
	}
	err = token.Add(&client)
	if err != nil {
		log.Fatal("add token err=", err)
	}
	fmt.Println("add token:", token, "success!")
	// Search tokens
	tokens := onesdk.Tokens{}
	tokens.UserID = user.ID
	tokens.Query = map[string]string{
		"keyword": "token1",
	}
	err = tokens.List(&client)
	if err != nil {
		log.Fatal("list token err=", err)
	}
	tmpToken := onesdk.Token{}
	for i, t := range tokens.Tokens {
		fmt.Println("token["+strconv.Itoa(i)+"]:", *t)
		if t.Name == "token1" {
			tmpToken = *t
		}
	}
	// Update the token
	token = tmpToken
	token.Models = "/data/DeepSeek-R1"
	token.RemainQuota = 9009000000
	err = token.Update(&client)
	if err != nil {
		log.Fatal("update token err=", err)
	}
	fmt.Println("update token:", token, "success!")
	// Get a single token
	token = onesdk.Token{ID: token.ID, UserID: tmpToken.UserID}
	err = token.Get(&client)
	if err != nil {
		log.Fatal("get token err=", err)
	}
	fmt.Println("get token:", token, "success!")
	// Delete the token
	err = token.Delete(&client)
	if err != nil {
		log.Fatal("delete token err=", err)
	}
	fmt.Println("delete token:", token, "success!\r\n")

	// Log API usage test
	logs := onesdk.Logs{}
	logs.Query = map[string]string{
		"username": "user1",
	}
	err = logs.Get(&client)
	if err != nil {
		log.Fatal(err)
	}
	for i, l := range logs.Logs {
		fmt.Println("log["+strconv.Itoa(i)+"]=", *l)
	}
	fmt.Println("get logs success!\r\n\r\n")

	// Delete the user
	err = user.Delete(&client)
	if err != nil {
		log.Fatal("delete user err=", err)
	}
	fmt.Println("delete user:", user, "success!")

	// Operate on the root user's own tokens
	rootToken := onesdk.Token{
		Name:           "token1",
		Models:         "/data/DeepSeek-R1,ERNIE-3.5-8K",
		RemainQuota:    5000000000,
		UnlimitedQuota: false,
		ExpiredTime:    -1,
		Subnet:         "",
	}
	err = rootToken.Add(&client)
	if err != nil {
		log.Fatal("add root token err=", err)
	}
	fmt.Println("add root token:", rootToken, "success!")
	// Search tokens
	tokens = onesdk.Tokens{}
	tokens.Query = map[string]string{
		"keyword": "token1",
	}
	err = tokens.List(&client)
	if err != nil {
		log.Fatal("list root token err=", err)
	}
	tmpToken = onesdk.Token{}
	for i, t := range tokens.Tokens {
		fmt.Println("token["+strconv.Itoa(i)+"]:", *t)
		if t.Name == "token1" {
			tmpToken = *t
		}
	}
	// Get the token
	rootToken = onesdk.Token{ID: tmpToken.ID}
	err = rootToken.Get(&client)
	if err != nil {
		log.Fatal("get root token err=", err)
	}
	fmt.Println("get root token:", rootToken, "success!")
	// Delete the token
	err = rootToken.Delete(&client)
	if err != nil {
		log.Fatal("delete root token err=", err)
	}
	fmt.Println("delete root token:", rootToken, "success!")
}
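The sdk/main.go walkthrough above reads as a smoke test rather than a library entry point; it should be runnable from the repository root with `go run ./sdk` against a local one-api instance, once the placeholder key is replaced with a real root system access token.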
145  sdk/readme.md  Normal file
@@ -0,0 +1,145 @@
## Golang SDK usage
### 1. Installation

```
import oneapi "github.com/songquanpeng/one-api/sdk/api"
```
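If the SDK is consumed from another Go module rather than from inside this repository, the dependency would normally be pulled in first with `go get github.com/songquanpeng/one-api` (assuming the module is published under the repository path used in the import above).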
### 2. Initialization

```
config := oneapi.Config{
	Host: "http://127.0.0.1",     // replace with the actual host
	Port: 3000,                   // replace with the actual port
	Key:  "12345678901234567890", // replace with a system access token generated under the root account
}
client := oneapi.OneClient{
	Config: &config,
}
```
### 3. Usage

```
(1) User operations
// Add a user
user := oneapi.User{
	Username:    "test1",
	DisplayName: "test1",
	Password:    "test@123_%6",
}
err := user.Add(&client)
// List users
users := oneapi.Users{}
// keyword is matched fuzzily: username LIKE ? OR email LIKE ? OR display_name LIKE ?
users.Query = map[string]string{
	"keyword": "test1",
}
err = users.List(&client)
// Get a user by ID
user = oneapi.User{}
for _, u := range users.Users {
	if u.Username == "test1" {
		user.ID = u.ID
	}
}
_ = user.Get(&client)
// Update the user
user.Quota = 500000000
err = user.Update(&client)
// Delete the user
//err = user.Delete(&client)

```
```
(2) Channel (model) operations
// Add a channel
channel := oneapi.Channel{
	Name:    "ch1",
	BaseUrl: "",
	ChannelConfig: oneapi.ChannelConfig{
		Region: "",
		Sk:     "",
		Ak:     "",
	},
	Group:        "default",
	Models:       "deepseek-r1",
	ModelMapping: "",
	Other:        "",
	SystemPrompt: "",
	Type:         50, // channel type is a frontend constant, see web/default/src/constants/channel.constants.js; 50 = OpenAI-compatible format
	Key:          "12345678901234567890",
}
err := channel.Add(&client)
// List channels (fuzzy match on the keyword; set the query before listing)
channels := oneapi.Channels{}
channels.Query = map[string]string{
	"keyword": "ch1",
}
err = channels.List(&client)
// Update a channel
updateChannel := oneapi.Channel{}
for _, c := range channels.Channels {
	if c.Name == "ch1" {
		updateChannel = *c
	}
}
updateChannel.Name = "ch1-updated"
err = updateChannel.Update(&client)
// Delete the channel
//err = updateChannel.Delete(&client)
if err != nil {
	log.Fatal(err)
}
```
```
(3) Token operations
// Add a token (expired_time: -1, models: "/data/DeepSeek-R1,ERNIE-3.5-8K", name: "test", remain_quota: 5000000000, subnet: "", unlimited_quota: false)
token := oneapi.Token{
	Name:           "test1",
	UserID:         user.ID,
	Models:         "/data/DeepSeek-R1,ERNIE-3.5-8K",
	RemainQuota:    5000000000,
	UnlimitedQuota: false,
	ExpiredTime:    -1,
	Subnet:         "",
}
err := token.Add(&client)
// List tokens (fuzzy match on the keyword)
tokens := oneapi.Tokens{}
tokens.Query = map[string]string{
	"keyword": "test1",
}
// Filter tokens by user ID
tokens.UserID = user.ID
err = tokens.List(&client)
if err != nil {
	log.Fatal(err)
}
// Update a token
updateToken := *tokens.Tokens[0]
updateToken.Models = "/data/DeepSeek-R1"
updateToken.RemainQuota = 9009000000
err = updateToken.Update(&client)
fmt.Println("update token err=", err, "updateToken=", updateToken)

// Delete the token
err = updateToken.Delete(&client)
fmt.Println("delete token err=", err)
```
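Note that, as in the sdk/main.go walkthrough above, a token owned by another user is addressed with both `ID` and `UserID` when calling `Get`, `Update`, or `Delete`; for the root account's own tokens, setting `ID` alone appears to be sufficient.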
```
(4) Log operations
// Fetch logs
logs := oneapi.Logs{}
logs.Query = map[string]string{
	"username": "test",
}
err = logs.Get(&client)
if err != nil {
	log.Fatal(err)
}
for _, l := range logs.Logs {
	fmt.Println("l=", *l)
}
```