Mirror of https://github.com/songquanpeng/one-api.git (synced 2025-10-31 22:03:41 +08:00)

Compare commits: 26 commits, v0.6.11-pr ... v0.6.11-al
| SHA1 |
|---|
| 54c38de813 |
| d6284bf6b0 |
| df5d2ca93d |
| fef7ae048b |
| 6916debf66 |
| 53da209134 |
| 517f6ad211 |
| 10aba11f18 |
| 4d011c5f98 |
| eb96aa635e |
| c715f2bc1d |
| aed090dd55 |
| 696265774e |
| 974729426d |
| 57c1367ec8 |
| 44233d5c04 |
| bf45a955c3 |
| 20435fcbfc |
| 6e7a1c2323 |
| dd65b997dd |
| 0b6d03d6c6 |
| 4375246e24 |
| 3e3b8230ac |
| 07808122a6 |
| c96895e35b |
| 2552c68249 |
							
								
								
									
.github/workflows/docker-image.yml (6 changes, vendored)
							| @@ -62,9 +62,7 @@ jobs: | |||||||
|         uses: docker/build-push-action@v3 |         uses: docker/build-push-action@v3 | ||||||
|         with: |         with: | ||||||
|           context: . |           context: . | ||||||
|           platforms: ${{ contains(github.ref, 'alpha') && 'linux/amd64' || 'linux/amd64,linux/arm64' }} |           platforms: ${{ contains(github.ref, 'alpha') && 'linux/amd64' || 'linux/amd64' }} | ||||||
|           push: true |           push: true | ||||||
|           tags: ${{ steps.meta.outputs.tags }} |           tags: ${{ steps.meta.outputs.tags }} | ||||||
|           labels: ${{ steps.meta.outputs.labels }} |           labels: ${{ steps.meta.outputs.labels }} | ||||||
|           build-args: | |  | ||||||
|             TARGETARCH=${{ startsWith(matrix.platform, 'linux/arm64') && 'arm64' || 'amd64' }} |  | ||||||
| @@ -24,8 +24,7 @@ RUN apk add --no-cache \ | |||||||
|  |  | ||||||
| ENV GO111MODULE=on \ | ENV GO111MODULE=on \ | ||||||
|     CGO_ENABLED=1 \ |     CGO_ENABLED=1 \ | ||||||
|     GOOS=linux \ |     GOOS=linux | ||||||
|     GOARCH=$TARGETARCH |  | ||||||
|  |  | ||||||
| WORKDIR /build | WORKDIR /build | ||||||
|  |  | ||||||
|   | |||||||
| @@ -315,6 +315,7 @@ If the channel ID is not provided, load balancing will be used to distribute the | |||||||
| * [FastGPT](https://github.com/labring/FastGPT): Knowledge question answering system based on the LLM | * [FastGPT](https://github.com/labring/FastGPT): Knowledge question answering system based on the LLM | ||||||
| * [VChart](https://github.com/VisActor/VChart):  More than just a cross-platform charting library, but also an expressive data storyteller. | * [VChart](https://github.com/VisActor/VChart):  More than just a cross-platform charting library, but also an expressive data storyteller. | ||||||
| * [VMind](https://github.com/VisActor/VMind):  Not just automatic, but also fantastic. Open-source solution for intelligent visualization. | * [VMind](https://github.com/VisActor/VMind):  Not just automatic, but also fantastic. Open-source solution for intelligent visualization. | ||||||
|  | * [CherryStudio](https://github.com/CherryHQ/cherry-studio):  A cross-platform AI client that integrates multiple service providers and supports local knowledge base management. | ||||||
|  |  | ||||||
| ## Note | ## Note | ||||||
| This project is an open-source project. Please use it in compliance with OpenAI's [Terms of Use](https://openai.com/policies/terms-of-use) and **applicable laws and regulations**. It must not be used for illegal purposes. | This project is an open-source project. Please use it in compliance with OpenAI's [Terms of Use](https://openai.com/policies/terms-of-use) and **applicable laws and regulations**. It must not be used for illegal purposes. | ||||||
|   | |||||||
| @@ -287,8 +287,8 @@ graph LR | |||||||
|     + インターフェイスアドレスと API Key が正しいか再確認してください。 |     + インターフェイスアドレスと API Key が正しいか再確認してください。 | ||||||
|  |  | ||||||
| ## 関連プロジェクト | ## 関連プロジェクト | ||||||
| [FastGPT](https://github.com/labring/FastGPT): LLM に基づく知識質問応答システム | * [FastGPT](https://github.com/labring/FastGPT): LLM に基づく知識質問応答システム | ||||||
|  | * [CherryStudio](https://github.com/CherryHQ/cherry-studio):  マルチプラットフォーム対応のAIクライアント。複数のサービスプロバイダーを統合管理し、ローカル知識ベースをサポートします。 | ||||||
| ## 注 | ## 注 | ||||||
| 本プロジェクトはオープンソースプロジェクトです。OpenAI の[利用規約](https://openai.com/policies/terms-of-use)および**適用される法令**を遵守してご利用ください。違法な目的での利用はご遠慮ください。 | 本プロジェクトはオープンソースプロジェクトです。OpenAI の[利用規約](https://openai.com/policies/terms-of-use)および**適用される法令**を遵守してご利用ください。違法な目的での利用はご遠慮ください。 | ||||||
|  |  | ||||||
|   | |||||||
| @@ -115,7 +115,7 @@ _✨ 通过标准的 OpenAI API 格式访问所有的大模型,开箱即用  | |||||||
| 19. 支持丰富的**自定义**设置, | 19. 支持丰富的**自定义**设置, | ||||||
|     1. 支持自定义系统名称,logo 以及页脚。 |     1. 支持自定义系统名称,logo 以及页脚。 | ||||||
|     2. 支持自定义首页和关于页面,可以选择使用 HTML & Markdown 代码进行自定义,或者使用一个单独的网页通过 iframe 嵌入。 |     2. 支持自定义首页和关于页面,可以选择使用 HTML & Markdown 代码进行自定义,或者使用一个单独的网页通过 iframe 嵌入。 | ||||||
| 20. 支持通过系统访问令牌调用管理 API,进而**在无需二开的情况下扩展和自定义** One API 的功能,详情请参考此处 [API 文档](./docs/API.md)。。 | 20. 支持通过系统访问令牌调用管理 API,进而**在无需二开的情况下扩展和自定义** One API 的功能,详情请参考此处 [API 文档](./docs/API.md)。 | ||||||
| 21. 支持 Cloudflare Turnstile 用户校验。 | 21. 支持 Cloudflare Turnstile 用户校验。 | ||||||
| 22. 支持用户管理,支持**多种用户登录注册方式**: | 22. 支持用户管理,支持**多种用户登录注册方式**: | ||||||
|     + 邮箱登录注册(支持注册邮箱白名单)以及通过邮箱进行密码重置。 |     + 邮箱登录注册(支持注册邮箱白名单)以及通过邮箱进行密码重置。 | ||||||
| @@ -469,6 +469,7 @@ https://openai.justsong.cn | |||||||
| * [ChatGPT Next Web](https://github.com/Yidadaa/ChatGPT-Next-Web):  一键拥有你自己的跨平台 ChatGPT 应用 | * [ChatGPT Next Web](https://github.com/Yidadaa/ChatGPT-Next-Web):  一键拥有你自己的跨平台 ChatGPT 应用 | ||||||
| * [VChart](https://github.com/VisActor/VChart):  不只是开箱即用的多端图表库,更是生动灵活的数据故事讲述者。 | * [VChart](https://github.com/VisActor/VChart):  不只是开箱即用的多端图表库,更是生动灵活的数据故事讲述者。 | ||||||
| * [VMind](https://github.com/VisActor/VMind):  不仅自动,还很智能。开源智能可视化解决方案。 | * [VMind](https://github.com/VisActor/VMind):  不仅自动,还很智能。开源智能可视化解决方案。 | ||||||
|  | * [CherryStudio](https://github.com/CherryHQ/cherry-studio):  全平台支持的AI客户端, 多服务商集成管理、本地知识库支持。 | ||||||
|  |  | ||||||
| ## 注意 | ## 注意 | ||||||
|  |  | ||||||
|   | |||||||
| @@ -163,4 +163,4 @@ var UserContentRequestProxy = env.String("USER_CONTENT_REQUEST_PROXY", "") | |||||||
| var UserContentRequestTimeout = env.Int("USER_CONTENT_REQUEST_TIMEOUT", 30) | var UserContentRequestTimeout = env.Int("USER_CONTENT_REQUEST_TIMEOUT", 30) | ||||||
|  |  | ||||||
| var EnforceIncludeUsage = env.Bool("ENFORCE_INCLUDE_USAGE", false) | var EnforceIncludeUsage = env.Bool("ENFORCE_INCLUDE_USAGE", false) | ||||||
| var TestPrompt = env.String("TEST_PROMPT", "Print your model name exactly and do not output without any other text.") | var TestPrompt = env.String("TEST_PROMPT", "Output only your specific model name with no additional text.") | ||||||
|   | |||||||
| @@ -93,6 +93,9 @@ func Error(ctx context.Context, msg string) { | |||||||
| } | } | ||||||
|  |  | ||||||
| func Debugf(ctx context.Context, format string, a ...any) { | func Debugf(ctx context.Context, format string, a ...any) { | ||||||
|  | 	if !config.DebugEnabled { | ||||||
|  | 		return | ||||||
|  | 	} | ||||||
| 	logHelper(ctx, loggerDEBUG, fmt.Sprintf(format, a...)) | 	logHelper(ctx, loggerDEBUG, fmt.Sprintf(format, a...)) | ||||||
| } | } | ||||||
|  |  | ||||||
|   | |||||||
							
								
								
									
common/utils/array.go (new file, 13 lines)
							| @@ -0,0 +1,13 @@ | |||||||
|  | package utils | ||||||
|  |  | ||||||
|  | func DeDuplication(slice []string) []string { | ||||||
|  | 	m := make(map[string]bool) | ||||||
|  | 	for _, v := range slice { | ||||||
|  | 		m[v] = true | ||||||
|  | 	} | ||||||
|  | 	result := make([]string, 0, len(m)) | ||||||
|  | 	for v := range m { | ||||||
|  | 		result = append(result, v) | ||||||
|  | 	} | ||||||
|  | 	return result | ||||||
|  | } | ||||||
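
For reference, a minimal sketch of how the new helper behaves, assuming the import path `github.com/songquanpeng/one-api/common/utils` used later in this diff; note that output order is not guaranteed because the function collects keys from a map:

```go
package main

import (
	"fmt"

	"github.com/songquanpeng/one-api/common/utils"
)

func main() {
	models := []string{"gpt-4o", "gpt-4o", "gpt-4o-mini", "gpt-4o"}
	// DeDuplication builds a set in a map, so duplicates collapse,
	// but the original ordering of the slice is not preserved.
	deduped := utils.DeDuplication(models)
	fmt.Println(deduped) // e.g. [gpt-4o gpt-4o-mini] (order may vary)
}
```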
| @@ -112,6 +112,13 @@ type DeepSeekUsageResponse struct { | |||||||
| 	} `json:"balance_infos"` | 	} `json:"balance_infos"` | ||||||
| } | } | ||||||
|  |  | ||||||
|  | type OpenRouterResponse struct { | ||||||
|  | 	Data struct { | ||||||
|  | 		TotalCredits float64 `json:"total_credits"` | ||||||
|  | 		TotalUsage   float64 `json:"total_usage"` | ||||||
|  | 	} `json:"data"` | ||||||
|  | } | ||||||
|  |  | ||||||
| // GetAuthHeader get auth header | // GetAuthHeader get auth header | ||||||
| func GetAuthHeader(token string) http.Header { | func GetAuthHeader(token string) http.Header { | ||||||
| 	h := http.Header{} | 	h := http.Header{} | ||||||
| @@ -285,6 +292,22 @@ func updateChannelDeepSeekBalance(channel *model.Channel) (float64, error) { | |||||||
| 	return balance, nil | 	return balance, nil | ||||||
| } | } | ||||||
|  |  | ||||||
|  | func updateChannelOpenRouterBalance(channel *model.Channel) (float64, error) { | ||||||
|  | 	url := "https://openrouter.ai/api/v1/credits" | ||||||
|  | 	body, err := GetResponseBody("GET", url, channel, GetAuthHeader(channel.Key)) | ||||||
|  | 	if err != nil { | ||||||
|  | 		return 0, err | ||||||
|  | 	} | ||||||
|  | 	response := OpenRouterResponse{} | ||||||
|  | 	err = json.Unmarshal(body, &response) | ||||||
|  | 	if err != nil { | ||||||
|  | 		return 0, err | ||||||
|  | 	} | ||||||
|  | 	balance := response.Data.TotalCredits - response.Data.TotalUsage | ||||||
|  | 	channel.UpdateBalance(balance) | ||||||
|  | 	return balance, nil | ||||||
|  | } | ||||||
|  |  | ||||||
| func updateChannelBalance(channel *model.Channel) (float64, error) { | func updateChannelBalance(channel *model.Channel) (float64, error) { | ||||||
| 	baseURL := channeltype.ChannelBaseURLs[channel.Type] | 	baseURL := channeltype.ChannelBaseURLs[channel.Type] | ||||||
| 	if channel.GetBaseURL() == "" { | 	if channel.GetBaseURL() == "" { | ||||||
| @@ -313,6 +336,8 @@ func updateChannelBalance(channel *model.Channel) (float64, error) { | |||||||
| 		return updateChannelSiliconFlowBalance(channel) | 		return updateChannelSiliconFlowBalance(channel) | ||||||
| 	case channeltype.DeepSeek: | 	case channeltype.DeepSeek: | ||||||
| 		return updateChannelDeepSeekBalance(channel) | 		return updateChannelDeepSeekBalance(channel) | ||||||
|  | 	case channeltype.OpenRouter: | ||||||
|  | 		return updateChannelOpenRouterBalance(channel) | ||||||
| 	default: | 	default: | ||||||
| 		return 0, errors.New("尚未实现") | 		return 0, errors.New("尚未实现") | ||||||
| 	} | 	} | ||||||
|   | |||||||
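
As a rough illustration of the balance calculation added above, the sketch below queries the OpenRouter credits endpoint directly and derives the remaining balance as `total_credits - total_usage`. It is a standalone net/http version for clarity; the project itself goes through its `GetResponseBody` and `GetAuthHeader` helpers, and the API key shown is a placeholder:

```go
package main

import (
	"encoding/json"
	"fmt"
	"net/http"
)

type openRouterCredits struct {
	Data struct {
		TotalCredits float64 `json:"total_credits"`
		TotalUsage   float64 `json:"total_usage"`
	} `json:"data"`
}

func openRouterBalance(apiKey string) (float64, error) {
	req, err := http.NewRequest("GET", "https://openrouter.ai/api/v1/credits", nil)
	if err != nil {
		return 0, err
	}
	req.Header.Set("Authorization", "Bearer "+apiKey)

	resp, err := http.DefaultClient.Do(req)
	if err != nil {
		return 0, err
	}
	defer resp.Body.Close()

	var credits openRouterCredits
	if err := json.NewDecoder(resp.Body).Decode(&credits); err != nil {
		return 0, err
	}
	// Remaining balance is credits purchased minus credits consumed.
	return credits.Data.TotalCredits - credits.Data.TotalUsage, nil
}

func main() {
	balance, err := openRouterBalance("sk-or-...") // hypothetical key
	if err != nil {
		fmt.Println("error:", err)
		return
	}
	fmt.Printf("remaining OpenRouter credits: $%.2f\n", balance)
}
```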
| @@ -153,6 +153,7 @@ func testChannel(ctx context.Context, channel *model.Channel, request *relaymode | |||||||
| 	rawResponse := w.Body.String() | 	rawResponse := w.Body.String() | ||||||
| 	_, responseMessage, err = parseTestResponse(rawResponse) | 	_, responseMessage, err = parseTestResponse(rawResponse) | ||||||
| 	if err != nil { | 	if err != nil { | ||||||
|  | 		logger.SysError(fmt.Sprintf("failed to parse error: %s, \nresponse: %s", err.Error(), rawResponse)) | ||||||
| 		return "", err, nil | 		return "", err, nil | ||||||
| 	} | 	} | ||||||
| 	result := w.Result() | 	result := w.Result() | ||||||
|   | |||||||
| @@ -2,10 +2,13 @@ package model | |||||||
|  |  | ||||||
| import ( | import ( | ||||||
| 	"context" | 	"context" | ||||||
| 	"github.com/songquanpeng/one-api/common" |  | ||||||
| 	"gorm.io/gorm" |  | ||||||
| 	"sort" | 	"sort" | ||||||
| 	"strings" | 	"strings" | ||||||
|  |  | ||||||
|  | 	"gorm.io/gorm" | ||||||
|  |  | ||||||
|  | 	"github.com/songquanpeng/one-api/common" | ||||||
|  | 	"github.com/songquanpeng/one-api/common/utils" | ||||||
| ) | ) | ||||||
|  |  | ||||||
| type Ability struct { | type Ability struct { | ||||||
| @@ -49,6 +52,7 @@ func GetRandomSatisfiedChannel(group string, model string, ignoreFirstPriority b | |||||||
|  |  | ||||||
| func (channel *Channel) AddAbilities() error { | func (channel *Channel) AddAbilities() error { | ||||||
| 	models_ := strings.Split(channel.Models, ",") | 	models_ := strings.Split(channel.Models, ",") | ||||||
|  | 	models_ = utils.DeDuplication(models_) | ||||||
| 	groups_ := strings.Split(channel.Group, ",") | 	groups_ := strings.Split(channel.Group, ",") | ||||||
| 	abilities := make([]Ability, 0, len(models_)) | 	abilities := make([]Ability, 0, len(models_)) | ||||||
| 	for _, model := range models_ { | 	for _, model := range models_ { | ||||||
|   | |||||||
							
								
								
									
relay/adaptor/alibailian/constants.go (new file, 20 lines)
							| @@ -0,0 +1,20 @@ | |||||||
|  | package alibailian | ||||||
|  |  | ||||||
|  | // https://help.aliyun.com/zh/model-studio/getting-started/models | ||||||
|  |  | ||||||
|  | var ModelList = []string{ | ||||||
|  | 	"qwen-turbo", | ||||||
|  | 	"qwen-plus", | ||||||
|  | 	"qwen-long", | ||||||
|  | 	"qwen-max", | ||||||
|  | 	"qwen-coder-plus", | ||||||
|  | 	"qwen-coder-plus-latest", | ||||||
|  | 	"qwen-coder-turbo", | ||||||
|  | 	"qwen-coder-turbo-latest", | ||||||
|  | 	"qwen-mt-plus", | ||||||
|  | 	"qwen-mt-turbo", | ||||||
|  | 	"qwq-32b-preview", | ||||||
|  |  | ||||||
|  | 	"deepseek-r1", | ||||||
|  | 	"deepseek-v3", | ||||||
|  | } | ||||||
							
								
								
									
relay/adaptor/alibailian/main.go (new file, 19 lines)
							| @@ -0,0 +1,19 @@ | |||||||
|  | package alibailian | ||||||
|  |  | ||||||
|  | import ( | ||||||
|  | 	"fmt" | ||||||
|  |  | ||||||
|  | 	"github.com/songquanpeng/one-api/relay/meta" | ||||||
|  | 	"github.com/songquanpeng/one-api/relay/relaymode" | ||||||
|  | ) | ||||||
|  |  | ||||||
|  | func GetRequestURL(meta *meta.Meta) (string, error) { | ||||||
|  | 	switch meta.Mode { | ||||||
|  | 	case relaymode.ChatCompletions: | ||||||
|  | 		return fmt.Sprintf("%s/compatible-mode/v1/chat/completions", meta.BaseURL), nil | ||||||
|  | 	case relaymode.Embeddings: | ||||||
|  | 		return fmt.Sprintf("%s/compatible-mode/v1/embeddings", meta.BaseURL), nil | ||||||
|  | 	default: | ||||||
|  | 	} | ||||||
|  | 	return "", fmt.Errorf("unsupported relay mode %d for ali bailian", meta.Mode) | ||||||
|  | } | ||||||
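
A small sketch of how this helper resolves URLs, assuming a `meta.Meta` populated with just the fields the function reads (both field names appear elsewhere in this diff) and the default Bailian base URL added for the new channel type:

```go
package main

import (
	"fmt"

	"github.com/songquanpeng/one-api/relay/adaptor/alibailian"
	"github.com/songquanpeng/one-api/relay/meta"
	"github.com/songquanpeng/one-api/relay/relaymode"
)

func main() {
	m := &meta.Meta{
		Mode:    relaymode.ChatCompletions,
		BaseURL: "https://dashscope.aliyuncs.com", // default base URL registered for channel 49
	}
	url, err := alibailian.GetRequestURL(m)
	if err != nil {
		fmt.Println("error:", err)
		return
	}
	// https://dashscope.aliyuncs.com/compatible-mode/v1/chat/completions
	fmt.Println(url)
}
```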
							
								
								
									
relay/adaptor/baiduv2/constants.go (new file, 30 lines)
							| @@ -0,0 +1,30 @@ | |||||||
|  | package baiduv2 | ||||||
|  |  | ||||||
|  | // https://console.bce.baidu.com/support/?_=1692863460488×tamp=1739074632076#/api?product=QIANFAN&project=%E5%8D%83%E5%B8%86ModelBuilder&parent=%E5%AF%B9%E8%AF%9DChat%20V2&api=v2%2Fchat%2Fcompletions&method=post | ||||||
|  | // https://cloud.baidu.com/doc/WENXINWORKSHOP/s/Fm2vrveyu#%E6%94%AF%E6%8C%81%E6%A8%A1%E5%9E%8B%E5%88%97%E8%A1%A8 | ||||||
|  |  | ||||||
|  | var ModelList = []string{ | ||||||
|  | 	"ernie-4.0-8k-latest", | ||||||
|  | 	"ernie-4.0-8k-preview", | ||||||
|  | 	"ernie-4.0-8k", | ||||||
|  | 	"ernie-4.0-turbo-8k-latest", | ||||||
|  | 	"ernie-4.0-turbo-8k-preview", | ||||||
|  | 	"ernie-4.0-turbo-8k", | ||||||
|  | 	"ernie-4.0-turbo-128k", | ||||||
|  | 	"ernie-3.5-8k-preview", | ||||||
|  | 	"ernie-3.5-8k", | ||||||
|  | 	"ernie-3.5-128k", | ||||||
|  | 	"ernie-speed-8k", | ||||||
|  | 	"ernie-speed-128k", | ||||||
|  | 	"ernie-speed-pro-128k", | ||||||
|  | 	"ernie-lite-8k", | ||||||
|  | 	"ernie-lite-pro-128k", | ||||||
|  | 	"ernie-tiny-8k", | ||||||
|  | 	"ernie-char-8k", | ||||||
|  | 	"ernie-char-fiction-8k", | ||||||
|  | 	"ernie-novel-8k", | ||||||
|  | 	"deepseek-v3", | ||||||
|  | 	"deepseek-r1", | ||||||
|  | 	"deepseek-r1-distill-qwen-32b", | ||||||
|  | 	"deepseek-r1-distill-qwen-14b", | ||||||
|  | } | ||||||
							
								
								
									
relay/adaptor/baiduv2/main.go (new file, 17 lines)
							| @@ -0,0 +1,17 @@ | |||||||
|  | package baiduv2 | ||||||
|  |  | ||||||
|  | import ( | ||||||
|  | 	"fmt" | ||||||
|  |  | ||||||
|  | 	"github.com/songquanpeng/one-api/relay/meta" | ||||||
|  | 	"github.com/songquanpeng/one-api/relay/relaymode" | ||||||
|  | ) | ||||||
|  |  | ||||||
|  | func GetRequestURL(meta *meta.Meta) (string, error) { | ||||||
|  | 	switch meta.Mode { | ||||||
|  | 	case relaymode.ChatCompletions: | ||||||
|  | 		return fmt.Sprintf("%s/v2/chat/completions", meta.BaseURL), nil | ||||||
|  | 	default: | ||||||
|  | 	} | ||||||
|  | 	return "", fmt.Errorf("unsupported relay mode %d for baidu v2", meta.Mode) | ||||||
|  | } | ||||||
| @@ -5,9 +5,10 @@ import ( | |||||||
| 	"fmt" | 	"fmt" | ||||||
| 	"io" | 	"io" | ||||||
| 	"net/http" | 	"net/http" | ||||||
|  | 	"strings" | ||||||
|  |  | ||||||
| 	"github.com/gin-gonic/gin" | 	"github.com/gin-gonic/gin" | ||||||
|  | 	"github.com/songquanpeng/one-api/common/config" | ||||||
| 	"github.com/songquanpeng/one-api/common/helper" | 	"github.com/songquanpeng/one-api/common/helper" | ||||||
| 	channelhelper "github.com/songquanpeng/one-api/relay/adaptor" | 	channelhelper "github.com/songquanpeng/one-api/relay/adaptor" | ||||||
| 	"github.com/songquanpeng/one-api/relay/adaptor/openai" | 	"github.com/songquanpeng/one-api/relay/adaptor/openai" | ||||||
| @@ -20,17 +21,12 @@ type Adaptor struct { | |||||||
| } | } | ||||||
|  |  | ||||||
| func (a *Adaptor) Init(meta *meta.Meta) { | func (a *Adaptor) Init(meta *meta.Meta) { | ||||||
|  |  | ||||||
| } | } | ||||||
|  |  | ||||||
| func (a *Adaptor) GetRequestURL(meta *meta.Meta) (string, error) { | func (a *Adaptor) GetRequestURL(meta *meta.Meta) (string, error) { | ||||||
| 	var defaultVersion string | 	defaultVersion := config.GeminiVersion | ||||||
| 	switch meta.ActualModelName { | 	if strings.Contains(meta.ActualModelName, "gemini-2.0") || | ||||||
| 	case "gemini-2.0-flash-exp", | 		strings.Contains(meta.ActualModelName, "gemini-1.5") { | ||||||
| 		"gemini-2.0-flash-thinking-exp", |  | ||||||
| 		"gemini-2.0-flash-thinking-exp-01-21": |  | ||||||
| 		defaultVersion = "v1beta" |  | ||||||
| 	default: |  | ||||||
| 		defaultVersion = "v1beta" | 		defaultVersion = "v1beta" | ||||||
| 	} | 	} | ||||||
|  |  | ||||||
|   | |||||||
| @@ -4,8 +4,38 @@ package gemini | |||||||
|  |  | ||||||
| var ModelList = []string{ | var ModelList = []string{ | ||||||
| 	"gemini-pro", "gemini-1.0-pro", | 	"gemini-pro", "gemini-1.0-pro", | ||||||
| 	"gemini-1.5-flash", "gemini-1.5-pro", | 	// "gemma-2-2b-it", "gemma-2-9b-it", "gemma-2-27b-it", | ||||||
|  | 	"gemini-1.5-flash", "gemini-1.5-flash-8b", | ||||||
|  | 	"gemini-1.5-pro", "gemini-1.5-pro-experimental", | ||||||
| 	"text-embedding-004", "aqa", | 	"text-embedding-004", "aqa", | ||||||
| 	"gemini-2.0-flash-exp", | 	"gemini-2.0-flash", "gemini-2.0-flash-exp", | ||||||
| 	"gemini-2.0-flash-thinking-exp", "gemini-2.0-flash-thinking-exp-01-21", | 	"gemini-2.0-flash-lite-preview-02-05", | ||||||
|  | 	"gemini-2.0-flash-thinking-exp-01-21", | ||||||
|  | 	"gemini-2.0-pro-exp-02-05", | ||||||
|  | } | ||||||
|  |  | ||||||
|  | // ModelsSupportSystemInstruction is the list of models that support system instruction. | ||||||
|  | // | ||||||
|  | // https://cloud.google.com/vertex-ai/generative-ai/docs/learn/prompts/system-instructions | ||||||
|  | var ModelsSupportSystemInstruction = []string{ | ||||||
|  | 	// "gemini-1.0-pro-002", | ||||||
|  | 	// "gemini-1.5-flash", "gemini-1.5-flash-001", "gemini-1.5-flash-002", | ||||||
|  | 	// "gemini-1.5-flash-8b", | ||||||
|  | 	// "gemini-1.5-pro", "gemini-1.5-pro-001", "gemini-1.5-pro-002", | ||||||
|  | 	// "gemini-1.5-pro-experimental", | ||||||
|  | 	"gemini-2.0-flash", "gemini-2.0-flash-exp", | ||||||
|  | 	"gemini-2.0-flash-thinking-exp-01-21", | ||||||
|  | } | ||||||
|  |  | ||||||
|  | // IsModelSupportSystemInstruction check if the model support system instruction. | ||||||
|  | // | ||||||
|  | // Because the module still targets Go 1.20, slices.Contains cannot be used | ||||||
|  | func IsModelSupportSystemInstruction(model string) bool { | ||||||
|  | 	for _, m := range ModelsSupportSystemInstruction { | ||||||
|  | 		if m == model { | ||||||
|  | 			return true | ||||||
|  | 		} | ||||||
|  | 	} | ||||||
|  |  | ||||||
|  | 	return false | ||||||
| } | } | ||||||
|   | |||||||
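
For context, a minimal sketch of how the new capability check might be used when deciding where a system prompt goes. The request-conversion logic in the next hunk does this inline; the standalone version below assumes the gemini adaptor's import path follows the `relay/adaptor/<name>` pattern used for the other adaptors in this diff:

```go
package main

import (
	"fmt"

	"github.com/songquanpeng/one-api/relay/adaptor/gemini"
)

func main() {
	for _, model := range []string{"gemini-2.0-flash", "gemini-1.5-pro"} {
		if gemini.IsModelSupportSystemInstruction(model) {
			// The system message can be forwarded as system_instruction.
			fmt.Println(model, "-> use system_instruction")
		} else {
			// Fall back to rewriting the system message as a user message.
			fmt.Println(model, "-> downgrade system role to user")
		}
	}
}
```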
| @@ -132,9 +132,16 @@ func ConvertRequest(textRequest model.GeneralOpenAIRequest) *ChatRequest { | |||||||
| 		} | 		} | ||||||
| 		// Converting system prompt to prompt from user for the same reason | 		// Converting system prompt to prompt from user for the same reason | ||||||
| 		if content.Role == "system" { | 		if content.Role == "system" { | ||||||
| 			content.Role = "user" |  | ||||||
| 			shouldAddDummyModelMessage = true | 			shouldAddDummyModelMessage = true | ||||||
|  | 			if IsModelSupportSystemInstruction(textRequest.Model) { | ||||||
|  | 				geminiRequest.SystemInstruction = &content | ||||||
|  | 				geminiRequest.SystemInstruction.Role = "" | ||||||
|  | 				continue | ||||||
|  | 			} else { | ||||||
|  | 				content.Role = "user" | ||||||
|  | 			} | ||||||
| 		} | 		} | ||||||
|  |  | ||||||
| 		geminiRequest.Contents = append(geminiRequest.Contents, content) | 		geminiRequest.Contents = append(geminiRequest.Contents, content) | ||||||
|  |  | ||||||
| 		// If a system message is the last message, we need to add a dummy model message to make gemini happy | 		// If a system message is the last message, we need to add a dummy model message to make gemini happy | ||||||
|   | |||||||
| @@ -1,10 +1,11 @@ | |||||||
| package gemini | package gemini | ||||||
|  |  | ||||||
| type ChatRequest struct { | type ChatRequest struct { | ||||||
| 	Contents         []ChatContent        `json:"contents"` | 	Contents          []ChatContent        `json:"contents"` | ||||||
| 	SafetySettings   []ChatSafetySettings `json:"safety_settings,omitempty"` | 	SafetySettings    []ChatSafetySettings `json:"safety_settings,omitempty"` | ||||||
| 	GenerationConfig ChatGenerationConfig `json:"generation_config,omitempty"` | 	GenerationConfig  ChatGenerationConfig `json:"generation_config,omitempty"` | ||||||
| 	Tools            []ChatTools          `json:"tools,omitempty"` | 	Tools             []ChatTools          `json:"tools,omitempty"` | ||||||
|  | 	SystemInstruction *ChatContent         `json:"system_instruction,omitempty"` | ||||||
| } | } | ||||||
|  |  | ||||||
| type EmbeddingRequest struct { | type EmbeddingRequest struct { | ||||||
|   | |||||||
| @@ -8,4 +8,6 @@ var ModelList = []string{ | |||||||
| 	"abab6-chat", | 	"abab6-chat", | ||||||
| 	"abab5.5-chat", | 	"abab5.5-chat", | ||||||
| 	"abab5.5s-chat", | 	"abab5.5s-chat", | ||||||
|  | 	"MiniMax-VL-01", | ||||||
|  | 	"MiniMax-Text-01", | ||||||
| } | } | ||||||
|   | |||||||
| @@ -8,7 +8,10 @@ import ( | |||||||
| 	"strings" | 	"strings" | ||||||
|  |  | ||||||
| 	"github.com/gin-gonic/gin" | 	"github.com/gin-gonic/gin" | ||||||
|  |  | ||||||
| 	"github.com/songquanpeng/one-api/relay/adaptor" | 	"github.com/songquanpeng/one-api/relay/adaptor" | ||||||
|  | 	"github.com/songquanpeng/one-api/relay/adaptor/alibailian" | ||||||
|  | 	"github.com/songquanpeng/one-api/relay/adaptor/baiduv2" | ||||||
| 	"github.com/songquanpeng/one-api/relay/adaptor/doubao" | 	"github.com/songquanpeng/one-api/relay/adaptor/doubao" | ||||||
| 	"github.com/songquanpeng/one-api/relay/adaptor/minimax" | 	"github.com/songquanpeng/one-api/relay/adaptor/minimax" | ||||||
| 	"github.com/songquanpeng/one-api/relay/adaptor/novita" | 	"github.com/songquanpeng/one-api/relay/adaptor/novita" | ||||||
| @@ -52,6 +55,10 @@ func (a *Adaptor) GetRequestURL(meta *meta.Meta) (string, error) { | |||||||
| 		return doubao.GetRequestURL(meta) | 		return doubao.GetRequestURL(meta) | ||||||
| 	case channeltype.Novita: | 	case channeltype.Novita: | ||||||
| 		return novita.GetRequestURL(meta) | 		return novita.GetRequestURL(meta) | ||||||
|  | 	case channeltype.BaiduV2: | ||||||
|  | 		return baiduv2.GetRequestURL(meta) | ||||||
|  | 	case channeltype.AliBailian: | ||||||
|  | 		return alibailian.GetRequestURL(meta) | ||||||
| 	default: | 	default: | ||||||
| 		return GetFullRequestURL(meta.BaseURL, meta.RequestURLPath, meta.ChannelType), nil | 		return GetFullRequestURL(meta.BaseURL, meta.RequestURLPath, meta.ChannelType), nil | ||||||
| 	} | 	} | ||||||
|   | |||||||
| @@ -2,7 +2,9 @@ package openai | |||||||
|  |  | ||||||
| import ( | import ( | ||||||
| 	"github.com/songquanpeng/one-api/relay/adaptor/ai360" | 	"github.com/songquanpeng/one-api/relay/adaptor/ai360" | ||||||
|  | 	"github.com/songquanpeng/one-api/relay/adaptor/alibailian" | ||||||
| 	"github.com/songquanpeng/one-api/relay/adaptor/baichuan" | 	"github.com/songquanpeng/one-api/relay/adaptor/baichuan" | ||||||
|  | 	"github.com/songquanpeng/one-api/relay/adaptor/baiduv2" | ||||||
| 	"github.com/songquanpeng/one-api/relay/adaptor/deepseek" | 	"github.com/songquanpeng/one-api/relay/adaptor/deepseek" | ||||||
| 	"github.com/songquanpeng/one-api/relay/adaptor/doubao" | 	"github.com/songquanpeng/one-api/relay/adaptor/doubao" | ||||||
| 	"github.com/songquanpeng/one-api/relay/adaptor/groq" | 	"github.com/songquanpeng/one-api/relay/adaptor/groq" | ||||||
| @@ -11,10 +13,12 @@ import ( | |||||||
| 	"github.com/songquanpeng/one-api/relay/adaptor/mistral" | 	"github.com/songquanpeng/one-api/relay/adaptor/mistral" | ||||||
| 	"github.com/songquanpeng/one-api/relay/adaptor/moonshot" | 	"github.com/songquanpeng/one-api/relay/adaptor/moonshot" | ||||||
| 	"github.com/songquanpeng/one-api/relay/adaptor/novita" | 	"github.com/songquanpeng/one-api/relay/adaptor/novita" | ||||||
|  | 	"github.com/songquanpeng/one-api/relay/adaptor/openrouter" | ||||||
| 	"github.com/songquanpeng/one-api/relay/adaptor/siliconflow" | 	"github.com/songquanpeng/one-api/relay/adaptor/siliconflow" | ||||||
| 	"github.com/songquanpeng/one-api/relay/adaptor/stepfun" | 	"github.com/songquanpeng/one-api/relay/adaptor/stepfun" | ||||||
| 	"github.com/songquanpeng/one-api/relay/adaptor/togetherai" | 	"github.com/songquanpeng/one-api/relay/adaptor/togetherai" | ||||||
| 	"github.com/songquanpeng/one-api/relay/adaptor/xai" | 	"github.com/songquanpeng/one-api/relay/adaptor/xai" | ||||||
|  | 	"github.com/songquanpeng/one-api/relay/adaptor/xunfeiv2" | ||||||
| 	"github.com/songquanpeng/one-api/relay/channeltype" | 	"github.com/songquanpeng/one-api/relay/channeltype" | ||||||
| ) | ) | ||||||
|  |  | ||||||
| @@ -34,6 +38,8 @@ var CompatibleChannels = []int{ | |||||||
| 	channeltype.Novita, | 	channeltype.Novita, | ||||||
| 	channeltype.SiliconFlow, | 	channeltype.SiliconFlow, | ||||||
| 	channeltype.XAI, | 	channeltype.XAI, | ||||||
|  | 	channeltype.BaiduV2, | ||||||
|  | 	channeltype.XunfeiV2, | ||||||
| } | } | ||||||
|  |  | ||||||
| func GetCompatibleChannelMeta(channelType int) (string, []string) { | func GetCompatibleChannelMeta(channelType int) (string, []string) { | ||||||
| @@ -68,6 +74,14 @@ func GetCompatibleChannelMeta(channelType int) (string, []string) { | |||||||
| 		return "siliconflow", siliconflow.ModelList | 		return "siliconflow", siliconflow.ModelList | ||||||
| 	case channeltype.XAI: | 	case channeltype.XAI: | ||||||
| 		return "xai", xai.ModelList | 		return "xai", xai.ModelList | ||||||
|  | 	case channeltype.BaiduV2: | ||||||
|  | 		return "baiduv2", baiduv2.ModelList | ||||||
|  | 	case channeltype.XunfeiV2: | ||||||
|  | 		return "xunfeiv2", xunfeiv2.ModelList | ||||||
|  | 	case channeltype.OpenRouter: | ||||||
|  | 		return "openrouter", openrouter.ModelList | ||||||
|  | 	case channeltype.AliBailian: | ||||||
|  | 		return "alibailian", alibailian.ModelList | ||||||
| 	default: | 	default: | ||||||
| 		return "openai", ModelList | 		return "openai", ModelList | ||||||
| 	} | 	} | ||||||
|   | |||||||
							
								
								
									
relay/adaptor/openrouter/constants.go (new file, 20 lines)
							| @@ -0,0 +1,20 @@ | |||||||
|  | package openrouter | ||||||
|  |  | ||||||
|  | var ModelList = []string{ | ||||||
|  | 	"openai/gpt-3.5-turbo", | ||||||
|  | 	"openai/chatgpt-4o-latest", | ||||||
|  | 	"openai/o1", | ||||||
|  | 	"openai/o1-preview", | ||||||
|  | 	"openai/o1-mini", | ||||||
|  | 	"openai/o3-mini", | ||||||
|  | 	"google/gemini-2.0-flash-001", | ||||||
|  | 	"google/gemini-2.0-flash-thinking-exp:free", | ||||||
|  | 	"google/gemini-2.0-flash-lite-preview-02-05:free", | ||||||
|  | 	"google/gemini-2.0-pro-exp-02-05:free", | ||||||
|  | 	"google/gemini-flash-1.5-8b", | ||||||
|  | 	"anthropic/claude-3.5-sonnet", | ||||||
|  | 	"anthropic/claude-3.5-haiku", | ||||||
|  | 	"deepseek/deepseek-r1:free", | ||||||
|  | 	"deepseek/deepseek-r1", | ||||||
|  | 	"qwen/qwen-vl-plus:free", | ||||||
|  | } | ||||||
| @@ -16,10 +16,12 @@ import ( | |||||||
|  |  | ||||||
| var ModelList = []string{ | var ModelList = []string{ | ||||||
| 	"gemini-pro", "gemini-pro-vision", | 	"gemini-pro", "gemini-pro-vision", | ||||||
| 	"gemini-1.5-pro-001", "gemini-1.5-flash-001", | 	"gemini-exp-1206", | ||||||
| 	"gemini-1.5-pro-002", "gemini-1.5-flash-002", | 	"gemini-1.5-pro-001", "gemini-1.5-pro-002", | ||||||
| 	"gemini-2.0-flash-exp", | 	"gemini-1.5-flash-001", "gemini-1.5-flash-002", | ||||||
| 	"gemini-2.0-flash-thinking-exp", "gemini-2.0-flash-thinking-exp-01-21", | 	"gemini-2.0-flash-exp", "gemini-2.0-flash-001", | ||||||
|  | 	"gemini-2.0-flash-lite-preview-02-05", | ||||||
|  | 	"gemini-2.0-flash-thinking-exp-01-21", | ||||||
| } | } | ||||||
|  |  | ||||||
| type Adaptor struct { | type Adaptor struct { | ||||||
|   | |||||||
| @@ -1,5 +1,14 @@ | |||||||
| package xai | package xai | ||||||
|  |  | ||||||
|  | //https://console.x.ai/ | ||||||
|  |  | ||||||
| var ModelList = []string{ | var ModelList = []string{ | ||||||
|  | 	"grok-2", | ||||||
|  | 	"grok-vision-beta", | ||||||
|  | 	"grok-2-vision-1212", | ||||||
|  | 	"grok-2-vision", | ||||||
|  | 	"grok-2-vision-latest", | ||||||
|  | 	"grok-2-1212", | ||||||
|  | 	"grok-2-latest", | ||||||
| 	"grok-beta", | 	"grok-beta", | ||||||
| } | } | ||||||
|   | |||||||
| @@ -1,12 +1,10 @@ | |||||||
| package xunfei | package xunfei | ||||||
|  |  | ||||||
| var ModelList = []string{ | var ModelList = []string{ | ||||||
| 	"SparkDesk", | 	"Spark-Lite", | ||||||
| 	"SparkDesk-v1.1", | 	"Spark-Pro", | ||||||
| 	"SparkDesk-v2.1", | 	"Spark-Pro-128K", | ||||||
| 	"SparkDesk-v3.1", | 	"Spark-Max", | ||||||
| 	"SparkDesk-v3.1-128K", | 	"Spark-Max-32K", | ||||||
| 	"SparkDesk-v3.5", | 	"Spark-4.0-Ultra", | ||||||
| 	"SparkDesk-v3.5-32K", |  | ||||||
| 	"SparkDesk-v4.0", |  | ||||||
| } | } | ||||||
|   | |||||||
							
								
								
									
relay/adaptor/xunfei/domain.go (new file, 97 lines)
							| @@ -0,0 +1,97 @@ | |||||||
|  | package xunfei | ||||||
|  |  | ||||||
|  | import ( | ||||||
|  | 	"fmt" | ||||||
|  | 	"strings" | ||||||
|  | ) | ||||||
|  |  | ||||||
|  | // https://www.xfyun.cn/doc/spark/Web.html#_1-%E6%8E%A5%E5%8F%A3%E8%AF%B4%E6%98%8E | ||||||
|  |  | ||||||
|  | // Spark 4.0 Ultra request URL; the corresponding domain parameter is 4.0Ultra: | ||||||
|  | // | ||||||
|  | // wss://spark-api.xf-yun.com/v4.0/chat | ||||||
|  | // Spark Max-32K request URL; the corresponding domain parameter is max-32k: | ||||||
|  | // | ||||||
|  | // wss://spark-api.xf-yun.com/chat/max-32k | ||||||
|  | // Spark Max request URL; the corresponding domain parameter is generalv3.5: | ||||||
|  | // | ||||||
|  | // wss://spark-api.xf-yun.com/v3.5/chat | ||||||
|  | // Spark Pro-128K request URL; the corresponding domain parameter is pro-128k: | ||||||
|  | // | ||||||
|  | // wss://spark-api.xf-yun.com/chat/pro-128k | ||||||
|  | // Spark Pro request URL; the corresponding domain parameter is generalv3: | ||||||
|  | // | ||||||
|  | // wss://spark-api.xf-yun.com/v3.1/chat | ||||||
|  | // Spark Lite request URL; the corresponding domain parameter is lite: | ||||||
|  | // | ||||||
|  | // wss://spark-api.xf-yun.com/v1.1/chat | ||||||
|  |  | ||||||
|  | // Tiers: Lite, Pro, Pro-128K, Max, Max-32K, and 4.0 Ultra | ||||||
|  |  | ||||||
|  | func parseAPIVersionByModelName(modelName string) string { | ||||||
|  | 	apiVersion := modelName2APIVersion(modelName) | ||||||
|  | 	if apiVersion != "" { | ||||||
|  | 		return apiVersion | ||||||
|  | 	} | ||||||
|  |  | ||||||
|  | 	index := strings.IndexAny(modelName, "-") | ||||||
|  | 	if index != -1 { | ||||||
|  | 		return modelName[index+1:] | ||||||
|  | 	} | ||||||
|  | 	return "" | ||||||
|  | } | ||||||
|  |  | ||||||
|  | func modelName2APIVersion(modelName string) string { | ||||||
|  | 	switch modelName { | ||||||
|  | 	case "Spark-Lite": | ||||||
|  | 		return "v1.1" | ||||||
|  | 	case "Spark-Pro": | ||||||
|  | 		return "v3.1" | ||||||
|  | 	case "Spark-Pro-128K": | ||||||
|  | 		return "v3.1-128K" | ||||||
|  | 	case "Spark-Max": | ||||||
|  | 		return "v3.5" | ||||||
|  | 	case "Spark-Max-32K": | ||||||
|  | 		return "v3.5-32K" | ||||||
|  | 	case "Spark-4.0-Ultra": | ||||||
|  | 		return "v4.0" | ||||||
|  | 	} | ||||||
|  | 	return "" | ||||||
|  | } | ||||||
|  |  | ||||||
|  | // https://www.xfyun.cn/doc/spark/Web.html#_1-%E6%8E%A5%E5%8F%A3%E8%AF%B4%E6%98%8E | ||||||
|  | func apiVersion2domain(apiVersion string) string { | ||||||
|  | 	switch apiVersion { | ||||||
|  | 	case "v1.1": | ||||||
|  | 		return "lite" | ||||||
|  | 	case "v2.1": | ||||||
|  | 		return "generalv2" | ||||||
|  | 	case "v3.1": | ||||||
|  | 		return "generalv3" | ||||||
|  | 	case "v3.1-128K": | ||||||
|  | 		return "pro-128k" | ||||||
|  | 	case "v3.5": | ||||||
|  | 		return "generalv3.5" | ||||||
|  | 	case "v3.5-32K": | ||||||
|  | 		return "max-32k" | ||||||
|  | 	case "v4.0": | ||||||
|  | 		return "4.0Ultra" | ||||||
|  | 	} | ||||||
|  | 	return "general" + apiVersion | ||||||
|  | } | ||||||
|  |  | ||||||
|  | func getXunfeiAuthUrl(apiVersion string, apiKey string, apiSecret string) (string, string) { | ||||||
|  | 	var authUrl string | ||||||
|  | 	domain := apiVersion2domain(apiVersion) | ||||||
|  | 	switch apiVersion { | ||||||
|  | 	case "v3.1-128K": | ||||||
|  | 		authUrl = buildXunfeiAuthUrl(fmt.Sprintf("wss://spark-api.xf-yun.com/chat/pro-128k"), apiKey, apiSecret) | ||||||
|  | 		break | ||||||
|  | 	case "v3.5-32K": | ||||||
|  | 		authUrl = buildXunfeiAuthUrl(fmt.Sprintf("wss://spark-api.xf-yun.com/chat/max-32k"), apiKey, apiSecret) | ||||||
|  | 		break | ||||||
|  | 	default: | ||||||
|  | 		authUrl = buildXunfeiAuthUrl(fmt.Sprintf("wss://spark-api.xf-yun.com/%s/chat", apiVersion), apiKey, apiSecret) | ||||||
|  | 	} | ||||||
|  | 	return domain, authUrl | ||||||
|  | } | ||||||
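
A quick sketch of the mapping chain these relocated helpers implement for the documented Spark tiers. Since `modelName2APIVersion` and `apiVersion2domain` are unexported, the example mirrors the tables locally rather than importing the package:

```go
package main

import "fmt"

// Local mirror of the model -> API version -> domain mapping from
// relay/adaptor/xunfei/domain.go, shown only to make the chain explicit.
func sparkDomain(model string) (version, domain string) {
	versions := map[string]string{
		"Spark-Lite":      "v1.1",
		"Spark-Pro":       "v3.1",
		"Spark-Pro-128K":  "v3.1-128K",
		"Spark-Max":       "v3.5",
		"Spark-Max-32K":   "v3.5-32K",
		"Spark-4.0-Ultra": "v4.0",
	}
	domains := map[string]string{
		"v1.1":      "lite",
		"v3.1":      "generalv3",
		"v3.1-128K": "pro-128k",
		"v3.5":      "generalv3.5",
		"v3.5-32K":  "max-32k",
		"v4.0":      "4.0Ultra",
	}
	version = versions[model]
	domain = domains[version]
	return version, domain
}

func main() {
	v, d := sparkDomain("Spark-Max-32K")
	// Spark-Max-32K -> v3.5-32K -> domain "max-32k",
	// endpoint wss://spark-api.xf-yun.com/chat/max-32k
	fmt.Println(v, d)
}
```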
| @@ -15,6 +15,7 @@ import ( | |||||||
|  |  | ||||||
| 	"github.com/gin-gonic/gin" | 	"github.com/gin-gonic/gin" | ||||||
| 	"github.com/gorilla/websocket" | 	"github.com/gorilla/websocket" | ||||||
|  |  | ||||||
| 	"github.com/songquanpeng/one-api/common" | 	"github.com/songquanpeng/one-api/common" | ||||||
| 	"github.com/songquanpeng/one-api/common/helper" | 	"github.com/songquanpeng/one-api/common/helper" | ||||||
| 	"github.com/songquanpeng/one-api/common/logger" | 	"github.com/songquanpeng/one-api/common/logger" | ||||||
| @@ -270,48 +271,3 @@ func xunfeiMakeRequest(textRequest model.GeneralOpenAIRequest, domain, authUrl, | |||||||
|  |  | ||||||
| 	return dataChan, stopChan, nil | 	return dataChan, stopChan, nil | ||||||
| } | } | ||||||
|  |  | ||||||
| func parseAPIVersionByModelName(modelName string) string { |  | ||||||
| 	index := strings.IndexAny(modelName, "-") |  | ||||||
| 	if index != -1 { |  | ||||||
| 		return modelName[index+1:] |  | ||||||
| 	} |  | ||||||
| 	return "" |  | ||||||
| } |  | ||||||
|  |  | ||||||
| // https://www.xfyun.cn/doc/spark/Web.html#_1-%E6%8E%A5%E5%8F%A3%E8%AF%B4%E6%98%8E |  | ||||||
| func apiVersion2domain(apiVersion string) string { |  | ||||||
| 	switch apiVersion { |  | ||||||
| 	case "v1.1": |  | ||||||
| 		return "lite" |  | ||||||
| 	case "v2.1": |  | ||||||
| 		return "generalv2" |  | ||||||
| 	case "v3.1": |  | ||||||
| 		return "generalv3" |  | ||||||
| 	case "v3.1-128K": |  | ||||||
| 		return "pro-128k" |  | ||||||
| 	case "v3.5": |  | ||||||
| 		return "generalv3.5" |  | ||||||
| 	case "v3.5-32K": |  | ||||||
| 		return "max-32k" |  | ||||||
| 	case "v4.0": |  | ||||||
| 		return "4.0Ultra" |  | ||||||
| 	} |  | ||||||
| 	return "general" + apiVersion |  | ||||||
| } |  | ||||||
|  |  | ||||||
| func getXunfeiAuthUrl(apiVersion string, apiKey string, apiSecret string) (string, string) { |  | ||||||
| 	var authUrl string |  | ||||||
| 	domain := apiVersion2domain(apiVersion) |  | ||||||
| 	switch apiVersion { |  | ||||||
| 	case "v3.1-128K": |  | ||||||
| 		authUrl = buildXunfeiAuthUrl(fmt.Sprintf("wss://spark-api.xf-yun.com/chat/pro-128k"), apiKey, apiSecret) |  | ||||||
| 		break |  | ||||||
| 	case "v3.5-32K": |  | ||||||
| 		authUrl = buildXunfeiAuthUrl(fmt.Sprintf("wss://spark-api.xf-yun.com/chat/max-32k"), apiKey, apiSecret) |  | ||||||
| 		break |  | ||||||
| 	default: |  | ||||||
| 		authUrl = buildXunfeiAuthUrl(fmt.Sprintf("wss://spark-api.xf-yun.com/%s/chat", apiVersion), apiKey, apiSecret) |  | ||||||
| 	} |  | ||||||
| 	return domain, authUrl |  | ||||||
| } |  | ||||||
|   | |||||||
							
								
								
									
relay/adaptor/xunfeiv2/constants.go (new file, 12 lines)
							| @@ -0,0 +1,12 @@ | |||||||
|  | package xunfeiv2 | ||||||
|  |  | ||||||
|  | // https://www.xfyun.cn/doc/spark/HTTP%E8%B0%83%E7%94%A8%E6%96%87%E6%A1%A3.html#_3-%E8%AF%B7%E6%B1%82%E8%AF%B4%E6%98%8E | ||||||
|  |  | ||||||
|  | var ModelList = []string{ | ||||||
|  | 	"lite", | ||||||
|  | 	"generalv3", | ||||||
|  | 	"pro-128k", | ||||||
|  | 	"generalv3.5", | ||||||
|  | 	"max-32k", | ||||||
|  | 	"4.0Ultra", | ||||||
|  | } | ||||||
| @@ -3,8 +3,10 @@ package ratio | |||||||
| import ( | import ( | ||||||
| 	"encoding/json" | 	"encoding/json" | ||||||
| 	"github.com/songquanpeng/one-api/common/logger" | 	"github.com/songquanpeng/one-api/common/logger" | ||||||
|  | 	"sync" | ||||||
| ) | ) | ||||||
|  |  | ||||||
|  | var groupRatioLock sync.RWMutex | ||||||
| var GroupRatio = map[string]float64{ | var GroupRatio = map[string]float64{ | ||||||
| 	"default": 1, | 	"default": 1, | ||||||
| 	"vip":     1, | 	"vip":     1, | ||||||
| @@ -20,11 +22,15 @@ func GroupRatio2JSONString() string { | |||||||
| } | } | ||||||
|  |  | ||||||
| func UpdateGroupRatioByJSONString(jsonStr string) error { | func UpdateGroupRatioByJSONString(jsonStr string) error { | ||||||
|  | 	groupRatioLock.Lock() | ||||||
|  | 	defer groupRatioLock.Unlock() | ||||||
| 	GroupRatio = make(map[string]float64) | 	GroupRatio = make(map[string]float64) | ||||||
| 	return json.Unmarshal([]byte(jsonStr), &GroupRatio) | 	return json.Unmarshal([]byte(jsonStr), &GroupRatio) | ||||||
| } | } | ||||||
|  |  | ||||||
| func GetGroupRatio(name string) float64 { | func GetGroupRatio(name string) float64 { | ||||||
|  | 	groupRatioLock.RLock() | ||||||
|  | 	defer groupRatioLock.RUnlock() | ||||||
| 	ratio, ok := GroupRatio[name] | 	ratio, ok := GroupRatio[name] | ||||||
| 	if !ok { | 	if !ok { | ||||||
| 		logger.SysError("group ratio not found: " + name) | 		logger.SysError("group ratio not found: " + name) | ||||||
|   | |||||||
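
The lock matters because the ratio map can now be rewritten from the admin options path while request handlers read it concurrently; without it, a concurrent map read and write would panic in Go. A minimal sketch of that interaction, assuming the exported functions shown in the diff and an import path of `relay/billing/ratio` (consistent with the `billingratio` alias used later in this compare):

```go
package main

import (
	"fmt"
	"sync"

	"github.com/songquanpeng/one-api/relay/billing/ratio"
)

func main() {
	var wg sync.WaitGroup

	// Writer: simulates an admin updating group ratios from JSON options.
	wg.Add(1)
	go func() {
		defer wg.Done()
		if err := ratio.UpdateGroupRatioByJSONString(`{"default": 1, "vip": 0.8}`); err != nil {
			fmt.Println("update failed:", err)
		}
	}()

	// Readers: simulate request handlers looking up a group ratio in parallel.
	for i := 0; i < 4; i++ {
		wg.Add(1)
		go func() {
			defer wg.Done()
			_ = ratio.GetGroupRatio("default")
		}()
	}

	wg.Wait()
	fmt.Println("default ratio:", ratio.GetGroupRatio("default"))
}
```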
| @@ -4,6 +4,7 @@ import ( | |||||||
| 	"encoding/json" | 	"encoding/json" | ||||||
| 	"fmt" | 	"fmt" | ||||||
| 	"strings" | 	"strings" | ||||||
|  | 	"sync" | ||||||
|  |  | ||||||
| 	"github.com/songquanpeng/one-api/common/logger" | 	"github.com/songquanpeng/one-api/common/logger" | ||||||
| ) | ) | ||||||
| @@ -15,6 +16,8 @@ const ( | |||||||
| 	RMB       = USD / USD2RMB | 	RMB       = USD / USD2RMB | ||||||
| ) | ) | ||||||
|  |  | ||||||
|  | var modelRatioLock sync.RWMutex | ||||||
|  |  | ||||||
| // ModelRatio | // ModelRatio | ||||||
| // https://platform.openai.com/docs/models/model-endpoint-compatibility | // https://platform.openai.com/docs/models/model-endpoint-compatibility | ||||||
| // https://cloud.baidu.com/doc/WENXINWORKSHOP/s/Blfmc9dlf | // https://cloud.baidu.com/doc/WENXINWORKSHOP/s/Blfmc9dlf | ||||||
| @@ -88,11 +91,11 @@ var ModelRatio = map[string]float64{ | |||||||
| 	"claude-2.1":                 8.0 / 1000 * USD, | 	"claude-2.1":                 8.0 / 1000 * USD, | ||||||
| 	"claude-3-haiku-20240307":    0.25 / 1000 * USD, | 	"claude-3-haiku-20240307":    0.25 / 1000 * USD, | ||||||
| 	"claude-3-5-haiku-20241022":  1.0 / 1000 * USD, | 	"claude-3-5-haiku-20241022":  1.0 / 1000 * USD, | ||||||
| 	"claude-3-5-haiku-latest":    1.0 / 1000 * USD,	 | 	"claude-3-5-haiku-latest":    1.0 / 1000 * USD, | ||||||
| 	"claude-3-sonnet-20240229":   3.0 / 1000 * USD, | 	"claude-3-sonnet-20240229":   3.0 / 1000 * USD, | ||||||
| 	"claude-3-5-sonnet-20240620": 3.0 / 1000 * USD, | 	"claude-3-5-sonnet-20240620": 3.0 / 1000 * USD, | ||||||
| 	"claude-3-5-sonnet-20241022": 3.0 / 1000 * USD, | 	"claude-3-5-sonnet-20241022": 3.0 / 1000 * USD, | ||||||
| 	"claude-3-5-sonnet-latest"  : 3.0 / 1000 * USD,	 | 	"claude-3-5-sonnet-latest":   3.0 / 1000 * USD, | ||||||
| 	"claude-3-opus-20240229":     15.0 / 1000 * USD, | 	"claude-3-opus-20240229":     15.0 / 1000 * USD, | ||||||
| 	// https://cloud.baidu.com/doc/WENXINWORKSHOP/s/hlrk4akp7 | 	// https://cloud.baidu.com/doc/WENXINWORKSHOP/s/hlrk4akp7 | ||||||
| 	"ERNIE-4.0-8K":       0.120 * RMB, | 	"ERNIE-4.0-8K":       0.120 * RMB, | ||||||
| @@ -112,15 +115,24 @@ var ModelRatio = map[string]float64{ | |||||||
| 	"bge-large-en":       0.002 * RMB, | 	"bge-large-en":       0.002 * RMB, | ||||||
| 	"tao-8k":             0.002 * RMB, | 	"tao-8k":             0.002 * RMB, | ||||||
| 	// https://ai.google.dev/pricing | 	// https://ai.google.dev/pricing | ||||||
| 	"gemini-pro":                          1, // $0.00025 / 1k characters -> $0.001 / 1k tokens | 	// https://cloud.google.com/vertex-ai/generative-ai/pricing | ||||||
| 	"gemini-1.0-pro":                      1, | 	// "gemma-2-2b-it":                       0, | ||||||
| 	"gemini-1.5-pro":                      1, | 	// "gemma-2-9b-it":                       0, | ||||||
| 	"gemini-1.5-pro-001":                  1, | 	// "gemma-2-27b-it":                      0, | ||||||
| 	"gemini-1.5-flash":                    1, | 	"gemini-pro":                          0.25 * MILLI_USD, // $0.00025 / 1k characters -> $0.001 / 1k tokens | ||||||
| 	"gemini-1.5-flash-001":                1, | 	"gemini-1.0-pro":                      0.125 * MILLI_USD, | ||||||
| 	"gemini-2.0-flash-exp":                1, | 	"gemini-1.5-pro":                      1.25 * MILLI_USD, | ||||||
| 	"gemini-2.0-flash-thinking-exp":       1, | 	"gemini-1.5-pro-001":                  1.25 * MILLI_USD, | ||||||
| 	"gemini-2.0-flash-thinking-exp-01-21": 1, | 	"gemini-1.5-pro-experimental":         1.25 * MILLI_USD, | ||||||
|  | 	"gemini-1.5-flash":                    0.075 * MILLI_USD, | ||||||
|  | 	"gemini-1.5-flash-001":                0.075 * MILLI_USD, | ||||||
|  | 	"gemini-1.5-flash-8b":                 0.0375 * MILLI_USD, | ||||||
|  | 	"gemini-2.0-flash-exp":                0.075 * MILLI_USD, | ||||||
|  | 	"gemini-2.0-flash":                    0.15 * MILLI_USD, | ||||||
|  | 	"gemini-2.0-flash-001":                0.15 * MILLI_USD, | ||||||
|  | 	"gemini-2.0-flash-lite-preview-02-05": 0.075 * MILLI_USD, | ||||||
|  | 	"gemini-2.0-flash-thinking-exp-01-21": 0.075 * MILLI_USD, | ||||||
|  | 	"gemini-2.0-pro-exp-02-05":            1.25 * MILLI_USD, | ||||||
| 	"aqa":                                 1, | 	"aqa":                                 1, | ||||||
| 	// https://open.bigmodel.cn/pricing | 	// https://open.bigmodel.cn/pricing | ||||||
| 	"glm-zero-preview": 0.01 * RMB, | 	"glm-zero-preview": 0.01 * RMB, | ||||||
| @@ -417,11 +429,15 @@ func ModelRatio2JSONString() string { | |||||||
| } | } | ||||||
|  |  | ||||||
| func UpdateModelRatioByJSONString(jsonStr string) error { | func UpdateModelRatioByJSONString(jsonStr string) error { | ||||||
|  | 	modelRatioLock.Lock() | ||||||
|  | 	defer modelRatioLock.Unlock() | ||||||
| 	ModelRatio = make(map[string]float64) | 	ModelRatio = make(map[string]float64) | ||||||
| 	return json.Unmarshal([]byte(jsonStr), &ModelRatio) | 	return json.Unmarshal([]byte(jsonStr), &ModelRatio) | ||||||
| } | } | ||||||
|  |  | ||||||
| func GetModelRatio(name string, channelType int) float64 { | func GetModelRatio(name string, channelType int) float64 { | ||||||
|  | 	modelRatioLock.RLock() | ||||||
|  | 	defer modelRatioLock.RUnlock() | ||||||
| 	if strings.HasPrefix(name, "qwen-") && strings.HasSuffix(name, "-internet") { | 	if strings.HasPrefix(name, "qwen-") && strings.HasSuffix(name, "-internet") { | ||||||
| 		name = strings.TrimSuffix(name, "-internet") | 		name = strings.TrimSuffix(name, "-internet") | ||||||
| 	} | 	} | ||||||
|   | |||||||
| @@ -48,5 +48,8 @@ const ( | |||||||
| 	SiliconFlow | 	SiliconFlow | ||||||
| 	XAI | 	XAI | ||||||
| 	Replicate | 	Replicate | ||||||
|  | 	BaiduV2 | ||||||
|  | 	XunfeiV2 | ||||||
|  | 	AliBailian | ||||||
| 	Dummy | 	Dummy | ||||||
| ) | ) | ||||||
|   | |||||||
| @@ -48,6 +48,9 @@ var ChannelBaseURLs = []string{ | |||||||
| 	"https://api.siliconflow.cn",                // 44 | 	"https://api.siliconflow.cn",                // 44 | ||||||
| 	"https://api.x.ai",                          // 45 | 	"https://api.x.ai",                          // 45 | ||||||
| 	"https://api.replicate.com/v1/models/",      // 46 | 	"https://api.replicate.com/v1/models/",      // 46 | ||||||
|  | 	"https://qianfan.baidubce.com",              // 47 | ||||||
|  | 	"https://spark-api-open.xf-yun.com",         // 48 | ||||||
|  | 	"https://dashscope.aliyuncs.com",            // 49 | ||||||
| } | } | ||||||
|  |  | ||||||
| func init() { | func init() { | ||||||
|   | |||||||
| @@ -38,7 +38,7 @@ func RelayTextHelper(c *gin.Context) *model.ErrorWithStatusCode { | |||||||
| 	textRequest.Model, _ = getMappedModelName(textRequest.Model, meta.ModelMapping) | 	textRequest.Model, _ = getMappedModelName(textRequest.Model, meta.ModelMapping) | ||||||
| 	meta.ActualModelName = textRequest.Model | 	meta.ActualModelName = textRequest.Model | ||||||
| 	// set system prompt if not empty | 	// set system prompt if not empty | ||||||
| 	systemPromptReset := setSystemPrompt(ctx, textRequest, meta.SystemPrompt) | 	systemPromptReset := setSystemPrompt(ctx, textRequest, meta.ForcedSystemPrompt) | ||||||
| 	// get model ratio & group ratio | 	// get model ratio & group ratio | ||||||
| 	modelRatio := billingratio.GetModelRatio(textRequest.Model, meta.ChannelType) | 	modelRatio := billingratio.GetModelRatio(textRequest.Model, meta.ChannelType) | ||||||
| 	groupRatio := billingratio.GetGroupRatio(meta.Group) | 	groupRatio := billingratio.GetGroupRatio(meta.Group) | ||||||
| @@ -88,7 +88,11 @@ func RelayTextHelper(c *gin.Context) *model.ErrorWithStatusCode { | |||||||
| } | } | ||||||
|  |  | ||||||
| func getRequestBody(c *gin.Context, meta *meta.Meta, textRequest *model.GeneralOpenAIRequest, adaptor adaptor.Adaptor) (io.Reader, error) { | func getRequestBody(c *gin.Context, meta *meta.Meta, textRequest *model.GeneralOpenAIRequest, adaptor adaptor.Adaptor) (io.Reader, error) { | ||||||
| 	if !config.EnforceIncludeUsage && meta.APIType == apitype.OpenAI && meta.OriginModelName == meta.ActualModelName && meta.ChannelType != channeltype.Baichuan { | 	if !config.EnforceIncludeUsage && | ||||||
|  | 		meta.APIType == apitype.OpenAI && | ||||||
|  | 		meta.OriginModelName == meta.ActualModelName && | ||||||
|  | 		meta.ChannelType != channeltype.Baichuan && | ||||||
|  | 		meta.ForcedSystemPrompt == "" { | ||||||
| 		// no need to convert request for openai | 		// no need to convert request for openai | ||||||
| 		return c.Request.Body, nil | 		return c.Request.Body, nil | ||||||
| 	} | 	} | ||||||
|   | |||||||
| @@ -30,29 +30,29 @@ type Meta struct { | |||||||
| 	// OriginModelName is the model name from the raw user request | 	// OriginModelName is the model name from the raw user request | ||||||
| 	OriginModelName string | 	OriginModelName string | ||||||
| 	// ActualModelName is the model name after mapping | 	// ActualModelName is the model name after mapping | ||||||
| 	ActualModelName string | 	ActualModelName    string | ||||||
| 	RequestURLPath  string | 	RequestURLPath     string | ||||||
| 	PromptTokens    int // only for DoResponse | 	PromptTokens       int // only for DoResponse | ||||||
| 	SystemPrompt    string | 	ForcedSystemPrompt string | ||||||
| 	StartTime       time.Time | 	StartTime          time.Time | ||||||
| } | } | ||||||
|  |  | ||||||
| func GetByContext(c *gin.Context) *Meta { | func GetByContext(c *gin.Context) *Meta { | ||||||
| 	meta := Meta{ | 	meta := Meta{ | ||||||
| 		Mode:            relaymode.GetByPath(c.Request.URL.Path), | 		Mode:               relaymode.GetByPath(c.Request.URL.Path), | ||||||
| 		ChannelType:     c.GetInt(ctxkey.Channel), | 		ChannelType:        c.GetInt(ctxkey.Channel), | ||||||
| 		ChannelId:       c.GetInt(ctxkey.ChannelId), | 		ChannelId:          c.GetInt(ctxkey.ChannelId), | ||||||
| 		TokenId:         c.GetInt(ctxkey.TokenId), | 		TokenId:            c.GetInt(ctxkey.TokenId), | ||||||
| 		TokenName:       c.GetString(ctxkey.TokenName), | 		TokenName:          c.GetString(ctxkey.TokenName), | ||||||
| 		UserId:          c.GetInt(ctxkey.Id), | 		UserId:             c.GetInt(ctxkey.Id), | ||||||
| 		Group:           c.GetString(ctxkey.Group), | 		Group:              c.GetString(ctxkey.Group), | ||||||
| 		ModelMapping:    c.GetStringMapString(ctxkey.ModelMapping), | 		ModelMapping:       c.GetStringMapString(ctxkey.ModelMapping), | ||||||
| 		OriginModelName: c.GetString(ctxkey.RequestModel), | 		OriginModelName:    c.GetString(ctxkey.RequestModel), | ||||||
| 		BaseURL:         c.GetString(ctxkey.BaseURL), | 		BaseURL:            c.GetString(ctxkey.BaseURL), | ||||||
| 		APIKey:          strings.TrimPrefix(c.Request.Header.Get("Authorization"), "Bearer "), | 		APIKey:             strings.TrimPrefix(c.Request.Header.Get("Authorization"), "Bearer "), | ||||||
| 		RequestURLPath:  c.Request.URL.String(), | 		RequestURLPath:     c.Request.URL.String(), | ||||||
| 		SystemPrompt:    c.GetString(ctxkey.SystemPrompt), | 		ForcedSystemPrompt: c.GetString(ctxkey.SystemPrompt), | ||||||
| 		StartTime:       time.Now(), | 		StartTime:          time.Now(), | ||||||
| 	} | 	} | ||||||
| 	cfg, ok := c.Get(ctxkey.Config) | 	cfg, ok := c.Get(ctxkey.Config) | ||||||
| 	if ok { | 	if ok { | ||||||
|   | |||||||
| @@ -26,6 +26,7 @@ type GeneralOpenAIRequest struct { | |||||||
| 	Messages            []Message       `json:"messages,omitempty"` | 	Messages            []Message       `json:"messages,omitempty"` | ||||||
| 	Model               string          `json:"model,omitempty"` | 	Model               string          `json:"model,omitempty"` | ||||||
| 	Store               *bool           `json:"store,omitempty"` | 	Store               *bool           `json:"store,omitempty"` | ||||||
|  | 	ReasoningEffort     *string         `json:"reasoning_effort,omitempty"` | ||||||
| 	Metadata            any             `json:"metadata,omitempty"` | 	Metadata            any             `json:"metadata,omitempty"` | ||||||
| 	FrequencyPenalty    *float64        `json:"frequency_penalty,omitempty"` | 	FrequencyPenalty    *float64        `json:"frequency_penalty,omitempty"` | ||||||
| 	LogitBias           any             `json:"logit_bias,omitempty"` | 	LogitBias           any             `json:"logit_bias,omitempty"` | ||||||
|   | |||||||
| @@ -1,11 +1,12 @@ | |||||||
| package model | package model | ||||||
|  |  | ||||||
| type Message struct { | type Message struct { | ||||||
| 	Role       string  `json:"role,omitempty"` | 	Role             string  `json:"role,omitempty"` | ||||||
| 	Content    any     `json:"content,omitempty"` | 	Content          any     `json:"content,omitempty"` | ||||||
| 	Name       *string `json:"name,omitempty"` | 	ReasoningContent any     `json:"reasoning_content,omitempty"` | ||||||
| 	ToolCalls  []Tool  `json:"tool_calls,omitempty"` | 	Name             *string `json:"name,omitempty"` | ||||||
| 	ToolCallId string  `json:"tool_call_id,omitempty"` | 	ToolCalls        []Tool  `json:"tool_calls,omitempty"` | ||||||
|  | 	ToolCallId       string  `json:"tool_call_id,omitempty"` | ||||||
| } | } | ||||||
|  |  | ||||||
| func (m Message) IsStringContent() bool { | func (m Message) IsStringContent() bool { | ||||||
|   | |||||||
| @@ -4,6 +4,14 @@ type Usage struct { | |||||||
| 	PromptTokens     int `json:"prompt_tokens"` | 	PromptTokens     int `json:"prompt_tokens"` | ||||||
| 	CompletionTokens int `json:"completion_tokens"` | 	CompletionTokens int `json:"completion_tokens"` | ||||||
| 	TotalTokens      int `json:"total_tokens"` | 	TotalTokens      int `json:"total_tokens"` | ||||||
|  |  | ||||||
|  | 	CompletionTokensDetails *CompletionTokensDetails `json:"completion_tokens_details,omitempty"` | ||||||
|  | } | ||||||
|  |  | ||||||
|  | type CompletionTokensDetails struct { | ||||||
|  | 	ReasoningTokens          int `json:"reasoning_tokens"` | ||||||
|  | 	AcceptedPredictionTokens int `json:"accepted_prediction_tokens"` | ||||||
|  | 	RejectedPredictionTokens int `json:"rejected_prediction_tokens"` | ||||||
| } | } | ||||||
|  |  | ||||||
| type Error struct { | type Error struct { | ||||||
|   | |||||||
| @@ -7,7 +7,7 @@ export const CHANNEL_OPTIONS = [ | |||||||
|   { key: 24, text: 'Google Gemini', value: 24, color: 'orange' }, |   { key: 24, text: 'Google Gemini', value: 24, color: 'orange' }, | ||||||
|   { key: 28, text: 'Mistral AI', value: 28, color: 'orange' }, |   { key: 28, text: 'Mistral AI', value: 28, color: 'orange' }, | ||||||
|   { key: 41, text: 'Novita', value: 41, color: 'purple' }, |   { key: 41, text: 'Novita', value: 41, color: 'purple' }, | ||||||
|   { key: 40, text: '字节跳动豆包', value: 40, color: 'blue' }, |   {key: 40, text: '字节火山引擎', value: 40, color: 'blue'}, | ||||||
|   { key: 15, text: '百度文心千帆', value: 15, color: 'blue' }, |   { key: 15, text: '百度文心千帆', value: 15, color: 'blue' }, | ||||||
|   { key: 17, text: '阿里通义千问', value: 17, color: 'orange' }, |   { key: 17, text: '阿里通义千问', value: 17, color: 'orange' }, | ||||||
|   { key: 18, text: '讯飞星火认知', value: 18, color: 'blue' }, |   { key: 18, text: '讯飞星火认知', value: 18, color: 'blue' }, | ||||||
| @@ -35,7 +35,7 @@ export const CHANNEL_OPTIONS = [ | |||||||
|   { key: 8, text: '自定义渠道', value: 8, color: 'pink' }, |   { key: 8, text: '自定义渠道', value: 8, color: 'pink' }, | ||||||
|   { key: 22, text: '知识库:FastGPT', value: 22, color: 'blue' }, |   { key: 22, text: '知识库:FastGPT', value: 22, color: 'blue' }, | ||||||
|   { key: 21, text: '知识库:AI Proxy', value: 21, color: 'purple' }, |   { key: 21, text: '知识库:AI Proxy', value: 21, color: 'purple' }, | ||||||
|   { key: 20, text: '代理:OpenRouter', value: 20, color: 'black' }, |   {key: 20, text: 'OpenRouter', value: 20, color: 'black'}, | ||||||
|   { key: 2, text: '代理:API2D', value: 2, color: 'blue' }, |   { key: 2, text: '代理:API2D', value: 2, color: 'blue' }, | ||||||
|   { key: 5, text: '代理:OpenAI-SB', value: 5, color: 'brown' }, |   { key: 5, text: '代理:OpenAI-SB', value: 5, color: 'brown' }, | ||||||
|   { key: 7, text: '代理:OhMyGPT', value: 7, color: 'purple' }, |   { key: 7, text: '代理:OhMyGPT', value: 7, color: 'purple' }, | ||||||
|   | |||||||
| @@ -49,7 +49,7 @@ export const CHANNEL_OPTIONS = { | |||||||
|   }, |   }, | ||||||
|   40: { |   40: { | ||||||
|     key: 40, |     key: 40, | ||||||
|     text: '字节跳动豆包', |     text: '字节火山引擎', | ||||||
|     value: 40, |     value: 40, | ||||||
|     color: 'primary' |     color: 'primary' | ||||||
|   }, |   }, | ||||||
| @@ -217,7 +217,7 @@ export const CHANNEL_OPTIONS = { | |||||||
|   }, |   }, | ||||||
|   20: { |   20: { | ||||||
|     key: 20, |     key: 20, | ||||||
|     text: '代理:OpenRouter', |       text: 'OpenRouter', | ||||||
|     value: 20, |     value: 20, | ||||||
|     color: 'success' |     color: 'success' | ||||||
|   }, |   }, | ||||||
|   | |||||||
| @@ -67,6 +67,8 @@ function renderBalance(type, balance, t) { | |||||||
|       return <span>¥{balance.toFixed(2)}</span>; |       return <span>¥{balance.toFixed(2)}</span>; | ||||||
|     case 13: // AIGC2D |     case 13: // AIGC2D | ||||||
|       return <span>{renderNumber(balance)}</span>; |       return <span>{renderNumber(balance)}</span>; | ||||||
|  |     case 20: // OpenRouter | ||||||
|  |       return <span>${balance.toFixed(2)}</span>; | ||||||
|     case 36: // DeepSeek |     case 36: // DeepSeek | ||||||
|       return <span>¥{balance.toFixed(2)}</span>; |       return <span>¥{balance.toFixed(2)}</span>; | ||||||
|     case 44: // SiliconFlow |     case 44: // SiliconFlow | ||||||
| @@ -93,30 +95,32 @@ const ChannelsTable = () => { | |||||||
|   const [showPrompt, setShowPrompt] = useState(shouldShowPrompt(promptID)); |   const [showPrompt, setShowPrompt] = useState(shouldShowPrompt(promptID)); | ||||||
|   const [showDetail, setShowDetail] = useState(isShowDetail()); |   const [showDetail, setShowDetail] = useState(isShowDetail()); | ||||||
|  |  | ||||||
|  |   const processChannelData = (channel) => { | ||||||
|  |     if (channel.models === '') { | ||||||
|  |       channel.models = []; | ||||||
|  |       channel.test_model = ''; | ||||||
|  |     } else { | ||||||
|  |       channel.models = channel.models.split(','); | ||||||
|  |       if (channel.models.length > 0) { | ||||||
|  |         channel.test_model = channel.models[0]; | ||||||
|  |       } | ||||||
|  |       channel.model_options = channel.models.map((model) => { | ||||||
|  |         return { | ||||||
|  |           key: model, | ||||||
|  |           text: model, | ||||||
|  |           value: model, | ||||||
|  |         }; | ||||||
|  |       }); | ||||||
|  |       console.log('channel', channel); | ||||||
|  |     } | ||||||
|  |     return channel; | ||||||
|  |   }; | ||||||
|  |  | ||||||
|   const loadChannels = async (startIdx) => { |   const loadChannels = async (startIdx) => { | ||||||
|     const res = await API.get(`/api/channel/?p=${startIdx}`); |     const res = await API.get(`/api/channel/?p=${startIdx}`); | ||||||
|     const { success, message, data } = res.data; |     const { success, message, data } = res.data; | ||||||
|     if (success) { |     if (success) { | ||||||
|       let localChannels = data.map((channel) => { |       let localChannels = data.map(processChannelData); | ||||||
|         if (channel.models === '') { |  | ||||||
|           channel.models = []; |  | ||||||
|           channel.test_model = ''; |  | ||||||
|         } else { |  | ||||||
|           channel.models = channel.models.split(','); |  | ||||||
|           if (channel.models.length > 0) { |  | ||||||
|             channel.test_model = channel.models[0]; |  | ||||||
|           } |  | ||||||
|           channel.model_options = channel.models.map((model) => { |  | ||||||
|             return { |  | ||||||
|               key: model, |  | ||||||
|               text: model, |  | ||||||
|               value: model, |  | ||||||
|             }; |  | ||||||
|           }); |  | ||||||
|           console.log('channel', channel); |  | ||||||
|         } |  | ||||||
|         return channel; |  | ||||||
|       }); |  | ||||||
|       if (startIdx === 0) { |       if (startIdx === 0) { | ||||||
|         setChannels(localChannels); |         setChannels(localChannels); | ||||||
|       } else { |       } else { | ||||||
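This refactor lifts the per-channel normalization out of `loadChannels` into a reusable `processChannelData`, so the search path in the next hunk can share it. A standalone sketch of the transformation it performs on one raw API row (mirroring the hunk above, trimmed of the debug logging):

```js
// Sketch of the normalization applied to one raw channel row.
function processChannelData(channel) {
  if (channel.models === '') {
    channel.models = [];
    channel.test_model = '';
  } else {
    channel.models = channel.models.split(',');
    channel.test_model = channel.models[0];
    channel.model_options = channel.models.map((model) => ({
      key: model,
      text: model,
      value: model,
    }));
  }
  return channel;
}

// Example: { models: 'gpt-4o,gpt-4o-mini' } becomes
// { models: ['gpt-4o', 'gpt-4o-mini'], test_model: 'gpt-4o', model_options: [...] }
console.log(processChannelData({ models: 'gpt-4o,gpt-4o-mini' }));
```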
| @@ -301,7 +305,8 @@ const ChannelsTable = () => { | |||||||
|     const res = await API.get(`/api/channel/search?keyword=${searchKeyword}`); |     const res = await API.get(`/api/channel/search?keyword=${searchKeyword}`); | ||||||
|     const { success, message, data } = res.data; |     const { success, message, data } = res.data; | ||||||
|     if (success) { |     if (success) { | ||||||
|       setChannels(data); |       let localChannels = data.map(processChannelData); | ||||||
|  |       setChannels(localChannels); | ||||||
|       setActivePage(1); |       setActivePage(1); | ||||||
|     } else { |     } else { | ||||||
|       showError(message); |       showError(message); | ||||||
| @@ -495,7 +500,6 @@ const ChannelsTable = () => { | |||||||
|               onClick={() => { |               onClick={() => { | ||||||
|                 sortChannel('balance'); |                 sortChannel('balance'); | ||||||
|               }} |               }} | ||||||
|               hidden={!showDetail} |  | ||||||
|             > |             > | ||||||
|               {t('channel.table.balance')} |               {t('channel.table.balance')} | ||||||
|             </Table.HeaderCell> |             </Table.HeaderCell> | ||||||
| @@ -504,6 +508,7 @@ const ChannelsTable = () => { | |||||||
|               onClick={() => { |               onClick={() => { | ||||||
|                 sortChannel('priority'); |                 sortChannel('priority'); | ||||||
|               }} |               }} | ||||||
|  |               hidden={!showDetail} | ||||||
|             > |             > | ||||||
|               {t('channel.table.priority')} |               {t('channel.table.priority')} | ||||||
|             </Table.HeaderCell> |             </Table.HeaderCell> | ||||||
| @@ -543,7 +548,7 @@ const ChannelsTable = () => { | |||||||
|                       basic |                       basic | ||||||
|                     /> |                     /> | ||||||
|                   </Table.Cell> |                   </Table.Cell> | ||||||
|                   <Table.Cell hidden={!showDetail}> |                   <Table.Cell> | ||||||
|                     <Popup |                     <Popup | ||||||
|                       trigger={ |                       trigger={ | ||||||
|                         <span |                         <span | ||||||
| @@ -559,7 +564,7 @@ const ChannelsTable = () => { | |||||||
|                       basic |                       basic | ||||||
|                     /> |                     /> | ||||||
|                   </Table.Cell> |                   </Table.Cell> | ||||||
|                   <Table.Cell> |                   <Table.Cell hidden={!showDetail}> | ||||||
|                     <Popup |                     <Popup | ||||||
|                       trigger={ |                       trigger={ | ||||||
|                         <Input |                         <Input | ||||||
| @@ -593,7 +598,15 @@ const ChannelsTable = () => { | |||||||
|                     /> |                     /> | ||||||
|                   </Table.Cell> |                   </Table.Cell> | ||||||
|                   <Table.Cell> |                   <Table.Cell> | ||||||
|                     <div> |                     <div | ||||||
|  |                       style={{ | ||||||
|  |                         display: 'flex', | ||||||
|  |                         alignItems: 'center', | ||||||
|  |                         flexWrap: 'wrap', | ||||||
|  |                         gap: '2px', | ||||||
|  |                         rowGap: '6px', | ||||||
|  |                       }} | ||||||
|  |                     > | ||||||
|                       <Button |                       <Button | ||||||
|                         size={'tiny'} |                         size={'tiny'} | ||||||
|                         positive |                         positive | ||||||
|   | |||||||
| @@ -1,48 +1,87 @@ | |||||||
| export const CHANNEL_OPTIONS = [ | export const CHANNEL_OPTIONS = [ | ||||||
|     { key: 1, text: 'OpenAI', value: 1, color: 'green' }, |     {key: 1, text: 'OpenAI', value: 1, color: 'green'}, | ||||||
|     { key: 14, text: 'Anthropic Claude', value: 14, color: 'black' }, |     {key: 14, text: 'Anthropic Claude', value: 14, color: 'black'}, | ||||||
|     { key: 33, text: 'AWS', value: 33, color: 'black' }, |     {key: 33, text: 'AWS', value: 33, color: 'black'}, | ||||||
|     { key: 3, text: 'Azure OpenAI', value: 3, color: 'olive' }, |     {key: 3, text: 'Azure OpenAI', value: 3, color: 'olive'}, | ||||||
|     { key: 11, text: 'Google PaLM2', value: 11, color: 'orange' }, |     {key: 11, text: 'Google PaLM2', value: 11, color: 'orange'}, | ||||||
|     { key: 24, text: 'Google Gemini', value: 24, color: 'orange' }, |     {key: 24, text: 'Google Gemini', value: 24, color: 'orange'}, | ||||||
|     { key: 28, text: 'Mistral AI', value: 28, color: 'orange' }, |     {key: 28, text: 'Mistral AI', value: 28, color: 'orange'}, | ||||||
|     { key: 41, text: 'Novita', value: 41, color: 'purple' }, |     {key: 41, text: 'Novita', value: 41, color: 'purple'}, | ||||||
|     { key: 40, text: '字节跳动豆包', value: 40, color: 'blue' }, |   { | ||||||
|     { key: 15, text: '百度文心千帆', value: 15, color: 'blue' }, |     key: 40, | ||||||
|     { key: 17, text: '阿里通义千问', value: 17, color: 'orange' }, |     text: '字节火山引擎', | ||||||
|     { key: 18, text: '讯飞星火认知', value: 18, color: 'blue' }, |     value: 40, | ||||||
|     { key: 16, text: '智谱 ChatGLM', value: 16, color: 'violet' }, |     color: 'blue', | ||||||
|     { key: 19, text: '360 智脑', value: 19, color: 'blue' }, |     description: '原字节跳动豆包', | ||||||
|     { key: 25, text: 'Moonshot AI', value: 25, color: 'black' }, |   }, | ||||||
|     { key: 23, text: '腾讯混元', value: 23, color: 'teal' }, |   { | ||||||
|     { key: 26, text: '百川大模型', value: 26, color: 'orange' }, |     key: 15, | ||||||
|     { key: 27, text: 'MiniMax', value: 27, color: 'red' }, |     text: '百度文心千帆', | ||||||
|     { key: 29, text: 'Groq', value: 29, color: 'orange' }, |     value: 15, | ||||||
|     { key: 30, text: 'Ollama', value: 30, color: 'black' }, |     color: 'blue', | ||||||
|     { key: 31, text: '零一万物', value: 31, color: 'green' }, |     tip: '请前往<a href="https://console.bce.baidu.com/qianfan/ais/console/applicationConsole/application/v1" target="_blank">此处</a>获取 AK(API Key)以及 SK(Secret Key),注意,V2 版本接口请使用 <strong>百度文心千帆 V2 </strong>渠道类型', | ||||||
|     { key: 32, text: '阶跃星辰', value: 32, color: 'blue' }, |   }, | ||||||
|     { key: 34, text: 'Coze', value: 34, color: 'blue' }, |   { | ||||||
|     { key: 35, text: 'Cohere', value: 35, color: 'blue' }, |     key: 47, | ||||||
|     { key: 36, text: 'DeepSeek', value: 36, color: 'black' }, |     text: '百度文心千帆 V2', | ||||||
|     { key: 37, text: 'Cloudflare', value: 37, color: 'orange' }, |     value: 47, | ||||||
|     { key: 38, text: 'DeepL', value: 38, color: 'black' }, |     color: 'blue', | ||||||
|     { key: 39, text: 'together.ai', value: 39, color: 'blue' }, |     tip: '请前往<a href="https://console.bce.baidu.com/iam/#/iam/apikey/list" target="_blank">此处</a>获取 API Key,注意本渠道仅支持<a target="_blank" href="https://cloud.baidu.com/doc/WENXINWORKSHOP/s/em4tsqo3v">推理服务 V2</a>相关模型', | ||||||
|     { key: 42, text: 'VertexAI', value: 42, color: 'blue' }, |   }, | ||||||
|     { key: 43, text: 'Proxy', value: 43, color: 'blue' }, |     { | ||||||
|     { key: 44, text: 'SiliconFlow', value: 44, color: 'blue' }, |         key: 17, | ||||||
|     { key: 45, text: 'xAI', value: 45, color: 'blue' }, |         text: '阿里通义千问', | ||||||
|     { key: 46, text: 'Replicate', value: 46, color: 'blue' }, |         value: 17, | ||||||
|     { key: 8, text: '自定义渠道', value: 8, color: 'pink' }, |         color: 'orange', | ||||||
|     { key: 22, text: '知识库:FastGPT', value: 22, color: 'blue' }, |         tip: '如需使用阿里云百炼,请使用<strong>阿里云百炼</strong>渠道', | ||||||
|     { key: 21, text: '知识库:AI Proxy', value: 21, color: 'purple' }, |     }, | ||||||
|     { key: 20, text: '代理:OpenRouter', value: 20, color: 'black' }, |     {key: 49, text: '阿里云百炼', value: 49, color: 'orange'}, | ||||||
|     { key: 2, text: '代理:API2D', value: 2, color: 'blue' }, |   { | ||||||
|     { key: 5, text: '代理:OpenAI-SB', value: 5, color: 'brown' }, |     key: 18, | ||||||
|     { key: 7, text: '代理:OhMyGPT', value: 7, color: 'purple' }, |     text: '讯飞星火认知', | ||||||
|     { key: 10, text: '代理:AI Proxy', value: 10, color: 'purple' }, |     value: 18, | ||||||
|     { key: 4, text: '代理:CloseAI', value: 4, color: 'teal' }, |     color: 'blue', | ||||||
|     { key: 6, text: '代理:OpenAI Max', value: 6, color: 'violet' }, |     tip: '本渠道基于讯飞 WebSocket 版本 API,如需 HTTP 版本,请使用<strong>讯飞星火认知 V2</strong>渠道', | ||||||
|     { key: 9, text: '代理:AI.LS', value: 9, color: 'yellow' }, |   }, | ||||||
|     { key: 12, text: '代理:API2GPT', value: 12, color: 'blue' }, |   { | ||||||
|     { key: 13, text: '代理:AIGC2D', value: 13, color: 'purple' } |     key: 48, | ||||||
|  |     text: '讯飞星火认知 V2', | ||||||
|  |     value: 48, | ||||||
|  |     color: 'blue', | ||||||
|  |     tip: 'HTTP 版本的讯飞接口,前往<a href="https://console.xfyun.cn/services/cbm" target="_blank">此处</a>获取 HTTP 服务接口认证密钥', | ||||||
|  |   }, | ||||||
|  |     {key: 16, text: '智谱 ChatGLM', value: 16, color: 'violet'}, | ||||||
|  |     {key: 19, text: '360 智脑', value: 19, color: 'blue'}, | ||||||
|  |     {key: 25, text: 'Moonshot AI', value: 25, color: 'black'}, | ||||||
|  |     {key: 23, text: '腾讯混元', value: 23, color: 'teal'}, | ||||||
|  |     {key: 26, text: '百川大模型', value: 26, color: 'orange'}, | ||||||
|  |     {key: 27, text: 'MiniMax', value: 27, color: 'red'}, | ||||||
|  |     {key: 29, text: 'Groq', value: 29, color: 'orange'}, | ||||||
|  |     {key: 30, text: 'Ollama', value: 30, color: 'black'}, | ||||||
|  |     {key: 31, text: '零一万物', value: 31, color: 'green'}, | ||||||
|  |     {key: 32, text: '阶跃星辰', value: 32, color: 'blue'}, | ||||||
|  |     {key: 34, text: 'Coze', value: 34, color: 'blue'}, | ||||||
|  |     {key: 35, text: 'Cohere', value: 35, color: 'blue'}, | ||||||
|  |     {key: 36, text: 'DeepSeek', value: 36, color: 'black'}, | ||||||
|  |     {key: 37, text: 'Cloudflare', value: 37, color: 'orange'}, | ||||||
|  |     {key: 38, text: 'DeepL', value: 38, color: 'black'}, | ||||||
|  |     {key: 39, text: 'together.ai', value: 39, color: 'blue'}, | ||||||
|  |     {key: 42, text: 'VertexAI', value: 42, color: 'blue'}, | ||||||
|  |     {key: 43, text: 'Proxy', value: 43, color: 'blue'}, | ||||||
|  |     {key: 44, text: 'SiliconFlow', value: 44, color: 'blue'}, | ||||||
|  |     {key: 45, text: 'xAI', value: 45, color: 'blue'}, | ||||||
|  |     {key: 46, text: 'Replicate', value: 46, color: 'blue'}, | ||||||
|  |     {key: 8, text: '自定义渠道', value: 8, color: 'pink'}, | ||||||
|  |     {key: 22, text: '知识库:FastGPT', value: 22, color: 'blue'}, | ||||||
|  |     {key: 21, text: '知识库:AI Proxy', value: 21, color: 'purple'}, | ||||||
|  |     {key: 20, text: 'OpenRouter', value: 20, color: 'black'}, | ||||||
|  |     {key: 2, text: '代理:API2D', value: 2, color: 'blue'}, | ||||||
|  |     {key: 5, text: '代理:OpenAI-SB', value: 5, color: 'brown'}, | ||||||
|  |     {key: 7, text: '代理:OhMyGPT', value: 7, color: 'purple'}, | ||||||
|  |     {key: 10, text: '代理:AI Proxy', value: 10, color: 'purple'}, | ||||||
|  |     {key: 4, text: '代理:CloseAI', value: 4, color: 'teal'}, | ||||||
|  |     {key: 6, text: '代理:OpenAI Max', value: 6, color: 'violet'}, | ||||||
|  |     {key: 9, text: '代理:AI.LS', value: 9, color: 'yellow'}, | ||||||
|  |     {key: 12, text: '代理:API2GPT', value: 12, color: 'blue'}, | ||||||
|  |     {key: 13, text: '代理:AIGC2D', value: 13, color: 'purple'}, | ||||||
| ]; | ]; | ||||||
|   | |||||||
							
								
								
									
13  web/default/src/helpers/helper.js  Normal file
							| @@ -0,0 +1,13 @@ | |||||||
|  | import {CHANNEL_OPTIONS} from '../constants'; | ||||||
|  |  | ||||||
|  | let channelMap = undefined; | ||||||
|  |  | ||||||
|  | export function getChannelOption(channelId) { | ||||||
|  |     if (channelMap === undefined) { | ||||||
|  |         channelMap = {}; | ||||||
|  |         CHANNEL_OPTIONS.forEach((option) => { | ||||||
|  |             channelMap[option.key] = option; | ||||||
|  |         }); | ||||||
|  |     } | ||||||
|  |     return channelMap[channelId]; | ||||||
|  | } | ||||||
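The new helper lazily builds a key-to-option map from `CHANNEL_OPTIONS` on first use, so later lookups are constant-time. A usage sketch; the import path is illustrative (relative to a component under `src/`), and the commented result assumes the default-theme channel list shown above:

```js
import { getChannelOption } from '../helpers/helper'; // illustrative path

const option = getChannelOption(40);
// With the default-theme constants above, this should resolve to something like:
// { key: 40, text: '字节火山引擎', value: 40, color: 'blue', description: '原字节跳动豆包' }
console.log(option ? option.text : 'unknown channel type');
```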
| @@ -1,5 +1,6 @@ | |||||||
| import { Label } from 'semantic-ui-react'; | import { Label, Message } from 'semantic-ui-react'; | ||||||
| import { useTranslation } from 'react-i18next'; | import { getChannelOption } from './helper'; | ||||||
|  | import React from 'react'; | ||||||
|  |  | ||||||
| export function renderText(text, limit) { | export function renderText(text, limit) { | ||||||
|   if (text.length > limit) { |   if (text.length > limit) { | ||||||
| @@ -15,7 +16,15 @@ export function renderGroup(group) { | |||||||
|   let groups = group.split(','); |   let groups = group.split(','); | ||||||
|   groups.sort(); |   groups.sort(); | ||||||
|   return ( |   return ( | ||||||
|     <> |     <div | ||||||
|  |       style={{ | ||||||
|  |         display: 'flex', | ||||||
|  |         alignItems: 'center', | ||||||
|  |         flexWrap: 'wrap', | ||||||
|  |         gap: '2px', | ||||||
|  |         rowGap: '6px', | ||||||
|  |       }} | ||||||
|  |     > | ||||||
|       {groups.map((group) => { |       {groups.map((group) => { | ||||||
|         if (group === 'vip' || group === 'pro') { |         if (group === 'vip' || group === 'pro') { | ||||||
|           return <Label color='yellow'>{group}</Label>; |           return <Label color='yellow'>{group}</Label>; | ||||||
| @@ -24,7 +33,7 @@ export function renderGroup(group) { | |||||||
|         } |         } | ||||||
|         return <Label>{group}</Label>; |         return <Label>{group}</Label>; | ||||||
|       })} |       })} | ||||||
|     </> |     </div> | ||||||
|   ); |   ); | ||||||
| } | } | ||||||
|  |  | ||||||
| @@ -98,3 +107,15 @@ export function renderColorLabel(text) { | |||||||
|     </Label> |     </Label> | ||||||
|   ); |   ); | ||||||
| } | } | ||||||
|  |  | ||||||
|  | export function renderChannelTip(channelId) { | ||||||
|  |   let channel = getChannelOption(channelId); | ||||||
|  |   if (channel === undefined || channel.tip === undefined) { | ||||||
|  |     return <></>; | ||||||
|  |   } | ||||||
|  |   return ( | ||||||
|  |     <Message> | ||||||
|  |       <div dangerouslySetInnerHTML={{ __html: channel.tip }}></div> | ||||||
|  |     </Message> | ||||||
|  |   ); | ||||||
|  | } | ||||||
|   | |||||||
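`renderChannelTip` looks up the selected channel type and, when that type carries a `tip`, renders it as HTML inside a `Message`; types without a tip render nothing. The EditChannel hunk further down wires it into the form as `{renderChannelTip(inputs.type)}`. A minimal usage sketch, with an illustrative import path:

```jsx
import React from 'react';
import { renderChannelTip } from '../helpers/render'; // illustrative path

// Show the provider-specific hint, if any, for the currently selected channel type.
function ChannelTypeHint({ type }) {
  return <>{renderChannelTip(type)}</>;
}

// <ChannelTypeHint type={15} /> -> Message box with the 百度文心千帆 AK/SK hint
// <ChannelTypeHint type={1} />  -> renders nothing (the OpenAI entry has no tip)
```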
| @@ -1,7 +1,7 @@ | |||||||
| import { toast } from 'react-toastify'; | import {toast} from 'react-toastify'; | ||||||
| import { toastConstants } from '../constants'; | import {toastConstants} from '../constants'; | ||||||
| import React from 'react'; | import React from 'react'; | ||||||
| import { API } from './api'; | import {API} from './api'; | ||||||
|  |  | ||||||
| const HTMLToastContent = ({ htmlContent }) => { | const HTMLToastContent = ({ htmlContent }) => { | ||||||
|   return <div dangerouslySetInnerHTML={{ __html: htmlContent }} />; |   return <div dangerouslySetInnerHTML={{ __html: htmlContent }} />; | ||||||
| @@ -74,6 +74,7 @@ if (isMobile()) { | |||||||
| } | } | ||||||
|  |  | ||||||
| export function showError(error) { | export function showError(error) { | ||||||
|  |   if (!error) return; | ||||||
|   console.error(error); |   console.error(error); | ||||||
|   if (error.message) { |   if (error.message) { | ||||||
|     if (error.name === 'AxiosError') { |     if (error.name === 'AxiosError') { | ||||||
| @@ -158,17 +159,7 @@ export function timestamp2string(timestamp) { | |||||||
|     second = '0' + second; |     second = '0' + second; | ||||||
|   } |   } | ||||||
|   return ( |   return ( | ||||||
|     year + |       year + '-' + month + '-' + day + ' ' + hour + ':' + minute + ':' + second | ||||||
|     '-' + |  | ||||||
|     month + |  | ||||||
|     '-' + |  | ||||||
|     day + |  | ||||||
|     ' ' + |  | ||||||
|     hour + |  | ||||||
|     ':' + |  | ||||||
|     minute + |  | ||||||
|     ':' + |  | ||||||
|     second |  | ||||||
|   ); |   ); | ||||||
| } | } | ||||||
|  |  | ||||||
| @@ -193,7 +184,6 @@ export const verifyJSON = (str) => { | |||||||
| export function shouldShowPrompt(id) { | export function shouldShowPrompt(id) { | ||||||
|   let prompt = localStorage.getItem(`prompt-${id}`); |   let prompt = localStorage.getItem(`prompt-${id}`); | ||||||
|   return !prompt; |   return !prompt; | ||||||
|  |  | ||||||
| } | } | ||||||
|  |  | ||||||
| export function setPromptShown(id) { | export function setPromptShown(id) { | ||||||
| @@ -224,4 +214,4 @@ export function getChannelModels(type) { | |||||||
|     return channelModels[type]; |     return channelModels[type]; | ||||||
|   } |   } | ||||||
|   return []; |   return []; | ||||||
| } | } | ||||||
|   | |||||||
| @@ -1,25 +1,10 @@ | |||||||
| import React, { useEffect, useState } from 'react'; | import React, {useEffect, useState} from 'react'; | ||||||
| import { useTranslation } from 'react-i18next'; | import {useTranslation} from 'react-i18next'; | ||||||
| import { | import {Button, Card, Form, Input, Message,} from 'semantic-ui-react'; | ||||||
|   Button, | import {useNavigate, useParams} from 'react-router-dom'; | ||||||
|   Form, | import {API, copy, getChannelModels, showError, showInfo, showSuccess, verifyJSON,} from '../../helpers'; | ||||||
|   Header, | import {CHANNEL_OPTIONS} from '../../constants'; | ||||||
|   Input, | import {renderChannelTip} from '../../helpers/render'; | ||||||
|   Message, |  | ||||||
|   Segment, |  | ||||||
|   Card, |  | ||||||
| } from 'semantic-ui-react'; |  | ||||||
| import { useNavigate, useParams } from 'react-router-dom'; |  | ||||||
| import { |  | ||||||
|   API, |  | ||||||
|   copy, |  | ||||||
|   getChannelModels, |  | ||||||
|   showError, |  | ||||||
|   showInfo, |  | ||||||
|   showSuccess, |  | ||||||
|   verifyJSON, |  | ||||||
| } from '../../helpers'; |  | ||||||
| import { CHANNEL_OPTIONS } from '../../constants'; |  | ||||||
|  |  | ||||||
| const MODEL_MAPPING_EXAMPLE = { | const MODEL_MAPPING_EXAMPLE = { | ||||||
|   'gpt-3.5-turbo-0301': 'gpt-3.5-turbo', |   'gpt-3.5-turbo-0301': 'gpt-3.5-turbo', | ||||||
| @@ -310,6 +295,7 @@ const EditChannel = () => { | |||||||
|                 options={groupOptions} |                 options={groupOptions} | ||||||
|               /> |               /> | ||||||
|             </Form.Field> |             </Form.Field> | ||||||
|  |             {renderChannelTip(inputs.type)} | ||||||
|  |  | ||||||
|             {/* Azure OpenAI specific fields */} |             {/* Azure OpenAI specific fields */} | ||||||
|             {inputs.type === 3 && ( |             {inputs.type === 3 && ( | ||||||
|   | |||||||
| @@ -1,6 +1,6 @@ | |||||||
| import React, { useEffect, useState } from 'react'; | import React, {useEffect, useState} from 'react'; | ||||||
| import { useTranslation } from 'react-i18next'; | import {useTranslation} from 'react-i18next'; | ||||||
| import { Card, Grid } from 'semantic-ui-react'; | import {Card, Grid} from 'semantic-ui-react'; | ||||||
| import { | import { | ||||||
|   Bar, |   Bar, | ||||||
|   BarChart, |   BarChart, | ||||||
| @@ -122,11 +122,11 @@ const Dashboard = () => { | |||||||
|         ? new Date(Math.min(...dates.map((d) => new Date(d)))) |         ? new Date(Math.min(...dates.map((d) => new Date(d)))) | ||||||
|         : new Date(); |         : new Date(); | ||||||
|  |  | ||||||
|     // 确保至少显示5天的数据 |     // 确保至少显示7天的数据 | ||||||
|     const fiveDaysAgo = new Date(); |     const sevenDaysAgo = new Date(); | ||||||
|     fiveDaysAgo.setDate(fiveDaysAgo.getDate() - 4); // -4是因为包含今天 |     sevenDaysAgo.setDate(sevenDaysAgo.getDate() - 6); // -6是因为包含今天 | ||||||
|     if (minDate > fiveDaysAgo) { |     if (minDate > sevenDaysAgo) { | ||||||
|       minDate = fiveDaysAgo; |       minDate = sevenDaysAgo; | ||||||
|     } |     } | ||||||
|  |  | ||||||
|     // 生成所有日期 |     // 生成所有日期 | ||||||
| @@ -164,11 +164,11 @@ const Dashboard = () => { | |||||||
|         ? new Date(Math.min(...dates.map((d) => new Date(d)))) |         ? new Date(Math.min(...dates.map((d) => new Date(d)))) | ||||||
|         : new Date(); |         : new Date(); | ||||||
|  |  | ||||||
|     // 确保至少显示5天的数据 |     // 确保至少显示7天的数据 | ||||||
|     const fiveDaysAgo = new Date(); |     const sevenDaysAgo = new Date(); | ||||||
|     fiveDaysAgo.setDate(fiveDaysAgo.getDate() - 4); // -4是因为包含今天 |     sevenDaysAgo.setDate(sevenDaysAgo.getDate() - 6); // -6是因为包含今天 | ||||||
|     if (minDate > fiveDaysAgo) { |     if (minDate > sevenDaysAgo) { | ||||||
|       minDate = fiveDaysAgo; |       minDate = sevenDaysAgo; | ||||||
|     } |     } | ||||||
|  |  | ||||||
|     // 生成所有日期 |     // 生成所有日期 | ||||||
|   | |||||||
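Both Dashboard hunks widen the guaranteed chart window from 5 to 7 days (today inclusive). A standalone sketch of the window calculation, with the date-filling step assumed from the surrounding code rather than shown in the diff:

```js
// Ensure the dashboard covers at least the last 7 days (today counts as day 1),
// even when the logged data starts later than that.
function buildDateWindow(dates) {
  let minDate = dates.length > 0
    ? new Date(Math.min(...dates.map((d) => new Date(d))))
    : new Date();

  const sevenDaysAgo = new Date();
  sevenDaysAgo.setDate(sevenDaysAgo.getDate() - 6); // -6 because today is included
  if (minDate > sevenDaysAgo) {
    minDate = sevenDaysAgo;
  }

  // Fill every day from minDate through today (assumed from the surrounding code).
  const allDates = [];
  for (const d = new Date(minDate); d <= new Date(); d.setDate(d.getDate() + 1)) {
    allDates.push(d.toISOString().slice(0, 10));
  }
  return allDates;
}

console.log(buildDateWindow(['2025-01-01']));
```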