Mirror of https://github.com/songquanpeng/one-api.git
Synced 2025-11-04 15:53:42 +08:00

Compare commits

59 Commits

v0.6.6-alp ... v0.6.7-alp
Commits (by SHA1):

- ed717211aa
- 6ccf3f3cfc
- f74577141c
- 6aafb7a99e
- c1971870fa
- f83894c83f
- e9981fff36
- 98669d5d48
- 9321427c6e
- ceea4c6d4a
- b53e00a9b3
- 332c8db0b3
- 3be28da57b
- fa74ba0eaa
- a9211d66f6
- 07b2fd58d6
- 0acee9a065
- f965469e8a
- 03ea60532a
- 2457d00afb
- 91b80ae879
- 2720e1a358
- 71f4403fd5
- 1f76c80553
- 7e027d2bd0
- 30f373b623
- 1c2654320e
- 6cffb116b7
- a84c7b38b7
- 1bd14af47b
- 6170b91d1c
- 04b49aa0ec
- ef88497f25
- 007906216d
- e64e7707a0
- ea210b6ed7
- 9026ec7510
- c317872097
- da0842272c
- 0a650b85b4
- 24f026d18e
- cb33e8aad5
- 779b747e9e
- 3d149fedf4
- 83517f687c
- e30ebda0fe
- d87c55f542
- e5b3e37c46
- 8de489cf06
- d14e4aa01b
- 541182102e
- b2679cca65
- 8572fac7a2
- a2a00dfbc3
- 129282f4a9
- a873cbd392
- 35ba1da984
- 2369025842
- f452bd481e
.github/workflows/docker-image-amd64-en.yml (2 changes, vendored)

@@ -3,7 +3,7 @@ name: Publish Docker image (amd64, English)
 on:
   push:
     tags:
-      - '*'
+      - 'v*.*.*'
   workflow_dispatch:
     inputs:
       name:
.github/workflows/docker-image-amd64.yml (2 changes, vendored)

@@ -3,7 +3,7 @@ name: Publish Docker image (amd64)
 on:
   push:
     tags:
-      - '*'
+      - 'v*.*.*'
   workflow_dispatch:
     inputs:
       name:
.github/workflows/docker-image-arm64.yml (2 changes, vendored)

@@ -3,7 +3,7 @@ name: Publish Docker image (arm64)
 on:
   push:
     tags:
-      - '*'
+      - 'v*.*.*'
       - '!*-alpha*'
   workflow_dispatch:
     inputs:
.github/workflows/linux-release.yml (2 changes, vendored)

@@ -5,7 +5,7 @@ permissions:
 on:
   push:
     tags:
-      - '*'
+      - 'v*.*.*'
       - '!*-alpha*'
   workflow_dispatch:
     inputs:
.github/workflows/macos-release.yml (2 changes, vendored)

@@ -5,7 +5,7 @@ permissions:
 on:
   push:
     tags:
-      - '*'
+      - 'v*.*.*'
       - '!*-alpha*'
   workflow_dispatch:
     inputs:
.github/workflows/windows-release.yml (2 changes, vendored)

@@ -5,7 +5,7 @@ permissions:
 on:
   push:
     tags:
-      - '*'
+      - 'v*.*.*'
       - '!*-alpha*'
   workflow_dispatch:
     inputs:
README.md (27 changes)

@@ -68,6 +68,7 @@ _✨ 通过标准的 OpenAI API 格式访问所有的大模型,开箱即用
   + [x] [Anthropic Claude 系列模型](https://anthropic.com) (支持 AWS Claude)
   + [x] [Google PaLM2/Gemini 系列模型](https://developers.generativeai.google)
   + [x] [Mistral 系列模型](https://mistral.ai/)
+  + [x] [字节跳动豆包大模型](https://console.volcengine.com/ark/region:ark+cn-beijing/model)
   + [x] [百度文心一言系列模型](https://cloud.baidu.com/doc/WENXINWORKSHOP/index.html)
   + [x] [阿里通义千问系列模型](https://help.aliyun.com/document_detail/2400395.html)
   + [x] [讯飞星火认知大模型](https://www.xfyun.cn/doc/spark/Web.html)

@@ -76,12 +77,17 @@ _✨ 通过标准的 OpenAI API 格式访问所有的大模型,开箱即用
  + [x] [腾讯混元大模型](https://cloud.tencent.com/document/product/1729)
  + [x] [Moonshot AI](https://platform.moonshot.cn/)
  + [x] [百川大模型](https://platform.baichuan-ai.com)
  + [ ] [字节云雀大模型](https://www.volcengine.com/product/ark) (WIP)
  + [x] [MINIMAX](https://api.minimax.chat/)
  + [x] [Groq](https://wow.groq.com/)
  + [x] [Ollama](https://github.com/ollama/ollama)
  + [x] [零一万物](https://platform.lingyiwanwu.com/)
  + [x] [阶跃星辰](https://platform.stepfun.com/)
  + [x] [Coze](https://www.coze.com/)
  + [x] [Cohere](https://cohere.com/)
  + [x] [DeepSeek](https://www.deepseek.com/)
  + [x] [Cloudflare Workers AI](https://developers.cloudflare.com/workers-ai/)
  + [x] [DeepL](https://www.deepl.com/)
  + [x] [together.ai](https://www.together.ai/)
2. 支持配置镜像以及众多[第三方代理服务](https://iamazing.cn/page/openai-api-third-party-services)。
3. 支持通过**负载均衡**的方式访问多个渠道。
4. 支持 **stream 模式**,可以通过流式传输实现打字机效果。

@@ -378,14 +384,17 @@ graph LR
     + `TIKTOKEN_CACHE_DIR`:默认程序启动时会联网下载一些通用的词元的编码,如:`gpt-3.5-turbo`,在一些网络环境不稳定,或者离线情况,可能会导致启动有问题,可以配置此目录缓存数据,可迁移到离线环境。
     + `DATA_GYM_CACHE_DIR`:目前该配置作用与 `TIKTOKEN_CACHE_DIR` 一致,但是优先级没有它高。
 17. `RELAY_TIMEOUT`:中继超时设置,单位为秒,默认不设置超时时间。
-18. `SQLITE_BUSY_TIMEOUT`:SQLite 锁等待超时设置,单位为毫秒,默认 `3000`。
-19. `GEMINI_SAFETY_SETTING`:Gemini 的安全设置,默认 `BLOCK_NONE`。
-20. `GEMINI_VERSION`:One API 所使用的 Gemini 版本,默认为 `v1`。
-21. `THEME`:系统的主题设置,默认为 `default`,具体可选值参考[此处](./web/README.md)。
-22. `ENABLE_METRIC`:是否根据请求成功率禁用渠道,默认不开启,可选值为 `true` 和 `false`。
-23. `METRIC_QUEUE_SIZE`:请求成功率统计队列大小,默认为 `10`。
-24. `METRIC_SUCCESS_RATE_THRESHOLD`:请求成功率阈值,默认为 `0.8`。
-25. `INITIAL_ROOT_TOKEN`:如果设置了该值,则在系统首次启动时会自动创建一个值为该环境变量值的 root 用户令牌。
+18. `RELAY_PROXY`:设置后使用该代理来请求 API。
+19. `USER_CONTENT_REQUEST_TIMEOUT`:用户上传内容下载超时时间,单位为秒。
+20. `USER_CONTENT_REQUEST_PROXY`:设置后使用该代理来请求用户上传的内容,例如图片。
+21. `SQLITE_BUSY_TIMEOUT`:SQLite 锁等待超时设置,单位为毫秒,默认 `3000`。
+22. `GEMINI_SAFETY_SETTING`:Gemini 的安全设置,默认 `BLOCK_NONE`。
+23. `GEMINI_VERSION`:One API 所使用的 Gemini 版本,默认为 `v1`。
+24. `THEME`:系统的主题设置,默认为 `default`,具体可选值参考[此处](./web/README.md)。
+25. `ENABLE_METRIC`:是否根据请求成功率禁用渠道,默认不开启,可选值为 `true` 和 `false`。
+26. `METRIC_QUEUE_SIZE`:请求成功率统计队列大小,默认为 `10`。
+27. `METRIC_SUCCESS_RATE_THRESHOLD`:请求成功率阈值,默认为 `0.8`。
+28. `INITIAL_ROOT_TOKEN`:如果设置了该值,则在系统首次启动时会自动创建一个值为该环境变量值的 root 用户令牌。
 
 ### 命令行参数
 1. `--port <port_number>`: 指定服务器监听的端口号,默认为 `3000`。
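The renumbered list above introduces three new settings: `RELAY_PROXY`, `USER_CONTENT_REQUEST_TIMEOUT`, and `USER_CONTENT_REQUEST_PROXY`. They are plain environment variables; the short Go sketch below is illustrative only (it is not part of the repository) and shows one way a deployment could check what values will actually be picked up, using the documented 30-second timeout as the fallback.

```go
package main

import (
	"fmt"
	"os"
	"strconv"
)

// getEnvInt reads an integer environment variable, falling back to def
// when the variable is unset or not a valid number.
func getEnvInt(key string, def int) int {
	if v := os.Getenv(key); v != "" {
		if n, err := strconv.Atoi(v); err == nil {
			return n
		}
	}
	return def
}

func main() {
	relayProxy := os.Getenv("RELAY_PROXY")                              // e.g. http://127.0.0.1:7890
	userContentProxy := os.Getenv("USER_CONTENT_REQUEST_PROXY")         // proxy used when fetching user uploads
	userContentTimeout := getEnvInt("USER_CONTENT_REQUEST_TIMEOUT", 30) // seconds

	fmt.Printf("relay proxy: %q\n", relayProxy)
	fmt.Printf("user content proxy: %q, timeout: %ds\n", userContentProxy, userContentTimeout)
}
```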
							
								
								
									
common/client/init.go (new file, 60 lines)

@@ -0,0 +1,60 @@
package client

import (
	"fmt"
	"github.com/songquanpeng/one-api/common/config"
	"github.com/songquanpeng/one-api/common/logger"
	"net/http"
	"net/url"
	"time"
)

var HTTPClient *http.Client
var ImpatientHTTPClient *http.Client
var UserContentRequestHTTPClient *http.Client

func Init() {
	if config.UserContentRequestProxy != "" {
		logger.SysLog(fmt.Sprintf("using %s as proxy to fetch user content", config.UserContentRequestProxy))
		proxyURL, err := url.Parse(config.UserContentRequestProxy)
		if err != nil {
			logger.FatalLog(fmt.Sprintf("USER_CONTENT_REQUEST_PROXY set but invalid: %s", config.UserContentRequestProxy))
		}
		transport := &http.Transport{
			Proxy: http.ProxyURL(proxyURL),
		}
		UserContentRequestHTTPClient = &http.Client{
			Transport: transport,
			Timeout:   time.Second * time.Duration(config.UserContentRequestTimeout),
		}
	} else {
		UserContentRequestHTTPClient = &http.Client{}
	}
	var transport http.RoundTripper
	if config.RelayProxy != "" {
		logger.SysLog(fmt.Sprintf("using %s as api relay proxy", config.RelayProxy))
		proxyURL, err := url.Parse(config.RelayProxy)
		if err != nil {
			logger.FatalLog(fmt.Sprintf("USER_CONTENT_REQUEST_PROXY set but invalid: %s", config.UserContentRequestProxy))
		}
		transport = &http.Transport{
			Proxy: http.ProxyURL(proxyURL),
		}
	}

	if config.RelayTimeout == 0 {
		HTTPClient = &http.Client{
			Transport: transport,
		}
	} else {
		HTTPClient = &http.Client{
			Timeout:   time.Duration(config.RelayTimeout) * time.Second,
			Transport: transport,
		}
	}

	ImpatientHTTPClient = &http.Client{
		Timeout:   5 * time.Second,
		Transport: transport,
	}
}
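The new `common/client` package above centralizes the HTTP clients used for relaying and for downloading user-supplied content. A minimal usage sketch, assuming the application has loaded its configuration and called `client.Init()`; the URL is a placeholder:

```go
package main

import (
	"fmt"

	"github.com/songquanpeng/one-api/common/client"
)

func main() {
	// client.Init() builds HTTPClient, ImpatientHTTPClient and
	// UserContentRequestHTTPClient from the RELAY_* / USER_CONTENT_* settings.
	client.Init()

	// Fetch a user-supplied image through the dedicated client so that
	// USER_CONTENT_REQUEST_PROXY and USER_CONTENT_REQUEST_TIMEOUT apply.
	resp, err := client.UserContentRequestHTTPClient.Head("https://example.com/image.png")
	if err != nil {
		fmt.Println("HEAD failed:", err)
		return
	}
	defer resp.Body.Close()
	fmt.Println("content type:", resp.Header.Get("Content-Type"))
}
```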
@@ -117,10 +117,10 @@ var ValidThemes = map[string]bool{
 // All duration's unit is seconds
 // Shouldn't larger then RateLimitKeyExpirationDuration
 var (
-	GlobalApiRateLimitNum            = env.Int("GLOBAL_API_RATE_LIMIT", 180)
+	GlobalApiRateLimitNum            = env.Int("GLOBAL_API_RATE_LIMIT", 240)
 	GlobalApiRateLimitDuration int64 = 3 * 60
 
-	GlobalWebRateLimitNum            = env.Int("GLOBAL_WEB_RATE_LIMIT", 60)
+	GlobalWebRateLimitNum            = env.Int("GLOBAL_WEB_RATE_LIMIT", 120)
 	GlobalWebRateLimitDuration int64 = 3 * 60
 
 	UploadRateLimitNum            = 10

@@ -144,3 +144,7 @@ var MetricFailChanSize = env.Int("METRIC_FAIL_CHAN_SIZE", 128)
 var InitialRootToken = os.Getenv("INITIAL_ROOT_TOKEN")
 
 var GeminiVersion = env.String("GEMINI_VERSION", "v1")
+
+var RelayProxy = env.String("RELAY_PROXY", "")
+var UserContentRequestProxy = env.String("USER_CONTENT_REQUEST_PROXY", "")
+var UserContentRequestTimeout = env.Int("USER_CONTENT_REQUEST_TIMEOUT", 30)
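The defaults above come from an `env` helper package (`common/env`) that is not part of this diff. A hypothetical equivalent is sketched below only to make the fallback behaviour concrete; the real implementation may differ.

```go
package env

import (
	"os"
	"strconv"
)

// String returns the value of the environment variable key,
// or defaultValue when it is unset or empty.
func String(key string, defaultValue string) string {
	if v := os.Getenv(key); v != "" {
		return v
	}
	return defaultValue
}

// Int behaves like String but parses the value as an integer,
// falling back to defaultValue on parse errors as well.
func Int(key string, defaultValue int) int {
	if v := os.Getenv(key); v != "" {
		if n, err := strconv.Atoi(v); err == nil {
			return n
		}
	}
	return defaultValue
}
```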
@@ -1,12 +0,0 @@
-package config
-
-const (
-	KeyPrefix = "cfg_"
-
-	KeyAPIVersion = KeyPrefix + "api_version"
-	KeyLibraryID  = KeyPrefix + "library_id"
-	KeyPlugin     = KeyPrefix + "plugin"
-	KeySK         = KeyPrefix + "sk"
-	KeyAK         = KeyPrefix + "ak"
-	KeyRegion     = KeyPrefix + "region"
-)
@@ -1,7 +1,22 @@
 package ctxkey
 
-var (
+const (
	Config            = "config"
	Id                = "id"
	Username          = "username"
	Role              = "role"
	Status            = "status"
	Channel           = "channel"
	ChannelId         = "channel_id"
	SpecificChannelId = "specific_channel_id"
	RequestModel      = "request_model"
	ConvertedRequest  = "converted_request"
	OriginalModel     = "original_model"
	Group             = "group"
	ModelMapping      = "model_mapping"
	ChannelName       = "channel_name"
	TokenId           = "token_id"
	TokenName         = "token_name"
	BaseURL           = "base_url"
	AvailableModels   = "available_models"
 )
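With the keys declared as constants, middleware and handlers can share one identifier instead of repeating string literals, which is what the controller hunks further down in this diff do. A small illustrative sketch of the pattern — the handler names and route are made up:

```go
package example

import (
	"net/http"

	"github.com/gin-gonic/gin"
	"github.com/songquanpeng/one-api/common/ctxkey"
)

// auth stores the authenticated user's id under a shared key;
// the value would normally come from the session or access token.
func auth(c *gin.Context) {
	c.Set(ctxkey.Id, 42)
	c.Next()
}

// whoAmI reads it back with the same constant, so a typo in the key
// name becomes a compile-time error instead of a silent mismatch.
func whoAmI(c *gin.Context) {
	userId := c.GetInt(ctxkey.Id)
	c.JSON(http.StatusOK, gin.H{"id": userId})
}

// register wires both handlers onto a gin router.
func register(r *gin.Engine) {
	r.GET("/me", auth, whoAmI)
}
```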
@@ -2,6 +2,7 @@ package helper
 import (
 	"fmt"
+	"github.com/gin-gonic/gin"
 	"github.com/songquanpeng/one-api/common/random"
 	"html/template"
 	"log"

@@ -105,6 +106,11 @@ func GenRequestID() string {
 	return GetTimeString() + random.GetRandomNumberString(8)
 }
 
+func GetResponseID(c *gin.Context) string {
+	logID := c.GetString(RequestIdKey)
+	return fmt.Sprintf("chatcmpl-%s", logID)
+}
+
 func Max(a int, b int) int {
 	if a >= b {
 		return a
common/helper/key.go (new file, 5 lines)

@@ -0,0 +1,5 @@
package helper

const (
	RequestIdKey = "X-Oneapi-Request-Id"
)
@@ -3,6 +3,7 @@ package image
 import (
 	"bytes"
 	"encoding/base64"
+	"github.com/songquanpeng/one-api/common/client"
 	"image"
 	_ "image/gif"
 	_ "image/jpeg"

@@ -19,7 +20,7 @@ import (
 var dataURLPattern = regexp.MustCompile(`data:image/([^;]+);base64,(.*)`)
 
 func IsImageUrl(url string) (bool, error) {
-	resp, err := http.Head(url)
+	resp, err := client.UserContentRequestHTTPClient.Head(url)
 	if err != nil {
 		return false, err
 	}

@@ -34,7 +35,7 @@ func GetImageSizeFromUrl(url string) (width int, height int, err error) {
 	if !isImage {
 		return
 	}
-	resp, err := http.Get(url)
+	resp, err := client.UserContentRequestHTTPClient.Get(url)
 	if err != nil {
 		return
 	}
@@ -1,7 +1,3 @@
 package logger
 
-const (
-	RequestIdKey = "X-Oneapi-Request-Id"
-)
-
 var LogDir string
@@ -3,15 +3,16 @@ package logger
 import (
 	"context"
 	"fmt"
-	"github.com/gin-gonic/gin"
-	"github.com/songquanpeng/one-api/common/config"
-	"github.com/songquanpeng/one-api/common/helper"
 	"io"
 	"log"
 	"os"
 	"path/filepath"
 	"sync"
 	"time"
+
+	"github.com/gin-gonic/gin"
+	"github.com/songquanpeng/one-api/common/config"
+	"github.com/songquanpeng/one-api/common/helper"
 )
 
 const (

@@ -21,20 +22,11 @@
 	loggerError = "ERR"
 )
 
-var setupLogLock sync.Mutex
-var setupLogWorking bool
+var setupLogOnce sync.Once
 
 func SetupLogger() {
+	setupLogOnce.Do(func() {
 		if LogDir != "" {
-		ok := setupLogLock.TryLock()
-		if !ok {
-			log.Println("setup log is already working")
-			return
-		}
-		defer func() {
-			setupLogLock.Unlock()
-			setupLogWorking = false
-		}()
 			logPath := filepath.Join(LogDir, fmt.Sprintf("oneapi-%s.log", time.Now().Format("20060102")))
 			fd, err := os.OpenFile(logPath, os.O_APPEND|os.O_CREATE|os.O_WRONLY, 0644)
 			if err != nil {

@@ -43,6 +35,7 @@ func SetupLogger() {
 			gin.DefaultWriter = io.MultiWriter(os.Stdout, fd)
 			gin.DefaultErrorWriter = io.MultiWriter(os.Stderr, fd)
 		}
+	})
 }
 
 func SysLog(s string) {

@@ -50,11 +43,19 @@ func SysLog(s string) {
 	_, _ = fmt.Fprintf(gin.DefaultWriter, "[SYS] %v | %s \n", t.Format("2006/01/02 - 15:04:05"), s)
 }
 
+func SysLogf(format string, a ...any) {
+	SysLog(fmt.Sprintf(format, a...))
+}
+
 func SysError(s string) {
 	t := time.Now()
 	_, _ = fmt.Fprintf(gin.DefaultErrorWriter, "[SYS] %v | %s \n", t.Format("2006/01/02 - 15:04:05"), s)
 }
 
+func SysErrorf(format string, a ...any) {
+	SysError(fmt.Sprintf(format, a...))
+}
+
 func Debug(ctx context.Context, msg string) {
 	if config.DebugEnabled {
 		logHelper(ctx, loggerDEBUG, msg)

@@ -94,18 +95,13 @@ func logHelper(ctx context.Context, level string, msg string) {
 	if level == loggerINFO {
 		writer = gin.DefaultWriter
 	}
-	id := ctx.Value(RequestIdKey)
+	id := ctx.Value(helper.RequestIdKey)
 	if id == nil {
 		id = helper.GenRequestID()
 	}
 	now := time.Now()
 	_, _ = fmt.Fprintf(writer, "[%s] %v | %s | %s \n", level, now.Format("2006/01/02 - 15:04:05"), id, msg)
-	if !setupLogWorking {
-		setupLogWorking = true
-		go func() {
 	SetupLogger()
-		}()
-	}
 }
 
 func FatalLog(v ...any) {
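The SetupLogger rewrite above swaps the `TryLock`/`setupLogWorking` bookkeeping for `sync.Once`, which guarantees the initialization body runs at most once even under concurrent calls. The pattern in isolation, as a generic standalone sketch rather than the repository code:

```go
package main

import (
	"fmt"
	"sync"
)

var setupOnce sync.Once

// setup runs its body exactly once, no matter how many goroutines call it.
func setup() {
	setupOnce.Do(func() {
		fmt.Println("initializing log output")
	})
}

func main() {
	var wg sync.WaitGroup
	for i := 0; i < 5; i++ {
		wg.Add(1)
		go func() {
			defer wg.Done()
			setup()
		}()
	}
	wg.Wait() // "initializing log output" is printed only once
}
```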
@@ -6,6 +6,7 @@ import (
 	"fmt"
 	"github.com/gin-gonic/gin"
 	"github.com/songquanpeng/one-api/common/config"
+	"github.com/songquanpeng/one-api/common/ctxkey"
 	"github.com/songquanpeng/one-api/controller"
 	"github.com/songquanpeng/one-api/model"
 	"net/http"

@@ -136,7 +137,7 @@ func WeChatBind(c *gin.Context) {
 		})
 		return
 	}
-	id := c.GetInt("id")
+	id := c.GetInt(ctxkey.Id)
 	user := model.User{
 		Id: id,
 	}
@@ -3,6 +3,7 @@ package controller
 import (
 	"github.com/gin-gonic/gin"
 	"github.com/songquanpeng/one-api/common/config"
+	"github.com/songquanpeng/one-api/common/ctxkey"
 	"github.com/songquanpeng/one-api/model"
 	relaymodel "github.com/songquanpeng/one-api/relay/model"
 )

@@ -14,13 +15,13 @@ func GetSubscription(c *gin.Context) {
 	var token *model.Token
 	var expiredTime int64
 	if config.DisplayTokenStatEnabled {
-		tokenId := c.GetInt("token_id")
+		tokenId := c.GetInt(ctxkey.TokenId)
 		token, err = model.GetTokenById(tokenId)
 		expiredTime = token.ExpiredTime
 		remainQuota = token.RemainQuota
 		usedQuota = token.UsedQuota
 	} else {
-		userId := c.GetInt("id")
+		userId := c.GetInt(ctxkey.Id)
 		remainQuota, err = model.GetUserQuota(userId)
 		if err != nil {
 			usedQuota, err = model.GetUserUsedQuota(userId)

@@ -64,11 +65,11 @@ func GetUsage(c *gin.Context) {
 	var err error
 	var token *model.Token
 	if config.DisplayTokenStatEnabled {
-		tokenId := c.GetInt("token_id")
+		tokenId := c.GetInt(ctxkey.TokenId)
 		token, err = model.GetTokenById(tokenId)
 		quota = token.UsedQuota
 	} else {
-		userId := c.GetInt("id")
+		userId := c.GetInt(ctxkey.Id)
 		quota, err = model.GetUserUsedQuota(userId)
 	}
 	if err != nil {
@@ -4,12 +4,12 @@ import (
 	"encoding/json"
 	"errors"
 	"fmt"
+	"github.com/songquanpeng/one-api/common/client"
 	"github.com/songquanpeng/one-api/common/config"
 	"github.com/songquanpeng/one-api/common/logger"
 	"github.com/songquanpeng/one-api/model"
 	"github.com/songquanpeng/one-api/monitor"
 	"github.com/songquanpeng/one-api/relay/channeltype"
-	"github.com/songquanpeng/one-api/relay/client"
 	"io"
 	"net/http"
 	"strconv"
@@ -5,7 +5,17 @@ import (
 	"encoding/json"
 	"errors"
 	"fmt"
+	"io"
+	"net/http"
+	"net/http/httptest"
+	"net/url"
+	"strconv"
+	"strings"
+	"sync"
+	"time"
+
 	"github.com/songquanpeng/one-api/common/config"
+	"github.com/songquanpeng/one-api/common/ctxkey"
 	"github.com/songquanpeng/one-api/common/logger"
 	"github.com/songquanpeng/one-api/common/message"
 	"github.com/songquanpeng/one-api/middleware"

@@ -17,14 +27,6 @@ import (
 	"github.com/songquanpeng/one-api/relay/meta"
 	relaymodel "github.com/songquanpeng/one-api/relay/model"
 	"github.com/songquanpeng/one-api/relay/relaymode"
-	"io"
-	"net/http"
-	"net/http/httptest"
-	"net/url"
-	"strconv"
-	"strings"
-	"sync"
-	"time"
 
 	"github.com/gin-gonic/gin"
 )

@@ -54,8 +56,10 @@ func testChannel(channel *model.Channel) (err error, openaiErr *relaymodel.Error
 	}
 	c.Request.Header.Set("Authorization", "Bearer "+channel.Key)
 	c.Request.Header.Set("Content-Type", "application/json")
-	c.Set("channel", channel.Type)
-	c.Set("base_url", channel.GetBaseURL())
+	c.Set(ctxkey.Channel, channel.Type)
+	c.Set(ctxkey.BaseURL, channel.GetBaseURL())
+	cfg, _ := channel.LoadConfig()
+	c.Set(ctxkey.Config, cfg)
 	middleware.SetupContextForSelectedChannel(c, channel, "")
 	meta := meta.GetByContext(c)
 	apiType := channeltype.ToAPIType(channel.Type)

@@ -64,12 +68,20 @@ func testChannel(channel *model.Channel) (err error, openaiErr *relaymodel.Error
 		return fmt.Errorf("invalid api type: %d, adaptor is nil", apiType), nil
 	}
 	adaptor.Init(meta)
-	modelName := adaptor.GetModelList()[0]
-	if !strings.Contains(channel.Models, modelName) {
+	var modelName string
+	modelList := adaptor.GetModelList()
+	modelMap := channel.GetModelMapping()
+	if len(modelList) != 0 {
+		modelName = modelList[0]
+	}
+	if modelName == "" || !strings.Contains(channel.Models, modelName) {
 		modelNames := strings.Split(channel.Models, ",")
 		if len(modelNames) > 0 {
 			modelName = modelNames[0]
 		}
+		if modelMap != nil && modelMap[modelName] != "" {
+			modelName = modelMap[modelName]
+		}
 	}
 	request := buildTestRequest()
 	request.Model = modelName

@@ -82,6 +94,7 @@ func testChannel(channel *model.Channel) (err error, openaiErr *relaymodel.Error
 	if err != nil {
 		return err, nil
 	}
+	logger.SysLog(string(jsonData))
 	requestBody := bytes.NewBuffer(jsonData)
 	c.Request.Body = io.NopCloser(requestBody)
 	resp, err := adaptor.DoRequest(c, meta, requestBody)
@@ -3,6 +3,7 @@ package controller
 import (
 	"github.com/gin-gonic/gin"
 	"github.com/songquanpeng/one-api/common/config"
+	"github.com/songquanpeng/one-api/common/ctxkey"
 	"github.com/songquanpeng/one-api/model"
 	"net/http"
 	"strconv"

@@ -41,7 +42,7 @@ func GetUserLogs(c *gin.Context) {
 	if p < 0 {
 		p = 0
 	}
-	userId := c.GetInt("id")
+	userId := c.GetInt(ctxkey.Id)
 	logType, _ := strconv.Atoi(c.Query("type"))
 	startTimestamp, _ := strconv.ParseInt(c.Query("start_timestamp"), 10, 64)
 	endTimestamp, _ := strconv.ParseInt(c.Query("end_timestamp"), 10, 64)

@@ -83,7 +84,7 @@ func SearchAllLogs(c *gin.Context) {
 
 func SearchUserLogs(c *gin.Context) {
 	keyword := c.Query("keyword")
-	userId := c.GetInt("id")
+	userId := c.GetInt(ctxkey.Id)
 	logs, err := model.SearchUserLogs(userId, keyword)
 	if err != nil {
 		c.JSON(http.StatusOK, gin.H{

@@ -122,7 +123,7 @@ func GetLogsStat(c *gin.Context) {
 }
 
 func GetLogsSelfStat(c *gin.Context) {
-	username := c.GetString("username")
+	username := c.GetString(ctxkey.Username)
 	logType, _ := strconv.Atoi(c.Query("type"))
 	startTimestamp, _ := strconv.ParseInt(c.Query("start_timestamp"), 10, 64)
 	endTimestamp, _ := strconv.ParseInt(c.Query("end_timestamp"), 10, 64)
@@ -3,6 +3,7 @@ package controller
 import (
 	"fmt"
 	"github.com/gin-gonic/gin"
+	"github.com/songquanpeng/one-api/common/ctxkey"
 	"github.com/songquanpeng/one-api/model"
 	relay "github.com/songquanpeng/one-api/relay"
 	"github.com/songquanpeng/one-api/relay/adaptor/openai"

@@ -131,10 +132,10 @@ func ListAllModels(c *gin.Context) {
 func ListModels(c *gin.Context) {
 	ctx := c.Request.Context()
 	var availableModels []string
-	if c.GetString("available_models") != "" {
-		availableModels = strings.Split(c.GetString("available_models"), ",")
+	if c.GetString(ctxkey.AvailableModels) != "" {
+		availableModels = strings.Split(c.GetString(ctxkey.AvailableModels), ",")
 	} else {
-		userId := c.GetInt("id")
+		userId := c.GetInt(ctxkey.Id)
 		userGroup, _ := model.CacheGetUserGroup(userId)
 		availableModels, _ = model.CacheGetGroupModels(ctx, userGroup)
 	}

@@ -186,7 +187,7 @@ func RetrieveModel(c *gin.Context) {
 
 func GetUserAvailableModels(c *gin.Context) {
 	ctx := c.Request.Context()
-	id := c.GetInt("id")
+	id := c.GetInt(ctxkey.Id)
 	userGroup, err := model.CacheGetUserGroup(id)
 	if err != nil {
 		c.JSON(http.StatusOK, gin.H{
@@ -3,6 +3,7 @@ package controller
 import (
 	"github.com/gin-gonic/gin"
 	"github.com/songquanpeng/one-api/common/config"
+	"github.com/songquanpeng/one-api/common/ctxkey"
 	"github.com/songquanpeng/one-api/common/helper"
 	"github.com/songquanpeng/one-api/common/random"
 	"github.com/songquanpeng/one-api/model"

@@ -109,7 +110,7 @@ func AddRedemption(c *gin.Context) {
 	for i := 0; i < redemption.Count; i++ {
 		key := random.GetUUID()
 		cleanRedemption := model.Redemption{
-			UserId:      c.GetInt("id"),
+			UserId:      c.GetInt(ctxkey.Id),
 			Name:        redemption.Name,
 			Key:         key,
 			CreatedTime: helper.GetTimestamp(),
@@ -4,6 +4,9 @@ import (
 	"bytes"
 	"context"
 	"fmt"
+	"io"
+	"net/http"
+
 	"github.com/gin-gonic/gin"
 	"github.com/songquanpeng/one-api/common"
 	"github.com/songquanpeng/one-api/common/config"

@@ -16,8 +19,6 @@ import (
 	"github.com/songquanpeng/one-api/relay/controller"
 	"github.com/songquanpeng/one-api/relay/model"
 	"github.com/songquanpeng/one-api/relay/relaymode"
-	"io"
-	"net/http"
 )
 
 // https://platform.openai.com/docs/api-reference/chat

@@ -46,18 +47,19 @@ func Relay(c *gin.Context) {
 		requestBody, _ := common.GetRequestBody(c)
 		logger.Debugf(ctx, "request body: %s", string(requestBody))
 	}
-	channelId := c.GetInt("channel_id")
+	channelId := c.GetInt(ctxkey.ChannelId)
+	userId := c.GetInt("id")
 	bizErr := relayHelper(c, relayMode)
 	if bizErr == nil {
 		monitor.Emit(channelId, true)
 		return
 	}
 	lastFailedChannelId := channelId
-	channelName := c.GetString("channel_name")
-	group := c.GetString("group")
+	channelName := c.GetString(ctxkey.ChannelName)
+	group := c.GetString(ctxkey.Group)
 	originalModel := c.GetString(ctxkey.OriginalModel)
-	go processChannelRelayError(ctx, channelId, channelName, bizErr)
-	requestId := c.GetString(logger.RequestIdKey)
+	go processChannelRelayError(ctx, userId, channelId, channelName, bizErr)
+	requestId := c.GetString(helper.RequestIdKey)
 	retryTimes := config.RetryTimes
 	if !shouldRetry(c, bizErr.StatusCode) {
 		logger.Errorf(ctx, "relay error happen, status code is %d, won't retry in this case", bizErr.StatusCode)

@@ -80,10 +82,10 @@ func Relay(c *gin.Context) {
 		if bizErr == nil {
 			return
 		}
-		channelId := c.GetInt("channel_id")
+		channelId := c.GetInt(ctxkey.ChannelId)
 		lastFailedChannelId = channelId
-		channelName := c.GetString("channel_name")
-		go processChannelRelayError(ctx, channelId, channelName, bizErr)
+		channelName := c.GetString(ctxkey.ChannelName)
+		go processChannelRelayError(ctx, userId, channelId, channelName, bizErr)
 	}
 	if bizErr != nil {
 		if bizErr.StatusCode == http.StatusTooManyRequests {

@@ -97,7 +99,7 @@ func Relay(c *gin.Context) {
 }
 
 func shouldRetry(c *gin.Context, statusCode int) bool {
-	if _, ok := c.Get("specific_channel_id"); ok {
+	if _, ok := c.Get(ctxkey.SpecificChannelId); ok {
 		return false
 	}
 	if statusCode == http.StatusTooManyRequests {

@@ -115,8 +117,8 @@ func shouldRetry(c *gin.Context, statusCode int) bool {
 	return true
 }
 
-func processChannelRelayError(ctx context.Context, channelId int, channelName string, err *model.ErrorWithStatusCode) {
-	logger.Errorf(ctx, "relay error (channel #%d): %s", channelId, err.Message)
+func processChannelRelayError(ctx context.Context, userId int, channelId int, channelName string, err *model.ErrorWithStatusCode) {
+	logger.Errorf(ctx, "relay error (channel id %d, user id: %d): %s", channelId, userId, err.Message)
 	// https://platform.openai.com/docs/guides/error-codes/api-errors
 	if monitor.ShouldDisableChannel(&err.Error, err.StatusCode) {
 		monitor.DisableChannel(channelId, channelName, err.Message)
@@ -4,6 +4,7 @@ import (
 	"fmt"
 	"github.com/gin-gonic/gin"
 	"github.com/songquanpeng/one-api/common/config"
+	"github.com/songquanpeng/one-api/common/ctxkey"
 	"github.com/songquanpeng/one-api/common/helper"
 	"github.com/songquanpeng/one-api/common/network"
 	"github.com/songquanpeng/one-api/common/random"

@@ -13,7 +14,7 @@ import (
 )
 
 func GetAllTokens(c *gin.Context) {
-	userId := c.GetInt("id")
+	userId := c.GetInt(ctxkey.Id)
 	p, _ := strconv.Atoi(c.Query("p"))
 	if p < 0 {
 		p = 0

@@ -38,7 +39,7 @@ func GetAllTokens(c *gin.Context) {
 }
 
 func SearchTokens(c *gin.Context) {
-	userId := c.GetInt("id")
+	userId := c.GetInt(ctxkey.Id)
 	keyword := c.Query("keyword")
 	tokens, err := model.SearchUserTokens(userId, keyword)
 	if err != nil {

@@ -58,7 +59,7 @@ func SearchTokens(c *gin.Context) {
 
 func GetToken(c *gin.Context) {
 	id, err := strconv.Atoi(c.Param("id"))
-	userId := c.GetInt("id")
+	userId := c.GetInt(ctxkey.Id)
 	if err != nil {
 		c.JSON(http.StatusOK, gin.H{
 			"success": false,

@@ -83,8 +84,8 @@ func GetToken(c *gin.Context) {
 }
 
 func GetTokenStatus(c *gin.Context) {
-	tokenId := c.GetInt("token_id")
-	userId := c.GetInt("id")
+	tokenId := c.GetInt(ctxkey.TokenId)
+	userId := c.GetInt(ctxkey.Id)
 	token, err := model.GetTokenByIds(tokenId, userId)
 	if err != nil {
 		c.JSON(http.StatusOK, gin.H{

@@ -139,7 +140,7 @@ func AddToken(c *gin.Context) {
 	}
 
 	cleanToken := model.Token{
-		UserId:         c.GetInt("id"),
+		UserId:         c.GetInt(ctxkey.Id),
 		Name:           token.Name,
 		Key:            random.GenerateKey(),
 		CreatedTime:    helper.GetTimestamp(),

@@ -168,7 +169,7 @@ func AddToken(c *gin.Context) {
 
 func DeleteToken(c *gin.Context) {
 	id, _ := strconv.Atoi(c.Param("id"))
-	userId := c.GetInt("id")
+	userId := c.GetInt(ctxkey.Id)
 	err := model.DeleteTokenById(id, userId)
 	if err != nil {
 		c.JSON(http.StatusOK, gin.H{

@@ -185,7 +186,7 @@ func DeleteToken(c *gin.Context) {
 }
 
 func UpdateToken(c *gin.Context) {
-	userId := c.GetInt("id")
+	userId := c.GetInt(ctxkey.Id)
 	statusOnly := c.Query("status_only")
 	token := model.Token{}
 	err := c.ShouldBindJSON(&token)
@@ -5,6 +5,7 @@ import (
 	"fmt"
 	"github.com/songquanpeng/one-api/common"
 	"github.com/songquanpeng/one-api/common/config"
+	"github.com/songquanpeng/one-api/common/ctxkey"
 	"github.com/songquanpeng/one-api/common/random"
 	"github.com/songquanpeng/one-api/model"
 	"net/http"

@@ -172,6 +173,7 @@ func Register(c *gin.Context) {
		})
		return
	}

	c.JSON(http.StatusOK, gin.H{
		"success": true,
		"message": "",

@@ -238,7 +240,7 @@ func GetUser(c *gin.Context) {
 		})
 		return
 	}
-	myRole := c.GetInt("role")
+	myRole := c.GetInt(ctxkey.Role)
 	if myRole <= user.Role && myRole != model.RoleRootUser {
 		c.JSON(http.StatusOK, gin.H{
 			"success": false,

@@ -255,7 +257,7 @@ func GetUser(c *gin.Context) {
 }
 
 func GetUserDashboard(c *gin.Context) {
-	id := c.GetInt("id")
+	id := c.GetInt(ctxkey.Id)
 	now := time.Now()
 	startOfDay := now.Truncate(24*time.Hour).AddDate(0, 0, -6).Unix()
 	endOfDay := now.Truncate(24 * time.Hour).Add(24*time.Hour - time.Second).Unix()

@@ -278,7 +280,7 @@ func GetUserDashboard(c *gin.Context) {
 }
 
 func GenerateAccessToken(c *gin.Context) {
-	id := c.GetInt("id")
+	id := c.GetInt(ctxkey.Id)
 	user, err := model.GetUserById(id, true)
 	if err != nil {
 		c.JSON(http.StatusOK, gin.H{

@@ -314,7 +316,7 @@ func GenerateAccessToken(c *gin.Context) {
 }
 
 func GetAffCode(c *gin.Context) {
-	id := c.GetInt("id")
+	id := c.GetInt(ctxkey.Id)
 	user, err := model.GetUserById(id, true)
 	if err != nil {
 		c.JSON(http.StatusOK, gin.H{

@@ -342,7 +344,7 @@ func GetAffCode(c *gin.Context) {
 }
 
 func GetSelf(c *gin.Context) {
-	id := c.GetInt("id")
+	id := c.GetInt(ctxkey.Id)
 	user, err := model.GetUserById(id, false)
 	if err != nil {
 		c.JSON(http.StatusOK, gin.H{

@@ -387,7 +389,7 @@ func UpdateUser(c *gin.Context) {
 		})
 		return
 	}
-	myRole := c.GetInt("role")
+	myRole := c.GetInt(ctxkey.Role)
 	if myRole <= originUser.Role && myRole != model.RoleRootUser {
 		c.JSON(http.StatusOK, gin.H{
 			"success": false,

@@ -445,7 +447,7 @@ func UpdateSelf(c *gin.Context) {
 	}
 
 	cleanUser := model.User{
-		Id:          c.GetInt("id"),
+		Id:          c.GetInt(ctxkey.Id),
 		Username:    user.Username,
 		Password:    user.Password,
 		DisplayName: user.DisplayName,
go.mod (48 changes)

@@ -4,42 +4,42 @@ module github.com/songquanpeng/one-api
 go 1.20
 
 require (
-	github.com/aws/aws-sdk-go-v2 v1.26.1
-	github.com/aws/aws-sdk-go-v2/credentials v1.17.11
-	github.com/aws/aws-sdk-go-v2/service/bedrockruntime v1.7.4
-	github.com/gin-contrib/cors v1.7.1
-	github.com/gin-contrib/gzip v1.0.0
-	github.com/gin-contrib/sessions v1.0.0
-	github.com/gin-contrib/static v1.1.1
-	github.com/gin-gonic/gin v1.9.1
-	github.com/go-playground/validator/v10 v10.19.0
+	github.com/aws/aws-sdk-go-v2 v1.27.0
+	github.com/aws/aws-sdk-go-v2/credentials v1.17.15
+	github.com/aws/aws-sdk-go-v2/service/bedrockruntime v1.8.3
+	github.com/gin-contrib/cors v1.7.2
+	github.com/gin-contrib/gzip v1.0.1
+	github.com/gin-contrib/sessions v1.0.1
+	github.com/gin-contrib/static v1.1.2
+	github.com/gin-gonic/gin v1.10.0
+	github.com/go-playground/validator/v10 v10.20.0
 	github.com/go-redis/redis/v8 v8.11.5
 	github.com/golang-jwt/jwt v3.2.2+incompatible
 	github.com/google/uuid v1.6.0
 	github.com/gorilla/websocket v1.5.1
 	github.com/jinzhu/copier v0.4.0
 	github.com/pkg/errors v0.9.1
-	github.com/pkoukk/tiktoken-go v0.1.6
+	github.com/pkoukk/tiktoken-go v0.1.7
 	github.com/smartystreets/goconvey v1.8.1
 	github.com/stretchr/testify v1.9.0
-	golang.org/x/crypto v0.22.0
-	golang.org/x/image v0.15.0
+	golang.org/x/crypto v0.23.0
+	golang.org/x/image v0.16.0
 	gorm.io/driver/mysql v1.5.6
 	gorm.io/driver/postgres v1.5.7
 	gorm.io/driver/sqlite v1.5.5
-	gorm.io/gorm v1.25.9
+	gorm.io/gorm v1.25.10
 )
 
 require (
 	filippo.io/edwards25519 v1.1.0 // indirect
 	github.com/aws/aws-sdk-go-v2/aws/protocol/eventstream v1.6.2 // indirect
-	github.com/aws/aws-sdk-go-v2/internal/configsources v1.3.5 // indirect
-	github.com/aws/aws-sdk-go-v2/internal/endpoints/v2 v2.6.5 // indirect
+	github.com/aws/aws-sdk-go-v2/internal/configsources v1.3.7 // indirect
+	github.com/aws/aws-sdk-go-v2/internal/endpoints/v2 v2.6.7 // indirect
 	github.com/aws/smithy-go v1.20.2 // indirect
-	github.com/bytedance/sonic v1.11.5 // indirect
+	github.com/bytedance/sonic v1.11.6 // indirect
 	github.com/bytedance/sonic/loader v0.1.1 // indirect
 	github.com/cespare/xxhash/v2 v2.3.0 // indirect
-	github.com/cloudwego/base64x v0.1.3 // indirect
+	github.com/cloudwego/base64x v0.1.4 // indirect
 	github.com/cloudwego/iasm v0.2.0 // indirect
 	github.com/davecgh/go-spew v1.1.1 // indirect
 	github.com/dgryski/go-rendezvous v0.0.0-20200823014737-9f7001d12a5f // indirect

@@ -50,7 +50,7 @@ require (
 	github.com/go-playground/locales v0.14.1 // indirect
 	github.com/go-playground/universal-translator v0.18.1 // indirect
 	github.com/go-sql-driver/mysql v1.8.1 // indirect
-	github.com/goccy/go-json v0.10.2 // indirect
+	github.com/goccy/go-json v0.10.3 // indirect
 	github.com/gopherjs/gopherjs v1.17.2 // indirect
 	github.com/gorilla/context v1.1.2 // indirect
 	github.com/gorilla/securecookie v1.1.2 // indirect

@@ -70,16 +70,16 @@ require (
 	github.com/mattn/go-sqlite3 v2.0.3+incompatible // indirect
 	github.com/modern-go/concurrent v0.0.0-20180306012644-bacd9c7ef1dd // indirect
 	github.com/modern-go/reflect2 v1.0.2 // indirect
-	github.com/pelletier/go-toml/v2 v2.2.1 // indirect
+	github.com/pelletier/go-toml/v2 v2.2.2 // indirect
 	github.com/pmezard/go-difflib v1.0.0 // indirect
 	github.com/smarty/assertions v1.15.0 // indirect
 	github.com/twitchyliquid64/golang-asm v0.15.1 // indirect
 	github.com/ugorji/go/codec v1.2.12 // indirect
-	golang.org/x/arch v0.7.0 // indirect
-	golang.org/x/net v0.24.0 // indirect
+	golang.org/x/arch v0.8.0 // indirect
+	golang.org/x/net v0.25.0 // indirect
 	golang.org/x/sync v0.7.0 // indirect
-	golang.org/x/sys v0.19.0 // indirect
-	golang.org/x/text v0.14.0 // indirect
-	google.golang.org/protobuf v1.33.0 // indirect
+	golang.org/x/sys v0.20.0 // indirect
+	golang.org/x/text v0.15.0 // indirect
+	google.golang.org/protobuf v1.34.1 // indirect
 	gopkg.in/yaml.v3 v3.0.1 // indirect
 )
go.sum (48 changes)

@@ -2,20 +2,32 @@ filippo.io/edwards25519 v1.1.0 h1:FNf4tywRC1HmFuKW5xopWpigGjJKiJSV0Cqo0cJWDaA=
 filippo.io/edwards25519 v1.1.0/go.mod h1:BxyFTGdWcka3PhytdK4V28tE5sGfRvvvRV7EaN4VDT4=
 github.com/aws/aws-sdk-go-v2 v1.26.1 h1:5554eUqIYVWpU0YmeeYZ0wU64H2VLBs8TlhRB2L+EkA=
 github.com/aws/aws-sdk-go-v2 v1.26.1/go.mod h1:ffIFB97e2yNsv4aTSGkqtHnppsIJzw7G7BReUZ3jCXM=
+github.com/aws/aws-sdk-go-v2 v1.27.0 h1:7bZWKoXhzI+mMR/HjdMx8ZCC5+6fY0lS5tr0bbgiLlo=
+github.com/aws/aws-sdk-go-v2 v1.27.0/go.mod h1:ffIFB97e2yNsv4aTSGkqtHnppsIJzw7G7BReUZ3jCXM=
 github.com/aws/aws-sdk-go-v2/aws/protocol/eventstream v1.6.2 h1:x6xsQXGSmW6frevwDA+vi/wqhp1ct18mVXYN08/93to=
 github.com/aws/aws-sdk-go-v2/aws/protocol/eventstream v1.6.2/go.mod h1:lPprDr1e6cJdyYeGXnRaJoP4Md+cDBvi2eOj00BlGmg=
 github.com/aws/aws-sdk-go-v2/credentials v1.17.11 h1:YuIB1dJNf1Re822rriUOTxopaHHvIq0l/pX3fwO+Tzs=
 github.com/aws/aws-sdk-go-v2/credentials v1.17.11/go.mod h1:AQtFPsDH9bI2O+71anW6EKL+NcD7LG3dpKGMV4SShgo=
+github.com/aws/aws-sdk-go-v2/credentials v1.17.15 h1:YDexlvDRCA8ems2T5IP1xkMtOZ1uLJOCJdTr0igs5zo=
+github.com/aws/aws-sdk-go-v2/credentials v1.17.15/go.mod h1:vxHggqW6hFNaeNC0WyXS3VdyjcV0a4KMUY4dKJ96buU=
 github.com/aws/aws-sdk-go-v2/internal/configsources v1.3.5 h1:aw39xVGeRWlWx9EzGVnhOR4yOjQDHPQ6o6NmBlscyQg=
 github.com/aws/aws-sdk-go-v2/internal/configsources v1.3.5/go.mod h1:FSaRudD0dXiMPK2UjknVwwTYyZMRsHv3TtkabsZih5I=
+github.com/aws/aws-sdk-go-v2/internal/configsources v1.3.7 h1:lf/8VTF2cM+N4SLzaYJERKEWAXq8MOMpZfU6wEPWsPk=
+github.com/aws/aws-sdk-go-v2/internal/configsources v1.3.7/go.mod h1:4SjkU7QiqK2M9oozyMzfZ/23LmUY+h3oFqhdeP5OMiI=
 github.com/aws/aws-sdk-go-v2/internal/endpoints/v2 v2.6.5 h1:PG1F3OD1szkuQPzDw3CIQsRIrtTlUC3lP84taWzHlq0=
 github.com/aws/aws-sdk-go-v2/internal/endpoints/v2 v2.6.5/go.mod h1:jU1li6RFryMz+so64PpKtudI+QzbKoIEivqdf6LNpOc=
+github.com/aws/aws-sdk-go-v2/internal/endpoints/v2 v2.6.7 h1:4OYVp0705xu8yjdyoWix0r9wPIRXnIzzOoUpQVHIJ/g=
+github.com/aws/aws-sdk-go-v2/internal/endpoints/v2 v2.6.7/go.mod h1:vd7ESTEvI76T2Na050gODNmNU7+OyKrIKroYTu4ABiI=
 github.com/aws/aws-sdk-go-v2/service/bedrockruntime v1.7.4 h1:JgHnonzbnA3pbqj76wYsSZIZZQYBxkmMEjvL6GHy8XU=
 github.com/aws/aws-sdk-go-v2/service/bedrockruntime v1.7.4/go.mod h1:nZspkhg+9p8iApLFoyAqfyuMP0F38acy2Hm3r5r95Cg=
+github.com/aws/aws-sdk-go-v2/service/bedrockruntime v1.8.3 h1:Fihjyd6DeNjcawBEGLH9dkIEUi6AdhucDKPE9nJ4QiY=
+github.com/aws/aws-sdk-go-v2/service/bedrockruntime v1.8.3/go.mod h1:opvUj3ismqSCxYc+m4WIjPL0ewZGtvp0ess7cKvBPOQ=
 github.com/aws/smithy-go v1.20.2 h1:tbp628ireGtzcHDDmLT/6ADHidqnwgF57XOXZe6tp4Q=
 github.com/aws/smithy-go v1.20.2/go.mod h1:krry+ya/rV9RDcV/Q16kpu6ypI4K2czasz0NC3qS14E=
 github.com/bytedance/sonic v1.11.5 h1:G00FYjjqll5iQ1PYXynbg/hyzqBqavH8Mo9/oTopd9k=
 github.com/bytedance/sonic v1.11.5/go.mod h1:X2PC2giUdj/Cv2lliWFLk6c/DUQok5rViJSemeB0wDw=
+github.com/bytedance/sonic v1.11.6 h1:oUp34TzMlL+OY1OUWxHqsdkgC/Zfc85zGqw9siXjrc0=
+github.com/bytedance/sonic v1.11.6/go.mod h1:LysEHSvpvDySVdC2f87zGWf6CIKJcAvqab1ZaiQtds4=
 github.com/bytedance/sonic/loader v0.1.0/go.mod h1:UmRT+IRTGKz/DAkzcEGzyVqQFJ7H9BqwBO3pm9H/+HY=
 github.com/bytedance/sonic/loader v0.1.1 h1:c+e5Pt1k/cy5wMveRDyk2X4B9hF4g7an8N3zCYjJFNM=
 github.com/bytedance/sonic/loader v0.1.1/go.mod h1:ncP89zfokxS5LZrJxl5z0UJcsk4M4yY2JpfqGeCtNLU=

@@ -23,6 +35,8 @@ github.com/cespare/xxhash/v2 v2.3.0 h1:UL815xU9SqsFlibzuggzjXhog7bL6oX9BbNZnL2UF
 github.com/cespare/xxhash/v2 v2.3.0/go.mod h1:VGX0DQ3Q6kWi7AoAeZDth3/j3BFtOZR5XLFGgcrjCOs=
 github.com/cloudwego/base64x v0.1.3 h1:b5J/l8xolB7dyDTTmhJP2oTs5LdrjyrUFuNxdfq5hAg=
 github.com/cloudwego/base64x v0.1.3/go.mod h1:1+1K5BUHIQzyapgpF7LwvOGAEDicKtt1umPV+aN8pi8=
+github.com/cloudwego/base64x v0.1.4 h1:jwCgWpFanWmN8xoIUHa2rtzmkd5J2plF/dnLS6Xd/0Y=
+github.com/cloudwego/base64x v0.1.4/go.mod h1:0zlkT4Wn5C6NdauXdJRhSKRlJvmclQ1hhJgA0rcu/8w=
 github.com/cloudwego/iasm v0.2.0 h1:1KNIy1I1H9hNNFEEH3DVnI4UujN+1zjpuk6gwHLTssg=
 github.com/cloudwego/iasm v0.2.0/go.mod h1:8rXZaNYT2n95jn+zTI1sDr+IgcD2GVs0nlbbQPiEFhY=
 github.com/creack/pty v1.1.9/go.mod h1:oKZEueFk5CKHvIhNR5MUki03XCEU+Q6VDXinZuGJ33E=

@@ -39,16 +53,26 @@ github.com/gabriel-vasile/mimetype v1.4.3 h1:in2uUcidCuFcDKtdcBxlR0rJ1+fsokWf+uq
 github.com/gabriel-vasile/mimetype v1.4.3/go.mod h1:d8uq/6HKRL6CGdk+aubisF/M5GcPfT7nKyLpA0lbSSk=
 github.com/gin-contrib/cors v1.7.1 h1:s9SIppU/rk8enVvkzwiC2VK3UZ/0NNGsWfUKvV55rqs=
 github.com/gin-contrib/cors v1.7.1/go.mod h1:n/Zj7B4xyrgk/cX1WCX2dkzFfaNm/xJb6oIUk7WTtps=
+github.com/gin-contrib/cors v1.7.2 h1:oLDHxdg8W/XDoN/8zamqk/Drgt4oVZDvaV0YmvVICQw=
+github.com/gin-contrib/cors v1.7.2/go.mod h1:SUJVARKgQ40dmrzgXEVxj2m7Ig1v1qIboQkPDTQ9t2E=
 github.com/gin-contrib/gzip v1.0.0 h1:UKN586Po/92IDX6ie5CWLgMI81obiIp5nSP85T3wlTk=
 github.com/gin-contrib/gzip v1.0.0/go.mod h1:CtG7tQrPB3vIBo6Gat9FVUsis+1emjvQqd66ME5TdnE=
+github.com/gin-contrib/gzip v1.0.1 h1:HQ8ENHODeLY7a4g1Au/46Z92bdGFl74OhxcZble9WJE=
+github.com/gin-contrib/gzip v1.0.1/go.mod h1:njt428fdUNRvjuJf16tZMYZ2Yl+WQB53X5wmhDwXvC4=
 github.com/gin-contrib/sessions v1.0.0 h1:r5GLta4Oy5xo9rAwMHx8B4wLpeRGHMdz9NafzJAdP8Y=
 github.com/gin-contrib/sessions v1.0.0/go.mod h1:DN0f4bvpqMQElDdi+gNGScrP2QEI04IErRyMFyorUOI=
+github.com/gin-contrib/sessions v1.0.1 h1:3hsJyNs7v7N8OtelFmYXFrulAf6zSR7nW/putcPEHxI=
+github.com/gin-contrib/sessions v1.0.1/go.mod h1:ouxSFM24/OgIud5MJYQJLpy6AwxQ5EYO9yLhbtObGkM=
 github.com/gin-contrib/sse v0.1.0 h1:Y/yl/+YNO8GZSjAhjMsSuLt29uWRFHdHYUb5lYOV9qE=
 github.com/gin-contrib/sse v0.1.0/go.mod h1:RHrZQHXnP2xjPF+u1gW/2HnVO7nvIa9PG3Gm+fLHvGI=
 github.com/gin-contrib/static v1.1.1 h1:XEvBd4DDLG1HBlyPBQU1XO8NlTpw6mgdqcPteetYA5k=
 github.com/gin-contrib/static v1.1.1/go.mod h1:yRGmar7+JYvbMLRPIi4H5TVVSBwULfT9vetnVD0IO74=
+github.com/gin-contrib/static v1.1.2 h1:c3kT4bFkUJn2aoRU3s6XnMjJT8J6nNWJkR0NglqmlZ4=
+github.com/gin-contrib/static v1.1.2/go.mod h1:Fw90ozjHCmZBWbgrsqrDvO28YbhKEKzKp8GixhR4yLw=
 github.com/gin-gonic/gin v1.9.1 h1:4idEAncQnU5cB7BeOkPtxjfCSye0AAm1R0RVIqJ+Jmg=
 github.com/gin-gonic/gin v1.9.1/go.mod h1:hPrL7YrpYKXt5YId3A/Tnip5kqbEAP+KLuI3SUcPTeU=
+github.com/gin-gonic/gin v1.10.0 h1:nTuyha1TYqgedzytsKYqna+DfLos46nTv2ygFy86HFU=
+github.com/gin-gonic/gin v1.10.0/go.mod h1:4PMNQiOhvDRa013RKVbsiNwoyezlm2rm0uX/T7kzp5Y=
 github.com/go-playground/assert/v2 v2.2.0 h1:JvknZsQTYeFEAhQwI4qEt9cyV5ONwRHC+lYKSsYSR8s=
 github.com/go-playground/locales v0.14.1 h1:EWaQ/wswjilfKLTECiXz7Rh+3BjFhfDFKv/oXslEjJA=
 github.com/go-playground/locales v0.14.1/go.mod h1:hxrqLVvrK65+Rwrd5Fc6F2O76J/NuW9t0sjnWqG1slY=

@@ -56,6 +80,8 @@ github.com/go-playground/universal-translator v0.18.1 h1:Bcnm0ZwsGyWbCzImXv+pAJn
 github.com/go-playground/universal-translator v0.18.1/go.mod h1:xekY+UJKNuX9WP91TpwSH2VMlDf28Uj24BCp08ZFTUY=
 github.com/go-playground/validator/v10 v10.19.0 h1:ol+5Fu+cSq9JD7SoSqe04GMI92cbn0+wvQ3bZ8b/AU4=
 github.com/go-playground/validator/v10 v10.19.0/go.mod h1:dbuPbCMFw/DrkbEynArYaCwl3amGuJotoKCe95atGMM=
+github.com/go-playground/validator/v10 v10.20.0 h1:K9ISHbSaI0lyB2eWMPJo+kOS/FBExVwjEviJTixqxL8=
+github.com/go-playground/validator/v10 v10.20.0/go.mod h1:dbuPbCMFw/DrkbEynArYaCwl3amGuJotoKCe95atGMM=
 github.com/go-redis/redis/v8 v8.11.5 h1:AcZZR7igkdvfVmQTPnu9WE37LRrO/YrBH5zWyjDC0oI=
 github.com/go-redis/redis/v8 v8.11.5/go.mod h1:gREzHqY1hg6oD9ngVRbLStwAWKhA0FEgq8Jd4h5lpwo=
 github.com/go-sql-driver/mysql v1.7.0/go.mod h1:OXbVy3sEdcQ2Doequ6Z5BW6fXNQTmx+9S1MCJN5yJMI=

@@ -63,6 +89,8 @@ github.com/go-sql-driver/mysql v1.8.1 h1:LedoTUt/eveggdHS9qUFC1EFSa8bU2+1pZjSRpv
 github.com/go-sql-driver/mysql v1.8.1/go.mod h1:wEBSXgmK//2ZFJyE+qWnIsVGmvmEKlqwuVSjsCm7DZg=
 github.com/goccy/go-json v0.10.2 h1:CrxCmQqYDkv1z7lO7Wbh2HN93uovUHgrECaO5ZrCXAU=
 github.com/goccy/go-json v0.10.2/go.mod h1:6MelG93GURQebXPDq3khkgXZkazVtN9CRI+MGFi0w8I=
+github.com/goccy/go-json v0.10.3 h1:KZ5WoDbxAIgm2HNbYckL0se1fHD6rz5j4ywS6ebzDqA=
+github.com/goccy/go-json v0.10.3/go.mod h1:oq7eo15ShAhp70Anwd5lgX2pLfOS3QCiwU/PULtXL6M=
 github.com/golang-jwt/jwt v3.2.2+incompatible h1:IfV12K8xAKAnZqdXVzCZ+TOjboZ2keLg81eXfW3O+oY=
 github.com/golang-jwt/jwt v3.2.2+incompatible/go.mod h1:8pz2t5EyA70fFQQSrl6XZXzqecmYZeUEB8OUGHkxJ+I=
 github.com/google/go-cmp v0.5.6 h1:BKbKCqvP6I+rmFHt06ZmyQtvB8xAkWdhFyr0ZUNZcxQ=

@@ -121,10 +149,14 @@ github.com/onsi/ginkgo v1.16.5 h1:8xi0RTUf59SOSfEtZMvwTvXYMzG4gV23XVHOZiXNtnE=
 github.com/onsi/gomega v1.18.1 h1:M1GfJqGRrBrrGGsbxzV5dqM2U2ApXefZCQpkukxYRLE=
 github.com/pelletier/go-toml/v2 v2.2.1 h1:9TA9+T8+8CUCO2+WYnDLCgrYi9+omqKXyjDtosvtEhg=
 github.com/pelletier/go-toml/v2 v2.2.1/go.mod h1:1t835xjRzz80PqgE6HHgN2JOsmgYu/h4qDAS4n929Rs=
+github.com/pelletier/go-toml/v2 v2.2.2 h1:aYUidT7k73Pcl9nb2gScu7NSrKCSHIDE89b3+6Wq+LM=
+github.com/pelletier/go-toml/v2 v2.2.2/go.mod h1:1t835xjRzz80PqgE6HHgN2JOsmgYu/h4qDAS4n929Rs=
 github.com/pkg/errors v0.9.1 h1:FEBLx1zS214owpjy7qsBeixbURkuhQAwrK5UwLGTwt4=
 github.com/pkg/errors v0.9.1/go.mod h1:bwawxfHBFNV+L2hUp1rHADufV3IMtnDRdf1r5NINEl0=
 github.com/pkoukk/tiktoken-go v0.1.6 h1:JF0TlJzhTbrI30wCvFuiw6FzP2+/bR+FIxUdgEAcUsw=
 github.com/pkoukk/tiktoken-go v0.1.6/go.mod h1:9NiV+i9mJKGj1rYOT+njbv+ZwA/zJxYdewGl6qVatpg=
+github.com/pkoukk/tiktoken-go v0.1.7 h1:qOBHXX4PHtvIvmOtyg1EeKlwFRiMKAcoMp4Q+bLQDmw=
 | 
			
		||||
github.com/pkoukk/tiktoken-go v0.1.7/go.mod h1:9NiV+i9mJKGj1rYOT+njbv+ZwA/zJxYdewGl6qVatpg=
 | 
			
		||||
github.com/pmezard/go-difflib v1.0.0 h1:4DBwDE0NGyQoBHbLQYPwSUPoCMWR5BEzIk/f1lZbAQM=
 | 
			
		||||
github.com/pmezard/go-difflib v1.0.0/go.mod h1:iKH77koFhYxTK1pcRnkKkqfTogsbg7gZNVY4sRDYZ/4=
 | 
			
		||||
github.com/rogpeppe/go-internal v1.8.0 h1:FCbCCtXNOY3UtUuHUYaghJg4y7Fd14rXifAYUAtL9R8=
 | 
			
		||||
@@ -151,23 +183,37 @@ github.com/ugorji/go/codec v1.2.12/go.mod h1:UNopzCgEMSXjBc6AOMqYvWC1ktqTAfzJZUZ
 | 
			
		||||
golang.org/x/arch v0.0.0-20210923205945-b76863e36670/go.mod h1:5om86z9Hs0C8fWVUuoMHwpExlXzs5Tkyp9hOrfG7pp8=
 | 
			
		||||
golang.org/x/arch v0.7.0 h1:pskyeJh/3AmoQ8CPE95vxHLqp1G1GfGNXTmcl9NEKTc=
 | 
			
		||||
golang.org/x/arch v0.7.0/go.mod h1:FEVrYAQjsQXMVJ1nsMoVVXPZg6p2JE2mx8psSWTDQys=
 | 
			
		||||
golang.org/x/arch v0.8.0 h1:3wRIsP3pM4yUptoR96otTUOXI367OS0+c9eeRi9doIc=
 | 
			
		||||
golang.org/x/arch v0.8.0/go.mod h1:FEVrYAQjsQXMVJ1nsMoVVXPZg6p2JE2mx8psSWTDQys=
 | 
			
		||||
golang.org/x/crypto v0.22.0 h1:g1v0xeRhjcugydODzvb3mEM9SQ0HGp9s/nh3COQ/C30=
 | 
			
		||||
golang.org/x/crypto v0.22.0/go.mod h1:vr6Su+7cTlO45qkww3VDJlzDn0ctJvRgYbC2NvXHt+M=
 | 
			
		||||
golang.org/x/crypto v0.23.0 h1:dIJU/v2J8Mdglj/8rJ6UUOM3Zc9zLZxVZwwxMooUSAI=
 | 
			
		||||
golang.org/x/crypto v0.23.0/go.mod h1:CKFgDieR+mRhux2Lsu27y0fO304Db0wZe70UKqHu0v8=
 | 
			
		||||
golang.org/x/image v0.15.0 h1:kOELfmgrmJlw4Cdb7g/QGuB3CvDrXbqEIww/pNtNBm8=
 | 
			
		||||
golang.org/x/image v0.15.0/go.mod h1:HUYqC05R2ZcZ3ejNQsIHQDQiwWM4JBqmm6MKANTp4LE=
 | 
			
		||||
golang.org/x/image v0.16.0 h1:9kloLAKhUufZhA12l5fwnx2NZW39/we1UhBesW433jw=
 | 
			
		||||
golang.org/x/image v0.16.0/go.mod h1:ugSZItdV4nOxyqp56HmXwH0Ry0nBCpjnZdpDaIHdoPs=
 | 
			
		||||
golang.org/x/net v0.24.0 h1:1PcaxkF854Fu3+lvBIx5SYn9wRlBzzcnHZSiaFFAb0w=
 | 
			
		||||
golang.org/x/net v0.24.0/go.mod h1:2Q7sJY5mzlzWjKtYUEXSlBWCdyaioyXzRB2RtU8KVE8=
 | 
			
		||||
golang.org/x/net v0.25.0 h1:d/OCCoBEUq33pjydKrGQhw7IlUPI2Oylr+8qLx49kac=
 | 
			
		||||
golang.org/x/net v0.25.0/go.mod h1:JkAGAh7GEvH74S6FOH42FLoXpXbE/aqXSrIQjXgsiwM=
 | 
			
		||||
golang.org/x/sync v0.7.0 h1:YsImfSBoP9QPYL0xyKJPq0gcaJdG3rInoqxTWbfQu9M=
 | 
			
		||||
golang.org/x/sync v0.7.0/go.mod h1:Czt+wKu1gCyEFDUtn0jG5QVvpJ6rzVqr5aXyt9drQfk=
 | 
			
		||||
golang.org/x/sys v0.5.0/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg=
 | 
			
		||||
golang.org/x/sys v0.6.0/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg=
 | 
			
		||||
golang.org/x/sys v0.19.0 h1:q5f1RH2jigJ1MoAWp2KTp3gm5zAGFUTarQZ5U386+4o=
 | 
			
		||||
golang.org/x/sys v0.19.0/go.mod h1:/VUhepiaJMQUp4+oa/7Zr1D23ma6VTLIYjOOTFZPUcA=
 | 
			
		||||
golang.org/x/sys v0.20.0 h1:Od9JTbYCk261bKm4M/mw7AklTlFYIa0bIp9BgSm1S8Y=
 | 
			
		||||
golang.org/x/sys v0.20.0/go.mod h1:/VUhepiaJMQUp4+oa/7Zr1D23ma6VTLIYjOOTFZPUcA=
 | 
			
		||||
golang.org/x/text v0.14.0 h1:ScX5w1eTa3QqT8oi6+ziP7dTV1S2+ALU0bI+0zXKWiQ=
 | 
			
		||||
golang.org/x/text v0.14.0/go.mod h1:18ZOQIKpY8NJVqYksKHtTdi31H5itFRjB5/qKTNYzSU=
 | 
			
		||||
golang.org/x/text v0.15.0 h1:h1V/4gjBv8v9cjcR6+AR5+/cIYK5N/WAgiv4xlsEtAk=
 | 
			
		||||
golang.org/x/text v0.15.0/go.mod h1:18ZOQIKpY8NJVqYksKHtTdi31H5itFRjB5/qKTNYzSU=
 | 
			
		||||
golang.org/x/xerrors v0.0.0-20200804184101-5ec99f83aff1 h1:go1bK/D/BFZV2I8cIQd1NKEZ+0owSTG1fDTci4IqFcE=
 | 
			
		||||
google.golang.org/protobuf v1.33.0 h1:uNO2rsAINq/JlFpSdYEKIZ0uKD/R9cpdv0T+yoGwGmI=
 | 
			
		||||
google.golang.org/protobuf v1.33.0/go.mod h1:c6P6GXX6sHbq/GpV6MGZEdwhWPcYBgnhAHhKbcUYpos=
 | 
			
		||||
google.golang.org/protobuf v1.34.1 h1:9ddQBjfCyZPOHPUiPxpYESBLc+T8P3E+Vo4IbKZgFWg=
 | 
			
		||||
google.golang.org/protobuf v1.34.1/go.mod h1:c6P6GXX6sHbq/GpV6MGZEdwhWPcYBgnhAHhKbcUYpos=
 | 
			
		||||
gopkg.in/check.v1 v0.0.0-20161208181325-20d25e280405/go.mod h1:Co6ibVJAznAaIkqp8huTwlJQCZ016jof/cbN4VW5Yz0=
 | 
			
		||||
gopkg.in/check.v1 v1.0.0-20201130134442-10cb98267c6c h1:Hei/4ADfdWqJk1ZMxUNpqntNwaWcugrBjAiHlqqRiVk=
 | 
			
		||||
gopkg.in/tomb.v1 v1.0.0-20141024135613-dd632973f1e7 h1:uRGJdciOHaEIrze2W8Q3AKkepLTh2hOroT7a+7czfdQ=
 | 
			
		||||
@@ -184,5 +230,7 @@ gorm.io/driver/sqlite v1.5.5/go.mod h1:6NgQ7sQWAIFsPrJJl1lSNSu2TABh0ZZ/zm5fosATa
 | 
			
		||||
gorm.io/gorm v1.25.7/go.mod h1:hbnx/Oo0ChWMn1BIhpy1oYozzpM15i4YPuHDmfYtwg8=
 | 
			
		||||
gorm.io/gorm v1.25.9 h1:wct0gxZIELDk8+ZqF/MVnHLkA1rvYlBWUMv2EdsK1g8=
 | 
			
		||||
gorm.io/gorm v1.25.9/go.mod h1:hbnx/Oo0ChWMn1BIhpy1oYozzpM15i4YPuHDmfYtwg8=
 | 
			
		||||
gorm.io/gorm v1.25.10 h1:dQpO+33KalOA+aFYGlK+EfxcI5MbO7EP2yYygwh9h+s=
 | 
			
		||||
gorm.io/gorm v1.25.10/go.mod h1:hbnx/Oo0ChWMn1BIhpy1oYozzpM15i4YPuHDmfYtwg8=
 | 
			
		||||
nullprogram.com/x/optparse v1.0.0/go.mod h1:KdyPE+Igbe0jQUrVfMqDMeJQIJZEuyV7pjYmp6pbG50=
 | 
			
		||||
rsc.io/pdf v0.1.1/go.mod h1:n8OzWcQ6Sp37PL01nO98y4iUCRdTGarVfzxY20ICaU4=
 | 
			
		||||
 
6	main.go

@@ -7,6 +7,7 @@ import (
	"github.com/gin-contrib/sessions/cookie"
	"github.com/gin-gonic/gin"
	"github.com/songquanpeng/one-api/common"
	"github.com/songquanpeng/one-api/common/client"
	"github.com/songquanpeng/one-api/common/config"
	"github.com/songquanpeng/one-api/common/logger"
	"github.com/songquanpeng/one-api/controller"
@@ -23,7 +24,7 @@ var buildFS embed.FS

func main() {
	logger.SetupLogger()
	logger.SysLog(fmt.Sprintf("One API %s started", common.Version))
	logger.SysLogf("One API %s started", common.Version)
	if os.Getenv("GIN_MODE") != "debug" {
		gin.SetMode(gin.ReleaseMode)
	}
@@ -71,7 +72,7 @@ func main() {
	}
	if config.MemoryCacheEnabled {
		logger.SysLog("memory cache enabled")
		logger.SysError(fmt.Sprintf("sync frequency: %d seconds", config.SyncFrequency))
		logger.SysLog(fmt.Sprintf("sync frequency: %d seconds", config.SyncFrequency))
		model.InitChannelCache()
	}
	if config.MemoryCacheEnabled {
@@ -94,6 +95,7 @@ func main() {
		logger.SysLog("metric enabled, will disable channel if too much request failed")
	}
	openai.InitTokenEncoders()
	client.Init()

	// Initialize HTTP server
	server := gin.New()
 
 | 
			
		||||
@@ -5,6 +5,7 @@ import (
 | 
			
		||||
	"github.com/gin-contrib/sessions"
 | 
			
		||||
	"github.com/gin-gonic/gin"
 | 
			
		||||
	"github.com/songquanpeng/one-api/common/blacklist"
 | 
			
		||||
	"github.com/songquanpeng/one-api/common/ctxkey"
 | 
			
		||||
	"github.com/songquanpeng/one-api/common/network"
 | 
			
		||||
	"github.com/songquanpeng/one-api/model"
 | 
			
		||||
	"net/http"
 | 
			
		||||
@@ -120,20 +121,20 @@ func TokenAuth() func(c *gin.Context) {
 | 
			
		||||
			abortWithMessage(c, http.StatusBadRequest, err.Error())
 | 
			
		||||
			return
 | 
			
		||||
		}
 | 
			
		||||
		c.Set("request_model", requestModel)
 | 
			
		||||
		c.Set(ctxkey.RequestModel, requestModel)
 | 
			
		||||
		if token.Models != nil && *token.Models != "" {
 | 
			
		||||
			c.Set("available_models", *token.Models)
 | 
			
		||||
			c.Set(ctxkey.AvailableModels, *token.Models)
 | 
			
		||||
			if requestModel != "" && !isModelInList(requestModel, *token.Models) {
 | 
			
		||||
				abortWithMessage(c, http.StatusForbidden, fmt.Sprintf("该令牌无权使用模型:%s", requestModel))
 | 
			
		||||
				return
 | 
			
		||||
			}
 | 
			
		||||
		}
 | 
			
		||||
		c.Set("id", token.UserId)
 | 
			
		||||
		c.Set("token_id", token.Id)
 | 
			
		||||
		c.Set("token_name", token.Name)
 | 
			
		||||
		c.Set(ctxkey.Id, token.UserId)
 | 
			
		||||
		c.Set(ctxkey.TokenId, token.Id)
 | 
			
		||||
		c.Set(ctxkey.TokenName, token.Name)
 | 
			
		||||
		if len(parts) > 1 {
 | 
			
		||||
			if model.IsAdmin(token.UserId) {
 | 
			
		||||
				c.Set("specific_channel_id", parts[1])
 | 
			
		||||
				c.Set(ctxkey.SpecificChannelId, parts[1])
 | 
			
		||||
			} else {
 | 
			
		||||
				abortWithMessage(c, http.StatusForbidden, "普通用户不支持指定渠道")
 | 
			
		||||
				return
 | 
			
		||||
 
 | 
			
		||||
@@ -3,7 +3,6 @@ package middleware
 | 
			
		||||
import (
 | 
			
		||||
	"fmt"
 | 
			
		||||
	"github.com/gin-gonic/gin"
 | 
			
		||||
	"github.com/songquanpeng/one-api/common/config"
 | 
			
		||||
	"github.com/songquanpeng/one-api/common/ctxkey"
 | 
			
		||||
	"github.com/songquanpeng/one-api/common/logger"
 | 
			
		||||
	"github.com/songquanpeng/one-api/model"
 | 
			
		||||
@@ -18,12 +17,12 @@ type ModelRequest struct {
 | 
			
		||||
 | 
			
		||||
func Distribute() func(c *gin.Context) {
 | 
			
		||||
	return func(c *gin.Context) {
 | 
			
		||||
		userId := c.GetInt("id")
 | 
			
		||||
		userId := c.GetInt(ctxkey.Id)
 | 
			
		||||
		userGroup, _ := model.CacheGetUserGroup(userId)
 | 
			
		||||
		c.Set("group", userGroup)
 | 
			
		||||
		c.Set(ctxkey.Group, userGroup)
 | 
			
		||||
		var requestModel string
 | 
			
		||||
		var channel *model.Channel
 | 
			
		||||
		channelId, ok := c.Get("specific_channel_id")
 | 
			
		||||
		channelId, ok := c.Get(ctxkey.SpecificChannelId)
 | 
			
		||||
		if ok {
 | 
			
		||||
			id, err := strconv.Atoi(channelId.(string))
 | 
			
		||||
			if err != nil {
 | 
			
		||||
@@ -40,7 +39,7 @@ func Distribute() func(c *gin.Context) {
 | 
			
		||||
				return
 | 
			
		||||
			}
 | 
			
		||||
		} else {
 | 
			
		||||
			requestModel = c.GetString("request_model")
 | 
			
		||||
			requestModel = c.GetString(ctxkey.RequestModel)
 | 
			
		||||
			var err error
 | 
			
		||||
			channel, err = model.CacheGetRandomSatisfiedChannel(userGroup, requestModel, false)
 | 
			
		||||
			if err != nil {
 | 
			
		||||
@@ -59,28 +58,38 @@ func Distribute() func(c *gin.Context) {
 | 
			
		||||
}
 | 
			
		||||
 | 
			
		||||
func SetupContextForSelectedChannel(c *gin.Context, channel *model.Channel, modelName string) {
 | 
			
		||||
	c.Set("channel", channel.Type)
 | 
			
		||||
	c.Set("channel_id", channel.Id)
 | 
			
		||||
	c.Set("channel_name", channel.Name)
 | 
			
		||||
	c.Set("model_mapping", channel.GetModelMapping())
 | 
			
		||||
	c.Set(ctxkey.Channel, channel.Type)
 | 
			
		||||
	c.Set(ctxkey.ChannelId, channel.Id)
 | 
			
		||||
	c.Set(ctxkey.ChannelName, channel.Name)
 | 
			
		||||
	c.Set(ctxkey.ModelMapping, channel.GetModelMapping())
 | 
			
		||||
	c.Set(ctxkey.OriginalModel, modelName) // for retry
 | 
			
		||||
	c.Request.Header.Set("Authorization", fmt.Sprintf("Bearer %s", channel.Key))
 | 
			
		||||
	c.Set("base_url", channel.GetBaseURL())
 | 
			
		||||
	c.Set(ctxkey.BaseURL, channel.GetBaseURL())
 | 
			
		||||
	cfg, _ := channel.LoadConfig()
 | 
			
		||||
	// this is for backward compatibility
 | 
			
		||||
	if channel.Other != nil {
 | 
			
		||||
		switch channel.Type {
 | 
			
		||||
		case channeltype.Azure:
 | 
			
		||||
		c.Set(config.KeyAPIVersion, channel.Other)
 | 
			
		||||
			if cfg.APIVersion == "" {
 | 
			
		||||
				cfg.APIVersion = *channel.Other
 | 
			
		||||
			}
 | 
			
		||||
		case channeltype.Xunfei:
 | 
			
		||||
		c.Set(config.KeyAPIVersion, channel.Other)
 | 
			
		||||
			if cfg.APIVersion == "" {
 | 
			
		||||
				cfg.APIVersion = *channel.Other
 | 
			
		||||
			}
 | 
			
		||||
		case channeltype.Gemini:
 | 
			
		||||
		c.Set(config.KeyAPIVersion, channel.Other)
 | 
			
		||||
			if cfg.APIVersion == "" {
 | 
			
		||||
				cfg.APIVersion = *channel.Other
 | 
			
		||||
			}
 | 
			
		||||
		case channeltype.AIProxyLibrary:
 | 
			
		||||
		c.Set(config.KeyLibraryID, channel.Other)
 | 
			
		||||
			if cfg.LibraryID == "" {
 | 
			
		||||
				cfg.LibraryID = *channel.Other
 | 
			
		||||
			}
 | 
			
		||||
		case channeltype.Ali:
 | 
			
		||||
		c.Set(config.KeyPlugin, channel.Other)
 | 
			
		||||
	}
 | 
			
		||||
	cfg, _ := channel.LoadConfig()
 | 
			
		||||
	for k, v := range cfg {
 | 
			
		||||
		c.Set(config.KeyPrefix+k, v)
 | 
			
		||||
			if cfg.Plugin == "" {
 | 
			
		||||
				cfg.Plugin = *channel.Other
 | 
			
		||||
			}
 | 
			
		||||
		}
 | 
			
		||||
	}
 | 
			
		||||
	c.Set(ctxkey.Config, cfg)
 | 
			
		||||
}
 | 
			
		||||
 
 | 
			
		||||
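The middleware changes above replace raw string keys on the gin context with named constants from the common/ctxkey package. A minimal sketch of such a constants file is shown below; the exact constant values are assumptions, chosen to mirror the string literals the old code passed to c.Set/c.Get, so existing call sites keep resolving the same context entries.

```go
// Package ctxkey centralizes the gin.Context keys used by the middlewares.
// NOTE: illustrative sketch only - the values below are assumptions that
// mirror the literal keys used before this change.
package ctxkey

const (
	Id                = "id"
	Group             = "group"
	RequestModel      = "request_model"
	AvailableModels   = "available_models"
	TokenId           = "token_id"
	TokenName         = "token_name"
	SpecificChannelId = "specific_channel_id"
	Channel           = "channel"
	ChannelId         = "channel_id"
	ChannelName       = "channel_name"
	ModelMapping      = "model_mapping"
	OriginalModel     = "original_model"
	BaseURL           = "base_url"
	Config            = "config"
)
```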
@@ -3,14 +3,14 @@ package middleware
import (
	"fmt"
	"github.com/gin-gonic/gin"
	"github.com/songquanpeng/one-api/common/logger"
	"github.com/songquanpeng/one-api/common/helper"
)

func SetUpLogger(server *gin.Engine) {
	server.Use(gin.LoggerWithFormatter(func(param gin.LogFormatterParams) string {
		var requestID string
		if param.Keys != nil {
			requestID = param.Keys[logger.RequestIdKey].(string)
			requestID = param.Keys[helper.RequestIdKey].(string)
		}
		return fmt.Sprintf("[GIN] %s | %s | %3d | %13v | %15s | %7s %s\n",
			param.TimeStamp.Format("2006/01/02 - 15:04:05"),
 
 | 
			
		||||
@@ -4,16 +4,15 @@ import (
	"context"
	"github.com/gin-gonic/gin"
	"github.com/songquanpeng/one-api/common/helper"
	"github.com/songquanpeng/one-api/common/logger"
)

func RequestId() func(c *gin.Context) {
	return func(c *gin.Context) {
		id := helper.GenRequestID()
		c.Set(logger.RequestIdKey, id)
		ctx := context.WithValue(c.Request.Context(), logger.RequestIdKey, id)
		c.Set(helper.RequestIdKey, id)
		ctx := context.WithValue(c.Request.Context(), helper.RequestIdKey, id)
		c.Request = c.Request.WithContext(ctx)
		c.Header(logger.RequestIdKey, id)
		c.Header(helper.RequestIdKey, id)
		c.Next()
	}
}
 
 | 
			
		||||
@@ -12,7 +12,7 @@ import (
func abortWithMessage(c *gin.Context, statusCode int, message string) {
	c.JSON(statusCode, gin.H{
		"error": gin.H{
			"message": helper.MessageWithRequestId(message, c.GetString(logger.RequestIdKey)),
			"message": helper.MessageWithRequestId(message, c.GetString(helper.RequestIdKey)),
			"type":    "one_api_error",
		},
	})
 
 | 
			
		||||
@@ -27,7 +27,7 @@ type Channel struct {
	TestTime           int64   `json:"test_time" gorm:"bigint"`
	ResponseTime       int     `json:"response_time"` // in milliseconds
	BaseURL            *string `json:"base_url" gorm:"column:base_url;default:''"`
	Other              string  `json:"other"`   // DEPRECATED: please save config to field Config
	Other              *string `json:"other"`   // DEPRECATED: please save config to field Config
	Balance            float64 `json:"balance"` // in USD
	BalanceUpdatedTime int64   `json:"balance_updated_time" gorm:"bigint"`
	Models             string  `json:"models"`
@@ -38,6 +38,16 @@ type Channel struct {
	Config             string  `json:"config"`
}

type ChannelConfig struct {
	Region     string `json:"region,omitempty"`
	SK         string `json:"sk,omitempty"`
	AK         string `json:"ak,omitempty"`
	UserID     string `json:"user_id,omitempty"`
	APIVersion string `json:"api_version,omitempty"`
	LibraryID  string `json:"library_id,omitempty"`
	Plugin     string `json:"plugin,omitempty"`
}

func GetAllChannels(startIdx int, num int, scope string) ([]*Channel, error) {
	var channels []*Channel
	var err error
@@ -161,14 +171,14 @@ func (channel *Channel) Delete() error {
	return err
}

func (channel *Channel) LoadConfig() (map[string]string, error) {
func (channel *Channel) LoadConfig() (ChannelConfig, error) {
	var cfg ChannelConfig
	if channel.Config == "" {
		return nil, nil
		return cfg, nil
	}
	cfg := make(map[string]string)
	err := json.Unmarshal([]byte(channel.Config), &cfg)
	if err != nil {
		return nil, err
		return cfg, err
	}
	return cfg, nil
}
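With LoadConfig now returning a typed ChannelConfig instead of a map[string]string, a channel's Config column is expected to hold a JSON object whose keys match the struct tags above. A small self-contained sketch of that round trip follows; the sample values are made up and the struct is copied from the diff for illustration only.

```go
package main

import (
	"encoding/json"
	"fmt"
)

// ChannelConfig mirrors the struct added in model/channel.go above.
type ChannelConfig struct {
	Region     string `json:"region,omitempty"`
	SK         string `json:"sk,omitempty"`
	AK         string `json:"ak,omitempty"`
	UserID     string `json:"user_id,omitempty"`
	APIVersion string `json:"api_version,omitempty"`
	LibraryID  string `json:"library_id,omitempty"`
	Plugin     string `json:"plugin,omitempty"`
}

func main() {
	// Example of what a channel's Config field could contain, e.g. for an
	// AWS Bedrock style channel (placeholder values, not real credentials).
	raw := `{"region":"us-east-1","ak":"AKIAEXAMPLE","sk":"secret"}`

	var cfg ChannelConfig
	if err := json.Unmarshal([]byte(raw), &cfg); err != nil {
		panic(err)
	}
	fmt.Printf("%+v\n", cfg) // {Region:us-east-1 SK:secret AK:AKIAEXAMPLE ...}
}
```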
 
 | 
			
		||||
@@ -6,6 +6,7 @@ import (
 | 
			
		||||
	"github.com/songquanpeng/one-api/common"
 | 
			
		||||
	"github.com/songquanpeng/one-api/common/blacklist"
 | 
			
		||||
	"github.com/songquanpeng/one-api/common/config"
 | 
			
		||||
	"github.com/songquanpeng/one-api/common/helper"
 | 
			
		||||
	"github.com/songquanpeng/one-api/common/logger"
 | 
			
		||||
	"github.com/songquanpeng/one-api/common/random"
 | 
			
		||||
	"gorm.io/gorm"
 | 
			
		||||
@@ -140,6 +141,22 @@ func (user *User) Insert(inviterId int) error {
 | 
			
		||||
			RecordLog(inviterId, LogTypeSystem, fmt.Sprintf("邀请用户赠送 %s", common.LogQuota(config.QuotaForInviter)))
 | 
			
		||||
		}
 | 
			
		||||
	}
 | 
			
		||||
	// create default token
 | 
			
		||||
	cleanToken := Token{
 | 
			
		||||
		UserId:         user.Id,
 | 
			
		||||
		Name:           "default",
 | 
			
		||||
		Key:            random.GenerateKey(),
 | 
			
		||||
		CreatedTime:    helper.GetTimestamp(),
 | 
			
		||||
		AccessedTime:   helper.GetTimestamp(),
 | 
			
		||||
		ExpiredTime:    -1,
 | 
			
		||||
		RemainQuota:    -1,
 | 
			
		||||
		UnlimitedQuota: true,
 | 
			
		||||
	}
 | 
			
		||||
	result.Error = cleanToken.Insert()
 | 
			
		||||
	if result.Error != nil {
 | 
			
		||||
		// do not block
 | 
			
		||||
		logger.SysError(fmt.Sprintf("create default token for user %d failed: %s", user.Id, result.Error.Error()))
 | 
			
		||||
	}
 | 
			
		||||
	return nil
 | 
			
		||||
}
 | 
			
		||||
 | 
			
		||||
 
 | 
			
		||||
@@ -7,6 +7,10 @@ import (
 | 
			
		||||
	"github.com/songquanpeng/one-api/relay/adaptor/anthropic"
 | 
			
		||||
	"github.com/songquanpeng/one-api/relay/adaptor/aws"
 | 
			
		||||
	"github.com/songquanpeng/one-api/relay/adaptor/baidu"
 | 
			
		||||
	"github.com/songquanpeng/one-api/relay/adaptor/cloudflare"
 | 
			
		||||
	"github.com/songquanpeng/one-api/relay/adaptor/cohere"
 | 
			
		||||
	"github.com/songquanpeng/one-api/relay/adaptor/coze"
 | 
			
		||||
	"github.com/songquanpeng/one-api/relay/adaptor/deepl"
 | 
			
		||||
	"github.com/songquanpeng/one-api/relay/adaptor/gemini"
 | 
			
		||||
	"github.com/songquanpeng/one-api/relay/adaptor/ollama"
 | 
			
		||||
	"github.com/songquanpeng/one-api/relay/adaptor/openai"
 | 
			
		||||
@@ -43,6 +47,14 @@ func GetAdaptor(apiType int) adaptor.Adaptor {
 | 
			
		||||
		return &zhipu.Adaptor{}
 | 
			
		||||
	case apitype.Ollama:
 | 
			
		||||
		return &ollama.Adaptor{}
 | 
			
		||||
	case apitype.Coze:
 | 
			
		||||
		return &coze.Adaptor{}
 | 
			
		||||
	case apitype.Cohere:
 | 
			
		||||
		return &cohere.Adaptor{}
 | 
			
		||||
	case apitype.Cloudflare:
 | 
			
		||||
		return &cloudflare.Adaptor{}
 | 
			
		||||
	case apitype.DeepL:
 | 
			
		||||
		return &deepl.Adaptor{}
 | 
			
		||||
	}
 | 
			
		||||
	return nil
 | 
			
		||||
}
 | 
			
		||||
 
 | 
			
		||||
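The switch above now also covers the new Coze, Cohere, Cloudflare, and DeepL API types. As an illustration of how a caller resolves one of them, here is a minimal sketch; the import paths are assumptions inferred from the package names used elsewhere in this diff, not confirmed by it.

```go
package main

import (
	"fmt"

	// Assumed import paths for the relay package that defines GetAdaptor
	// and the apitype constants referenced in the switch above.
	"github.com/songquanpeng/one-api/relay"
	"github.com/songquanpeng/one-api/relay/apitype"
)

func main() {
	// Resolve the adaptor for the newly added Cloudflare API type.
	a := relay.GetAdaptor(apitype.Cloudflare)
	if a == nil {
		fmt.Println("no adaptor registered for this api type")
		return
	}
	fmt.Println(a.GetChannelName())           // "cloudflare"
	fmt.Println(len(a.GetModelList()), "models")
}
```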
@@ -4,7 +4,6 @@ import (
 | 
			
		||||
	"errors"
 | 
			
		||||
	"fmt"
 | 
			
		||||
	"github.com/gin-gonic/gin"
 | 
			
		||||
	"github.com/songquanpeng/one-api/common/config"
 | 
			
		||||
	"github.com/songquanpeng/one-api/relay/adaptor"
 | 
			
		||||
	"github.com/songquanpeng/one-api/relay/meta"
 | 
			
		||||
	"github.com/songquanpeng/one-api/relay/model"
 | 
			
		||||
@@ -13,10 +12,11 @@ import (
 | 
			
		||||
)
 | 
			
		||||
 | 
			
		||||
type Adaptor struct {
 | 
			
		||||
	meta *meta.Meta
 | 
			
		||||
}
 | 
			
		||||
 | 
			
		||||
func (a *Adaptor) Init(meta *meta.Meta) {
 | 
			
		||||
 | 
			
		||||
	a.meta = meta
 | 
			
		||||
}
 | 
			
		||||
 | 
			
		||||
func (a *Adaptor) GetRequestURL(meta *meta.Meta) (string, error) {
 | 
			
		||||
@@ -34,7 +34,7 @@ func (a *Adaptor) ConvertRequest(c *gin.Context, relayMode int, request *model.G
 | 
			
		||||
		return nil, errors.New("request is nil")
 | 
			
		||||
	}
 | 
			
		||||
	aiProxyLibraryRequest := ConvertRequest(*request)
 | 
			
		||||
	aiProxyLibraryRequest.LibraryId = c.GetString(config.KeyLibraryID)
 | 
			
		||||
	aiProxyLibraryRequest.LibraryId = a.meta.Config.LibraryID
 | 
			
		||||
	return aiProxyLibraryRequest, nil
 | 
			
		||||
}
 | 
			
		||||
 | 
			
		||||
 
 | 
			
		||||
@@ -4,7 +4,6 @@ import (
 | 
			
		||||
	"errors"
 | 
			
		||||
	"fmt"
 | 
			
		||||
	"github.com/gin-gonic/gin"
 | 
			
		||||
	"github.com/songquanpeng/one-api/common/config"
 | 
			
		||||
	"github.com/songquanpeng/one-api/relay/adaptor"
 | 
			
		||||
	"github.com/songquanpeng/one-api/relay/meta"
 | 
			
		||||
	"github.com/songquanpeng/one-api/relay/model"
 | 
			
		||||
@@ -16,10 +15,11 @@ import (
 | 
			
		||||
// https://help.aliyun.com/zh/dashscope/developer-reference/api-details
 | 
			
		||||
 | 
			
		||||
type Adaptor struct {
 | 
			
		||||
	meta *meta.Meta
 | 
			
		||||
}
 | 
			
		||||
 | 
			
		||||
func (a *Adaptor) Init(meta *meta.Meta) {
 | 
			
		||||
 | 
			
		||||
	a.meta = meta
 | 
			
		||||
}
 | 
			
		||||
 | 
			
		||||
func (a *Adaptor) GetRequestURL(meta *meta.Meta) (string, error) {
 | 
			
		||||
@@ -47,8 +47,8 @@ func (a *Adaptor) SetupRequestHeader(c *gin.Context, req *http.Request, meta *me
 | 
			
		||||
	if meta.Mode == relaymode.ImagesGenerations {
 | 
			
		||||
		req.Header.Set("X-DashScope-Async", "enable")
 | 
			
		||||
	}
 | 
			
		||||
	if c.GetString(config.KeyPlugin) != "" {
 | 
			
		||||
		req.Header.Set("X-DashScope-Plugin", c.GetString(config.KeyPlugin))
 | 
			
		||||
	if a.meta.Config.Plugin != "" {
 | 
			
		||||
		req.Header.Set("X-DashScope-Plugin", a.meta.Config.Plugin)
 | 
			
		||||
	}
 | 
			
		||||
	return nil
 | 
			
		||||
}
 | 
			
		||||
 
 | 
			
		||||
@@ -4,6 +4,10 @@ import (
 | 
			
		||||
	"bufio"
 | 
			
		||||
	"encoding/json"
 | 
			
		||||
	"fmt"
 | 
			
		||||
	"io"
 | 
			
		||||
	"net/http"
 | 
			
		||||
	"strings"
 | 
			
		||||
 | 
			
		||||
	"github.com/gin-gonic/gin"
 | 
			
		||||
	"github.com/songquanpeng/one-api/common"
 | 
			
		||||
	"github.com/songquanpeng/one-api/common/helper"
 | 
			
		||||
@@ -11,9 +15,6 @@ import (
 | 
			
		||||
	"github.com/songquanpeng/one-api/common/logger"
 | 
			
		||||
	"github.com/songquanpeng/one-api/relay/adaptor/openai"
 | 
			
		||||
	"github.com/songquanpeng/one-api/relay/model"
 | 
			
		||||
	"io"
 | 
			
		||||
	"net/http"
 | 
			
		||||
	"strings"
 | 
			
		||||
)
 | 
			
		||||
 | 
			
		||||
func stopReasonClaude2OpenAI(reason *string) string {
 | 
			
		||||
@@ -192,7 +193,7 @@ func StreamHandler(c *gin.Context, resp *http.Response) (*model.ErrorWithStatusC
 | 
			
		||||
		select {
 | 
			
		||||
		case data := <-dataChan:
 | 
			
		||||
			// some implementations may add \r at the end of data
 | 
			
		||||
			data = strings.TrimSuffix(data, "\r")
 | 
			
		||||
			data = strings.TrimSpace(data)
 | 
			
		||||
			var claudeResponse StreamResponse
 | 
			
		||||
			err := json.Unmarshal([]byte(data), &claudeResponse)
 | 
			
		||||
			if err != nil {
 | 
			
		||||
 
 | 
			
		||||
@@ -1,6 +1,9 @@
 | 
			
		||||
package aws
 | 
			
		||||
 | 
			
		||||
import (
 | 
			
		||||
	"github.com/aws/aws-sdk-go-v2/aws"
 | 
			
		||||
	"github.com/aws/aws-sdk-go-v2/credentials"
 | 
			
		||||
	"github.com/aws/aws-sdk-go-v2/service/bedrockruntime"
 | 
			
		||||
	"github.com/songquanpeng/one-api/common/ctxkey"
 | 
			
		||||
	"io"
 | 
			
		||||
	"net/http"
 | 
			
		||||
@@ -16,10 +19,16 @@ import (
 | 
			
		||||
var _ adaptor.Adaptor = new(Adaptor)
 | 
			
		||||
 | 
			
		||||
type Adaptor struct {
 | 
			
		||||
	meta      *meta.Meta
 | 
			
		||||
	awsClient *bedrockruntime.Client
 | 
			
		||||
}
 | 
			
		||||
 | 
			
		||||
func (a *Adaptor) Init(meta *meta.Meta) {
 | 
			
		||||
 | 
			
		||||
	a.meta = meta
 | 
			
		||||
	a.awsClient = bedrockruntime.New(bedrockruntime.Options{
 | 
			
		||||
		Region:      meta.Config.Region,
 | 
			
		||||
		Credentials: aws.NewCredentialsCache(credentials.NewStaticCredentialsProvider(meta.Config.AK, meta.Config.SK, "")),
 | 
			
		||||
	})
 | 
			
		||||
}
 | 
			
		||||
 | 
			
		||||
func (a *Adaptor) GetRequestURL(meta *meta.Meta) (string, error) {
 | 
			
		||||
@@ -54,9 +63,9 @@ func (a *Adaptor) DoRequest(c *gin.Context, meta *meta.Meta, requestBody io.Read
 | 
			
		||||
 | 
			
		||||
func (a *Adaptor) DoResponse(c *gin.Context, resp *http.Response, meta *meta.Meta) (usage *model.Usage, err *model.ErrorWithStatusCode) {
 | 
			
		||||
	if meta.IsStream {
 | 
			
		||||
		err, usage = StreamHandler(c, resp)
 | 
			
		||||
		err, usage = StreamHandler(c, a.awsClient)
 | 
			
		||||
	} else {
 | 
			
		||||
		err, usage = Handler(c, resp, meta.PromptTokens, meta.ActualModelName)
 | 
			
		||||
		err, usage = Handler(c, a.awsClient, meta.ActualModelName)
 | 
			
		||||
	}
 | 
			
		||||
	return
 | 
			
		||||
}
 | 
			
		||||
@@ -65,7 +74,6 @@ func (a *Adaptor) GetModelList() (models []string) {
 | 
			
		||||
	for n := range awsModelIDMap {
 | 
			
		||||
		models = append(models, n)
 | 
			
		||||
	}
 | 
			
		||||
 | 
			
		||||
	return
 | 
			
		||||
}
 | 
			
		||||
 | 
			
		||||
 
 | 
			
		||||
@@ -5,13 +5,11 @@ import (
 | 
			
		||||
	"bytes"
 | 
			
		||||
	"encoding/json"
 | 
			
		||||
	"fmt"
 | 
			
		||||
	"github.com/songquanpeng/one-api/common/config"
 | 
			
		||||
	"github.com/songquanpeng/one-api/common/ctxkey"
 | 
			
		||||
	"io"
 | 
			
		||||
	"net/http"
 | 
			
		||||
 | 
			
		||||
	"github.com/aws/aws-sdk-go-v2/aws"
 | 
			
		||||
	"github.com/aws/aws-sdk-go-v2/credentials"
 | 
			
		||||
	"github.com/aws/aws-sdk-go-v2/service/bedrockruntime"
 | 
			
		||||
	"github.com/aws/aws-sdk-go-v2/service/bedrockruntime/types"
 | 
			
		||||
	"github.com/gin-gonic/gin"
 | 
			
		||||
@@ -24,18 +22,6 @@ import (
 | 
			
		||||
	relaymodel "github.com/songquanpeng/one-api/relay/model"
 | 
			
		||||
)
 | 
			
		||||
 | 
			
		||||
func newAwsClient(c *gin.Context) (*bedrockruntime.Client, error) {
 | 
			
		||||
	ak := c.GetString(config.KeyAK)
 | 
			
		||||
	sk := c.GetString(config.KeySK)
 | 
			
		||||
	region := c.GetString(config.KeyRegion)
 | 
			
		||||
	client := bedrockruntime.New(bedrockruntime.Options{
 | 
			
		||||
		Region:      region,
 | 
			
		||||
		Credentials: aws.NewCredentialsCache(credentials.NewStaticCredentialsProvider(ak, sk, "")),
 | 
			
		||||
	})
 | 
			
		||||
 | 
			
		||||
	return client, nil
 | 
			
		||||
}
 | 
			
		||||
 | 
			
		||||
func wrapErr(err error) *relaymodel.ErrorWithStatusCode {
 | 
			
		||||
	return &relaymodel.ErrorWithStatusCode{
 | 
			
		||||
		StatusCode: http.StatusInternalServerError,
 | 
			
		||||
@@ -63,12 +49,7 @@ func awsModelID(requestModel string) (string, error) {
 | 
			
		||||
	return "", errors.Errorf("model %s not found", requestModel)
 | 
			
		||||
}
 | 
			
		||||
 | 
			
		||||
func Handler(c *gin.Context, resp *http.Response, promptTokens int, modelName string) (*relaymodel.ErrorWithStatusCode, *relaymodel.Usage) {
 | 
			
		||||
	awsCli, err := newAwsClient(c)
 | 
			
		||||
	if err != nil {
 | 
			
		||||
		return wrapErr(errors.Wrap(err, "newAwsClient")), nil
 | 
			
		||||
	}
 | 
			
		||||
 | 
			
		||||
func Handler(c *gin.Context, awsCli *bedrockruntime.Client, modelName string) (*relaymodel.ErrorWithStatusCode, *relaymodel.Usage) {
 | 
			
		||||
	awsModelId, err := awsModelID(c.GetString(ctxkey.RequestModel))
 | 
			
		||||
	if err != nil {
 | 
			
		||||
		return wrapErr(errors.Wrap(err, "awsModelID")), nil
 | 
			
		||||
@@ -121,13 +102,8 @@ func Handler(c *gin.Context, resp *http.Response, promptTokens int, modelName st
 | 
			
		||||
	return nil, &usage
 | 
			
		||||
}
 | 
			
		||||
 | 
			
		||||
func StreamHandler(c *gin.Context, resp *http.Response) (*relaymodel.ErrorWithStatusCode, *relaymodel.Usage) {
 | 
			
		||||
func StreamHandler(c *gin.Context, awsCli *bedrockruntime.Client) (*relaymodel.ErrorWithStatusCode, *relaymodel.Usage) {
 | 
			
		||||
	createdTime := helper.GetTimestamp()
 | 
			
		||||
	awsCli, err := newAwsClient(c)
 | 
			
		||||
	if err != nil {
 | 
			
		||||
		return wrapErr(errors.Wrap(err, "newAwsClient")), nil
 | 
			
		||||
	}
 | 
			
		||||
 | 
			
		||||
	awsModelId, err := awsModelID(c.GetString(ctxkey.RequestModel))
 | 
			
		||||
	if err != nil {
 | 
			
		||||
		return wrapErr(errors.Wrap(err, "awsModelID")), nil
 | 
			
		||||
 
 | 
			
		||||
@@ -1,15 +0,0 @@
 | 
			
		||||
package azure
 | 
			
		||||
 | 
			
		||||
import (
 | 
			
		||||
	"github.com/gin-gonic/gin"
 | 
			
		||||
	"github.com/songquanpeng/one-api/common/config"
 | 
			
		||||
)
 | 
			
		||||
 | 
			
		||||
func GetAPIVersion(c *gin.Context) string {
 | 
			
		||||
	query := c.Request.URL.Query()
 | 
			
		||||
	apiVersion := query.Get("api-version")
 | 
			
		||||
	if apiVersion == "" {
 | 
			
		||||
		apiVersion = c.GetString(config.KeyAPIVersion)
 | 
			
		||||
	}
 | 
			
		||||
	return apiVersion
 | 
			
		||||
}
 | 
			
		||||
@@ -7,9 +7,9 @@ import (
 | 
			
		||||
	"fmt"
 | 
			
		||||
	"github.com/gin-gonic/gin"
 | 
			
		||||
	"github.com/songquanpeng/one-api/common"
 | 
			
		||||
	"github.com/songquanpeng/one-api/common/client"
 | 
			
		||||
	"github.com/songquanpeng/one-api/common/logger"
 | 
			
		||||
	"github.com/songquanpeng/one-api/relay/adaptor/openai"
 | 
			
		||||
	"github.com/songquanpeng/one-api/relay/client"
 | 
			
		||||
	"github.com/songquanpeng/one-api/relay/constant"
 | 
			
		||||
	"github.com/songquanpeng/one-api/relay/model"
 | 
			
		||||
	"io"
 | 
			
		||||
 
66	relay/adaptor/cloudflare/adaptor.go (new file)

@@ -0,0 +1,66 @@
package cloudflare
 | 
			
		||||
 | 
			
		||||
import (
 | 
			
		||||
	"errors"
 | 
			
		||||
	"fmt"
 | 
			
		||||
	"io"
 | 
			
		||||
	"net/http"
 | 
			
		||||
 | 
			
		||||
	"github.com/gin-gonic/gin"
 | 
			
		||||
	"github.com/songquanpeng/one-api/relay/adaptor"
 | 
			
		||||
	"github.com/songquanpeng/one-api/relay/meta"
 | 
			
		||||
	"github.com/songquanpeng/one-api/relay/model"
 | 
			
		||||
)
 | 
			
		||||
 | 
			
		||||
type Adaptor struct {
 | 
			
		||||
	meta *meta.Meta
 | 
			
		||||
}
 | 
			
		||||
 | 
			
		||||
// ConvertImageRequest implements adaptor.Adaptor.
 | 
			
		||||
func (*Adaptor) ConvertImageRequest(request *model.ImageRequest) (any, error) {
 | 
			
		||||
	return nil, errors.New("not implemented")
 | 
			
		||||
}
 | 
			
		||||
 | 
			
		||||
// ConvertImageRequest implements adaptor.Adaptor.
 | 
			
		||||
 | 
			
		||||
func (a *Adaptor) Init(meta *meta.Meta) {
 | 
			
		||||
	a.meta = meta
 | 
			
		||||
}
 | 
			
		||||
 | 
			
		||||
func (a *Adaptor) GetRequestURL(meta *meta.Meta) (string, error) {
 | 
			
		||||
	return fmt.Sprintf("%s/client/v4/accounts/%s/ai/run/%s", meta.BaseURL, meta.Config.UserID, meta.ActualModelName), nil
 | 
			
		||||
}
 | 
			
		||||
 | 
			
		||||
func (a *Adaptor) SetupRequestHeader(c *gin.Context, req *http.Request, meta *meta.Meta) error {
 | 
			
		||||
	adaptor.SetupCommonRequestHeader(c, req, meta)
 | 
			
		||||
	req.Header.Set("Authorization", "Bearer "+meta.APIKey)
 | 
			
		||||
	return nil
 | 
			
		||||
}
 | 
			
		||||
 | 
			
		||||
func (a *Adaptor) ConvertRequest(c *gin.Context, relayMode int, request *model.GeneralOpenAIRequest) (any, error) {
 | 
			
		||||
	if request == nil {
 | 
			
		||||
		return nil, errors.New("request is nil")
 | 
			
		||||
	}
 | 
			
		||||
	return ConvertRequest(*request), nil
 | 
			
		||||
}
 | 
			
		||||
 | 
			
		||||
func (a *Adaptor) DoRequest(c *gin.Context, meta *meta.Meta, requestBody io.Reader) (*http.Response, error) {
 | 
			
		||||
	return adaptor.DoRequestHelper(a, c, meta, requestBody)
 | 
			
		||||
}
 | 
			
		||||
 | 
			
		||||
func (a *Adaptor) DoResponse(c *gin.Context, resp *http.Response, meta *meta.Meta) (usage *model.Usage, err *model.ErrorWithStatusCode) {
 | 
			
		||||
	if meta.IsStream {
 | 
			
		||||
		err, usage = StreamHandler(c, resp, meta.PromptTokens, meta.ActualModelName)
 | 
			
		||||
	} else {
 | 
			
		||||
		err, usage = Handler(c, resp, meta.PromptTokens, meta.ActualModelName)
 | 
			
		||||
	}
 | 
			
		||||
	return
 | 
			
		||||
}
 | 
			
		||||
 | 
			
		||||
func (a *Adaptor) GetModelList() []string {
 | 
			
		||||
	return ModelList
 | 
			
		||||
}
 | 
			
		||||
 | 
			
		||||
func (a *Adaptor) GetChannelName() string {
 | 
			
		||||
	return "cloudflare"
 | 
			
		||||
}

36	relay/adaptor/cloudflare/constant.go (new file)

@@ -0,0 +1,36 @@
package cloudflare
 | 
			
		||||
 | 
			
		||||
var ModelList = []string{
 | 
			
		||||
	"@cf/meta/llama-2-7b-chat-fp16",
 | 
			
		||||
	"@cf/meta/llama-2-7b-chat-int8",
 | 
			
		||||
	"@cf/mistral/mistral-7b-instruct-v0.1",
 | 
			
		||||
	"@hf/thebloke/deepseek-coder-6.7b-base-awq",
 | 
			
		||||
	"@hf/thebloke/deepseek-coder-6.7b-instruct-awq",
 | 
			
		||||
	"@cf/deepseek-ai/deepseek-math-7b-base",
 | 
			
		||||
	"@cf/deepseek-ai/deepseek-math-7b-instruct",
 | 
			
		||||
	"@cf/thebloke/discolm-german-7b-v1-awq",
 | 
			
		||||
	"@cf/tiiuae/falcon-7b-instruct",
 | 
			
		||||
	"@cf/google/gemma-2b-it-lora",
 | 
			
		||||
	"@hf/google/gemma-7b-it",
 | 
			
		||||
	"@cf/google/gemma-7b-it-lora",
 | 
			
		||||
	"@hf/nousresearch/hermes-2-pro-mistral-7b",
 | 
			
		||||
	"@hf/thebloke/llama-2-13b-chat-awq",
 | 
			
		||||
	"@cf/meta-llama/llama-2-7b-chat-hf-lora",
 | 
			
		||||
	"@cf/meta/llama-3-8b-instruct",
 | 
			
		||||
	"@hf/thebloke/llamaguard-7b-awq",
 | 
			
		||||
	"@hf/thebloke/mistral-7b-instruct-v0.1-awq",
 | 
			
		||||
	"@hf/mistralai/mistral-7b-instruct-v0.2",
 | 
			
		||||
	"@cf/mistral/mistral-7b-instruct-v0.2-lora",
 | 
			
		||||
	"@hf/thebloke/neural-chat-7b-v3-1-awq",
 | 
			
		||||
	"@cf/openchat/openchat-3.5-0106",
 | 
			
		||||
	"@hf/thebloke/openhermes-2.5-mistral-7b-awq",
 | 
			
		||||
	"@cf/microsoft/phi-2",
 | 
			
		||||
	"@cf/qwen/qwen1.5-0.5b-chat",
 | 
			
		||||
	"@cf/qwen/qwen1.5-1.8b-chat",
 | 
			
		||||
	"@cf/qwen/qwen1.5-14b-chat-awq",
 | 
			
		||||
	"@cf/qwen/qwen1.5-7b-chat-awq",
 | 
			
		||||
	"@cf/defog/sqlcoder-7b-2",
 | 
			
		||||
	"@hf/nexusflow/starling-lm-7b-beta",
 | 
			
		||||
	"@cf/tinyllama/tinyllama-1.1b-chat-v1.0",
 | 
			
		||||
	"@hf/thebloke/zephyr-7b-beta-awq",
 | 
			
		||||
}

158	relay/adaptor/cloudflare/main.go (new file)

@@ -0,0 +1,158 @@
package cloudflare
 | 
			
		||||
 | 
			
		||||
import (
 | 
			
		||||
	"bufio"
 | 
			
		||||
	"bytes"
 | 
			
		||||
	"encoding/json"
 | 
			
		||||
	"io"
 | 
			
		||||
	"net/http"
 | 
			
		||||
	"strings"
 | 
			
		||||
 | 
			
		||||
	"github.com/gin-gonic/gin"
 | 
			
		||||
	"github.com/songquanpeng/one-api/common"
 | 
			
		||||
	"github.com/songquanpeng/one-api/common/helper"
 | 
			
		||||
	"github.com/songquanpeng/one-api/common/logger"
 | 
			
		||||
	"github.com/songquanpeng/one-api/relay/adaptor/openai"
 | 
			
		||||
	"github.com/songquanpeng/one-api/relay/model"
 | 
			
		||||
)
 | 
			
		||||
 | 
			
		||||
func ConvertRequest(textRequest model.GeneralOpenAIRequest) *Request {
	var promptBuilder strings.Builder
	for _, message := range textRequest.Messages {
		promptBuilder.WriteString(message.StringContent())
		promptBuilder.WriteString("\n") // add a newline to separate each message
	}

	return &Request{
		MaxTokens:   textRequest.MaxTokens,
		Prompt:      promptBuilder.String(),
		Stream:      textRequest.Stream,
		Temperature: textRequest.Temperature,
	}
}
 | 
			
		||||
 | 
			
		||||
 | 
			
		||||
func ResponseCloudflare2OpenAI(cloudflareResponse *Response) *openai.TextResponse {
 | 
			
		||||
	choice := openai.TextResponseChoice{
 | 
			
		||||
		Index: 0,
 | 
			
		||||
		Message: model.Message{
 | 
			
		||||
			Role:    "assistant",
 | 
			
		||||
			Content: cloudflareResponse.Result.Response,
 | 
			
		||||
		},
 | 
			
		||||
		FinishReason: "stop",
 | 
			
		||||
	}
 | 
			
		||||
	fullTextResponse := openai.TextResponse{
 | 
			
		||||
		Object:  "chat.completion",
 | 
			
		||||
		Created: helper.GetTimestamp(),
 | 
			
		||||
		Choices: []openai.TextResponseChoice{choice},
 | 
			
		||||
	}
 | 
			
		||||
	return &fullTextResponse
 | 
			
		||||
}
 | 
			
		||||
 | 
			
		||||
func StreamResponseCloudflare2OpenAI(cloudflareResponse *StreamResponse) *openai.ChatCompletionsStreamResponse {
 | 
			
		||||
	var choice openai.ChatCompletionsStreamResponseChoice
 | 
			
		||||
	choice.Delta.Content = cloudflareResponse.Response
 | 
			
		||||
	choice.Delta.Role = "assistant"
 | 
			
		||||
	openaiResponse := openai.ChatCompletionsStreamResponse{
 | 
			
		||||
		Object:  "chat.completion.chunk",
 | 
			
		||||
		Choices: []openai.ChatCompletionsStreamResponseChoice{choice},
 | 
			
		||||
		Created: helper.GetTimestamp(),
 | 
			
		||||
	}
 | 
			
		||||
	return &openaiResponse
 | 
			
		||||
}
 | 
			
		||||
 | 
			
		||||
func StreamHandler(c *gin.Context, resp *http.Response, promptTokens int, modelName string) (*model.ErrorWithStatusCode, *model.Usage) {
 | 
			
		||||
	scanner := bufio.NewScanner(resp.Body)
 | 
			
		||||
	scanner.Split(func(data []byte, atEOF bool) (advance int, token []byte, err error) {
 | 
			
		||||
		if atEOF && len(data) == 0 {
 | 
			
		||||
			return 0, nil, nil
 | 
			
		||||
		}
 | 
			
		||||
		if i := bytes.IndexByte(data, '\n'); i >= 0 {
 | 
			
		||||
			return i + 1, data[0:i], nil
 | 
			
		||||
		}
 | 
			
		||||
		if atEOF {
 | 
			
		||||
			return len(data), data, nil
 | 
			
		||||
		}
 | 
			
		||||
		return 0, nil, nil
 | 
			
		||||
	})
 | 
			
		||||
 | 
			
		||||
	dataChan := make(chan string)
 | 
			
		||||
	stopChan := make(chan bool)
 | 
			
		||||
	go func() {
 | 
			
		||||
		for scanner.Scan() {
 | 
			
		||||
			data := scanner.Text()
 | 
			
		||||
			if len(data) < len("data: ") {
 | 
			
		||||
				continue
 | 
			
		||||
			}
 | 
			
		||||
			data = strings.TrimPrefix(data, "data: ")
 | 
			
		||||
			dataChan <- data
 | 
			
		||||
		}
 | 
			
		||||
		stopChan <- true
 | 
			
		||||
	}()
 | 
			
		||||
	common.SetEventStreamHeaders(c)
 | 
			
		||||
	id := helper.GetResponseID(c)
 | 
			
		||||
	responseModel := c.GetString("original_model")
 | 
			
		||||
	var responseText string
 | 
			
		||||
	c.Stream(func(w io.Writer) bool {
 | 
			
		||||
		select {
 | 
			
		||||
		case data := <-dataChan:
 | 
			
		||||
			// some implementations may add \r at the end of data
 | 
			
		||||
			data = strings.TrimSuffix(data, "\r")
 | 
			
		||||
			var cloudflareResponse StreamResponse
 | 
			
		||||
			err := json.Unmarshal([]byte(data), &cloudflareResponse)
 | 
			
		||||
			if err != nil {
 | 
			
		||||
				logger.SysError("error unmarshalling stream response: " + err.Error())
 | 
			
		||||
				return true
 | 
			
		||||
			}
 | 
			
		||||
			response := StreamResponseCloudflare2OpenAI(&cloudflareResponse)
 | 
			
		||||
			if response == nil {
 | 
			
		||||
				return true
 | 
			
		||||
			}
 | 
			
		||||
			responseText += cloudflareResponse.Response
 | 
			
		||||
			response.Id = id
 | 
			
		||||
			response.Model = responseModel
 | 
			
		||||
			jsonStr, err := json.Marshal(response)
 | 
			
		||||
			if err != nil {
 | 
			
		||||
				logger.SysError("error marshalling stream response: " + err.Error())
 | 
			
		||||
				return true
 | 
			
		||||
			}
 | 
			
		||||
			c.Render(-1, common.CustomEvent{Data: "data: " + string(jsonStr)})
 | 
			
		||||
			return true
 | 
			
		||||
		case <-stopChan:
 | 
			
		||||
			c.Render(-1, common.CustomEvent{Data: "data: [DONE]"})
 | 
			
		||||
			return false
 | 
			
		||||
		}
 | 
			
		||||
	})
 | 
			
		||||
	_ = resp.Body.Close()
 | 
			
		||||
	usage := openai.ResponseText2Usage(responseText, responseModel, promptTokens)
 | 
			
		||||
	return nil, usage
 | 
			
		||||
}
 | 
			
		||||
 | 
			
		||||
func Handler(c *gin.Context, resp *http.Response, promptTokens int, modelName string) (*model.ErrorWithStatusCode, *model.Usage) {
 | 
			
		||||
	responseBody, err := io.ReadAll(resp.Body)
 | 
			
		||||
	if err != nil {
 | 
			
		||||
		return openai.ErrorWrapper(err, "read_response_body_failed", http.StatusInternalServerError), nil
 | 
			
		||||
	}
 | 
			
		||||
	err = resp.Body.Close()
 | 
			
		||||
	if err != nil {
 | 
			
		||||
		return openai.ErrorWrapper(err, "close_response_body_failed", http.StatusInternalServerError), nil
 | 
			
		||||
	}
 | 
			
		||||
	var cloudflareResponse Response
 | 
			
		||||
	err = json.Unmarshal(responseBody, &cloudflareResponse)
 | 
			
		||||
	if err != nil {
 | 
			
		||||
		return openai.ErrorWrapper(err, "unmarshal_response_body_failed", http.StatusInternalServerError), nil
 | 
			
		||||
	}
 | 
			
		||||
	fullTextResponse := ResponseCloudflare2OpenAI(&cloudflareResponse)
 | 
			
		||||
	fullTextResponse.Model = modelName
 | 
			
		||||
	usage := openai.ResponseText2Usage(cloudflareResponse.Result.Response, modelName, promptTokens)
 | 
			
		||||
	fullTextResponse.Usage = *usage
 | 
			
		||||
	fullTextResponse.Id = helper.GetResponseID(c)
 | 
			
		||||
	jsonResponse, err := json.Marshal(fullTextResponse)
 | 
			
		||||
	if err != nil {
 | 
			
		||||
		return openai.ErrorWrapper(err, "marshal_response_body_failed", http.StatusInternalServerError), nil
 | 
			
		||||
	}
 | 
			
		||||
	c.Writer.Header().Set("Content-Type", "application/json")
 | 
			
		||||
	c.Writer.WriteHeader(resp.StatusCode)
 | 
			
		||||
	_, err = c.Writer.Write(jsonResponse)
 | 
			
		||||
	return nil, usage
 | 
			
		||||
}

25	relay/adaptor/cloudflare/model.go (new file)

@@ -0,0 +1,25 @@
package cloudflare

type Request struct {
	Lora        string  `json:"lora,omitempty"`
	MaxTokens   int     `json:"max_tokens,omitempty"`
	Prompt      string  `json:"prompt,omitempty"`
	Raw         bool    `json:"raw,omitempty"`
	Stream      bool    `json:"stream,omitempty"`
	Temperature float64 `json:"temperature,omitempty"`
}

type Result struct {
	Response string `json:"response"`
}

type Response struct {
	Result   Result   `json:"result"`
	Success  bool     `json:"success"`
	Errors   []string `json:"errors"`
	Messages []string `json:"messages"`
}

type StreamResponse struct {
	Response string `json:"response"`
}
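For reference, the Request struct above serializes into the JSON body that ConvertRequest sends to the Workers AI run endpoint. A standalone sketch of that payload; field values are made up and the struct is copied here only so the snippet compiles on its own.

```go
package main

import (
	"encoding/json"
	"fmt"
)

// Request mirrors relay/adaptor/cloudflare/model.go above.
type Request struct {
	Lora        string  `json:"lora,omitempty"`
	MaxTokens   int     `json:"max_tokens,omitempty"`
	Prompt      string  `json:"prompt,omitempty"`
	Raw         bool    `json:"raw,omitempty"`
	Stream      bool    `json:"stream,omitempty"`
	Temperature float64 `json:"temperature,omitempty"`
}

func main() {
	req := Request{
		MaxTokens:   256,
		Prompt:      "You are a helpful assistant.\nHello!\n",
		Stream:      true,
		Temperature: 0.7,
	}
	body, _ := json.Marshal(req)
	fmt.Println(string(body))
	// {"max_tokens":256,"prompt":"You are a helpful assistant.\nHello!\n","stream":true,"temperature":0.7}
}
```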

64	relay/adaptor/cohere/adaptor.go (new file)

@@ -0,0 +1,64 @@
package cohere
 | 
			
		||||
 | 
			
		||||
import (
 | 
			
		||||
	"errors"
 | 
			
		||||
	"fmt"
 | 
			
		||||
	"io"
 | 
			
		||||
	"net/http"
 | 
			
		||||
 | 
			
		||||
	"github.com/gin-gonic/gin"
 | 
			
		||||
	"github.com/songquanpeng/one-api/relay/adaptor"
 | 
			
		||||
	"github.com/songquanpeng/one-api/relay/meta"
 | 
			
		||||
	"github.com/songquanpeng/one-api/relay/model"
 | 
			
		||||
)
 | 
			
		||||
 | 
			
		||||
type Adaptor struct{}
 | 
			
		||||
 | 
			
		||||
// ConvertImageRequest implements adaptor.Adaptor.
 | 
			
		||||
func (*Adaptor) ConvertImageRequest(request *model.ImageRequest) (any, error) {
 | 
			
		||||
	return nil, errors.New("not implemented")
 | 
			
		||||
}
 | 
			
		||||
 | 
			
		||||
// ConvertImageRequest implements adaptor.Adaptor.
 | 
			
		||||
 | 
			
		||||
func (a *Adaptor) Init(meta *meta.Meta) {
 | 
			
		||||
 | 
			
		||||
}
 | 
			
		||||
 | 
			
		||||
func (a *Adaptor) GetRequestURL(meta *meta.Meta) (string, error) {
 | 
			
		||||
	return fmt.Sprintf("%s/v1/chat", meta.BaseURL), nil
 | 
			
		||||
}
 | 
			
		||||
 | 
			
		||||
func (a *Adaptor) SetupRequestHeader(c *gin.Context, req *http.Request, meta *meta.Meta) error {
 | 
			
		||||
	adaptor.SetupCommonRequestHeader(c, req, meta)
 | 
			
		||||
	req.Header.Set("Authorization", "Bearer "+meta.APIKey)
 | 
			
		||||
	return nil
 | 
			
		||||
}
 | 
			
		||||
 | 
			
		||||
func (a *Adaptor) ConvertRequest(c *gin.Context, relayMode int, request *model.GeneralOpenAIRequest) (any, error) {
 | 
			
		||||
	if request == nil {
 | 
			
		||||
		return nil, errors.New("request is nil")
 | 
			
		||||
	}
 | 
			
		||||
	return ConvertRequest(*request), nil
 | 
			
		||||
}
 | 
			
		||||
 | 
			
		||||
func (a *Adaptor) DoRequest(c *gin.Context, meta *meta.Meta, requestBody io.Reader) (*http.Response, error) {
 | 
			
		||||
	return adaptor.DoRequestHelper(a, c, meta, requestBody)
 | 
			
		||||
}
 | 
			
		||||
 | 
			
		||||
func (a *Adaptor) DoResponse(c *gin.Context, resp *http.Response, meta *meta.Meta) (usage *model.Usage, err *model.ErrorWithStatusCode) {
 | 
			
		||||
	if meta.IsStream {
 | 
			
		||||
		err, usage = StreamHandler(c, resp)
 | 
			
		||||
	} else {
 | 
			
		||||
		err, usage = Handler(c, resp, meta.PromptTokens, meta.ActualModelName)
 | 
			
		||||
	}
 | 
			
		||||
	return
 | 
			
		||||
}
 | 
			
		||||
 | 
			
		||||
func (a *Adaptor) GetModelList() []string {
 | 
			
		||||
	return ModelList
 | 
			
		||||
}
 | 
			
		||||
 | 
			
		||||
func (a *Adaptor) GetChannelName() string {
 | 
			
		||||
	return "Cohere"
 | 
			
		||||
}

14	relay/adaptor/cohere/constant.go (new file)

@@ -0,0 +1,14 @@
package cohere

var ModelList = []string{
	"command", "command-nightly",
	"command-light", "command-light-nightly",
	"command-r", "command-r-plus",
}

func init() {
	num := len(ModelList)
	for i := 0; i < num; i++ {
		ModelList = append(ModelList, ModelList[i]+"-internet")
	}
}
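The init function above doubles the list at startup, appending an "-internet" variant for each base model; ConvertRequest later strips that suffix and attaches the web-search connector instead. A tiny sketch of the resulting list, using the same doubling logic on a local copy:

```go
package main

import "fmt"

func main() {
	// Same doubling logic as init() in relay/adaptor/cohere/constant.go.
	models := []string{
		"command", "command-nightly",
		"command-light", "command-light-nightly",
		"command-r", "command-r-plus",
	}
	num := len(models)
	for i := 0; i < num; i++ {
		models = append(models, models[i]+"-internet")
	}
	fmt.Println(len(models)) // 12
	fmt.Println(models[6:])  // [command-internet command-nightly-internet ...]
}
```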

241	relay/adaptor/cohere/main.go (new file)

@@ -0,0 +1,241 @@
package cohere
 | 
			
		||||
 | 
			
		||||
import (
 | 
			
		||||
	"bufio"
 | 
			
		||||
	"bytes"
 | 
			
		||||
	"encoding/json"
 | 
			
		||||
	"fmt"
 | 
			
		||||
	"io"
 | 
			
		||||
	"net/http"
 | 
			
		||||
	"strings"
 | 
			
		||||
 | 
			
		||||
	"github.com/gin-gonic/gin"
 | 
			
		||||
	"github.com/songquanpeng/one-api/common"
 | 
			
		||||
	"github.com/songquanpeng/one-api/common/helper"
 | 
			
		||||
	"github.com/songquanpeng/one-api/common/logger"
 | 
			
		||||
	"github.com/songquanpeng/one-api/relay/adaptor/openai"
 | 
			
		||||
	"github.com/songquanpeng/one-api/relay/model"
 | 
			
		||||
)
 | 
			
		||||
 | 
			
		||||
var (
 | 
			
		||||
	WebSearchConnector = Connector{ID: "web-search"}
 | 
			
		||||
)
 | 
			
		||||
 | 
			
		||||
func stopReasonCohere2OpenAI(reason *string) string {
 | 
			
		||||
	if reason == nil {
 | 
			
		||||
		return ""
 | 
			
		||||
	}
 | 
			
		||||
	switch *reason {
 | 
			
		||||
	case "COMPLETE":
 | 
			
		||||
		return "stop"
 | 
			
		||||
	default:
 | 
			
		||||
		return *reason
 | 
			
		||||
	}
 | 
			
		||||
}
 | 
			
		||||
 | 
			
		||||
func ConvertRequest(textRequest model.GeneralOpenAIRequest) *Request {
 | 
			
		||||
	cohereRequest := Request{
 | 
			
		||||
		Model:            textRequest.Model,
 | 
			
		||||
		Message:          "",
 | 
			
		||||
		MaxTokens:        textRequest.MaxTokens,
 | 
			
		||||
		Temperature:      textRequest.Temperature,
 | 
			
		||||
		P:                textRequest.TopP,
 | 
			
		||||
		K:                textRequest.TopK,
 | 
			
		||||
		Stream:           textRequest.Stream,
 | 
			
		||||
		FrequencyPenalty: textRequest.FrequencyPenalty,
 | 
			
		||||
		PresencePenalty:  textRequest.FrequencyPenalty,
 | 
			
		||||
		Seed:             int(textRequest.Seed),
 | 
			
		||||
	}
 | 
			
		||||
	if cohereRequest.Model == "" {
 | 
			
		||||
		cohereRequest.Model = "command-r"
 | 
			
		||||
	}
 | 
			
		||||
	if strings.HasSuffix(cohereRequest.Model, "-internet") {
 | 
			
		||||
		cohereRequest.Model = strings.TrimSuffix(cohereRequest.Model, "-internet")
 | 
			
		||||
		cohereRequest.Connectors = append(cohereRequest.Connectors, WebSearchConnector)
 | 
			
		||||
	}
 | 
			
		||||
	for _, message := range textRequest.Messages {
 | 
			
		||||
		if message.Role == "user" {
 | 
			
		||||
			cohereRequest.Message = message.Content.(string)
 | 
			
		||||
		} else {
 | 
			
		||||
			var role string
 | 
			
		||||
			if message.Role == "assistant" {
 | 
			
		||||
				role = "CHATBOT"
 | 
			
		||||
			} else if message.Role == "system" {
 | 
			
		||||
				role = "SYSTEM"
 | 
			
		||||
			} else {
 | 
			
		||||
				role = "USER"
 | 
			
		||||
			}
 | 
			
		||||
			cohereRequest.ChatHistory = append(cohereRequest.ChatHistory, ChatMessage{
 | 
			
		||||
				Role:    role,
 | 
			
		||||
				Message: message.Content.(string),
 | 
			
		||||
			})
 | 
			
		||||
		}
 | 
			
		||||
	}
 | 
			
		||||
	return &cohereRequest
 | 
			
		||||
}
 | 
			
		||||
 | 
			
		||||
func StreamResponseCohere2OpenAI(cohereResponse *StreamResponse) (*openai.ChatCompletionsStreamResponse, *Response) {
 | 
			
		||||
	var response *Response
 | 
			
		||||
	var responseText string
 | 
			
		||||
	var finishReason string
 | 
			
		||||
 | 
			
		||||
	switch cohereResponse.EventType {
 | 
			
		||||
	case "stream-start":
 | 
			
		||||
		return nil, nil
 | 
			
		||||
	case "text-generation":
 | 
			
		||||
		responseText += cohereResponse.Text
 | 
			
		||||
	case "stream-end":
 | 
			
		||||
		usage := cohereResponse.Response.Meta.Tokens
 | 
			
		||||
		response = &Response{
 | 
			
		||||
			Meta: Meta{
 | 
			
		||||
				Tokens: Usage{
 | 
			
		||||
					InputTokens:  usage.InputTokens,
 | 
			
		||||
					OutputTokens: usage.OutputTokens,
 | 
			
		||||
				},
 | 
			
		||||
			},
 | 
			
		||||
		}
 | 
			
		||||
		finishReason = *cohereResponse.Response.FinishReason
 | 
			
		||||
	default:
 | 
			
		||||
		return nil, nil
 | 
			
		||||
	}
 | 
			
		||||
 | 
			
		||||
	var choice openai.ChatCompletionsStreamResponseChoice
 | 
			
		||||
	choice.Delta.Content = responseText
 | 
			
		||||
	choice.Delta.Role = "assistant"
 | 
			
		||||
	if finishReason != "" {
 | 
			
		||||
		choice.FinishReason = &finishReason
 | 
			
		||||
	}
 | 
			
		||||
	var openaiResponse openai.ChatCompletionsStreamResponse
 | 
			
		||||
	openaiResponse.Object = "chat.completion.chunk"
 | 
			
		||||
	openaiResponse.Choices = []openai.ChatCompletionsStreamResponseChoice{choice}
 | 
			
		||||
	return &openaiResponse, response
 | 
			
		||||
}
 | 
			
		||||
 | 
			
		||||
func ResponseCohere2OpenAI(cohereResponse *Response) *openai.TextResponse {
 | 
			
		||||
	choice := openai.TextResponseChoice{
 | 
			
		||||
		Index: 0,
 | 
			
		||||
		Message: model.Message{
 | 
			
		||||
			Role:    "assistant",
 | 
			
		||||
			Content: cohereResponse.Text,
 | 
			
		||||
			Name:    nil,
 | 
			
		||||
		},
 | 
			
		||||
		FinishReason: stopReasonCohere2OpenAI(cohereResponse.FinishReason),
 | 
			
		||||
	}
 | 
			
		||||
	fullTextResponse := openai.TextResponse{
 | 
			
		||||
		Id:      fmt.Sprintf("chatcmpl-%s", cohereResponse.ResponseID),
 | 
			
		||||
		Model:   "model",
 | 
			
		||||
		Object:  "chat.completion",
 | 
			
		||||
		Created: helper.GetTimestamp(),
 | 
			
		||||
		Choices: []openai.TextResponseChoice{choice},
 | 
			
		||||
	}
 | 
			
		||||
	return &fullTextResponse
 | 
			
		||||
}
 | 
			
		||||
 | 
			
		||||
func StreamHandler(c *gin.Context, resp *http.Response) (*model.ErrorWithStatusCode, *model.Usage) {
 | 
			
		||||
	createdTime := helper.GetTimestamp()
 | 
			
		||||
	scanner := bufio.NewScanner(resp.Body)
 | 
			
		||||
	scanner.Split(func(data []byte, atEOF bool) (advance int, token []byte, err error) {
 | 
			
		||||
		if atEOF && len(data) == 0 {
 | 
			
		||||
			return 0, nil, nil
 | 
			
		||||
		}
 | 
			
		||||
		if i := bytes.IndexByte(data, '\n'); i >= 0 {
 | 
			
		||||
			return i + 1, data[0:i], nil
 | 
			
		||||
		}
 | 
			
		||||
		if atEOF {
 | 
			
		||||
			return len(data), data, nil
 | 
			
		||||
		}
 | 
			
		||||
		return 0, nil, nil
 | 
			
		||||
	})
 | 
			
		||||
 | 
			
		||||
	dataChan := make(chan string)
 | 
			
		||||
	stopChan := make(chan bool)
 | 
			
		||||
	go func() {
 | 
			
		||||
		for scanner.Scan() {
 | 
			
		||||
			data := scanner.Text()
 | 
			
		||||
			dataChan <- data
 | 
			
		||||
		}
 | 
			
		||||
		stopChan <- true
 | 
			
		||||
	}()
 | 
			
		||||
	common.SetEventStreamHeaders(c)
 | 
			
		||||
	var usage model.Usage
 | 
			
		||||
	c.Stream(func(w io.Writer) bool {
 | 
			
		||||
		select {
 | 
			
		||||
		case data := <-dataChan:
 | 
			
		||||
			// some implementations may add \r at the end of data
 | 
			
		||||
			data = strings.TrimSuffix(data, "\r")
 | 
			
		||||
			var cohereResponse StreamResponse
 | 
			
		||||
			err := json.Unmarshal([]byte(data), &cohereResponse)
 | 
			
		||||
			if err != nil {
 | 
			
		||||
				logger.SysError("error unmarshalling stream response: " + err.Error())
 | 
			
		||||
				return true
 | 
			
		||||
			}
 | 
			
		||||
			response, meta := StreamResponseCohere2OpenAI(&cohereResponse)
 | 
			
		||||
			if meta != nil {
 | 
			
		||||
				usage.PromptTokens += meta.Meta.Tokens.InputTokens
 | 
			
		||||
				usage.CompletionTokens += meta.Meta.Tokens.OutputTokens
 | 
			
		||||
				return true
 | 
			
		||||
			}
 | 
			
		||||
			if response == nil {
 | 
			
		||||
				return true
 | 
			
		||||
			}
 | 
			
		||||
			response.Id = fmt.Sprintf("chatcmpl-%d", createdTime)
 | 
			
		||||
			response.Model = c.GetString("original_model")
 | 
			
		||||
			response.Created = createdTime
 | 
			
		||||
			jsonStr, err := json.Marshal(response)
 | 
			
		||||
			if err != nil {
 | 
			
		||||
				logger.SysError("error marshalling stream response: " + err.Error())
 | 
			
		||||
				return true
 | 
			
		||||
			}
 | 
			
		||||
			c.Render(-1, common.CustomEvent{Data: "data: " + string(jsonStr)})
 | 
			
		||||
			return true
 | 
			
		||||
		case <-stopChan:
 | 
			
		||||
			c.Render(-1, common.CustomEvent{Data: "data: [DONE]"})
 | 
			
		||||
			return false
 | 
			
		||||
		}
 | 
			
		||||
	})
 | 
			
		||||
	_ = resp.Body.Close()
 | 
			
		||||
	return nil, &usage
 | 
			
		||||
}
 | 
			
		||||
 | 
			
		||||
func Handler(c *gin.Context, resp *http.Response, promptTokens int, modelName string) (*model.ErrorWithStatusCode, *model.Usage) {
 | 
			
		||||
	responseBody, err := io.ReadAll(resp.Body)
 | 
			
		||||
	if err != nil {
 | 
			
		||||
		return openai.ErrorWrapper(err, "read_response_body_failed", http.StatusInternalServerError), nil
 | 
			
		||||
	}
 | 
			
		||||
	err = resp.Body.Close()
 | 
			
		||||
	if err != nil {
 | 
			
		||||
		return openai.ErrorWrapper(err, "close_response_body_failed", http.StatusInternalServerError), nil
 | 
			
		||||
	}
 | 
			
		||||
	var cohereResponse Response
 | 
			
		||||
	err = json.Unmarshal(responseBody, &cohereResponse)
 | 
			
		||||
	if err != nil {
 | 
			
		||||
		return openai.ErrorWrapper(err, "unmarshal_response_body_failed", http.StatusInternalServerError), nil
 | 
			
		||||
	}
 | 
			
		||||
	if cohereResponse.ResponseID == "" {
 | 
			
		||||
		return &model.ErrorWithStatusCode{
 | 
			
		||||
			Error: model.Error{
 | 
			
		||||
				Message: cohereResponse.Message,
 | 
			
		||||
				Type:    cohereResponse.Message,
 | 
			
		||||
				Param:   "",
 | 
			
		||||
				Code:    resp.StatusCode,
 | 
			
		||||
			},
 | 
			
		||||
			StatusCode: resp.StatusCode,
 | 
			
		||||
		}, nil
 | 
			
		||||
	}
 | 
			
		||||
	fullTextResponse := ResponseCohere2OpenAI(&cohereResponse)
 | 
			
		||||
	fullTextResponse.Model = modelName
 | 
			
		||||
	usage := model.Usage{
 | 
			
		||||
		PromptTokens:     cohereResponse.Meta.Tokens.InputTokens,
 | 
			
		||||
		CompletionTokens: cohereResponse.Meta.Tokens.OutputTokens,
 | 
			
		||||
		TotalTokens:      cohereResponse.Meta.Tokens.InputTokens + cohereResponse.Meta.Tokens.OutputTokens,
 | 
			
		||||
	}
 | 
			
		||||
	fullTextResponse.Usage = usage
 | 
			
		||||
	jsonResponse, err := json.Marshal(fullTextResponse)
 | 
			
		||||
	if err != nil {
 | 
			
		||||
		return openai.ErrorWrapper(err, "marshal_response_body_failed", http.StatusInternalServerError), nil
 | 
			
		||||
	}
 | 
			
		||||
	c.Writer.Header().Set("Content-Type", "application/json")
 | 
			
		||||
	c.Writer.WriteHeader(resp.StatusCode)
 | 
			
		||||
	_, err = c.Writer.Write(jsonResponse)
 | 
			
		||||
	return nil, &usage
 | 
			
		||||
}
 | 
			
		||||
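For reference, a minimal sketch of how the converter above is driven. The request literal is hypothetical; the types, functions, and import paths all come from the files in this diff. A model name ending in "-internet" is trimmed and the web-search connector is attached.

package main

import (
	"fmt"

	"github.com/songquanpeng/one-api/relay/adaptor/cohere"
	"github.com/songquanpeng/one-api/relay/model"
)

func main() {
	// Hypothetical OpenAI-style request; ConvertRequest maps it onto the Cohere schema.
	req := cohere.ConvertRequest(model.GeneralOpenAIRequest{
		Model: "command-r-internet",
		Messages: []model.Message{
			{Role: "system", Content: "You are a helpful assistant."},
			{Role: "user", Content: "Summarize today's news."},
		},
	})
	fmt.Println(req.Model)      // command-r ("-internet" suffix trimmed)
	fmt.Println(req.Connectors) // [{web-search}]
	fmt.Println(req.Message)    // the last user message becomes the prompt; others go to ChatHistory
}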
							
								
								
									
147  relay/adaptor/cohere/model.go  Normal file
@@ -0,0 +1,147 @@
package cohere

type Request struct {
	Message          string        `json:"message" required:"true"`
	Model            string        `json:"model,omitempty"`  // default: "command-r"
	Stream           bool          `json:"stream,omitempty"` // default: false
	Preamble         string        `json:"preamble,omitempty"`
	ChatHistory      []ChatMessage `json:"chat_history,omitempty"`
	ConversationID   string        `json:"conversation_id,omitempty"`
	PromptTruncation string        `json:"prompt_truncation,omitempty"` // default: "AUTO"
	Connectors       []Connector   `json:"connectors,omitempty"`
	Documents        []Document    `json:"documents,omitempty"`
	Temperature      float64       `json:"temperature,omitempty"` // default: 0.3
	MaxTokens        int           `json:"max_tokens,omitempty"`
	MaxInputTokens   int           `json:"max_input_tokens,omitempty"`
	K                int           `json:"k,omitempty"` // default: 0
	P                float64       `json:"p,omitempty"` // default: 0.75
	Seed             int           `json:"seed,omitempty"`
	StopSequences    []string      `json:"stop_sequences,omitempty"`
	FrequencyPenalty float64       `json:"frequency_penalty,omitempty"` // default: 0.0
	PresencePenalty  float64       `json:"presence_penalty,omitempty"`  // default: 0.0
	Tools            []Tool        `json:"tools,omitempty"`
	ToolResults      []ToolResult  `json:"tool_results,omitempty"`
}

type ChatMessage struct {
	Role    string `json:"role" required:"true"`
	Message string `json:"message" required:"true"`
}

type Tool struct {
	Name                 string                   `json:"name" required:"true"`
	Description          string                   `json:"description" required:"true"`
	ParameterDefinitions map[string]ParameterSpec `json:"parameter_definitions"`
}

type ParameterSpec struct {
	Description string `json:"description"`
	Type        string `json:"type" required:"true"`
	Required    bool   `json:"required"`
}

type ToolResult struct {
	Call    ToolCall                 `json:"call"`
	Outputs []map[string]interface{} `json:"outputs"`
}

type ToolCall struct {
	Name       string                 `json:"name" required:"true"`
	Parameters map[string]interface{} `json:"parameters" required:"true"`
}

type StreamResponse struct {
	IsFinished    bool            `json:"is_finished"`
	EventType     string          `json:"event_type"`
	GenerationID  string          `json:"generation_id,omitempty"`
	SearchQueries []*SearchQuery  `json:"search_queries,omitempty"`
	SearchResults []*SearchResult `json:"search_results,omitempty"`
	Documents     []*Document     `json:"documents,omitempty"`
	Text          string          `json:"text,omitempty"`
	Citations     []*Citation     `json:"citations,omitempty"`
	Response      *Response       `json:"response,omitempty"`
	FinishReason  string          `json:"finish_reason,omitempty"`
}

type SearchQuery struct {
	Text         string `json:"text"`
	GenerationID string `json:"generation_id"`
}

type SearchResult struct {
	SearchQuery *SearchQuery `json:"search_query"`
	DocumentIDs []string     `json:"document_ids"`
	Connector   *Connector   `json:"connector"`
}

type Connector struct {
	ID string `json:"id"`
}

type Document struct {
	ID        string `json:"id"`
	Snippet   string `json:"snippet"`
	Timestamp string `json:"timestamp"`
	Title     string `json:"title"`
	URL       string `json:"url"`
}

type Citation struct {
	Start       int      `json:"start"`
	End         int      `json:"end"`
	Text        string   `json:"text"`
	DocumentIDs []string `json:"document_ids"`
}

type Response struct {
	ResponseID    string          `json:"response_id"`
	Text          string          `json:"text"`
	GenerationID  string          `json:"generation_id"`
	ChatHistory   []*Message      `json:"chat_history"`
	FinishReason  *string         `json:"finish_reason"`
	Meta          Meta            `json:"meta"`
	Citations     []*Citation     `json:"citations"`
	Documents     []*Document     `json:"documents"`
	SearchResults []*SearchResult `json:"search_results"`
	SearchQueries []*SearchQuery  `json:"search_queries"`
	Message       string          `json:"message"`
}

type Message struct {
	Role    string `json:"role"`
	Message string `json:"message"`
}

type Version struct {
	Version string `json:"version"`
}

type Units struct {
	InputTokens  int `json:"input_tokens"`
	OutputTokens int `json:"output_tokens"`
}

type ChatEntry struct {
	Role    string `json:"role"`
	Message string `json:"message"`
}

type Meta struct {
	APIVersion  APIVersion  `json:"api_version"`
	BilledUnits BilledUnits `json:"billed_units"`
	Tokens      Usage       `json:"tokens"`
}

type APIVersion struct {
	Version string `json:"version"`
}

type BilledUnits struct {
	InputTokens  int `json:"input_tokens"`
	OutputTokens int `json:"output_tokens"`
}

type Usage struct {
	InputTokens  int `json:"input_tokens"`
	OutputTokens int `json:"output_tokens"`
}
@@ -4,7 +4,7 @@ import (
	"errors"
	"fmt"
	"github.com/gin-gonic/gin"
	"github.com/songquanpeng/one-api/relay/client"
	"github.com/songquanpeng/one-api/common/client"
	"github.com/songquanpeng/one-api/relay/meta"
	"io"
	"net/http"
							
								
								
									
75  relay/adaptor/coze/adaptor.go  Normal file
@@ -0,0 +1,75 @@
package coze

import (
	"errors"
	"fmt"
	"github.com/gin-gonic/gin"
	"github.com/songquanpeng/one-api/relay/adaptor"
	"github.com/songquanpeng/one-api/relay/adaptor/openai"
	"github.com/songquanpeng/one-api/relay/meta"
	"github.com/songquanpeng/one-api/relay/model"
	"io"
	"net/http"
)

type Adaptor struct {
	meta *meta.Meta
}

func (a *Adaptor) Init(meta *meta.Meta) {
	a.meta = meta
}

func (a *Adaptor) GetRequestURL(meta *meta.Meta) (string, error) {
	return fmt.Sprintf("%s/open_api/v2/chat", meta.BaseURL), nil
}

func (a *Adaptor) SetupRequestHeader(c *gin.Context, req *http.Request, meta *meta.Meta) error {
	adaptor.SetupCommonRequestHeader(c, req, meta)
	req.Header.Set("Authorization", "Bearer "+meta.APIKey)
	return nil
}

func (a *Adaptor) ConvertRequest(c *gin.Context, relayMode int, request *model.GeneralOpenAIRequest) (any, error) {
	if request == nil {
		return nil, errors.New("request is nil")
	}
	request.User = a.meta.Config.UserID
	return ConvertRequest(*request), nil
}

func (a *Adaptor) ConvertImageRequest(request *model.ImageRequest) (any, error) {
	if request == nil {
		return nil, errors.New("request is nil")
	}
	return request, nil
}

func (a *Adaptor) DoRequest(c *gin.Context, meta *meta.Meta, requestBody io.Reader) (*http.Response, error) {
	return adaptor.DoRequestHelper(a, c, meta, requestBody)
}

func (a *Adaptor) DoResponse(c *gin.Context, resp *http.Response, meta *meta.Meta) (usage *model.Usage, err *model.ErrorWithStatusCode) {
	var responseText *string
	if meta.IsStream {
		err, responseText = StreamHandler(c, resp)
	} else {
		err, responseText = Handler(c, resp, meta.PromptTokens, meta.ActualModelName)
	}
	if responseText != nil {
		usage = openai.ResponseText2Usage(*responseText, meta.ActualModelName, meta.PromptTokens)
	} else {
		usage = &model.Usage{}
	}
	usage.PromptTokens = meta.PromptTokens
	usage.TotalTokens = usage.PromptTokens + usage.CompletionTokens
	return
}

func (a *Adaptor) GetModelList() []string {
	return ModelList
}

func (a *Adaptor) GetChannelName() string {
	return "coze"
}
							
								
								
									
5  relay/adaptor/coze/constant/contenttype/define.go  Normal file
@@ -0,0 +1,5 @@
package contenttype

const (
	Text = "text"
)
							
								
								
									
7  relay/adaptor/coze/constant/event/define.go  Normal file
@@ -0,0 +1,7 @@
package event

const (
	Message = "message"
	Done    = "done"
	Error   = "error"
)
							
								
								
									
6  relay/adaptor/coze/constant/messagetype/define.go  Normal file
@@ -0,0 +1,6 @@
package messagetype

const (
	Answer   = "answer"
	FollowUp = "follow_up"
)
							
								
								
									
3  relay/adaptor/coze/constants.go  Normal file
@@ -0,0 +1,3 @@
package coze

var ModelList = []string{}
							
								
								
									
10  relay/adaptor/coze/helper.go  Normal file
@@ -0,0 +1,10 @@
package coze

import "github.com/songquanpeng/one-api/relay/adaptor/coze/constant/event"

func event2StopReason(e *string) string {
	if e == nil || *e == event.Message {
		return ""
	}
	return "stop"
}
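A small test sketch for the helper above (not part of this diff; it only exercises the two branches shown): a nil event or a plain "message" event yields no finish reason, anything else maps to "stop".

package coze

import (
	"testing"

	"github.com/songquanpeng/one-api/relay/adaptor/coze/constant/event"
)

func TestEvent2StopReason(t *testing.T) {
	// nil event: still streaming, no finish reason.
	if got := event2StopReason(nil); got != "" {
		t.Fatalf(`expected "", got %q`, got)
	}
	msg := event.Message
	if got := event2StopReason(&msg); got != "" {
		t.Fatalf(`expected "", got %q`, got)
	}
	// any non-message event (e.g. "done") is treated as a stop.
	done := event.Done
	if got := event2StopReason(&done); got != "stop" {
		t.Fatalf(`expected "stop", got %q`, got)
	}
}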
							
								
								
									
215  relay/adaptor/coze/main.go  Normal file
@@ -0,0 +1,215 @@
package coze

import (
	"bufio"
	"encoding/json"
	"fmt"
	"github.com/gin-gonic/gin"
	"github.com/songquanpeng/one-api/common"
	"github.com/songquanpeng/one-api/common/conv"
	"github.com/songquanpeng/one-api/common/helper"
	"github.com/songquanpeng/one-api/common/logger"
	"github.com/songquanpeng/one-api/relay/adaptor/coze/constant/messagetype"
	"github.com/songquanpeng/one-api/relay/adaptor/openai"
	"github.com/songquanpeng/one-api/relay/model"
	"io"
	"net/http"
	"strings"
)

// https://www.coze.com/open

func stopReasonCoze2OpenAI(reason *string) string {
	if reason == nil {
		return ""
	}
	switch *reason {
	case "end_turn":
		return "stop"
	case "stop_sequence":
		return "stop"
	case "max_tokens":
		return "length"
	default:
		return *reason
	}
}

func ConvertRequest(textRequest model.GeneralOpenAIRequest) *Request {
	cozeRequest := Request{
		Stream: textRequest.Stream,
		User:   textRequest.User,
		BotId:  strings.TrimPrefix(textRequest.Model, "bot-"),
	}
	for i, message := range textRequest.Messages {
		if i == len(textRequest.Messages)-1 {
			cozeRequest.Query = message.StringContent()
			continue
		}
		cozeMessage := Message{
			Role:    message.Role,
			Content: message.StringContent(),
		}
		cozeRequest.ChatHistory = append(cozeRequest.ChatHistory, cozeMessage)
	}
	return &cozeRequest
}

func StreamResponseCoze2OpenAI(cozeResponse *StreamResponse) (*openai.ChatCompletionsStreamResponse, *Response) {
	var response *Response
	var stopReason string
	var choice openai.ChatCompletionsStreamResponseChoice

	if cozeResponse.Message != nil {
		if cozeResponse.Message.Type != messagetype.Answer {
			return nil, nil
		}
		choice.Delta.Content = cozeResponse.Message.Content
	}
	choice.Delta.Role = "assistant"
	finishReason := stopReasonCoze2OpenAI(&stopReason)
	if finishReason != "null" {
		choice.FinishReason = &finishReason
	}
	var openaiResponse openai.ChatCompletionsStreamResponse
	openaiResponse.Object = "chat.completion.chunk"
	openaiResponse.Choices = []openai.ChatCompletionsStreamResponseChoice{choice}
	openaiResponse.Id = cozeResponse.ConversationId
	return &openaiResponse, response
}

func ResponseCoze2OpenAI(cozeResponse *Response) *openai.TextResponse {
	var responseText string
	for _, message := range cozeResponse.Messages {
		if message.Type == messagetype.Answer {
			responseText = message.Content
			break
		}
	}
	choice := openai.TextResponseChoice{
		Index: 0,
		Message: model.Message{
			Role:    "assistant",
			Content: responseText,
			Name:    nil,
		},
		FinishReason: "stop",
	}
	fullTextResponse := openai.TextResponse{
		Id:      fmt.Sprintf("chatcmpl-%s", cozeResponse.ConversationId),
		Model:   "coze-bot",
		Object:  "chat.completion",
		Created: helper.GetTimestamp(),
		Choices: []openai.TextResponseChoice{choice},
	}
	return &fullTextResponse
}

func StreamHandler(c *gin.Context, resp *http.Response) (*model.ErrorWithStatusCode, *string) {
	var responseText string
	createdTime := helper.GetTimestamp()
	scanner := bufio.NewScanner(resp.Body)
	scanner.Split(func(data []byte, atEOF bool) (advance int, token []byte, err error) {
		if atEOF && len(data) == 0 {
			return 0, nil, nil
		}
		if i := strings.Index(string(data), "\n"); i >= 0 {
			return i + 1, data[0:i], nil
		}
		if atEOF {
			return len(data), data, nil
		}
		return 0, nil, nil
	})
	dataChan := make(chan string)
	stopChan := make(chan bool)
	go func() {
		for scanner.Scan() {
			data := scanner.Text()
			if len(data) < 5 {
				continue
			}
			if !strings.HasPrefix(data, "data:") {
				continue
			}
			data = strings.TrimPrefix(data, "data:")
			dataChan <- data
		}
		stopChan <- true
	}()
	common.SetEventStreamHeaders(c)
	var modelName string
	c.Stream(func(w io.Writer) bool {
		select {
		case data := <-dataChan:
			// some implementations may add \r at the end of data
			data = strings.TrimSuffix(data, "\r")
			var cozeResponse StreamResponse
			err := json.Unmarshal([]byte(data), &cozeResponse)
			if err != nil {
				logger.SysError("error unmarshalling stream response: " + err.Error())
				return true
			}
			response, _ := StreamResponseCoze2OpenAI(&cozeResponse)
			if response == nil {
				return true
			}
			for _, choice := range response.Choices {
				responseText += conv.AsString(choice.Delta.Content)
			}
			response.Model = modelName
			response.Created = createdTime
			jsonStr, err := json.Marshal(response)
			if err != nil {
				logger.SysError("error marshalling stream response: " + err.Error())
				return true
			}
			c.Render(-1, common.CustomEvent{Data: "data: " + string(jsonStr)})
			return true
		case <-stopChan:
			c.Render(-1, common.CustomEvent{Data: "data: [DONE]"})
			return false
		}
	})
	_ = resp.Body.Close()
	return nil, &responseText
}

func Handler(c *gin.Context, resp *http.Response, promptTokens int, modelName string) (*model.ErrorWithStatusCode, *string) {
	responseBody, err := io.ReadAll(resp.Body)
	if err != nil {
		return openai.ErrorWrapper(err, "read_response_body_failed", http.StatusInternalServerError), nil
	}
	err = resp.Body.Close()
	if err != nil {
		return openai.ErrorWrapper(err, "close_response_body_failed", http.StatusInternalServerError), nil
	}
	var cozeResponse Response
	err = json.Unmarshal(responseBody, &cozeResponse)
	if err != nil {
		return openai.ErrorWrapper(err, "unmarshal_response_body_failed", http.StatusInternalServerError), nil
	}
	if cozeResponse.Code != 0 {
		return &model.ErrorWithStatusCode{
			Error: model.Error{
				Message: cozeResponse.Msg,
				Code:    cozeResponse.Code,
			},
			StatusCode: resp.StatusCode,
		}, nil
	}
	fullTextResponse := ResponseCoze2OpenAI(&cozeResponse)
	fullTextResponse.Model = modelName
	jsonResponse, err := json.Marshal(fullTextResponse)
	if err != nil {
		return openai.ErrorWrapper(err, "marshal_response_body_failed", http.StatusInternalServerError), nil
	}
	c.Writer.Header().Set("Content-Type", "application/json")
	c.Writer.WriteHeader(resp.StatusCode)
	_, err = c.Writer.Write(jsonResponse)
	var responseText string
	if len(fullTextResponse.Choices) > 0 {
		responseText = fullTextResponse.Choices[0].Message.StringContent()
	}
	return nil, &responseText
}
							
								
								
									
38  relay/adaptor/coze/model.go  Normal file
@@ -0,0 +1,38 @@
package coze

type Message struct {
	Role        string `json:"role"`
	Type        string `json:"type"`
	Content     string `json:"content"`
	ContentType string `json:"content_type"`
}

type ErrorInformation struct {
	Code int    `json:"code"`
	Msg  string `json:"msg"`
}

type Request struct {
	ConversationId string    `json:"conversation_id,omitempty"`
	BotId          string    `json:"bot_id"`
	User           string    `json:"user"`
	Query          string    `json:"query"`
	ChatHistory    []Message `json:"chat_history,omitempty"`
	Stream         bool      `json:"stream"`
}

type Response struct {
	ConversationId string    `json:"conversation_id,omitempty"`
	Messages       []Message `json:"messages,omitempty"`
	Code           int       `json:"code,omitempty"`
	Msg            string    `json:"msg,omitempty"`
}

type StreamResponse struct {
	Event            string            `json:"event,omitempty"`
	Message          *Message          `json:"message,omitempty"`
	IsFinish         bool              `json:"is_finish,omitempty"`
	Index            int               `json:"index,omitempty"`
	ConversationId   string            `json:"conversation_id,omitempty"`
	ErrorInformation *ErrorInformation `json:"error_information,omitempty"`
}
							
								
								
									
73  relay/adaptor/deepl/adaptor.go  Normal file
@@ -0,0 +1,73 @@
package deepl

import (
	"errors"
	"fmt"
	"github.com/gin-gonic/gin"
	"github.com/songquanpeng/one-api/relay/adaptor"
	"github.com/songquanpeng/one-api/relay/meta"
	"github.com/songquanpeng/one-api/relay/model"
	"io"
	"net/http"
)

type Adaptor struct {
	meta       *meta.Meta
	promptText string
}

func (a *Adaptor) Init(meta *meta.Meta) {
	a.meta = meta
}

func (a *Adaptor) GetRequestURL(meta *meta.Meta) (string, error) {
	return fmt.Sprintf("%s/v2/translate", meta.BaseURL), nil
}

func (a *Adaptor) SetupRequestHeader(c *gin.Context, req *http.Request, meta *meta.Meta) error {
	adaptor.SetupCommonRequestHeader(c, req, meta)
	req.Header.Set("Authorization", "DeepL-Auth-Key "+meta.APIKey)
	return nil
}

func (a *Adaptor) ConvertRequest(c *gin.Context, relayMode int, request *model.GeneralOpenAIRequest) (any, error) {
	if request == nil {
		return nil, errors.New("request is nil")
	}
	convertedRequest, text := ConvertRequest(*request)
	a.promptText = text
	return convertedRequest, nil
}

func (a *Adaptor) ConvertImageRequest(request *model.ImageRequest) (any, error) {
	if request == nil {
		return nil, errors.New("request is nil")
	}
	return request, nil
}

func (a *Adaptor) DoRequest(c *gin.Context, meta *meta.Meta, requestBody io.Reader) (*http.Response, error) {
	return adaptor.DoRequestHelper(a, c, meta, requestBody)
}

func (a *Adaptor) DoResponse(c *gin.Context, resp *http.Response, meta *meta.Meta) (usage *model.Usage, err *model.ErrorWithStatusCode) {
	if meta.IsStream {
		err = StreamHandler(c, resp, meta.ActualModelName)
	} else {
		err = Handler(c, resp, meta.ActualModelName)
	}
	promptTokens := len(a.promptText)
	usage = &model.Usage{
		PromptTokens: promptTokens,
		TotalTokens:  promptTokens,
	}
	return
}

func (a *Adaptor) GetModelList() []string {
	return ModelList
}

func (a *Adaptor) GetChannelName() string {
	return "deepl"
}
							
								
								
									
9  relay/adaptor/deepl/constants.go  Normal file
@@ -0,0 +1,9 @@
package deepl

// https://developers.deepl.com/docs/api-reference/glossaries

var ModelList = []string{
	"deepl-zh",
	"deepl-en",
	"deepl-ja",
}
							
								
								
									
11  relay/adaptor/deepl/helper.go  Normal file
@@ -0,0 +1,11 @@
package deepl

import "strings"

func parseLangFromModelName(modelName string) string {
	parts := strings.Split(modelName, "-")
	if len(parts) == 1 {
		return "ZH"
	}
	return parts[1]
}
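A quick test sketch (not part of this diff) of what the helper above returns; note the fallback is the upper-case "ZH" while a parsed suffix keeps the model name's own casing.

package deepl

import "testing"

func TestParseLangFromModelName(t *testing.T) {
	// "deepl-zh" splits into ["deepl", "zh"], so the suffix is returned as-is.
	if got := parseLangFromModelName("deepl-zh"); got != "zh" {
		t.Fatalf("expected zh, got %s", got)
	}
	// No "-" in the name: fall back to the default target language "ZH".
	if got := parseLangFromModelName("deepl"); got != "ZH" {
		t.Fatalf("expected ZH, got %s", got)
	}
}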
							
								
								
									
137  relay/adaptor/deepl/main.go  Normal file
@@ -0,0 +1,137 @@
package deepl

import (
	"encoding/json"
	"github.com/gin-gonic/gin"
	"github.com/songquanpeng/one-api/common"
	"github.com/songquanpeng/one-api/common/helper"
	"github.com/songquanpeng/one-api/relay/adaptor/openai"
	"github.com/songquanpeng/one-api/relay/constant"
	"github.com/songquanpeng/one-api/relay/constant/finishreason"
	"github.com/songquanpeng/one-api/relay/constant/role"
	"github.com/songquanpeng/one-api/relay/model"
	"io"
	"net/http"
)

// https://developers.deepl.com/docs/getting-started/your-first-api-request

func ConvertRequest(textRequest model.GeneralOpenAIRequest) (*Request, string) {
	var text string
	if len(textRequest.Messages) != 0 {
		text = textRequest.Messages[len(textRequest.Messages)-1].StringContent()
	}
	deeplRequest := Request{
		TargetLang: parseLangFromModelName(textRequest.Model),
		Text:       []string{text},
	}
	return &deeplRequest, text
}

func StreamResponseDeepL2OpenAI(deeplResponse *Response) *openai.ChatCompletionsStreamResponse {
	var choice openai.ChatCompletionsStreamResponseChoice
	if len(deeplResponse.Translations) != 0 {
		choice.Delta.Content = deeplResponse.Translations[0].Text
	}
	choice.Delta.Role = role.Assistant
	choice.FinishReason = &constant.StopFinishReason
	openaiResponse := openai.ChatCompletionsStreamResponse{
		Object:  constant.StreamObject,
		Created: helper.GetTimestamp(),
		Choices: []openai.ChatCompletionsStreamResponseChoice{choice},
	}
	return &openaiResponse
}

func ResponseDeepL2OpenAI(deeplResponse *Response) *openai.TextResponse {
	var responseText string
	if len(deeplResponse.Translations) != 0 {
		responseText = deeplResponse.Translations[0].Text
	}
	choice := openai.TextResponseChoice{
		Index: 0,
		Message: model.Message{
			Role:    role.Assistant,
			Content: responseText,
			Name:    nil,
		},
		FinishReason: finishreason.Stop,
	}
	fullTextResponse := openai.TextResponse{
		Object:  constant.NonStreamObject,
		Created: helper.GetTimestamp(),
		Choices: []openai.TextResponseChoice{choice},
	}
	return &fullTextResponse
}

func StreamHandler(c *gin.Context, resp *http.Response, modelName string) *model.ErrorWithStatusCode {
	responseBody, err := io.ReadAll(resp.Body)
	if err != nil {
		return openai.ErrorWrapper(err, "read_response_body_failed", http.StatusInternalServerError)
	}
	err = resp.Body.Close()
	if err != nil {
		return openai.ErrorWrapper(err, "close_response_body_failed", http.StatusInternalServerError)
	}
	var deeplResponse Response
	err = json.Unmarshal(responseBody, &deeplResponse)
	if err != nil {
		return openai.ErrorWrapper(err, "unmarshal_response_body_failed", http.StatusInternalServerError)
	}
	fullTextResponse := StreamResponseDeepL2OpenAI(&deeplResponse)
	fullTextResponse.Model = modelName
	fullTextResponse.Id = helper.GetResponseID(c)
	jsonData, err := json.Marshal(fullTextResponse)
	if err != nil {
		return openai.ErrorWrapper(err, "marshal_response_body_failed", http.StatusInternalServerError)
	}
	common.SetEventStreamHeaders(c)
	c.Stream(func(w io.Writer) bool {
		if jsonData != nil {
			c.Render(-1, common.CustomEvent{Data: "data: " + string(jsonData)})
			jsonData = nil
			return true
		}
		c.Render(-1, common.CustomEvent{Data: "data: [DONE]"})
		return false
	})
	_ = resp.Body.Close()
	return nil
}

func Handler(c *gin.Context, resp *http.Response, modelName string) *model.ErrorWithStatusCode {
	responseBody, err := io.ReadAll(resp.Body)
	if err != nil {
		return openai.ErrorWrapper(err, "read_response_body_failed", http.StatusInternalServerError)
	}
	err = resp.Body.Close()
	if err != nil {
		return openai.ErrorWrapper(err, "close_response_body_failed", http.StatusInternalServerError)
	}
	var deeplResponse Response
	err = json.Unmarshal(responseBody, &deeplResponse)
	if err != nil {
		return openai.ErrorWrapper(err, "unmarshal_response_body_failed", http.StatusInternalServerError)
	}
	if deeplResponse.Message != "" {
		return &model.ErrorWithStatusCode{
			Error: model.Error{
				Message: deeplResponse.Message,
				Code:    "deepl_error",
			},
			StatusCode: resp.StatusCode,
		}
	}
	fullTextResponse := ResponseDeepL2OpenAI(&deeplResponse)
	fullTextResponse.Model = modelName
	fullTextResponse.Id = helper.GetResponseID(c)
	jsonResponse, err := json.Marshal(fullTextResponse)
	if err != nil {
		return openai.ErrorWrapper(err, "marshal_response_body_failed", http.StatusInternalServerError)
	}
	c.Writer.Header().Set("Content-Type", "application/json")
	c.Writer.WriteHeader(resp.StatusCode)
	_, err = c.Writer.Write(jsonResponse)
	return nil
}
							
								
								
									
16  relay/adaptor/deepl/model.go  Normal file
@@ -0,0 +1,16 @@
package deepl

type Request struct {
	Text       []string `json:"text"`
	TargetLang string   `json:"target_lang"`
}

type Translation struct {
	DetectedSourceLanguage string `json:"detected_source_language,omitempty"`
	Text                   string `json:"text,omitempty"`
}

type Response struct {
	Translations []Translation `json:"translations,omitempty"`
	Message      string        `json:"message,omitempty"`
}
							
								
								
									
6  relay/adaptor/deepseek/constants.go  Normal file
@@ -0,0 +1,6 @@
package deepseek

var ModelList = []string{
	"deepseek-chat",
	"deepseek-coder",
}
							
								
								
									
13  relay/adaptor/doubao/constants.go  Normal file
@@ -0,0 +1,13 @@
package doubao

// https://console.volcengine.com/ark/region:ark+cn-beijing/model

var ModelList = []string{
	"Doubao-pro-128k",
	"Doubao-pro-32k",
	"Doubao-pro-4k",
	"Doubao-lite-128k",
	"Doubao-lite-32k",
	"Doubao-lite-4k",
	"Doubao-embedding",
}
							
								
								
									
14  relay/adaptor/doubao/main.go  Normal file
@@ -0,0 +1,14 @@
package doubao

import (
	"fmt"
	"github.com/songquanpeng/one-api/relay/meta"
	"github.com/songquanpeng/one-api/relay/relaymode"
)

func GetRequestURL(meta *meta.Meta) (string, error) {
	if meta.Mode == relaymode.ChatCompletions {
		return fmt.Sprintf("%s/api/v3/chat/completions", meta.BaseURL), nil
	}
	return "", fmt.Errorf("unsupported relay mode %d for doubao", meta.Mode)
}
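A minimal sketch of the URL this helper builds; the base URL below is a placeholder, and only the Mode and BaseURL fields of meta.Meta are consulted here.

package main

import (
	"fmt"

	"github.com/songquanpeng/one-api/relay/adaptor/doubao"
	"github.com/songquanpeng/one-api/relay/meta"
	"github.com/songquanpeng/one-api/relay/relaymode"
)

func main() {
	// Chat completions are routed to the ark v3 endpoint; any other mode is an error.
	u, err := doubao.GetRequestURL(&meta.Meta{
		Mode:    relaymode.ChatCompletions,
		BaseURL: "https://example-ark-endpoint", // placeholder base URL
	})
	fmt.Println(u, err) // https://example-ark-endpoint/api/v3/chat/completions <nil>
}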
@@ -3,6 +3,9 @@ package gemini
import (
	"errors"
	"fmt"
	"io"
	"net/http"

	"github.com/gin-gonic/gin"
	"github.com/songquanpeng/one-api/common/config"
	"github.com/songquanpeng/one-api/common/helper"
@@ -10,8 +13,7 @@ import (
	"github.com/songquanpeng/one-api/relay/adaptor/openai"
	"github.com/songquanpeng/one-api/relay/meta"
	"github.com/songquanpeng/one-api/relay/model"
	"io"
	"net/http"
	"github.com/songquanpeng/one-api/relay/relaymode"
)

type Adaptor struct {
@@ -22,10 +24,17 @@ func (a *Adaptor) Init(meta *meta.Meta) {
}

func (a *Adaptor) GetRequestURL(meta *meta.Meta) (string, error) {
	version := helper.AssignOrDefault(meta.APIVersion, config.GeminiVersion)
	action := "generateContent"
	version := helper.AssignOrDefault(meta.Config.APIVersion, config.GeminiVersion)
	action := ""
	switch meta.Mode {
	case relaymode.Embeddings:
		action = "batchEmbedContents"
	default:
		action = "generateContent"
	}

	if meta.IsStream {
		action = "streamGenerateContent"
		action = "streamGenerateContent?alt=sse"
	}
	return fmt.Sprintf("%s/%s/models/%s:%s", meta.BaseURL, version, meta.ActualModelName, action), nil
}
@@ -40,7 +49,14 @@ func (a *Adaptor) ConvertRequest(c *gin.Context, relayMode int, request *model.G
	if request == nil {
		return nil, errors.New("request is nil")
	}
	return ConvertRequest(*request), nil
	switch relayMode {
	case relaymode.Embeddings:
		geminiEmbeddingRequest := ConvertEmbeddingRequest(*request)
		return geminiEmbeddingRequest, nil
	default:
		geminiRequest := ConvertRequest(*request)
		return geminiRequest, nil
	}
}

func (a *Adaptor) ConvertImageRequest(request *model.ImageRequest) (any, error) {
@@ -60,8 +76,13 @@ func (a *Adaptor) DoResponse(c *gin.Context, resp *http.Response, meta *meta.Met
		err, responseText = StreamHandler(c, resp)
		usage = openai.ResponseText2Usage(responseText, meta.ActualModelName, meta.PromptTokens)
	} else {
		switch meta.Mode {
		case relaymode.Embeddings:
			err, usage = EmbeddingHandler(c, resp)
		default:
			err, usage = Handler(c, resp, meta.PromptTokens, meta.ActualModelName)
		}
	}
	return
}
@@ -4,5 +4,5 @@ package gemini

var ModelList = []string{
	"gemini-pro", "gemini-1.0-pro-001", "gemini-1.5-pro",
	"gemini-pro-vision", "gemini-1.0-pro-vision-001",
	"gemini-pro-vision", "gemini-1.0-pro-vision-001", "embedding-001", "text-embedding-004",
}
@@ -4,6 +4,10 @@ import (
 | 
			
		||||
	"bufio"
 | 
			
		||||
	"encoding/json"
 | 
			
		||||
	"fmt"
 | 
			
		||||
	"io"
 | 
			
		||||
	"net/http"
 | 
			
		||||
	"strings"
 | 
			
		||||
 | 
			
		||||
	"github.com/songquanpeng/one-api/common"
	"github.com/songquanpeng/one-api/common/config"
	"github.com/songquanpeng/one-api/common/helper"
@@ -13,9 +17,6 @@ import (
	"github.com/songquanpeng/one-api/relay/adaptor/openai"
	"github.com/songquanpeng/one-api/relay/constant"
	"github.com/songquanpeng/one-api/relay/model"
	"io"
	"net/http"
	"strings"

	"github.com/gin-gonic/gin"
)
@@ -54,7 +55,17 @@ func ConvertRequest(textRequest model.GeneralOpenAIRequest) *ChatRequest {
			MaxOutputTokens: textRequest.MaxTokens,
		},
	}
	if textRequest.Functions != nil {
	if textRequest.Tools != nil {
		functions := make([]model.Function, 0, len(textRequest.Tools))
		for _, tool := range textRequest.Tools {
			functions = append(functions, tool.Function)
		}
		geminiRequest.Tools = []ChatTools{
			{
				FunctionDeclarations: functions,
			},
		}
	} else if textRequest.Functions != nil {
		geminiRequest.Tools = []ChatTools{
			{
				FunctionDeclarations: textRequest.Functions,
@@ -123,6 +134,29 @@ func ConvertRequest(textRequest model.GeneralOpenAIRequest) *ChatRequest {
	return &geminiRequest
}

func ConvertEmbeddingRequest(request model.GeneralOpenAIRequest) *BatchEmbeddingRequest {
	inputs := request.ParseInput()
	requests := make([]EmbeddingRequest, len(inputs))
	model := fmt.Sprintf("models/%s", request.Model)

	for i, input := range inputs {
		requests[i] = EmbeddingRequest{
			Model: model,
			Content: ChatContent{
				Parts: []Part{
					{
						Text: input,
					},
				},
			},
		}
	}

	return &BatchEmbeddingRequest{
		Requests: requests,
	}
}

type ChatResponse struct {
	Candidates     []ChatCandidate    `json:"candidates"`
	PromptFeedback ChatPromptFeedback `json:"promptFeedback"`
@@ -154,6 +188,30 @@ type ChatPromptFeedback struct {
	SafetyRatings []ChatSafetyRating `json:"safetyRatings"`
}

func getToolCalls(candidate *ChatCandidate) []model.Tool {
	var toolCalls []model.Tool

	item := candidate.Content.Parts[0]
	if item.FunctionCall == nil {
		return toolCalls
	}
	argsBytes, err := json.Marshal(item.FunctionCall.Arguments)
	if err != nil {
		logger.FatalLog("getToolCalls failed: " + err.Error())
		return toolCalls
	}
	toolCall := model.Tool{
		Id:   fmt.Sprintf("call_%s", random.GetUUID()),
		Type: "function",
		Function: model.Function{
			Arguments: string(argsBytes),
			Name:      item.FunctionCall.FunctionName,
		},
	}
	toolCalls = append(toolCalls, toolCall)
	return toolCalls
}

func responseGeminiChat2OpenAI(response *ChatResponse) *openai.TextResponse {
	fullTextResponse := openai.TextResponse{
		Id:      fmt.Sprintf("chatcmpl-%s", random.GetUUID()),
@@ -166,13 +224,19 @@ func responseGeminiChat2OpenAI(response *ChatResponse) *openai.TextResponse {
			Index: i,
			Message: model.Message{
				Role: "assistant",
				Content: "",
			},
			FinishReason: constant.StopFinishReason,
		}
		if len(candidate.Content.Parts) > 0 {
			if candidate.Content.Parts[0].FunctionCall != nil {
				choice.Message.ToolCalls = getToolCalls(&candidate)
			} else {
				choice.Message.Content = candidate.Content.Parts[0].Text
			}
		} else {
			choice.Message.Content = ""
			choice.FinishReason = candidate.FinishReason
		}
		fullTextResponse.Choices = append(fullTextResponse.Choices, choice)
	}
	return &fullTextResponse
@@ -189,10 +253,25 @@ func streamResponseGeminiChat2OpenAI(geminiResponse *ChatResponse) *openai.ChatC
	return &response
}

func embeddingResponseGemini2OpenAI(response *EmbeddingResponse) *openai.EmbeddingResponse {
	openAIEmbeddingResponse := openai.EmbeddingResponse{
		Object: "list",
		Data:   make([]openai.EmbeddingResponseItem, 0, len(response.Embeddings)),
		Model:  "gemini-embedding",
		Usage:  model.Usage{TotalTokens: 0},
	}
	for _, item := range response.Embeddings {
		openAIEmbeddingResponse.Data = append(openAIEmbeddingResponse.Data, openai.EmbeddingResponseItem{
			Object:    `embedding`,
			Index:     0,
			Embedding: item.Values,
		})
	}
	return &openAIEmbeddingResponse
}

func StreamHandler(c *gin.Context, resp *http.Response) (*model.ErrorWithStatusCode, string) {
	responseText := ""
	dataChan := make(chan string)
	stopChan := make(chan bool)
	scanner := bufio.NewScanner(resp.Body)
	scanner.Split(func(data []byte, atEOF bool) (advance int, token []byte, err error) {
		if atEOF && len(data) == 0 {
@@ -206,14 +285,16 @@ func StreamHandler(c *gin.Context, resp *http.Response) (*model.ErrorWithStatusC
		}
		return 0, nil, nil
	})
	dataChan := make(chan string)
	stopChan := make(chan bool)
	go func() {
		for scanner.Scan() {
			data := scanner.Text()
			data = strings.TrimSpace(data)
			if !strings.HasPrefix(data, "\"text\": \"") {
			if !strings.HasPrefix(data, "data: ") {
				continue
			}
			data = strings.TrimPrefix(data, "\"text\": \"")
			data = strings.TrimPrefix(data, "data: ")
			data = strings.TrimSuffix(data, "\"")
			dataChan <- data
		}
@@ -223,23 +304,17 @@ func StreamHandler(c *gin.Context, resp *http.Response) (*model.ErrorWithStatusC
	c.Stream(func(w io.Writer) bool {
		select {
		case data := <-dataChan:
			// this is used to prevent annoying \ related format bug
			data = fmt.Sprintf("{\"content\": \"%s\"}", data)
			type dummyStruct struct {
				Content string `json:"content"`
			var geminiResponse ChatResponse
			err := json.Unmarshal([]byte(data), &geminiResponse)
			if err != nil {
				logger.SysError("error unmarshalling stream response: " + err.Error())
				return true
			}
			var dummy dummyStruct
			err := json.Unmarshal([]byte(data), &dummy)
			responseText += dummy.Content
			var choice openai.ChatCompletionsStreamResponseChoice
			choice.Delta.Content = dummy.Content
			response := openai.ChatCompletionsStreamResponse{
				Id:      fmt.Sprintf("chatcmpl-%s", random.GetUUID()),
				Object:  "chat.completion.chunk",
				Created: helper.GetTimestamp(),
				Model:   "gemini-pro",
				Choices: []openai.ChatCompletionsStreamResponseChoice{choice},
			response := streamResponseGeminiChat2OpenAI(&geminiResponse)
			if response == nil {
				return true
			}
			responseText += response.Choices[0].Delta.StringContent()
			jsonResponse, err := json.Marshal(response)
			if err != nil {
				logger.SysError("error marshalling stream response: " + err.Error())
@@ -302,3 +377,39 @@ func Handler(c *gin.Context, resp *http.Response, promptTokens int, modelName st
	_, err = c.Writer.Write(jsonResponse)
	return nil, &usage
}

func EmbeddingHandler(c *gin.Context, resp *http.Response) (*model.ErrorWithStatusCode, *model.Usage) {
	var geminiEmbeddingResponse EmbeddingResponse
	responseBody, err := io.ReadAll(resp.Body)
	if err != nil {
		return openai.ErrorWrapper(err, "read_response_body_failed", http.StatusInternalServerError), nil
	}
	err = resp.Body.Close()
	if err != nil {
		return openai.ErrorWrapper(err, "close_response_body_failed", http.StatusInternalServerError), nil
	}
	err = json.Unmarshal(responseBody, &geminiEmbeddingResponse)
	if err != nil {
		return openai.ErrorWrapper(err, "unmarshal_response_body_failed", http.StatusInternalServerError), nil
	}
	if geminiEmbeddingResponse.Error != nil {
		return &model.ErrorWithStatusCode{
			Error: model.Error{
				Message: geminiEmbeddingResponse.Error.Message,
				Type:    "gemini_error",
				Param:   "",
				Code:    geminiEmbeddingResponse.Error.Code,
			},
			StatusCode: resp.StatusCode,
		}, nil
	}
	fullTextResponse := embeddingResponseGemini2OpenAI(&geminiEmbeddingResponse)
	jsonResponse, err := json.Marshal(fullTextResponse)
	if err != nil {
		return openai.ErrorWrapper(err, "marshal_response_body_failed", http.StatusInternalServerError), nil
	}
	c.Writer.Header().Set("Content-Type", "application/json")
	c.Writer.WriteHeader(resp.StatusCode)
	_, err = c.Writer.Write(jsonResponse)
	return nil, &fullTextResponse.Usage
}

@@ -7,14 +7,47 @@ type ChatRequest struct {
	Tools            []ChatTools          `json:"tools,omitempty"`
}

type EmbeddingRequest struct {
	Model                string      `json:"model"`
	Content              ChatContent `json:"content"`
	TaskType             string      `json:"taskType,omitempty"`
	Title                string      `json:"title,omitempty"`
	OutputDimensionality int         `json:"outputDimensionality,omitempty"`
}

type BatchEmbeddingRequest struct {
	Requests []EmbeddingRequest `json:"requests"`
}

type EmbeddingData struct {
	Values []float64 `json:"values"`
}

type EmbeddingResponse struct {
	Embeddings []EmbeddingData `json:"embeddings"`
	Error      *Error          `json:"error,omitempty"`
}

type Error struct {
	Code    int    `json:"code,omitempty"`
	Message string `json:"message,omitempty"`
	Status  string `json:"status,omitempty"`
}

type InlineData struct {
	MimeType string `json:"mimeType"`
	Data     string `json:"data"`
}

type FunctionCall struct {
	FunctionName string `json:"name"`
	Arguments    any    `json:"args"`
}

type Part struct {
	Text         string        `json:"text,omitempty"`
	InlineData   *InlineData   `json:"inlineData,omitempty"`
	FunctionCall *FunctionCall `json:"functionCall,omitempty"`
}

type ChatContent struct {
@@ -28,7 +61,7 @@ type ChatSafetySettings struct {
}

type ChatTools struct {
	FunctionDeclarations any `json:"functionDeclarations,omitempty"`
	FunctionDeclarations any `json:"function_declarations,omitempty"`
}

type ChatGenerationConfig struct {

@@ -7,4 +7,6 @@ var ModelList = []string{
	"llama2-7b-2048",
	"llama2-70b-4096",
	"mixtral-8x7b-32768",
	"llama3-8b-8192",
	"llama3-70b-8192",
}

@@ -1,7 +1,11 @@
package minimax

// https://www.minimaxi.com/document/guides/chat-model/V2?id=65e0736ab2845de20908e2dd

var ModelList = []string{
	"abab5.5s-chat",
	"abab5.5-chat",
	"abab6.5-chat",
	"abab6.5s-chat",
	"abab6-chat",
	"abab5.5-chat",
	"abab5.5s-chat",
}

@@ -1,5 +1,11 @@
package ollama

var ModelList = []string{
	"codellama:7b-instruct",
	"llama2:7b",
	"llama2:latest",
	"llama3:latest",
	"phi3:latest",
	"qwen:0.5b-chat",
	"qwen:7b",
}

@@ -13,6 +13,7 @@ import (

	"github.com/gin-gonic/gin"
	"github.com/songquanpeng/one-api/common"
	"github.com/songquanpeng/one-api/common/image"
	"github.com/songquanpeng/one-api/common/logger"
	"github.com/songquanpeng/one-api/relay/adaptor/openai"
	"github.com/songquanpeng/one-api/relay/constant"
@@ -32,9 +33,22 @@ func ConvertRequest(request model.GeneralOpenAIRequest) *ChatRequest {
		Stream: request.Stream,
	}
	for _, message := range request.Messages {
		openaiContent := message.ParseContent()
		var imageUrls []string
		var contentText string
		for _, part := range openaiContent {
			switch part.Type {
			case model.ContentTypeText:
				contentText = part.Text
			case model.ContentTypeImageURL:
				_, data, _ := image.GetImageFromUrl(part.ImageURL.Url)
				imageUrls = append(imageUrls, data)
			}
		}
		ollamaRequest.Messages = append(ollamaRequest.Messages, Message{
			Role:    message.Role,
			Content: message.StringContent(),
			Content: contentText,
			Images:  imageUrls,
		})
	}
	return &ollamaRequest
@@ -53,6 +67,7 @@ func responseOllama2OpenAI(response *ChatResponse) *openai.TextResponse {
	}
	fullTextResponse := openai.TextResponse{
		Id:      fmt.Sprintf("chatcmpl-%s", random.GetUUID()),
		Model:   response.Model,
		Object:  "chat.completion",
		Created: helper.GetTimestamp(),
		Choices: []openai.TextResponseChoice{choice},

@@ -5,6 +5,7 @@ import (
	"fmt"
	"github.com/gin-gonic/gin"
	"github.com/songquanpeng/one-api/relay/adaptor"
	"github.com/songquanpeng/one-api/relay/adaptor/doubao"
	"github.com/songquanpeng/one-api/relay/adaptor/minimax"
	"github.com/songquanpeng/one-api/relay/channeltype"
	"github.com/songquanpeng/one-api/relay/meta"
@@ -29,13 +30,13 @@ func (a *Adaptor) GetRequestURL(meta *meta.Meta) (string, error) {
		if meta.Mode == relaymode.ImagesGenerations {
			// https://learn.microsoft.com/en-us/azure/ai-services/openai/dall-e-quickstart?tabs=dalle3%2Ccommand-line&pivots=rest-api
			// https://{resource_name}.openai.azure.com/openai/deployments/dall-e-3/images/generations?api-version=2024-03-01-preview
			fullRequestURL := fmt.Sprintf("%s/openai/deployments/%s/images/generations?api-version=%s", meta.BaseURL, meta.ActualModelName, meta.APIVersion)
			fullRequestURL := fmt.Sprintf("%s/openai/deployments/%s/images/generations?api-version=%s", meta.BaseURL, meta.ActualModelName, meta.Config.APIVersion)
			return fullRequestURL, nil
		}

		// https://learn.microsoft.com/en-us/azure/cognitive-services/openai/chatgpt-quickstart?pivots=rest-api&tabs=command-line#rest-api
		requestURL := strings.Split(meta.RequestURLPath, "?")[0]
		requestURL = fmt.Sprintf("%s?api-version=%s", requestURL, meta.APIVersion)
		requestURL = fmt.Sprintf("%s?api-version=%s", requestURL, meta.Config.APIVersion)
		task := strings.TrimPrefix(requestURL, "/v1/")
		model_ := meta.ActualModelName
		model_ = strings.Replace(model_, ".", "", -1)
@@ -45,6 +46,8 @@ func (a *Adaptor) GetRequestURL(meta *meta.Meta) (string, error) {
		return GetFullRequestURL(meta.BaseURL, requestURL, meta.ChannelType), nil
	case channeltype.Minimax:
		return minimax.GetRequestURL(meta)
	case channeltype.Doubao:
		return doubao.GetRequestURL(meta)
	default:
		return GetFullRequestURL(meta.BaseURL, meta.RequestURLPath, meta.ChannelType), nil
	}
@@ -86,9 +89,13 @@ func (a *Adaptor) DoResponse(c *gin.Context, resp *http.Response, meta *meta.Met
	if meta.IsStream {
		var responseText string
		err, responseText, usage = StreamHandler(c, resp, meta.Mode)
		if usage == nil {
		if usage == nil || usage.TotalTokens == 0 {
			usage = ResponseText2Usage(responseText, meta.ActualModelName, meta.PromptTokens)
		}
		if usage.TotalTokens != 0 && usage.PromptTokens == 0 { // some channels don't return prompt tokens & completion tokens
			usage.PromptTokens = meta.PromptTokens
			usage.CompletionTokens = usage.TotalTokens - meta.PromptTokens
		}
	} else {
		switch meta.Mode {
		case relaymode.ImagesGenerations:

@@ -3,12 +3,15 @@ package openai
import (
	"github.com/songquanpeng/one-api/relay/adaptor/ai360"
	"github.com/songquanpeng/one-api/relay/adaptor/baichuan"
	"github.com/songquanpeng/one-api/relay/adaptor/deepseek"
	"github.com/songquanpeng/one-api/relay/adaptor/doubao"
	"github.com/songquanpeng/one-api/relay/adaptor/groq"
	"github.com/songquanpeng/one-api/relay/adaptor/lingyiwanwu"
	"github.com/songquanpeng/one-api/relay/adaptor/minimax"
	"github.com/songquanpeng/one-api/relay/adaptor/mistral"
	"github.com/songquanpeng/one-api/relay/adaptor/moonshot"
	"github.com/songquanpeng/one-api/relay/adaptor/stepfun"
	"github.com/songquanpeng/one-api/relay/adaptor/togetherai"
	"github.com/songquanpeng/one-api/relay/channeltype"
)

@@ -18,10 +21,13 @@ var CompatibleChannels = []int{
	channeltype.Moonshot,
	channeltype.Baichuan,
	channeltype.Minimax,
	channeltype.Doubao,
	channeltype.Mistral,
	channeltype.Groq,
	channeltype.LingYiWanWu,
	channeltype.StepFun,
	channeltype.DeepSeek,
	channeltype.TogetherAI,
}

func GetCompatibleChannelMeta(channelType int) (string, []string) {
@@ -44,6 +50,12 @@ func GetCompatibleChannelMeta(channelType int) (string, []string) {
		return "lingyiwanwu", lingyiwanwu.ModelList
	case channeltype.StepFun:
		return "stepfun", stepfun.ModelList
	case channeltype.DeepSeek:
		return "deepseek", deepseek.ModelList
	case channeltype.TogetherAI:
		return "together.ai", togetherai.ModelList
	case channeltype.Doubao:
		return "doubao", doubao.ModelList
	default:
		return "openai", ModelList
	}

@@ -7,6 +7,7 @@ var ModelList = []string{
	"gpt-4", "gpt-4-0314", "gpt-4-0613", "gpt-4-1106-preview", "gpt-4-0125-preview",
	"gpt-4-32k", "gpt-4-32k-0314", "gpt-4-32k-0613",
	"gpt-4-turbo-preview", "gpt-4-turbo", "gpt-4-turbo-2024-04-09",
	"gpt-4o", "gpt-4o-2024-05-13",
	"gpt-4-vision-preview",
	"text-embedding-ada-002", "text-embedding-3-small", "text-embedding-3-large",
	"text-curie-001", "text-babbage-001", "text-ada-001", "text-davinci-002", "text-davinci-003",

@@ -15,6 +15,12 @@ import (
	"strings"
)

const (
	dataPrefix       = "data: "
	done             = "[DONE]"
	dataPrefixLength = len(dataPrefix)
)

func StreamHandler(c *gin.Context, resp *http.Response, relayMode int) (*model.ErrorWithStatusCode, string, *model.Usage) {
	responseText := ""
	scanner := bufio.NewScanner(resp.Body)
@@ -36,23 +42,30 @@ func StreamHandler(c *gin.Context, resp *http.Response, relayMode int) (*model.E
	go func() {
		for scanner.Scan() {
			data := scanner.Text()
			if len(data) < 6 { // ignore blank line or wrong format
			if len(data) < dataPrefixLength { // ignore blank line or wrong format
				continue
			}
			if data[:6] != "data: " && data[:6] != "[DONE]" {
			if data[:dataPrefixLength] != dataPrefix && data[:dataPrefixLength] != done {
				continue
			}
			if strings.HasPrefix(data[dataPrefixLength:], done) {
				dataChan <- data
			data = data[6:]
			if !strings.HasPrefix(data, "[DONE]") {
				continue
			}
			switch relayMode {
			case relaymode.ChatCompletions:
				var streamResponse ChatCompletionsStreamResponse
					err := json.Unmarshal([]byte(data), &streamResponse)
				err := json.Unmarshal([]byte(data[dataPrefixLength:]), &streamResponse)
				if err != nil {
					logger.SysError("error unmarshalling stream response: " + err.Error())
					dataChan <- data // if error happened, pass the data to client
					continue         // just ignore the error
				}
				if len(streamResponse.Choices) == 0 {
					// but for empty choice, we should not pass it to client, this is for azure
					continue // just ignore empty choice
				}
				dataChan <- data
				for _, choice := range streamResponse.Choices {
					responseText += conv.AsString(choice.Delta.Content)
				}
@@ -60,8 +73,9 @@ func StreamHandler(c *gin.Context, resp *http.Response, relayMode int) (*model.E
					usage = streamResponse.Usage
				}
			case relaymode.Completions:
				dataChan <- data
				var streamResponse CompletionsStreamResponse
					err := json.Unmarshal([]byte(data), &streamResponse)
				err := json.Unmarshal([]byte(data[dataPrefixLength:]), &streamResponse)
				if err != nil {
					logger.SysError("error unmarshalling stream response: " + err.Error())
					continue
@@ -71,7 +85,6 @@ func StreamHandler(c *gin.Context, resp *http.Response, relayMode int) (*model.E
				}
			}
		}
		}
		stopChan <- true
	}()
	common.SetEventStreamHeaders(c)

@@ -134,7 +134,7 @@ type ChatCompletionsStreamResponse struct {
	Created int64                                 `json:"created"`
	Model   string                                `json:"model"`
	Choices []ChatCompletionsStreamResponseChoice `json:"choices"`
	Usage   *model.Usage                          `json:"usage"`
	Usage   *model.Usage                          `json:"usage,omitempty"`
}

type CompletionsStreamResponse struct {

@@ -24,6 +24,10 @@ func InitTokenEncoders() {
		logger.FatalLog(fmt.Sprintf("failed to get gpt-3.5-turbo token encoder: %s", err.Error()))
	}
	defaultTokenEncoder = gpt35TokenEncoder
	gpt4oTokenEncoder, err := tiktoken.EncodingForModel("gpt-4o")
	if err != nil {
		logger.FatalLog(fmt.Sprintf("failed to get gpt-4o token encoder: %s", err.Error()))
	}
	gpt4TokenEncoder, err := tiktoken.EncodingForModel("gpt-4")
	if err != nil {
		logger.FatalLog(fmt.Sprintf("failed to get gpt-4 token encoder: %s", err.Error()))
@@ -31,6 +35,8 @@ func InitTokenEncoders() {
	for model := range billingratio.ModelRatio {
		if strings.HasPrefix(model, "gpt-3.5") {
			tokenEncoderMap[model] = gpt35TokenEncoder
		} else if strings.HasPrefix(model, "gpt-4o") {
			tokenEncoderMap[model] = gpt4oTokenEncoder
		} else if strings.HasPrefix(model, "gpt-4") {
			tokenEncoderMap[model] = gpt4TokenEncoder
		} else {
@@ -206,3 +212,7 @@ func CountTokenText(text string, model string) int {
	tokenEncoder := getTokenEncoder(model)
	return getTokenNum(tokenEncoder, text)
}

func CountToken(text string) int {
	return CountTokenInput(text, "gpt-3.5-turbo")
}

10  relay/adaptor/togetherai/constants.go  Normal file
@@ -0,0 +1,10 @@
package togetherai

// https://docs.together.ai/docs/inference-models

var ModelList = []string{
	"meta-llama/Llama-3-70b-chat-hf",
	"deepseek-ai/deepseek-coder-33b-instruct",
	"mistralai/Mixtral-8x22B-Instruct-v0.1",
	"Qwen/Qwen1.5-72B-Chat",
}
@@ -14,10 +14,11 @@ import (

type Adaptor struct {
	request *model.GeneralOpenAIRequest
	meta    *meta.Meta
}

func (a *Adaptor) Init(meta *meta.Meta) {

	a.meta = meta
}

func (a *Adaptor) GetRequestURL(meta *meta.Meta) (string, error) {
@@ -60,10 +61,18 @@ func (a *Adaptor) DoResponse(c *gin.Context, resp *http.Response, meta *meta.Met
	if a.request == nil {
		return nil, openai.ErrorWrapper(errors.New("request is nil"), "request_is_nil", http.StatusBadRequest)
	}
	version := parseAPIVersionByModelName(meta.ActualModelName)
	if version == "" {
		version = a.meta.Config.APIVersion
	}
	if version == "" {
		version = "v1.1"
	}
	a.meta.Config.APIVersion = version
	if meta.IsStream {
		err, usage = StreamHandler(c, *a.request, splits[0], splits[1], splits[2])
		err, usage = StreamHandler(c, meta, *a.request, splits[0], splits[1], splits[2])
	} else {
		err, usage = Handler(c, *a.request, splits[0], splits[1], splits[2])
		err, usage = Handler(c, meta, *a.request, splits[0], splits[1], splits[2])
	}
	return
}

@@ -5,22 +5,24 @@ import (
	"crypto/sha256"
	"encoding/base64"
	"encoding/json"
	"errors"
	"fmt"
	"github.com/gin-gonic/gin"
	"github.com/gorilla/websocket"
	"github.com/songquanpeng/one-api/common"
	"github.com/songquanpeng/one-api/common/config"
	"github.com/songquanpeng/one-api/common/helper"
	"github.com/songquanpeng/one-api/common/logger"
	"github.com/songquanpeng/one-api/common/random"
	"github.com/songquanpeng/one-api/relay/adaptor/openai"
	"github.com/songquanpeng/one-api/relay/constant"
	"github.com/songquanpeng/one-api/relay/model"
	"io"
	"net/http"
	"net/url"
	"strings"
	"time"

	"github.com/gin-gonic/gin"
	"github.com/gorilla/websocket"
	"github.com/songquanpeng/one-api/common"
	"github.com/songquanpeng/one-api/common/helper"
	"github.com/songquanpeng/one-api/common/logger"
	"github.com/songquanpeng/one-api/common/random"
	"github.com/songquanpeng/one-api/relay/adaptor/openai"
	"github.com/songquanpeng/one-api/relay/constant"
	"github.com/songquanpeng/one-api/relay/meta"
	"github.com/songquanpeng/one-api/relay/model"
)

// https://console.xfyun.cn/services/cbm
@@ -28,11 +30,7 @@ import (

func requestOpenAI2Xunfei(request model.GeneralOpenAIRequest, xunfeiAppId string, domain string) *ChatRequest {
	messages := make([]Message, 0, len(request.Messages))
	var lastToolCalls []model.Tool
	for _, message := range request.Messages {
		if message.ToolCalls != nil {
			lastToolCalls = message.ToolCalls
		}
		messages = append(messages, Message{
			Role:    message.Role,
			Content: message.StringContent(),
@@ -45,9 +43,10 @@ func requestOpenAI2Xunfei(request model.GeneralOpenAIRequest, xunfeiAppId string
	xunfeiRequest.Parameter.Chat.TopK = request.N
	xunfeiRequest.Parameter.Chat.MaxTokens = request.MaxTokens
	xunfeiRequest.Payload.Message.Text = messages
	if len(lastToolCalls) != 0 {
		for _, toolCall := range lastToolCalls {
			xunfeiRequest.Payload.Functions.Text = append(xunfeiRequest.Payload.Functions.Text, toolCall.Function)

	if strings.HasPrefix(domain, "generalv3") {
		xunfeiRequest.Payload.Functions = &Functions{
			Text: request.Tools,
		}
	}

@@ -149,8 +148,8 @@ func buildXunfeiAuthUrl(hostUrl string, apiKey, apiSecret string) string {
	return callUrl
}

func StreamHandler(c *gin.Context, textRequest model.GeneralOpenAIRequest, appId string, apiSecret string, apiKey string) (*model.ErrorWithStatusCode, *model.Usage) {
	domain, authUrl := getXunfeiAuthUrl(c, apiKey, apiSecret, textRequest.Model)
func StreamHandler(c *gin.Context, meta *meta.Meta, textRequest model.GeneralOpenAIRequest, appId string, apiSecret string, apiKey string) (*model.ErrorWithStatusCode, *model.Usage) {
	domain, authUrl := getXunfeiAuthUrl(meta.Config.APIVersion, apiKey, apiSecret)
	dataChan, stopChan, err := xunfeiMakeRequest(textRequest, domain, authUrl, appId)
	if err != nil {
		return openai.ErrorWrapper(err, "xunfei_request_failed", http.StatusInternalServerError), nil
@@ -179,8 +178,8 @@ func StreamHandler(c *gin.Context, textRequest model.GeneralOpenAIRequest, appId
	return nil, &usage
}

func Handler(c *gin.Context, textRequest model.GeneralOpenAIRequest, appId string, apiSecret string, apiKey string) (*model.ErrorWithStatusCode, *model.Usage) {
	domain, authUrl := getXunfeiAuthUrl(c, apiKey, apiSecret, textRequest.Model)
func Handler(c *gin.Context, meta *meta.Meta, textRequest model.GeneralOpenAIRequest, appId string, apiSecret string, apiKey string) (*model.ErrorWithStatusCode, *model.Usage) {
	domain, authUrl := getXunfeiAuthUrl(meta.Config.APIVersion, apiKey, apiSecret)
	dataChan, stopChan, err := xunfeiMakeRequest(textRequest, domain, authUrl, appId)
	if err != nil {
		return openai.ErrorWrapper(err, "xunfei_request_failed", http.StatusInternalServerError), nil
@@ -203,7 +202,7 @@ func Handler(c *gin.Context, textRequest model.GeneralOpenAIRequest, appId strin
		}
	}
	if len(xunfeiResponse.Payload.Choices.Text) == 0 {
		return openai.ErrorWrapper(err, "xunfei_empty_response_detected", http.StatusInternalServerError), nil
		return openai.ErrorWrapper(errors.New("xunfei empty response detected"), "xunfei_empty_response_detected", http.StatusInternalServerError), nil
	}
	xunfeiResponse.Payload.Choices.Text[0].Content = content

@@ -268,25 +267,12 @@ func xunfeiMakeRequest(textRequest model.GeneralOpenAIRequest, domain, authUrl,
	return dataChan, stopChan, nil
}

func getAPIVersion(c *gin.Context, modelName string) string {
	query := c.Request.URL.Query()
	apiVersion := query.Get("api-version")
	if apiVersion != "" {
		return apiVersion
	}
func parseAPIVersionByModelName(modelName string) string {
	parts := strings.Split(modelName, "-")
	if len(parts) == 2 {
		apiVersion = parts[1]
		return apiVersion

		return parts[1]
	}
	apiVersion = c.GetString(config.KeyAPIVersion)
	if apiVersion != "" {
		return apiVersion
	}
	apiVersion = "v1.1"
	logger.SysLog("api_version not found, using default: " + apiVersion)
	return apiVersion
	return ""
}

// https://www.xfyun.cn/doc/spark/Web.html#_1-%E6%8E%A5%E5%8F%A3%E8%AF%B4%E6%98%8E
@@ -304,8 +290,7 @@ func apiVersion2domain(apiVersion string) string {
	return "general" + apiVersion
}

func getXunfeiAuthUrl(c *gin.Context, apiKey string, apiSecret string, modelName string) (string, string) {
	apiVersion := getAPIVersion(c, modelName)
func getXunfeiAuthUrl(apiVersion string, apiKey string, apiSecret string) (string, string) {
	domain := apiVersion2domain(apiVersion)
	authUrl := buildXunfeiAuthUrl(fmt.Sprintf("wss://spark-api.xf-yun.com/%s/chat", apiVersion), apiKey, apiSecret)
	return domain, authUrl

@@ -9,6 +9,10 @@ type Message struct {
	Content string `json:"content"`
}

type Functions struct {
	Text []model.Tool `json:"text,omitempty"`
}

type ChatRequest struct {
	Header struct {
		AppId string `json:"app_id"`
@@ -26,9 +30,7 @@ type ChatRequest struct {
		Message struct {
			Text []Message `json:"text"`
		} `json:"message"`
		Functions struct {
			Text []model.Function `json:"text,omitempty"`
		} `json:"functions,omitempty"`
		Functions *Functions `json:"functions,omitempty"`
	} `json:"payload"`
}

@@ -62,8 +62,8 @@ func (a *Adaptor) ConvertRequest(c *gin.Context, relayMode int, request *model.G
	}
	switch relayMode {
	case relaymode.Embeddings:
		baiduEmbeddingRequest := ConvertEmbeddingRequest(*request)
		return baiduEmbeddingRequest, nil
		baiduEmbeddingRequest, err := ConvertEmbeddingRequest(*request)
		return baiduEmbeddingRequest, err
	default:
		// TopP (0.0, 1.0)
		request.TopP = math.Min(0.99, request.TopP)
@@ -129,11 +129,15 @@ func (a *Adaptor) DoResponse(c *gin.Context, resp *http.Response, meta *meta.Met
	return
}

func ConvertEmbeddingRequest(request model.GeneralOpenAIRequest) *EmbeddingRequest {
	return &EmbeddingRequest{
		Model: "embedding-2",
		Input: request.Input.(string),
func ConvertEmbeddingRequest(request model.GeneralOpenAIRequest) (*EmbeddingRequest, error) {
	inputs := request.ParseInput()
	if len(inputs) != 1 {
		return nil, errors.New("invalid input length, zhipu only support one input")
	}
	return &EmbeddingRequest{
		Model: request.Model,
		Input: inputs[0],
	}, nil
}

func (a *Adaptor) GetModelList() []string {

@@ -13,6 +13,10 @@ const (
	Gemini
	Ollama
	AwsClaude
	Coze
	Cohere
	Cloudflare
	DeepL

	Dummy // this one is only for count, do not add any channel after this
)

@@ -49,3 +49,8 @@ var ImagePromptLengthLimitations = map[string]int{
	"wanx-v1":                   4000,
	"cogview-3":                 833,
}

var ImageOriginModelName = map[string]string{
	"ali-stable-diffusion-xl":   "stable-diffusion-xl",
	"ali-stable-diffusion-v1.5": "stable-diffusion-v1.5",
}

@@ -2,8 +2,9 @@ package ratio

import (
	"encoding/json"
	"github.com/songquanpeng/one-api/common/logger"
	"strings"

	"github.com/songquanpeng/one-api/common/logger"
)

const (
@@ -31,6 +32,8 @@ var ModelRatio = map[string]float64{
	"gpt-4-turbo-preview":     5,    // $0.01 / 1K tokens
	"gpt-4-turbo":             5,    // $0.01 / 1K tokens
	"gpt-4-turbo-2024-04-09":  5,    // $0.01 / 1K tokens
	"gpt-4o":                  2.5,  // $0.005 / 1K tokens
	"gpt-4o-2024-05-13":       2.5,  // $0.005 / 1K tokens
	"gpt-4-vision-preview":    5,    // $0.01 / 1K tokens
	"gpt-3.5-turbo":           0.25, // $0.0005 / 1K tokens
	"gpt-3.5-turbo-0301":      0.75,
@@ -137,6 +140,8 @@ var ModelRatio = map[string]float64{
	"Baichuan2-Turbo-192k": 0.016 * RMB,
	"Baichuan2-53B":        0.02 * RMB,
	// https://api.minimax.chat/document/price
	"abab6.5-chat":  0.03 * RMB,
	"abab6.5s-chat": 0.01 * RMB,
	"abab6-chat":    0.1 * RMB,
	"abab5.5-chat":  0.015 * RMB,
	"abab5.5s-chat": 0.005 * RMB,
@@ -147,11 +152,13 @@ var ModelRatio = map[string]float64{
	"mistral-medium-latest": 2.7 / 1000 * USD,
	"mistral-large-latest":  8.0 / 1000 * USD,
	"mistral-embed":         0.1 / 1000 * USD,
	// https://wow.groq.com/
	"llama2-70b-4096":    0.7 / 1000 * USD,
	"llama2-7b-2048":     0.1 / 1000 * USD,
	// https://wow.groq.com/#:~:text=inquiries%C2%A0here.-,Model,-Current%20Speed
	"llama3-70b-8192":    0.59 / 1000 * USD,
	"mixtral-8x7b-32768": 0.27 / 1000 * USD,
	"llama3-8b-8192":     0.05 / 1000 * USD,
	"gemma-7b-it":        0.1 / 1000 * USD,
	"llama2-70b-4096":    0.64 / 1000 * USD,
	"llama2-7b-2048":     0.1 / 1000 * USD,
	// https://platform.lingyiwanwu.com/docs#-计费单元
	"yi-34b-chat-0205": 2.5 / 1000 * RMB,
	"yi-34b-chat-200k": 12.0 / 1000 * RMB,
@@ -160,6 +167,20 @@ var ModelRatio = map[string]float64{
	"step-1v-32k": 0.024 * RMB,
	"step-1-32k":  0.024 * RMB,
	"step-1-200k": 0.15 * RMB,
	// https://cohere.com/pricing
	"command":               0.5,
	"command-nightly":       0.5,
	"command-light":         0.5,
	"command-light-nightly": 0.5,
	"command-r":             0.5 / 1000 * USD,
	"command-r-plus":        3.0 / 1000 * USD,
	// https://platform.deepseek.com/api-docs/pricing/
	"deepseek-chat":  1.0 / 1000 * RMB,
	"deepseek-coder": 1.0 / 1000 * RMB,
	// https://www.deepl.com/pro?cta=header-prices
	"deepl-zh": 25.0 / 1000 * USD,
	"deepl-en": 25.0 / 1000 * USD,
	"deepl-ja": 25.0 / 1000 * USD,
}

var CompletionRatio = map[string]float64{}
@@ -215,6 +236,9 @@ func GetModelRatio(name string) float64 {
	if strings.HasPrefix(name, "qwen-") && strings.HasSuffix(name, "-internet") {
		name = strings.TrimSuffix(name, "-internet")
	}
	if strings.HasPrefix(name, "command-") && strings.HasSuffix(name, "-internet") {
		name = strings.TrimSuffix(name, "-internet")
	}
	ratio, ok := ModelRatio[name]
	if !ok {
		ratio, ok = DefaultModelRatio[name]
@@ -258,7 +282,9 @@ func GetCompletionRatio(name string) float64 {
		return 4.0 / 3.0
	}
	if strings.HasPrefix(name, "gpt-4") {
		if strings.HasPrefix(name, "gpt-4-turbo") {
		if strings.HasPrefix(name, "gpt-4-turbo") ||
			strings.HasPrefix(name, "gpt-4o") ||
			strings.HasSuffix(name, "preview") {
			return 3
		}
		return 2
@@ -275,9 +301,22 @@ func GetCompletionRatio(name string) float64 {
	if strings.HasPrefix(name, "gemini-") {
		return 3
	}
	if strings.HasPrefix(name, "deepseek-") {
		return 2
	}
	switch name {
	case "llama2-70b-4096":
		return 0.8 / 0.7
		return 0.8 / 0.64
	case "llama3-8b-8192":
		return 2
	case "llama3-70b-8192":
		return 0.79 / 0.59
	case "command", "command-light", "command-nightly", "command-light-nightly":
		return 2
	case "command-r":
		return 3
	case "command-r-plus":
		return 5
	}
	return 1
}

@@ -35,6 +35,12 @@ const (
	LingYiWanWu
	StepFun
	AwsClaude

	Coze
	Cohere
	DeepSeek
	Cloudflare
	DeepL
	TogetherAI
	Doubao
	Dummy
)

@@ -27,6 +27,14 @@ func ToAPIType(channelType int) int {
		apiType = apitype.Ollama
	case AwsClaude:
		apiType = apitype.AwsClaude
	case Coze:
		apiType = apitype.Coze
	case Cohere:
		apiType = apitype.Cohere
	case Cloudflare:
		apiType = apitype.Cloudflare
	case DeepL:
		apiType = apitype.DeepL
	}

	return apiType

@@ -35,6 +35,13 @@ var ChannelBaseURLs = []string{
	"https://api.lingyiwanwu.com",               // 31
	"https://api.stepfun.com",                   // 32
	"",                                          // 33
	"https://api.coze.com",                      // 34
	"https://api.cohere.ai",                     // 35
	"https://api.deepseek.com",                  // 36
	"https://api.cloudflare.com",                // 37
	"https://api-free.deepl.com",                // 38
	"https://api.together.xyz",                  // 39
	"https://ark.cn-beijing.volces.com",         // 40
}

func init() {

@@ -1,24 +0,0 @@
package client

import (
	"github.com/songquanpeng/one-api/common/config"
	"net/http"
	"time"
)

var HTTPClient *http.Client
var ImpatientHTTPClient *http.Client

func init() {
	if config.RelayTimeout == 0 {
		HTTPClient = &http.Client{}
	} else {
		HTTPClient = &http.Client{
			Timeout: time.Duration(config.RelayTimeout) * time.Second,
		}
	}

	ImpatientHTTPClient = &http.Client{
		Timeout: 5 * time.Second,
	}
}
@@ -1,3 +1,5 @@
package constant

var StopFinishReason = "stop"
var StreamObject = "chat.completion.chunk"
var NonStreamObject = "chat.completion"

5  relay/constant/finishreason/define.go  Normal file
@@ -0,0 +1,5 @@
package finishreason

const (
	Stop = "stop"
)
5  relay/constant/role/define.go  Normal file
@@ -0,0 +1,5 @@
package role

const (
	Assistant = "assistant"
)
@@ -9,15 +9,16 @@ import (
	"fmt"
	"github.com/gin-gonic/gin"
	"github.com/songquanpeng/one-api/common"
	"github.com/songquanpeng/one-api/common/client"
	"github.com/songquanpeng/one-api/common/config"
	"github.com/songquanpeng/one-api/common/ctxkey"
	"github.com/songquanpeng/one-api/common/logger"
	"github.com/songquanpeng/one-api/model"
	"github.com/songquanpeng/one-api/relay/adaptor/azure"
	"github.com/songquanpeng/one-api/relay/adaptor/openai"
	"github.com/songquanpeng/one-api/relay/billing"
	billingratio "github.com/songquanpeng/one-api/relay/billing/ratio"
	"github.com/songquanpeng/one-api/relay/channeltype"
	"github.com/songquanpeng/one-api/relay/client"
	"github.com/songquanpeng/one-api/relay/meta"
	relaymodel "github.com/songquanpeng/one-api/relay/model"
	"github.com/songquanpeng/one-api/relay/relaymode"
	"io"
@@ -27,14 +28,15 @@ import (

func RelayAudioHelper(c *gin.Context, relayMode int) *relaymodel.ErrorWithStatusCode {
	ctx := c.Request.Context()
	meta := meta.GetByContext(c)
	audioModel := "whisper-1"

	tokenId := c.GetInt("token_id")
	channelType := c.GetInt("channel")
	channelId := c.GetInt("channel_id")
	userId := c.GetInt("id")
	group := c.GetString("group")
	tokenName := c.GetString("token_name")
	tokenId := c.GetInt(ctxkey.TokenId)
	channelType := c.GetInt(ctxkey.Channel)
	channelId := c.GetInt(ctxkey.ChannelId)
	userId := c.GetInt(ctxkey.Id)
	group := c.GetString(ctxkey.Group)
	tokenName := c.GetString(ctxkey.TokenName)

	var ttsRequest openai.TextToSpeechRequest
	if relayMode == relaymode.AudioSpeech {
@@ -107,7 +109,7 @@ func RelayAudioHelper(c *gin.Context, relayMode int) *relaymodel.ErrorWithStatus
	}()

	// map model name
	modelMapping := c.GetString("model_mapping")
	modelMapping := c.GetString(ctxkey.ModelMapping)
	if modelMapping != "" {
		modelMap := make(map[string]string)
		err := json.Unmarshal([]byte(modelMapping), &modelMap)
@@ -121,13 +123,13 @@ func RelayAudioHelper(c *gin.Context, relayMode int) *relaymodel.ErrorWithStatus

	baseURL := channeltype.ChannelBaseURLs[channelType]
	requestURL := c.Request.URL.String()
	if c.GetString("base_url") != "" {
		baseURL = c.GetString("base_url")
	if c.GetString(ctxkey.BaseURL) != "" {
		baseURL = c.GetString(ctxkey.BaseURL)
	}

	fullRequestURL := openai.GetFullRequestURL(baseURL, requestURL, channelType)
	if channelType == channeltype.Azure {
		apiVersion := azure.GetAPIVersion(c)
		apiVersion := meta.Config.APIVersion
		if relayMode == relaymode.AudioTranscription {
			// https://learn.microsoft.com/en-us/azure/ai-services/openai/whisper-quickstart?tabs=command-line#rest-api
			fullRequestURL = fmt.Sprintf("%s/openai/deployments/%s/audio/transcriptions?api-version=%s", baseURL, audioModel, apiVersion)

@@ -53,6 +53,16 @@ func (e GeneralErrorResponse) ToMessage() string {
}

func RelayErrorHandler(resp *http.Response) (ErrorWithStatusCode *model.ErrorWithStatusCode) {
	if resp == nil {
		return &model.ErrorWithStatusCode{
			StatusCode: 500,
			Error: model.Error{
				Message: "resp is nil",
				Type:    "upstream_error",
				Code:    "bad_response",
			},
		}
	}
	ErrorWithStatusCode = &model.ErrorWithStatusCode{
		StatusCode: resp.StatusCode,
		Error: model.Error{
Some files were not shown because too many files have changed in this diff.