Mirror of https://github.com/songquanpeng/one-api.git (synced 2025-11-04 15:53:42 +08:00)

Compare commits

11 Commits

v0.4.10-al ... v0.4.10-al
| Author | SHA1 | Date |
|---|---|---|
|  | fcc1e2d568 |  |
|  | 9a1db61675 |  |
|  | 3c940113ab |  |
|  | 0495b9a0d7 |  |
|  | 12a0e7105e |  |
|  | e628b643cd |  |
|  | 675847bf98 |  |
|  | 2ff15baf66 |  |
|  | 4139a7036f |  |
|  | 02da0b51f8 |  |
|  | 35cfebee12 |  |

README.md (17 changed lines)
```diff
@@ -61,6 +61,8 @@ _✨ All-in-one OpenAI API gateway, integrating various API access methods, ready to use out of the box_
 1. Supports multiple API access channels:
    + [x] Official OpenAI channel (a mirror can be configured)
    + [x] **Azure OpenAI API**
+   + [x] [Anthropic Claude series models](https://anthropic.com)
+   + [x] [Baidu Wenxin Yiyan (ERNIE) series models](https://cloud.baidu.com/doc/WENXINWORKSHOP/index.html)
    + [x] [API Distribute](https://api.gptjk.top/register?aff=QGxj)
    + [x] [OpenAI-SB](https://openai-sb.com)
    + [x] [API2D](https://api2d.com/r/197971)
@@ -81,18 +83,19 @@ _✨ All-in-one OpenAI API gateway, integrating various API access methods, ready to use out of the box_
 12. Supports displaying quota in US dollars.
 13. Supports publishing announcements, setting a top-up link, and setting the initial quota for new users.
 14. Supports model mapping to redirect the model a user requests.
-15. Supports the image generation endpoint.
-16. Supports rich **customization** options:
+15. Supports automatic retry on failure.
+16. Supports the image generation endpoint.
+17. Supports rich **customization** options:
     1. Custom system name, logo, and footer.
     2. Custom home and about pages, written in HTML & Markdown or embedded as a standalone web page via iframe.
-17. Supports accessing the management API through a system access token.
-18. Supports Cloudflare Turnstile user verification.
-19. Supports user management and **multiple login/registration methods**:
+18. Supports accessing the management API through a system access token.
+19. Supports Cloudflare Turnstile user verification.
+20. Supports user management and **multiple login/registration methods**:
     + Email registration/login and password reset via email.
     + [GitHub OAuth](https://github.com/settings/applications/new).
     + WeChat Official Account authorization (requires deploying [WeChat Server](https://github.com/songquanpeng/wechat-server) separately).
-20. Supports [ChatGLM](https://github.com/THUDM/ChatGLM2-6B).
-21. Other large models will be supported as soon as their APIs become available, wrapped in the same unified API.
+21. Supports [ChatGLM](https://github.com/THUDM/ChatGLM2-6B).
+22. Other large models will be supported as soon as their APIs become available, wrapped in the same unified API.
 
 ## Deployment
 ### Docker-based deployment
```
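The upshot of this change set is that a single OpenAI-style request format now reaches Claude and ERNIE models as well. A minimal client sketch, not part of the diff; the localhost address (one-api listens on port 3000 by default) and the token are placeholders:

```go
package main

import (
	"bytes"
	"encoding/json"
	"fmt"
	"io"
	"net/http"
)

func main() {
	// The same OpenAI-style payload works for every channel; one-api
	// routes "claude-instant-1" to the Anthropic channel internally.
	payload, _ := json.Marshal(map[string]any{
		"model": "claude-instant-1",
		"messages": []map[string]string{
			{"role": "user", "content": "Hello!"},
		},
	})
	req, _ := http.NewRequest("POST",
		"http://localhost:3000/v1/chat/completions", bytes.NewReader(payload))
	req.Header.Set("Authorization", "Bearer sk-your-one-api-token") // placeholder
	req.Header.Set("Content-Type", "application/json")
	resp, err := http.DefaultClient.Do(req)
	if err != nil {
		panic(err)
	}
	defer resp.Body.Close()
	body, _ := io.ReadAll(resp.Body)
	fmt.Println(string(body))
}
```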
common/constants.go

```diff
@@ -68,6 +68,7 @@ var AutomaticDisableChannelEnabled = false
 var QuotaRemindThreshold = 1000
 var PreConsumedQuota = 500
 var ApproximateTokenEnabled = false
+var RetryTimes = 0
 
 var RootUserEmail = ""
 
@@ -150,6 +151,8 @@ const (
 	ChannelTypePaLM      = 11
 	ChannelTypeAPI2GPT   = 12
 	ChannelTypeAIGC2D    = 13
+	ChannelTypeAnthropic = 14
+	ChannelTypeBaidu     = 15
 )
 
 var ChannelBaseURLs = []string{
@@ -167,4 +170,6 @@ var ChannelBaseURLs = []string{
 	"",                              // 11
 	"https://api.api2gpt.com",       // 12
 	"https://api.aigc2d.com",        // 13
+	"https://api.anthropic.com",     // 14
+	"https://aip.baidubce.com",      // 15
 }
```
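As a quick illustration (a sketch, not from the diff): the channel type constants double as indexes into the parallel ChannelBaseURLs slice, which is why the two new entries carry the `// 14` and `// 15` comments.

```go
// Hypothetical lookup showing the index convention.
baseURL := common.ChannelBaseURLs[common.ChannelTypeBaidu] // "https://aip.baidubce.com"
```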
common/model-ratio.go

```diff
@@ -4,6 +4,7 @@ import "encoding/json"
 
 // ModelRatio
 // https://platform.openai.com/docs/models/model-endpoint-compatibility
+// https://cloud.baidu.com/doc/WENXINWORKSHOP/s/Blfmc9dlf
 // https://openai.com/pricing
 // TODO: when a new api is enabled, check the pricing here
 // 1 === $0.002 / 1K tokens
@@ -36,6 +37,10 @@ var ModelRatio = map[string]float64{
 	"text-moderation-stable":  0.1,
 	"text-moderation-latest":  0.1,
 	"dall-e":                  8,
+	"claude-instant-1":        0.75,
+	"claude-2":                30,
+	"ERNIE-Bot":               1,    // 0.012元/千tokens
+	"ERNIE-Bot-turbo":         0.67, // 0.008元/千tokens
 }
 
 func ModelRatio2JSONString() string {
```
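To make the ratio convention concrete: per the comment above, a ratio of 1 corresponds to $0.002 per 1K tokens, so claude-2 at ratio 30 works out to $0.06 per 1K tokens. A hypothetical helper, not part of the repo:

```go
// dollarCost illustrates the "1 === $0.002 / 1K tokens" convention.
func dollarCost(ratio float64, tokens int) float64 {
	return ratio * 0.002 * float64(tokens) / 1000
}
```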
controller/billing.go

```diff
@@ -7,16 +7,19 @@ import (
 )
 
 func GetSubscription(c *gin.Context) {
-	var quota int
+	var remainQuota int
+	var usedQuota int
 	var err error
 	var token *model.Token
 	if common.DisplayTokenStatEnabled {
 		tokenId := c.GetInt("token_id")
 		token, err = model.GetTokenById(tokenId)
-		quota = token.RemainQuota
+		remainQuota = token.RemainQuota
+		usedQuota = token.UsedQuota
 	} else {
 		userId := c.GetInt("id")
-		quota, err = model.GetUserQuota(userId)
+		remainQuota, err = model.GetUserQuota(userId)
+		usedQuota, err = model.GetUserUsedQuota(userId)
 	}
 	if err != nil {
 		openAIError := OpenAIError{
@@ -28,6 +31,7 @@ func GetSubscription(c *gin.Context) {
 		})
 		return
 	}
+	quota := remainQuota + usedQuota
 	amount := float64(quota)
 	if common.DisplayInCurrencyEnabled {
 		amount /= common.QuotaPerUnit
```
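In other words, the subscription's reported hard limit is now the lifetime quota (remaining plus used) rather than just the remaining balance. A worked example as a sketch:

```go
// exampleHardLimit is a hypothetical illustration of the arithmetic above,
// assuming one-api's default QuotaPerUnit of 500000 (quota units per dollar).
func exampleHardLimit() float64 {
	remainQuota, usedQuota := 400000, 100000
	quota := remainQuota + usedQuota // lifetime quota, not just the balance
	return float64(quota) / 500000   // -> 1.0, displayed as a $1.00 hard limit
}
```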
controller/channel-test.go

```diff
@@ -14,7 +14,7 @@ import (
 	"time"
 )
 
-func testChannel(channel *model.Channel, request ChatRequest) error {
+func testChannel(channel *model.Channel, request ChatRequest) (error, *OpenAIError) {
 	switch channel.Type {
 	case common.ChannelTypeAzure:
 		request.Model = "gpt-35-turbo"
@@ -33,11 +33,11 @@ func testChannel(channel *model.Channel, request ChatRequest) error {
 	jsonData, err := json.Marshal(request)
 	if err != nil {
-		return err
+		return err, nil
 	}
 	req, err := http.NewRequest("POST", requestURL, bytes.NewBuffer(jsonData))
 	if err != nil {
-		return err
+		return err, nil
 	}
 	if channel.Type == common.ChannelTypeAzure {
 		req.Header.Set("api-key", channel.Key)
@@ -48,18 +48,18 @@ func testChannel(channel *model.Channel, request ChatRequest) error {
 	client := &http.Client{}
 	resp, err := client.Do(req)
 	if err != nil {
-		return err
+		return err, nil
 	}
 	defer resp.Body.Close()
 	var response TextResponse
 	err = json.NewDecoder(resp.Body).Decode(&response)
 	if err != nil {
-		return err
+		return err, nil
 	}
 	if response.Usage.CompletionTokens == 0 {
-		return errors.New(fmt.Sprintf("type %s, code %v, message %s", response.Error.Type, response.Error.Code, response.Error.Message))
+		return errors.New(fmt.Sprintf("type %s, code %v, message %s", response.Error.Type, response.Error.Code, response.Error.Message)), &response.Error
 	}
-	return nil
+	return nil, nil
 }
 
 func buildTestRequest() *ChatRequest {
@@ -94,7 +94,7 @@ func TestChannel(c *gin.Context) {
 	}
 	testRequest := buildTestRequest()
 	tik := time.Now()
-	err = testChannel(channel, *testRequest)
+	err, _ = testChannel(channel, *testRequest)
 	tok := time.Now()
 	milliseconds := tok.Sub(tik).Milliseconds()
 	go channel.UpdateResponseTime(milliseconds)
@@ -158,13 +158,14 @@ func testAllChannels(notify bool) error {
 				continue
 			}
 			tik := time.Now()
-			err := testChannel(channel, *testRequest)
+			err, openaiErr := testChannel(channel, *testRequest)
 			tok := time.Now()
 			milliseconds := tok.Sub(tik).Milliseconds()
-			if err != nil || milliseconds > disableThreshold {
-				if milliseconds > disableThreshold {
-					err = errors.New(fmt.Sprintf("响应时间 %.2fs 超过阈值 %.2fs", float64(milliseconds)/1000.0, float64(disableThreshold)/1000.0))
-				}
+			if milliseconds > disableThreshold {
+				err = errors.New(fmt.Sprintf("响应时间 %.2fs 超过阈值 %.2fs", float64(milliseconds)/1000.0, float64(disableThreshold)/1000.0))
 				disableChannel(channel.Id, channel.Name, err.Error())
 			}
+			if shouldDisableChannel(openaiErr) {
+				disableChannel(channel.Id, channel.Name, err.Error())
+			}
 			channel.UpdateResponseTime(milliseconds)
```
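The new return pair separates transport failures from structured upstream errors. A sketch of the intended calling pattern inside the controller package (essentially what testAllChannels above now does):

```go
// The plain error is for logging/notification, while the *OpenAIError
// carries the upstream type/code that shouldDisableChannel inspects
// (nil-safe, see relay-utils.go below).
err, openaiErr := testChannel(channel, *testRequest)
if err != nil {
	common.SysError("channel test failed: " + err.Error())
}
if shouldDisableChannel(openaiErr) {
	disableChannel(channel.Id, channel.Name, err.Error())
}
```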
controller/model.go

```diff
@@ -270,6 +270,42 @@ func init() {
 			Root:       "ChatGLM2",
 			Parent:     nil,
 		},
+		{
+			Id:         "claude-instant-1",
+			Object:     "model",
+			Created:    1677649963,
+			OwnedBy:    "anthropic",
+			Permission: permission,
+			Root:       "claude-instant-1",
+			Parent:     nil,
+		},
+		{
+			Id:         "claude-2",
+			Object:     "model",
+			Created:    1677649963,
+			OwnedBy:    "anthropic",
+			Permission: permission,
+			Root:       "claude-2",
+			Parent:     nil,
+		},
+		{
+			Id:         "ERNIE-Bot",
+			Object:     "model",
+			Created:    1677649963,
+			OwnedBy:    "baidu",
+			Permission: permission,
+			Root:       "ERNIE-Bot",
+			Parent:     nil,
+		},
+		{
+			Id:         "ERNIE-Bot-turbo",
+			Object:     "model",
+			Created:    1677649963,
+			OwnedBy:    "baidu",
+			Permission: permission,
+			Root:       "ERNIE-Bot-turbo",
+			Parent:     nil,
+		},
 	}
 	openAIModelsMap = make(map[string]OpenAIModels)
 	for _, model := range openAIModels {
```
controller/relay-baidu.go (new file, 203 lines)

```go
package controller

import (
	"bufio"
	"encoding/json"
	"github.com/gin-gonic/gin"
	"io"
	"net/http"
	"one-api/common"
	"strings"
)

// https://cloud.baidu.com/doc/WENXINWORKSHOP/s/flfmc9do2

type BaiduTokenResponse struct {
	RefreshToken  string `json:"refresh_token"`
	ExpiresIn     int    `json:"expires_in"`
	SessionKey    string `json:"session_key"`
	AccessToken   string `json:"access_token"`
	Scope         string `json:"scope"`
	SessionSecret string `json:"session_secret"`
}

type BaiduMessage struct {
	Role    string `json:"role"`
	Content string `json:"content"`
}

type BaiduChatRequest struct {
	Messages []BaiduMessage `json:"messages"`
	Stream   bool           `json:"stream"`
	UserId   string         `json:"user_id,omitempty"`
}

type BaiduError struct {
	ErrorCode int    `json:"error_code"`
	ErrorMsg  string `json:"error_msg"`
}

type BaiduChatResponse struct {
	Id               string `json:"id"`
	Object           string `json:"object"`
	Created          int64  `json:"created"`
	Result           string `json:"result"`
	IsTruncated      bool   `json:"is_truncated"`
	NeedClearHistory bool   `json:"need_clear_history"`
	Usage            Usage  `json:"usage"`
	BaiduError
}

type BaiduChatStreamResponse struct {
	BaiduChatResponse
	SentenceId int  `json:"sentence_id"`
	IsEnd      bool `json:"is_end"`
}

func requestOpenAI2Baidu(request GeneralOpenAIRequest) *BaiduChatRequest {
	messages := make([]BaiduMessage, 0, len(request.Messages))
	for _, message := range request.Messages {
		messages = append(messages, BaiduMessage{
			Role:    message.Role,
			Content: message.Content,
		})
	}
	return &BaiduChatRequest{
		Messages: messages,
		Stream:   request.Stream,
	}
}

func responseBaidu2OpenAI(response *BaiduChatResponse) *OpenAITextResponse {
	choice := OpenAITextResponseChoice{
		Index: 0,
		Message: Message{
			Role:    "assistant",
			Content: response.Result,
		},
		FinishReason: "stop",
	}
	fullTextResponse := OpenAITextResponse{
		Id:      response.Id,
		Object:  "chat.completion",
		Created: response.Created,
		Choices: []OpenAITextResponseChoice{choice},
		Usage:   response.Usage,
	}
	return &fullTextResponse
}

func streamResponseBaidu2OpenAI(baiduResponse *BaiduChatStreamResponse) *ChatCompletionsStreamResponse {
	var choice ChatCompletionsStreamResponseChoice
	choice.Delta.Content = baiduResponse.Result
	choice.FinishReason = "stop"
	response := ChatCompletionsStreamResponse{
		Id:      baiduResponse.Id,
		Object:  "chat.completion.chunk",
		Created: baiduResponse.Created,
		Model:   "ernie-bot",
		Choices: []ChatCompletionsStreamResponseChoice{choice},
	}
	return &response
}

func baiduStreamHandler(c *gin.Context, resp *http.Response) (*OpenAIErrorWithStatusCode, *Usage) {
	var usage Usage
	scanner := bufio.NewScanner(resp.Body)
	scanner.Split(func(data []byte, atEOF bool) (advance int, token []byte, err error) {
		if atEOF && len(data) == 0 {
			return 0, nil, nil
		}
		if i := strings.Index(string(data), "\n"); i >= 0 {
			return i + 1, data[0:i], nil
		}
		if atEOF {
			return len(data), data, nil
		}
		return 0, nil, nil
	})
	dataChan := make(chan string)
	stopChan := make(chan bool)
	go func() {
		for scanner.Scan() {
			data := scanner.Text()
			if len(data) < 6 { // ignore blank line or wrong format
				continue
			}
			data = data[6:]
			dataChan <- data
		}
		stopChan <- true
	}()
	c.Writer.Header().Set("Content-Type", "text/event-stream")
	c.Writer.Header().Set("Cache-Control", "no-cache")
	c.Writer.Header().Set("Connection", "keep-alive")
	c.Writer.Header().Set("Transfer-Encoding", "chunked")
	c.Writer.Header().Set("X-Accel-Buffering", "no")
	c.Stream(func(w io.Writer) bool {
		select {
		case data := <-dataChan:
			var baiduResponse BaiduChatStreamResponse
			err := json.Unmarshal([]byte(data), &baiduResponse)
			if err != nil {
				common.SysError("error unmarshalling stream response: " + err.Error())
				return true
			}
			usage.PromptTokens += baiduResponse.Usage.PromptTokens
			usage.CompletionTokens += baiduResponse.Usage.CompletionTokens
			usage.TotalTokens += baiduResponse.Usage.TotalTokens
			response := streamResponseBaidu2OpenAI(&baiduResponse)
			jsonResponse, err := json.Marshal(response)
			if err != nil {
				common.SysError("error marshalling stream response: " + err.Error())
				return true
			}
			c.Render(-1, common.CustomEvent{Data: "data: " + string(jsonResponse)})
			return true
		case <-stopChan:
			c.Render(-1, common.CustomEvent{Data: "data: [DONE]"})
			return false
		}
	})
	err := resp.Body.Close()
	if err != nil {
		return errorWrapper(err, "close_response_body_failed", http.StatusInternalServerError), nil
	}
	return nil, &usage
}

func baiduHandler(c *gin.Context, resp *http.Response) (*OpenAIErrorWithStatusCode, *Usage) {
	var baiduResponse BaiduChatResponse
	responseBody, err := io.ReadAll(resp.Body)
	if err != nil {
		return errorWrapper(err, "read_response_body_failed", http.StatusInternalServerError), nil
	}
	err = resp.Body.Close()
	if err != nil {
		return errorWrapper(err, "close_response_body_failed", http.StatusInternalServerError), nil
	}
	err = json.Unmarshal(responseBody, &baiduResponse)
	if err != nil {
		return errorWrapper(err, "unmarshal_response_body_failed", http.StatusInternalServerError), nil
	}
	if baiduResponse.ErrorMsg != "" {
		return &OpenAIErrorWithStatusCode{
			OpenAIError: OpenAIError{
				Message: baiduResponse.ErrorMsg,
				Type:    "baidu_error",
				Param:   "",
				Code:    baiduResponse.ErrorCode,
			},
			StatusCode: resp.StatusCode,
		}, nil
	}
	fullTextResponse := responseBaidu2OpenAI(&baiduResponse)
	jsonResponse, err := json.Marshal(fullTextResponse)
	if err != nil {
		return errorWrapper(err, "marshal_response_body_failed", http.StatusInternalServerError), nil
	}
	c.Writer.Header().Set("Content-Type", "application/json")
	c.Writer.WriteHeader(resp.StatusCode)
	_, err = c.Writer.Write(jsonResponse)
	return nil, &fullTextResponse.Usage
}
```
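The access token this relay consumes (the `?access_token=` query parameter built in relay-text.go below) is not minted here; per Baidu's documentation it comes from the OAuth client-credentials endpoint and, as the TODO in relay-text.go notes, expires after 30 days. A hypothetical helper sketch using the BaiduTokenResponse type above (apiKey and secretKey are placeholders):

```go
// getBaiduAccessToken is a sketch, not part of the diff: it exchanges the
// application's API key and secret key for an access token via Baidu's
// OAuth endpoint.
func getBaiduAccessToken(apiKey string, secretKey string) (string, error) {
	url := "https://aip.baidubce.com/oauth/2.0/token?grant_type=client_credentials" +
		"&client_id=" + apiKey + "&client_secret=" + secretKey
	resp, err := http.Post(url, "application/json", nil)
	if err != nil {
		return "", err
	}
	defer resp.Body.Close()
	var tokenResponse BaiduTokenResponse
	if err := json.NewDecoder(resp.Body).Decode(&tokenResponse); err != nil {
		return "", err
	}
	return tokenResponse.AccessToken, nil
}
```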
controller/relay-claude.go (new file, 221 lines)

```go
package controller

import (
	"bufio"
	"encoding/json"
	"fmt"
	"github.com/gin-gonic/gin"
	"io"
	"net/http"
	"one-api/common"
	"strings"
)

type ClaudeMetadata struct {
	UserId string `json:"user_id"`
}

type ClaudeRequest struct {
	Model             string   `json:"model"`
	Prompt            string   `json:"prompt"`
	MaxTokensToSample int      `json:"max_tokens_to_sample"`
	StopSequences     []string `json:"stop_sequences,omitempty"`
	Temperature       float64  `json:"temperature,omitempty"`
	TopP              float64  `json:"top_p,omitempty"`
	TopK              int      `json:"top_k,omitempty"`
	//ClaudeMetadata    `json:"metadata,omitempty"`
	Stream bool `json:"stream,omitempty"`
}

type ClaudeError struct {
	Type    string `json:"type"`
	Message string `json:"message"`
}

type ClaudeResponse struct {
	Completion string      `json:"completion"`
	StopReason string      `json:"stop_reason"`
	Model      string      `json:"model"`
	Error      ClaudeError `json:"error"`
}

func stopReasonClaude2OpenAI(reason string) string {
	switch reason {
	case "stop_sequence":
		return "stop"
	case "max_tokens":
		return "length"
	default:
		return reason
	}
}

func requestOpenAI2Claude(textRequest GeneralOpenAIRequest) *ClaudeRequest {
	claudeRequest := ClaudeRequest{
		Model:             textRequest.Model,
		Prompt:            "",
		MaxTokensToSample: textRequest.MaxTokens,
		StopSequences:     nil,
		Temperature:       textRequest.Temperature,
		TopP:              textRequest.TopP,
		Stream:            textRequest.Stream,
	}
	if claudeRequest.MaxTokensToSample == 0 {
		claudeRequest.MaxTokensToSample = 1000000
	}
	prompt := ""
	for _, message := range textRequest.Messages {
		if message.Role == "user" {
			prompt += fmt.Sprintf("\n\nHuman: %s", message.Content)
		} else if message.Role == "assistant" {
			prompt += fmt.Sprintf("\n\nAssistant: %s", message.Content)
		} else {
			// ignore other roles
		}
		prompt += "\n\nAssistant:"
	}
	claudeRequest.Prompt = prompt
	return &claudeRequest
}

func streamResponseClaude2OpenAI(claudeResponse *ClaudeResponse) *ChatCompletionsStreamResponse {
	var choice ChatCompletionsStreamResponseChoice
	choice.Delta.Content = claudeResponse.Completion
	choice.FinishReason = stopReasonClaude2OpenAI(claudeResponse.StopReason)
	var response ChatCompletionsStreamResponse
	response.Object = "chat.completion.chunk"
	response.Model = claudeResponse.Model
	response.Choices = []ChatCompletionsStreamResponseChoice{choice}
	return &response
}

func responseClaude2OpenAI(claudeResponse *ClaudeResponse) *OpenAITextResponse {
	choice := OpenAITextResponseChoice{
		Index: 0,
		Message: Message{
			Role:    "assistant",
			Content: strings.TrimPrefix(claudeResponse.Completion, " "),
			Name:    nil,
		},
		FinishReason: stopReasonClaude2OpenAI(claudeResponse.StopReason),
	}
	fullTextResponse := OpenAITextResponse{
		Id:      fmt.Sprintf("chatcmpl-%s", common.GetUUID()),
		Object:  "chat.completion",
		Created: common.GetTimestamp(),
		Choices: []OpenAITextResponseChoice{choice},
	}
	return &fullTextResponse
}

func claudeStreamHandler(c *gin.Context, resp *http.Response) (*OpenAIErrorWithStatusCode, string) {
	responseText := ""
	responseId := fmt.Sprintf("chatcmpl-%s", common.GetUUID())
	createdTime := common.GetTimestamp()
	scanner := bufio.NewScanner(resp.Body)
	scanner.Split(func(data []byte, atEOF bool) (advance int, token []byte, err error) {
		if atEOF && len(data) == 0 {
			return 0, nil, nil
		}
		if i := strings.Index(string(data), "\r\n\r\n"); i >= 0 {
			return i + 4, data[0:i], nil
		}
		if atEOF {
			return len(data), data, nil
		}
		return 0, nil, nil
	})
	dataChan := make(chan string)
	stopChan := make(chan bool)
	go func() {
		for scanner.Scan() {
			data := scanner.Text()
			if !strings.HasPrefix(data, "event: completion") {
				continue
			}
			data = strings.TrimPrefix(data, "event: completion\r\ndata: ")
			dataChan <- data
		}
		stopChan <- true
	}()
	c.Writer.Header().Set("Content-Type", "text/event-stream")
	c.Writer.Header().Set("Cache-Control", "no-cache")
	c.Writer.Header().Set("Connection", "keep-alive")
	c.Writer.Header().Set("Transfer-Encoding", "chunked")
	c.Writer.Header().Set("X-Accel-Buffering", "no")
	c.Stream(func(w io.Writer) bool {
		select {
		case data := <-dataChan:
			// some implementations may add \r at the end of data
			data = strings.TrimSuffix(data, "\r")
			var claudeResponse ClaudeResponse
			err := json.Unmarshal([]byte(data), &claudeResponse)
			if err != nil {
				common.SysError("error unmarshalling stream response: " + err.Error())
				return true
			}
			responseText += claudeResponse.Completion
			response := streamResponseClaude2OpenAI(&claudeResponse)
			response.Id = responseId
			response.Created = createdTime
			jsonStr, err := json.Marshal(response)
			if err != nil {
				common.SysError("error marshalling stream response: " + err.Error())
				return true
			}
			c.Render(-1, common.CustomEvent{Data: "data: " + string(jsonStr)})
			return true
		case <-stopChan:
			c.Render(-1, common.CustomEvent{Data: "data: [DONE]"})
			return false
		}
	})
	err := resp.Body.Close()
	if err != nil {
		return errorWrapper(err, "close_response_body_failed", http.StatusInternalServerError), ""
	}
	return nil, responseText
}

func claudeHandler(c *gin.Context, resp *http.Response, promptTokens int, model string) (*OpenAIErrorWithStatusCode, *Usage) {
	responseBody, err := io.ReadAll(resp.Body)
	if err != nil {
		return errorWrapper(err, "read_response_body_failed", http.StatusInternalServerError), nil
	}
	err = resp.Body.Close()
	if err != nil {
		return errorWrapper(err, "close_response_body_failed", http.StatusInternalServerError), nil
	}
	var claudeResponse ClaudeResponse
	err = json.Unmarshal(responseBody, &claudeResponse)
	if err != nil {
		return errorWrapper(err, "unmarshal_response_body_failed", http.StatusInternalServerError), nil
	}
	if claudeResponse.Error.Type != "" {
		return &OpenAIErrorWithStatusCode{
			OpenAIError: OpenAIError{
				Message: claudeResponse.Error.Message,
				Type:    claudeResponse.Error.Type,
				Param:   "",
				Code:    claudeResponse.Error.Type,
			},
			StatusCode: resp.StatusCode,
		}, nil
	}
	fullTextResponse := responseClaude2OpenAI(&claudeResponse)
	completionTokens := countTokenText(claudeResponse.Completion, model)
	usage := Usage{
		PromptTokens:     promptTokens,
		CompletionTokens: completionTokens,
		TotalTokens:      promptTokens + completionTokens,
	}
	fullTextResponse.Usage = usage
	jsonResponse, err := json.Marshal(fullTextResponse)
	if err != nil {
		return errorWrapper(err, "marshal_response_body_failed", http.StatusInternalServerError), nil
	}
	c.Writer.Header().Set("Content-Type", "application/json")
	c.Writer.WriteHeader(resp.StatusCode)
	_, err = c.Writer.Write(jsonResponse)
	return nil, &usage
}
```
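For a concrete picture of the conversion, a sketch using the types above. Note that requestOpenAI2Claude appends "\n\nAssistant:" after every message, so a single user message yields exactly one trailing assistant turn marker:

```go
// Sketch: a single-turn OpenAI-style request converted for the
// Anthropic text-completion API.
req := GeneralOpenAIRequest{
	Model:    "claude-2",
	Messages: []Message{{Role: "user", Content: "What is Go?"}},
}
claudeReq := requestOpenAI2Claude(req)
// claudeReq.Prompt == "\n\nHuman: What is Go?\n\nAssistant:"
```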
controller/relay-openai.go (new file, 133 lines)

```go
package controller

import (
	"bufio"
	"bytes"
	"encoding/json"
	"github.com/gin-gonic/gin"
	"io"
	"net/http"
	"one-api/common"
	"strings"
)

func openaiStreamHandler(c *gin.Context, resp *http.Response, relayMode int) (*OpenAIErrorWithStatusCode, string) {
	responseText := ""
	scanner := bufio.NewScanner(resp.Body)
	scanner.Split(func(data []byte, atEOF bool) (advance int, token []byte, err error) {
		if atEOF && len(data) == 0 {
			return 0, nil, nil
		}
		if i := strings.Index(string(data), "\n"); i >= 0 {
			return i + 1, data[0:i], nil
		}
		if atEOF {
			return len(data), data, nil
		}
		return 0, nil, nil
	})
	dataChan := make(chan string)
	stopChan := make(chan bool)
	go func() {
		for scanner.Scan() {
			data := scanner.Text()
			if len(data) < 6 { // ignore blank line or wrong format
				continue
			}
			dataChan <- data
			data = data[6:]
			if !strings.HasPrefix(data, "[DONE]") {
				switch relayMode {
				case RelayModeChatCompletions:
					var streamResponse ChatCompletionsStreamResponse
					err := json.Unmarshal([]byte(data), &streamResponse)
					if err != nil {
						common.SysError("error unmarshalling stream response: " + err.Error())
						return
					}
					for _, choice := range streamResponse.Choices {
						responseText += choice.Delta.Content
					}
				case RelayModeCompletions:
					var streamResponse CompletionsStreamResponse
					err := json.Unmarshal([]byte(data), &streamResponse)
					if err != nil {
						common.SysError("error unmarshalling stream response: " + err.Error())
						return
					}
					for _, choice := range streamResponse.Choices {
						responseText += choice.Text
					}
				}
			}
		}
		stopChan <- true
	}()
	c.Writer.Header().Set("Content-Type", "text/event-stream")
	c.Writer.Header().Set("Cache-Control", "no-cache")
	c.Writer.Header().Set("Connection", "keep-alive")
	c.Writer.Header().Set("Transfer-Encoding", "chunked")
	c.Writer.Header().Set("X-Accel-Buffering", "no")
	c.Stream(func(w io.Writer) bool {
		select {
		case data := <-dataChan:
			if strings.HasPrefix(data, "data: [DONE]") {
				data = data[:12]
			}
			// some implementations may add \r at the end of data
			data = strings.TrimSuffix(data, "\r")
			c.Render(-1, common.CustomEvent{Data: data})
			return true
		case <-stopChan:
			return false
		}
	})
	err := resp.Body.Close()
	if err != nil {
		return errorWrapper(err, "close_response_body_failed", http.StatusInternalServerError), ""
	}
	return nil, responseText
}

func openaiHandler(c *gin.Context, resp *http.Response, consumeQuota bool) (*OpenAIErrorWithStatusCode, *Usage) {
	var textResponse TextResponse
	if consumeQuota {
		responseBody, err := io.ReadAll(resp.Body)
		if err != nil {
			return errorWrapper(err, "read_response_body_failed", http.StatusInternalServerError), nil
		}
		err = resp.Body.Close()
		if err != nil {
			return errorWrapper(err, "close_response_body_failed", http.StatusInternalServerError), nil
		}
		err = json.Unmarshal(responseBody, &textResponse)
		if err != nil {
			return errorWrapper(err, "unmarshal_response_body_failed", http.StatusInternalServerError), nil
		}
		if textResponse.Error.Type != "" {
			return &OpenAIErrorWithStatusCode{
				OpenAIError: textResponse.Error,
				StatusCode:  resp.StatusCode,
			}, nil
		}
		// Reset response body
		resp.Body = io.NopCloser(bytes.NewBuffer(responseBody))
	}
	// We shouldn't set the header before we parse the response body, because the parse part may fail.
	// And then we will have to send an error response, but in this case, the header has already been set.
	// So the client will be confused by the response.
	// For example, Postman will report error, and we cannot check the response at all.
	for k, v := range resp.Header {
		c.Writer.Header().Set(k, v[0])
	}
	c.Writer.WriteHeader(resp.StatusCode)
	_, err := io.Copy(c.Writer, resp.Body)
	if err != nil {
		return errorWrapper(err, "copy_response_body_failed", http.StatusInternalServerError), nil
	}
	err = resp.Body.Close()
	if err != nil {
		return errorWrapper(err, "close_response_body_failed", http.StatusInternalServerError), nil
	}
	return nil, &textResponse.Usage
}
```
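For reference, a sketch of the wire format these handlers consume (not from the diff): each event line is the 6-byte prefix "data: " followed by a JSON chunk, which is why the code slices data[6:], and the stream ends with "data: [DONE]".

```go
// Hypothetical sample of an upstream SSE stream.
const sampleStream = "data: {\"choices\":[{\"delta\":{\"content\":\"Hel\"}}]}\n" +
	"data: {\"choices\":[{\"delta\":{\"content\":\"lo\"}}]}\n" +
	"data: [DONE]\n"
```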
controller/relay-text.go

```diff
@@ -1,7 +1,6 @@
 package controller
 
 import (
-	"bufio"
 	"bytes"
 	"encoding/json"
 	"errors"
@@ -15,6 +14,13 @@ import (
 	"github.com/gin-gonic/gin"
 )
 
+const (
+	APITypeOpenAI = iota
+	APITypeClaude
+	APITypePaLM
+	APITypeBaidu
+)
+
 func relayTextHelper(c *gin.Context, relayMode int) *OpenAIErrorWithStatusCode {
 	channelType := c.GetInt("channel")
 	tokenId := c.GetInt("token_id")
@@ -71,33 +77,56 @@ func relayTextHelper(c *gin.Context, relayMode int) *OpenAIErrorWithStatusCode {
 			isModelMapped = true
 		}
 	}
+	apiType := APITypeOpenAI
+	if strings.HasPrefix(textRequest.Model, "claude") {
+		apiType = APITypeClaude
+	} else if strings.HasPrefix(textRequest.Model, "ERNIE") {
+		apiType = APITypeBaidu
+	}
 	baseURL := common.ChannelBaseURLs[channelType]
 	requestURL := c.Request.URL.String()
 	if c.GetString("base_url") != "" {
 		baseURL = c.GetString("base_url")
 	}
 	fullRequestURL := fmt.Sprintf("%s%s", baseURL, requestURL)
-	if channelType == common.ChannelTypeAzure {
-		// https://learn.microsoft.com/en-us/azure/cognitive-services/openai/chatgpt-quickstart?pivots=rest-api&tabs=command-line#rest-api
-		query := c.Request.URL.Query()
-		apiVersion := query.Get("api-version")
-		if apiVersion == "" {
-			apiVersion = c.GetString("api_version")
-		}
-		requestURL := strings.Split(requestURL, "?")[0]
-		requestURL = fmt.Sprintf("%s?api-version=%s", requestURL, apiVersion)
-		baseURL = c.GetString("base_url")
-		task := strings.TrimPrefix(requestURL, "/v1/")
-		model_ := textRequest.Model
-		model_ = strings.Replace(model_, ".", "", -1)
-		// https://github.com/songquanpeng/one-api/issues/67
-		model_ = strings.TrimSuffix(model_, "-0301")
-		model_ = strings.TrimSuffix(model_, "-0314")
-		model_ = strings.TrimSuffix(model_, "-0613")
-		fullRequestURL = fmt.Sprintf("%s/openai/deployments/%s/%s", baseURL, model_, task)
-	} else if channelType == common.ChannelTypePaLM {
-		err := relayPaLM(textRequest, c)
-		return err
+	switch apiType {
+	case APITypeOpenAI:
+		if channelType == common.ChannelTypeAzure {
+			// https://learn.microsoft.com/en-us/azure/cognitive-services/openai/chatgpt-quickstart?pivots=rest-api&tabs=command-line#rest-api
+			query := c.Request.URL.Query()
+			apiVersion := query.Get("api-version")
+			if apiVersion == "" {
+				apiVersion = c.GetString("api_version")
+			}
+			requestURL := strings.Split(requestURL, "?")[0]
+			requestURL = fmt.Sprintf("%s?api-version=%s", requestURL, apiVersion)
+			baseURL = c.GetString("base_url")
+			task := strings.TrimPrefix(requestURL, "/v1/")
+			model_ := textRequest.Model
+			model_ = strings.Replace(model_, ".", "", -1)
+			// https://github.com/songquanpeng/one-api/issues/67
+			model_ = strings.TrimSuffix(model_, "-0301")
+			model_ = strings.TrimSuffix(model_, "-0314")
+			model_ = strings.TrimSuffix(model_, "-0613")
+			fullRequestURL = fmt.Sprintf("%s/openai/deployments/%s/%s", baseURL, model_, task)
+		}
+	case APITypeClaude:
+		fullRequestURL = "https://api.anthropic.com/v1/complete"
+		if baseURL != "" {
+			fullRequestURL = fmt.Sprintf("%s/v1/complete", baseURL)
+		}
+	case APITypeBaidu:
+		switch textRequest.Model {
+		case "ERNIE-Bot":
+			fullRequestURL = "https://aip.baidubce.com/rpc/2.0/ai_custom/v1/wenxinworkshop/chat/completions"
+		case "ERNIE-Bot-turbo":
+			fullRequestURL = "https://aip.baidubce.com/rpc/2.0/ai_custom/v1/wenxinworkshop/chat/eb-instant"
+		case "BLOOMZ-7B":
+			fullRequestURL = "https://aip.baidubce.com/rpc/2.0/ai_custom/v1/wenxinworkshop/chat/bloomz_7b1"
+		}
+		apiKey := c.Request.Header.Get("Authorization")
+		apiKey = strings.TrimPrefix(apiKey, "Bearer ")
+		fullRequestURL += "?access_token=" + apiKey // TODO: access token expire in 30 days
 	}
 	var promptTokens int
 	var completionTokens int
@@ -142,16 +171,42 @@ func relayTextHelper(c *gin.Context, relayMode int) *OpenAIErrorWithStatusCode {
 	} else {
 		requestBody = c.Request.Body
 	}
+	switch apiType {
+	case APITypeClaude:
+		claudeRequest := requestOpenAI2Claude(textRequest)
+		jsonStr, err := json.Marshal(claudeRequest)
+		if err != nil {
+			return errorWrapper(err, "marshal_text_request_failed", http.StatusInternalServerError)
+		}
+		requestBody = bytes.NewBuffer(jsonStr)
+	case APITypeBaidu:
+		baiduRequest := requestOpenAI2Baidu(textRequest)
+		jsonStr, err := json.Marshal(baiduRequest)
+		if err != nil {
+			return errorWrapper(err, "marshal_text_request_failed", http.StatusInternalServerError)
+		}
+		requestBody = bytes.NewBuffer(jsonStr)
+	}
 	req, err := http.NewRequest(c.Request.Method, fullRequestURL, requestBody)
 	if err != nil {
 		return errorWrapper(err, "new_request_failed", http.StatusInternalServerError)
 	}
-	if channelType == common.ChannelTypeAzure {
-		key := c.Request.Header.Get("Authorization")
-		key = strings.TrimPrefix(key, "Bearer ")
-		req.Header.Set("api-key", key)
-	} else {
-		req.Header.Set("Authorization", c.Request.Header.Get("Authorization"))
+	apiKey := c.Request.Header.Get("Authorization")
+	apiKey = strings.TrimPrefix(apiKey, "Bearer ")
+	switch apiType {
+	case APITypeOpenAI:
+		if channelType == common.ChannelTypeAzure {
+			req.Header.Set("api-key", apiKey)
+		} else {
+			req.Header.Set("Authorization", c.Request.Header.Get("Authorization"))
+		}
+	case APITypeClaude:
+		req.Header.Set("x-api-key", apiKey)
+		anthropicVersion := c.Request.Header.Get("anthropic-version")
+		if anthropicVersion == "" {
+			anthropicVersion = "2023-06-01"
+		}
+		req.Header.Set("anthropic-version", anthropicVersion)
 	}
 	req.Header.Set("Content-Type", c.Request.Header.Get("Content-Type"))
 	req.Header.Set("Accept", c.Request.Header.Get("Accept"))
@@ -183,7 +238,7 @@ func relayTextHelper(c *gin.Context, relayMode int) *OpenAIErrorWithStatusCode {
 			if strings.HasPrefix(textRequest.Model, "gpt-4") {
 				completionRatio = 2
 			}
-			if isStream {
+			if isStream && apiType != APITypeBaidu {
 				completionTokens = countTokenText(streamResponseText, textRequest.Model)
 			} else {
 				promptTokens = textResponse.Usage.PromptTokens
@@ -219,124 +274,56 @@ func relayTextHelper(c *gin.Context, relayMode int) *OpenAIErrorWithStatusCode {
 			}
 		}
 	}()
 
-	if isStream {
-		scanner := bufio.NewScanner(resp.Body)
-		scanner.Split(func(data []byte, atEOF bool) (advance int, token []byte, err error) {
-			if atEOF && len(data) == 0 {
-				return 0, nil, nil
-			}
-			if i := strings.Index(string(data), "\n"); i >= 0 {
-				return i + 1, data[0:i], nil
-			}
-			if atEOF {
-				return len(data), data, nil
-			}
-			return 0, nil, nil
-		})
-		dataChan := make(chan string)
-		stopChan := make(chan bool)
-		go func() {
-			for scanner.Scan() {
-				data := scanner.Text()
-				if len(data) < 6 { // ignore blank line or wrong format
-					continue
-				}
-				dataChan <- data
-				data = data[6:]
-				if !strings.HasPrefix(data, "[DONE]") {
-					switch relayMode {
-					case RelayModeChatCompletions:
-						var streamResponse ChatCompletionsStreamResponse
-						err = json.Unmarshal([]byte(data), &streamResponse)
-						if err != nil {
-							common.SysError("error unmarshalling stream response: " + err.Error())
-							return
-						}
-						for _, choice := range streamResponse.Choices {
-							streamResponseText += choice.Delta.Content
-						}
-					case RelayModeCompletions:
-						var streamResponse CompletionsStreamResponse
-						err = json.Unmarshal([]byte(data), &streamResponse)
-						if err != nil {
-							common.SysError("error unmarshalling stream response: " + err.Error())
-							return
-						}
-						for _, choice := range streamResponse.Choices {
-							streamResponseText += choice.Text
-						}
-					}
-				}
-			}
-			stopChan <- true
-		}()
-		c.Writer.Header().Set("Content-Type", "text/event-stream")
-		c.Writer.Header().Set("Cache-Control", "no-cache")
-		c.Writer.Header().Set("Connection", "keep-alive")
-		c.Writer.Header().Set("Transfer-Encoding", "chunked")
-		c.Writer.Header().Set("X-Accel-Buffering", "no")
-		c.Stream(func(w io.Writer) bool {
-			select {
-			case data := <-dataChan:
-				if strings.HasPrefix(data, "data: [DONE]") {
-					data = data[:12]
-				}
-				// some implementations may add \r at the end of data
-				data = strings.TrimSuffix(data, "\r")
-				c.Render(-1, common.CustomEvent{Data: data})
-				return true
-			case <-stopChan:
-				return false
-			}
-		})
-		err = resp.Body.Close()
-		if err != nil {
-			return errorWrapper(err, "close_response_body_failed", http.StatusInternalServerError)
-		}
-		return nil
-	} else {
-		if consumeQuota {
-			responseBody, err := io.ReadAll(resp.Body)
-			if err != nil {
-				return errorWrapper(err, "read_response_body_failed", http.StatusInternalServerError)
-			}
-			err = resp.Body.Close()
-			if err != nil {
-				return errorWrapper(err, "close_response_body_failed", http.StatusInternalServerError)
-			}
-			err = json.Unmarshal(responseBody, &textResponse)
-			if err != nil {
-				return errorWrapper(err, "unmarshal_response_body_failed", http.StatusInternalServerError)
-			}
-			if textResponse.Error.Type != "" {
-				return &OpenAIErrorWithStatusCode{
-					OpenAIError: textResponse.Error,
-					StatusCode:  resp.StatusCode,
-				}
-			}
-			// Reset response body
-			resp.Body = io.NopCloser(bytes.NewBuffer(responseBody))
-		}
-		// We shouldn't set the header before we parse the response body, because the parse part may fail.
-		// And then we will have to send an error response, but in this case, the header has already been set.
-		// So the client will be confused by the response.
-		// For example, Postman will report error, and we cannot check the response at all.
-		for k, v := range resp.Header {
-			c.Writer.Header().Set(k, v[0])
-		}
-		c.Writer.WriteHeader(resp.StatusCode)
-		_, err = io.Copy(c.Writer, resp.Body)
-		if err != nil {
-			return errorWrapper(err, "copy_response_body_failed", http.StatusInternalServerError)
-		}
-		err = resp.Body.Close()
-		if err != nil {
-			return errorWrapper(err, "close_response_body_failed", http.StatusInternalServerError)
-		}
-		return nil
+	switch apiType {
+	case APITypeOpenAI:
+		if isStream {
+			err, responseText := openaiStreamHandler(c, resp, relayMode)
+			if err != nil {
+				return err
+			}
+			streamResponseText = responseText
+			return nil
+		} else {
+			err, usage := openaiHandler(c, resp, consumeQuota)
+			if err != nil {
+				return err
+			}
+			textResponse.Usage = *usage
+			return nil
+		}
+	case APITypeClaude:
+		if isStream {
+			err, responseText := claudeStreamHandler(c, resp)
+			if err != nil {
+				return err
+			}
+			streamResponseText = responseText
+			return nil
+		} else {
+			err, usage := claudeHandler(c, resp, promptTokens, textRequest.Model)
+			if err != nil {
+				return err
+			}
+			textResponse.Usage = *usage
+			return nil
+		}
+	case APITypeBaidu:
+		if isStream {
+			err, usage := baiduStreamHandler(c, resp)
+			if err != nil {
+				return err
+			}
+			textResponse.Usage = *usage
+			return nil
+		} else {
+			err, usage := baiduHandler(c, resp)
+			if err != nil {
+				return err
+			}
+			textResponse.Usage = *usage
+			return nil
+		}
+	default:
+		return errorWrapper(errors.New("unknown api type"), "unknown_api_type", http.StatusInternalServerError)
 	}
 }
```
controller/relay-utils.go

```diff
@@ -91,3 +91,16 @@ func errorWrapper(err error, code string, statusCode int) *OpenAIErrorWithStatusCode {
 		StatusCode:  statusCode,
 	}
 }
+
+func shouldDisableChannel(err *OpenAIError) bool {
+	if !common.AutomaticDisableChannelEnabled {
+		return false
+	}
+	if err == nil {
+		return false
+	}
+	if err.Type == "insufficient_quota" || err.Code == "invalid_api_key" || err.Code == "account_deactivated" {
+		return true
+	}
+	return false
+}
```
@@ -4,6 +4,7 @@ import (
 | 
			
		||||
	"fmt"
 | 
			
		||||
	"net/http"
 | 
			
		||||
	"one-api/common"
 | 
			
		||||
	"strconv"
 | 
			
		||||
	"strings"
 | 
			
		||||
 | 
			
		||||
	"github.com/gin-gonic/gin"
 | 
			
		||||
@@ -84,6 +85,20 @@ type TextResponse struct {
 | 
			
		||||
	Error OpenAIError `json:"error"`
 | 
			
		||||
}
 | 
			
		||||
 | 
			
		||||
type OpenAITextResponseChoice struct {
 | 
			
		||||
	Index        int `json:"index"`
 | 
			
		||||
	Message      `json:"message"`
 | 
			
		||||
	FinishReason string `json:"finish_reason"`
 | 
			
		||||
}
 | 
			
		||||
 | 
			
		||||
type OpenAITextResponse struct {
 | 
			
		||||
	Id      string                     `json:"id"`
 | 
			
		||||
	Object  string                     `json:"object"`
 | 
			
		||||
	Created int64                      `json:"created"`
 | 
			
		||||
	Choices []OpenAITextResponseChoice `json:"choices"`
 | 
			
		||||
	Usage   `json:"usage"`
 | 
			
		||||
}
 | 
			
		||||
 | 
			
		||||
type ImageResponse struct {
 | 
			
		||||
	Created int `json:"created"`
 | 
			
		||||
	Data    []struct {
 | 
			
		||||
@@ -91,13 +106,19 @@ type ImageResponse struct {
 | 
			
		||||
	}
 | 
			
		||||
}
 | 
			
		||||
 | 
			
		||||
type ChatCompletionsStreamResponseChoice struct {
 | 
			
		||||
	Delta struct {
 | 
			
		||||
		Content string `json:"content"`
 | 
			
		||||
	} `json:"delta"`
 | 
			
		||||
	FinishReason string `json:"finish_reason,omitempty"`
 | 
			
		||||
}
 | 
			
		||||
 | 
			
		||||
type ChatCompletionsStreamResponse struct {
 | 
			
		||||
	Choices []struct {
 | 
			
		||||
		Delta struct {
 | 
			
		||||
			Content string `json:"content"`
 | 
			
		||||
		} `json:"delta"`
 | 
			
		||||
		FinishReason string `json:"finish_reason"`
 | 
			
		||||
	} `json:"choices"`
 | 
			
		||||
	Id      string                                `json:"id"`
 | 
			
		||||
	Object  string                                `json:"object"`
 | 
			
		||||
	Created int64                                 `json:"created"`
 | 
			
		||||
	Model   string                                `json:"model"`
 | 
			
		||||
	Choices []ChatCompletionsStreamResponseChoice `json:"choices"`
 | 
			
		||||
}
 | 
			
		||||
 | 
			
		||||
type CompletionsStreamResponse struct {
 | 
			
		||||

@@ -132,16 +153,25 @@ func Relay(c *gin.Context) {
 		err = relayTextHelper(c, relayMode)
 	}
 	if err != nil {
-		if err.StatusCode == http.StatusTooManyRequests {
-			err.OpenAIError.Message = "当前分组负载已饱和,请稍后再试,或升级账户以提升服务质量。"
-		}
+		retryTimesStr := c.Query("retry")
+		retryTimes, _ := strconv.Atoi(retryTimesStr)
+		if retryTimesStr == "" {
+			retryTimes = common.RetryTimes
+		}
+		if retryTimes > 0 {
+			c.Redirect(http.StatusTemporaryRedirect, fmt.Sprintf("%s?retry=%d", c.Request.URL.Path, retryTimes-1))
+		} else {
+			if err.StatusCode == http.StatusTooManyRequests {
+				err.OpenAIError.Message = "当前分组负载已饱和,请稍后再试,或升级账户以提升服务质量。"
+			}
+			c.JSON(err.StatusCode, gin.H{
+				"error": err.OpenAIError,
+			})
+		}
-		c.JSON(err.StatusCode, gin.H{
-			"error": err.OpenAIError,
-		})
 		channelId := c.GetInt("channel_id")
 		common.SysError(fmt.Sprintf("relay error (channel #%d): %s", channelId, err.Message))
 		// https://platform.openai.com/docs/guides/error-codes/api-errors
-		if common.AutomaticDisableChannelEnabled && (err.Type == "insufficient_quota" || err.Code == "invalid_api_key" || err.Code == "account_deactivated") {
+		if shouldDisableChannel(&err.OpenAIError) {
 			channelId := c.GetInt("channel_id")
 			channelName := c.GetString("channel_name")
 			disableChannel(channelId, channelName, err.Message)
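
Note: the retry here is driven by the client via HTTP 307 redirects. The first failure seeds a ?retry= counter from common.RetryTimes, and each subsequent failure redirects back to the same path with the counter decremented until it reaches zero. A toy gin handler sketching the countdown follows; the route and the simulated upstream failure are assumptions for illustration, not the project's code.

package main

import (
	"fmt"
	"net/http"
	"strconv"

	"github.com/gin-gonic/gin"
)

var retryTimes = 2 // stands in for the RetryTimes option

func main() {
	r := gin.Default()
	r.GET("/v1/chat/completions", func(c *gin.Context) {
		retryTimesStr := c.Query("retry")
		retry, _ := strconv.Atoi(retryTimesStr)
		if retryTimesStr == "" {
			retry = retryTimes // first attempt: seed the counter
		}
		upstreamFailed := true // pretend the upstream call failed
		if upstreamFailed && retry > 0 {
			// Bounce back to the same path with one fewer retry left.
			c.Redirect(http.StatusTemporaryRedirect,
				fmt.Sprintf("%s?retry=%d", c.Request.URL.Path, retry-1))
			return
		}
		c.JSON(http.StatusTooManyRequests, gin.H{"error": "retries exhausted"})
	})
	r.Run(":8080")
}

A 307 obliges the client to replay the same method and body, which is what makes redirect-based retry workable for POST completion requests.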

@@ -68,6 +68,7 @@ func InitOptionMap() {
 	common.OptionMap["TopUpLink"] = common.TopUpLink
 	common.OptionMap["ChatLink"] = common.ChatLink
 	common.OptionMap["QuotaPerUnit"] = strconv.FormatFloat(common.QuotaPerUnit, 'f', -1, 64)
+	common.OptionMap["RetryTimes"] = strconv.Itoa(common.RetryTimes)
 	common.OptionMapRWMutex.Unlock()
 	loadOptionsFromDatabase()
 }

@@ -196,6 +197,8 @@ func updateOptionMap(key string, value string) (err error) {
 		common.QuotaRemindThreshold, _ = strconv.Atoi(value)
 	case "PreConsumedQuota":
 		common.PreConsumedQuota, _ = strconv.Atoi(value)
+	case "RetryTimes":
+		common.RetryTimes, _ = strconv.Atoi(value)
 	case "ModelRatio":
 		err = common.UpdateModelRatioByJSONString(value)
 	case "GroupRatio":

@@ -20,6 +20,7 @@ const OperationSetting = () => {
     DisplayInCurrencyEnabled: '',
     DisplayTokenStatEnabled: '',
     ApproximateTokenEnabled: '',
+    RetryTimes: 0,
   });
   const [originInputs, setOriginInputs] = useState({});
   let [loading, setLoading] = useState(false);

@@ -122,6 +123,9 @@ const OperationSetting = () => {
         if (originInputs['QuotaPerUnit'] !== inputs.QuotaPerUnit) {
           await updateOption('QuotaPerUnit', inputs.QuotaPerUnit);
         }
+        if (originInputs['RetryTimes'] !== inputs.RetryTimes) {
+          await updateOption('RetryTimes', inputs.RetryTimes);
+        }
         break;
     }
   };

@@ -133,7 +137,7 @@ const OperationSetting = () => {
           <Header as='h3'>
             通用设置
           </Header>
-          <Form.Group widths={3}>
+          <Form.Group widths={4}>
            <Form.Input
              label='充值链接'
              name='TopUpLink'

@@ -162,6 +166,17 @@ const OperationSetting = () => {
               step='0.01'
               placeholder='一单位货币能兑换的额度'
             />
+            <Form.Input
+              label='失败重试次数'
+              name='RetryTimes'
+              type={'number'}
+              step='1'
+              min='0'
+              onChange={handleInputChange}
+              autoComplete='new-password'
+              value={inputs.RetryTimes}
+              placeholder='失败重试次数'
+            />
           </Form.Group>
           <Form.Group inline>
             <Form.Checkbox

@@ -1,7 +1,9 @@
 export const CHANNEL_OPTIONS = [
   { key: 1, text: 'OpenAI', value: 1, color: 'green' },
+  { key: 14, text: 'Anthropic', value: 14, color: 'black' },
   { key: 8, text: '自定义', value: 8, color: 'pink' },
   { key: 3, text: 'Azure', value: 3, color: 'olive' },
+  { key: 15, text: 'Baidu', value: 15, color: 'blue' },
   { key: 2, text: 'API2D', value: 2, color: 'blue' },
   { key: 4, text: 'CloseAI', value: 4, color: 'teal' },
   { key: 5, text: 'OpenAI-SB', value: 5, color: 'brown' },
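
Note: keys 14 and 15 above are the frontend's view of the new Anthropic and Baidu channels; presumably they mirror numeric channel-type constants on the Go side. A hypothetical sketch of that mapping follows — the constant names are assumed for illustration, not taken from the source.

package main

import "fmt"

// Assumed channel-type constants matching the option keys above.
const (
	ChannelTypeOpenAI    = 1
	ChannelTypeAnthropic = 14
	ChannelTypeBaidu     = 15
)

// channelNames maps a channel type to its display name, as the
// CHANNEL_OPTIONS list does on the frontend.
var channelNames = map[int]string{
	ChannelTypeOpenAI:    "OpenAI",
	ChannelTypeAnthropic: "Anthropic",
	ChannelTypeBaidu:     "Baidu",
}

func main() {
	fmt.Println(channelNames[ChannelTypeAnthropic]) // Anthropic
}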

@@ -27,6 +27,7 @@ const EditChannel = () => {
   };
   const [batch, setBatch] = useState(false);
   const [inputs, setInputs] = useState(originInputs);
+  const [originModelOptions, setOriginModelOptions] = useState([]);
   const [modelOptions, setModelOptions] = useState([]);
   const [groupOptions, setGroupOptions] = useState([]);
   const [basicModels, setBasicModels] = useState([]);

@@ -44,19 +45,6 @@ const EditChannel = () => {
         data.models = [];
       } else {
         data.models = data.models.split(',');
-        setTimeout(() => {
-          let localModelOptions = [...modelOptions];
-          data.models.forEach((model) => {
-            if (!localModelOptions.find((option) => option.key === model)) {
-              localModelOptions.push({
-                key: model,
-                text: model,
-                value: model
-              });
-            }
-          });
-          setModelOptions(localModelOptions);
-        }, 1000);
       }
       if (data.group === '') {
         data.groups = [];

@@ -76,13 +64,16 @@ const EditChannel = () => {
   const fetchModels = async () => {
     try {
       let res = await API.get(`/api/channel/models`);
-      setModelOptions(res.data.data.map((model) => ({
+      let localModelOptions = res.data.data.map((model) => ({
         key: model.id,
         text: model.id,
         value: model.id
-      })));
+      }));
+      setOriginModelOptions(localModelOptions);
       setFullModels(res.data.data.map((model) => model.id));
-      setBasicModels(res.data.data.filter((model) => !model.id.startsWith('gpt-4')).map((model) => model.id));
+      setBasicModels(res.data.data.filter((model) => {
+        return model.id.startsWith('gpt-3') || model.id.startsWith('text-');
+      }).map((model) => model.id));
     } catch (error) {
       showError(error.message);
     }

@@ -101,6 +92,20 @@ const EditChannel = () => {
     }
   };
 
+  useEffect(() => {
+    let localModelOptions = [...originModelOptions];
+    inputs.models.forEach((model) => {
+      if (!localModelOptions.find((option) => option.key === model)) {
+        localModelOptions.push({
+          key: model,
+          text: model,
+          value: model
+        });
+      }
+    });
+    setModelOptions(localModelOptions);
+  }, [originModelOptions, inputs.models]);
+
   useEffect(() => {
     if (isEdit) {
       loadChannel().then();

@@ -280,15 +285,20 @@ const EditChannel = () => {
             <Input
               action={
                 <Button type={'button'} onClick={()=>{
+                  if (customModel.trim() === "") return;
+                  if (inputs.models.includes(customModel)) return;
                   let localModels = [...inputs.models];
                   localModels.push(customModel);
-                  let localModelOptions = [...modelOptions];
+                  let localModelOptions = [];
                   localModelOptions.push({
                     key: customModel,
                     text: customModel,
                     value: customModel,
                   });
-                  setModelOptions(localModelOptions);
+                  setModelOptions(modelOptions=>{
+                    return [...modelOptions, ...localModelOptions];
+                  });
+                  setCustomModel('');
                   handleInputChange(null, { name: 'models', value: localModels });
                 }}>填入</Button>
               }

@@ -327,7 +337,7 @@ const EditChannel = () => {
                 label='密钥'
                 name='key'
                 required
-                placeholder={'请输入密钥'}
+                placeholder={inputs.type === 15 ? "请输入 access token,当前版本暂不支持自动刷新,请每 30 天更新一次" : '请输入密钥'}
                 onChange={handleInputChange}
                 value={inputs.key}
                 autoComplete='new-password'