Mirror of https://github.com/songquanpeng/one-api.git (synced 2025-11-04 07:43:41 +08:00)

Merge 7bc40cc93a into 8df4a2670b

Dockerfile | 15
							@@ -4,15 +4,14 @@ WORKDIR /web
 COPY ./VERSION .
 COPY ./web .
 
-RUN npm install --prefix /web/default & \
-    npm install --prefix /web/berry & \
-    npm install --prefix /web/air & \
-    wait
+RUN npm install --prefix /web/default
+RUN npm install --prefix /web/berry
+RUN npm install --prefix /web/air
 
-RUN DISABLE_ESLINT_PLUGIN='true' REACT_APP_VERSION=$(cat ./VERSION) npm run build --prefix /web/default
-RUN DISABLE_ESLINT_PLUGIN='true' REACT_APP_VERSION=$(cat ./VERSION) npm run build --prefix /web/berry
-RUN DISABLE_ESLINT_PLUGIN='true' REACT_APP_VERSION=$(cat ./VERSION) npm run build --prefix /web/air
+RUN DISABLE_ESLINT_PLUGIN='true' REACT_APP_VERSION=$(cat ./VERSION) npm run build --prefix /web/default & \
+    DISABLE_ESLINT_PLUGIN='true' REACT_APP_VERSION=$(cat ./VERSION) npm run build --prefix /web/berry & \
+    DISABLE_ESLINT_PLUGIN='true' REACT_APP_VERSION=$(cat ./VERSION) npm run build --prefix /web/air & \
+    wait
 
 FROM golang:alpine AS builder2

common/file/file.go | 23 (new file)
							@@ -0,0 +1,23 @@
+package file
+
+import (
+	"bytes"
+	"encoding/base64"
+	"net/http"
+)
+
+func GetFileFromUrl(url string) (mimeType string, data string, err error) {
+	resp, err := http.Get(url)
+	if err != nil {
+		return
+	}
+	defer resp.Body.Close()
+	buffer := bytes.NewBuffer(nil)
+	_, err = buffer.ReadFrom(resp.Body)
+	if err != nil {
+		return
+	}
+	mimeType = resp.Header.Get("Content-Type")
+	data = base64.StdEncoding.EncodeToString(buffer.Bytes())
+	return
+}
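Note (reviewer sketch, not part of the commit): GetFileFromUrl pairs the response's Content-Type with the base64-encoded body, which is exactly the shape the Gemini adaptor's InlineData wants further down. A minimal, hypothetical caller (URL illustrative):

    package main

    import (
    	"fmt"
    	"log"

    	"github.com/songquanpeng/one-api/common/file"
    )

    func main() {
    	// Download a remote file; mimeType comes from the Content-Type header,
    	// data is the body encoded as standard base64.
    	mimeType, data, err := file.GetFileFromUrl("https://example.com/sample.pdf")
    	if err != nil {
    		log.Fatal(err)
    	}
    	fmt.Println(mimeType, len(data))
    }

One caveat worth noting: the helper trusts the server's Content-Type and buffers the whole body in memory.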
@@ -135,30 +135,32 @@ func InitDB() {
 }
 
 func migrateDB() error {
-	var err error
-	if err = DB.AutoMigrate(&Channel{}); err != nil {
-		return err
-	}
-	if err = DB.AutoMigrate(&Token{}); err != nil {
-		return err
-	}
-	if err = DB.AutoMigrate(&User{}); err != nil {
-		return err
-	}
-	if err = DB.AutoMigrate(&Option{}); err != nil {
-		return err
-	}
-	if err = DB.AutoMigrate(&Redemption{}); err != nil {
-		return err
-	}
-	if err = DB.AutoMigrate(&Ability{}); err != nil {
-		return err
-	}
-	if err = DB.AutoMigrate(&Log{}); err != nil {
-		return err
-	}
-	if err = DB.AutoMigrate(&Channel{}); err != nil {
-		return err
-	}
+	if env.Bool("StartSqlMigration", false) {
+		var err error
+		if err = DB.AutoMigrate(&Channel{}); err != nil {
+			return err
+		}
+		if err = DB.AutoMigrate(&Token{}); err != nil {
+			return err
+		}
+		if err = DB.AutoMigrate(&User{}); err != nil {
+			return err
+		}
+		if err = DB.AutoMigrate(&Option{}); err != nil {
+			return err
+		}
+		if err = DB.AutoMigrate(&Redemption{}); err != nil {
+			return err
+		}
+		if err = DB.AutoMigrate(&Ability{}); err != nil {
+			return err
+		}
+		if err = DB.AutoMigrate(&Log{}); err != nil {
+			return err
+		}
+		if err = DB.AutoMigrate(&Channel{}); err != nil {
+			return err
+		}
+	}
 	return nil
 }
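Note: the migration chain is now gated behind an environment flag, so ordinary restarts skip AutoMigrate. The diff relies on env.Bool from common/env; a rough sketch of that helper's assumed behavior (the real implementation may differ):

    package env

    import (
    	"os"
    	"strconv"
    )

    // Bool reads the named environment variable and parses it as a boolean,
    // returning defaultValue when it is unset or unparsable.
    func Bool(key string, defaultValue bool) bool {
    	v := os.Getenv(key)
    	if v == "" {
    		return defaultValue
    	}
    	b, err := strconv.ParseBool(v)
    	if err != nil {
    		return defaultValue
    	}
    	return b
    }

With this gate, an operator would run one deployment with StartSqlMigration=true to apply schema changes, then leave it unset for faster startups.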
@@ -64,6 +64,9 @@ func GetAdaptor(apiType int) adaptor.Adaptor {
 		return &proxy.Adaptor{}
 	case apitype.Replicate:
 		return &replicate.Adaptor{}
+	case apitype.CozeV3:
+		return &coze.AdaptorV3{}
+
 	}
 	return nil
 }
@@ -36,6 +36,12 @@ func ConvertRequest(request model.GeneralOpenAIRequest) *ChatRequest {
 		enableSearch = true
 		aliModel = strings.TrimSuffix(aliModel, EnableSearchModelSuffix)
 	}
+
+	enableThinking := false
+	if request.ReasoningEffort != nil {
+		enableThinking = true
+	}
+
 	request.TopP = helper.Float64PtrMax(request.TopP, 0.9999)
 	return &ChatRequest{
 		Model: aliModel,
@@ -52,6 +58,7 @@ func ConvertRequest(request model.GeneralOpenAIRequest) *ChatRequest {
 			TopK:              request.TopK,
 			ResultFormat:      "message",
 			Tools:             request.Tools,
+			EnableThinking:    enableThinking,
 		},
 	}
 }
@@ -25,6 +25,7 @@ type Parameters struct {
 	Temperature       *float64     `json:"temperature,omitempty"`
 	ResultFormat      string       `json:"result_format,omitempty"`
 	Tools             []model.Tool `json:"tools,omitempty"`
+	EnableThinking    bool         `json:"enable_thinking,omitempty"`
 }
 
 type ChatRequest struct {
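Note: together, these Ali hunks thread reasoning_effort from the OpenAI-style request into DashScope's enable_thinking flag. A sketch of the serialized parameters (struct trimmed to the relevant fields; values illustrative):

    package main

    import (
    	"encoding/json"
    	"fmt"
    )

    // Trimmed copy of the Parameters struct from the hunk above.
    type Parameters struct {
    	ResultFormat   string `json:"result_format,omitempty"`
    	EnableThinking bool   `json:"enable_thinking,omitempty"`
    }

    func main() {
    	// Any non-nil ReasoningEffort flips EnableThinking on.
    	p := Parameters{ResultFormat: "message", EnableThinking: true}
    	b, _ := json.Marshal(p)
    	fmt.Println(string(b)) // {"result_format":"message","enable_thinking":true}
    }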
@@ -17,4 +17,5 @@ var ModelList = []string{
 
 	"deepseek-r1",
 	"deepseek-v3",
+	"deepseek-v3.1",
 }

relay/adaptor/coze/adaptor_v3.go | 75 (new file)
							@@ -0,0 +1,75 @@
+package coze
+
+import (
+	"errors"
+	"fmt"
+	"github.com/gin-gonic/gin"
+	"github.com/songquanpeng/one-api/relay/adaptor"
+	"github.com/songquanpeng/one-api/relay/adaptor/openai"
+	"github.com/songquanpeng/one-api/relay/meta"
+	"github.com/songquanpeng/one-api/relay/model"
+	"io"
+	"net/http"
+)
+
+type AdaptorV3 struct {
+	meta *meta.Meta
+}
+
+func (a *AdaptorV3) Init(meta *meta.Meta) {
+	a.meta = meta
+}
+
+func (a *AdaptorV3) GetRequestURL(meta *meta.Meta) (string, error) {
+	return fmt.Sprintf("%s/v3/chat", meta.BaseURL), nil
+}
+
+func (a *AdaptorV3) SetupRequestHeader(c *gin.Context, req *http.Request, meta *meta.Meta) error {
+	adaptor.SetupCommonRequestHeader(c, req, meta)
+	req.Header.Set("Authorization", "Bearer "+meta.APIKey)
+	return nil
+}
+
+func (a *AdaptorV3) ConvertRequest(c *gin.Context, relayMode int, request *model.GeneralOpenAIRequest) (any, error) {
+	if request == nil {
+		return nil, errors.New("request is nil")
+	}
+	request.User = a.meta.Config.UserID
+	return V3ConvertRequest(*request), nil
+}
+
+func (a *AdaptorV3) ConvertImageRequest(request *model.ImageRequest) (any, error) {
+	if request == nil {
+		return nil, errors.New("request is nil")
+	}
+	return request, nil
+}
+
+func (a *AdaptorV3) DoRequest(c *gin.Context, meta *meta.Meta, requestBody io.Reader) (*http.Response, error) {
+	return adaptor.DoRequestHelper(a, c, meta, requestBody)
+}
+
+func (a *AdaptorV3) DoResponse(c *gin.Context, resp *http.Response, meta *meta.Meta) (usage *model.Usage, err *model.ErrorWithStatusCode) {
+	var responseText *string
+	if meta.IsStream {
+		err, responseText = V3StreamHandler(c, resp)
+	} else {
+		err, responseText = V3Handler(c, resp, meta.PromptTokens, meta.ActualModelName)
+	}
+	if responseText != nil {
+		usage = openai.ResponseText2Usage(*responseText, meta.ActualModelName, meta.PromptTokens)
+	} else {
+		usage = &model.Usage{}
+	}
+	usage.PromptTokens = meta.PromptTokens
+	usage.TotalTokens = usage.PromptTokens + usage.CompletionTokens
+	return
+}
+
+func (a *AdaptorV3) GetModelList() []string {
+	return ModelList
+}
+
+func (a *AdaptorV3) GetChannelName() string {
+	return "CozeV3"
+}
@@ -1,6 +1,9 @@
 package coze
 
-import "github.com/songquanpeng/one-api/relay/adaptor/coze/constant/event"
+import (
+	"github.com/songquanpeng/one-api/relay/adaptor/coze/constant/event"
+	"strings"
+)
 
 func event2StopReason(e *string) string {
 	if e == nil || *e == event.Message {
@@ -8,3 +11,16 @@ func event2StopReason(e *string) string {
 	}
 	return "stop"
 }
+
+func splitOnDoubleNewline(data []byte, atEOF bool) (advance int, token []byte, err error) {
+	if atEOF && len(data) == 0 {
+		return 0, nil, nil
+	}
+	if i := strings.Index(string(data), "\n\n"); i >= 0 {
+		return i + 1, data[0:i], nil
+	}
+	if atEOF {
+		return len(data), data, nil
+	}
+	return 0, nil, nil
+}
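Note: splitOnDoubleNewline is a bufio.SplitFunc that carves the SSE stream into blank-line-separated frames; V3StreamHandler installs it below via scanner.Split. A standalone demo (the SplitFunc is copied verbatim from the hunk above; stream content illustrative):

    package main

    import (
    	"bufio"
    	"fmt"
    	"strings"
    )

    // Copied from the hunk above.
    func splitOnDoubleNewline(data []byte, atEOF bool) (advance int, token []byte, err error) {
    	if atEOF && len(data) == 0 {
    		return 0, nil, nil
    	}
    	if i := strings.Index(string(data), "\n\n"); i >= 0 {
    		return i + 1, data[0:i], nil
    	}
    	if atEOF {
    		return len(data), data, nil
    	}
    	return 0, nil, nil
    }

    func main() {
    	stream := "event:a\ndata:{\"x\":1}\n\nevent:b\ndata:{\"y\":2}\n\n"
    	scanner := bufio.NewScanner(strings.NewReader(stream))
    	scanner.Split(splitOnDoubleNewline)
    	for scanner.Scan() {
    		// Because advance is i+1 rather than i+2, every frame after the
    		// first keeps a leading "\n"; the handler strips it with
    		// strings.TrimPrefix(part, "\n").
    		fmt.Printf("%q\n", scanner.Text())
    	}
    }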
@@ -4,19 +4,18 @@ import (
 	"bufio"
 	"encoding/json"
 	"fmt"
-	"github.com/songquanpeng/one-api/common/render"
-	"io"
-	"net/http"
-	"strings"
-
 	"github.com/gin-gonic/gin"
 	"github.com/songquanpeng/one-api/common"
 	"github.com/songquanpeng/one-api/common/conv"
 	"github.com/songquanpeng/one-api/common/helper"
 	"github.com/songquanpeng/one-api/common/logger"
+	"github.com/songquanpeng/one-api/common/render"
 	"github.com/songquanpeng/one-api/relay/adaptor/coze/constant/messagetype"
 	"github.com/songquanpeng/one-api/relay/adaptor/openai"
 	"github.com/songquanpeng/one-api/relay/model"
+	"io"
+	"net/http"
+	"strings"
 )
 
 // https://www.coze.com/open
@@ -57,6 +56,32 @@ func ConvertRequest(textRequest model.GeneralOpenAIRequest) *Request {
 	return &cozeRequest
 }
 
+func V3ConvertRequest(textRequest model.GeneralOpenAIRequest) *V3Request {
+	cozeRequest := V3Request{
+		UserId: textRequest.User,
+		Stream: textRequest.Stream,
+		BotId:  strings.TrimPrefix(textRequest.Model, "bot-"),
+	}
+	if cozeRequest.UserId == "" {
+		cozeRequest.UserId = "any"
+	}
+	for i, message := range textRequest.Messages {
+		if i == len(textRequest.Messages)-1 {
+			cozeRequest.AdditionalMessages = append(cozeRequest.AdditionalMessages, Message{
+				Role:    "user",
+				Content: message.CozeV3StringContent(),
+			})
+			continue
+		}
+		cozeMessage := Message{
+			Role:    message.Role,
+			Content: message.CozeV3StringContent(),
+		}
+		cozeRequest.AdditionalMessages = append(cozeRequest.AdditionalMessages, cozeMessage)
+	}
+	return &cozeRequest
+}
+
 func StreamResponseCoze2OpenAI(cozeResponse *StreamResponse) (*openai.ChatCompletionsStreamResponse, *Response) {
 	var response *Response
 	var stopReason string
@@ -80,6 +105,28 @@ func StreamResponseCoze2OpenAI(cozeResponse *StreamResponse) (*openai.ChatComple
 	return &openaiResponse, response
 }
 
+func V3StreamResponseCoze2OpenAI(cozeResponse *V3StreamResponse) (*openai.ChatCompletionsStreamResponse, *Response) {
+	var response *Response
+	var choice openai.ChatCompletionsStreamResponseChoice
+
+	choice.Delta.Role = cozeResponse.Role
+	choice.Delta.Content = cozeResponse.Content
+
+	var openaiResponse openai.ChatCompletionsStreamResponse
+	openaiResponse.Object = "chat.completion.chunk"
+	openaiResponse.Choices = []openai.ChatCompletionsStreamResponseChoice{choice}
+	openaiResponse.Id = cozeResponse.ConversationId
+
+	if cozeResponse.Usage.TokenCount > 0 {
+		openaiResponse.Usage = &model.Usage{
+			PromptTokens:     cozeResponse.Usage.InputCount,
+			CompletionTokens: cozeResponse.Usage.OutputCount,
+			TotalTokens:      cozeResponse.Usage.TokenCount,
+		}
+	}
+	return &openaiResponse, response
+}
+
 func ResponseCoze2OpenAI(cozeResponse *Response) *openai.TextResponse {
 	var responseText string
 	for _, message := range cozeResponse.Messages {
@@ -107,6 +154,26 @@ func ResponseCoze2OpenAI(cozeResponse *Response) *openai.TextResponse {
 	return &fullTextResponse
 }
 
+func V3ResponseCoze2OpenAI(cozeResponse *V3Response) *openai.TextResponse {
+	choice := openai.TextResponseChoice{
+		Index: 0,
+		Message: model.Message{
+			Role:    "assistant",
+			Content: cozeResponse.Data.Content,
+			Name:    nil,
+		},
+		FinishReason: "stop",
+	}
+	fullTextResponse := openai.TextResponse{
+		Id:      fmt.Sprintf("chatcmpl-%s", cozeResponse.Data.ConversationId),
+		Model:   "coze-bot",
+		Object:  "chat.completion",
+		Created: helper.GetTimestamp(),
+		Choices: []openai.TextResponseChoice{choice},
+	}
+	return &fullTextResponse
+}
+
 func StreamHandler(c *gin.Context, resp *http.Response) (*model.ErrorWithStatusCode, *string) {
 	var responseText string
 	createdTime := helper.GetTimestamp()
@@ -162,6 +229,63 @@ func StreamHandler(c *gin.Context, resp *http.Response) (*model.ErrorWithStatusC
 	return nil, &responseText
 }
 
+func V3StreamHandler(c *gin.Context, resp *http.Response) (*model.ErrorWithStatusCode, *string) {
+	var responseText string
+	createdTime := helper.GetTimestamp()
+	scanner := bufio.NewScanner(resp.Body)
+	scanner.Split(splitOnDoubleNewline)
+	common.SetEventStreamHeaders(c)
+	var modelName string
+	for scanner.Scan() {
+		part := scanner.Text()
+		part = strings.TrimPrefix(part, "\n")
+		parts := strings.Split(part, "\n")
+		if len(parts) != 2 {
+			continue
+		}
+		if !strings.HasPrefix(parts[0], "event:") || !strings.HasPrefix(parts[1], "data:") {
+			continue
+		}
+		event, data := strings.TrimSpace(parts[0][6:]), strings.TrimSpace(parts[1][5:])
+		if event == "conversation.message.delta" || event == "conversation.chat.completed" {
+			data = strings.TrimSuffix(data, "\r")
+			var cozeResponse V3StreamResponse
+			err := json.Unmarshal([]byte(data), &cozeResponse)
+			if err != nil {
+				logger.SysError("error unmarshalling stream response: " + err.Error())
+				continue
+			}
+
+			response, _ := V3StreamResponseCoze2OpenAI(&cozeResponse)
+			if response == nil {
+				continue
+			}
+
+			for _, choice := range response.Choices {
+				responseText += conv.AsString(choice.Delta.Content)
+			}
+			response.Model = modelName
+			response.Created = createdTime
+
+			err = render.ObjectData(c, response)
+			if err != nil {
+				logger.SysError(err.Error())
+			}
+		}
+	}
+	if err := scanner.Err(); err != nil {
+		logger.SysError("error reading stream: " + err.Error())
+	}
+
+	render.Done(c)
+	err := resp.Body.Close()
+	if err != nil {
+		return openai.ErrorWrapper(err, "close_response_body_failed", http.StatusInternalServerError), nil
+	}
+
+	return nil, &responseText
+}
+
 func Handler(c *gin.Context, resp *http.Response, promptTokens int, modelName string) (*model.ErrorWithStatusCode, *string) {
 	responseBody, err := io.ReadAll(resp.Body)
 	if err != nil {
@@ -200,3 +324,42 @@ func Handler(c *gin.Context, resp *http.Response, promptTokens int, modelName st
 	}
 	return nil, &responseText
 }
+
+func V3Handler(c *gin.Context, resp *http.Response, promptTokens int, modelName string) (*model.ErrorWithStatusCode, *string) {
+	responseBody, err := io.ReadAll(resp.Body)
+	if err != nil {
+		return openai.ErrorWrapper(err, "read_response_body_failed", http.StatusInternalServerError), nil
+	}
+	err = resp.Body.Close()
+	if err != nil {
+		return openai.ErrorWrapper(err, "close_response_body_failed", http.StatusInternalServerError), nil
+	}
+	var cozeResponse V3Response
+	err = json.Unmarshal(responseBody, &cozeResponse)
+	if err != nil {
+		return openai.ErrorWrapper(err, "unmarshal_response_body_failed", http.StatusInternalServerError), nil
+	}
+	if cozeResponse.Code != 0 {
+		return &model.ErrorWithStatusCode{
+			Error: model.Error{
+				Message: cozeResponse.Msg,
+				Code:    cozeResponse.Code,
+			},
+			StatusCode: resp.StatusCode,
+		}, nil
+	}
+	fullTextResponse := V3ResponseCoze2OpenAI(&cozeResponse)
+	fullTextResponse.Model = modelName
+	jsonResponse, err := json.Marshal(fullTextResponse)
+	if err != nil {
+		return openai.ErrorWrapper(err, "marshal_response_body_failed", http.StatusInternalServerError), nil
+	}
+	c.Writer.Header().Set("Content-Type", "application/json")
+	c.Writer.WriteHeader(resp.StatusCode)
+	_, err = c.Writer.Write(jsonResponse)
+	var responseText string
+	if len(fullTextResponse.Choices) > 0 {
+		responseText = fullTextResponse.Choices[0].Message.StringContent()
+	}
+	return nil, &responseText
+}
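Note: to see what the streaming path emits, here is a hypothetical delta frame pushed through the converter above (field values illustrative):

    package main

    import (
    	"encoding/json"
    	"fmt"

    	"github.com/songquanpeng/one-api/relay/adaptor/coze"
    )

    func main() {
    	// The data payload of one "conversation.message.delta" SSE frame.
    	raw := `{"conversation_id":"7361","role":"assistant","content":"Hel","content_type":"text"}`
    	var chunk coze.V3StreamResponse
    	if err := json.Unmarshal([]byte(raw), &chunk); err != nil {
    		panic(err)
    	}
    	// Usage.TokenCount is zero here, so no usage block is attached.
    	openaiChunk, _ := coze.V3StreamResponseCoze2OpenAI(&chunk)
    	b, _ := json.Marshal(openaiChunk)
    	fmt.Println(string(b)) // an OpenAI-style "chat.completion.chunk"
    }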
@@ -2,9 +2,9 @@ package coze
 
 type Message struct {
 	Role        string `json:"role"`
-	Type        string `json:"type"`
+	Type        string `json:"type,omitempty"`
 	Content     string `json:"content"`
-	ContentType string `json:"content_type"`
+	ContentType string `json:"content_type,omitempty"`
 }
 
 type ErrorInformation struct {
@@ -36,3 +36,52 @@ type StreamResponse struct {
 	ConversationId   string            `json:"conversation_id,omitempty"`
 	ErrorInformation *ErrorInformation `json:"error_information,omitempty"`
 }
+
+type V3StreamResponse struct {
+	Id             string `json:"id"`
+	ConversationId string `json:"conversation_id"`
+	BotId          string `json:"bot_id"`
+	Role           string `json:"role"`
+	Type           string `json:"type"`
+	Content        string `json:"content"`
+	ContentType    string `json:"content_type"`
+	ChatId         string `json:"chat_id"`
+	CreatedAt      int    `json:"created_at"`
+	CompletedAt    int    `json:"completed_at"`
+	LastError      struct {
+		Code int    `json:"code"`
+		Msg  string `json:"msg"`
+	} `json:"last_error"`
+	Status string `json:"status"`
+	Usage  struct {
+		TokenCount  int `json:"token_count"`
+		OutputCount int `json:"output_count"`
+		InputCount  int `json:"input_count"`
+	} `json:"usage"`
+	SectionId string `json:"section_id"`
+}
+
+type V3Response struct {
+	Data struct {
+		Id             string `json:"id"`
+		ConversationId string `json:"conversation_id"`
+		BotId          string `json:"bot_id"`
+		Content        string `json:"content"`
+		ContentType    string `json:"content_type"`
+		CreatedAt      int    `json:"created_at"`
+		LastError      struct {
+			Code int    `json:"code"`
+			Msg  string `json:"msg"`
+		} `json:"last_error"`
+		Status string `json:"status"`
+	} `json:"data"`
+	Code int    `json:"code"`
+	Msg  string `json:"msg"`
+}
+
+type V3Request struct {
+	BotId              string    `json:"bot_id"`
+	UserId             string    `json:"user_id"`
+	AdditionalMessages []Message `json:"additional_messages"`
+	Stream             bool      `json:"stream"`
+}
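Note: to make the new wire types concrete, a sketch of what V3ConvertRequest produces for a short conversation (bot ID and texts illustrative):

    package main

    import (
    	"encoding/json"
    	"fmt"

    	"github.com/songquanpeng/one-api/relay/adaptor/coze"
    	"github.com/songquanpeng/one-api/relay/model"
    )

    func main() {
    	req := model.GeneralOpenAIRequest{
    		Model:  "bot-123456", // the "bot-" prefix is stripped into bot_id
    		Stream: true,
    		Messages: []model.Message{
    			{Role: "assistant", Content: "Hi, how can I help?"},
    			{Role: "user", Content: "Summarize this repo."},
    		},
    	}
    	v3 := coze.V3ConvertRequest(req)
    	// UserId falls back to "any" when the request carries no user field;
    	// the final message is always forwarded with role "user".
    	b, _ := json.MarshalIndent(v3, "", "  ")
    	fmt.Println(string(b))
    }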
@@ -25,7 +25,7 @@ func (a *Adaptor) Init(meta *meta.Meta) {
 
 func (a *Adaptor) GetRequestURL(meta *meta.Meta) (string, error) {
 	defaultVersion := config.GeminiVersion
-	if strings.Contains(meta.ActualModelName, "gemini-2.0") ||
+	if strings.Contains(meta.ActualModelName, "gemini-2") ||
 		strings.Contains(meta.ActualModelName, "gemini-1.5") {
 		defaultVersion = "v1beta"
 	}
@@ -12,6 +12,7 @@ import (
 
 	"github.com/songquanpeng/one-api/common"
 	"github.com/songquanpeng/one-api/common/config"
+	"github.com/songquanpeng/one-api/common/file"
 	"github.com/songquanpeng/one-api/common/helper"
 	"github.com/songquanpeng/one-api/common/image"
 	"github.com/songquanpeng/one-api/common/logger"
@@ -66,6 +67,23 @@ func ConvertRequest(textRequest model.GeneralOpenAIRequest) *ChatRequest {
 			MaxOutputTokens: textRequest.MaxTokens,
 		},
 	}
+
+	if textRequest.ReasoningEffort != nil {
+		var thinkBudget int
+		switch *textRequest.ReasoningEffort {
+		case "low":
+			thinkBudget = 1000
+		case "medium":
+			thinkBudget = 8000
+		case "high":
+			thinkBudget = 24000
+		}
+		geminiRequest.GenerationConfig.ThinkingConfig = &ThinkingConfig{
+			ThinkingBudget:  thinkBudget,
+			IncludeThoughts: true,
+		}
+	}
+
 	if textRequest.ResponseFormat != nil {
 		if mimeType, ok := mimeTypeMap[textRequest.ResponseFormat.Type]; ok {
 			geminiRequest.GenerationConfig.ResponseMimeType = mimeType
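Note: the reasoning_effort switch above lands in the request body as a nested generationConfig.thinkingConfig object. A sketch of the serialized shape (structs trimmed; budgets as mapped above):

    package main

    import (
    	"encoding/json"
    	"fmt"
    )

    // Trimmed copies of the config structs from this file.
    type ThinkingConfig struct {
    	ThinkingBudget  int  `json:"thinkingBudget"`
    	IncludeThoughts bool `json:"includeThoughts"`
    }

    type ChatGenerationConfig struct {
    	MaxOutputTokens int             `json:"maxOutputTokens,omitempty"`
    	ThinkingConfig  *ThinkingConfig `json:"thinkingConfig,omitempty"`
    }

    func main() {
    	// "medium" reasoning effort maps to an 8000-token thinking budget.
    	cfg := ChatGenerationConfig{
    		MaxOutputTokens: 1024,
    		ThinkingConfig:  &ThinkingConfig{ThinkingBudget: 8000, IncludeThoughts: true},
    	}
    	b, _ := json.Marshal(cfg)
    	fmt.Println(string(b))
    	// {"maxOutputTokens":1024,"thinkingConfig":{"thinkingBudget":8000,"includeThoughts":true}}
    }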
@@ -76,22 +94,13 @@ func ConvertRequest(textRequest model.GeneralOpenAIRequest) *ChatRequest {
 		}
 	}
 	if textRequest.Tools != nil {
-		functions := make([]model.Function, 0, len(textRequest.Tools))
-		for _, tool := range textRequest.Tools {
-			functions = append(functions, tool.Function)
-		}
-		geminiRequest.Tools = []ChatTools{
-			{
-				FunctionDeclarations: functions,
-			},
-		}
-	} else if textRequest.Functions != nil {
-		geminiRequest.Tools = []ChatTools{
-			{
-				FunctionDeclarations: textRequest.Functions,
-			},
-		}
+		geminiRequest.Tools = textRequest.Tools
 	}
+
+	if textRequest.Functions != nil {
+		geminiRequest.Tools = textRequest.Functions
+	}
+
 	shouldAddDummyModelMessage := false
 	for _, message := range textRequest.Messages {
 		content := ChatContent{
@@ -110,6 +119,14 @@ func ConvertRequest(textRequest model.GeneralOpenAIRequest) *ChatRequest {
 				parts = append(parts, Part{
 					Text: part.Text,
 				})
+			} else if part.Type == model.ContentTypeInputFile {
+				mimeType, data, _ := file.GetFileFromUrl(part.File.FileData)
+				parts = append(parts, Part{
+					InlineData: &InlineData{
+						MimeType: mimeType,
+						Data:     data,
+					},
+				})
 			} else if part.Type == model.ContentTypeImageURL {
 				imageNum += 1
 				if imageNum > VisionMaxImageNum {
@@ -199,6 +216,21 @@ func (g *ChatResponse) GetResponseText() string {
 	return ""
 }
 
+func (g *ChatResponse) GetResponseTextAndThought() (content string, thought string) {
+	if g == nil {
+		return
+	}
+	if len(g.Candidates) > 0 && len(g.Candidates[0].Content.Parts) > 0 {
+		contentPart := g.Candidates[0].Content.Parts[0]
+		if contentPart.Thought {
+			thought = contentPart.Text
+			return
+		}
+		content = contentPart.Text
+	}
+	return
+}
+
 type ChatCandidate struct {
 	Content       ChatContent        `json:"content"`
 	FinishReason  string             `json:"finishReason"`
@@ -263,7 +295,11 @@ func responseGeminiChat2OpenAI(response *ChatResponse) *openai.TextResponse {
 					if i > 0 {
 						builder.WriteString("\n")
 					}
-					builder.WriteString(part.Text)
+					if part.Thought {
+						builder.WriteString(fmt.Sprintf("<think>%s</think>\n", part.Text))
+					} else {
+						builder.WriteString(part.Text)
+					}
 				}
 				choice.Message.Content = builder.String()
 			}
@@ -278,7 +314,7 @@ func responseGeminiChat2OpenAI(response *ChatResponse) *openai.TextResponse {
 
 func streamResponseGeminiChat2OpenAI(geminiResponse *ChatResponse) *openai.ChatCompletionsStreamResponse {
 	var choice openai.ChatCompletionsStreamResponseChoice
-	choice.Delta.Content = geminiResponse.GetResponseText()
+	choice.Delta.Content, choice.Delta.ReasoningContent = geminiResponse.GetResponseTextAndThought()
 	//choice.FinishReason = &constant.StopFinishReason
 	var response openai.ChatCompletionsStreamResponse
 	response.Id = fmt.Sprintf("chatcmpl-%s", random.GetUUID())
@@ -4,7 +4,7 @@ type ChatRequest struct {
 	Contents          []ChatContent        `json:"contents"`
 	SafetySettings    []ChatSafetySettings `json:"safety_settings,omitempty"`
 	GenerationConfig  ChatGenerationConfig `json:"generation_config,omitempty"`
-	Tools             []ChatTools          `json:"tools,omitempty"`
+	Tools             interface{}          `json:"tools,omitempty"`
 	SystemInstruction *ChatContent         `json:"system_instruction,omitempty"`
 }
@@ -40,6 +40,11 @@ type InlineData struct {
 	Data     string `json:"data"`
 }
 
+type FileData struct {
+	MimeType string `json:"mime_type"`
+	FileUri  string `json:"file_uri"`
+}
+
 type FunctionCall struct {
 	FunctionName string `json:"name"`
 	Arguments    any    `json:"args"`
@@ -49,6 +54,8 @@ type Part struct {
 	Text         string        `json:"text,omitempty"`
 	InlineData   *InlineData   `json:"inlineData,omitempty"`
 	FunctionCall *FunctionCall `json:"functionCall,omitempty"`
+	Thought      bool          `json:"thought,omitempty"`
+	FileData     *FileData     `json:"fileData,omitempty"`
 }
 
 type ChatContent struct {
@@ -66,12 +73,18 @@ type ChatTools struct {
 }
 
 type ChatGenerationConfig struct {
-	ResponseMimeType string   `json:"responseMimeType,omitempty"`
-	ResponseSchema   any      `json:"responseSchema,omitempty"`
-	Temperature      *float64 `json:"temperature,omitempty"`
-	TopP             *float64 `json:"topP,omitempty"`
-	TopK             float64  `json:"topK,omitempty"`
-	MaxOutputTokens  int      `json:"maxOutputTokens,omitempty"`
-	CandidateCount   int      `json:"candidateCount,omitempty"`
-	StopSequences    []string `json:"stopSequences,omitempty"`
+	ResponseMimeType string          `json:"responseMimeType,omitempty"`
+	ResponseSchema   any             `json:"responseSchema,omitempty"`
+	Temperature      *float64        `json:"temperature,omitempty"`
+	TopP             *float64        `json:"topP,omitempty"`
+	TopK             float64         `json:"topK,omitempty"`
+	MaxOutputTokens  int             `json:"maxOutputTokens,omitempty"`
+	CandidateCount   int             `json:"candidateCount,omitempty"`
+	StopSequences    []string        `json:"stopSequences,omitempty"`
+	ThinkingConfig   *ThinkingConfig `json:"thinkingConfig,omitempty"`
 }
+
+type ThinkingConfig struct {
+	ThinkingBudget  int  `json:"thinkingBudget"`
+	IncludeThoughts bool `json:"includeThoughts"`
+}
@@ -45,7 +45,6 @@ func (a *Adaptor) GetRequestURL(meta *meta.Meta) (string, error) {
 		requestURL = fmt.Sprintf("%s?api-version=%s", requestURL, meta.Config.APIVersion)
 		task := strings.TrimPrefix(requestURL, "/v1/")
 		model_ := meta.ActualModelName
-		model_ = strings.Replace(model_, ".", "", -1)
 		//https://github.com/songquanpeng/one-api/issues/1191
 		// {your endpoint}/openai/deployments/{your azure_model}/chat/completions?api-version={api_version}
 		requestURL = fmt.Sprintf("/openai/deployments/%s/%s", model_, task)
@@ -4,7 +4,7 @@ var ModelList = []string{
 	"gpt-3.5-turbo", "gpt-3.5-turbo-0301", "gpt-3.5-turbo-0613", "gpt-3.5-turbo-1106", "gpt-3.5-turbo-0125",
 	"gpt-3.5-turbo-16k", "gpt-3.5-turbo-16k-0613",
 	"gpt-3.5-turbo-instruct",
-	"gpt-4", "gpt-4-0314", "gpt-4-0613", "gpt-4-1106-preview", "gpt-4-0125-preview",
+	"gpt-4", "gpt-4.1", "gpt-4-0314", "gpt-4-0613", "gpt-4-1106-preview", "gpt-4-0125-preview",
 	"gpt-4-32k", "gpt-4-32k-0314", "gpt-4-32k-0613",
 	"gpt-4-turbo-preview", "gpt-4-turbo", "gpt-4-turbo-2024-04-09",
 	"gpt-4o", "gpt-4o-2024-05-13",
@@ -20,6 +20,6 @@ const (
 	VertexAI
 	Proxy
 	Replicate
-
+	CozeV3
 	Dummy // this one is only for count, do not add any channel after this
 )
@@ -27,6 +27,7 @@ var modelRatioLock sync.RWMutex
 var ModelRatio = map[string]float64{
 	// https://openai.com/pricing
 	"gpt-4":                   15,
+	"gpt-4.1":                 15,
 	"gpt-4-0314":              15,
 	"gpt-4-0613":              15,
 	"gpt-4-32k":               30,
@@ -71,6 +72,8 @@ var ModelRatio = map[string]float64{
 	"text-davinci-edit-001":   10,
 	"code-davinci-edit-001":   10,
 	"whisper-1":               15,  // $0.006 / minute -> $0.006 / 150 words -> $0.006 / 200 tokens -> $0.03 / 1k tokens
+	"gpt-4o-mini-transcribe":  15,  // $0.006 / minute -> $0.006 / 150 words -> $0.006 / 200 tokens -> $0.03 / 1k tokens
+	"gpt-4o-transcribe":       15,  // $0.006 / minute -> $0.006 / 150 words -> $0.006 / 200 tokens -> $0.03 / 1k tokens
 	"tts-1":                   7.5, // $0.015 / 1K characters
 	"tts-1-1106":              7.5,
 	"tts-1-hd":                15, // $0.030 / 1K characters
@@ -626,7 +629,9 @@ var CompletionRatio = map[string]float64{
 	"llama3-8b-8192(33)":  0.0006 / 0.0003,
 	"llama3-70b-8192(33)": 0.0035 / 0.00265,
 	// whisper
-	"whisper-1": 0, // only count input tokens
+	"whisper-1":              0, // only count input tokens
+	"gpt-4o-mini-transcribe": 0,
+	"gpt-4o-transcribe":      0,
 	// deepseek
 	"deepseek-chat":     0.28 / 0.14,
 	"deepseek-reasoner": 2.19 / 0.55,
@@ -53,5 +53,6 @@ const (
 	AliBailian
 	OpenAICompatible
 	GeminiOpenAICompatible
+	CozeV3
 	Dummy
 )
@@ -23,12 +23,16 @@ func ToAPIType(channelType int) int {
 		apiType = apitype.Tencent
 	case Gemini:
 		apiType = apitype.Gemini
+	case GeminiOpenAICompatible:
+		apiType = apitype.Gemini
 	case Ollama:
 		apiType = apitype.Ollama
 	case AwsClaude:
 		apiType = apitype.AwsClaude
 	case Coze:
 		apiType = apitype.Coze
+	case CozeV3:
+		apiType = apitype.CozeV3
 	case Cohere:
 		apiType = apitype.Cohere
 	case Cloudflare:
@@ -54,6 +54,7 @@ var ChannelBaseURLs = []string{
 	"",                                          // 50
 
 	"https://generativelanguage.googleapis.com/v1beta/openai/", // 51
+	"https://api.coze.cn", // 52
 }
 
 func init() {
@@ -8,6 +8,7 @@ import (
 	"errors"
 	"fmt"
 	"io"
+	"mime/multipart"
 	"net/http"
 	"strings"
 
@@ -30,8 +31,7 @@ import (
 func RelayAudioHelper(c *gin.Context, relayMode int) *relaymodel.ErrorWithStatusCode {
 	ctx := c.Request.Context()
 	meta := meta.GetByContext(c)
-	audioModel := "whisper-1"
-
+	audioModel := "gpt-4o-transcribe"
 	tokenId := c.GetInt(ctxkey.TokenId)
 	channelType := c.GetInt(ctxkey.Channel)
 	channelId := c.GetInt(ctxkey.ChannelId)
@@ -124,12 +124,13 @@ func RelayAudioHelper(c *gin.Context, relayMode int) *relaymodel.ErrorWithStatus
 	fullRequestURL := openai.GetFullRequestURL(baseURL, requestURL, channelType)
 	if channelType == channeltype.Azure {
 		apiVersion := meta.Config.APIVersion
+		deploymentName := c.GetString(ctxkey.ChannelName)
 		if relayMode == relaymode.AudioTranscription {
 			// https://learn.microsoft.com/en-us/azure/ai-services/openai/whisper-quickstart?tabs=command-line#rest-api
-			fullRequestURL = fmt.Sprintf("%s/openai/deployments/%s/audio/transcriptions?api-version=%s", baseURL, audioModel, apiVersion)
+			fullRequestURL = fmt.Sprintf("%s/openai/deployments/%s/audio/transcriptions?api-version=%s", baseURL, deploymentName, apiVersion)
 		} else if relayMode == relaymode.AudioSpeech {
 			// https://learn.microsoft.com/en-us/azure/ai-services/openai/text-to-speech-quickstart?tabs=command-line#rest-api
-			fullRequestURL = fmt.Sprintf("%s/openai/deployments/%s/audio/speech?api-version=%s", baseURL, audioModel, apiVersion)
+			fullRequestURL = fmt.Sprintf("%s/openai/deployments/%s/audio/speech?api-version=%s", baseURL, deploymentName, apiVersion)
 		}
 	}
 
@@ -138,8 +139,73 @@ func RelayAudioHelper(c *gin.Context, relayMode int) *relaymodel.ErrorWithStatus
 	if err != nil {
 		return openai.ErrorWrapper(err, "new_request_body_failed", http.StatusInternalServerError)
 	}
-	c.Request.Body = io.NopCloser(bytes.NewBuffer(requestBody.Bytes()))
-	responseFormat := c.DefaultPostForm("response_format", "json")
+
+	// handle the form data
+	contentType := c.Request.Header.Get("Content-Type")
+	responseFormat := "json"
+	var contentTypeWithBoundary string
+
+	if strings.Contains(contentType, "multipart/form-data") {
+		originalBody := requestBody.Bytes()
+		c.Request.Body = io.NopCloser(bytes.NewBuffer(originalBody))
+		err = c.Request.ParseMultipartForm(32 << 20) // 32 MB max in memory
+		if err != nil {
+			return openai.ErrorWrapper(err, "parse_multipart_form_failed", http.StatusInternalServerError)
+		}
+
+		// read the response format
+		if format := c.Request.FormValue("response_format"); format != "" {
+			responseFormat = format
+		}
+
+		requestBody = &bytes.Buffer{}
+		writer := multipart.NewWriter(requestBody)
+
+		// copy the form fields
+		for key, values := range c.Request.MultipartForm.Value {
+			for _, value := range values {
+				err = writer.WriteField(key, value)
+				if err != nil {
+					return openai.ErrorWrapper(err, "write_field_failed", http.StatusInternalServerError)
+				}
+			}
+		}
+
+		// copy the files
+		for key, fileHeaders := range c.Request.MultipartForm.File {
+			for _, fileHeader := range fileHeaders {
+				file, err := fileHeader.Open()
+				if err != nil {
+					return openai.ErrorWrapper(err, "open_file_failed", http.StatusInternalServerError)
+				}
+
+				part, err := writer.CreateFormFile(key, fileHeader.Filename)
+				if err != nil {
+					file.Close()
+					return openai.ErrorWrapper(err, "create_form_file_failed", http.StatusInternalServerError)
+				}
+
+				_, err = io.Copy(part, file)
+				file.Close()
+				if err != nil {
+					return openai.ErrorWrapper(err, "copy_file_failed", http.StatusInternalServerError)
+				}
+			}
+		}
+
+		// finish writing the multipart body
+		err = writer.Close()
+		if err != nil {
+			return openai.ErrorWrapper(err, "close_writer_failed", http.StatusInternalServerError)
+		}
+
+		// update the Content-Type
+		contentTypeWithBoundary = writer.FormDataContentType()
+		c.Request.Header.Set("Content-Type", contentTypeWithBoundary)
+	} else {
+		// for non-form requests, simply reset the request body
+		c.Request.Body = io.NopCloser(bytes.NewBuffer(requestBody.Bytes()))
+	}
 
 	req, err := http.NewRequest(c.Request.Method, fullRequestURL, requestBody)
 	if err != nil {
@@ -151,11 +217,26 @@ func RelayAudioHelper(c *gin.Context, relayMode int) *relaymodel.ErrorWithStatus
 		apiKey := c.Request.Header.Get("Authorization")
 		apiKey = strings.TrimPrefix(apiKey, "Bearer ")
 		req.Header.Set("api-key", apiKey)
-		req.ContentLength = c.Request.ContentLength
+		// keep the body size consistent with Content-Length
+		req.ContentLength = int64(requestBody.Len())
 	} else {
 		req.Header.Set("Authorization", c.Request.Header.Get("Authorization"))
+		// keep the body size consistent with Content-Length
+		req.ContentLength = int64(requestBody.Len())
 	}
 
-	req.Header.Set("Content-Type", c.Request.Header.Get("Content-Type"))
+	// make sure the Content-Type is forwarded correctly
+	if strings.Contains(contentType, "multipart/form-data") && c.Request.MultipartForm != nil {
+		// for multipart requests, use the Content-Type generated while
+		// rebuilding the body: the boundary produced by the writer is required
+		if contentTypeWithBoundary != "" {
+			req.Header.Set("Content-Type", contentTypeWithBoundary)
+		} else {
+			req.Header.Set("Content-Type", c.Request.Header.Get("Content-Type"))
+		}
+	} else {
+		req.Header.Set("Content-Type", c.Request.Header.Get("Content-Type"))
+	}
 	req.Header.Set("Accept", c.Request.Header.Get("Accept"))
 
 	resp, err := client.HTTPClient.Do(req)
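Note: the audio relay now re-encodes the incoming multipart form rather than forwarding it verbatim, so the boundary and Content-Length it sends always describe the rebuilt body. The same rebuild pattern in isolation (field names illustrative):

    package main

    import (
    	"bytes"
    	"fmt"
    	"mime/multipart"
    )

    func main() {
    	body := &bytes.Buffer{}
    	writer := multipart.NewWriter(body)

    	// Copy ordinary form fields...
    	_ = writer.WriteField("response_format", "json")
    	// ...and file parts.
    	part, _ := writer.CreateFormFile("file", "speech.mp3")
    	part.Write([]byte("fake audio bytes"))

    	// Close appends the terminating boundary; after it, body.Len() is the
    	// exact Content-Length and FormDataContentType() carries the boundary.
    	writer.Close()
    	fmt.Println(writer.FormDataContentType(), body.Len())
    }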
@@ -4,4 +4,5 @@ const (
 	ContentTypeText       = "text"
 	ContentTypeImageURL   = "image_url"
 	ContentTypeInputAudio = "input_audio"
+	ContentTypeInputFile  = "file"
 )
@@ -1,5 +1,7 @@
 package model
 
+import "encoding/json"
+
 type Message struct {
 	Role             string  `json:"role,omitempty"`
 	Content          any     `json:"content,omitempty"`
@@ -38,6 +40,53 @@ func (m Message) StringContent() string {
 	return ""
 }
 
+func (m Message) CozeV3StringContent() string {
+	content, ok := m.Content.(string)
+	if ok {
+		return content
+	}
+	contentList, ok := m.Content.([]any)
+	if ok {
+		contents := make([]map[string]any, 0)
+		var contentStr string
+		for _, contentItem := range contentList {
+			contentMap, ok := contentItem.(map[string]any)
+			if !ok {
+				continue
+			}
+			switch contentMap["type"] {
+			case "text":
+				if subStr, ok := contentMap["text"].(string); ok {
+					contents = append(contents, map[string]any{
+						"type": "text",
+						"text": subStr,
+					})
+				}
+			case "image_url":
+				if subStr, ok := contentMap["image_url"].(string); ok {
+					contents = append(contents, map[string]any{
+						"type":     "image",
+						"file_url": subStr,
+					})
+				}
+			case "file":
+				if subStr, ok := contentMap["image_url"].(string); ok {
+					contents = append(contents, map[string]any{
+						"type":     "file",
+						"file_url": subStr,
+					})
+				}
+			}
+		}
+		if len(contents) > 0 {
+			b, _ := json.Marshal(contents)
+			return string(b)
+		}
+		return contentStr
+	}
+	return ""
+}
+
 func (m Message) ParseContent() []MessageContent {
 	var contentList []MessageContent
 	content, ok := m.Content.(string)
@@ -72,6 +121,15 @@ func (m Message) ParseContent() []MessageContent {
 					},
 				})
 			}
+		case ContentTypeInputFile:
+			if subObj, ok := contentMap["file"].(map[string]any); ok {
+				contentList = append(contentList, MessageContent{
+					Type: ContentTypeInputFile,
+					File: &File{
+						FileData: subObj["file_data"].(string),
+					},
+				})
+			}
 		}
 	}
 	return contentList
@@ -88,4 +146,10 @@ type MessageContent struct {
 	Type     string    `json:"type,omitempty"`
 	Text     string    `json:"text"`
 	ImageURL *ImageURL `json:"image_url,omitempty"`
+	File     *File     `json:"file,omitempty"`
 }
+
+type File struct {
+	FileData string `json:"file_data,omitempty"`
+	FileName string `json:"filename,omitempty"`
+}
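Note: CozeV3StringContent flattens an OpenAI-style multimodal content array into the JSON string Coze expects in additional_messages. A hedged demo (values illustrative; note the method reads image_url as a plain string, not the OpenAI {"url": ...} object):

    package main

    import (
    	"fmt"

    	"github.com/songquanpeng/one-api/relay/model"
    )

    func main() {
    	msg := model.Message{
    		Role: "user",
    		Content: []any{
    			map[string]any{"type": "text", "text": "Describe this image"},
    			map[string]any{"type": "image_url", "image_url": "https://example.com/cat.png"},
    		},
    	}
    	// Map keys are marshaled in sorted order, producing something like:
    	// [{"text":"Describe this image","type":"text"},
    	//  {"file_url":"https://example.com/cat.png","type":"image"}]
    	fmt.Println(msg.CozeV3StringContent())
    }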
@@ -22,6 +22,7 @@ export const CHANNEL_OPTIONS = [
   { key: 31, text: '零一万物', value: 31, color: 'green' },
   { key: 32, text: '阶跃星辰', value: 32, color: 'blue' },
   { key: 34, text: 'Coze', value: 34, color: 'blue' },
+  { key: 52, text: 'CozeV3', value: 52, color: 'blue' },
   { key: 35, text: 'Cohere', value: 35, color: 'blue' },
   { key: 36, text: 'DeepSeek', value: 36, color: 'black' },
   { key: 37, text: 'Cloudflare', value: 37, color: 'orange' },
@@ -137,6 +137,12 @@ export const CHANNEL_OPTIONS = {
     value: 34,
     color: 'primary'
   },
+  52: {
+    key: 52,
+    text: 'CozeV3',
+    value: 52,
+    color: 'primary'
+  },
   35: {
     key: 35,
     text: 'Cohere',
@@ -185,7 +191,7 @@ export const CHANNEL_OPTIONS = {
     value: 45,
     color: 'primary'
   },
-  45: {
+  46: {
     key: 46,
     text: 'Replicate',
     value: 46,
@@ -206,6 +206,20 @@ const typeConfig = {
     },
     modelGroup: 'Coze'
   },
+  52: {
+    inputLabel: {
+      config: {
+        user_id: 'User ID'
+      }
+    },
+    prompt: {
+      models: '对于 CozeV3 而言,模型名称即 Bot ID,你可以添加一个前缀 `bot-`,例如:`bot-123456`',
+      config: {
+        user_id: '生成该密钥的用户 ID'
+      }
+    },
+    modelGroup: 'CozeV3'
+  },
   42: {
     inputLabel: {
       key: '',
@@ -1,108 +1,109 @@
 | 
			
		||||
export const CHANNEL_OPTIONS = [
 | 
			
		||||
  { key: 1, text: 'OpenAI', value: 1, color: 'green' },
 | 
			
		||||
  {
 | 
			
		||||
    key: 50,
 | 
			
		||||
    text: 'OpenAI 兼容',
 | 
			
		||||
    value: 50,
 | 
			
		||||
    color: 'olive',
 | 
			
		||||
    description: 'OpenAI 兼容渠道,支持设置 Base URL',
 | 
			
		||||
  },
 | 
			
		||||
  {key: 14, text: 'Anthropic', value: 14, color: 'black'},
 | 
			
		||||
  { key: 33, text: 'AWS', value: 33, color: 'black' },
 | 
			
		||||
  {key: 3, text: 'Azure', value: 3, color: 'olive'},
 | 
			
		||||
  {key: 11, text: 'PaLM2', value: 11, color: 'orange'},
 | 
			
		||||
  {key: 24, text: 'Gemini', value: 24, color: 'orange'},
 | 
			
		||||
  {
 | 
			
		||||
    key: 51,
 | 
			
		||||
    text: 'Gemini (OpenAI)',
 | 
			
		||||
    value: 51,
 | 
			
		||||
    color: 'orange',
 | 
			
		||||
    description: 'Gemini OpenAI 兼容格式',
 | 
			
		||||
  },
 | 
			
		||||
  { key: 28, text: 'Mistral AI', value: 28, color: 'orange' },
  { key: 41, text: 'Novita', value: 41, color: 'purple' },
  {
    key: 40,
    text: '字节火山引擎',
    value: 40,
    color: 'blue',
    description: '原字节跳动豆包',
  },
  {
    key: 15,
    text: '百度文心千帆',
    value: 15,
    color: 'blue',
    tip: '请前往<a href="https://console.bce.baidu.com/qianfan/ais/console/applicationConsole/application/v1" target="_blank">此处</a>获取 AK(API Key)以及 SK(Secret Key),注意,V2 版本接口请使用 <strong>百度文心千帆 V2 </strong>渠道类型',
  },
  {
    key: 47,
    text: '百度文心千帆 V2',
    value: 47,
    color: 'blue',
    tip: '请前往<a href="https://console.bce.baidu.com/iam/#/iam/apikey/list" target="_blank">此处</a>获取 API Key,注意本渠道仅支持<a target="_blank" href="https://cloud.baidu.com/doc/WENXINWORKSHOP/s/em4tsqo3v">推理服务 V2</a>相关模型',
  },
  {
    key: 17,
    text: '阿里通义千问',
    value: 17,
    color: 'orange',
    tip: '如需使用阿里云百炼,请使用<strong>阿里云百炼</strong>渠道',
  },
  { key: 49, text: '阿里云百炼', value: 49, color: 'orange' },
  {
    key: 18,
    text: '讯飞星火认知',
    value: 18,
    color: 'blue',
    tip: '本渠道基于讯飞 WebSocket 版本 API,如需 HTTP 版本,请使用<strong>讯飞星火认知 V2</strong>渠道',
  },
  {
    key: 48,
    text: '讯飞星火认知 V2',
    value: 48,
    color: 'blue',
    tip: 'HTTP 版本的讯飞接口,前往<a href="https://console.xfyun.cn/services/cbm" target="_blank">此处</a>获取 HTTP 服务接口认证密钥',
  },
  { key: 16, text: '智谱 ChatGLM', value: 16, color: 'violet' },
  { key: 19, text: '360 智脑', value: 19, color: 'blue' },
  { key: 25, text: 'Moonshot AI', value: 25, color: 'black' },
  { key: 23, text: '腾讯混元', value: 23, color: 'teal' },
  { key: 26, text: '百川大模型', value: 26, color: 'orange' },
  { key: 27, text: 'MiniMax', value: 27, color: 'red' },
  { key: 29, text: 'Groq', value: 29, color: 'orange' },
  { key: 30, text: 'Ollama', value: 30, color: 'black' },
  { key: 31, text: '零一万物', value: 31, color: 'green' },
  { key: 32, text: '阶跃星辰', value: 32, color: 'blue' },
  { key: 34, text: 'Coze', value: 34, color: 'blue' },
  { key: 35, text: 'Cohere', value: 35, color: 'blue' },
  { key: 36, text: 'DeepSeek', value: 36, color: 'black' },
  { key: 37, text: 'Cloudflare', value: 37, color: 'orange' },
  { key: 38, text: 'DeepL', value: 38, color: 'black' },
  { key: 39, text: 'together.ai', value: 39, color: 'blue' },
  { key: 42, text: 'VertexAI', value: 42, color: 'blue' },
  { key: 43, text: 'Proxy', value: 43, color: 'blue' },
  { key: 44, text: 'SiliconFlow', value: 44, color: 'blue' },
  { key: 45, text: 'xAI', value: 45, color: 'blue' },
  { key: 46, text: 'Replicate', value: 46, color: 'blue' },
  {
    key: 8,
    text: '自定义渠道',
    value: 8,
    color: 'pink',
    tip: '不推荐使用,请使用 <strong>OpenAI 兼容</strong>渠道类型。注意,这里所需要填入的代理地址仅会在实际请求时替换域名部分,如果你想填入 OpenAI SDK 中所要求的 Base URL,请使用 OpenAI 兼容渠道类型',
    description: '不推荐使用,请使用 OpenAI 兼容渠道类型',
  },
  { key: 22, text: '知识库:FastGPT', value: 22, color: 'blue' },
  { key: 21, text: '知识库:AI Proxy', value: 21, color: 'purple' },
  { key: 20, text: 'OpenRouter', value: 20, color: 'black' },
  { key: 2, text: '代理:API2D', value: 2, color: 'blue' },
  { key: 5, text: '代理:OpenAI-SB', value: 5, color: 'brown' },
  { key: 7, text: '代理:OhMyGPT', value: 7, color: 'purple' },
  { key: 10, text: '代理:AI Proxy', value: 10, color: 'purple' },
  { key: 4, text: '代理:CloseAI', value: 4, color: 'teal' },
  { key: 6, text: '代理:OpenAI Max', value: 6, color: 'violet' },
  { key: 9, text: '代理:AI.LS', value: 9, color: 'yellow' },
  { key: 12, text: '代理:API2GPT', value: 12, color: 'blue' },
  { key: 13, text: '代理:AIGC2D', value: 13, color: 'purple' },

    {key: 1, text: 'OpenAI', value: 1, color: 'green'},
    {
        key: 50,
        text: 'OpenAI 兼容',
        value: 50,
        color: 'olive',
        description: 'OpenAI 兼容渠道,支持设置 Base URL',
    },
    {key: 14, text: 'Anthropic', value: 14, color: 'black'},
    {key: 33, text: 'AWS', value: 33, color: 'black'},
    {key: 3, text: 'Azure', value: 3, color: 'olive'},
    {key: 11, text: 'PaLM2', value: 11, color: 'orange'},
    {key: 24, text: 'Gemini', value: 24, color: 'orange'},
    {
        key: 51,
        text: 'Gemini (OpenAI)',
        value: 51,
        color: 'orange',
        description: 'Gemini OpenAI 兼容格式',
    },
    {key: 28, text: 'Mistral AI', value: 28, color: 'orange'},
    {key: 41, text: 'Novita', value: 41, color: 'purple'},
    {
        key: 40,
        text: '字节火山引擎',
        value: 40,
        color: 'blue',
        description: '原字节跳动豆包',
    },
    {
        key: 15,
        text: '百度文心千帆',
        value: 15,
        color: 'blue',
        tip: '请前往<a href="https://console.bce.baidu.com/qianfan/ais/console/applicationConsole/application/v1" target="_blank">此处</a>获取 AK(API Key)以及 SK(Secret Key),注意,V2 版本接口请使用 <strong>百度文心千帆 V2 </strong>渠道类型',
    },
    {
        key: 47,
        text: '百度文心千帆 V2',
        value: 47,
        color: 'blue',
        tip: '请前往<a href="https://console.bce.baidu.com/iam/#/iam/apikey/list" target="_blank">此处</a>获取 API Key,注意本渠道仅支持<a target="_blank" href="https://cloud.baidu.com/doc/WENXINWORKSHOP/s/em4tsqo3v">推理服务 V2</a>相关模型',
    },
    {
        key: 17,
        text: '阿里通义千问',
        value: 17,
        color: 'orange',
        tip: '如需使用阿里云百炼,请使用<strong>阿里云百炼</strong>渠道',
    },
    {key: 49, text: '阿里云百炼', value: 49, color: 'orange'},
    {
        key: 18,
        text: '讯飞星火认知',
        value: 18,
        color: 'blue',
        tip: '本渠道基于讯飞 WebSocket 版本 API,如需 HTTP 版本,请使用<strong>讯飞星火认知 V2</strong>渠道',
    },
    {
        key: 48,
        text: '讯飞星火认知 V2',
        value: 48,
        color: 'blue',
        tip: 'HTTP 版本的讯飞接口,前往<a href="https://console.xfyun.cn/services/cbm" target="_blank">此处</a>获取 HTTP 服务接口认证密钥',
    },
    {key: 16, text: '智谱 ChatGLM', value: 16, color: 'violet'},
    {key: 19, text: '360 智脑', value: 19, color: 'blue'},
    {key: 25, text: 'Moonshot AI', value: 25, color: 'black'},
    {key: 23, text: '腾讯混元', value: 23, color: 'teal'},
    {key: 26, text: '百川大模型', value: 26, color: 'orange'},
    {key: 27, text: 'MiniMax', value: 27, color: 'red'},
    {key: 29, text: 'Groq', value: 29, color: 'orange'},
    {key: 30, text: 'Ollama', value: 30, color: 'black'},
    {key: 31, text: '零一万物', value: 31, color: 'green'},
    {key: 32, text: '阶跃星辰', value: 32, color: 'blue'},
    {key: 34, text: 'Coze', value: 34, color: 'blue'},
    {key: 52, text: 'CozeV3', value: 52, color: 'blue'},
    {key: 35, text: 'Cohere', value: 35, color: 'blue'},
    {key: 36, text: 'DeepSeek', value: 36, color: 'black'},
    {key: 37, text: 'Cloudflare', value: 37, color: 'orange'},
    {key: 38, text: 'DeepL', value: 38, color: 'black'},
    {key: 39, text: 'together.ai', value: 39, color: 'blue'},
    {key: 42, text: 'VertexAI', value: 42, color: 'blue'},
    {key: 43, text: 'Proxy', value: 43, color: 'blue'},
    {key: 44, text: 'SiliconFlow', value: 44, color: 'blue'},
    {key: 45, text: 'xAI', value: 45, color: 'blue'},
    {key: 46, text: 'Replicate', value: 46, color: 'blue'},
    {
        key: 8,
        text: '自定义渠道',
        value: 8,
        color: 'pink',
        tip: '不推荐使用,请使用 <strong>OpenAI 兼容</strong>渠道类型。注意,这里所需要填入的代理地址仅会在实际请求时替换域名部分,如果你想填入 OpenAI SDK 中所要求的 Base URL,请使用 OpenAI 兼容渠道类型',
        description: '不推荐使用,请使用 OpenAI 兼容渠道类型',
    },
    {key: 22, text: '知识库:FastGPT', value: 22, color: 'blue'},
    {key: 21, text: '知识库:AI Proxy', value: 21, color: 'purple'},
    {key: 20, text: 'OpenRouter', value: 20, color: 'black'},
    {key: 2, text: '代理:API2D', value: 2, color: 'blue'},
    {key: 5, text: '代理:OpenAI-SB', value: 5, color: 'brown'},
    {key: 7, text: '代理:OhMyGPT', value: 7, color: 'purple'},
    {key: 10, text: '代理:AI Proxy', value: 10, color: 'purple'},
    {key: 4, text: '代理:CloseAI', value: 4, color: 'teal'},
    {key: 6, text: '代理:OpenAI Max', value: 6, color: 'violet'},
    {key: 9, text: '代理:AI.LS', value: 9, color: 'yellow'},
    {key: 12, text: '代理:API2GPT', value: 12, color: 'blue'},
    {key: 13, text: '代理:AIGC2D', value: 13, color: 'purple'},
];
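
Both versions of the list above are the same kind of lookup table: each entry maps a numeric channel type (key/value) to its display label, badge color, and an optional tip or description shown in the channel form. As a minimal sketch of how the frontend might resolve an entry by its value — assuming the array is exported as CHANNEL_OPTIONS as in the surrounding file, and with a helper name that is hypothetical and not part of this diff:

// Hypothetical helper, for illustration only.
// Assumes CHANNEL_OPTIONS is the array defined above.
export function getChannelOption(value) {
    return CHANNEL_OPTIONS.find((option) => option.value === value);
}

// Example: the CozeV3 entry added on one side of this merge.
// getChannelOption(52) -> {key: 52, text: 'CozeV3', value: 52, color: 'blue'}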