mirror of https://github.com/songquanpeng/one-api.git
synced 2025-10-23 18:03:41 +08:00

Compare commits: v0.6.6-alp ... v0.6.6-alp (12 commits)

Commits:
- e5b3e37c46
- 8de489cf06
- d14e4aa01b
- 541182102e
- b2679cca65
- 8572fac7a2
- a2a00dfbc3
- 129282f4a9
- a873cbd392
- 35ba1da984
- 2369025842
- f452bd481e

.github/workflows/docker-image-amd64-en.yml (vendored, 2 changed lines)

@@ -3,7 +3,7 @@ name: Publish Docker image (amd64, English)
 on:
   push:
     tags:
-      - '*'
+      - 'v*.*.*'
   workflow_dispatch:
     inputs:
       name:

.github/workflows/docker-image-amd64.yml (vendored, 2 changed lines)

@@ -3,7 +3,7 @@ name: Publish Docker image (amd64)
 on:
   push:
     tags:
-      - '*'
+      - 'v*.*.*'
   workflow_dispatch:
     inputs:
       name:

.github/workflows/docker-image-arm64.yml (vendored, 2 changed lines)

@@ -3,7 +3,7 @@ name: Publish Docker image (arm64)
 on:
   push:
     tags:
-      - '*'
+      - 'v*.*.*'
       - '!*-alpha*'
   workflow_dispatch:
     inputs:

.github/workflows/linux-release.yml (vendored, 2 changed lines)

@@ -5,7 +5,7 @@ permissions:
 on:
   push:
     tags:
-      - '*'
+      - 'v*.*.*'
       - '!*-alpha*'
   workflow_dispatch:
     inputs:

.github/workflows/macos-release.yml (vendored, 2 changed lines)

@@ -5,7 +5,7 @@ permissions:
 on:
   push:
     tags:
-      - '*'
+      - 'v*.*.*'
       - '!*-alpha*'
   workflow_dispatch:
     inputs:

.github/workflows/windows-release.yml (vendored, 2 changed lines)

@@ -5,7 +5,7 @@ permissions:
 on:
   push:
     tags:
-      - '*'
+      - 'v*.*.*'
       - '!*-alpha*'
   workflow_dispatch:
     inputs:

@@ -82,6 +82,7 @@ _✨ 通过标准的 OpenAI API 格式访问所有的大模型,开箱即用
    + [x] [Ollama](https://github.com/ollama/ollama)
    + [x] [零一万物](https://platform.lingyiwanwu.com/)
    + [x] [阶跃星辰](https://platform.stepfun.com/)
+   + [x] [Coze](https://www.coze.com/)
 2. 支持配置镜像以及众多[第三方代理服务](https://iamazing.cn/page/openai-api-third-party-services)。
 3. 支持通过**负载均衡**的方式访问多个渠道。
 4. 支持 **stream 模式**,可以通过流式传输实现打字机效果。

@@ -9,4 +9,5 @@ const (
 	KeySK     = KeyPrefix + "sk"
 	KeyAK     = KeyPrefix + "ak"
 	KeyRegion = KeyPrefix + "region"
+	KeyUserID = KeyPrefix + "user_id"
 )

@@ -16,7 +16,7 @@ import (
 )

 // Regex to match data URL pattern
 var dataURLPattern = regexp.MustCompile(`data:image/([^;]+);base64,(.*)`)

 func IsImageUrl(url string) (bool, error) {
 	resp, err := http.Head(url)

@@ -3,15 +3,16 @@ package logger
 import (
 	"context"
 	"fmt"
-	"github.com/gin-gonic/gin"
-	"github.com/songquanpeng/one-api/common/config"
-	"github.com/songquanpeng/one-api/common/helper"
 	"io"
 	"log"
 	"os"
 	"path/filepath"
 	"sync"
 	"time"
+
+	"github.com/gin-gonic/gin"
+	"github.com/songquanpeng/one-api/common/config"
+	"github.com/songquanpeng/one-api/common/helper"
 )

 const (

@@ -21,28 +22,20 @@ const (
 	loggerError = "ERR"
 )

-var setupLogLock sync.Mutex
-var setupLogWorking bool
+var setupLogOnce sync.Once

 func SetupLogger() {
-	if LogDir != "" {
-		ok := setupLogLock.TryLock()
-		if !ok {
-			log.Println("setup log is already working")
-			return
-		}
-		defer func() {
-			setupLogLock.Unlock()
-			setupLogWorking = false
-		}()
-		logPath := filepath.Join(LogDir, fmt.Sprintf("oneapi-%s.log", time.Now().Format("20060102")))
-		fd, err := os.OpenFile(logPath, os.O_APPEND|os.O_CREATE|os.O_WRONLY, 0644)
-		if err != nil {
-			log.Fatal("failed to open log file")
-		}
-		gin.DefaultWriter = io.MultiWriter(os.Stdout, fd)
-		gin.DefaultErrorWriter = io.MultiWriter(os.Stderr, fd)
-	}
+	setupLogOnce.Do(func() {
+		if LogDir != "" {
+			logPath := filepath.Join(LogDir, fmt.Sprintf("oneapi-%s.log", time.Now().Format("20060102")))
+			fd, err := os.OpenFile(logPath, os.O_APPEND|os.O_CREATE|os.O_WRONLY, 0644)
+			if err != nil {
+				log.Fatal("failed to open log file")
+			}
+			gin.DefaultWriter = io.MultiWriter(os.Stdout, fd)
+			gin.DefaultErrorWriter = io.MultiWriter(os.Stderr, fd)
+		}
+	})
 }

 func SysLog(s string) {

@@ -100,12 +93,7 @@ func logHelper(ctx context.Context, level string, msg string) {
 	}
 	now := time.Now()
 	_, _ = fmt.Fprintf(writer, "[%s] %v | %s | %s \n", level, now.Format("2006/01/02 - 15:04:05"), id, msg)
-	if !setupLogWorking {
-		setupLogWorking = true
-		go func() {
-			SetupLogger()
-		}()
-	}
+	SetupLogger()
 }

 func FatalLog(v ...any) {

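The two logger hunks above replace the TryLock/flag dance with sync.Once, so every log call can simply invoke SetupLogger() and initialization still runs exactly once. A minimal standalone sketch of that pattern (illustrative only, not code from the repository):

package main

import (
	"fmt"
	"sync"
)

var setupOnce sync.Once

// Setup is safe to call from any goroutine, any number of times;
// the body runs exactly once, on the first call.
func Setup() {
	setupOnce.Do(func() {
		fmt.Println("initializing log writers")
	})
}

func main() {
	for i := 0; i < 3; i++ {
		Setup() // prints only once
	}
}
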
@@ -64,8 +64,12 @@ func testChannel(channel *model.Channel) (err error, openaiErr *relaymodel.Error
 		return fmt.Errorf("invalid api type: %d, adaptor is nil", apiType), nil
 	}
 	adaptor.Init(meta)
-	modelName := adaptor.GetModelList()[0]
-	if !strings.Contains(channel.Models, modelName) {
+	var modelName string
+	modelList := adaptor.GetModelList()
+	if len(modelList) != 0 {
+		modelName = modelList[0]
+	}
+	if modelName == "" || !strings.Contains(channel.Models, modelName) {
 		modelNames := strings.Split(channel.Models, ",")
 		if len(modelNames) > 0 {
 			modelName = modelNames[0]

@@ -82,6 +86,7 @@ func testChannel(channel *model.Channel) (err error, openaiErr *relaymodel.Error
 	if err != nil {
 		return err, nil
 	}
+	logger.SysLog(string(jsonData))
 	requestBody := bytes.NewBuffer(jsonData)
 	c.Request.Body = io.NopCloser(requestBody)
 	resp, err := adaptor.DoRequest(c, meta, requestBody)

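The first hunk above stops indexing GetModelList()[0] on adaptors whose default model list may be empty (Coze's is, see constants.go below). A hedged restatement of the new selection order as a standalone helper, with illustrative names that are not from the repository:

package main

import (
	"fmt"
	"strings"
)

// pickTestModel mirrors the fallback logic: use the adaptor's first default model
// if it exists and is enabled on the channel, otherwise the channel's first model.
func pickTestModel(adaptorModels []string, channelModels string) string {
	var modelName string
	if len(adaptorModels) != 0 {
		modelName = adaptorModels[0]
	}
	if modelName == "" || !strings.Contains(channelModels, modelName) {
		if names := strings.Split(channelModels, ","); len(names) > 0 {
			modelName = names[0]
		}
	}
	return modelName
}

func main() {
	fmt.Println(pickTestModel(nil, "gpt-3.5-turbo,gpt-4")) // "gpt-3.5-turbo"
}
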
main.go (2 changed lines)

@@ -71,7 +71,7 @@ func main() {
 	}
 	if config.MemoryCacheEnabled {
 		logger.SysLog("memory cache enabled")
-		logger.SysError(fmt.Sprintf("sync frequency: %d seconds", config.SyncFrequency))
+		logger.SysLog(fmt.Sprintf("sync frequency: %d seconds", config.SyncFrequency))
 		model.InitChannelCache()
 	}
 	if config.MemoryCacheEnabled {

@@ -7,6 +7,7 @@ import (
 	"github.com/songquanpeng/one-api/relay/adaptor/anthropic"
 	"github.com/songquanpeng/one-api/relay/adaptor/aws"
 	"github.com/songquanpeng/one-api/relay/adaptor/baidu"
+	"github.com/songquanpeng/one-api/relay/adaptor/coze"
 	"github.com/songquanpeng/one-api/relay/adaptor/gemini"
 	"github.com/songquanpeng/one-api/relay/adaptor/ollama"
 	"github.com/songquanpeng/one-api/relay/adaptor/openai"

@@ -43,6 +44,8 @@ func GetAdaptor(apiType int) adaptor.Adaptor {
 		return &zhipu.Adaptor{}
 	case apitype.Ollama:
 		return &ollama.Adaptor{}
+	case apitype.Coze:
+		return &coze.Adaptor{}
 	}
 	return nil
 }

relay/adaptor/coze/adaptor.go (new file, 75 lines)

@@ -0,0 +1,75 @@
package coze

import (
	"errors"
	"fmt"
	"github.com/gin-gonic/gin"
	"github.com/songquanpeng/one-api/common/config"
	"github.com/songquanpeng/one-api/relay/adaptor"
	"github.com/songquanpeng/one-api/relay/adaptor/openai"
	"github.com/songquanpeng/one-api/relay/meta"
	"github.com/songquanpeng/one-api/relay/model"
	"io"
	"net/http"
)

type Adaptor struct {
}

func (a *Adaptor) Init(meta *meta.Meta) {

}

func (a *Adaptor) GetRequestURL(meta *meta.Meta) (string, error) {
	return fmt.Sprintf("%s/open_api/v2/chat", meta.BaseURL), nil
}

func (a *Adaptor) SetupRequestHeader(c *gin.Context, req *http.Request, meta *meta.Meta) error {
	adaptor.SetupCommonRequestHeader(c, req, meta)
	req.Header.Set("Authorization", "Bearer "+meta.APIKey)
	return nil
}

func (a *Adaptor) ConvertRequest(c *gin.Context, relayMode int, request *model.GeneralOpenAIRequest) (any, error) {
	if request == nil {
		return nil, errors.New("request is nil")
	}
	request.User = c.GetString(config.KeyUserID)
	return ConvertRequest(*request), nil
}

func (a *Adaptor) ConvertImageRequest(request *model.ImageRequest) (any, error) {
	if request == nil {
		return nil, errors.New("request is nil")
	}
	return request, nil
}

func (a *Adaptor) DoRequest(c *gin.Context, meta *meta.Meta, requestBody io.Reader) (*http.Response, error) {
	return adaptor.DoRequestHelper(a, c, meta, requestBody)
}

func (a *Adaptor) DoResponse(c *gin.Context, resp *http.Response, meta *meta.Meta) (usage *model.Usage, err *model.ErrorWithStatusCode) {
	var responseText *string
	if meta.IsStream {
		err, responseText = StreamHandler(c, resp)
	} else {
		err, responseText = Handler(c, resp, meta.PromptTokens, meta.ActualModelName)
	}
	if responseText != nil {
		usage = openai.ResponseText2Usage(*responseText, meta.ActualModelName, meta.PromptTokens)
	} else {
		usage = &model.Usage{}
	}
	usage.PromptTokens = meta.PromptTokens
	usage.TotalTokens = usage.PromptTokens + usage.CompletionTokens
	return
}

func (a *Adaptor) GetModelList() []string {
	return ModelList
}

func (a *Adaptor) GetChannelName() string {
	return "coze"
}

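As a rough usage sketch (assumed placeholder values; the field names come from the adaptor code above), a channel whose base URL is https://api.coze.com ends up calling the /open_api/v2/chat endpoint with a bearer token:

package main

import (
	"fmt"

	"github.com/songquanpeng/one-api/relay/adaptor/coze"
	"github.com/songquanpeng/one-api/relay/meta"
)

func main() {
	a := &coze.Adaptor{}
	m := &meta.Meta{
		BaseURL: "https://api.coze.com", // default base URL registered for channel type 34 below
		APIKey:  "pat_xxx",              // placeholder personal access token
	}
	url, _ := a.GetRequestURL(m)
	fmt.Println(url) // https://api.coze.com/open_api/v2/chat
}
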
relay/adaptor/coze/constant/contenttype/define.go (new file, 5 lines)

@@ -0,0 +1,5 @@
package contenttype

const (
	Text = "text"
)

relay/adaptor/coze/constant/event/define.go (new file, 7 lines)

@@ -0,0 +1,7 @@
package event

const (
	Message = "message"
	Done    = "done"
	Error   = "error"
)

relay/adaptor/coze/constant/messagetype/define.go (new file, 6 lines)

@@ -0,0 +1,6 @@
package messagetype

const (
	Answer   = "answer"
	FollowUp = "follow_up"
)

relay/adaptor/coze/constants.go (new file, 3 lines)

@@ -0,0 +1,3 @@
package coze

var ModelList = []string{}

relay/adaptor/coze/helper.go (new file, 10 lines)

@@ -0,0 +1,10 @@
package coze

import "github.com/songquanpeng/one-api/relay/adaptor/coze/constant/event"

func event2StopReason(e *string) string {
	if e == nil || *e == event.Message {
		return ""
	}
	return "stop"
}

relay/adaptor/coze/main.go (new file, 215 lines)

@@ -0,0 +1,215 @@
package coze

import (
	"bufio"
	"encoding/json"
	"fmt"
	"github.com/gin-gonic/gin"
	"github.com/songquanpeng/one-api/common"
	"github.com/songquanpeng/one-api/common/conv"
	"github.com/songquanpeng/one-api/common/helper"
	"github.com/songquanpeng/one-api/common/logger"
	"github.com/songquanpeng/one-api/relay/adaptor/coze/constant/messagetype"
	"github.com/songquanpeng/one-api/relay/adaptor/openai"
	"github.com/songquanpeng/one-api/relay/model"
	"io"
	"net/http"
	"strings"
)

// https://www.coze.com/open

func stopReasonCoze2OpenAI(reason *string) string {
	if reason == nil {
		return ""
	}
	switch *reason {
	case "end_turn":
		return "stop"
	case "stop_sequence":
		return "stop"
	case "max_tokens":
		return "length"
	default:
		return *reason
	}
}

func ConvertRequest(textRequest model.GeneralOpenAIRequest) *Request {
	cozeRequest := Request{
		Stream: textRequest.Stream,
		User:   textRequest.User,
		BotId:  strings.TrimPrefix(textRequest.Model, "bot-"),
	}
	for i, message := range textRequest.Messages {
		if i == len(textRequest.Messages)-1 {
			cozeRequest.Query = message.StringContent()
			continue
		}
		cozeMessage := Message{
			Role:    message.Role,
			Content: message.StringContent(),
		}
		cozeRequest.ChatHistory = append(cozeRequest.ChatHistory, cozeMessage)
	}
	return &cozeRequest
}

func StreamResponseCoze2OpenAI(cozeResponse *StreamResponse) (*openai.ChatCompletionsStreamResponse, *Response) {
	var response *Response
	var stopReason string
	var choice openai.ChatCompletionsStreamResponseChoice

	if cozeResponse.Message != nil {
		if cozeResponse.Message.Type != messagetype.Answer {
			return nil, nil
		}
		choice.Delta.Content = cozeResponse.Message.Content
	}
	choice.Delta.Role = "assistant"
	finishReason := stopReasonCoze2OpenAI(&stopReason)
	if finishReason != "null" {
		choice.FinishReason = &finishReason
	}
	var openaiResponse openai.ChatCompletionsStreamResponse
	openaiResponse.Object = "chat.completion.chunk"
	openaiResponse.Choices = []openai.ChatCompletionsStreamResponseChoice{choice}
	openaiResponse.Id = cozeResponse.ConversationId
	return &openaiResponse, response
}

func ResponseCoze2OpenAI(cozeResponse *Response) *openai.TextResponse {
	var responseText string
	for _, message := range cozeResponse.Messages {
		if message.Type == messagetype.Answer {
			responseText = message.Content
			break
		}
	}
	choice := openai.TextResponseChoice{
		Index: 0,
		Message: model.Message{
			Role:    "assistant",
			Content: responseText,
			Name:    nil,
		},
		FinishReason: "stop",
	}
	fullTextResponse := openai.TextResponse{
		Id:      fmt.Sprintf("chatcmpl-%s", cozeResponse.ConversationId),
		Model:   "coze-bot",
		Object:  "chat.completion",
		Created: helper.GetTimestamp(),
		Choices: []openai.TextResponseChoice{choice},
	}
	return &fullTextResponse
}

func StreamHandler(c *gin.Context, resp *http.Response) (*model.ErrorWithStatusCode, *string) {
	var responseText string
	createdTime := helper.GetTimestamp()
	scanner := bufio.NewScanner(resp.Body)
	scanner.Split(func(data []byte, atEOF bool) (advance int, token []byte, err error) {
		if atEOF && len(data) == 0 {
			return 0, nil, nil
		}
		if i := strings.Index(string(data), "\n"); i >= 0 {
			return i + 1, data[0:i], nil
		}
		if atEOF {
			return len(data), data, nil
		}
		return 0, nil, nil
	})
	dataChan := make(chan string)
	stopChan := make(chan bool)
	go func() {
		for scanner.Scan() {
			data := scanner.Text()
			if len(data) < 5 {
				continue
			}
			if !strings.HasPrefix(data, "data:") {
				continue
			}
			data = strings.TrimPrefix(data, "data:")
			dataChan <- data
		}
		stopChan <- true
	}()
	common.SetEventStreamHeaders(c)
	var modelName string
	c.Stream(func(w io.Writer) bool {
		select {
		case data := <-dataChan:
			// some implementations may add \r at the end of data
			data = strings.TrimSuffix(data, "\r")
			var cozeResponse StreamResponse
			err := json.Unmarshal([]byte(data), &cozeResponse)
			if err != nil {
				logger.SysError("error unmarshalling stream response: " + err.Error())
				return true
			}
			response, _ := StreamResponseCoze2OpenAI(&cozeResponse)
			if response == nil {
				return true
			}
			for _, choice := range response.Choices {
				responseText += conv.AsString(choice.Delta.Content)
			}
			response.Model = modelName
			response.Created = createdTime
			jsonStr, err := json.Marshal(response)
			if err != nil {
				logger.SysError("error marshalling stream response: " + err.Error())
				return true
			}
			c.Render(-1, common.CustomEvent{Data: "data: " + string(jsonStr)})
			return true
		case <-stopChan:
			c.Render(-1, common.CustomEvent{Data: "data: [DONE]"})
			return false
		}
	})
	_ = resp.Body.Close()
	return nil, &responseText
}

func Handler(c *gin.Context, resp *http.Response, promptTokens int, modelName string) (*model.ErrorWithStatusCode, *string) {
	responseBody, err := io.ReadAll(resp.Body)
	if err != nil {
		return openai.ErrorWrapper(err, "read_response_body_failed", http.StatusInternalServerError), nil
	}
	err = resp.Body.Close()
	if err != nil {
		return openai.ErrorWrapper(err, "close_response_body_failed", http.StatusInternalServerError), nil
	}
	var cozeResponse Response
	err = json.Unmarshal(responseBody, &cozeResponse)
	if err != nil {
		return openai.ErrorWrapper(err, "unmarshal_response_body_failed", http.StatusInternalServerError), nil
	}
	if cozeResponse.Code != 0 {
		return &model.ErrorWithStatusCode{
			Error: model.Error{
				Message: cozeResponse.Msg,
				Code:    cozeResponse.Code,
			},
			StatusCode: resp.StatusCode,
		}, nil
	}
	fullTextResponse := ResponseCoze2OpenAI(&cozeResponse)
	fullTextResponse.Model = modelName
	jsonResponse, err := json.Marshal(fullTextResponse)
	if err != nil {
		return openai.ErrorWrapper(err, "marshal_response_body_failed", http.StatusInternalServerError), nil
	}
	c.Writer.Header().Set("Content-Type", "application/json")
	c.Writer.WriteHeader(resp.StatusCode)
	_, err = c.Writer.Write(jsonResponse)
	var responseText string
	if len(fullTextResponse.Choices) > 0 {
		responseText = fullTextResponse.Choices[0].Message.StringContent()
	}
	return nil, &responseText
}

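A small, hedged example of what ConvertRequest above does with an OpenAI-style payload (the values are made up): the `bot-` prefix is stripped from the model name to form BotId, the last message becomes Query, and everything before it becomes ChatHistory:

package main

import (
	"encoding/json"
	"fmt"

	"github.com/songquanpeng/one-api/relay/adaptor/coze"
	"github.com/songquanpeng/one-api/relay/model"
)

func main() {
	req := model.GeneralOpenAIRequest{
		Model:  "bot-7123456789", // the "model" picked by the client is really a Coze Bot ID
		User:   "user-42",
		Stream: false,
		Messages: []model.Message{
			{Role: "user", Content: "Hi"},
			{Role: "assistant", Content: "Hello! How can I help?"},
			{Role: "user", Content: "Summarize my last ticket"},
		},
	}
	cozeReq := coze.ConvertRequest(req)
	out, _ := json.MarshalIndent(cozeReq, "", "  ")
	fmt.Println(string(out))
	// BotId:       "7123456789"
	// Query:       "Summarize my last ticket"
	// ChatHistory: the first two messages
}
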
relay/adaptor/coze/model.go (new file, 38 lines)

@@ -0,0 +1,38 @@
package coze

type Message struct {
	Role        string `json:"role"`
	Type        string `json:"type"`
	Content     string `json:"content"`
	ContentType string `json:"content_type"`
}

type ErrorInformation struct {
	Code int    `json:"code"`
	Msg  string `json:"msg"`
}

type Request struct {
	ConversationId string    `json:"conversation_id,omitempty"`
	BotId          string    `json:"bot_id"`
	User           string    `json:"user"`
	Query          string    `json:"query"`
	ChatHistory    []Message `json:"chat_history,omitempty"`
	Stream         bool      `json:"stream"`
}

type Response struct {
	ConversationId string    `json:"conversation_id,omitempty"`
	Messages       []Message `json:"messages,omitempty"`
	Code           int       `json:"code,omitempty"`
	Msg            string    `json:"msg,omitempty"`
}

type StreamResponse struct {
	Event            string            `json:"event,omitempty"`
	Message          *Message          `json:"message,omitempty"`
	IsFinish         bool              `json:"is_finish,omitempty"`
	Index            int               `json:"index,omitempty"`
	ConversationId   string            `json:"conversation_id,omitempty"`
	ErrorInformation *ErrorInformation `json:"error_information,omitempty"`
}

@@ -7,4 +7,6 @@ var ModelList = []string{
 	"llama2-7b-2048",
 	"llama2-70b-4096",
 	"mixtral-8x7b-32768",
+	"llama3-8b-8192",
+	"llama3-70b-8192",
 }

@@ -15,6 +15,12 @@ import (
 	"strings"
 )

+const (
+	dataPrefix       = "data: "
+	done             = "[DONE]"
+	dataPrefixLength = len(dataPrefix)
+)
+
 func StreamHandler(c *gin.Context, resp *http.Response, relayMode int) (*model.ErrorWithStatusCode, string, *model.Usage) {
 	responseText := ""
 	scanner := bufio.NewScanner(resp.Body)

@@ -36,39 +42,46 @@ func StreamHandler(c *gin.Context, resp *http.Response, relayMode int) (*model.E
 	go func() {
 		for scanner.Scan() {
 			data := scanner.Text()
-			if len(data) < 6 { // ignore blank line or wrong format
+			if len(data) < dataPrefixLength { // ignore blank line or wrong format
 				continue
 			}
-			if data[:6] != "data: " && data[:6] != "[DONE]" {
+			if data[:dataPrefixLength] != dataPrefix && data[:dataPrefixLength] != done {
 				continue
 			}
-			dataChan <- data
-			data = data[6:]
-			if !strings.HasPrefix(data, "[DONE]") {
-				switch relayMode {
-				case relaymode.ChatCompletions:
-					var streamResponse ChatCompletionsStreamResponse
-					err := json.Unmarshal([]byte(data), &streamResponse)
-					if err != nil {
-						logger.SysError("error unmarshalling stream response: " + err.Error())
-						continue // just ignore the error
-					}
-					for _, choice := range streamResponse.Choices {
-						responseText += conv.AsString(choice.Delta.Content)
-					}
-					if streamResponse.Usage != nil {
-						usage = streamResponse.Usage
-					}
-				case relaymode.Completions:
-					var streamResponse CompletionsStreamResponse
-					err := json.Unmarshal([]byte(data), &streamResponse)
-					if err != nil {
-						logger.SysError("error unmarshalling stream response: " + err.Error())
-						continue
-					}
-					for _, choice := range streamResponse.Choices {
-						responseText += choice.Text
-					}
-				}
+			if strings.HasPrefix(data[dataPrefixLength:], done) {
+				dataChan <- data
+				continue
+			}
+			switch relayMode {
+			case relaymode.ChatCompletions:
+				var streamResponse ChatCompletionsStreamResponse
+				err := json.Unmarshal([]byte(data[dataPrefixLength:]), &streamResponse)
+				if err != nil {
+					logger.SysError("error unmarshalling stream response: " + err.Error())
+					dataChan <- data // if error happened, pass the data to client
+					continue // just ignore the error
+				}
+				if len(streamResponse.Choices) == 0 {
+					// but for empty choice, we should not pass it to client, this is for azure
+					continue // just ignore empty choice
+				}
+				dataChan <- data
+				for _, choice := range streamResponse.Choices {
+					responseText += conv.AsString(choice.Delta.Content)
+				}
+				if streamResponse.Usage != nil {
+					usage = streamResponse.Usage
+				}
+			case relaymode.Completions:
+				dataChan <- data
+				var streamResponse CompletionsStreamResponse
+				err := json.Unmarshal([]byte(data[dataPrefixLength:]), &streamResponse)
+				if err != nil {
+					logger.SysError("error unmarshalling stream response: " + err.Error())
+					continue
+				}
+				for _, choice := range streamResponse.Choices {
+					responseText += choice.Text
+				}
 			}
 		}

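The rewrite above centralizes the magic strings into dataPrefix/done constants and decides line by line whether to forward an SSE chunk to the client. A simplified, self-contained sketch of that per-line classification (it deliberately leaves out the gin streaming and channel plumbing, so it is not the repository's implementation):

package main

import (
	"fmt"
	"strings"
)

const (
	dataPrefix       = "data: "
	done             = "[DONE]"
	dataPrefixLength = len(dataPrefix)
)

// classify reports what to do with one SSE line: skip it, treat it as the
// terminating [DONE] marker, or hand back the JSON payload after "data: ".
func classify(line string) (payload string, isDone bool, skip bool) {
	if len(line) < dataPrefixLength || !strings.HasPrefix(line, dataPrefix) {
		return "", false, true // blank line or wrong format
	}
	payload = line[dataPrefixLength:]
	if strings.HasPrefix(payload, done) {
		return "", true, false
	}
	return payload, false, false
}

func main() {
	fmt.Println(classify(`data: {"choices":[{"delta":{"content":"hi"}}]}`))
	fmt.Println(classify("data: [DONE]"))
	fmt.Println(classify(""))
}
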
@@ -13,6 +13,7 @@ const (
 	Gemini
 	Ollama
 	AwsClaude
+	Coze

 	Dummy // this one is only for count, do not add any channel after this
 )

@@ -147,11 +147,13 @@ var ModelRatio = map[string]float64{
 	"mistral-medium-latest": 2.7 / 1000 * USD,
 	"mistral-large-latest":  8.0 / 1000 * USD,
 	"mistral-embed":         0.1 / 1000 * USD,
-	// https://wow.groq.com/
-	"llama2-70b-4096":    0.7 / 1000 * USD,
-	"llama2-7b-2048":     0.1 / 1000 * USD,
+	// https://wow.groq.com/#:~:text=inquiries%C2%A0here.-,Model,-Current%20Speed
+	"llama3-70b-8192":    0.59 / 1000 * USD,
 	"mixtral-8x7b-32768": 0.27 / 1000 * USD,
+	"llama3-8b-8192":     0.05 / 1000 * USD,
 	"gemma-7b-it":        0.1 / 1000 * USD,
+	"llama2-70b-4096":    0.64 / 1000 * USD,
+	"llama2-7b-2048":     0.1 / 1000 * USD,
 	// https://platform.lingyiwanwu.com/docs#-计费单元
 	"yi-34b-chat-0205": 2.5 / 1000 * RMB,
 	"yi-34b-chat-200k": 12.0 / 1000 * RMB,

@@ -258,7 +260,7 @@ func GetCompletionRatio(name string) float64 {
 		return 4.0 / 3.0
 	}
 	if strings.HasPrefix(name, "gpt-4") {
-		if strings.HasPrefix(name, "gpt-4-turbo") {
+		if strings.HasPrefix(name, "gpt-4-turbo") || strings.HasSuffix(name, "preview") {
 			return 3
 		}
 		return 2

@@ -277,7 +279,11 @@ func GetCompletionRatio(name string) float64 {
 	}
 	switch name {
 	case "llama2-70b-4096":
-		return 0.8 / 0.7
+		return 0.8 / 0.64
 	case "llama3-8b-8192":
 		return 2
 	case "llama3-70b-8192":
 		return 0.79 / 0.59
 	}
 	return 1
 }

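Reading these constants as output price divided by input price (the Groq pricing links above suggest this interpretation, though the repository does not state it explicitly), the updated ratios work out as follows:

package main

import "fmt"

func main() {
	fmt.Println(0.8 / 0.64)  // 1.25, llama2-70b-4096 completion ratio
	fmt.Println(0.79 / 0.59) // 1.3389830508474576 (≈ 1.34), llama3-70b-8192
}
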
@@ -35,6 +35,7 @@ const (
 	LingYiWanWu
 	StepFun
 	AwsClaude
+	Coze

 	Dummy
 )

@@ -27,6 +27,8 @@ func ToAPIType(channelType int) int {
 		apiType = apitype.Ollama
 	case AwsClaude:
 		apiType = apitype.AwsClaude
+	case Coze:
+		apiType = apitype.Coze
 	}

 	return apiType

@@ -35,6 +35,7 @@ var ChannelBaseURLs = []string{
 	"https://api.lingyiwanwu.com", // 31
 	"https://api.stepfun.com",     // 32
 	"",                            // 33
+	"https://api.coze.com",        // 34
 }

 func init() {

@@ -6,6 +6,7 @@ import (
 	"fmt"
 	"io"
 	"net/http"
+	"strings"

 	"github.com/gin-gonic/gin"
 	"github.com/songquanpeng/one-api/common/logger"

@@ -87,7 +88,7 @@ func RelayTextHelper(c *gin.Context) *model.ErrorWithStatusCode {
 		return openai.ErrorWrapper(err, "do_request_failed", http.StatusInternalServerError)
 	}
 	if resp != nil {
-		errorHappened := (resp.StatusCode != http.StatusOK) || (meta.IsStream && resp.Header.Get("Content-Type") == "application/json")
+		errorHappened := (resp.StatusCode != http.StatusOK) || (meta.IsStream && strings.HasPrefix(resp.Header.Get("Content-Type"), "application/json"))
 		if errorHappened {
 			billing.ReturnPreConsumedQuota(ctx, preConsumedQuota, meta.TokenId)
 			return RelayErrorHandler(resp)

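The Content-Type comparison above switches from strict equality to strings.HasPrefix because upstreams commonly append parameters such as a charset to the header. A tiny illustration with an assumed header value:

package main

import (
	"fmt"
	"strings"
)

func main() {
	ct := "application/json; charset=utf-8" // a typical error-response header
	fmt.Println(ct == "application/json")                  // false: the old check misses it
	fmt.Println(strings.HasPrefix(ct, "application/json")) // true: the new check catches it
}
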
@@ -13,6 +13,7 @@ const COPY_OPTIONS = [
 ];

 const OPEN_LINK_OPTIONS = [
   { key: 'next', text: 'ChatGPT Next Web', value: 'next' },
+  { key: 'ama', text: 'BotGem', value: 'ama' },
   { key: 'opencat', text: 'OpenCat', value: 'opencat' },
 ];

@@ -19,6 +19,7 @@ export const CHANNEL_OPTIONS = [
   { key: 30, text: 'Ollama', value: 30, color: 'black' },
   { key: 31, text: '零一万物', value: 31, color: 'green' },
   { key: 32, text: '阶跃星辰', value: 32, color: 'blue' },
+  { key: 34, text: 'Coze', value: 34, color: 'blue' },
   { key: 8, text: '自定义渠道', value: 8, color: 'pink' },
   { key: 22, text: '知识库:FastGPT', value: 22, color: 'blue' },
   { key: 21, text: '知识库:AI Proxy', value: 21, color: 'purple' },

@@ -57,7 +57,8 @@ const EditChannel = () => {
   const [config, setConfig] = useState({
     region: '',
     sk: '',
-    ak: ''
+    ak: '',
+    user_id: ''
   });
   const handleInputChange = (e, { name, value }) => {
     setInputs((inputs) => ({ ...inputs, [name]: value }));

@@ -156,13 +157,11 @@ const EditChannel = () => {
   }, []);

   const submit = async () => {
-    // some provider as AWS need both AK and SK rather than a single key,
-    // so we need to combine them into a single key to achieve the best compatibility.
-    if (inputs.ak && inputs.sk) {
-      console.log(`combine ak ${inputs.ak} and sk ${inputs.sk}`, inputs.ak, inputs.sk);
-      inputs.key = `${inputs.ak}\n${inputs.sk}`;
+    if (inputs.key === '') {
+      if (config.ak !== '' && config.sk !== '' && config.region !== '') {
+        inputs.key = `${config.ak}|${config.sk}|${config.region}`;
+      }
     }
-
     if (!isEdit && (inputs.name === '' || inputs.key === '')) {
       showInfo('请填写渠道名称和渠道密钥!');
       return;

@@ -356,6 +355,13 @@ const EditChannel = () => {
             </Form.Field>
           )
         }
+        {
+          inputs.type === 34 && (
+            <Message>
+              对于 Coze 而言,模型名称即 Bot ID,你可以添加一个前缀 `bot-`,例如:`bot-123456`。
+            </Message>
+          )
+        }
         <Form.Field>
           <Form.Dropdown
             label='模型'

@@ -446,6 +452,18 @@ const EditChannel = () => {
             </Form.Field>
           )
         }
+        {
+          inputs.type === 34 && (
+            <Form.Input
+              label='User ID'
+              name='user_id'
+              required
+              placeholder={'生成该密钥的用户 ID'}
+              onChange={handleConfigChange}
+              value={config.user_id}
+              autoComplete=''
+            />)
+        }
         {
           inputs.type !== 33 && (batch ? <Form.Field>
             <Form.TextArea