Mirror of https://github.com/songquanpeng/one-api.git (synced 2025-09-20 02:26:38 +08:00)

feat: support vision
- Added new dependencies: `github.com/fsnotify/fsnotify v1.4.9`, `github.com/go-playground/assert/v2 v2.2.0`, `github.com/nxadm/tail v1.4.8`, `github.com/onsi/ginkgo v1.16.5`, `github.com/onsi/gomega v1.18.1`, `golang.org/x/net v0.10.0`, `gopkg.in/tomb.v1 v1.0.0-20141024135613-dd632973f1e7`
- Updated dependencies: `github.com/gin-gonic/gin` from v1.9.0 to v2.0.0, `golang.org/x/net` from v0.17.0 to v0.10.0
- Removed dependencies: `github.com/golang-jwt/jwt v3.2.2+incompatible`, `github.com/gorilla/websocket v1.5.1`
- Updated Go version from `1.18` to `1.21`
- Made various modifications and refactoring in the code:
  - Added new struct `VisionMessage` with fields `Role`, `Content`, and `Name` (a sketch of the assumed definition follows this header)
  - Added constants for certain types
  - Added methods and error handling to handle different message types
  - Modified existing struct and methods to accommodate changes
  - Removed unused imports

This commit is contained in:
parent b2e46a33ac
commit b58bd7e3ab
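
The `VisionMessage` type named in the commit message is defined outside the hunks shown below, which only reach it through `Content.Text`. The following is a minimal sketch of what such a definition could look like, assuming an OpenAI-style vision payload; the `VisionMessageContent` name, its `Type`/`ImageURL` fields, and the field types are illustrative assumptions, not code from the repository.

package controller

// Sketch only: the repository's actual definition may differ.
type VisionMessageContent struct {
	Type     string `json:"type"`                // assumed discriminator, e.g. "text" or "image_url"
	Text     string `json:"text,omitempty"`      // the field the adapters below read as Content.Text
	ImageURL string `json:"image_url,omitempty"` // assumed field for image parts
}

type VisionMessage struct {
	Role    string               `json:"role"`
	Content VisionMessageContent `json:"content"`
	Name    string               `json:"name,omitempty"`
}
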
@@ -4,12 +4,14 @@ import (
 	"bufio"
 	"encoding/json"
 	"fmt"
-	"github.com/gin-gonic/gin"
 	"io"
+	"log"
 	"net/http"
 	"one-api/common"
 	"strconv"
 	"strings"
+
+	"github.com/gin-gonic/gin"
 )
 
 // https://docs.aiproxy.io/dev/library#使用已经定制好的知识库进行对话问答
@@ -47,9 +49,27 @@ type AIProxyLibraryStreamResponse struct {
 
 func requestOpenAI2AIProxyLibrary(request GeneralOpenAIRequest) *AIProxyLibraryRequest {
 	query := ""
-	if len(request.Messages) != 0 {
-		query = request.Messages[len(request.Messages)-1].Content
+	if request.MessagesLen() != 0 {
+		switch msgs := request.Messages.(type) {
+		case []Message:
+			query = msgs[len(msgs)-1].Content
+		case []VisionMessage:
+			query = msgs[len(msgs)-1].Content.Text
+		case []any:
+			msg := msgs[len(msgs)-1]
+			switch msg := msg.(type) {
+			case Message:
+				query = msg.Content
+			case VisionMessage:
+				query = msg.Content.Text
+			default:
+				log.Panicf("unknown message type: %T", msg)
+			}
+		default:
+			log.Panicf("unknown message type: %T", msgs)
+		}
 	}
 
 	return &AIProxyLibraryRequest{
 		Model:  request.Model,
 		Stream: request.Stream,

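`request.MessagesLen()` and the loosened type of `GeneralOpenAIRequest.Messages` are not part of this excerpt. From the call sites above, `Messages` appears to become an interface-typed field that may hold `[]Message`, `[]VisionMessage`, or a mixed `[]any`, with `MessagesLen()` reporting its length. A hedged sketch of such a helper (an assumption, not the repository's implementation):

// Assumed helper: only the call site appears in the diff above.
func (r GeneralOpenAIRequest) MessagesLen() int {
	switch msgs := r.Messages.(type) {
	case []Message:
		return len(msgs)
	case []VisionMessage:
		return len(msgs)
	case []any:
		return len(msgs)
	default:
		return 0
	}
}

Under that assumption, a mixed request such as `Messages: []any{Message{...}, VisionMessage{...}}` lands in the `[]any` branch of the conversion above, which is why the inner type switch and the `log.Panicf` fallback are needed.
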
@@ -1,329 +1,329 @@
(Every non-blank line of the Ali adapter is commented out in this commit: each line of the previous content below reappears in the new revision prefixed with `// `; blank lines are unchanged.)

package controller

import (
	"bufio"
	"encoding/json"
	"github.com/gin-gonic/gin"
	"io"
	"net/http"
	"one-api/common"
	"strings"
)

// https://help.aliyun.com/document_detail/613695.html?spm=a2c4g.2399480.0.0.1adb778fAdzP9w#341800c0f8w0r

type AliMessage struct {
	User string `json:"user"`
	Bot  string `json:"bot"`
}

type AliInput struct {
	Prompt  string       `json:"prompt"`
	History []AliMessage `json:"history"`
}

type AliParameters struct {
	TopP         float64 `json:"top_p,omitempty"`
	TopK         int     `json:"top_k,omitempty"`
	Seed         uint64  `json:"seed,omitempty"`
	EnableSearch bool    `json:"enable_search,omitempty"`
}

type AliChatRequest struct {
	Model      string        `json:"model"`
	Input      AliInput      `json:"input"`
	Parameters AliParameters `json:"parameters,omitempty"`
}

type AliEmbeddingRequest struct {
	Model string `json:"model"`
	Input struct {
		Texts []string `json:"texts"`
	} `json:"input"`
	Parameters *struct {
		TextType string `json:"text_type,omitempty"`
	} `json:"parameters,omitempty"`
}

type AliEmbedding struct {
	Embedding []float64 `json:"embedding"`
	TextIndex int       `json:"text_index"`
}

type AliEmbeddingResponse struct {
	Output struct {
		Embeddings []AliEmbedding `json:"embeddings"`
	} `json:"output"`
	Usage AliUsage `json:"usage"`
	AliError
}

type AliError struct {
	Code      string `json:"code"`
	Message   string `json:"message"`
	RequestId string `json:"request_id"`
}

type AliUsage struct {
	InputTokens  int `json:"input_tokens"`
	OutputTokens int `json:"output_tokens"`
	TotalTokens  int `json:"total_tokens"`
}

type AliOutput struct {
	Text         string `json:"text"`
	FinishReason string `json:"finish_reason"`
}

type AliChatResponse struct {
	Output AliOutput `json:"output"`
	Usage  AliUsage  `json:"usage"`
	AliError
}

func requestOpenAI2Ali(request GeneralOpenAIRequest) *AliChatRequest {
	messages := make([]AliMessage, 0, len(request.Messages))
	prompt := ""
	for i := 0; i < len(request.Messages); i++ {
		message := request.Messages[i]
		if message.Role == "system" {
			messages = append(messages, AliMessage{
				User: message.Content,
				Bot:  "Okay",
			})
			continue
		} else {
			if i == len(request.Messages)-1 {
				prompt = message.Content
				break
			}
			messages = append(messages, AliMessage{
				User: message.Content,
				Bot:  request.Messages[i+1].Content,
			})
			i++
		}
	}
	return &AliChatRequest{
		Model: request.Model,
		Input: AliInput{
			Prompt:  prompt,
			History: messages,
		},
		//Parameters: AliParameters{ // ChatGPT's parameters are not compatible with Ali's
		//	TopP:   request.TopP,
		//	TopK:   50,
		//	//Seed:         0,
		//	//EnableSearch: false,
		//},
	}
}

func embeddingRequestOpenAI2Ali(request GeneralOpenAIRequest) *AliEmbeddingRequest {
	return &AliEmbeddingRequest{
		Model: "text-embedding-v1",
		Input: struct {
			Texts []string `json:"texts"`
		}{
			Texts: request.ParseInput(),
		},
	}
}

func aliEmbeddingHandler(c *gin.Context, resp *http.Response) (*OpenAIErrorWithStatusCode, *Usage) {
	var aliResponse AliEmbeddingResponse
	err := json.NewDecoder(resp.Body).Decode(&aliResponse)
	if err != nil {
		return errorWrapper(err, "unmarshal_response_body_failed", http.StatusInternalServerError), nil
	}

	err = resp.Body.Close()
	if err != nil {
		return errorWrapper(err, "close_response_body_failed", http.StatusInternalServerError), nil
	}

	if aliResponse.Code != "" {
		return &OpenAIErrorWithStatusCode{
			OpenAIError: OpenAIError{
				Message: aliResponse.Message,
				Type:    aliResponse.Code,
				Param:   aliResponse.RequestId,
				Code:    aliResponse.Code,
			},
			StatusCode: resp.StatusCode,
		}, nil
	}

	fullTextResponse := embeddingResponseAli2OpenAI(&aliResponse)
	jsonResponse, err := json.Marshal(fullTextResponse)
	if err != nil {
		return errorWrapper(err, "marshal_response_body_failed", http.StatusInternalServerError), nil
	}
	c.Writer.Header().Set("Content-Type", "application/json")
	c.Writer.WriteHeader(resp.StatusCode)
	_, err = c.Writer.Write(jsonResponse)
	return nil, &fullTextResponse.Usage
}

func embeddingResponseAli2OpenAI(response *AliEmbeddingResponse) *OpenAIEmbeddingResponse {
	openAIEmbeddingResponse := OpenAIEmbeddingResponse{
		Object: "list",
		Data:   make([]OpenAIEmbeddingResponseItem, 0, len(response.Output.Embeddings)),
		Model:  "text-embedding-v1",
		Usage:  Usage{TotalTokens: response.Usage.TotalTokens},
	}

	for _, item := range response.Output.Embeddings {
		openAIEmbeddingResponse.Data = append(openAIEmbeddingResponse.Data, OpenAIEmbeddingResponseItem{
			Object:    `embedding`,
			Index:     item.TextIndex,
			Embedding: item.Embedding,
		})
	}
	return &openAIEmbeddingResponse
}

func responseAli2OpenAI(response *AliChatResponse) *OpenAITextResponse {
	choice := OpenAITextResponseChoice{
		Index: 0,
		Message: Message{
			Role:    "assistant",
			Content: response.Output.Text,
		},
		FinishReason: response.Output.FinishReason,
	}
	fullTextResponse := OpenAITextResponse{
		Id:      response.RequestId,
		Object:  "chat.completion",
		Created: common.GetTimestamp(),
		Choices: []OpenAITextResponseChoice{choice},
		Usage: Usage{
			PromptTokens:     response.Usage.InputTokens,
			CompletionTokens: response.Usage.OutputTokens,
			TotalTokens:      response.Usage.InputTokens + response.Usage.OutputTokens,
		},
	}
	return &fullTextResponse
}

func streamResponseAli2OpenAI(aliResponse *AliChatResponse) *ChatCompletionsStreamResponse {
	var choice ChatCompletionsStreamResponseChoice
	choice.Delta.Content = aliResponse.Output.Text
	if aliResponse.Output.FinishReason != "null" {
		finishReason := aliResponse.Output.FinishReason
		choice.FinishReason = &finishReason
	}
	response := ChatCompletionsStreamResponse{
		Id:      aliResponse.RequestId,
		Object:  "chat.completion.chunk",
		Created: common.GetTimestamp(),
		Model:   "ernie-bot",
		Choices: []ChatCompletionsStreamResponseChoice{choice},
	}
	return &response
}

func aliStreamHandler(c *gin.Context, resp *http.Response) (*OpenAIErrorWithStatusCode, *Usage) {
	var usage Usage
	scanner := bufio.NewScanner(resp.Body)
	scanner.Split(func(data []byte, atEOF bool) (advance int, token []byte, err error) {
		if atEOF && len(data) == 0 {
			return 0, nil, nil
		}
		if i := strings.Index(string(data), "\n"); i >= 0 {
			return i + 1, data[0:i], nil
		}
		if atEOF {
			return len(data), data, nil
		}
		return 0, nil, nil
	})
	dataChan := make(chan string)
	stopChan := make(chan bool)
	go func() {
		for scanner.Scan() {
			data := scanner.Text()
			if len(data) < 5 { // ignore blank line or wrong format
				continue
			}
			if data[:5] != "data:" {
				continue
			}
			data = data[5:]
			dataChan <- data
		}
		stopChan <- true
	}()
	setEventStreamHeaders(c)
	lastResponseText := ""
	c.Stream(func(w io.Writer) bool {
		select {
		case data := <-dataChan:
			var aliResponse AliChatResponse
			err := json.Unmarshal([]byte(data), &aliResponse)
			if err != nil {
				common.SysError("error unmarshalling stream response: " + err.Error())
				return true
			}
			if aliResponse.Usage.OutputTokens != 0 {
				usage.PromptTokens = aliResponse.Usage.InputTokens
				usage.CompletionTokens = aliResponse.Usage.OutputTokens
				usage.TotalTokens = aliResponse.Usage.InputTokens + aliResponse.Usage.OutputTokens
			}
			response := streamResponseAli2OpenAI(&aliResponse)
			response.Choices[0].Delta.Content = strings.TrimPrefix(response.Choices[0].Delta.Content, lastResponseText)
			lastResponseText = aliResponse.Output.Text
			jsonResponse, err := json.Marshal(response)
			if err != nil {
				common.SysError("error marshalling stream response: " + err.Error())
				return true
			}
			c.Render(-1, common.CustomEvent{Data: "data: " + string(jsonResponse)})
			return true
		case <-stopChan:
			c.Render(-1, common.CustomEvent{Data: "data: [DONE]"})
			return false
		}
	})
	err := resp.Body.Close()
	if err != nil {
		return errorWrapper(err, "close_response_body_failed", http.StatusInternalServerError), nil
	}
	return nil, &usage
}

func aliHandler(c *gin.Context, resp *http.Response) (*OpenAIErrorWithStatusCode, *Usage) {
	var aliResponse AliChatResponse
	responseBody, err := io.ReadAll(resp.Body)
	if err != nil {
		return errorWrapper(err, "read_response_body_failed", http.StatusInternalServerError), nil
	}
	err = resp.Body.Close()
	if err != nil {
		return errorWrapper(err, "close_response_body_failed", http.StatusInternalServerError), nil
	}
	err = json.Unmarshal(responseBody, &aliResponse)
	if err != nil {
		return errorWrapper(err, "unmarshal_response_body_failed", http.StatusInternalServerError), nil
	}
	if aliResponse.Code != "" {
		return &OpenAIErrorWithStatusCode{
			OpenAIError: OpenAIError{
				Message: aliResponse.Message,
				Type:    aliResponse.Code,
				Param:   aliResponse.RequestId,
				Code:    aliResponse.Code,
			},
			StatusCode: resp.StatusCode,
		}, nil
	}
	fullTextResponse := responseAli2OpenAI(&aliResponse)
	jsonResponse, err := json.Marshal(fullTextResponse)
	if err != nil {
		return errorWrapper(err, "marshal_response_body_failed", http.StatusInternalServerError), nil
	}
	c.Writer.Header().Set("Content-Type", "application/json")
	c.Writer.WriteHeader(resp.StatusCode)
	_, err = c.Writer.Write(jsonResponse)
	return nil, &fullTextResponse.Usage
}

@@ -1,359 +1,359 @@
(As with the Ali adapter above, every non-blank line of the Baidu adapter is commented out in this commit: each line of the previous content below reappears in the new revision prefixed with `// `; blank lines are unchanged.)

package controller

import (
	"bufio"
	"encoding/json"
	"errors"
	"fmt"
	"github.com/gin-gonic/gin"
	"io"
	"net/http"
	"one-api/common"
	"strings"
	"sync"
	"time"
)

// https://cloud.baidu.com/doc/WENXINWORKSHOP/s/flfmc9do2

type BaiduTokenResponse struct {
	ExpiresIn   int    `json:"expires_in"`
	AccessToken string `json:"access_token"`
}

type BaiduMessage struct {
	Role    string `json:"role"`
	Content string `json:"content"`
}

type BaiduChatRequest struct {
	Messages []BaiduMessage `json:"messages"`
	Stream   bool           `json:"stream"`
	UserId   string         `json:"user_id,omitempty"`
}

type BaiduError struct {
	ErrorCode int    `json:"error_code"`
	ErrorMsg  string `json:"error_msg"`
}

type BaiduChatResponse struct {
	Id               string `json:"id"`
	Object           string `json:"object"`
	Created          int64  `json:"created"`
	Result           string `json:"result"`
	IsTruncated      bool   `json:"is_truncated"`
	NeedClearHistory bool   `json:"need_clear_history"`
	Usage            Usage  `json:"usage"`
	BaiduError
}

type BaiduChatStreamResponse struct {
	BaiduChatResponse
	SentenceId int  `json:"sentence_id"`
	IsEnd      bool `json:"is_end"`
}

type BaiduEmbeddingRequest struct {
	Input []string `json:"input"`
}

type BaiduEmbeddingData struct {
	Object    string    `json:"object"`
	Embedding []float64 `json:"embedding"`
	Index     int       `json:"index"`
}

type BaiduEmbeddingResponse struct {
	Id      string               `json:"id"`
	Object  string               `json:"object"`
	Created int64                `json:"created"`
	Data    []BaiduEmbeddingData `json:"data"`
	Usage   Usage                `json:"usage"`
	BaiduError
}

type BaiduAccessToken struct {
	AccessToken      string    `json:"access_token"`
	Error            string    `json:"error,omitempty"`
	ErrorDescription string    `json:"error_description,omitempty"`
	ExpiresIn        int64     `json:"expires_in,omitempty"`
	ExpiresAt        time.Time `json:"-"`
}

var baiduTokenStore sync.Map

func requestOpenAI2Baidu(request GeneralOpenAIRequest) *BaiduChatRequest {
	messages := make([]BaiduMessage, 0, len(request.Messages))
	for _, message := range request.Messages {
		if message.Role == "system" {
			messages = append(messages, BaiduMessage{
				Role:    "user",
				Content: message.Content,
			})
			messages = append(messages, BaiduMessage{
				Role:    "assistant",
				Content: "Okay",
			})
		} else {
			messages = append(messages, BaiduMessage{
				Role:    message.Role,
				Content: message.Content,
			})
		}
	}
	return &BaiduChatRequest{
		Messages: messages,
		Stream:   request.Stream,
	}
}

func responseBaidu2OpenAI(response *BaiduChatResponse) *OpenAITextResponse {
	choice := OpenAITextResponseChoice{
		Index: 0,
		Message: Message{
			Role:    "assistant",
			Content: response.Result,
		},
		FinishReason: "stop",
	}
	fullTextResponse := OpenAITextResponse{
		Id:      response.Id,
		Object:  "chat.completion",
		Created: response.Created,
		Choices: []OpenAITextResponseChoice{choice},
		Usage:   response.Usage,
	}
	return &fullTextResponse
}

func streamResponseBaidu2OpenAI(baiduResponse *BaiduChatStreamResponse) *ChatCompletionsStreamResponse {
	var choice ChatCompletionsStreamResponseChoice
	choice.Delta.Content = baiduResponse.Result
	if baiduResponse.IsEnd {
		choice.FinishReason = &stopFinishReason
	}
	response := ChatCompletionsStreamResponse{
		Id:      baiduResponse.Id,
		Object:  "chat.completion.chunk",
		Created: baiduResponse.Created,
		Model:   "ernie-bot",
		Choices: []ChatCompletionsStreamResponseChoice{choice},
	}
	return &response
}

func embeddingRequestOpenAI2Baidu(request GeneralOpenAIRequest) *BaiduEmbeddingRequest {
	return &BaiduEmbeddingRequest{
		Input: request.ParseInput(),
	}
}

func embeddingResponseBaidu2OpenAI(response *BaiduEmbeddingResponse) *OpenAIEmbeddingResponse {
	openAIEmbeddingResponse := OpenAIEmbeddingResponse{
		Object: "list",
		Data:   make([]OpenAIEmbeddingResponseItem, 0, len(response.Data)),
		Model:  "baidu-embedding",
		Usage:  response.Usage,
	}
	for _, item := range response.Data {
		openAIEmbeddingResponse.Data = append(openAIEmbeddingResponse.Data, OpenAIEmbeddingResponseItem{
			Object:    item.Object,
			Index:     item.Index,
			Embedding: item.Embedding,
		})
	}
	return &openAIEmbeddingResponse
}

func baiduStreamHandler(c *gin.Context, resp *http.Response) (*OpenAIErrorWithStatusCode, *Usage) {
	var usage Usage
	scanner := bufio.NewScanner(resp.Body)
	scanner.Split(func(data []byte, atEOF bool) (advance int, token []byte, err error) {
		if atEOF && len(data) == 0 {
			return 0, nil, nil
		}
		if i := strings.Index(string(data), "\n"); i >= 0 {
			return i + 1, data[0:i], nil
		}
		if atEOF {
			return len(data), data, nil
		}
		return 0, nil, nil
	})
	dataChan := make(chan string)
	stopChan := make(chan bool)
	go func() {
		for scanner.Scan() {
			data := scanner.Text()
			if len(data) < 6 { // ignore blank line or wrong format
				continue
			}
			data = data[6:]
			dataChan <- data
		}
		stopChan <- true
	}()
	setEventStreamHeaders(c)
	c.Stream(func(w io.Writer) bool {
		select {
		case data := <-dataChan:
			var baiduResponse BaiduChatStreamResponse
			err := json.Unmarshal([]byte(data), &baiduResponse)
			if err != nil {
				common.SysError("error unmarshalling stream response: " + err.Error())
				return true
			}
			if baiduResponse.Usage.TotalTokens != 0 {
				usage.TotalTokens = baiduResponse.Usage.TotalTokens
				usage.PromptTokens = baiduResponse.Usage.PromptTokens
				usage.CompletionTokens = baiduResponse.Usage.TotalTokens - baiduResponse.Usage.PromptTokens
			}
			response := streamResponseBaidu2OpenAI(&baiduResponse)
			jsonResponse, err := json.Marshal(response)
			if err != nil {
				common.SysError("error marshalling stream response: " + err.Error())
				return true
			}
			c.Render(-1, common.CustomEvent{Data: "data: " + string(jsonResponse)})
			return true
		case <-stopChan:
			c.Render(-1, common.CustomEvent{Data: "data: [DONE]"})
			return false
		}
	})
	err := resp.Body.Close()
	if err != nil {
		return errorWrapper(err, "close_response_body_failed", http.StatusInternalServerError), nil
	}
	return nil, &usage
}

func baiduHandler(c *gin.Context, resp *http.Response) (*OpenAIErrorWithStatusCode, *Usage) {
	var baiduResponse BaiduChatResponse
	responseBody, err := io.ReadAll(resp.Body)
	if err != nil {
		return errorWrapper(err, "read_response_body_failed", http.StatusInternalServerError), nil
	}
	err = resp.Body.Close()
	if err != nil {
		return errorWrapper(err, "close_response_body_failed", http.StatusInternalServerError), nil
	}
	err = json.Unmarshal(responseBody, &baiduResponse)
	if err != nil {
		return errorWrapper(err, "unmarshal_response_body_failed", http.StatusInternalServerError), nil
	}
	if baiduResponse.ErrorMsg != "" {
		return &OpenAIErrorWithStatusCode{
			OpenAIError: OpenAIError{
				Message: baiduResponse.ErrorMsg,
				Type:    "baidu_error",
				Param:   "",
				Code:    baiduResponse.ErrorCode,
			},
			StatusCode: resp.StatusCode,
		}, nil
	}
	fullTextResponse := responseBaidu2OpenAI(&baiduResponse)
	jsonResponse, err := json.Marshal(fullTextResponse)
	if err != nil {
		return errorWrapper(err, "marshal_response_body_failed", http.StatusInternalServerError), nil
	}
	c.Writer.Header().Set("Content-Type", "application/json")
	c.Writer.WriteHeader(resp.StatusCode)
	_, err = c.Writer.Write(jsonResponse)
	return nil, &fullTextResponse.Usage
}

func baiduEmbeddingHandler(c *gin.Context, resp *http.Response) (*OpenAIErrorWithStatusCode, *Usage) {
	var baiduResponse BaiduEmbeddingResponse
	responseBody, err := io.ReadAll(resp.Body)
	if err != nil {
		return errorWrapper(err, "read_response_body_failed", http.StatusInternalServerError), nil
	}
	err = resp.Body.Close()
	if err != nil {
		return errorWrapper(err, "close_response_body_failed", http.StatusInternalServerError), nil
	}
	err = json.Unmarshal(responseBody, &baiduResponse)
	if err != nil {
		return errorWrapper(err, "unmarshal_response_body_failed", http.StatusInternalServerError), nil
	}
	if baiduResponse.ErrorMsg != "" {
		return &OpenAIErrorWithStatusCode{
			OpenAIError: OpenAIError{
				Message: baiduResponse.ErrorMsg,
				Type:    "baidu_error",
				Param:   "",
				Code:    baiduResponse.ErrorCode,
			},
			StatusCode: resp.StatusCode,
		}, nil
	}
	fullTextResponse := embeddingResponseBaidu2OpenAI(&baiduResponse)
	jsonResponse, err := json.Marshal(fullTextResponse)
	if err != nil {
		return errorWrapper(err, "marshal_response_body_failed", http.StatusInternalServerError), nil
	}
	c.Writer.Header().Set("Content-Type", "application/json")
	c.Writer.WriteHeader(resp.StatusCode)
	_, err = c.Writer.Write(jsonResponse)
	return nil, &fullTextResponse.Usage
}

func getBaiduAccessToken(apiKey string) (string, error) {
	if val, ok := baiduTokenStore.Load(apiKey); ok {
		var accessToken BaiduAccessToken
		if accessToken, ok = val.(BaiduAccessToken); ok {
			// soon this will expire
			if time.Now().Add(time.Hour).After(accessToken.ExpiresAt) {
				go func() {
					_, _ = getBaiduAccessTokenHelper(apiKey)
				}()
			}
			return accessToken.AccessToken, nil
		}
	}
	accessToken, err := getBaiduAccessTokenHelper(apiKey)
	if err != nil {
		return "", err
	}
	if accessToken == nil {
		return "", errors.New("getBaiduAccessToken return a nil token")
	}
	return (*accessToken).AccessToken, nil
}

func getBaiduAccessTokenHelper(apiKey string) (*BaiduAccessToken, error) {
	parts := strings.Split(apiKey, "|")
	if len(parts) != 2 {
		return nil, errors.New("invalid baidu apikey")
	}
	req, err := http.NewRequest("POST", fmt.Sprintf("https://aip.baidubce.com/oauth/2.0/token?grant_type=client_credentials&client_id=%s&client_secret=%s",
		parts[0], parts[1]), nil)
	if err != nil {
		return nil, err
	}
	req.Header.Add("Content-Type", "application/json")
	req.Header.Add("Accept", "application/json")
	res, err := impatientHTTPClient.Do(req)
	if err != nil {
		return nil, err
	}
	defer res.Body.Close()

	var accessToken BaiduAccessToken
	err = json.NewDecoder(res.Body).Decode(&accessToken)
	if err != nil {
		return nil, err
	}
	if accessToken.Error != "" {
		return nil, errors.New(accessToken.Error + ": " + accessToken.ErrorDescription)
	}
	if accessToken.AccessToken == "" {
		return nil, errors.New("getBaiduAccessTokenHelper get empty access token")
	}
	accessToken.ExpiresAt = time.Now().Add(time.Duration(accessToken.ExpiresIn) * time.Second)
	baiduTokenStore.Store(apiKey, accessToken)
	return &accessToken, nil
}

@@ -4,11 +4,13 @@ import (
 	"bufio"
 	"encoding/json"
 	"fmt"
-	"github.com/gin-gonic/gin"
 	"io"
+	"log"
 	"net/http"
 	"one-api/common"
 	"strings"
+
+	"github.com/gin-gonic/gin"
 )
 
 type ClaudeMetadata struct {
@@ -64,7 +66,13 @@ func requestOpenAI2Claude(textRequest GeneralOpenAIRequest) *ClaudeRequest {
 		claudeRequest.MaxTokensToSample = 1000000
 	}
 	prompt := ""
-	for _, message := range textRequest.Messages {
+	messages, err := textRequest.TextMessages()
+	if err != nil {
+		log.Panicf("invalid message type: %T", textRequest.Messages)
+	}
+
+	for _, message := range messages {
 		if message.Role == "user" {
 			prompt += fmt.Sprintf("\n\nHuman: %s", message.Content)
 		} else if message.Role == "assistant" {

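`textRequest.TextMessages()` is also defined outside this excerpt. Judging from its use here, it plausibly flattens whatever `Messages` holds back into a plain text-only `[]Message`, returning an error when that is not possible; the following is a sketch under that assumption, not the repository's code.

// Assumed helper: converts the loosely typed Messages field back into text-only messages.
func (r GeneralOpenAIRequest) TextMessages() ([]Message, error) {
	switch msgs := r.Messages.(type) {
	case []Message:
		return msgs, nil
	case []VisionMessage:
		out := make([]Message, 0, len(msgs))
		for _, m := range msgs {
			out = append(out, Message{Role: m.Role, Content: m.Content.Text})
		}
		return out, nil
	default:
		return nil, fmt.Errorf("unsupported message type: %T", r.Messages)
	}
}

This keeps the Claude prompt-building loop unchanged while concentrating the text/vision distinction in one place; panicking via `log.Panicf` on failure mirrors the AIProxy hunk earlier in the commit.
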
@@ -1,205 +1,205 @@
(The PaLM adapter is commented out the same way: each non-blank line of the previous content below reappears in the new revision prefixed with `// `; blank lines are unchanged.)

package controller

import (
	"encoding/json"
	"fmt"
	"github.com/gin-gonic/gin"
	"io"
	"net/http"
	"one-api/common"
)

// https://developers.generativeai.google/api/rest/generativelanguage/models/generateMessage#request-body
// https://developers.generativeai.google/api/rest/generativelanguage/models/generateMessage#response-body

type PaLMChatMessage struct {
	Author  string `json:"author"`
	Content string `json:"content"`
}

type PaLMFilter struct {
	Reason  string `json:"reason"`
	Message string `json:"message"`
}

type PaLMPrompt struct {
	Messages []PaLMChatMessage `json:"messages"`
}

type PaLMChatRequest struct {
	Prompt         PaLMPrompt `json:"prompt"`
	Temperature    float64    `json:"temperature,omitempty"`
	CandidateCount int        `json:"candidateCount,omitempty"`
	TopP           float64    `json:"topP,omitempty"`
	TopK           int        `json:"topK,omitempty"`
}

type PaLMError struct {
	Code    int    `json:"code"`
	Message string `json:"message"`
	Status  string `json:"status"`
}

type PaLMChatResponse struct {
	Candidates []PaLMChatMessage `json:"candidates"`
	Messages   []Message         `json:"messages"`
	Filters    []PaLMFilter      `json:"filters"`
	Error      PaLMError         `json:"error"`
}

func requestOpenAI2PaLM(textRequest GeneralOpenAIRequest) *PaLMChatRequest {
	palmRequest := PaLMChatRequest{
		Prompt: PaLMPrompt{
			Messages: make([]PaLMChatMessage, 0, len(textRequest.Messages)),
		},
		Temperature:    textRequest.Temperature,
		CandidateCount: textRequest.N,
		TopP:           textRequest.TopP,
		TopK:           textRequest.MaxTokens,
	}
	for _, message := range textRequest.Messages {
		palmMessage := PaLMChatMessage{
			Content: message.Content,
		}
		if message.Role == "user" {
			palmMessage.Author = "0"
		} else {
			palmMessage.Author = "1"
		}
		palmRequest.Prompt.Messages = append(palmRequest.Prompt.Messages, palmMessage)
	}
	return &palmRequest
}

func responsePaLM2OpenAI(response *PaLMChatResponse) *OpenAITextResponse {
	fullTextResponse := OpenAITextResponse{
		Choices: make([]OpenAITextResponseChoice, 0, len(response.Candidates)),
	}
	for i, candidate := range response.Candidates {
		choice := OpenAITextResponseChoice{
			Index: i,
			Message: Message{
				Role:    "assistant",
				Content: candidate.Content,
			},
			FinishReason: "stop",
		}
		fullTextResponse.Choices = append(fullTextResponse.Choices, choice)
	}
	return &fullTextResponse
}

func streamResponsePaLM2OpenAI(palmResponse *PaLMChatResponse) *ChatCompletionsStreamResponse {
	var choice ChatCompletionsStreamResponseChoice
	if len(palmResponse.Candidates) > 0 {
		choice.Delta.Content = palmResponse.Candidates[0].Content
	}
	choice.FinishReason = &stopFinishReason
	var response ChatCompletionsStreamResponse
	response.Object = "chat.completion.chunk"
	response.Model = "palm2"
	response.Choices = []ChatCompletionsStreamResponseChoice{choice}
	return &response
}

func palmStreamHandler(c *gin.Context, resp *http.Response) (*OpenAIErrorWithStatusCode, string) {
	responseText := ""
	responseId := fmt.Sprintf("chatcmpl-%s", common.GetUUID())
	createdTime := common.GetTimestamp()
	dataChan := make(chan string)
	stopChan := make(chan bool)
	go func() {
		responseBody, err := io.ReadAll(resp.Body)
		if err != nil {
			common.SysError("error reading stream response: " + err.Error())
			stopChan <- true
			return
		}
		err = resp.Body.Close()
		if err != nil {
			common.SysError("error closing stream response: " + err.Error())
			stopChan <- true
			return
		}
		var palmResponse PaLMChatResponse
		err = json.Unmarshal(responseBody, &palmResponse)
		if err != nil {
			common.SysError("error unmarshalling stream response: " + err.Error())
			stopChan <- true
			return
		}
		fullTextResponse := streamResponsePaLM2OpenAI(&palmResponse)
		fullTextResponse.Id = responseId
		fullTextResponse.Created = createdTime
		if len(palmResponse.Candidates) > 0 {
			responseText = palmResponse.Candidates[0].Content
		}
		jsonResponse, err := json.Marshal(fullTextResponse)
		if err != nil {
			common.SysError("error marshalling stream response: " + err.Error())
			stopChan <- true
			return
		}
		dataChan <- string(jsonResponse)
		stopChan <- true
	}()
	setEventStreamHeaders(c)
	c.Stream(func(w io.Writer) bool {
		select {
		case data := <-dataChan:
			c.Render(-1, common.CustomEvent{Data: "data: " + data})
|
||||||
return true
|
// return true
|
||||||
case <-stopChan:
|
// case <-stopChan:
|
||||||
c.Render(-1, common.CustomEvent{Data: "data: [DONE]"})
|
// c.Render(-1, common.CustomEvent{Data: "data: [DONE]"})
|
||||||
return false
|
// return false
|
||||||
}
|
// }
|
||||||
})
|
// })
|
||||||
err := resp.Body.Close()
|
// err := resp.Body.Close()
|
||||||
if err != nil {
|
// if err != nil {
|
||||||
return errorWrapper(err, "close_response_body_failed", http.StatusInternalServerError), ""
|
// return errorWrapper(err, "close_response_body_failed", http.StatusInternalServerError), ""
|
||||||
}
|
// }
|
||||||
return nil, responseText
|
// return nil, responseText
|
||||||
}
|
// }
|
||||||
|
|
||||||
func palmHandler(c *gin.Context, resp *http.Response, promptTokens int, model string) (*OpenAIErrorWithStatusCode, *Usage) {
|
// func palmHandler(c *gin.Context, resp *http.Response, promptTokens int, model string) (*OpenAIErrorWithStatusCode, *Usage) {
|
||||||
responseBody, err := io.ReadAll(resp.Body)
|
// responseBody, err := io.ReadAll(resp.Body)
|
||||||
if err != nil {
|
// if err != nil {
|
||||||
return errorWrapper(err, "read_response_body_failed", http.StatusInternalServerError), nil
|
// return errorWrapper(err, "read_response_body_failed", http.StatusInternalServerError), nil
|
||||||
}
|
// }
|
||||||
err = resp.Body.Close()
|
// err = resp.Body.Close()
|
||||||
if err != nil {
|
// if err != nil {
|
||||||
return errorWrapper(err, "close_response_body_failed", http.StatusInternalServerError), nil
|
// return errorWrapper(err, "close_response_body_failed", http.StatusInternalServerError), nil
|
||||||
}
|
// }
|
||||||
var palmResponse PaLMChatResponse
|
// var palmResponse PaLMChatResponse
|
||||||
err = json.Unmarshal(responseBody, &palmResponse)
|
// err = json.Unmarshal(responseBody, &palmResponse)
|
||||||
if err != nil {
|
// if err != nil {
|
||||||
return errorWrapper(err, "unmarshal_response_body_failed", http.StatusInternalServerError), nil
|
// return errorWrapper(err, "unmarshal_response_body_failed", http.StatusInternalServerError), nil
|
||||||
}
|
// }
|
||||||
if palmResponse.Error.Code != 0 || len(palmResponse.Candidates) == 0 {
|
// if palmResponse.Error.Code != 0 || len(palmResponse.Candidates) == 0 {
|
||||||
return &OpenAIErrorWithStatusCode{
|
// return &OpenAIErrorWithStatusCode{
|
||||||
OpenAIError: OpenAIError{
|
// OpenAIError: OpenAIError{
|
||||||
Message: palmResponse.Error.Message,
|
// Message: palmResponse.Error.Message,
|
||||||
Type: palmResponse.Error.Status,
|
// Type: palmResponse.Error.Status,
|
||||||
Param: "",
|
// Param: "",
|
||||||
Code: palmResponse.Error.Code,
|
// Code: palmResponse.Error.Code,
|
||||||
},
|
// },
|
||||||
StatusCode: resp.StatusCode,
|
// StatusCode: resp.StatusCode,
|
||||||
}, nil
|
// }, nil
|
||||||
}
|
// }
|
||||||
fullTextResponse := responsePaLM2OpenAI(&palmResponse)
|
// fullTextResponse := responsePaLM2OpenAI(&palmResponse)
|
||||||
completionTokens := countTokenText(palmResponse.Candidates[0].Content, model)
|
// completionTokens := countTokenText(palmResponse.Candidates[0].Content, model)
|
||||||
usage := Usage{
|
// usage := Usage{
|
||||||
PromptTokens: promptTokens,
|
// PromptTokens: promptTokens,
|
||||||
CompletionTokens: completionTokens,
|
// CompletionTokens: completionTokens,
|
||||||
TotalTokens: promptTokens + completionTokens,
|
// TotalTokens: promptTokens + completionTokens,
|
||||||
}
|
// }
|
||||||
fullTextResponse.Usage = usage
|
// fullTextResponse.Usage = usage
|
||||||
jsonResponse, err := json.Marshal(fullTextResponse)
|
// jsonResponse, err := json.Marshal(fullTextResponse)
|
||||||
if err != nil {
|
// if err != nil {
|
||||||
return errorWrapper(err, "marshal_response_body_failed", http.StatusInternalServerError), nil
|
// return errorWrapper(err, "marshal_response_body_failed", http.StatusInternalServerError), nil
|
||||||
}
|
// }
|
||||||
c.Writer.Header().Set("Content-Type", "application/json")
|
// c.Writer.Header().Set("Content-Type", "application/json")
|
||||||
c.Writer.WriteHeader(resp.StatusCode)
|
// c.Writer.WriteHeader(resp.StatusCode)
|
||||||
_, err = c.Writer.Write(jsonResponse)
|
// _, err = c.Writer.Write(jsonResponse)
|
||||||
return nil, &usage
|
// return nil, &usage
|
||||||
}
|
// }
|
||||||
|
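Because the PaLM response modeled above carries no token usage, palmHandler rebuilds it locally: prompt tokens are counted before the request, completion tokens are counted from the first candidate, and the total is their sum. A compact restatement of that bookkeeping, using the countTokenText helper already referenced above:

// Illustrative restatement of the usage arithmetic in palmHandler; not new code in this commit.
func buildUsage(promptTokens int, completionText string, model string) Usage {
	completionTokens := countTokenText(completionText, model)
	return Usage{
		PromptTokens:     promptTokens,
		CompletionTokens: completionTokens,
		TotalTokens:      promptTokens + completionTokens,
	}
}
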
@ -1,287 +1,287 @@
|
|||||||
package controller
|
package controller
|
||||||
|
|
||||||
import (
|
// import (
|
||||||
"bufio"
|
// "bufio"
|
||||||
"crypto/hmac"
|
// "crypto/hmac"
|
||||||
"crypto/sha1"
|
// "crypto/sha1"
|
||||||
"encoding/base64"
|
// "encoding/base64"
|
||||||
"encoding/json"
|
// "encoding/json"
|
||||||
"errors"
|
// "errors"
|
||||||
"fmt"
|
// "fmt"
|
||||||
"github.com/gin-gonic/gin"
|
// "github.com/gin-gonic/gin"
|
||||||
"io"
|
// "io"
|
||||||
"net/http"
|
// "net/http"
|
||||||
"one-api/common"
|
// "one-api/common"
|
||||||
"sort"
|
// "sort"
|
||||||
"strconv"
|
// "strconv"
|
||||||
"strings"
|
// "strings"
|
||||||
)
|
// )
|
||||||
|
|
||||||
// https://cloud.tencent.com/document/product/1729/97732
|
// // https://cloud.tencent.com/document/product/1729/97732
|
||||||
|
|
||||||
type TencentMessage struct {
|
// type TencentMessage struct {
|
||||||
Role string `json:"role"`
|
// Role string `json:"role"`
|
||||||
Content string `json:"content"`
|
// Content string `json:"content"`
|
||||||
}
|
// }
|
||||||
|
|
||||||
type TencentChatRequest struct {
|
// type TencentChatRequest struct {
|
||||||
AppId int64 `json:"app_id"` // APPID of the Tencent Cloud account
|
// AppId int64 `json:"app_id"` // APPID of the Tencent Cloud account
|
||||||
SecretId string `json:"secret_id"` // SecretId from the Tencent Cloud console
|
// SecretId string `json:"secret_id"` // SecretId from the Tencent Cloud console
|
||||||
// Timestamp is the current UNIX timestamp in seconds, i.e. when the API request is issued.
|
// // Timestamp is the current UNIX timestamp in seconds, i.e. when the API request is issued.
|
||||||
// For example 1529223702; if it differs too much from the current time, a signature-expired error is raised.
|
// // For example 1529223702; if it differs too much from the current time, a signature-expired error is raised.
|
||||||
Timestamp int64 `json:"timestamp"`
|
// Timestamp int64 `json:"timestamp"`
|
||||||
// Expired is the signature's validity deadline, a UNIX Epoch timestamp
|
// // Expired is the signature's validity deadline, a UNIX Epoch timestamp
|
||||||
// in seconds; Expired must be greater than Timestamp, and Expired-Timestamp must be less than 90 days.
|
// // in seconds; Expired must be greater than Timestamp, and Expired-Timestamp must be less than 90 days.
|
||||||
Expired int64 `json:"expired"`
|
// Expired int64 `json:"expired"`
|
||||||
QueryID string `json:"query_id"` // request id, used for troubleshooting
|
// QueryID string `json:"query_id"` // request id, used for troubleshooting
|
||||||
// Temperature: higher values make the output more random, lower values make it more focused and deterministic.
|
// // Temperature: higher values make the output more random, lower values make it more focused and deterministic.
|
||||||
// Defaults to 1.0, range [0.0, 2.0]; leave it unset unless necessary, unreasonable values degrade quality.
|
// // Defaults to 1.0, range [0.0, 2.0]; leave it unset unless necessary, unreasonable values degrade quality.
|
||||||
// Set only one of this parameter and top_p; do not change top_p at the same time.
|
// // Set only one of this parameter and top_p; do not change top_p at the same time.
|
||||||
Temperature float64 `json:"temperature"`
|
// Temperature float64 `json:"temperature"`
|
||||||
// TopP controls the diversity of the output; larger values yield more diverse text.
|
// // TopP controls the diversity of the output; larger values yield more diverse text.
|
||||||
// Defaults to 1.0, range [0.0, 1.0]; leave it unset unless necessary, unreasonable values degrade quality.
|
// // Defaults to 1.0, range [0.0, 1.0]; leave it unset unless necessary, unreasonable values degrade quality.
|
||||||
// Set only one of this parameter and temperature; do not change both at the same time.
|
// // Set only one of this parameter and temperature; do not change both at the same time.
|
||||||
TopP float64 `json:"top_p"`
|
// TopP float64 `json:"top_p"`
|
||||||
// Stream: 0 = synchronous, 1 = streaming (the default; protocol: SSE).
|
// // Stream: 0 = synchronous, 1 = streaming (the default; protocol: SSE).
|
||||||
// Synchronous requests time out after 60s; streaming is recommended for longer content.
|
// // Synchronous requests time out after 60s; streaming is recommended for longer content.
|
||||||
Stream int `json:"stream"`
|
// Stream int `json:"stream"`
|
||||||
// Messages holds the conversation, at most 40 entries, ordered from oldest to newest.
|
// // Messages holds the conversation, at most 40 entries, ordered from oldest to newest.
|
||||||
// The total input content supports at most 3000 tokens.
|
// // The total input content supports at most 3000 tokens.
|
||||||
Messages []TencentMessage `json:"messages"`
|
// Messages []TencentMessage `json:"messages"`
|
||||||
}
|
// }
|
||||||
|
|
||||||
type TencentError struct {
|
// type TencentError struct {
|
||||||
Code int `json:"code"`
|
// Code int `json:"code"`
|
||||||
Message string `json:"message"`
|
// Message string `json:"message"`
|
||||||
}
|
// }
|
||||||
|
|
||||||
type TencentUsage struct {
|
// type TencentUsage struct {
|
||||||
InputTokens int `json:"input_tokens"`
|
// InputTokens int `json:"input_tokens"`
|
||||||
OutputTokens int `json:"output_tokens"`
|
// OutputTokens int `json:"output_tokens"`
|
||||||
TotalTokens int `json:"total_tokens"`
|
// TotalTokens int `json:"total_tokens"`
|
||||||
}
|
// }
|
||||||
|
|
||||||
type TencentResponseChoices struct {
|
// type TencentResponseChoices struct {
|
||||||
FinishReason string `json:"finish_reason,omitempty"` // streaming end flag; "stop" marks the final packet
|
// FinishReason string `json:"finish_reason,omitempty"` // streaming end flag; "stop" marks the final packet
|
||||||
Messages TencentMessage `json:"messages,omitempty"` // content returned in synchronous mode, null in streaming mode; output content supports at most 1024 tokens
|
// Messages TencentMessage `json:"messages,omitempty"` // content returned in synchronous mode, null in streaming mode; output content supports at most 1024 tokens
|
||||||
Delta TencentMessage `json:"delta,omitempty"` // content returned in streaming mode, null in synchronous mode; output content supports at most 1024 tokens
|
// Delta TencentMessage `json:"delta,omitempty"` // content returned in streaming mode, null in synchronous mode; output content supports at most 1024 tokens
|
||||||
}
|
// }
|
||||||
|
|
||||||
type TencentChatResponse struct {
|
// type TencentChatResponse struct {
|
||||||
Choices []TencentResponseChoices `json:"choices,omitempty"` // results
|
// Choices []TencentResponseChoices `json:"choices,omitempty"` // results
|
||||||
Created string `json:"created,omitempty"` // UNIX timestamp as a string
|
// Created string `json:"created,omitempty"` // UNIX timestamp as a string
|
||||||
Id string `json:"id,omitempty"` // conversation id
|
// Id string `json:"id,omitempty"` // conversation id
|
||||||
Usage Usage `json:"usage,omitempty"` // token counts
|
// Usage Usage `json:"usage,omitempty"` // token counts
|
||||||
Error TencentError `json:"error,omitempty"` // error info; note: this field may be null, meaning no valid value was returned
|
// Error TencentError `json:"error,omitempty"` // error info; note: this field may be null, meaning no valid value was returned
|
||||||
Note string `json:"note,omitempty"` // remark
|
// Note string `json:"note,omitempty"` // remark
|
||||||
ReqID string `json:"req_id,omitempty"` // unique request id, returned with every request; include it when reporting issues
|
// ReqID string `json:"req_id,omitempty"` // unique request id, returned with every request; include it when reporting issues
|
||||||
}
|
// }
|
||||||
|
|
||||||
func requestOpenAI2Tencent(request GeneralOpenAIRequest) *TencentChatRequest {
|
// func requestOpenAI2Tencent(request GeneralOpenAIRequest) *TencentChatRequest {
|
||||||
messages := make([]TencentMessage, 0, len(request.Messages))
|
// messages := make([]TencentMessage, 0, len(request.Messages))
|
||||||
for i := 0; i < len(request.Messages); i++ {
|
// for i := 0; i < len(request.Messages); i++ {
|
||||||
message := request.Messages[i]
|
// message := request.Messages[i]
|
||||||
if message.Role == "system" {
|
// if message.Role == "system" {
|
||||||
messages = append(messages, TencentMessage{
|
// messages = append(messages, TencentMessage{
|
||||||
Role: "user",
|
// Role: "user",
|
||||||
Content: message.Content,
|
// Content: message.Content,
|
||||||
})
|
// })
|
||||||
messages = append(messages, TencentMessage{
|
// messages = append(messages, TencentMessage{
|
||||||
Role: "assistant",
|
// Role: "assistant",
|
||||||
Content: "Okay",
|
// Content: "Okay",
|
||||||
})
|
// })
|
||||||
continue
|
// continue
|
||||||
}
|
// }
|
||||||
messages = append(messages, TencentMessage{
|
// messages = append(messages, TencentMessage{
|
||||||
Content: message.Content,
|
// Content: message.Content,
|
||||||
Role: message.Role,
|
// Role: message.Role,
|
||||||
})
|
// })
|
||||||
}
|
// }
|
||||||
stream := 0
|
// stream := 0
|
||||||
if request.Stream {
|
// if request.Stream {
|
||||||
stream = 1
|
// stream = 1
|
||||||
}
|
// }
|
||||||
return &TencentChatRequest{
|
// return &TencentChatRequest{
|
||||||
Timestamp: common.GetTimestamp(),
|
// Timestamp: common.GetTimestamp(),
|
||||||
Expired: common.GetTimestamp() + 24*60*60,
|
// Expired: common.GetTimestamp() + 24*60*60,
|
||||||
QueryID: common.GetUUID(),
|
// QueryID: common.GetUUID(),
|
||||||
Temperature: request.Temperature,
|
// Temperature: request.Temperature,
|
||||||
TopP: request.TopP,
|
// TopP: request.TopP,
|
||||||
Stream: stream,
|
// Stream: stream,
|
||||||
Messages: messages,
|
// Messages: messages,
|
||||||
}
|
// }
|
||||||
}
|
// }
|
||||||
|
|
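Note the workaround in requestOpenAI2Tencent above for system prompts: the Hunyuan request format modeled here has no system role, so a system message is rewritten as a user turn followed by a stub assistant turn. For a request that opens with a system prompt, the converted slice therefore starts like this (the contents are illustrative values only):

// Illustration of the output of requestOpenAI2Tencent when the first message has role "system".
converted := []TencentMessage{
	{Role: "user", Content: "You are a helpful assistant."}, // the original system prompt, demoted to a user turn
	{Role: "assistant", Content: "Okay"},                    // stub acknowledgement inserted by the converter
	{Role: "user", Content: "Hello"},                        // the first real user message follows unchanged
}
_ = converted
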
||||||
func responseTencent2OpenAI(response *TencentChatResponse) *OpenAITextResponse {
|
// func responseTencent2OpenAI(response *TencentChatResponse) *OpenAITextResponse {
|
||||||
fullTextResponse := OpenAITextResponse{
|
// fullTextResponse := OpenAITextResponse{
|
||||||
Object: "chat.completion",
|
// Object: "chat.completion",
|
||||||
Created: common.GetTimestamp(),
|
// Created: common.GetTimestamp(),
|
||||||
Usage: response.Usage,
|
// Usage: response.Usage,
|
||||||
}
|
// }
|
||||||
if len(response.Choices) > 0 {
|
// if len(response.Choices) > 0 {
|
||||||
choice := OpenAITextResponseChoice{
|
// choice := OpenAITextResponseChoice{
|
||||||
Index: 0,
|
// Index: 0,
|
||||||
Message: Message{
|
// Message: Message{
|
||||||
Role: "assistant",
|
// Role: "assistant",
|
||||||
Content: response.Choices[0].Messages.Content,
|
// Content: response.Choices[0].Messages.Content,
|
||||||
},
|
// },
|
||||||
FinishReason: response.Choices[0].FinishReason,
|
// FinishReason: response.Choices[0].FinishReason,
|
||||||
}
|
// }
|
||||||
fullTextResponse.Choices = append(fullTextResponse.Choices, choice)
|
// fullTextResponse.Choices = append(fullTextResponse.Choices, choice)
|
||||||
}
|
// }
|
||||||
return &fullTextResponse
|
// return &fullTextResponse
|
||||||
}
|
// }
|
||||||
|
|
||||||
func streamResponseTencent2OpenAI(TencentResponse *TencentChatResponse) *ChatCompletionsStreamResponse {
|
// func streamResponseTencent2OpenAI(TencentResponse *TencentChatResponse) *ChatCompletionsStreamResponse {
|
||||||
response := ChatCompletionsStreamResponse{
|
// response := ChatCompletionsStreamResponse{
|
||||||
Object: "chat.completion.chunk",
|
// Object: "chat.completion.chunk",
|
||||||
Created: common.GetTimestamp(),
|
// Created: common.GetTimestamp(),
|
||||||
Model: "tencent-hunyuan",
|
// Model: "tencent-hunyuan",
|
||||||
}
|
// }
|
||||||
if len(TencentResponse.Choices) > 0 {
|
// if len(TencentResponse.Choices) > 0 {
|
||||||
var choice ChatCompletionsStreamResponseChoice
|
// var choice ChatCompletionsStreamResponseChoice
|
||||||
choice.Delta.Content = TencentResponse.Choices[0].Delta.Content
|
// choice.Delta.Content = TencentResponse.Choices[0].Delta.Content
|
||||||
if TencentResponse.Choices[0].FinishReason == "stop" {
|
// if TencentResponse.Choices[0].FinishReason == "stop" {
|
||||||
choice.FinishReason = &stopFinishReason
|
// choice.FinishReason = &stopFinishReason
|
||||||
}
|
// }
|
||||||
response.Choices = append(response.Choices, choice)
|
// response.Choices = append(response.Choices, choice)
|
||||||
}
|
// }
|
||||||
return &response
|
// return &response
|
||||||
}
|
// }
|
||||||
|
|
||||||
func tencentStreamHandler(c *gin.Context, resp *http.Response) (*OpenAIErrorWithStatusCode, string) {
|
// func tencentStreamHandler(c *gin.Context, resp *http.Response) (*OpenAIErrorWithStatusCode, string) {
|
||||||
var responseText string
|
// var responseText string
|
||||||
scanner := bufio.NewScanner(resp.Body)
|
// scanner := bufio.NewScanner(resp.Body)
|
||||||
scanner.Split(func(data []byte, atEOF bool) (advance int, token []byte, err error) {
|
// scanner.Split(func(data []byte, atEOF bool) (advance int, token []byte, err error) {
|
||||||
if atEOF && len(data) == 0 {
|
// if atEOF && len(data) == 0 {
|
||||||
return 0, nil, nil
|
// return 0, nil, nil
|
||||||
}
|
// }
|
||||||
if i := strings.Index(string(data), "\n"); i >= 0 {
|
// if i := strings.Index(string(data), "\n"); i >= 0 {
|
||||||
return i + 1, data[0:i], nil
|
// return i + 1, data[0:i], nil
|
||||||
}
|
// }
|
||||||
if atEOF {
|
// if atEOF {
|
||||||
return len(data), data, nil
|
// return len(data), data, nil
|
||||||
}
|
// }
|
||||||
return 0, nil, nil
|
// return 0, nil, nil
|
||||||
})
|
// })
|
||||||
dataChan := make(chan string)
|
// dataChan := make(chan string)
|
||||||
stopChan := make(chan bool)
|
// stopChan := make(chan bool)
|
||||||
go func() {
|
// go func() {
|
||||||
for scanner.Scan() {
|
// for scanner.Scan() {
|
||||||
data := scanner.Text()
|
// data := scanner.Text()
|
||||||
if len(data) < 5 { // ignore blank line or wrong format
|
// if len(data) < 5 { // ignore blank line or wrong format
|
||||||
continue
|
// continue
|
||||||
}
|
// }
|
||||||
if data[:5] != "data:" {
|
// if data[:5] != "data:" {
|
||||||
continue
|
// continue
|
||||||
}
|
// }
|
||||||
data = data[5:]
|
// data = data[5:]
|
||||||
dataChan <- data
|
// dataChan <- data
|
||||||
}
|
// }
|
||||||
stopChan <- true
|
// stopChan <- true
|
||||||
}()
|
// }()
|
||||||
setEventStreamHeaders(c)
|
// setEventStreamHeaders(c)
|
||||||
c.Stream(func(w io.Writer) bool {
|
// c.Stream(func(w io.Writer) bool {
|
||||||
select {
|
// select {
|
||||||
case data := <-dataChan:
|
// case data := <-dataChan:
|
||||||
var TencentResponse TencentChatResponse
|
// var TencentResponse TencentChatResponse
|
||||||
err := json.Unmarshal([]byte(data), &TencentResponse)
|
// err := json.Unmarshal([]byte(data), &TencentResponse)
|
||||||
if err != nil {
|
// if err != nil {
|
||||||
common.SysError("error unmarshalling stream response: " + err.Error())
|
// common.SysError("error unmarshalling stream response: " + err.Error())
|
||||||
return true
|
// return true
|
||||||
}
|
// }
|
||||||
response := streamResponseTencent2OpenAI(&TencentResponse)
|
// response := streamResponseTencent2OpenAI(&TencentResponse)
|
||||||
if len(response.Choices) != 0 {
|
// if len(response.Choices) != 0 {
|
||||||
responseText += response.Choices[0].Delta.Content
|
// responseText += response.Choices[0].Delta.Content
|
||||||
}
|
// }
|
||||||
jsonResponse, err := json.Marshal(response)
|
// jsonResponse, err := json.Marshal(response)
|
||||||
if err != nil {
|
// if err != nil {
|
||||||
common.SysError("error marshalling stream response: " + err.Error())
|
// common.SysError("error marshalling stream response: " + err.Error())
|
||||||
return true
|
// return true
|
||||||
}
|
// }
|
||||||
c.Render(-1, common.CustomEvent{Data: "data: " + string(jsonResponse)})
|
// c.Render(-1, common.CustomEvent{Data: "data: " + string(jsonResponse)})
|
||||||
return true
|
// return true
|
||||||
case <-stopChan:
|
// case <-stopChan:
|
||||||
c.Render(-1, common.CustomEvent{Data: "data: [DONE]"})
|
// c.Render(-1, common.CustomEvent{Data: "data: [DONE]"})
|
||||||
return false
|
// return false
|
||||||
}
|
// }
|
||||||
})
|
// })
|
||||||
err := resp.Body.Close()
|
// err := resp.Body.Close()
|
||||||
if err != nil {
|
// if err != nil {
|
||||||
return errorWrapper(err, "close_response_body_failed", http.StatusInternalServerError), ""
|
// return errorWrapper(err, "close_response_body_failed", http.StatusInternalServerError), ""
|
||||||
}
|
// }
|
||||||
return nil, responseText
|
// return nil, responseText
|
||||||
}
|
// }
|
||||||
|
|
||||||
func tencentHandler(c *gin.Context, resp *http.Response) (*OpenAIErrorWithStatusCode, *Usage) {
|
// func tencentHandler(c *gin.Context, resp *http.Response) (*OpenAIErrorWithStatusCode, *Usage) {
|
||||||
var TencentResponse TencentChatResponse
|
// var TencentResponse TencentChatResponse
|
||||||
responseBody, err := io.ReadAll(resp.Body)
|
// responseBody, err := io.ReadAll(resp.Body)
|
||||||
if err != nil {
|
// if err != nil {
|
||||||
return errorWrapper(err, "read_response_body_failed", http.StatusInternalServerError), nil
|
// return errorWrapper(err, "read_response_body_failed", http.StatusInternalServerError), nil
|
||||||
}
|
// }
|
||||||
err = resp.Body.Close()
|
// err = resp.Body.Close()
|
||||||
if err != nil {
|
// if err != nil {
|
||||||
return errorWrapper(err, "close_response_body_failed", http.StatusInternalServerError), nil
|
// return errorWrapper(err, "close_response_body_failed", http.StatusInternalServerError), nil
|
||||||
}
|
// }
|
||||||
err = json.Unmarshal(responseBody, &TencentResponse)
|
// err = json.Unmarshal(responseBody, &TencentResponse)
|
||||||
if err != nil {
|
// if err != nil {
|
||||||
return errorWrapper(err, "unmarshal_response_body_failed", http.StatusInternalServerError), nil
|
// return errorWrapper(err, "unmarshal_response_body_failed", http.StatusInternalServerError), nil
|
||||||
}
|
// }
|
||||||
if TencentResponse.Error.Code != 0 {
|
// if TencentResponse.Error.Code != 0 {
|
||||||
return &OpenAIErrorWithStatusCode{
|
// return &OpenAIErrorWithStatusCode{
|
||||||
OpenAIError: OpenAIError{
|
// OpenAIError: OpenAIError{
|
||||||
Message: TencentResponse.Error.Message,
|
// Message: TencentResponse.Error.Message,
|
||||||
Code: TencentResponse.Error.Code,
|
// Code: TencentResponse.Error.Code,
|
||||||
},
|
// },
|
||||||
StatusCode: resp.StatusCode,
|
// StatusCode: resp.StatusCode,
|
||||||
}, nil
|
// }, nil
|
||||||
}
|
// }
|
||||||
fullTextResponse := responseTencent2OpenAI(&TencentResponse)
|
// fullTextResponse := responseTencent2OpenAI(&TencentResponse)
|
||||||
jsonResponse, err := json.Marshal(fullTextResponse)
|
// jsonResponse, err := json.Marshal(fullTextResponse)
|
||||||
if err != nil {
|
// if err != nil {
|
||||||
return errorWrapper(err, "marshal_response_body_failed", http.StatusInternalServerError), nil
|
// return errorWrapper(err, "marshal_response_body_failed", http.StatusInternalServerError), nil
|
||||||
}
|
// }
|
||||||
c.Writer.Header().Set("Content-Type", "application/json")
|
// c.Writer.Header().Set("Content-Type", "application/json")
|
||||||
c.Writer.WriteHeader(resp.StatusCode)
|
// c.Writer.WriteHeader(resp.StatusCode)
|
||||||
_, err = c.Writer.Write(jsonResponse)
|
// _, err = c.Writer.Write(jsonResponse)
|
||||||
return nil, &fullTextResponse.Usage
|
// return nil, &fullTextResponse.Usage
|
||||||
}
|
// }
|
||||||
|
|
||||||
func parseTencentConfig(config string) (appId int64, secretId string, secretKey string, err error) {
|
// func parseTencentConfig(config string) (appId int64, secretId string, secretKey string, err error) {
|
||||||
parts := strings.Split(config, "|")
|
// parts := strings.Split(config, "|")
|
||||||
if len(parts) != 3 {
|
// if len(parts) != 3 {
|
||||||
err = errors.New("invalid tencent config")
|
// err = errors.New("invalid tencent config")
|
||||||
return
|
// return
|
||||||
}
|
// }
|
||||||
appId, err = strconv.ParseInt(parts[0], 10, 64)
|
// appId, err = strconv.ParseInt(parts[0], 10, 64)
|
||||||
secretId = parts[1]
|
// secretId = parts[1]
|
||||||
secretKey = parts[2]
|
// secretKey = parts[2]
|
||||||
return
|
// return
|
||||||
}
|
// }
|
||||||
|
|
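parseTencentConfig above expects the channel key to be three values joined by "|". A minimal usage sketch, with placeholder credentials rather than real ones:

// Hypothetical usage; the credential values below are placeholders.
appId, secretId, secretKey, err := parseTencentConfig("1300000000|AKIDxxxxxxxxxxxxxxxx|examplesecretkey")
if err != nil {
	// err is "invalid tencent config" whenever the key does not have exactly three "|"-separated parts
	return
}
_, _, _ = appId, secretId, secretKey // appId is an int64, the other two are strings
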
||||||
func getTencentSign(req TencentChatRequest, secretKey string) string {
|
// func getTencentSign(req TencentChatRequest, secretKey string) string {
|
||||||
params := make([]string, 0)
|
// params := make([]string, 0)
|
||||||
params = append(params, "app_id="+strconv.FormatInt(req.AppId, 10))
|
// params = append(params, "app_id="+strconv.FormatInt(req.AppId, 10))
|
||||||
params = append(params, "secret_id="+req.SecretId)
|
// params = append(params, "secret_id="+req.SecretId)
|
||||||
params = append(params, "timestamp="+strconv.FormatInt(req.Timestamp, 10))
|
// params = append(params, "timestamp="+strconv.FormatInt(req.Timestamp, 10))
|
||||||
params = append(params, "query_id="+req.QueryID)
|
// params = append(params, "query_id="+req.QueryID)
|
||||||
params = append(params, "temperature="+strconv.FormatFloat(req.Temperature, 'f', -1, 64))
|
// params = append(params, "temperature="+strconv.FormatFloat(req.Temperature, 'f', -1, 64))
|
||||||
params = append(params, "top_p="+strconv.FormatFloat(req.TopP, 'f', -1, 64))
|
// params = append(params, "top_p="+strconv.FormatFloat(req.TopP, 'f', -1, 64))
|
||||||
params = append(params, "stream="+strconv.Itoa(req.Stream))
|
// params = append(params, "stream="+strconv.Itoa(req.Stream))
|
||||||
params = append(params, "expired="+strconv.FormatInt(req.Expired, 10))
|
// params = append(params, "expired="+strconv.FormatInt(req.Expired, 10))
|
||||||
|
|
||||||
var messageStr string
|
// var messageStr string
|
||||||
for _, msg := range req.Messages {
|
// for _, msg := range req.Messages {
|
||||||
messageStr += fmt.Sprintf(`{"role":"%s","content":"%s"},`, msg.Role, msg.Content)
|
// messageStr += fmt.Sprintf(`{"role":"%s","content":"%s"},`, msg.Role, msg.Content)
|
||||||
}
|
// }
|
||||||
messageStr = strings.TrimSuffix(messageStr, ",")
|
// messageStr = strings.TrimSuffix(messageStr, ",")
|
||||||
params = append(params, "messages=["+messageStr+"]")
|
// params = append(params, "messages=["+messageStr+"]")
|
||||||
|
|
||||||
sort.Sort(sort.StringSlice(params))
|
// sort.Sort(sort.StringSlice(params))
|
||||||
url := "hunyuan.cloud.tencent.com/hyllm/v1/chat/completions?" + strings.Join(params, "&")
|
// url := "hunyuan.cloud.tencent.com/hyllm/v1/chat/completions?" + strings.Join(params, "&")
|
||||||
mac := hmac.New(sha1.New, []byte(secretKey))
|
// mac := hmac.New(sha1.New, []byte(secretKey))
|
||||||
signURL := url
|
// signURL := url
|
||||||
mac.Write([]byte(signURL))
|
// mac.Write([]byte(signURL))
|
||||||
sign := mac.Sum([]byte(nil))
|
// sign := mac.Sum([]byte(nil))
|
||||||
return base64.StdEncoding.EncodeToString(sign)
|
// return base64.StdEncoding.EncodeToString(sign)
|
||||||
}
|
// }
|
||||||
|
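Taken together, the helpers in this file authenticate a Hunyuan call by filling the timing fields, injecting the credentials parsed from the channel key, and signing the sorted query string with HMAC-SHA1. A minimal sketch of that flow, assuming the controller package above; it mirrors how relayTextHelper wires these functions further down:

// Hypothetical wiring sketch, not code added by this commit.
func buildSignedTencentRequest(openAIReq GeneralOpenAIRequest, channelKey string) (*TencentChatRequest, string, error) {
	appId, secretId, secretKey, err := parseTencentConfig(channelKey) // key format: "appId|secretId|secretKey"
	if err != nil {
		return nil, "", err
	}
	req := requestOpenAI2Tencent(openAIReq) // fills Timestamp, Expired, QueryID, Temperature, TopP, Stream, Messages
	req.AppId = appId
	req.SecretId = secretId
	sign := getTencentSign(*req, secretKey) // HMAC-SHA1 over the sorted query string, base64-encoded
	// The caller marshals req as the JSON body and sends sign in the Authorization header.
	return req, sign, nil
}
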
@ -76,7 +76,7 @@ func relayTextHelper(c *gin.Context, relayMode int) *OpenAIErrorWithStatusCode {
|
|||||||
return errorWrapper(errors.New("field prompt is required"), "required_field_missing", http.StatusBadRequest)
|
return errorWrapper(errors.New("field prompt is required"), "required_field_missing", http.StatusBadRequest)
|
||||||
}
|
}
|
||||||
case RelayModeChatCompletions:
|
case RelayModeChatCompletions:
|
||||||
if textRequest.Messages == nil || len(textRequest.Messages) == 0 {
|
if textRequest.Messages == nil || textRequest.MessagesLen() == 0 {
|
||||||
return errorWrapper(errors.New("field messages is required"), "required_field_missing", http.StatusBadRequest)
|
return errorWrapper(errors.New("field messages is required"), "required_field_missing", http.StatusBadRequest)
|
||||||
}
|
}
|
||||||
case RelayModeEmbeddings:
|
case RelayModeEmbeddings:
|
||||||
@ -154,26 +154,26 @@ func relayTextHelper(c *gin.Context, relayMode int) *OpenAIErrorWithStatusCode {
|
|||||||
if baseURL != "" {
|
if baseURL != "" {
|
||||||
fullRequestURL = fmt.Sprintf("%s/v1/complete", baseURL)
|
fullRequestURL = fmt.Sprintf("%s/v1/complete", baseURL)
|
||||||
}
|
}
|
||||||
case APITypeBaidu:
|
// case APITypeBaidu:
|
||||||
switch textRequest.Model {
|
// switch textRequest.Model {
|
||||||
case "ERNIE-Bot":
|
// case "ERNIE-Bot":
|
||||||
fullRequestURL = "https://aip.baidubce.com/rpc/2.0/ai_custom/v1/wenxinworkshop/chat/completions"
|
// fullRequestURL = "https://aip.baidubce.com/rpc/2.0/ai_custom/v1/wenxinworkshop/chat/completions"
|
||||||
case "ERNIE-Bot-turbo":
|
// case "ERNIE-Bot-turbo":
|
||||||
fullRequestURL = "https://aip.baidubce.com/rpc/2.0/ai_custom/v1/wenxinworkshop/chat/eb-instant"
|
// fullRequestURL = "https://aip.baidubce.com/rpc/2.0/ai_custom/v1/wenxinworkshop/chat/eb-instant"
|
||||||
case "ERNIE-Bot-4":
|
// case "ERNIE-Bot-4":
|
||||||
fullRequestURL = "https://aip.baidubce.com/rpc/2.0/ai_custom/v1/wenxinworkshop/chat/completions_pro"
|
// fullRequestURL = "https://aip.baidubce.com/rpc/2.0/ai_custom/v1/wenxinworkshop/chat/completions_pro"
|
||||||
case "BLOOMZ-7B":
|
// case "BLOOMZ-7B":
|
||||||
fullRequestURL = "https://aip.baidubce.com/rpc/2.0/ai_custom/v1/wenxinworkshop/chat/bloomz_7b1"
|
// fullRequestURL = "https://aip.baidubce.com/rpc/2.0/ai_custom/v1/wenxinworkshop/chat/bloomz_7b1"
|
||||||
case "Embedding-V1":
|
// case "Embedding-V1":
|
||||||
fullRequestURL = "https://aip.baidubce.com/rpc/2.0/ai_custom/v1/wenxinworkshop/embeddings/embedding-v1"
|
// fullRequestURL = "https://aip.baidubce.com/rpc/2.0/ai_custom/v1/wenxinworkshop/embeddings/embedding-v1"
|
||||||
}
|
// }
|
||||||
apiKey := c.Request.Header.Get("Authorization")
|
// apiKey := c.Request.Header.Get("Authorization")
|
||||||
apiKey = strings.TrimPrefix(apiKey, "Bearer ")
|
// apiKey = strings.TrimPrefix(apiKey, "Bearer ")
|
||||||
var err error
|
// var err error
|
||||||
if apiKey, err = getBaiduAccessToken(apiKey); err != nil {
|
// if apiKey, err = getBaiduAccessToken(apiKey); err != nil {
|
||||||
return errorWrapper(err, "invalid_baidu_config", http.StatusInternalServerError)
|
// return errorWrapper(err, "invalid_baidu_config", http.StatusInternalServerError)
|
||||||
}
|
// }
|
||||||
fullRequestURL += "?access_token=" + apiKey
|
// fullRequestURL += "?access_token=" + apiKey
|
||||||
case APITypePaLM:
|
case APITypePaLM:
|
||||||
fullRequestURL = "https://generativelanguage.googleapis.com/v1beta2/models/chat-bison-001:generateMessage"
|
fullRequestURL = "https://generativelanguage.googleapis.com/v1beta2/models/chat-bison-001:generateMessage"
|
||||||
if baseURL != "" {
|
if baseURL != "" {
|
||||||
@ -202,7 +202,12 @@ func relayTextHelper(c *gin.Context, relayMode int) *OpenAIErrorWithStatusCode {
|
|||||||
var completionTokens int
|
var completionTokens int
|
||||||
switch relayMode {
|
switch relayMode {
|
||||||
case RelayModeChatCompletions:
|
case RelayModeChatCompletions:
|
||||||
promptTokens = countTokenMessages(textRequest.Messages, textRequest.Model)
|
messages, err := textRequest.TextMessages()
|
||||||
|
if err != nil {
|
||||||
|
return errorWrapper(err, "parse_text_messages_failed", http.StatusBadRequest)
|
||||||
|
}
|
||||||
|
|
||||||
|
promptTokens = countTokenMessages(messages, textRequest.Model)
|
||||||
case RelayModeCompletions:
|
case RelayModeCompletions:
|
||||||
promptTokens = countTokenInput(textRequest.Prompt, textRequest.Model)
|
promptTokens = countTokenInput(textRequest.Prompt, textRequest.Model)
|
||||||
case RelayModeModerations:
|
case RelayModeModerations:
|
||||||
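MessagesLen() and TextMessages() are only referenced in this hunk; their definitions are not part of the diff shown here. A minimal sketch of what such an accessor might do, assuming Messages can now hold more than one message shape; the function below is an illustration, not the actual implementation:

// Hypothetical sketch only: normalize request.Messages into []Message for token counting.
func textMessagesSketch(messages any) ([]Message, error) {
	switch msgs := messages.(type) {
	case []Message:
		return msgs, nil
	case []any:
		out := make([]Message, 0, len(msgs))
		for _, m := range msgs {
			msg, ok := m.(Message)
			if !ok {
				return nil, fmt.Errorf("unsupported message type: %T", m)
			}
			out = append(out, msg)
		}
		return out, nil
	default:
		return nil, fmt.Errorf("unsupported messages type: %T", messages)
	}
}
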
@ -257,67 +262,67 @@ func relayTextHelper(c *gin.Context, relayMode int) *OpenAIErrorWithStatusCode {
|
|||||||
return errorWrapper(err, "marshal_text_request_failed", http.StatusInternalServerError)
|
return errorWrapper(err, "marshal_text_request_failed", http.StatusInternalServerError)
|
||||||
}
|
}
|
||||||
requestBody = bytes.NewBuffer(jsonStr)
|
requestBody = bytes.NewBuffer(jsonStr)
|
||||||
case APITypeBaidu:
|
// case APITypeBaidu:
|
||||||
var jsonData []byte
|
// var jsonData []byte
|
||||||
var err error
|
// var err error
|
||||||
switch relayMode {
|
// switch relayMode {
|
||||||
case RelayModeEmbeddings:
|
// case RelayModeEmbeddings:
|
||||||
baiduEmbeddingRequest := embeddingRequestOpenAI2Baidu(textRequest)
|
// baiduEmbeddingRequest := embeddingRequestOpenAI2Baidu(textRequest)
|
||||||
jsonData, err = json.Marshal(baiduEmbeddingRequest)
|
// jsonData, err = json.Marshal(baiduEmbeddingRequest)
|
||||||
default:
|
// default:
|
||||||
baiduRequest := requestOpenAI2Baidu(textRequest)
|
// baiduRequest := requestOpenAI2Baidu(textRequest)
|
||||||
jsonData, err = json.Marshal(baiduRequest)
|
// jsonData, err = json.Marshal(baiduRequest)
|
||||||
}
|
// }
|
||||||
if err != nil {
|
// if err != nil {
|
||||||
return errorWrapper(err, "marshal_text_request_failed", http.StatusInternalServerError)
|
// return errorWrapper(err, "marshal_text_request_failed", http.StatusInternalServerError)
|
||||||
}
|
// }
|
||||||
requestBody = bytes.NewBuffer(jsonData)
|
// requestBody = bytes.NewBuffer(jsonData)
|
||||||
case APITypePaLM:
|
// case APITypePaLM:
|
||||||
palmRequest := requestOpenAI2PaLM(textRequest)
|
// palmRequest := requestOpenAI2PaLM(textRequest)
|
||||||
jsonStr, err := json.Marshal(palmRequest)
|
// jsonStr, err := json.Marshal(palmRequest)
|
||||||
if err != nil {
|
// if err != nil {
|
||||||
return errorWrapper(err, "marshal_text_request_failed", http.StatusInternalServerError)
|
// return errorWrapper(err, "marshal_text_request_failed", http.StatusInternalServerError)
|
||||||
}
|
// }
|
||||||
requestBody = bytes.NewBuffer(jsonStr)
|
// requestBody = bytes.NewBuffer(jsonStr)
|
||||||
case APITypeZhipu:
|
// case APITypeZhipu:
|
||||||
zhipuRequest := requestOpenAI2Zhipu(textRequest)
|
// zhipuRequest := requestOpenAI2Zhipu(textRequest)
|
||||||
jsonStr, err := json.Marshal(zhipuRequest)
|
// jsonStr, err := json.Marshal(zhipuRequest)
|
||||||
if err != nil {
|
// if err != nil {
|
||||||
return errorWrapper(err, "marshal_text_request_failed", http.StatusInternalServerError)
|
// return errorWrapper(err, "marshal_text_request_failed", http.StatusInternalServerError)
|
||||||
}
|
// }
|
||||||
requestBody = bytes.NewBuffer(jsonStr)
|
// requestBody = bytes.NewBuffer(jsonStr)
|
||||||
case APITypeAli:
|
// case APITypeAli:
|
||||||
var jsonStr []byte
|
// var jsonStr []byte
|
||||||
var err error
|
// var err error
|
||||||
switch relayMode {
|
// switch relayMode {
|
||||||
case RelayModeEmbeddings:
|
// case RelayModeEmbeddings:
|
||||||
aliEmbeddingRequest := embeddingRequestOpenAI2Ali(textRequest)
|
// aliEmbeddingRequest := embeddingRequestOpenAI2Ali(textRequest)
|
||||||
jsonStr, err = json.Marshal(aliEmbeddingRequest)
|
// jsonStr, err = json.Marshal(aliEmbeddingRequest)
|
||||||
default:
|
// default:
|
||||||
aliRequest := requestOpenAI2Ali(textRequest)
|
// aliRequest := requestOpenAI2Ali(textRequest)
|
||||||
jsonStr, err = json.Marshal(aliRequest)
|
// jsonStr, err = json.Marshal(aliRequest)
|
||||||
}
|
// }
|
||||||
if err != nil {
|
// if err != nil {
|
||||||
return errorWrapper(err, "marshal_text_request_failed", http.StatusInternalServerError)
|
// return errorWrapper(err, "marshal_text_request_failed", http.StatusInternalServerError)
|
||||||
}
|
// }
|
||||||
requestBody = bytes.NewBuffer(jsonStr)
|
// requestBody = bytes.NewBuffer(jsonStr)
|
||||||
case APITypeTencent:
|
// case APITypeTencent:
|
||||||
apiKey := c.Request.Header.Get("Authorization")
|
// apiKey := c.Request.Header.Get("Authorization")
|
||||||
apiKey = strings.TrimPrefix(apiKey, "Bearer ")
|
// apiKey = strings.TrimPrefix(apiKey, "Bearer ")
|
||||||
appId, secretId, secretKey, err := parseTencentConfig(apiKey)
|
// appId, secretId, secretKey, err := parseTencentConfig(apiKey)
|
||||||
if err != nil {
|
// if err != nil {
|
||||||
return errorWrapper(err, "invalid_tencent_config", http.StatusInternalServerError)
|
// return errorWrapper(err, "invalid_tencent_config", http.StatusInternalServerError)
|
||||||
}
|
// }
|
||||||
tencentRequest := requestOpenAI2Tencent(textRequest)
|
// tencentRequest := requestOpenAI2Tencent(textRequest)
|
||||||
tencentRequest.AppId = appId
|
// tencentRequest.AppId = appId
|
||||||
tencentRequest.SecretId = secretId
|
// tencentRequest.SecretId = secretId
|
||||||
jsonStr, err := json.Marshal(tencentRequest)
|
// jsonStr, err := json.Marshal(tencentRequest)
|
||||||
if err != nil {
|
// if err != nil {
|
||||||
return errorWrapper(err, "marshal_text_request_failed", http.StatusInternalServerError)
|
// return errorWrapper(err, "marshal_text_request_failed", http.StatusInternalServerError)
|
||||||
}
|
// }
|
||||||
sign := getTencentSign(*tencentRequest, secretKey)
|
// sign := getTencentSign(*tencentRequest, secretKey)
|
||||||
c.Request.Header.Set("Authorization", sign)
|
// c.Request.Header.Set("Authorization", sign)
|
||||||
requestBody = bytes.NewBuffer(jsonStr)
|
// requestBody = bytes.NewBuffer(jsonStr)
|
||||||
case APITypeAIProxyLibrary:
|
case APITypeAIProxyLibrary:
|
||||||
aiProxyLibraryRequest := requestOpenAI2AIProxyLibrary(textRequest)
|
aiProxyLibraryRequest := requestOpenAI2AIProxyLibrary(textRequest)
|
||||||
aiProxyLibraryRequest.LibraryId = c.GetString("library_id")
|
aiProxyLibraryRequest.LibraryId = c.GetString("library_id")
|
||||||
@ -357,16 +362,16 @@ func relayTextHelper(c *gin.Context, relayMode int) *OpenAIErrorWithStatusCode {
|
|||||||
anthropicVersion = "2023-06-01"
|
anthropicVersion = "2023-06-01"
|
||||||
}
|
}
|
||||||
req.Header.Set("anthropic-version", anthropicVersion)
|
req.Header.Set("anthropic-version", anthropicVersion)
|
||||||
case APITypeZhipu:
|
// case APITypeZhipu:
|
||||||
token := getZhipuToken(apiKey)
|
// token := getZhipuToken(apiKey)
|
||||||
req.Header.Set("Authorization", token)
|
// req.Header.Set("Authorization", token)
|
||||||
case APITypeAli:
|
// case APITypeAli:
|
||||||
req.Header.Set("Authorization", "Bearer "+apiKey)
|
// req.Header.Set("Authorization", "Bearer "+apiKey)
|
||||||
if textRequest.Stream {
|
// if textRequest.Stream {
|
||||||
req.Header.Set("X-DashScope-SSE", "enable")
|
// req.Header.Set("X-DashScope-SSE", "enable")
|
||||||
}
|
// }
|
||||||
case APITypeTencent:
|
// case APITypeTencent:
|
||||||
req.Header.Set("Authorization", apiKey)
|
// req.Header.Set("Authorization", apiKey)
|
||||||
default:
|
default:
|
||||||
req.Header.Set("Authorization", "Bearer "+apiKey)
|
req.Header.Set("Authorization", "Bearer "+apiKey)
|
||||||
}
|
}
|
||||||
@ -482,124 +487,124 @@ func relayTextHelper(c *gin.Context, relayMode int) *OpenAIErrorWithStatusCode {
|
|||||||
}
|
}
|
||||||
return nil
|
return nil
|
||||||
}
|
}
|
||||||
case APITypeBaidu:
|
// case APITypeBaidu:
|
||||||
if isStream {
|
// if isStream {
|
||||||
err, usage := baiduStreamHandler(c, resp)
|
// err, usage := baiduStreamHandler(c, resp)
|
||||||
if err != nil {
|
// if err != nil {
|
||||||
return err
|
// return err
|
||||||
}
|
// }
|
||||||
if usage != nil {
|
// if usage != nil {
|
||||||
textResponse.Usage = *usage
|
// textResponse.Usage = *usage
|
||||||
}
|
// }
|
||||||
return nil
|
// return nil
|
||||||
} else {
|
// } else {
|
||||||
var err *OpenAIErrorWithStatusCode
|
// var err *OpenAIErrorWithStatusCode
|
||||||
var usage *Usage
|
// var usage *Usage
|
||||||
switch relayMode {
|
// switch relayMode {
|
||||||
case RelayModeEmbeddings:
|
// case RelayModeEmbeddings:
|
||||||
err, usage = baiduEmbeddingHandler(c, resp)
|
// err, usage = baiduEmbeddingHandler(c, resp)
|
||||||
default:
|
// default:
|
||||||
err, usage = baiduHandler(c, resp)
|
// err, usage = baiduHandler(c, resp)
|
||||||
}
|
// }
|
||||||
if err != nil {
|
// if err != nil {
|
||||||
return err
|
// return err
|
||||||
}
|
// }
|
||||||
if usage != nil {
|
// if usage != nil {
|
||||||
textResponse.Usage = *usage
|
// textResponse.Usage = *usage
|
||||||
}
|
// }
|
||||||
return nil
|
// return nil
|
||||||
}
|
// }
|
||||||
case APITypePaLM:
|
// case APITypePaLM:
|
||||||
if textRequest.Stream { // PaLM2 API does not support stream
|
// if textRequest.Stream { // PaLM2 API does not support stream
|
||||||
err, responseText := palmStreamHandler(c, resp)
|
// err, responseText := palmStreamHandler(c, resp)
|
||||||
if err != nil {
|
// if err != nil {
|
||||||
return err
|
// return err
|
||||||
}
|
// }
|
||||||
textResponse.Usage.PromptTokens = promptTokens
|
// textResponse.Usage.PromptTokens = promptTokens
|
||||||
textResponse.Usage.CompletionTokens = countTokenText(responseText, textRequest.Model)
|
// textResponse.Usage.CompletionTokens = countTokenText(responseText, textRequest.Model)
|
||||||
return nil
|
// return nil
|
||||||
} else {
|
// } else {
|
||||||
err, usage := palmHandler(c, resp, promptTokens, textRequest.Model)
|
// err, usage := palmHandler(c, resp, promptTokens, textRequest.Model)
|
||||||
if err != nil {
|
// if err != nil {
|
||||||
return err
|
// return err
|
||||||
}
|
// }
|
||||||
if usage != nil {
|
// if usage != nil {
|
||||||
textResponse.Usage = *usage
|
// textResponse.Usage = *usage
|
||||||
}
|
// }
|
||||||
return nil
|
// return nil
|
||||||
}
|
// }
|
||||||
case APITypeZhipu:
|
// case APITypeZhipu:
|
||||||
if isStream {
|
// if isStream {
|
||||||
err, usage := zhipuStreamHandler(c, resp)
|
// err, usage := zhipuStreamHandler(c, resp)
|
||||||
if err != nil {
|
// if err != nil {
|
||||||
return err
|
// return err
|
||||||
}
|
// }
|
||||||
if usage != nil {
|
// if usage != nil {
|
||||||
textResponse.Usage = *usage
|
// textResponse.Usage = *usage
|
||||||
}
|
// }
|
||||||
// zhipu's API does not return prompt tokens & completion tokens
|
// // zhipu's API does not return prompt tokens & completion tokens
|
||||||
textResponse.Usage.PromptTokens = textResponse.Usage.TotalTokens
|
// textResponse.Usage.PromptTokens = textResponse.Usage.TotalTokens
|
||||||
return nil
|
// return nil
|
||||||
} else {
|
// } else {
|
||||||
err, usage := zhipuHandler(c, resp)
|
// err, usage := zhipuHandler(c, resp)
|
||||||
if err != nil {
|
// if err != nil {
|
||||||
return err
|
// return err
|
||||||
}
|
// }
|
||||||
if usage != nil {
|
// if usage != nil {
|
||||||
textResponse.Usage = *usage
|
// textResponse.Usage = *usage
|
||||||
}
|
// }
|
||||||
// zhipu's API does not return prompt tokens & completion tokens
|
// // zhipu's API does not return prompt tokens & completion tokens
|
||||||
textResponse.Usage.PromptTokens = textResponse.Usage.TotalTokens
|
// textResponse.Usage.PromptTokens = textResponse.Usage.TotalTokens
|
||||||
return nil
|
// return nil
|
||||||
}
|
// }
|
||||||
case APITypeAli:
|
// case APITypeAli:
|
||||||
if isStream {
|
// if isStream {
|
||||||
err, usage := aliStreamHandler(c, resp)
|
// err, usage := aliStreamHandler(c, resp)
|
||||||
if err != nil {
|
// if err != nil {
|
||||||
return err
|
// return err
|
||||||
}
|
// }
|
||||||
if usage != nil {
|
// if usage != nil {
|
||||||
textResponse.Usage = *usage
|
// textResponse.Usage = *usage
|
||||||
}
|
// }
|
||||||
return nil
|
// return nil
|
||||||
} else {
|
// } else {
|
||||||
var err *OpenAIErrorWithStatusCode
|
// var err *OpenAIErrorWithStatusCode
|
||||||
var usage *Usage
|
// var usage *Usage
|
||||||
switch relayMode {
|
// switch relayMode {
|
||||||
case RelayModeEmbeddings:
|
// case RelayModeEmbeddings:
|
||||||
err, usage = aliEmbeddingHandler(c, resp)
|
// err, usage = aliEmbeddingHandler(c, resp)
|
||||||
default:
|
// default:
|
||||||
err, usage = aliHandler(c, resp)
|
// err, usage = aliHandler(c, resp)
|
||||||
}
|
// }
|
||||||
if err != nil {
|
// if err != nil {
|
||||||
return err
|
// return err
|
||||||
}
|
// }
|
||||||
if usage != nil {
|
// if usage != nil {
|
||||||
textResponse.Usage = *usage
|
// textResponse.Usage = *usage
|
||||||
}
|
// }
|
||||||
return nil
|
// return nil
|
||||||
}
|
// }
|
||||||
case APITypeXunfei:
|
// case APITypeXunfei:
|
||||||
auth := c.Request.Header.Get("Authorization")
|
// auth := c.Request.Header.Get("Authorization")
|
||||||
auth = strings.TrimPrefix(auth, "Bearer ")
|
// auth = strings.TrimPrefix(auth, "Bearer ")
|
||||||
splits := strings.Split(auth, "|")
|
// splits := strings.Split(auth, "|")
|
||||||
if len(splits) != 3 {
|
// if len(splits) != 3 {
|
||||||
return errorWrapper(errors.New("invalid auth"), "invalid_auth", http.StatusBadRequest)
|
// return errorWrapper(errors.New("invalid auth"), "invalid_auth", http.StatusBadRequest)
|
||||||
}
|
// }
|
||||||
var err *OpenAIErrorWithStatusCode
|
// var err *OpenAIErrorWithStatusCode
|
||||||
var usage *Usage
|
// var usage *Usage
|
||||||
if isStream {
|
// if isStream {
|
||||||
err, usage = xunfeiStreamHandler(c, textRequest, splits[0], splits[1], splits[2])
|
// err, usage = xunfeiStreamHandler(c, textRequest, splits[0], splits[1], splits[2])
|
||||||
} else {
|
// } else {
|
||||||
err, usage = xunfeiHandler(c, textRequest, splits[0], splits[1], splits[2])
|
// err, usage = xunfeiHandler(c, textRequest, splits[0], splits[1], splits[2])
|
||||||
}
|
// }
|
||||||
if err != nil {
|
// if err != nil {
|
||||||
return err
|
// return err
|
||||||
}
|
// }
|
||||||
if usage != nil {
|
// if usage != nil {
|
||||||
textResponse.Usage = *usage
|
// textResponse.Usage = *usage
|
||||||
}
|
// }
|
||||||
return nil
|
// return nil
|
||||||
case APITypeAIProxyLibrary:
|
case APITypeAIProxyLibrary:
|
||||||
if isStream {
|
if isStream {
|
||||||
err, usage := aiProxyLibraryStreamHandler(c, resp)
|
err, usage := aiProxyLibraryStreamHandler(c, resp)
|
||||||
@ -620,25 +625,25 @@ func relayTextHelper(c *gin.Context, relayMode int) *OpenAIErrorWithStatusCode {
|
|||||||
}
|
}
|
||||||
return nil
|
return nil
|
||||||
}
|
}
|
||||||
case APITypeTencent:
|
// case APITypeTencent:
|
||||||
if isStream {
|
// if isStream {
|
||||||
err, responseText := tencentStreamHandler(c, resp)
|
// err, responseText := tencentStreamHandler(c, resp)
|
||||||
if err != nil {
|
// if err != nil {
|
||||||
return err
|
// return err
|
||||||
}
|
// }
|
||||||
textResponse.Usage.PromptTokens = promptTokens
|
// textResponse.Usage.PromptTokens = promptTokens
|
||||||
textResponse.Usage.CompletionTokens = countTokenText(responseText, textRequest.Model)
|
// textResponse.Usage.CompletionTokens = countTokenText(responseText, textRequest.Model)
|
||||||
return nil
|
// return nil
|
||||||
} else {
|
// } else {
|
||||||
err, usage := tencentHandler(c, resp)
|
// err, usage := tencentHandler(c, resp)
|
||||||
if err != nil {
|
// if err != nil {
|
||||||
return err
|
// return err
|
||||||
}
|
// }
|
||||||
if usage != nil {
|
// if usage != nil {
|
||||||
textResponse.Usage = *usage
|
// textResponse.Usage = *usage
|
||||||
}
|
// }
|
||||||
return nil
|
// return nil
|
||||||
}
|
// }
|
||||||
default:
|
default:
|
||||||
return errorWrapper(errors.New("unknown api type"), "unknown_api_type", http.StatusInternalServerError)
|
return errorWrapper(errors.New("unknown api type"), "unknown_api_type", http.StatusInternalServerError)
|
||||||
}
|
}
|
||||||
|
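Across the branches above, most upstream adapters follow the same two-shape contract: a stream handler that returns the accumulated response text (so usage can be recomputed locally with countTokenText) and a blocking handler that returns usage directly. Spelled out as types, inferred from the call sites in this file; these aliases are illustrative and do not exist in the codebase:

// Inferred from handler signatures such as palmStreamHandler and tencentHandler.
type streamHandlerFunc func(c *gin.Context, resp *http.Response) (*OpenAIErrorWithStatusCode, string)
type blockingHandlerFunc func(c *gin.Context, resp *http.Response) (*OpenAIErrorWithStatusCode, *Usage)
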
@ -1,306 +1,306 @@
|
|||||||
package controller
|
package controller
|
||||||
|
|
||||||
import (
|
// import (
|
||||||
"crypto/hmac"
|
// "crypto/hmac"
|
||||||
"crypto/sha256"
|
// "crypto/sha256"
|
||||||
"encoding/base64"
|
// "encoding/base64"
|
||||||
"encoding/json"
|
// "encoding/json"
|
||||||
"fmt"
|
// "fmt"
|
||||||
"github.com/gin-gonic/gin"
|
// "github.com/gin-gonic/gin"
|
||||||
"github.com/gorilla/websocket"
|
// "github.com/gorilla/websocket"
|
||||||
"io"
|
// "io"
|
||||||
"net/http"
|
// "net/http"
|
||||||
"net/url"
|
// "net/url"
|
||||||
"one-api/common"
|
// "one-api/common"
|
||||||
"strings"
|
// "strings"
|
||||||
"time"
|
// "time"
|
||||||
)
|
// )
|
||||||
|
|
||||||
// https://console.xfyun.cn/services/cbm
|
// // https://console.xfyun.cn/services/cbm
|
||||||
// https://www.xfyun.cn/doc/spark/Web.html
|
// // https://www.xfyun.cn/doc/spark/Web.html
|
||||||
|
|
||||||
type XunfeiMessage struct {
|
// type XunfeiMessage struct {
|
||||||
Role string `json:"role"`
|
// Role string `json:"role"`
|
||||||
Content string `json:"content"`
|
// Content string `json:"content"`
|
||||||
}
|
// }
|
||||||
|
|
||||||
type XunfeiChatRequest struct {
|
// type XunfeiChatRequest struct {
|
||||||
Header struct {
|
// Header struct {
|
||||||
AppId string `json:"app_id"`
|
// AppId string `json:"app_id"`
|
||||||
} `json:"header"`
|
// } `json:"header"`
|
||||||
Parameter struct {
|
// Parameter struct {
|
||||||
Chat struct {
|
// Chat struct {
|
||||||
Domain string `json:"domain,omitempty"`
|
// Domain string `json:"domain,omitempty"`
|
||||||
Temperature float64 `json:"temperature,omitempty"`
|
// Temperature float64 `json:"temperature,omitempty"`
|
||||||
TopK int `json:"top_k,omitempty"`
|
// TopK int `json:"top_k,omitempty"`
|
||||||
MaxTokens int `json:"max_tokens,omitempty"`
|
// MaxTokens int `json:"max_tokens,omitempty"`
|
||||||
Auditing bool `json:"auditing,omitempty"`
|
// Auditing bool `json:"auditing,omitempty"`
|
||||||
} `json:"chat"`
|
// } `json:"chat"`
|
||||||
} `json:"parameter"`
|
// } `json:"parameter"`
|
||||||
Payload struct {
|
// Payload struct {
|
||||||
Message struct {
|
// Message struct {
|
||||||
Text []XunfeiMessage `json:"text"`
|
// Text []XunfeiMessage `json:"text"`
|
||||||
} `json:"message"`
|
// } `json:"message"`
|
||||||
} `json:"payload"`
|
// } `json:"payload"`
|
||||||
}
|
// }
|
||||||
|
|
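For orientation, the nested struct above serializes to a three-part JSON envelope keyed by the json tags shown (header, parameter.chat, payload.message.text). A small sketch of building and encoding it, with placeholder values; requestOpenAI2Xunfei below fills the same fields from an OpenAI request:

// Illustrative only; the app id and domain are placeholders.
req := XunfeiChatRequest{}
req.Header.AppId = "your-app-id"
req.Parameter.Chat.Domain = "general"
req.Payload.Message.Text = []XunfeiMessage{{Role: "user", Content: "Hello"}}
body, _ := json.Marshal(req)
// body now looks like:
// {"header":{"app_id":"your-app-id"},"parameter":{"chat":{"domain":"general"}},"payload":{"message":{"text":[{"role":"user","content":"Hello"}]}}}
_ = body
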
||||||
type XunfeiChatResponseTextItem struct {
|
// type XunfeiChatResponseTextItem struct {
|
||||||
Content string `json:"content"`
|
// Content string `json:"content"`
|
||||||
Role string `json:"role"`
|
// Role string `json:"role"`
|
||||||
Index int `json:"index"`
|
// Index int `json:"index"`
|
||||||
}
|
// }
|
||||||
|
|
||||||
type XunfeiChatResponse struct {
|
// type XunfeiChatResponse struct {
|
||||||
Header struct {
|
// Header struct {
|
||||||
Code int `json:"code"`
|
// Code int `json:"code"`
|
||||||
Message string `json:"message"`
|
// Message string `json:"message"`
|
||||||
Sid string `json:"sid"`
|
// Sid string `json:"sid"`
|
||||||
Status int `json:"status"`
|
// Status int `json:"status"`
|
||||||
} `json:"header"`
|
// } `json:"header"`
|
||||||
Payload struct {
|
// Payload struct {
|
||||||
Choices struct {
|
// Choices struct {
|
||||||
Status int `json:"status"`
|
// Status int `json:"status"`
|
||||||
Seq int `json:"seq"`
|
// Seq int `json:"seq"`
|
||||||
Text []XunfeiChatResponseTextItem `json:"text"`
|
// Text []XunfeiChatResponseTextItem `json:"text"`
|
||||||
} `json:"choices"`
|
// } `json:"choices"`
|
||||||
Usage struct {
|
// Usage struct {
|
||||||
//Text struct {
|
// //Text struct {
|
||||||
// QuestionTokens string `json:"question_tokens"`
|
// // QuestionTokens string `json:"question_tokens"`
|
||||||
// PromptTokens string `json:"prompt_tokens"`
|
// // PromptTokens string `json:"prompt_tokens"`
|
||||||
// CompletionTokens string `json:"completion_tokens"`
|
// // CompletionTokens string `json:"completion_tokens"`
|
||||||
// TotalTokens string `json:"total_tokens"`
|
// // TotalTokens string `json:"total_tokens"`
|
||||||
//} `json:"text"`
|
// //} `json:"text"`
|
||||||
Text Usage `json:"text"`
|
// Text Usage `json:"text"`
|
||||||
} `json:"usage"`
|
// } `json:"usage"`
|
||||||
} `json:"payload"`
|
// } `json:"payload"`
|
||||||
}
|
// }
|
||||||
|
|
||||||
(Every remaining line of the Xunfei relay controller is commented out by this commit; the affected code, shown uncommented, is:)

func requestOpenAI2Xunfei(request GeneralOpenAIRequest, xunfeiAppId string, domain string) *XunfeiChatRequest {
	messages := make([]XunfeiMessage, 0, len(request.Messages))
	for _, message := range request.Messages {
		if message.Role == "system" {
			messages = append(messages, XunfeiMessage{
				Role:    "user",
				Content: message.Content,
			})
			messages = append(messages, XunfeiMessage{
				Role:    "assistant",
				Content: "Okay",
			})
		} else {
			messages = append(messages, XunfeiMessage{
				Role:    message.Role,
				Content: message.Content,
			})
		}
	}
	xunfeiRequest := XunfeiChatRequest{}
	xunfeiRequest.Header.AppId = xunfeiAppId
	xunfeiRequest.Parameter.Chat.Domain = domain
	xunfeiRequest.Parameter.Chat.Temperature = request.Temperature
	xunfeiRequest.Parameter.Chat.TopK = request.N
	xunfeiRequest.Parameter.Chat.MaxTokens = request.MaxTokens
	xunfeiRequest.Payload.Message.Text = messages
	return &xunfeiRequest
}

func responseXunfei2OpenAI(response *XunfeiChatResponse) *OpenAITextResponse {
	if len(response.Payload.Choices.Text) == 0 {
		response.Payload.Choices.Text = []XunfeiChatResponseTextItem{
			{
				Content: "",
			},
		}
	}
	choice := OpenAITextResponseChoice{
		Index: 0,
		Message: Message{
			Role:    "assistant",
			Content: response.Payload.Choices.Text[0].Content,
		},
		FinishReason: stopFinishReason,
	}
	fullTextResponse := OpenAITextResponse{
		Object:  "chat.completion",
		Created: common.GetTimestamp(),
		Choices: []OpenAITextResponseChoice{choice},
		Usage:   response.Payload.Usage.Text,
	}
	return &fullTextResponse
}

func streamResponseXunfei2OpenAI(xunfeiResponse *XunfeiChatResponse) *ChatCompletionsStreamResponse {
	if len(xunfeiResponse.Payload.Choices.Text) == 0 {
		xunfeiResponse.Payload.Choices.Text = []XunfeiChatResponseTextItem{
			{
				Content: "",
			},
		}
	}
	var choice ChatCompletionsStreamResponseChoice
	choice.Delta.Content = xunfeiResponse.Payload.Choices.Text[0].Content
	if xunfeiResponse.Payload.Choices.Status == 2 {
		choice.FinishReason = &stopFinishReason
	}
	response := ChatCompletionsStreamResponse{
		Object:  "chat.completion.chunk",
		Created: common.GetTimestamp(),
		Model:   "SparkDesk",
		Choices: []ChatCompletionsStreamResponseChoice{choice},
	}
	return &response
}

func buildXunfeiAuthUrl(hostUrl string, apiKey, apiSecret string) string {
	HmacWithShaToBase64 := func(algorithm, data, key string) string {
		mac := hmac.New(sha256.New, []byte(key))
		mac.Write([]byte(data))
		encodeData := mac.Sum(nil)
		return base64.StdEncoding.EncodeToString(encodeData)
	}
	ul, err := url.Parse(hostUrl)
	if err != nil {
		fmt.Println(err)
	}
	date := time.Now().UTC().Format(time.RFC1123)
	signString := []string{"host: " + ul.Host, "date: " + date, "GET " + ul.Path + " HTTP/1.1"}
	sign := strings.Join(signString, "\n")
	sha := HmacWithShaToBase64("hmac-sha256", sign, apiSecret)
	authUrl := fmt.Sprintf("hmac username=\"%s\", algorithm=\"%s\", headers=\"%s\", signature=\"%s\"", apiKey,
		"hmac-sha256", "host date request-line", sha)
	authorization := base64.StdEncoding.EncodeToString([]byte(authUrl))
	v := url.Values{}
	v.Add("host", ul.Host)
	v.Add("date", date)
	v.Add("authorization", authorization)
	callUrl := hostUrl + "?" + v.Encode()
	return callUrl
}

func xunfeiStreamHandler(c *gin.Context, textRequest GeneralOpenAIRequest, appId string, apiSecret string, apiKey string) (*OpenAIErrorWithStatusCode, *Usage) {
	domain, authUrl := getXunfeiAuthUrl(c, apiKey, apiSecret)
	dataChan, stopChan, err := xunfeiMakeRequest(textRequest, domain, authUrl, appId)
	if err != nil {
		return errorWrapper(err, "make xunfei request err", http.StatusInternalServerError), nil
	}
	setEventStreamHeaders(c)
	var usage Usage
	c.Stream(func(w io.Writer) bool {
		select {
		case xunfeiResponse := <-dataChan:
			usage.PromptTokens += xunfeiResponse.Payload.Usage.Text.PromptTokens
			usage.CompletionTokens += xunfeiResponse.Payload.Usage.Text.CompletionTokens
			usage.TotalTokens += xunfeiResponse.Payload.Usage.Text.TotalTokens
			response := streamResponseXunfei2OpenAI(&xunfeiResponse)
			jsonResponse, err := json.Marshal(response)
			if err != nil {
				common.SysError("error marshalling stream response: " + err.Error())
				return true
			}
			c.Render(-1, common.CustomEvent{Data: "data: " + string(jsonResponse)})
			return true
		case <-stopChan:
			c.Render(-1, common.CustomEvent{Data: "data: [DONE]"})
			return false
		}
	})
	return nil, &usage
}

func xunfeiHandler(c *gin.Context, textRequest GeneralOpenAIRequest, appId string, apiSecret string, apiKey string) (*OpenAIErrorWithStatusCode, *Usage) {
	domain, authUrl := getXunfeiAuthUrl(c, apiKey, apiSecret)
	dataChan, stopChan, err := xunfeiMakeRequest(textRequest, domain, authUrl, appId)
	if err != nil {
		return errorWrapper(err, "make xunfei request err", http.StatusInternalServerError), nil
	}
	var usage Usage
	var content string
	var xunfeiResponse XunfeiChatResponse
	stop := false
	for !stop {
		select {
		case xunfeiResponse = <-dataChan:
			if len(xunfeiResponse.Payload.Choices.Text) == 0 {
				continue
			}
			content += xunfeiResponse.Payload.Choices.Text[0].Content
			usage.PromptTokens += xunfeiResponse.Payload.Usage.Text.PromptTokens
			usage.CompletionTokens += xunfeiResponse.Payload.Usage.Text.CompletionTokens
			usage.TotalTokens += xunfeiResponse.Payload.Usage.Text.TotalTokens
		case stop = <-stopChan:
		}
	}

	xunfeiResponse.Payload.Choices.Text[0].Content = content

	response := responseXunfei2OpenAI(&xunfeiResponse)
	jsonResponse, err := json.Marshal(response)
	if err != nil {
		return errorWrapper(err, "marshal_response_body_failed", http.StatusInternalServerError), nil
	}
	c.Writer.Header().Set("Content-Type", "application/json")
	_, _ = c.Writer.Write(jsonResponse)
	return nil, &usage
}

func xunfeiMakeRequest(textRequest GeneralOpenAIRequest, domain, authUrl, appId string) (chan XunfeiChatResponse, chan bool, error) {
	d := websocket.Dialer{
		HandshakeTimeout: 5 * time.Second,
	}
	conn, resp, err := d.Dial(authUrl, nil)
	if err != nil || resp.StatusCode != 101 {
		return nil, nil, err
	}
	data := requestOpenAI2Xunfei(textRequest, appId, domain)
	err = conn.WriteJSON(data)
	if err != nil {
		return nil, nil, err
	}

	dataChan := make(chan XunfeiChatResponse)
	stopChan := make(chan bool)
	go func() {
		for {
			_, msg, err := conn.ReadMessage()
			if err != nil {
				common.SysError("error reading stream response: " + err.Error())
				break
			}
			var response XunfeiChatResponse
			err = json.Unmarshal(msg, &response)
			if err != nil {
				common.SysError("error unmarshalling stream response: " + err.Error())
				break
			}
			dataChan <- response
			if response.Payload.Choices.Status == 2 {
				err := conn.Close()
				if err != nil {
					common.SysError("error closing websocket connection: " + err.Error())
				}
				break
			}
		}
		stopChan <- true
	}()

	return dataChan, stopChan, nil
}

func getXunfeiAuthUrl(c *gin.Context, apiKey string, apiSecret string) (string, string) {
	query := c.Request.URL.Query()
	apiVersion := query.Get("api-version")
	if apiVersion == "" {
		apiVersion = c.GetString("api_version")
	}
	if apiVersion == "" {
		apiVersion = "v1.1"
		common.SysLog("api_version not found, use default: " + apiVersion)
	}
	domain := "general"
	if apiVersion != "v1.1" {
		domain += strings.Split(apiVersion, ".")[0]
	}
	authUrl := buildXunfeiAuthUrl(fmt.Sprintf("wss://spark-api.xf-yun.com/%s/chat", apiVersion), apiKey, apiSecret)
	return domain, authUrl
}
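For orientation, the handshake URL returned by buildXunfeiAuthUrl above simply appends the signed HMAC header as query parameters; a rough, illustrative shape (all values are placeholders, not taken from the diff):

// illustrative only: url.Values.Encode sorts keys, so authorization, date and host appear in this order
const exampleXunfeiAuthUrl = "wss://spark-api.xf-yun.com/v1.1/chat" +
	"?authorization=BASE64_OF_SIGNED_HMAC_HEADER" +
	"&date=Mon%2C+02+Jan+2006+15%3A04%3A05+UTC" +
	"&host=spark-api.xf-yun.com"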
@@ -1,301 +1,301 @@
package controller

(This commit comments out every line of the Zhipu relay controller below the package clause; the affected code, shown uncommented, is:)

import (
	"bufio"
	"encoding/json"
	"github.com/gin-gonic/gin"
	"github.com/golang-jwt/jwt"
	"io"
	"net/http"
	"one-api/common"
	"strings"
	"sync"
	"time"
)

// https://open.bigmodel.cn/doc/api#chatglm_std
// chatglm_std, chatglm_lite
// https://open.bigmodel.cn/api/paas/v3/model-api/chatglm_std/invoke
// https://open.bigmodel.cn/api/paas/v3/model-api/chatglm_std/sse-invoke

type ZhipuMessage struct {
	Role    string `json:"role"`
	Content string `json:"content"`
}

type ZhipuRequest struct {
	Prompt      []ZhipuMessage `json:"prompt"`
	Temperature float64        `json:"temperature,omitempty"`
	TopP        float64        `json:"top_p,omitempty"`
	RequestId   string         `json:"request_id,omitempty"`
	Incremental bool           `json:"incremental,omitempty"`
}

type ZhipuResponseData struct {
	TaskId     string         `json:"task_id"`
	RequestId  string         `json:"request_id"`
	TaskStatus string         `json:"task_status"`
	Choices    []ZhipuMessage `json:"choices"`
	Usage      `json:"usage"`
}

type ZhipuResponse struct {
	Code    int               `json:"code"`
	Msg     string            `json:"msg"`
	Success bool              `json:"success"`
	Data    ZhipuResponseData `json:"data"`
}

type ZhipuStreamMetaResponse struct {
	RequestId  string `json:"request_id"`
	TaskId     string `json:"task_id"`
	TaskStatus string `json:"task_status"`
	Usage      `json:"usage"`
}

type zhipuTokenData struct {
	Token      string
	ExpiryTime time.Time
}

var zhipuTokens sync.Map
var expSeconds int64 = 24 * 3600

func getZhipuToken(apikey string) string {
	data, ok := zhipuTokens.Load(apikey)
	if ok {
		tokenData := data.(zhipuTokenData)
		if time.Now().Before(tokenData.ExpiryTime) {
			return tokenData.Token
		}
	}

	split := strings.Split(apikey, ".")
	if len(split) != 2 {
		common.SysError("invalid zhipu key: " + apikey)
		return ""
	}

	id := split[0]
	secret := split[1]

	expMillis := time.Now().Add(time.Duration(expSeconds)*time.Second).UnixNano() / 1e6
	expiryTime := time.Now().Add(time.Duration(expSeconds) * time.Second)

	timestamp := time.Now().UnixNano() / 1e6

	payload := jwt.MapClaims{
		"api_key":   id,
		"exp":       expMillis,
		"timestamp": timestamp,
	}

	token := jwt.NewWithClaims(jwt.SigningMethodHS256, payload)

	token.Header["alg"] = "HS256"
	token.Header["sign_type"] = "SIGN"

	tokenString, err := token.SignedString([]byte(secret))
	if err != nil {
		return ""
	}

	zhipuTokens.Store(apikey, zhipuTokenData{
		Token:      tokenString,
		ExpiryTime: expiryTime,
	})

	return tokenString
}

func requestOpenAI2Zhipu(request GeneralOpenAIRequest) *ZhipuRequest {
	messages := make([]ZhipuMessage, 0, len(request.Messages))
	for _, message := range request.Messages {
		if message.Role == "system" {
			messages = append(messages, ZhipuMessage{
				Role:    "system",
				Content: message.Content,
			})
			messages = append(messages, ZhipuMessage{
				Role:    "user",
				Content: "Okay",
			})
		} else {
			messages = append(messages, ZhipuMessage{
				Role:    message.Role,
				Content: message.Content,
			})
		}
	}
	return &ZhipuRequest{
		Prompt:      messages,
		Temperature: request.Temperature,
		TopP:        request.TopP,
		Incremental: false,
	}
}

func responseZhipu2OpenAI(response *ZhipuResponse) *OpenAITextResponse {
	fullTextResponse := OpenAITextResponse{
		Id:      response.Data.TaskId,
		Object:  "chat.completion",
		Created: common.GetTimestamp(),
		Choices: make([]OpenAITextResponseChoice, 0, len(response.Data.Choices)),
		Usage:   response.Data.Usage,
	}
	for i, choice := range response.Data.Choices {
		openaiChoice := OpenAITextResponseChoice{
			Index: i,
			Message: Message{
				Role:    choice.Role,
				Content: strings.Trim(choice.Content, "\""),
			},
			FinishReason: "",
		}
		if i == len(response.Data.Choices)-1 {
			openaiChoice.FinishReason = "stop"
		}
		fullTextResponse.Choices = append(fullTextResponse.Choices, openaiChoice)
	}
	return &fullTextResponse
}

func streamResponseZhipu2OpenAI(zhipuResponse string) *ChatCompletionsStreamResponse {
	var choice ChatCompletionsStreamResponseChoice
	choice.Delta.Content = zhipuResponse
	response := ChatCompletionsStreamResponse{
		Object:  "chat.completion.chunk",
		Created: common.GetTimestamp(),
		Model:   "chatglm",
		Choices: []ChatCompletionsStreamResponseChoice{choice},
	}
	return &response
}

func streamMetaResponseZhipu2OpenAI(zhipuResponse *ZhipuStreamMetaResponse) (*ChatCompletionsStreamResponse, *Usage) {
	var choice ChatCompletionsStreamResponseChoice
	choice.Delta.Content = ""
	choice.FinishReason = &stopFinishReason
	response := ChatCompletionsStreamResponse{
		Id:      zhipuResponse.RequestId,
		Object:  "chat.completion.chunk",
		Created: common.GetTimestamp(),
		Model:   "chatglm",
		Choices: []ChatCompletionsStreamResponseChoice{choice},
	}
	return &response, &zhipuResponse.Usage
}

func zhipuStreamHandler(c *gin.Context, resp *http.Response) (*OpenAIErrorWithStatusCode, *Usage) {
	var usage *Usage
	scanner := bufio.NewScanner(resp.Body)
	scanner.Split(func(data []byte, atEOF bool) (advance int, token []byte, err error) {
		if atEOF && len(data) == 0 {
			return 0, nil, nil
		}
		if i := strings.Index(string(data), "\n\n"); i >= 0 && strings.Index(string(data), ":") >= 0 {
			return i + 2, data[0:i], nil
		}
		if atEOF {
			return len(data), data, nil
		}
		return 0, nil, nil
	})
	dataChan := make(chan string)
	metaChan := make(chan string)
	stopChan := make(chan bool)
	go func() {
		for scanner.Scan() {
			data := scanner.Text()
			lines := strings.Split(data, "\n")
			for i, line := range lines {
				if len(line) < 5 {
					continue
				}
				if line[:5] == "data:" {
					dataChan <- line[5:]
					if i != len(lines)-1 {
						dataChan <- "\n"
					}
				} else if line[:5] == "meta:" {
					metaChan <- line[5:]
				}
			}
		}
		stopChan <- true
	}()
	setEventStreamHeaders(c)
	c.Stream(func(w io.Writer) bool {
		select {
		case data := <-dataChan:
			response := streamResponseZhipu2OpenAI(data)
			jsonResponse, err := json.Marshal(response)
			if err != nil {
				common.SysError("error marshalling stream response: " + err.Error())
				return true
			}
			c.Render(-1, common.CustomEvent{Data: "data: " + string(jsonResponse)})
			return true
		case data := <-metaChan:
			var zhipuResponse ZhipuStreamMetaResponse
			err := json.Unmarshal([]byte(data), &zhipuResponse)
			if err != nil {
				common.SysError("error unmarshalling stream response: " + err.Error())
				return true
			}
			response, zhipuUsage := streamMetaResponseZhipu2OpenAI(&zhipuResponse)
			jsonResponse, err := json.Marshal(response)
			if err != nil {
				common.SysError("error marshalling stream response: " + err.Error())
				return true
			}
			usage = zhipuUsage
			c.Render(-1, common.CustomEvent{Data: "data: " + string(jsonResponse)})
			return true
		case <-stopChan:
			c.Render(-1, common.CustomEvent{Data: "data: [DONE]"})
			return false
		}
	})
	err := resp.Body.Close()
	if err != nil {
		return errorWrapper(err, "close_response_body_failed", http.StatusInternalServerError), nil
	}
	return nil, usage
}

func zhipuHandler(c *gin.Context, resp *http.Response) (*OpenAIErrorWithStatusCode, *Usage) {
	var zhipuResponse ZhipuResponse
	responseBody, err := io.ReadAll(resp.Body)
	if err != nil {
		return errorWrapper(err, "read_response_body_failed", http.StatusInternalServerError), nil
	}
	err = resp.Body.Close()
	if err != nil {
		return errorWrapper(err, "close_response_body_failed", http.StatusInternalServerError), nil
	}
	err = json.Unmarshal(responseBody, &zhipuResponse)
	if err != nil {
		return errorWrapper(err, "unmarshal_response_body_failed", http.StatusInternalServerError), nil
	}
	if !zhipuResponse.Success {
		return &OpenAIErrorWithStatusCode{
			OpenAIError: OpenAIError{
				Message: zhipuResponse.Msg,
				Type:    "zhipu_error",
				Param:   "",
				Code:    zhipuResponse.Code,
			},
			StatusCode: resp.StatusCode,
		}, nil
	}
	fullTextResponse := responseZhipu2OpenAI(&zhipuResponse)
	jsonResponse, err := json.Marshal(fullTextResponse)
	if err != nil {
		return errorWrapper(err, "marshal_response_body_failed", http.StatusInternalServerError), nil
	}
	c.Writer.Header().Set("Content-Type", "application/json")
	c.Writer.WriteHeader(resp.StatusCode)
	_, err = c.Writer.Write(jsonResponse)
	return nil, &fullTextResponse.Usage
}
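For orientation, the (now commented-out) zhipuStreamHandler above splits the upstream body on blank lines and routes each frame by its data: or meta: prefix; an illustrative stream (values are placeholders, not taken from the diff):

// illustrative only: two content frames followed by one metadata frame, each terminated by a blank line
const exampleZhipuStream = "data: Hello\n\n" +
	"data: , world\n\n" +
	"meta: {\"task_status\":\"SUCCESS\",\"usage\":{\"total_tokens\":42}}\n\n"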
@@ -1,6 +1,7 @@
 package controller
 
 import (
+	"errors"
 	"fmt"
 	"net/http"
 	"one-api/common"
@@ -16,6 +17,44 @@ type Message struct {
 	Name *string `json:"name,omitempty"`
 }
 
+type VisionMessage struct {
+	Role    string                     `json:"role"`
+	Content OpenaiVisionMessageContent `json:"content"`
+	Name    *string                    `json:"name,omitempty"`
+}
+
+// OpenaiVisionMessageContentType vision message content type
+type OpenaiVisionMessageContentType string
+
+const (
+	// OpenaiVisionMessageContentTypeText text
+	OpenaiVisionMessageContentTypeText OpenaiVisionMessageContentType = "text"
+	// OpenaiVisionMessageContentTypeImageUrl image url
+	OpenaiVisionMessageContentTypeImageUrl OpenaiVisionMessageContentType = "image_url"
+)
+
+// OpenaiVisionMessageContent vision message content
+type OpenaiVisionMessageContent struct {
+	Type     OpenaiVisionMessageContentType     `json:"type"`
+	Text     string                             `json:"text,omitempty"`
+	ImageUrl OpenaiVisionMessageContentImageUrl `json:"image_url,omitempty"`
+}
+
+// VisionImageResolution image resolution
+type VisionImageResolution string
+
+const (
+	// VisionImageResolutionLow low resolution
+	VisionImageResolutionLow VisionImageResolution = "low"
+	// VisionImageResolutionHigh high resolution
+	VisionImageResolutionHigh VisionImageResolution = "high"
+)
+
+type OpenaiVisionMessageContentImageUrl struct {
+	URL    string                `json:"url"`
+	Detail VisionImageResolution `json:"detail,omitempty"`
+}
+
 const (
 	RelayModeUnknown = iota
 	RelayModeChatCompletions
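A minimal sketch (not part of the diff) of how a caller could assemble a vision message from the types added above; the image URL is a placeholder and encoding/json is assumed to be imported:

// sketch only: build and serialize a single image_url vision message
func buildVisionMessageExample() ([]byte, error) {
	msg := VisionMessage{
		Role: "user",
		Content: OpenaiVisionMessageContent{
			Type: OpenaiVisionMessageContentTypeImageUrl,
			ImageUrl: OpenaiVisionMessageContentImageUrl{
				URL:    "https://example.com/cat.png",
				Detail: VisionImageResolutionLow,
			},
		},
	}
	return json.Marshal(msg)
}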
@@ -30,18 +69,76 @@ const (
 // https://platform.openai.com/docs/api-reference/chat
 
 type GeneralOpenAIRequest struct {
 	Model       string  `json:"model,omitempty"`
-	Messages    []Message `json:"messages,omitempty"`
+	// Messages maybe []Message or []VisionMessage
+	Messages    any     `json:"messages,omitempty"`
 	Prompt      any     `json:"prompt,omitempty"`
 	Stream      bool    `json:"stream,omitempty"`
 	MaxTokens   int     `json:"max_tokens,omitempty"`
 	Temperature float64 `json:"temperature,omitempty"`
 	TopP        float64 `json:"top_p,omitempty"`
 	N           int     `json:"n,omitempty"`
 	Input       any     `json:"input,omitempty"`
 	Instruction string  `json:"instruction,omitempty"`
 	Size        string  `json:"size,omitempty"`
 	Functions   any     `json:"functions,omitempty"`
 }
 
+func (r *GeneralOpenAIRequest) MessagesLen() int {
+	switch msgs := r.Messages.(type) {
+	case []any:
+		return len(msgs)
+	case []Message:
+		return len(msgs)
+	case []VisionMessage:
+		return len(msgs)
+	default:
+		return 0
+	}
+}
+
+// TextMessages returns messages as []Message
+func (r *GeneralOpenAIRequest) TextMessages() (messages []Message, err error) {
+	switch msgs := r.Messages.(type) {
+	case []any:
+		messages = make([]Message, 0, len(msgs))
+		for _, msg := range msgs {
+			if m, ok := msg.(Message); ok {
+				messages = append(messages, m)
+			} else {
+				err = fmt.Errorf("invalid message type")
+				return
+			}
+		}
+	case []Message:
+		messages = msgs
+	default:
+		return nil, errors.New("invalid message type")
+	}
+
+	return
+}
+
+// VisionMessages returns messages as []VisionMessage
+func (r *GeneralOpenAIRequest) VisionMessages() (messages []VisionMessage, err error) {
+	switch msgs := r.Messages.(type) {
+	case []any:
+		messages = make([]VisionMessage, 0, len(msgs))
+		for _, msg := range msgs {
+			if m, ok := msg.(VisionMessage); ok {
+				messages = append(messages, m)
+			} else {
+				err = fmt.Errorf("invalid message type")
+				return
+			}
+		}
+	case []VisionMessage:
+		messages = msgs
+	default:
+		return nil, errors.New("invalid message type")
+	}
+
+	return
+}
+
 func (r GeneralOpenAIRequest) ParseInput() []string {
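A minimal sketch (not part of the diff) of how an adapter could branch on the new helper methods; TextMessages and VisionMessages type-assert concrete Message and VisionMessage values, so this applies to requests assembled in Go, and relayText/relayVision are hypothetical stand-ins:

// sketch only: relayText and relayVision stand in for real adapter entry points
func relayByMessageKind(req GeneralOpenAIRequest) error {
	if req.MessagesLen() == 0 {
		return errors.New("request carries no messages")
	}
	if msgs, err := req.TextMessages(); err == nil {
		return relayText(msgs) // plain chat messages
	}
	if msgs, err := req.VisionMessages(); err == nil {
		return relayVision(msgs) // vision messages with text/image_url content
	}
	return errors.New("unsupported messages payload")
}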
go.mod (7 changed lines)
@@ -1,7 +1,6 @@
 module one-api
 
-// +heroku goVersion go1.18
-go 1.18
+go 1.21
 
 require (
 	github.com/gin-contrib/cors v1.4.0
@@ -11,9 +10,7 @@ require (
 	github.com/gin-gonic/gin v1.9.1
 	github.com/go-playground/validator/v10 v10.16.0
 	github.com/go-redis/redis/v8 v8.11.5
-	github.com/golang-jwt/jwt v3.2.2+incompatible
 	github.com/google/uuid v1.4.0
-	github.com/gorilla/websocket v1.5.1
 	github.com/pkoukk/tiktoken-go v0.1.6
 	golang.org/x/crypto v0.15.0
 	gorm.io/driver/mysql v1.5.2
@@ -53,7 +50,7 @@ require (
 	github.com/twitchyliquid64/golang-asm v0.15.1 // indirect
 	github.com/ugorji/go/codec v1.2.11 // indirect
 	golang.org/x/arch v0.3.0 // indirect
-	golang.org/x/net v0.17.0 // indirect
+	golang.org/x/net v0.10.0 // indirect
 	golang.org/x/sys v0.14.0 // indirect
 	golang.org/x/text v0.14.0 // indirect
 	google.golang.org/protobuf v1.30.0 // indirect
go.sum (14 changed lines)
@@ -15,6 +15,7 @@ github.com/dgryski/go-rendezvous v0.0.0-20200823014737-9f7001d12a5f/go.mod h1:cu
 github.com/dlclark/regexp2 v1.10.0 h1:+/GIL799phkJqYW+3YbOd8LCcbHzT0Pbo8zl70MHsq0=
 github.com/dlclark/regexp2 v1.10.0/go.mod h1:DHkYz0B9wPfa6wondMfaivmHpzrQ3v9q8cnmRbL6yW8=
 github.com/fsnotify/fsnotify v1.4.9 h1:hsms1Qyu0jgnwNXIxa+/V/PDsU6CfLf6CNO8H7IWoS4=
+github.com/fsnotify/fsnotify v1.4.9/go.mod h1:znqG4EE+3YCdAaPaxE2ZRY/06pZUdp0tY4IgpuI1SZQ=
 github.com/gabriel-vasile/mimetype v1.4.2 h1:w5qFW6JKBz9Y393Y4q372O9A7cUSequkh1Q7OhCmWKU=
 github.com/gabriel-vasile/mimetype v1.4.2/go.mod h1:zApsH/mKG4w07erKIaJPFiX0Tsq9BFQgN3qGY5GnNgA=
 github.com/gin-contrib/cors v1.4.0 h1:oJ6gwtUl3lqV0WEIwM/LxPF1QZ5qe2lGWdY2+bz7y0g=
@@ -33,6 +34,7 @@ github.com/gin-gonic/gin v1.9.1 h1:4idEAncQnU5cB7BeOkPtxjfCSye0AAm1R0RVIqJ+Jmg=
 github.com/gin-gonic/gin v1.9.1/go.mod h1:hPrL7YrpYKXt5YId3A/Tnip5kqbEAP+KLuI3SUcPTeU=
 github.com/go-playground/assert/v2 v2.0.1/go.mod h1:VDjEfimB/XKnb+ZQfWdccd7VUvScMdVu0Titje2rxJ4=
 github.com/go-playground/assert/v2 v2.2.0 h1:JvknZsQTYeFEAhQwI4qEt9cyV5ONwRHC+lYKSsYSR8s=
+github.com/go-playground/assert/v2 v2.2.0/go.mod h1:VDjEfimB/XKnb+ZQfWdccd7VUvScMdVu0Titje2rxJ4=
 github.com/go-playground/locales v0.13.0/go.mod h1:taPMhCMXrRLJO55olJkUXHZBHCxTMfnGwq/HNwmWNS8=
 github.com/go-playground/locales v0.14.0/go.mod h1:sawfccIbzZTqEDETgFXqTho0QybSa7l++s0DH+LDiLs=
 github.com/go-playground/locales v0.14.1 h1:EWaQ/wswjilfKLTECiXz7Rh+3BjFhfDFKv/oXslEjJA=
@@ -52,8 +54,6 @@ github.com/go-sql-driver/mysql v1.7.0/go.mod h1:OXbVy3sEdcQ2Doequ6Z5BW6fXNQTmx+9
 github.com/goccy/go-json v0.9.7/go.mod h1:6MelG93GURQebXPDq3khkgXZkazVtN9CRI+MGFi0w8I=
 github.com/goccy/go-json v0.10.2 h1:CrxCmQqYDkv1z7lO7Wbh2HN93uovUHgrECaO5ZrCXAU=
 github.com/goccy/go-json v0.10.2/go.mod h1:6MelG93GURQebXPDq3khkgXZkazVtN9CRI+MGFi0w8I=
-github.com/golang-jwt/jwt v3.2.2+incompatible h1:IfV12K8xAKAnZqdXVzCZ+TOjboZ2keLg81eXfW3O+oY=
-github.com/golang-jwt/jwt v3.2.2+incompatible/go.mod h1:8pz2t5EyA70fFQQSrl6XZXzqecmYZeUEB8OUGHkxJ+I=
 github.com/golang/protobuf v1.3.3/go.mod h1:vzj43D7+SQXF/4pzW/hwtAqwc6iTitCiVSaWz5lYuqw=
 github.com/golang/protobuf v1.5.0/go.mod h1:FsONVRAS9T7sI+LIUmWTfcYkHO4aIWwzhcaSAoJOfIk=
 github.com/google/go-cmp v0.5.5 h1:Khx7svrCpmxxtHBq5j2mp/xVjsi8hQMfNLvJFAlrGgU=
@@ -67,8 +67,6 @@ github.com/gorilla/securecookie v1.1.1 h1:miw7JPhV+b/lAHSXz4qd/nN9jRiAFV5FwjeKyC
 github.com/gorilla/securecookie v1.1.1/go.mod h1:ra0sb63/xPlUeL+yeDciTfxMRAA+MP+HVt/4epWDjd4=
 github.com/gorilla/sessions v1.2.1 h1:DHd3rPN5lE3Ts3D8rKkQ8x/0kqfeNmBAaiSi+o7FsgI=
 github.com/gorilla/sessions v1.2.1/go.mod h1:dk2InVEVJ0sfLlnXv9EAgkf6ecYs/i80K/zI+bUmuGM=
-github.com/gorilla/websocket v1.5.1 h1:gmztn0JnHVt9JZquRuzLw3g4wouNVzKL15iLr/zn/QY=
-github.com/gorilla/websocket v1.5.1/go.mod h1:x3kM2JMyaluk02fnUJpQuwD2dCS5NDG2ZHL0uE0tcaY=
 github.com/jackc/pgpassfile v1.0.0 h1:/6Hmqy13Ss2zCq62VdNG8tM1wchn8zjSGOBJ6icpsIM=
 github.com/jackc/pgpassfile v1.0.0/go.mod h1:CEx0iS5ambNFdcRtxPj5JhEz+xB6uRky5eyVu/W2HEg=
 github.com/jackc/pgservicefile v0.0.0-20221227161230-091c0ba34f0a h1:bbPeKD0xmW/Y25WS6cokEszi5g+S0QxI/d45PkRi7Nk=
@@ -110,8 +108,11 @@ github.com/modern-go/reflect2 v0.0.0-20180701023420-4b7aa43c6742/go.mod h1:bx2lN
 github.com/modern-go/reflect2 v1.0.2 h1:xBagoLtFs94CBntxluKeaWgTMpvLxC4ur3nMaC9Gz0M=
 github.com/modern-go/reflect2 v1.0.2/go.mod h1:yWuevngMOJpCy52FWWMvUC8ws7m/LJsjYzDa0/r8luk=
 github.com/nxadm/tail v1.4.8 h1:nPr65rt6Y5JFSKQO7qToXr7pePgD6Gwiw05lkbyAQTE=
+github.com/nxadm/tail v1.4.8/go.mod h1:+ncqLTQzXmGhMZNUePPaPqPvBxHAIsmXswZKocGu+AU=
 github.com/onsi/ginkgo v1.16.5 h1:8xi0RTUf59SOSfEtZMvwTvXYMzG4gV23XVHOZiXNtnE=
+github.com/onsi/ginkgo v1.16.5/go.mod h1:+E8gABHa3K6zRBolWtd+ROzc/U5bkGt0FwiG042wbpU=
 github.com/onsi/gomega v1.18.1 h1:M1GfJqGRrBrrGGsbxzV5dqM2U2ApXefZCQpkukxYRLE=
+github.com/onsi/gomega v1.18.1/go.mod h1:0q+aL8jAiMXy9hbwj2mr5GziHiwhAIQpFmmtT5hitRs=
 github.com/pelletier/go-toml/v2 v2.0.1/go.mod h1:r9LEWfGN8R5k0VXJ+0BkIe7MYkRdwZOjgMj2KwnJFUo=
 github.com/pelletier/go-toml/v2 v2.0.8 h1:0ctb6s9mE31h0/lhu+J6OPmVeDxJn+kYnJc2jZR9tGQ=
 github.com/pelletier/go-toml/v2 v2.0.8/go.mod h1:vuYfssBdrU2XDZ9bYydBu6t+6a6PYNcZljzZR9VXg+4=
@@ -151,8 +152,8 @@ golang.org/x/crypto v0.0.0-20210711020723-a769d52b0f97/go.mod h1:GvvjBRRGRdwPK5y
 golang.org/x/crypto v0.15.0 h1:frVn1TEaCEaZcn3Tmd7Y2b5KKPaZ+I32Q2OA3kYp5TA=
 golang.org/x/crypto v0.15.0/go.mod h1:4ChreQoLWfG3xLDer1WdlH5NdlQ3+mwnQq1YTKY+72g=
 golang.org/x/net v0.0.0-20210226172049-e18ecbb05110/go.mod h1:m0MpNAwzfU5UDzcl9v0D8zg8gWTRqZa9RBIspLL5mdg=
-golang.org/x/net v0.17.0 h1:pVaXccu2ozPjCXewfr1S7xza/zcXTity9cCdXQYSjIM=
-golang.org/x/net v0.17.0/go.mod h1:NxSsAGuq816PNPmqtQdLE42eU2Fs7NoRIZrHJAlaCOE=
+golang.org/x/net v0.10.0 h1:X2//UzNDwYmtCLn7To6G58Wr6f5ahEAQgKNzv9Y951M=
+golang.org/x/net v0.10.0/go.mod h1:0qNGK6F8kojg2nk9dLZ2mShWaEBan6FAoqfSigmmuDg=
 golang.org/x/sys v0.0.0-20200116001909-b77594299b42/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs=
 golang.org/x/sys v0.0.0-20201119102817-f84b799fce68/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs=
 golang.org/x/sys v0.0.0-20210615035016-665e8c7367d1/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg=
@@ -181,6 +182,7 @@ gopkg.in/check.v1 v1.0.0-20201130134442-10cb98267c6c h1:Hei/4ADfdWqJk1ZMxUNpqntN
 gopkg.in/check.v1 v1.0.0-20201130134442-10cb98267c6c/go.mod h1:JHkPIbrfpd72SG/EVd6muEfDQjcINNoR0C8j2r3qZ4Q=
 gopkg.in/errgo.v2 v2.1.0/go.mod h1:hNsd1EY+bozCKY1Ytp96fpM3vjJbqLJn88ws8XvfDNI=
 gopkg.in/tomb.v1 v1.0.0-20141024135613-dd632973f1e7 h1:uRGJdciOHaEIrze2W8Q3AKkepLTh2hOroT7a+7czfdQ=
+gopkg.in/tomb.v1 v1.0.0-20141024135613-dd632973f1e7/go.mod h1:dt/ZhP58zS4L8KSrWDmTeBkI65Dw0HsyUHuEVlX15mw=
 gopkg.in/yaml.v2 v2.2.2/go.mod h1:hI93XBmqTisBFMUTm0b8Fm+jr3Dg1NNxqwp+5A1VGuI=
 gopkg.in/yaml.v2 v2.2.8/go.mod h1:hI93XBmqTisBFMUTm0b8Fm+jr3Dg1NNxqwp+5A1VGuI=
 gopkg.in/yaml.v2 v2.4.0 h1:D8xgwECY7CYvx+Y2n4sBz93Jn9JRvxdiyyo8CTfuKaY=
package-lock.json (new generated file, 6 lines)
@@ -0,0 +1,6 @@
+{
+  "name": "one-api",
+  "lockfileVersion": 3,
+  "requires": true,
+  "packages": {}
+}