Merge commit '2369025842b828ac38f4427fd1ebab8d03b1fe7f'

Laisky.Cai committed 2024-04-20 01:07:29 +00:00
139 changed files with 2642 additions and 2625 deletions


@@ -1,145 +1,146 @@
package zhipu

import (
    "errors"
    "fmt"
    "io"
    "math"
    "net/http"
    "strings"

    "github.com/Laisky/one-api/relay/adaptor"
    "github.com/Laisky/one-api/relay/adaptor/openai"
    "github.com/Laisky/one-api/relay/meta"
    "github.com/Laisky/one-api/relay/model"
    "github.com/Laisky/one-api/relay/relaymode"
    "github.com/gin-gonic/gin"
)

type Adaptor struct {
    APIVersion string
}

func (a *Adaptor) Init(meta *meta.Meta) {
}

func (a *Adaptor) SetVersionByModeName(modelName string) {
    if strings.HasPrefix(modelName, "glm-") {
        a.APIVersion = "v4"
    } else {
        a.APIVersion = "v3"
    }
}

func (a *Adaptor) GetRequestURL(meta *meta.Meta) (string, error) {
    switch meta.Mode {
    case relaymode.ImagesGenerations:
        return fmt.Sprintf("%s/api/paas/v4/images/generations", meta.BaseURL), nil
    case relaymode.Embeddings:
        return fmt.Sprintf("%s/api/paas/v4/embeddings", meta.BaseURL), nil
    }
    a.SetVersionByModeName(meta.ActualModelName)
    if a.APIVersion == "v4" {
        return fmt.Sprintf("%s/api/paas/v4/chat/completions", meta.BaseURL), nil
    }
    method := "invoke"
    if meta.IsStream {
        method = "sse-invoke"
    }
    return fmt.Sprintf("%s/api/paas/v3/model-api/%s/%s", meta.BaseURL, meta.ActualModelName, method), nil
}

func (a *Adaptor) SetupRequestHeader(c *gin.Context, req *http.Request, meta *meta.Meta) error {
    adaptor.SetupCommonRequestHeader(c, req, meta)
    token := GetToken(meta.APIKey)
    req.Header.Set("Authorization", token)
    return nil
}

func (a *Adaptor) ConvertRequest(c *gin.Context, relayMode int, request *model.GeneralOpenAIRequest) (any, error) {
    if request == nil {
        return nil, errors.New("request is nil")
    }
    switch relayMode {
    case relaymode.Embeddings:
        baiduEmbeddingRequest := ConvertEmbeddingRequest(*request)
        return baiduEmbeddingRequest, nil
    default:
        // TopP (0.0, 1.0)
        request.TopP = math.Min(0.99, request.TopP)
        request.TopP = math.Max(0.01, request.TopP)

        // Temperature (0.0, 1.0)
        request.Temperature = math.Min(0.99, request.Temperature)
        request.Temperature = math.Max(0.01, request.Temperature)
        a.SetVersionByModeName(request.Model)
        if a.APIVersion == "v4" {
            return request, nil
        }
        return ConvertRequest(*request), nil
    }
}
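A note on the clamping above: zhipu's v3 API only accepts TopP and Temperature strictly inside (0.0, 1.0), so OpenAI-style boundary values like 0 and 1 are nudged inward. A minimal standalone sketch of the same rule (illustrative only, not part of the adaptor):

package main

import (
    "fmt"
    "math"
)

// clamp01 mirrors the TopP/Temperature adjustment in ConvertRequest:
// first cap at 0.99, then floor at 0.01, keeping the value strictly
// inside (0.0, 1.0).
func clamp01(v float64) float64 {
    v = math.Min(0.99, v)
    return math.Max(0.01, v)
}

func main() {
    fmt.Println(clamp01(0))   // 0.01
    fmt.Println(clamp01(1))   // 0.99
    fmt.Println(clamp01(0.7)) // 0.7
}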
func (a *Adaptor) ConvertImageRequest(request *model.ImageRequest) (any, error) {
    if request == nil {
        return nil, errors.New("request is nil")
    }
    newRequest := ImageRequest{
        Model:  request.Model,
        Prompt: request.Prompt,
        UserId: request.User,
    }
    return newRequest, nil
}

func (a *Adaptor) DoRequest(c *gin.Context, meta *meta.Meta, requestBody io.Reader) (*http.Response, error) {
    return adaptor.DoRequestHelper(a, c, meta, requestBody)
}

func (a *Adaptor) DoResponseV4(c *gin.Context, resp *http.Response, meta *meta.Meta) (usage *model.Usage, err *model.ErrorWithStatusCode) {
    if meta.IsStream {
        err, _, usage = openai.StreamHandler(c, resp, meta.Mode)
    } else {
        err, usage = openai.Handler(c, resp, meta.PromptTokens, meta.ActualModelName)
    }
    return
}

func (a *Adaptor) DoResponse(c *gin.Context, resp *http.Response, meta *meta.Meta) (usage *model.Usage, err *model.ErrorWithStatusCode) {
    switch meta.Mode {
    case relaymode.Embeddings:
        err, usage = EmbeddingsHandler(c, resp)
        return
    case relaymode.ImagesGenerations:
        err, usage = openai.ImageHandler(c, resp)
        return
    }
    if a.APIVersion == "v4" {
        return a.DoResponseV4(c, resp, meta)
    }
    if meta.IsStream {
        err, usage = StreamHandler(c, resp)
    } else {
        if meta.Mode == relaymode.Embeddings {
            err, usage = EmbeddingsHandler(c, resp)
        } else {
            err, usage = Handler(c, resp)
        }
    }
    return
}

func ConvertEmbeddingRequest(request model.GeneralOpenAIRequest) *EmbeddingRequest {
    return &EmbeddingRequest{
        Model: "embedding-2",
        Input: request.Input.(string),
    }
}

func (a *Adaptor) GetModelList() []string {
    return ModelList
}

func (a *Adaptor) GetChannelName() string {
    return "zhipu"
}
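The version dispatch is the heart of this adaptor: GetRequestURL sends glm-prefixed models to the shared v4 chat/completions endpoint and every other model to its per-model v3 invoke or sse-invoke path. A self-contained sketch of that rule, assuming the default https://open.bigmodel.cn base URL (the real adaptor takes it from meta.BaseURL):

package main

import (
    "fmt"
    "strings"
)

// resolveChatURL mirrors the chat branch of Adaptor.GetRequestURL:
// "glm-*" models use the v4 endpoint; others use the v3 per-model
// invoke/sse-invoke endpoints.
func resolveChatURL(baseURL, model string, stream bool) string {
    if strings.HasPrefix(model, "glm-") {
        return fmt.Sprintf("%s/api/paas/v4/chat/completions", baseURL)
    }
    method := "invoke"
    if stream {
        method = "sse-invoke"
    }
    return fmt.Sprintf("%s/api/paas/v3/model-api/%s/%s", baseURL, model, method)
}

func main() {
    base := "https://open.bigmodel.cn" // assumed default base URL
    fmt.Println(resolveChatURL(base, "glm-4", false))
    // https://open.bigmodel.cn/api/paas/v4/chat/completions
    fmt.Println(resolveChatURL(base, "chatglm_turbo", true))
    // https://open.bigmodel.cn/api/paas/v3/model-api/chatglm_turbo/sse-invoke
}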


@@ -1,5 +1,7 @@
package zhipu

var ModelList = []string{
    "chatglm_turbo", "chatglm_pro", "chatglm_std", "chatglm_lite",
    "glm-4", "glm-4v", "glm-3-turbo", "embedding-2",
    "cogview-3",
}


@@ -1,301 +1,304 @@
package zhipu

import (
    "bufio"
    "encoding/json"
    "io"
    "net/http"
    "strings"
    "sync"
    "time"

    "github.com/Laisky/one-api/common"
    "github.com/Laisky/one-api/common/helper"
    "github.com/Laisky/one-api/common/logger"
    "github.com/Laisky/one-api/relay/adaptor/openai"
    "github.com/Laisky/one-api/relay/constant"
    "github.com/Laisky/one-api/relay/model"
    "github.com/gin-gonic/gin"
    "github.com/golang-jwt/jwt"
)

// https://open.bigmodel.cn/doc/api#chatglm_std
// chatglm_std, chatglm_lite
// https://open.bigmodel.cn/api/paas/v3/model-api/chatglm_std/invoke
// https://open.bigmodel.cn/api/paas/v3/model-api/chatglm_std/sse-invoke

var zhipuTokens sync.Map
var expSeconds int64 = 24 * 3600

func GetToken(apikey string) string {
    data, ok := zhipuTokens.Load(apikey)
    if ok {
        tokenData := data.(tokenData)
        if time.Now().Before(tokenData.ExpiryTime) {
            return tokenData.Token
        }
    }

    split := strings.Split(apikey, ".")
    if len(split) != 2 {
        logger.SysError("invalid zhipu key: " + apikey)
        return ""
    }

    id := split[0]
    secret := split[1]

    expMillis := time.Now().Add(time.Duration(expSeconds)*time.Second).UnixNano() / 1e6
    expiryTime := time.Now().Add(time.Duration(expSeconds) * time.Second)

    timestamp := time.Now().UnixNano() / 1e6

    payload := jwt.MapClaims{
        "api_key":   id,
        "exp":       expMillis,
        "timestamp": timestamp,
    }

    token := jwt.NewWithClaims(jwt.SigningMethodHS256, payload)
    token.Header["alg"] = "HS256"
    token.Header["sign_type"] = "SIGN"

    tokenString, err := token.SignedString([]byte(secret))
    if err != nil {
        return ""
    }

    zhipuTokens.Store(apikey, tokenData{
        Token:      tokenString,
        ExpiryTime: expiryTime,
    })

    return tokenString
}
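GetToken caches one signed JWT per key for expSeconds (24 hours). The key uses zhipu's "{id}.{secret}" format, exp and timestamp are in milliseconds, and sign_type: SIGN is a zhipu-specific header field. A runnable sketch of the same signing flow, using a hypothetical key and a fixed timestamp so the output is reproducible:

package main

import (
    "fmt"
    "strings"

    "github.com/golang-jwt/jwt"
)

func main() {
    apikey := "my-key-id.my-key-secret" // hypothetical key in "{id}.{secret}" form
    split := strings.Split(apikey, ".")
    id, secret := split[0], split[1]

    nowMs := int64(1713571649000) // fixed millisecond timestamp for reproducibility
    claims := jwt.MapClaims{
        "api_key":   id,
        "exp":       nowMs + 24*3600*1000, // 24h from "now", in milliseconds
        "timestamp": nowMs,
    }
    token := jwt.NewWithClaims(jwt.SigningMethodHS256, claims)
    token.Header["alg"] = "HS256"
    token.Header["sign_type"] = "SIGN" // zhipu-specific, alongside the standard alg/typ
    signed, err := token.SignedString([]byte(secret))
    if err != nil {
        panic(err)
    }
    fmt.Println(signed) // sent verbatim in the Authorization header
}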
func ConvertRequest(request model.GeneralOpenAIRequest) *Request {
    messages := make([]Message, 0, len(request.Messages))
    for _, message := range request.Messages {
        messages = append(messages, Message{
            Role:    message.Role,
            Content: message.StringContent(),
        })
    }
    return &Request{
        Prompt:      messages,
        Temperature: request.Temperature,
        TopP:        request.TopP,
        Incremental: false,
    }
}

func responseZhipu2OpenAI(response *Response) *openai.TextResponse {
    fullTextResponse := openai.TextResponse{
        Id:      response.Data.TaskId,
        Object:  "chat.completion",
        Created: helper.GetTimestamp(),
        Choices: make([]openai.TextResponseChoice, 0, len(response.Data.Choices)),
        Usage:   response.Data.Usage,
    }
    for i, choice := range response.Data.Choices {
        openaiChoice := openai.TextResponseChoice{
            Index: i,
            Message: model.Message{
                Role:    choice.Role,
                Content: strings.Trim(choice.Content, "\""),
            },
            FinishReason: "",
        }
        if i == len(response.Data.Choices)-1 {
            openaiChoice.FinishReason = "stop"
        }
        fullTextResponse.Choices = append(fullTextResponse.Choices, openaiChoice)
    }
    return &fullTextResponse
}

func streamResponseZhipu2OpenAI(zhipuResponse string) *openai.ChatCompletionsStreamResponse {
    var choice openai.ChatCompletionsStreamResponseChoice
    choice.Delta.Content = zhipuResponse
    response := openai.ChatCompletionsStreamResponse{
        Object:  "chat.completion.chunk",
        Created: helper.GetTimestamp(),
        Model:   "chatglm",
        Choices: []openai.ChatCompletionsStreamResponseChoice{choice},
    }
    return &response
}

func streamMetaResponseZhipu2OpenAI(zhipuResponse *StreamMetaResponse) (*openai.ChatCompletionsStreamResponse, *model.Usage) {
    var choice openai.ChatCompletionsStreamResponseChoice
    choice.Delta.Content = ""
    choice.FinishReason = &constant.StopFinishReason
    response := openai.ChatCompletionsStreamResponse{
        Id:      zhipuResponse.RequestId,
        Object:  "chat.completion.chunk",
        Created: helper.GetTimestamp(),
        Model:   "chatglm",
        Choices: []openai.ChatCompletionsStreamResponseChoice{choice},
    }
    return &response, &zhipuResponse.Usage
}

func StreamHandler(c *gin.Context, resp *http.Response) (*model.ErrorWithStatusCode, *model.Usage) {
    var usage *model.Usage
    scanner := bufio.NewScanner(resp.Body)
    scanner.Split(func(data []byte, atEOF bool) (advance int, token []byte, err error) {
        if atEOF && len(data) == 0 {
            return 0, nil, nil
        }
        if i := strings.Index(string(data), "\n\n"); i >= 0 && strings.Index(string(data), ":") >= 0 {
            return i + 2, data[0:i], nil
        }
        if atEOF {
            return len(data), data, nil
        }
        return 0, nil, nil
    })
    dataChan := make(chan string)
    metaChan := make(chan string)
    stopChan := make(chan bool)
    go func() {
        for scanner.Scan() {
            data := scanner.Text()
            lines := strings.Split(data, "\n")
            for i, line := range lines {
                if len(line) < 5 {
                    continue
                }
                if line[:5] == "data:" {
                    dataChan <- line[5:]
                    if i != len(lines)-1 {
                        dataChan <- "\n"
                    }
                } else if line[:5] == "meta:" {
                    metaChan <- line[5:]
                }
            }
        }
        stopChan <- true
    }()
    common.SetEventStreamHeaders(c)
    c.Stream(func(w io.Writer) bool {
        select {
        case data := <-dataChan:
            response := streamResponseZhipu2OpenAI(data)
            jsonResponse, err := json.Marshal(response)
            if err != nil {
                logger.SysError("error marshalling stream response: " + err.Error())
                return true
            }
            c.Render(-1, common.CustomEvent{Data: "data: " + string(jsonResponse)})
            return true
        case data := <-metaChan:
            var zhipuResponse StreamMetaResponse
            err := json.Unmarshal([]byte(data), &zhipuResponse)
            if err != nil {
                logger.SysError("error unmarshalling stream response: " + err.Error())
                return true
            }
            response, zhipuUsage := streamMetaResponseZhipu2OpenAI(&zhipuResponse)
            jsonResponse, err := json.Marshal(response)
            if err != nil {
                logger.SysError("error marshalling stream response: " + err.Error())
                return true
            }
            usage = zhipuUsage
            c.Render(-1, common.CustomEvent{Data: "data: " + string(jsonResponse)})
            return true
        case <-stopChan:
            c.Render(-1, common.CustomEvent{Data: "data: [DONE]"})
            return false
        }
    })
    err := resp.Body.Close()
    if err != nil {
        return openai.ErrorWrapper(err, "close_response_body_failed", http.StatusInternalServerError), nil
    }
    return nil, usage
}

func Handler(c *gin.Context, resp *http.Response) (*model.ErrorWithStatusCode, *model.Usage) {
    var zhipuResponse Response
    responseBody, err := io.ReadAll(resp.Body)
    if err != nil {
        return openai.ErrorWrapper(err, "read_response_body_failed", http.StatusInternalServerError), nil
    }
    err = resp.Body.Close()
    if err != nil {
        return openai.ErrorWrapper(err, "close_response_body_failed", http.StatusInternalServerError), nil
    }
    err = json.Unmarshal(responseBody, &zhipuResponse)
    if err != nil {
        return openai.ErrorWrapper(err, "unmarshal_response_body_failed", http.StatusInternalServerError), nil
    }
    if !zhipuResponse.Success {
        return &model.ErrorWithStatusCode{
            Error: model.Error{
                Message: zhipuResponse.Msg,
                Type:    "zhipu_error",
                Param:   "",
                Code:    zhipuResponse.Code,
            },
            StatusCode: resp.StatusCode,
        }, nil
    }
    fullTextResponse := responseZhipu2OpenAI(&zhipuResponse)
    fullTextResponse.Model = "chatglm"
    jsonResponse, err := json.Marshal(fullTextResponse)
    if err != nil {
        return openai.ErrorWrapper(err, "marshal_response_body_failed", http.StatusInternalServerError), nil
    }
    c.Writer.Header().Set("Content-Type", "application/json")
    c.Writer.WriteHeader(resp.StatusCode)
    _, err = c.Writer.Write(jsonResponse)
    return nil, &fullTextResponse.Usage
}

func EmbeddingsHandler(c *gin.Context, resp *http.Response) (*model.ErrorWithStatusCode, *model.Usage) {
    var zhipuResponse EmbeddingResponse
    responseBody, err := io.ReadAll(resp.Body)
    if err != nil {
        return openai.ErrorWrapper(err, "read_response_body_failed", http.StatusInternalServerError), nil
    }
    err = resp.Body.Close()
    if err != nil {
        return openai.ErrorWrapper(err, "close_response_body_failed", http.StatusInternalServerError), nil
    }
    err = json.Unmarshal(responseBody, &zhipuResponse)
    if err != nil {
        return openai.ErrorWrapper(err, "unmarshal_response_body_failed", http.StatusInternalServerError), nil
    }
    fullTextResponse := embeddingResponseZhipu2OpenAI(&zhipuResponse)
    jsonResponse, err := json.Marshal(fullTextResponse)
    if err != nil {
        return openai.ErrorWrapper(err, "marshal_response_body_failed", http.StatusInternalServerError), nil
    }
    c.Writer.Header().Set("Content-Type", "application/json")
    c.Writer.WriteHeader(resp.StatusCode)
    _, err = c.Writer.Write(jsonResponse)
    return nil, &fullTextResponse.Usage
}

func embeddingResponseZhipu2OpenAI(response *EmbeddingResponse) *openai.EmbeddingResponse {
    openAIEmbeddingResponse := openai.EmbeddingResponse{
        Object: "list",
        Data:   make([]openai.EmbeddingResponseItem, 0, len(response.Embeddings)),
        Model:  response.Model,
        Usage: model.Usage{
            PromptTokens:     response.PromptTokens,
            CompletionTokens: response.CompletionTokens,
            TotalTokens:      response.Usage.TotalTokens,
        },
    }
    for _, item := range response.Embeddings {
        openAIEmbeddingResponse.Data = append(openAIEmbeddingResponse.Data, openai.EmbeddingResponseItem{
            Object:    `embedding`,
            Index:     item.Index,
            Embedding: item.Embedding,
        })
    }
    return &openAIEmbeddingResponse
}
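The custom bufio.SplitFunc in StreamHandler is the trickiest piece here: zhipu's v3 sse-invoke stream separates events with a blank line, data: lines carry text deltas, and a final meta: line carries the usage object. A self-contained sketch of the tokenizer against a synthetic payload (the event bodies are made up for illustration):

package main

import (
    "bufio"
    "fmt"
    "strings"
)

func main() {
    // Synthetic sse-invoke payload: two text deltas, then a meta event.
    raw := "data:Hello\n\ndata: world\n\nmeta:{\"task_status\":\"SUCCESS\"}\n\n"

    scanner := bufio.NewScanner(strings.NewReader(raw))
    // Same split function as StreamHandler: an event ends at the first
    // "\n\n", provided the buffered chunk contains a ':' marker.
    scanner.Split(func(data []byte, atEOF bool) (advance int, token []byte, err error) {
        if atEOF && len(data) == 0 {
            return 0, nil, nil
        }
        if i := strings.Index(string(data), "\n\n"); i >= 0 && strings.Index(string(data), ":") >= 0 {
            return i + 2, data[0:i], nil
        }
        if atEOF {
            return len(data), data, nil
        }
        return 0, nil, nil
    })
    for scanner.Scan() {
        fmt.Printf("event: %q\n", scanner.Text())
    }
    // event: "data:Hello"
    // event: "data: world"
    // event: "meta:{\"task_status\":\"SUCCESS\"}"
}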


@@ -1,46 +1,71 @@
package zhipu

import (
    "time"

    "github.com/Laisky/one-api/relay/model"
)

type Message struct {
    Role    string `json:"role"`
    Content string `json:"content"`
}

type Request struct {
    Prompt      []Message `json:"prompt"`
    Temperature float64   `json:"temperature,omitempty"`
    TopP        float64   `json:"top_p,omitempty"`
    RequestId   string    `json:"request_id,omitempty"`
    Incremental bool      `json:"incremental,omitempty"`
}

type ResponseData struct {
    TaskId      string    `json:"task_id"`
    RequestId   string    `json:"request_id"`
    TaskStatus  string    `json:"task_status"`
    Choices     []Message `json:"choices"`
    model.Usage `json:"usage"`
}

type Response struct {
    Code    int          `json:"code"`
    Msg     string       `json:"msg"`
    Success bool         `json:"success"`
    Data    ResponseData `json:"data"`
}

type StreamMetaResponse struct {
    RequestId   string `json:"request_id"`
    TaskId      string `json:"task_id"`
    TaskStatus  string `json:"task_status"`
    model.Usage `json:"usage"`
}

type tokenData struct {
    Token      string
    ExpiryTime time.Time
}

type EmbeddingRequest struct {
    Model string `json:"model"`
    Input string `json:"input"`
}

type EmbeddingResponse struct {
    Model       string          `json:"model"`
    Object      string          `json:"object"`
    Embeddings  []EmbeddingData `json:"data"`
    model.Usage `json:"usage"`
}

type EmbeddingData struct {
    Index     int       `json:"index"`
    Object    string    `json:"object"`
    Embedding []float64 `json:"embedding"`
}

type ImageRequest struct {
    Model  string `json:"model"`
    Prompt string `json:"prompt"`
    UserId string `json:"user_id,omitempty"`
}