Refactor codebase, introduce relaymode package, update constants and improve consistency

- Refactor constant definitions and organization
- Clean up package level variables and functions
- Introduce new `relaymode` and `apitype` packages for constant definitions
- Refactor and simplify code in several packages including `openai`, `relay/channel/baidu`, `relay/util`, `relay/controller`, `relay/channeltype`
- Add helper functions in `relay/channeltype` package to convert channel type constants to corresponding API type constants
- Remove deprecated functions such as `ResponseText2Usage` from `relay/channel/openai/helper.go`
- Modify code in `relay/util/validation.go` and related files to use new `validator.ValidateTextRequest` function
- Rename `util` package to `relaymode` and update related imports in several packages
This commit is contained in:
Laisky.Cai
2024-04-06 05:18:04 +00:00
parent 7c59a97a6b
commit 50bab08496
157 changed files with 3217 additions and 19160 deletions

View File

@@ -0,0 +1,105 @@
package ali
// import (
// "github.com/Laisky/errors/v2"
// "fmt"
// "github.com/gin-gonic/gin"
// "github.com/songquanpeng/one-api/common/config"
// "github.com/songquanpeng/one-api/relay/adaptor"
// "github.com/songquanpeng/one-api/relay/meta"
// "github.com/songquanpeng/one-api/relay/model"
// "github.com/songquanpeng/one-api/relay/relaymode"
// "io"
// "net/http"
// )
// // https://help.aliyun.com/zh/dashscope/developer-reference/api-details
// type Adaptor struct {
// }
// func (a *Adaptor) Init(meta *meta.Meta) {
// }
// func (a *Adaptor) GetRequestURL(meta *meta.Meta) (string, error) {
// fullRequestURL := ""
// switch meta.Mode {
// case relaymode.Embeddings:
// fullRequestURL = fmt.Sprintf("%s/api/v1/services/embeddings/text-embedding/text-embedding", meta.BaseURL)
// case relaymode.ImagesGenerations:
// fullRequestURL = fmt.Sprintf("%s/api/v1/services/aigc/text2image/image-synthesis", meta.BaseURL)
// default:
// fullRequestURL = fmt.Sprintf("%s/api/v1/services/aigc/text-generation/generation", meta.BaseURL)
// }
// return fullRequestURL, nil
// }
// func (a *Adaptor) SetupRequestHeader(c *gin.Context, req *http.Request, meta *meta.Meta) error {
// adaptor.SetupCommonRequestHeader(c, req, meta)
// if meta.IsStream {
// req.Header.Set("Accept", "text/event-stream")
// req.Header.Set("X-DashScope-SSE", "enable")
// }
// req.Header.Set("Authorization", "Bearer "+meta.APIKey)
// if meta.Mode == relaymode.ImagesGenerations {
// req.Header.Set("X-DashScope-Async", "enable")
// }
// if c.GetString(config.KeyPlugin) != "" {
// req.Header.Set("X-DashScope-Plugin", c.GetString(config.KeyPlugin))
// }
// return nil
// }
// func (a *Adaptor) ConvertRequest(c *gin.Context, relayMode int, request *model.GeneralOpenAIRequest) (any, error) {
// if request == nil {
// return nil, errors.New("request is nil")
// }
// switch relayMode {
// case relaymode.Embeddings:
// aliEmbeddingRequest := ConvertEmbeddingRequest(*request)
// return aliEmbeddingRequest, nil
// default:
// aliRequest := ConvertRequest(*request)
// return aliRequest, nil
// }
// }
// func (a *Adaptor) ConvertImageRequest(request *model.ImageRequest) (any, error) {
// if request == nil {
// return nil, errors.New("request is nil")
// }
// aliRequest := ConvertImageRequest(*request)
// return aliRequest, nil
// }
// func (a *Adaptor) DoRequest(c *gin.Context, meta *meta.Meta, requestBody io.Reader) (*http.Response, error) {
// return adaptor.DoRequestHelper(a, c, meta, requestBody)
// }
// func (a *Adaptor) DoResponse(c *gin.Context, resp *http.Response, meta *meta.Meta) (usage *model.Usage, err *model.ErrorWithStatusCode) {
// if meta.IsStream {
// err, usage = StreamHandler(c, resp)
// } else {
// switch meta.Mode {
// case relaymode.Embeddings:
// err, usage = EmbeddingHandler(c, resp)
// case relaymode.ImagesGenerations:
// err, usage = ImageHandler(c, resp)
// default:
// err, usage = Handler(c, resp)
// }
// }
// return
// }
// func (a *Adaptor) GetModelList() []string {
// return ModelList
// }
// func (a *Adaptor) GetChannelName() string {
// return "ali"
// }

View File

@@ -0,0 +1,7 @@
package ali
// ModelList enumerates the Ali (DashScope) models supported by this adaptor:
// qwen chat models, the text-embedding model, and image-generation models.
var ModelList = []string{
	"qwen-turbo", "qwen-plus", "qwen-max", "qwen-max-longcontext",
	"text-embedding-v1",
	"ali-stable-diffusion-xl", "ali-stable-diffusion-v1.5", "wanx-v1",
}

192
relay/adaptor/ali/image.go Normal file
View File

@@ -0,0 +1,192 @@
package ali
import (
"encoding/base64"
"encoding/json"
"fmt"
"github.com/Laisky/errors/v2"
"github.com/gin-gonic/gin"
"github.com/songquanpeng/one-api/common/helper"
"github.com/songquanpeng/one-api/common/logger"
"github.com/songquanpeng/one-api/relay/adaptor/openai"
"github.com/songquanpeng/one-api/relay/model"
"io"
"net/http"
"strings"
"time"
)
// ImageHandler handles the upstream response of an asynchronous Ali
// (DashScope) image-generation request: it decodes the task submission
// response, polls the task until it finishes, converts the final result to
// an OpenAI-compatible image response, and writes it to the client.
//
// On failure it returns a non-nil *model.ErrorWithStatusCode; the usage
// pointer is always nil (no token usage is reported for image tasks).
func ImageHandler(c *gin.Context, resp *http.Response) (*model.ErrorWithStatusCode, *model.Usage) {
	apiKey := c.Request.Header.Get("Authorization")
	apiKey = strings.TrimPrefix(apiKey, "Bearer ")
	responseFormat := c.GetString("response_format")

	var aliTaskResponse TaskResponse
	responseBody, err := io.ReadAll(resp.Body)
	if err != nil {
		return openai.ErrorWrapper(err, "read_response_body_failed", http.StatusInternalServerError), nil
	}
	if err = resp.Body.Close(); err != nil {
		return openai.ErrorWrapper(err, "close_response_body_failed", http.StatusInternalServerError), nil
	}
	if err = json.Unmarshal(responseBody, &aliTaskResponse); err != nil {
		return openai.ErrorWrapper(err, "unmarshal_response_body_failed", http.StatusInternalServerError), nil
	}
	// A non-empty Message means the task submission itself failed.
	if aliTaskResponse.Message != "" {
		logger.SysError("aliAsyncTask err: " + string(responseBody))
		return openai.ErrorWrapper(errors.New(aliTaskResponse.Message), "ali_async_task_failed", http.StatusInternalServerError), nil
	}
	aliResponse, _, err := asyncTaskWait(aliTaskResponse.Output.TaskId, apiKey)
	if err != nil {
		return openai.ErrorWrapper(err, "ali_async_task_wait_failed", http.StatusInternalServerError), nil
	}
	if aliResponse.Output.TaskStatus != "SUCCEEDED" {
		return &model.ErrorWithStatusCode{
			Error: model.Error{
				Message: aliResponse.Output.Message,
				Type:    "ali_error",
				Param:   "",
				Code:    aliResponse.Output.Code,
			},
			StatusCode: resp.StatusCode,
		}, nil
	}
	fullTextResponse := responseAli2OpenAIImage(aliResponse, responseFormat)
	jsonResponse, err := json.Marshal(fullTextResponse)
	if err != nil {
		return openai.ErrorWrapper(err, "marshal_response_body_failed", http.StatusInternalServerError), nil
	}
	c.Writer.Header().Set("Content-Type", "application/json")
	c.Writer.WriteHeader(resp.StatusCode)
	// Fix: the write error was previously assigned and silently dropped.
	// The response is already committed at this point, so log it instead
	// of returning an error to the caller.
	if _, err = c.Writer.Write(jsonResponse); err != nil {
		logger.SysError("ali image handler failed to write response: " + err.Error())
	}
	return nil, nil
}
// asyncTask fetches the current state of a DashScope asynchronous task by
// its task ID. It returns the decoded response, an error, and the raw
// response body. (The unconventional return order — error second — is kept
// to stay compatible with existing call sites.)
func asyncTask(taskID string, key string) (*TaskResponse, error, []byte) {
	url := fmt.Sprintf("https://dashscope.aliyuncs.com/api/v1/tasks/%s", taskID)
	var aliResponse TaskResponse
	req, err := http.NewRequest("GET", url, nil)
	if err != nil {
		return &aliResponse, err, nil
	}
	req.Header.Set("Authorization", "Bearer "+key)
	// Use a bounded timeout so a stuck upstream cannot hang the poll loop
	// indefinitely (the default client has no timeout).
	client := &http.Client{Timeout: 30 * time.Second}
	resp, err := client.Do(req)
	if err != nil {
		logger.SysError("aliAsyncTask client.Do err: " + err.Error())
		return &aliResponse, err, nil
	}
	defer resp.Body.Close()
	responseBody, err := io.ReadAll(resp.Body)
	if err != nil {
		// Fix: this error was previously ignored and immediately
		// overwritten by the json.Unmarshal assignment below.
		logger.SysError("aliAsyncTask io.ReadAll err: " + err.Error())
		return &aliResponse, err, nil
	}
	var response TaskResponse
	if err = json.Unmarshal(responseBody, &response); err != nil {
		logger.SysError("aliAsyncTask NewDecoder err: " + err.Error())
		return &aliResponse, err, nil
	}
	return &response, nil, responseBody
}
// asyncTaskWait polls a DashScope asynchronous task until it reaches a
// terminal status ("FAILED", "CANCELED", "SUCCEEDED" or "UNKNOWN"),
// checking every 2 seconds for at most 20 attempts. It returns the last
// task response, the raw response body, and an error on failure or timeout.
// An empty TaskStatus is treated as "nothing to wait for" and returns early.
func asyncTaskWait(taskID string, key string) (*TaskResponse, []byte, error) {
	const (
		pollInterval = 2 * time.Second
		maxAttempts  = 20
	)
	var (
		taskResponse TaskResponse
		responseBody []byte
	)
	for attempt := 1; ; attempt++ {
		rsp, err, body := asyncTask(taskID, key)
		responseBody = body
		if err != nil {
			return &taskResponse, responseBody, err
		}
		if rsp.Output.TaskStatus == "" {
			return &taskResponse, responseBody, nil
		}
		switch rsp.Output.TaskStatus {
		case "FAILED", "CANCELED", "SUCCEEDED", "UNKNOWN":
			return rsp, responseBody, nil
		}
		if attempt >= maxAttempts {
			break
		}
		time.Sleep(pollInterval)
	}
	return nil, nil, fmt.Errorf("aliAsyncTaskWait timeout")
}
// responseAli2OpenAIImage converts a completed DashScope task response into
// an OpenAI-compatible image response. When responseFormat is "b64_json",
// each result image is downloaded from its URL and base64-encoded locally;
// otherwise the upstream-provided b64_image value is passed through as-is.
// Results whose image bytes cannot be fetched are skipped (logged, not fatal).
func responseAli2OpenAIImage(response *TaskResponse, responseFormat string) *openai.ImageResponse {
	result := openai.ImageResponse{
		Created: helper.GetTimestamp(),
	}
	for _, item := range response.Output.Results {
		encoded := item.B64Image
		if responseFormat == "b64_json" {
			// Download the image behind item.Url and re-encode it as base64.
			raw, err := getImageData(item.Url)
			if err != nil {
				logger.SysError("getImageData Error getting image data: " + err.Error())
				continue
			}
			encoded = Base64Encode(raw)
		}
		result.Data = append(result.Data, openai.ImageData{
			Url:           item.Url,
			B64Json:       encoded,
			RevisedPrompt: "",
		})
	}
	return &result
}
// getImageData downloads the resource at url and returns its raw bytes.
//
// Fix: a non-2xx status is now reported as an error. Previously an error
// page body (e.g. an HTML 404) would be returned — and later base64-encoded
// — as if it were image data.
func getImageData(url string) ([]byte, error) {
	response, err := http.Get(url)
	if err != nil {
		return nil, err
	}
	defer response.Body.Close()
	if response.StatusCode < http.StatusOK || response.StatusCode >= http.StatusMultipleChoices {
		return nil, fmt.Errorf("get image data from %q: unexpected status %s", url, response.Status)
	}
	imageData, err := io.ReadAll(response.Body)
	if err != nil {
		return nil, err
	}
	return imageData, nil
}
// Base64Encode returns the standard (RFC 4648) base64 encoding of data.
func Base64Encode(data []byte) string {
	return base64.StdEncoding.EncodeToString(data)
}

323
relay/adaptor/ali/main.go Normal file
View File

@@ -0,0 +1,323 @@
package ali
// import (
// "github.com/songquanpeng/one-api/common"
// )
// // https://help.aliyun.com/document_detail/613695.html?spm=a2c4g.2399480.0.0.1adb778fAdzP9w#341800c0f8w0r
// type AliMessage struct {
// Content string `json:"content"`
// Role string `json:"role"`
// }
// type AliInput struct {
// //Prompt string `json:"prompt"`
// Messages []AliMessage `json:"messages"`
// }
// type AliParameters struct {
// TopP float64 `json:"top_p,omitempty"`
// TopK int `json:"top_k,omitempty"`
// Seed uint64 `json:"seed,omitempty"`
// EnableSearch bool `json:"enable_search,omitempty"`
// }
// type AliChatRequest struct {
// Model string `json:"model"`
// Input AliInput `json:"input"`
// Parameters AliParameters `json:"parameters,omitempty"`
// }
// type AliEmbeddingRequest struct {
// Model string `json:"model"`
// Input struct {
// Texts []string `json:"texts"`
// } `json:"input"`
// Parameters *struct {
// TextType string `json:"text_type,omitempty"`
// } `json:"parameters,omitempty"`
// }
// type AliEmbedding struct {
// Embedding []float64 `json:"embedding"`
// TextIndex int `json:"text_index"`
// }
// type AliEmbeddingResponse struct {
// Output struct {
// Embeddings []AliEmbedding `json:"embeddings"`
// } `json:"output"`
// Usage AliUsage `json:"usage"`
// AliError
// }
// type AliError struct {
// Code string `json:"code"`
// Message string `json:"message"`
// RequestId string `json:"request_id"`
// }
// type AliUsage struct {
// InputTokens int `json:"input_tokens"`
// OutputTokens int `json:"output_tokens"`
// TotalTokens int `json:"total_tokens"`
// }
// type AliOutput struct {
// Text string `json:"text"`
// FinishReason string `json:"finish_reason"`
// }
// type AliChatResponse struct {
// Output AliOutput `json:"output"`
// Usage AliUsage `json:"usage"`
// AliError
// }
// func requestOpenAI2Ali(request GeneralOpenAIRequest) *AliChatRequest {
// messages := make([]AliMessage, 0, len(request.Messages))
// prompt := ""
// for i := 0; i < len(request.Messages); i++ {
// message := request.Messages[i]
// if message.Role == "system" {
// messages = append(messages, AliMessage{
// User: message.Content,
// Bot: "Okay",
// })
// continue
// } else {
// if i == len(request.Messages)-1 {
// prompt = message.Content
// break
// }
// messages = append(messages, AliMessage{
// User: message.Content,
// Bot: request.Messages[i+1].Content,
// })
// i++
// }
// }
// return &AliChatRequest{
// Model: request.Model,
// Input: AliInput{
// Prompt: prompt,
// History: messages,
// },
// //Parameters: AliParameters{ // ChatGPT's parameters are not compatible with Ali's
// // TopP: request.TopP,
// // TopK: 50,
// // //Seed: 0,
// // //EnableSearch: false,
// //},
// }
// }
// func embeddingRequestOpenAI2Ali(request GeneralOpenAIRequest) *AliEmbeddingRequest {
// return &AliEmbeddingRequest{
// Model: "text-embedding-v1",
// Input: struct {
// Texts []string `json:"texts"`
// }{
// Texts: request.ParseInput(),
// },
// }
// }
// func aliEmbeddingHandler(c *gin.Context, resp *http.Response) (*OpenAIErrorWithStatusCode, *Usage) {
// var aliResponse AliEmbeddingResponse
// err := json.NewDecoder(resp.Body).Decode(&aliResponse)
// if err != nil {
// return errorWrapper(err, "unmarshal_response_body_failed", http.StatusInternalServerError), nil
// }
// err = resp.Body.Close()
// if err != nil {
// return errorWrapper(err, "close_response_body_failed", http.StatusInternalServerError), nil
// }
// if aliResponse.Code != "" {
// return &OpenAIErrorWithStatusCode{
// OpenAIError: OpenAIError{
// Message: aliResponse.Message,
// Type: aliResponse.Code,
// Param: aliResponse.RequestId,
// Code: aliResponse.Code,
// },
// StatusCode: resp.StatusCode,
// }, nil
// }
// fullTextResponse := embeddingResponseAli2OpenAI(&aliResponse)
// jsonResponse, err := json.Marshal(fullTextResponse)
// if err != nil {
// return errorWrapper(err, "marshal_response_body_failed", http.StatusInternalServerError), nil
// }
// c.Writer.Header().Set("Content-Type", "application/json")
// c.Writer.WriteHeader(resp.StatusCode)
// _, err = c.Writer.Write(jsonResponse)
// return nil, &fullTextResponse.Usage
// }
// func embeddingResponseAli2OpenAI(response *AliEmbeddingResponse) *OpenAIEmbeddingResponse {
// openAIEmbeddingResponse := OpenAIEmbeddingResponse{
// Object: "list",
// Data: make([]OpenAIEmbeddingResponseItem, 0, len(response.Output.Embeddings)),
// Model: "text-embedding-v1",
// Usage: Usage{TotalTokens: response.Usage.TotalTokens},
// }
// for _, item := range response.Output.Embeddings {
// openAIEmbeddingResponse.Data = append(openAIEmbeddingResponse.Data, OpenAIEmbeddingResponseItem{
// Object: `embedding`,
// Index: item.TextIndex,
// Embedding: item.Embedding,
// })
// }
// return &openAIEmbeddingResponse
// }
// func responseAli2OpenAI(response *AliChatResponse) *OpenAITextResponse {
// choice := OpenAITextResponseChoice{
// Index: 0,
// Message: Message{
// Role: "assistant",
// Content: response.Output.Text,
// },
// FinishReason: response.Output.FinishReason,
// }
// fullTextResponse := OpenAITextResponse{
// Id: response.RequestId,
// Object: "chat.completion",
// Created: common.GetTimestamp(),
// Choices: []OpenAITextResponseChoice{choice},
// Usage: Usage{
// PromptTokens: response.Usage.InputTokens,
// CompletionTokens: response.Usage.OutputTokens,
// TotalTokens: response.Usage.InputTokens + response.Usage.OutputTokens,
// },
// }
// return &fullTextResponse
// }
// func streamResponseAli2OpenAI(aliResponse *AliChatResponse) *ChatCompletionsStreamResponse {
// var choice ChatCompletionsStreamResponseChoice
// choice.Delta.Content = aliResponse.Output.Text
// if aliResponse.Output.FinishReason != "null" {
// finishReason := aliResponse.Output.FinishReason
// choice.FinishReason = &finishReason
// }
// response := ChatCompletionsStreamResponse{
// Id: aliResponse.RequestId,
// Object: "chat.completion.chunk",
// Created: common.GetTimestamp(),
// Model: "ernie-bot",
// Choices: []ChatCompletionsStreamResponseChoice{choice},
// }
// return &response
// }
// func aliStreamHandler(c *gin.Context, resp *http.Response) (*OpenAIErrorWithStatusCode, *Usage) {
// var usage Usage
// scanner := bufio.NewScanner(resp.Body)
// scanner.Split(func(data []byte, atEOF bool) (advance int, token []byte, err error) {
// if atEOF && len(data) == 0 {
// return 0, nil, nil
// }
// if i := strings.Index(string(data), "\n"); i >= 0 {
// return i + 1, data[0:i], nil
// }
// if atEOF {
// return len(data), data, nil
// }
// return 0, nil, nil
// })
// dataChan := make(chan string)
// stopChan := make(chan bool)
// go func() {
// for scanner.Scan() {
// data := scanner.Text()
// if len(data) < 5 { // ignore blank line or wrong format
// continue
// }
// if data[:5] != "data:" {
// continue
// }
// data = data[5:]
// dataChan <- data
// }
// stopChan <- true
// }()
// setEventStreamHeaders(c)
// lastResponseText := ""
// c.Stream(func(w io.Writer) bool {
// select {
// case data := <-dataChan:
// var aliResponse AliChatResponse
// err := json.Unmarshal([]byte(data), &aliResponse)
// if err != nil {
// common.SysError("error unmarshalling stream response: " + err.Error())
// return true
// }
// if aliResponse.Usage.OutputTokens != 0 {
// usage.PromptTokens = aliResponse.Usage.InputTokens
// usage.CompletionTokens = aliResponse.Usage.OutputTokens
// usage.TotalTokens = aliResponse.Usage.InputTokens + aliResponse.Usage.OutputTokens
// }
// response := streamResponseAli2OpenAI(&aliResponse)
// response.Choices[0].Delta.Content = strings.TrimPrefix(response.Choices[0].Delta.Content, lastResponseText)
// lastResponseText = aliResponse.Output.Text
// jsonResponse, err := json.Marshal(response)
// if err != nil {
// common.SysError("error marshalling stream response: " + err.Error())
// return true
// }
// c.Render(-1, common.CustomEvent{Data: "data: " + string(jsonResponse)})
// return true
// case <-stopChan:
// c.Render(-1, common.CustomEvent{Data: "data: [DONE]"})
// return false
// }
// })
// err := resp.Body.Close()
// if err != nil {
// return errorWrapper(err, "close_response_body_failed", http.StatusInternalServerError), nil
// }
// return nil, &usage
// }
// func aliHandler(c *gin.Context, resp *http.Response) (*OpenAIErrorWithStatusCode, *Usage) {
// var aliResponse AliChatResponse
// responseBody, err := io.ReadAll(resp.Body)
// if err != nil {
// return errorWrapper(err, "read_response_body_failed", http.StatusInternalServerError), nil
// }
// err = resp.Body.Close()
// if err != nil {
// return errorWrapper(err, "close_response_body_failed", http.StatusInternalServerError), nil
// }
// err = json.Unmarshal(responseBody, &aliResponse)
// if err != nil {
// return errorWrapper(err, "unmarshal_response_body_failed", http.StatusInternalServerError), nil
// }
// if aliResponse.Code != "" {
// return &OpenAIErrorWithStatusCode{
// OpenAIError: OpenAIError{
// Message: aliResponse.Message,
// Type: aliResponse.Code,
// Param: aliResponse.RequestId,
// Code: aliResponse.Code,
// },
// StatusCode: resp.StatusCode,
// }, nil
// }
// fullTextResponse := responseAli2OpenAI(&aliResponse)
// jsonResponse, err := json.Marshal(fullTextResponse)
// if err != nil {
// return errorWrapper(err, "marshal_response_body_failed", http.StatusInternalServerError), nil
// }
// c.Writer.Header().Set("Content-Type", "application/json")
// c.Writer.WriteHeader(resp.StatusCode)
// _, err = c.Writer.Write(jsonResponse)
// return nil, &fullTextResponse.Usage
// }

154
relay/adaptor/ali/model.go Normal file
View File

@@ -0,0 +1,154 @@
package ali
import (
"github.com/songquanpeng/one-api/relay/adaptor/openai"
"github.com/songquanpeng/one-api/relay/model"
)
// Message is a single chat message (role + content) in DashScope's
// text-generation request format.
type Message struct {
	Content string `json:"content"`
	Role    string `json:"role"`
}

// Input wraps the message history sent to the text-generation endpoint.
type Input struct {
	//Prompt string `json:"prompt"`
	Messages []Message `json:"messages"`
}

// Parameters carries the optional sampling/generation settings for a
// DashScope text-generation request.
type Parameters struct {
	TopP              float64      `json:"top_p,omitempty"`
	TopK              int          `json:"top_k,omitempty"`
	Seed              uint64       `json:"seed,omitempty"`
	EnableSearch      bool         `json:"enable_search,omitempty"`
	IncrementalOutput bool         `json:"incremental_output,omitempty"`
	MaxTokens         int          `json:"max_tokens,omitempty"`
	Temperature       float64      `json:"temperature,omitempty"`
	ResultFormat      string       `json:"result_format,omitempty"`
	Tools             []model.Tool `json:"tools,omitempty"`
}

// ChatRequest is the request body for DashScope text generation.
type ChatRequest struct {
	Model      string     `json:"model"`
	Input      Input      `json:"input"`
	Parameters Parameters `json:"parameters,omitempty"`
}

// ImageRequest is the request body for DashScope image synthesis
// (text-to-image). ResponseFormat is not part of the upstream API body;
// it records the client's requested output format.
type ImageRequest struct {
	Model string `json:"model"`
	Input struct {
		Prompt         string `json:"prompt"`
		NegativePrompt string `json:"negative_prompt,omitempty"`
	} `json:"input"`
	Parameters struct {
		Size  string `json:"size,omitempty"`
		N     int    `json:"n,omitempty"`
		Steps string `json:"steps,omitempty"`
		Scale string `json:"scale,omitempty"`
	} `json:"parameters,omitempty"`
	ResponseFormat string `json:"response_format,omitempty"`
}

// TaskResponse is the response for an asynchronous DashScope task (both the
// initial submission and the polling endpoint). Output.TaskStatus carries
// the task lifecycle state; Output.Results holds the generated images.
type TaskResponse struct {
	StatusCode int    `json:"status_code,omitempty"`
	RequestId  string `json:"request_id,omitempty"`
	Code       string `json:"code,omitempty"`
	Message    string `json:"message,omitempty"`
	Output     struct {
		TaskId     string `json:"task_id,omitempty"`
		TaskStatus string `json:"task_status,omitempty"`
		Code       string `json:"code,omitempty"`
		Message    string `json:"message,omitempty"`
		Results    []struct {
			B64Image string `json:"b64_image,omitempty"`
			Url      string `json:"url,omitempty"`
			Code     string `json:"code,omitempty"`
			Message  string `json:"message,omitempty"`
		} `json:"results,omitempty"`
		TaskMetrics struct {
			Total     int `json:"TOTAL,omitempty"`
			Succeeded int `json:"SUCCEEDED,omitempty"`
			Failed    int `json:"FAILED,omitempty"`
		} `json:"task_metrics,omitempty"`
	} `json:"output,omitempty"`
	Usage Usage `json:"usage"`
}

// Header is the control envelope of a DashScope WebSocket message.
type Header struct {
	Action       string `json:"action,omitempty"`
	Streaming    string `json:"streaming,omitempty"`
	TaskID       string `json:"task_id,omitempty"`
	Event        string `json:"event,omitempty"`
	ErrorCode    string `json:"error_code,omitempty"`
	ErrorMessage string `json:"error_message,omitempty"`
	Attributes   any    `json:"attributes,omitempty"`
}

// Payload is the task body of a DashScope WebSocket message
// (presumably used by speech-synthesis tasks, given the sample-rate and
// text fields — confirm against the caller).
type Payload struct {
	Model      string `json:"model,omitempty"`
	Task       string `json:"task,omitempty"`
	TaskGroup  string `json:"task_group,omitempty"`
	Function   string `json:"function,omitempty"`
	Parameters struct {
		SampleRate int     `json:"sample_rate,omitempty"`
		Rate       float64 `json:"rate,omitempty"`
		Format     string  `json:"format,omitempty"`
	} `json:"parameters,omitempty"`
	Input struct {
		Text string `json:"text,omitempty"`
	} `json:"input,omitempty"`
	Usage struct {
		Characters int `json:"characters,omitempty"`
	} `json:"usage,omitempty"`
}

// WSSMessage is a complete DashScope WebSocket message: header plus payload.
type WSSMessage struct {
	Header  Header  `json:"header,omitempty"`
	Payload Payload `json:"payload,omitempty"`
}

// EmbeddingRequest is the request body for DashScope text embedding.
type EmbeddingRequest struct {
	Model string `json:"model"`
	Input struct {
		Texts []string `json:"texts"`
	} `json:"input"`
	Parameters *struct {
		TextType string `json:"text_type,omitempty"`
	} `json:"parameters,omitempty"`
}

// Embedding is a single embedding vector with the index of the input text
// it corresponds to.
type Embedding struct {
	Embedding []float64 `json:"embedding"`
	TextIndex int       `json:"text_index"`
}

// EmbeddingResponse is the response body for DashScope text embedding.
// Error is embedded so upstream error fields deserialize alongside results.
type EmbeddingResponse struct {
	Output struct {
		Embeddings []Embedding `json:"embeddings"`
	} `json:"output"`
	Usage Usage `json:"usage"`
	Error
}

// Error is the common error envelope returned by DashScope endpoints.
// A non-empty Code indicates a failed request.
type Error struct {
	Code      string `json:"code"`
	Message   string `json:"message"`
	RequestId string `json:"request_id"`
}

// Usage reports token consumption for a DashScope request.
type Usage struct {
	InputTokens  int `json:"input_tokens"`
	OutputTokens int `json:"output_tokens"`
	TotalTokens  int `json:"total_tokens"`
}

// Output is the result section of a chat response, already shaped as
// OpenAI-style choices.
type Output struct {
	//Text string `json:"text"`
	//FinishReason string `json:"finish_reason"`
	Choices []openai.TextResponseChoice `json:"choices"`
}

// ChatResponse is the response body for DashScope text generation.
// Error is embedded so upstream error fields deserialize alongside results.
type ChatResponse struct {
	Output Output `json:"output"`
	Usage  Usage  `json:"usage"`
	Error
}