fix: 请求阿里通义千问异常 (close #108)

This commit is contained in:
CaIon 2024-03-11 14:35:46 +08:00
parent bf241b218f
commit 97dd80541b
3 changed files with 21 additions and 19 deletions

View File

@@ -10,19 +10,20 @@ import (
) )
type Token struct { type Token struct {
Id int `json:"id"` Id int `json:"id"`
UserId int `json:"user_id"` UserId int `json:"user_id"`
Key string `json:"key" gorm:"type:char(48);uniqueIndex"` Key string `json:"key" gorm:"type:char(48);uniqueIndex"`
Status int `json:"status" gorm:"default:1"` Status int `json:"status" gorm:"default:1"`
Name string `json:"name" gorm:"index" ` Name string `json:"name" gorm:"index" `
CreatedTime int64 `json:"created_time" gorm:"bigint"` CreatedTime int64 `json:"created_time" gorm:"bigint"`
AccessedTime int64 `json:"accessed_time" gorm:"bigint"` AccessedTime int64 `json:"accessed_time" gorm:"bigint"`
ExpiredTime int64 `json:"expired_time" gorm:"bigint;default:-1"` // -1 means never expired ExpiredTime int64 `json:"expired_time" gorm:"bigint;default:-1"` // -1 means never expired
RemainQuota int `json:"remain_quota" gorm:"default:0"` RemainQuota int `json:"remain_quota" gorm:"default:0"`
UnlimitedQuota bool `json:"unlimited_quota" gorm:"default:false"` UnlimitedQuota bool `json:"unlimited_quota" gorm:"default:false"`
ModelLimitsEnabled bool `json:"model_limits_enabled" gorm:"default:false"` ModelLimitsEnabled bool `json:"model_limits_enabled" gorm:"default:false"`
ModelLimits string `json:"model_limits" gorm:"type:varchar(1024);default:''"` ModelLimits string `json:"model_limits" gorm:"type:varchar(1024);default:''"`
UsedQuota int `json:"used_quota" gorm:"default:0"` // used quota UsedQuota int `json:"used_quota" gorm:"default:0"` // used quota
DeletedAt gorm.DeletedAt `gorm:"index"`
} }
func GetAllUserTokens(userId int, startIdx int, num int) ([]*Token, error) { func GetAllUserTokens(userId int, startIdx int, num int) ([]*Token, error) {

View File

@@ -6,8 +6,9 @@ type AliMessage struct {
} }
type AliInput struct { type AliInput struct {
Prompt string `json:"prompt"` Prompt string `json:"prompt,omitempty"`
History []AliMessage `json:"history"` //History []AliMessage `json:"history,omitempty"`
Messages []AliMessage `json:"messages"`
} }
type AliParameters struct { type AliParameters struct {
@@ -20,7 +20,7 @@ type AliParameters struct {
type AliChatRequest struct { type AliChatRequest struct {
Model string `json:"model"` Model string `json:"model"`
Input AliInput `json:"input"` Input AliInput `json:"input,omitempty"`
Parameters AliParameters `json:"parameters,omitempty"` Parameters AliParameters `json:"parameters,omitempty"`
} }

View File

@@ -18,7 +18,7 @@ const EnableSearchModelSuffix = "-internet"
func requestOpenAI2Ali(request dto.GeneralOpenAIRequest) *AliChatRequest { func requestOpenAI2Ali(request dto.GeneralOpenAIRequest) *AliChatRequest {
messages := make([]AliMessage, 0, len(request.Messages)) messages := make([]AliMessage, 0, len(request.Messages))
prompt := "" //prompt := ""
for i := 0; i < len(request.Messages); i++ { for i := 0; i < len(request.Messages); i++ {
message := request.Messages[i] message := request.Messages[i]
messages = append(messages, AliMessage{ messages = append(messages, AliMessage{
@@ -35,8 +35,8 @@ func requestOpenAI2Ali(request dto.GeneralOpenAIRequest) *AliChatRequest {
return &AliChatRequest{ return &AliChatRequest{
Model: request.Model, Model: request.Model,
Input: AliInput{ Input: AliInput{
Prompt: prompt, //Prompt: prompt,
History: messages, Messages: messages,
}, },
Parameters: AliParameters{ Parameters: AliParameters{
IncrementalOutput: request.Stream, IncrementalOutput: request.Stream,