mirror of https://github.com/songquanpeng/one-api.git
synced 2025-09-17 17:16:38 +08:00

fix: set optional fields to pointer type

This commit is contained in:
parent c57ddbf95f
commit a34773b1ee
@@ -137,3 +137,23 @@ func String2Int(str string) int {
     }
     return num
 }
+
+func Float64PtrMax(p *float64, maxValue float64) *float64 {
+    if p == nil {
+        return nil
+    }
+    if *p > maxValue {
+        return &maxValue
+    }
+    return p
+}
+
+func Float64PtrMin(p *float64, minValue float64) *float64 {
+    if p == nil {
+        return nil
+    }
+    if *p < minValue {
+        return &minValue
+    }
+    return p
+}
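Both helpers treat a nil pointer as "parameter not supplied" and pass it through unchanged, so callers can clamp optional fields without a separate nil check. A minimal, self-contained sketch of the intended call pattern (illustration only, not code from this commit; Float64PtrMax is copied from the hunk above):

package main

import "fmt"

// Float64PtrMax caps a set value at maxValue and leaves nil ("not set") alone.
func Float64PtrMax(p *float64, maxValue float64) *float64 {
    if p == nil {
        return nil
    }
    if *p > maxValue {
        return &maxValue
    }
    return p
}

func main() {
    topP := 1.5
    fmt.Println(*Float64PtrMax(&topP, 0.9999)) // 0.9999: values above the bound are capped
    var unset *float64
    fmt.Println(Float64PtrMax(unset, 0.9999) == nil) // true: nil passes through untouched
}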
@@ -36,9 +36,7 @@ func ConvertRequest(request model.GeneralOpenAIRequest) *ChatRequest {
         enableSearch = true
         aliModel = strings.TrimSuffix(aliModel, EnableSearchModelSuffix)
     }
-    if request.TopP >= 1 {
-        request.TopP = 0.9999
-    }
+    request.TopP = helper.Float64PtrMax(request.TopP, 0.9999)
     return &ChatRequest{
         Model: aliModel,
         Input: Input{
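Here helper.Float64PtrMax subsumes the old >= 1 guard: a TopP set above the bound is replaced with a pointer to 0.9999, while a nil TopP (the client never sent the field) flows into the ChatRequest untouched.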
@@ -16,13 +16,13 @@ type Input struct {
 }
 
 type Parameters struct {
-    TopP float64 `json:"top_p,omitempty"`
+    TopP *float64 `json:"top_p,omitempty"`
     TopK int `json:"top_k,omitempty"`
     Seed uint64 `json:"seed,omitempty"`
     EnableSearch bool `json:"enable_search,omitempty"`
     IncrementalOutput bool `json:"incremental_output,omitempty"`
     MaxTokens int `json:"max_tokens,omitempty"`
-    Temperature float64 `json:"temperature,omitempty"`
+    Temperature *float64 `json:"temperature,omitempty"`
     ResultFormat string `json:"result_format,omitempty"`
     Tools []model.Tool `json:"tools,omitempty"`
 }
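This field change is the motivation for the whole commit: with a value type plus omitempty, encoding/json drops an explicit zero, so "temperature 0" and "temperature unset" are indistinguishable on the wire. With a pointer, omitempty only checks for nil, so the two cases stay apart. A self-contained sketch (hypothetical params struct, for illustration only):

package main

import (
    "encoding/json"
    "fmt"
)

type params struct {
    Temperature *float64 `json:"temperature,omitempty"`
}

func main() {
    b, _ := json.Marshal(params{})
    fmt.Println(string(b)) // {}: nil pointer, field omitted

    zero := 0.0
    b, _ = json.Marshal(params{Temperature: &zero})
    fmt.Println(string(b)) // {"temperature":0}: an explicit zero survives
}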
@@ -48,8 +48,8 @@ type Request struct {
     MaxTokens int `json:"max_tokens,omitempty"`
     StopSequences []string `json:"stop_sequences,omitempty"`
     Stream bool `json:"stream,omitempty"`
-    Temperature float64 `json:"temperature,omitempty"`
-    TopP float64 `json:"top_p,omitempty"`
+    Temperature *float64 `json:"temperature,omitempty"`
+    TopP *float64 `json:"top_p,omitempty"`
     TopK int `json:"top_k,omitempty"`
     Tools []Tool `json:"tools,omitempty"`
     ToolChoice any `json:"tool_choice,omitempty"`
@@ -11,8 +11,8 @@ type Request struct {
     Messages []anthropic.Message `json:"messages"`
     System string `json:"system,omitempty"`
     MaxTokens int `json:"max_tokens,omitempty"`
-    Temperature float64 `json:"temperature,omitempty"`
-    TopP float64 `json:"top_p,omitempty"`
+    Temperature *float64 `json:"temperature,omitempty"`
+    TopP *float64 `json:"top_p,omitempty"`
     TopK int `json:"top_k,omitempty"`
     StopSequences []string `json:"stop_sequences,omitempty"`
     Tools []anthropic.Tool `json:"tools,omitempty"`
@@ -4,10 +4,10 @@ package aws
 //
 // https://docs.aws.amazon.com/bedrock/latest/userguide/model-parameters-meta.html
 type Request struct {
     Prompt string `json:"prompt"`
     MaxGenLen int `json:"max_gen_len,omitempty"`
-    Temperature float64 `json:"temperature,omitempty"`
-    TopP float64 `json:"top_p,omitempty"`
+    Temperature *float64 `json:"temperature,omitempty"`
+    TopP *float64 `json:"top_p,omitempty"`
 }
 
 // Response is the response from AWS Llama3
@@ -35,9 +35,9 @@ type Message struct {
 
 type ChatRequest struct {
     Messages []Message `json:"messages"`
-    Temperature float64 `json:"temperature,omitempty"`
-    TopP float64 `json:"top_p,omitempty"`
-    PenaltyScore float64 `json:"penalty_score,omitempty"`
+    Temperature *float64 `json:"temperature,omitempty"`
+    TopP *float64 `json:"top_p,omitempty"`
+    PenaltyScore *float64 `json:"penalty_score,omitempty"`
     Stream bool `json:"stream,omitempty"`
     System string `json:"system,omitempty"`
     DisableSearch bool `json:"disable_search,omitempty"`
@@ -9,5 +9,5 @@ type Request struct {
     Prompt string `json:"prompt,omitempty"`
     Raw bool `json:"raw,omitempty"`
     Stream bool `json:"stream,omitempty"`
-    Temperature float64 `json:"temperature,omitempty"`
+    Temperature *float64 `json:"temperature,omitempty"`
 }
@@ -43,7 +43,7 @@ func ConvertRequest(textRequest model.GeneralOpenAIRequest) *Request {
         K: textRequest.TopK,
         Stream: textRequest.Stream,
         FrequencyPenalty: textRequest.FrequencyPenalty,
-        PresencePenalty: textRequest.FrequencyPenalty,
+        PresencePenalty: textRequest.PresencePenalty,
         Seed: int(textRequest.Seed),
     }
     if cohereRequest.Model == "" {
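Besides the type migration, this hunk fixes a separate copy-paste bug: PresencePenalty was previously populated from textRequest.FrequencyPenalty.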
@@ -10,15 +10,15 @@ type Request struct {
     PromptTruncation string `json:"prompt_truncation,omitempty"` // defaults to "AUTO"
     Connectors []Connector `json:"connectors,omitempty"`
     Documents []Document `json:"documents,omitempty"`
-    Temperature float64 `json:"temperature,omitempty"` // defaults to 0.3
+    Temperature *float64 `json:"temperature,omitempty"` // defaults to 0.3
     MaxTokens int `json:"max_tokens,omitempty"`
     MaxInputTokens int `json:"max_input_tokens,omitempty"`
     K int `json:"k,omitempty"` // defaults to 0
-    P float64 `json:"p,omitempty"` // defaults to 0.75
+    P *float64 `json:"p,omitempty"` // defaults to 0.75
     Seed int `json:"seed,omitempty"`
     StopSequences []string `json:"stop_sequences,omitempty"`
-    FrequencyPenalty float64 `json:"frequency_penalty,omitempty"` // defaults to 0.0
-    PresencePenalty float64 `json:"presence_penalty,omitempty"` // defaults to 0.0
+    FrequencyPenalty *float64 `json:"frequency_penalty,omitempty"` // defaults to 0.0
+    PresencePenalty *float64 `json:"presence_penalty,omitempty"` // defaults to 0.0
     Tools []Tool `json:"tools,omitempty"`
     ToolResults []ToolResult `json:"tool_results,omitempty"`
 }
@@ -67,8 +67,8 @@ type ChatTools struct {
 type ChatGenerationConfig struct {
     ResponseMimeType string `json:"responseMimeType,omitempty"`
     ResponseSchema any `json:"responseSchema,omitempty"`
-    Temperature float64 `json:"temperature,omitempty"`
-    TopP float64 `json:"topP,omitempty"`
+    Temperature *float64 `json:"temperature,omitempty"`
+    TopP *float64 `json:"topP,omitempty"`
     TopK float64 `json:"topK,omitempty"`
     MaxOutputTokens int `json:"maxOutputTokens,omitempty"`
     CandidateCount int `json:"candidateCount,omitempty"`
@@ -1,14 +1,14 @@
 package ollama
 
 type Options struct {
     Seed int `json:"seed,omitempty"`
-    Temperature float64 `json:"temperature,omitempty"`
+    Temperature *float64 `json:"temperature,omitempty"`
     TopK int `json:"top_k,omitempty"`
-    TopP float64 `json:"top_p,omitempty"`
-    FrequencyPenalty float64 `json:"frequency_penalty,omitempty"`
-    PresencePenalty float64 `json:"presence_penalty,omitempty"`
+    TopP *float64 `json:"top_p,omitempty"`
+    FrequencyPenalty *float64 `json:"frequency_penalty,omitempty"`
+    PresencePenalty *float64 `json:"presence_penalty,omitempty"`
     NumPredict int `json:"num_predict,omitempty"`
     NumCtx int `json:"num_ctx,omitempty"`
 }
 
 type Message struct {
@@ -19,11 +19,11 @@ type Prompt struct {
 }
 
 type ChatRequest struct {
     Prompt Prompt `json:"prompt"`
-    Temperature float64 `json:"temperature,omitempty"`
+    Temperature *float64 `json:"temperature,omitempty"`
     CandidateCount int `json:"candidateCount,omitempty"`
-    TopP float64 `json:"topP,omitempty"`
+    TopP *float64 `json:"topP,omitempty"`
     TopK int `json:"topK,omitempty"`
 }
 
 type Error struct {
@@ -39,8 +39,8 @@ func ConvertRequest(request model.GeneralOpenAIRequest) *ChatRequest {
         Model: &request.Model,
         Stream: &request.Stream,
         Messages: messages,
-        TopP: &request.TopP,
-        Temperature: &request.Temperature,
+        TopP: request.TopP,
+        Temperature: request.Temperature,
     }
 }
 
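The & is dropped because request.TopP and request.Temperature are themselves *float64 after this commit; taking their address would yield a **float64 and no longer match the pointer-typed target fields.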
@@ -11,8 +11,8 @@ type Request struct {
     MaxTokens int `json:"max_tokens,omitempty"`
     StopSequences []string `json:"stop_sequences,omitempty"`
     Stream bool `json:"stream,omitempty"`
-    Temperature float64 `json:"temperature,omitempty"`
-    TopP float64 `json:"top_p,omitempty"`
+    Temperature *float64 `json:"temperature,omitempty"`
+    TopP *float64 `json:"top_p,omitempty"`
     TopK int `json:"top_k,omitempty"`
     Tools []anthropic.Tool `json:"tools,omitempty"`
     ToolChoice any `json:"tool_choice,omitempty"`
@@ -19,11 +19,11 @@ type ChatRequest struct {
     } `json:"header"`
     Parameter struct {
         Chat struct {
            Domain string `json:"domain,omitempty"`
-           Temperature float64 `json:"temperature,omitempty"`
+           Temperature *float64 `json:"temperature,omitempty"`
            TopK int `json:"top_k,omitempty"`
            MaxTokens int `json:"max_tokens,omitempty"`
            Auditing bool `json:"auditing,omitempty"`
         } `json:"chat"`
     } `json:"parameter"`
     Payload struct {
@@ -4,13 +4,13 @@ import (
     "errors"
     "fmt"
     "github.com/gin-gonic/gin"
+    "github.com/songquanpeng/one-api/common/helper"
     "github.com/songquanpeng/one-api/relay/adaptor"
     "github.com/songquanpeng/one-api/relay/adaptor/openai"
     "github.com/songquanpeng/one-api/relay/meta"
     "github.com/songquanpeng/one-api/relay/model"
     "github.com/songquanpeng/one-api/relay/relaymode"
     "io"
-    "math"
     "net/http"
     "strings"
 )
@@ -65,13 +65,13 @@ func (a *Adaptor) ConvertRequest(c *gin.Context, relayMode int, request *model.G
         baiduEmbeddingRequest, err := ConvertEmbeddingRequest(*request)
         return baiduEmbeddingRequest, err
     default:
-        // TopP (0.0, 1.0)
-        request.TopP = math.Min(0.99, request.TopP)
-        request.TopP = math.Max(0.01, request.TopP)
+        // TopP [0.0, 1.0]
+        request.TopP = helper.Float64PtrMax(request.TopP, 1)
+        request.TopP = helper.Float64PtrMin(request.TopP, 0)
 
-        // Temperature (0.0, 1.0)
-        request.Temperature = math.Min(0.99, request.Temperature)
-        request.Temperature = math.Max(0.01, request.Temperature)
+        // Temperature [0.0, 1.0]
+        request.Temperature = helper.Float64PtrMax(request.Temperature, 1)
+        request.Temperature = helper.Float64PtrMin(request.Temperature, 0)
         a.SetVersionByModeName(request.Model)
         if a.APIVersion == "v4" {
             return request, nil
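A note on the helper names, which read inverted at first glance: Float64PtrMax enforces an upper bound (the pointer analogue of math.Min here) and Float64PtrMin enforces a lower bound (the analogue of math.Max). The behavior also changes slightly: the old code forced every request into [0.01, 0.99], whereas now set values are clamped to [0, 1] and parameters the client never sent stay nil and are omitted from the upstream request.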
@@ -12,8 +12,8 @@ type Message struct {
 
 type Request struct {
     Prompt []Message `json:"prompt"`
-    Temperature float64 `json:"temperature,omitempty"`
-    TopP float64 `json:"top_p,omitempty"`
+    Temperature *float64 `json:"temperature,omitempty"`
+    TopP *float64 `json:"top_p,omitempty"`
     RequestId string `json:"request_id,omitempty"`
     Incremental bool `json:"incremental,omitempty"`
 }
@@ -26,17 +26,17 @@ type GeneralOpenAIRequest struct {
     Model string `json:"model,omitempty"`
     Modalities []string `json:"modalities,omitempty"`
     Audio *Audio `json:"audio,omitempty"`
-    FrequencyPenalty float64 `json:"frequency_penalty,omitempty"`
+    FrequencyPenalty *float64 `json:"frequency_penalty,omitempty"`
     MaxTokens int `json:"max_tokens,omitempty"`
     N int `json:"n,omitempty"`
-    PresencePenalty float64 `json:"presence_penalty,omitempty"`
+    PresencePenalty *float64 `json:"presence_penalty,omitempty"`
     ResponseFormat *ResponseFormat `json:"response_format,omitempty"`
     Seed float64 `json:"seed,omitempty"`
     Stop any `json:"stop,omitempty"`
     Stream bool `json:"stream,omitempty"`
     StreamOptions *StreamOptions `json:"stream_options,omitempty"`
-    Temperature float64 `json:"temperature"`
-    TopP float64 `json:"top_p,omitempty"`
+    Temperature *float64 `json:"temperature,omitempty"`
+    TopP *float64 `json:"top_p,omitempty"`
     TopK int `json:"top_k,omitempty"`
     Tools []Tool `json:"tools,omitempty"`
     ToolChoice any `json:"tool_choice,omitempty"`
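Note that Temperature also gains omitempty in this hunk, so an unset (nil) temperature disappears from the serialized request instead of being sent as 0. One consequence for callers: Go has no address-of for literals, so code that previously wrote Temperature: 0.7 now needs an intermediate variable or a small helper. A sketch of the pattern (float64Ptr and the request stand-in struct are hypothetical illustrations, not part of this commit):

package main

import "fmt"

// float64Ptr is a hypothetical caller-side helper: Go cannot take the
// address of a literal, so pointer-typed optional fields need an
// intermediate variable or a helper like this one.
func float64Ptr(v float64) *float64 { return &v }

type request struct { // stand-in for model.GeneralOpenAIRequest
    Temperature *float64 `json:"temperature,omitempty"`
}

func main() {
    req := request{Temperature: float64Ptr(0.7)} // explicit value; nil would mean "unset"
    fmt.Println(*req.Temperature)
}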
|
Loading…
Reference in New Issue
Block a user