Merge remote-tracking branch 'origin/upstream/main'

Laisky.Cai
2024-11-20 02:09:28 +00:00
42 changed files with 490 additions and 350 deletions

View File

@@ -36,9 +36,7 @@ func ConvertRequest(request model.GeneralOpenAIRequest) *ChatRequest {
         enableSearch = true
         aliModel = strings.TrimSuffix(aliModel, EnableSearchModelSuffix)
     }
-    if request.TopP >= 1 {
-        request.TopP = 0.9999
-    }
+    request.TopP = helper.Float64PtrMax(request.TopP, 0.9999)
     return &ChatRequest{
         Model: aliModel,
         Input: Input{
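
Note: helper.Float64PtrMax is used here but not defined in this diff. A minimal sketch of what it and its Float64PtrMin counterpart (used by the zhipu adaptor further down) presumably look like — nil-preserving clamps over *float64; the actual code in common/helper may differ:

package helper

// Float64PtrMax presumably caps a *float64 at an upper bound, passing nil
// through untouched so an unset field stays unset (and is still dropped by
// `omitempty` when the request is marshaled). Sketch only, not the verified
// implementation.
func Float64PtrMax(p *float64, max float64) *float64 {
    if p == nil {
        return nil // unset stays unset
    }
    if *p > max {
        return &max // cap at the upper bound
    }
    return p
}

// Float64PtrMin is the symmetric lower-bound clamp.
func Float64PtrMin(p *float64, min float64) *float64 {
    if p == nil {
        return nil
    }
    if *p < min {
        return &min
    }
    return p
}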

View File

@@ -16,13 +16,13 @@ type Input struct {
 }
 
 type Parameters struct {
-    TopP              float64      `json:"top_p,omitempty"`
+    TopP              *float64     `json:"top_p,omitempty"`
     TopK              int          `json:"top_k,omitempty"`
     Seed              uint64       `json:"seed,omitempty"`
     EnableSearch      bool         `json:"enable_search,omitempty"`
     IncrementalOutput bool         `json:"incremental_output,omitempty"`
     MaxTokens         int          `json:"max_tokens,omitempty"`
-    Temperature       float64      `json:"temperature,omitempty"`
+    Temperature       *float64     `json:"temperature,omitempty"`
     ResultFormat      string       `json:"result_format,omitempty"`
     Tools             []model.Tool `json:"tools,omitempty"`
 }
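
The float64 → *float64 switch above repeats across most adapters in this commit. The motivation is Go's `omitempty` semantics: on a plain float64, an explicit 0 is indistinguishable from "not set" and is dropped from the JSON, so a caller explicitly requesting temperature 0 silently falls back to the provider default. A self-contained demo (type names are illustrative, not from the repo):

package main

import (
    "encoding/json"
    "fmt"
)

type valueParams struct {
    Temperature float64 `json:"temperature,omitempty"`
}

type pointerParams struct {
    Temperature *float64 `json:"temperature,omitempty"`
}

func main() {
    zero := 0.0

    v, _ := json.Marshal(valueParams{Temperature: 0})
    fmt.Println(string(v)) // {} — the requested 0 is lost

    p, _ := json.Marshal(pointerParams{Temperature: &zero})
    fmt.Println(string(p)) // {"temperature":0} — 0 survives

    q, _ := json.Marshal(pointerParams{Temperature: nil})
    fmt.Println(string(q)) // {} — nil means "not set"
}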

View File

@@ -3,6 +3,7 @@ package anthropic
 var ModelList = []string{
     "claude-instant-1.2", "claude-2.0", "claude-2.1",
     "claude-3-haiku-20240307",
+    "claude-3-5-haiku-20241022",
     "claude-3-sonnet-20240229",
     "claude-3-opus-20240229",
     "claude-3-5-sonnet-20240620",

View File

@@ -48,8 +48,8 @@ type Request struct {
     MaxTokens     int      `json:"max_tokens,omitempty"`
     StopSequences []string `json:"stop_sequences,omitempty"`
     Stream        bool     `json:"stream,omitempty"`
-    Temperature   float64  `json:"temperature,omitempty"`
-    TopP          float64  `json:"top_p,omitempty"`
+    Temperature   *float64 `json:"temperature,omitempty"`
+    TopP          *float64 `json:"top_p,omitempty"`
     TopK          int      `json:"top_k,omitempty"`
     Tools         []Tool   `json:"tools,omitempty"`
     ToolChoice    any      `json:"tool_choice,omitempty"`

View File

@@ -11,8 +11,8 @@ type Request struct {
     Messages      []anthropic.Message `json:"messages"`
     System        string              `json:"system,omitempty"`
     MaxTokens     int                 `json:"max_tokens,omitempty"`
-    Temperature   float64             `json:"temperature,omitempty"`
-    TopP          float64             `json:"top_p,omitempty"`
+    Temperature   *float64            `json:"temperature,omitempty"`
+    TopP          *float64            `json:"top_p,omitempty"`
     TopK          int                 `json:"top_k,omitempty"`
     StopSequences []string            `json:"stop_sequences,omitempty"`
     Tools         []anthropic.Tool    `json:"tools,omitempty"`

View File

@@ -4,10 +4,10 @@ package aws
 //
 // https://docs.aws.amazon.com/bedrock/latest/userguide/model-parameters-meta.html
 type Request struct {
-    Prompt      string  `json:"prompt"`
-    MaxGenLen   int     `json:"max_gen_len,omitempty"`
-    Temperature float64 `json:"temperature,omitempty"`
-    TopP        float64 `json:"top_p,omitempty"`
+    Prompt      string   `json:"prompt"`
+    MaxGenLen   int      `json:"max_gen_len,omitempty"`
+    Temperature *float64 `json:"temperature,omitempty"`
+    TopP        *float64 `json:"top_p,omitempty"`
 }
 
 // Response is the response from AWS Llama3

View File

@@ -35,9 +35,9 @@ type Message struct {
 
 type ChatRequest struct {
     Messages        []Message `json:"messages"`
-    Temperature     float64   `json:"temperature,omitempty"`
-    TopP            float64   `json:"top_p,omitempty"`
-    PenaltyScore    float64   `json:"penalty_score,omitempty"`
+    Temperature     *float64  `json:"temperature,omitempty"`
+    TopP            *float64  `json:"top_p,omitempty"`
+    PenaltyScore    *float64  `json:"penalty_score,omitempty"`
     Stream          bool      `json:"stream,omitempty"`
     System          string    `json:"system,omitempty"`
     DisableSearch   bool      `json:"disable_search,omitempty"`

View File

@@ -9,5 +9,5 @@ type Request struct {
     Prompt      string   `json:"prompt,omitempty"`
     Raw         bool     `json:"raw,omitempty"`
     Stream      bool     `json:"stream,omitempty"`
-    Temperature float64  `json:"temperature,omitempty"`
+    Temperature *float64 `json:"temperature,omitempty"`
 }

View File

@@ -43,7 +43,7 @@ func ConvertRequest(textRequest model.GeneralOpenAIRequest) *Request {
         K:                textRequest.TopK,
         Stream:           textRequest.Stream,
         FrequencyPenalty: textRequest.FrequencyPenalty,
-        PresencePenalty:  textRequest.FrequencyPenalty,
+        PresencePenalty:  textRequest.PresencePenalty,
         Seed:             int(textRequest.Seed),
     }
     if cohereRequest.Model == "" {

View File

@@ -10,15 +10,15 @@ type Request struct {
     PromptTruncation string       `json:"prompt_truncation,omitempty"` // defaults to "AUTO"
     Connectors       []Connector  `json:"connectors,omitempty"`
     Documents        []Document   `json:"documents,omitempty"`
-    Temperature      float64      `json:"temperature,omitempty"` // defaults to 0.3
+    Temperature      *float64     `json:"temperature,omitempty"` // defaults to 0.3
     MaxTokens        int          `json:"max_tokens,omitempty"`
     MaxInputTokens   int          `json:"max_input_tokens,omitempty"`
     K                int          `json:"k,omitempty"` // defaults to 0
-    P                float64      `json:"p,omitempty"` // defaults to 0.75
+    P                *float64     `json:"p,omitempty"` // defaults to 0.75
     Seed             int          `json:"seed,omitempty"`
     StopSequences    []string     `json:"stop_sequences,omitempty"`
-    FrequencyPenalty float64      `json:"frequency_penalty,omitempty"` // defaults to 0.0
-    PresencePenalty  float64      `json:"presence_penalty,omitempty"` // defaults to 0.0
+    FrequencyPenalty *float64     `json:"frequency_penalty,omitempty"` // defaults to 0.0
+    PresencePenalty  *float64     `json:"presence_penalty,omitempty"` // defaults to 0.0
     Tools            []Tool       `json:"tools,omitempty"`
     ToolResults      []ToolResult `json:"tool_results,omitempty"`
 }

View File

@@ -67,8 +67,8 @@ type ChatTools struct {
 type ChatGenerationConfig struct {
     ResponseMimeType string   `json:"responseMimeType,omitempty"`
     ResponseSchema   any      `json:"responseSchema,omitempty"`
-    Temperature      float64  `json:"temperature,omitempty"`
-    TopP             float64  `json:"topP,omitempty"`
+    Temperature      *float64 `json:"temperature,omitempty"`
+    TopP             *float64 `json:"topP,omitempty"`
     TopK             float64  `json:"topK,omitempty"`
     MaxOutputTokens  int      `json:"maxOutputTokens,omitempty"`
     CandidateCount   int      `json:"candidateCount,omitempty"`

View File

@@ -1,14 +1,14 @@
 package ollama
 
 type Options struct {
-    Seed             int     `json:"seed,omitempty"`
-    Temperature      float64 `json:"temperature,omitempty"`
-    TopK             int     `json:"top_k,omitempty"`
-    TopP             float64 `json:"top_p,omitempty"`
-    FrequencyPenalty float64 `json:"frequency_penalty,omitempty"`
-    PresencePenalty  float64 `json:"presence_penalty,omitempty"`
-    NumPredict       int     `json:"num_predict,omitempty"`
-    NumCtx           int     `json:"num_ctx,omitempty"`
+    Seed             int      `json:"seed,omitempty"`
+    Temperature      *float64 `json:"temperature,omitempty"`
+    TopK             int      `json:"top_k,omitempty"`
+    TopP             *float64 `json:"top_p,omitempty"`
+    FrequencyPenalty *float64 `json:"frequency_penalty,omitempty"`
+    PresencePenalty  *float64 `json:"presence_penalty,omitempty"`
+    NumPredict       int      `json:"num_predict,omitempty"`
+    NumCtx           int      `json:"num_ctx,omitempty"`
 }
 
 type Message struct {

View File

@@ -11,9 +11,10 @@ import (
     "github.com/songquanpeng/one-api/relay/adaptor/mistral"
     "github.com/songquanpeng/one-api/relay/adaptor/moonshot"
     "github.com/songquanpeng/one-api/relay/adaptor/novita"
+    "github.com/songquanpeng/one-api/relay/adaptor/siliconflow"
     "github.com/songquanpeng/one-api/relay/adaptor/stepfun"
     "github.com/songquanpeng/one-api/relay/adaptor/togetherai"
-    "github.com/songquanpeng/one-api/relay/adaptor/siliconflow"
+    "github.com/songquanpeng/one-api/relay/adaptor/xai"
     "github.com/songquanpeng/one-api/relay/channeltype"
 )
@@ -32,6 +33,7 @@ var CompatibleChannels = []int{
     channeltype.TogetherAI,
     channeltype.Novita,
     channeltype.SiliconFlow,
+    channeltype.XAI,
 }
 
 func GetCompatibleChannelMeta(channelType int) (string, []string) {
@@ -64,6 +66,8 @@ func GetCompatibleChannelMeta(channelType int) (string, []string) {
         return "novita", novita.ModelList
     case channeltype.SiliconFlow:
         return "siliconflow", siliconflow.ModelList
+    case channeltype.XAI:
+        return "xai", xai.ModelList
     default:
         return "openai", ModelList
     }

View File

@@ -19,11 +19,11 @@ type Prompt struct {
 }
 
 type ChatRequest struct {
-    Prompt         Prompt  `json:"prompt"`
-    Temperature    float64 `json:"temperature,omitempty"`
-    CandidateCount int     `json:"candidateCount,omitempty"`
-    TopP           float64 `json:"topP,omitempty"`
-    TopK           int     `json:"topK,omitempty"`
+    Prompt         Prompt   `json:"prompt"`
+    Temperature    *float64 `json:"temperature,omitempty"`
+    CandidateCount int      `json:"candidateCount,omitempty"`
+    TopP           *float64 `json:"topP,omitempty"`
+    TopK           int      `json:"topK,omitempty"`
 }
 
 type Error struct {

View File

@@ -39,8 +39,8 @@ func ConvertRequest(request model.GeneralOpenAIRequest) *ChatRequest {
         Model:    &request.Model,
         Stream:   &request.Stream,
         Messages: messages,
-        TopP:        &request.TopP,
-        Temperature: &request.Temperature,
+        TopP:        request.TopP,
+        Temperature: request.Temperature,
     }
 }

View File

@@ -11,8 +11,8 @@ type Request struct {
     MaxTokens     int              `json:"max_tokens,omitempty"`
     StopSequences []string         `json:"stop_sequences,omitempty"`
     Stream        bool             `json:"stream,omitempty"`
-    Temperature   float64          `json:"temperature,omitempty"`
-    TopP          float64          `json:"top_p,omitempty"`
+    Temperature   *float64         `json:"temperature,omitempty"`
+    TopP          *float64         `json:"top_p,omitempty"`
     TopK          int              `json:"top_k,omitempty"`
     Tools         []anthropic.Tool `json:"tools,omitempty"`
     ToolChoice    any              `json:"tool_choice,omitempty"`

View File

@@ -0,0 +1,5 @@
+package xai
+
+var ModelList = []string{
+    "grok-beta",
+}

View File

@@ -283,7 +283,7 @@ func parseAPIVersionByModelName(modelName string) string {
 func apiVersion2domain(apiVersion string) string {
     switch apiVersion {
     case "v1.1":
-        return "general"
+        return "lite"
     case "v2.1":
         return "generalv2"
     case "v3.1":

View File

@@ -19,11 +19,11 @@ type ChatRequest struct {
     } `json:"header"`
     Parameter struct {
         Chat struct {
-            Domain      string  `json:"domain,omitempty"`
-            Temperature float64 `json:"temperature,omitempty"`
-            TopK        int     `json:"top_k,omitempty"`
-            MaxTokens   int     `json:"max_tokens,omitempty"`
-            Auditing    bool    `json:"auditing,omitempty"`
+            Domain      string   `json:"domain,omitempty"`
+            Temperature *float64 `json:"temperature,omitempty"`
+            TopK        int      `json:"top_k,omitempty"`
+            MaxTokens   int      `json:"max_tokens,omitempty"`
+            Auditing    bool     `json:"auditing,omitempty"`
         } `json:"chat"`
     } `json:"parameter"`
     Payload struct {

View File

@@ -4,11 +4,11 @@ import (
     "errors"
     "fmt"
     "io"
-    "math"
     "net/http"
     "strings"
 
     "github.com/gin-gonic/gin"
+    "github.com/songquanpeng/one-api/common/helper"
     "github.com/songquanpeng/one-api/relay/adaptor"
     "github.com/songquanpeng/one-api/relay/adaptor/openai"
     "github.com/songquanpeng/one-api/relay/meta"
@@ -66,13 +66,13 @@ func (a *Adaptor) ConvertRequest(c *gin.Context, relayMode int, request *model.G
         baiduEmbeddingRequest, err := ConvertEmbeddingRequest(*request)
         return baiduEmbeddingRequest, err
     default:
-        // TopP (0.0, 1.0)
-        request.TopP = math.Min(0.99, request.TopP)
-        request.TopP = math.Max(0.01, request.TopP)
+        // TopP [0.0, 1.0]
+        request.TopP = helper.Float64PtrMax(request.TopP, 1)
+        request.TopP = helper.Float64PtrMin(request.TopP, 0)
 
-        // Temperature (0.0, 1.0)
-        request.Temperature = math.Min(0.99, request.Temperature)
-        request.Temperature = math.Max(0.01, request.Temperature)
+        // Temperature [0.0, 1.0]
+        request.Temperature = helper.Float64PtrMax(request.Temperature, 1)
+        request.Temperature = helper.Float64PtrMin(request.Temperature, 0)
         a.SetVersionByModeName(request.Model)
         if a.APIVersion == "v4" {
             return request, nil
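
One behavior change worth noting in the hunk above: math.Min/math.Max always produced a number, so an unset TopP (zero value) used to be forced to 0.01, while the pointer clamps leave nil alone and omitempty then drops the field entirely. A runnable illustration, using a hypothetical inline clamp equivalent to the helpers sketched earlier:

package main

import "fmt"

// clampPtr mirrors the assumed Float64PtrMax/Float64PtrMin pair as a single
// nil-preserving clamp (hypothetical, for illustration only).
func clampPtr(p *float64, lo, hi float64) *float64 {
    if p == nil {
        return nil // no value requested: the field will be omitted
    }
    v := *p
    if v > hi {
        v = hi
    }
    if v < lo {
        v = lo
    }
    return &v
}

func main() {
    var unset *float64
    fmt.Println(clampPtr(unset, 0, 1)) // <nil> — "top_p" stays out of the request

    tooHigh := 1.7
    fmt.Println(*clampPtr(&tooHigh, 0, 1)) // 1 — out-of-range value clamped
}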

View File

@@ -13,8 +13,8 @@ type Message struct {
 
 type Request struct {
     Prompt      []Message `json:"prompt"`
-    Temperature float64   `json:"temperature,omitempty"`
-    TopP        float64   `json:"top_p,omitempty"`
+    Temperature *float64  `json:"temperature,omitempty"`
+    TopP        *float64  `json:"top_p,omitempty"`
     RequestId   string    `json:"request_id,omitempty"`
     Incremental bool      `json:"incremental,omitempty"`
 }