Repository: https://github.com/linux-do/new-api.git (mirror)
fix: aws claude
@@ -29,6 +29,7 @@ type ClaudeRequest struct {
 	System            string          `json:"system,omitempty"`
 	Messages          []ClaudeMessage `json:"messages,omitempty"`
 	MaxTokens         uint            `json:"max_tokens,omitempty"`
+	MaxTokensToSample uint            `json:"max_tokens_to_sample,omitempty"`
 	StopSequences     []string        `json:"stop_sequences,omitempty"`
 	Temperature       float64         `json:"temperature,omitempty"`
 	TopP              float64         `json:"top_p,omitempty"`
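
Note on the first hunk: Anthropic's legacy Text Completions API, which is the request shape the AWS Bedrock Claude text models accept, takes max_tokens_to_sample, while the newer Messages API takes max_tokens, so the completion-style request gains its own field. Below is a minimal sketch of how the patched struct serializes; the model/prompt JSON tags and the Bedrock model ID are illustrative assumptions, only the two max-token tags come from the diff above.

package main

import (
	"encoding/json"
	"fmt"
)

// Sketch of the patched request struct, trimmed to the fields the fix
// touches. The model/prompt tags are assumed; the two max-token tags
// match the diff.
type ClaudeRequest struct {
	Model             string `json:"model,omitempty"`
	Prompt            string `json:"prompt,omitempty"`
	MaxTokens         uint   `json:"max_tokens,omitempty"`
	MaxTokensToSample uint   `json:"max_tokens_to_sample,omitempty"`
}

func main() {
	// The completion-style request sets max_tokens_to_sample; MaxTokens
	// stays zero and is dropped from the JSON thanks to omitempty.
	req := ClaudeRequest{
		Model:             "anthropic.claude-v2", // illustrative Bedrock model ID
		Prompt:            "\n\nHuman: Hello\n\nAssistant:",
		MaxTokensToSample: 4096,
	}
	b, _ := json.Marshal(req)
	fmt.Println(string(b))
	// {"model":"anthropic.claude-v2","prompt":"\n\nHuman: Hello\n\nAssistant:","max_tokens_to_sample":4096}
}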
@@ -30,15 +30,14 @@ func RequestOpenAI2ClaudeComplete(textRequest dto.GeneralOpenAIRequest) *ClaudeR
 	claudeRequest := ClaudeRequest{
 		Model:         textRequest.Model,
 		Prompt:        "",
-		MaxTokens:     textRequest.MaxTokens,
 		StopSequences: nil,
 		Temperature:   textRequest.Temperature,
 		TopP:          textRequest.TopP,
 		TopK:          textRequest.TopK,
 		Stream:        textRequest.Stream,
 	}
-	if claudeRequest.MaxTokens == 0 {
-		claudeRequest.MaxTokens = 4096
+	if claudeRequest.MaxTokensToSample == 0 {
+		claudeRequest.MaxTokensToSample = 4096
 	}
 	prompt := ""
 	for _, message := range textRequest.Messages {
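
Note on the second hunk: the converter stops copying the OpenAI max_tokens into the old MaxTokens field, and the zero-value default of 4096 now applies to MaxTokensToSample instead; whether the caller's max_tokens gets mapped onto MaxTokensToSample elsewhere in the function is outside this hunk. Below is a self-contained sketch of just that default logic, with the requested parameter as a hypothetical stand-in for such a mapping.

package main

import "fmt"

// Stand-in for the request type; the real ClaudeRequest lives in the
// repository and has many more fields.
type claudeCompleteRequest struct {
	MaxTokensToSample uint
}

// applySampleDefault mirrors the patched zero-value check: an unset cap
// falls back to 4096 tokens. The requested parameter is a hypothetical
// stand-in for wherever the OpenAI max_tokens would be copied in; that
// step is not shown in the hunk above.
func applySampleDefault(req *claudeCompleteRequest, requested uint) {
	if requested != 0 {
		req.MaxTokensToSample = requested
	}
	if req.MaxTokensToSample == 0 {
		req.MaxTokensToSample = 4096
	}
}

func main() {
	var a, b claudeCompleteRequest
	applySampleDefault(&a, 0)   // no explicit cap from the OpenAI request
	applySampleDefault(&b, 512) // caller-supplied max_tokens
	fmt.Println(a.MaxTokensToSample, b.MaxTokensToSample) // prints: 4096 512
}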