Mirror of https://github.com/linux-do/new-api.git (synced 2025-09-17 16:06:38 +08:00)
fix: aws claude

commit 5425b5bfc3, parent 21f32605c8
@@ -24,15 +24,16 @@ type ClaudeMessage struct {
 }
 
 type ClaudeRequest struct {
-	Model         string          `json:"model"`
-	Prompt        string          `json:"prompt,omitempty"`
-	System        string          `json:"system,omitempty"`
-	Messages      []ClaudeMessage `json:"messages,omitempty"`
-	MaxTokens     uint            `json:"max_tokens,omitempty"`
-	StopSequences []string        `json:"stop_sequences,omitempty"`
-	Temperature   float64         `json:"temperature,omitempty"`
-	TopP          float64         `json:"top_p,omitempty"`
-	TopK          int             `json:"top_k,omitempty"`
+	Model             string          `json:"model"`
+	Prompt            string          `json:"prompt,omitempty"`
+	System            string          `json:"system,omitempty"`
+	Messages          []ClaudeMessage `json:"messages,omitempty"`
+	MaxTokens         uint            `json:"max_tokens,omitempty"`
+	MaxTokensToSample uint            `json:"max_tokens_to_sample,omitempty"`
+	StopSequences     []string        `json:"stop_sequences,omitempty"`
+	Temperature       float64         `json:"temperature,omitempty"`
+	TopP              float64         `json:"top_p,omitempty"`
+	TopK              int             `json:"top_k,omitempty"`
 	//ClaudeMetadata `json:"metadata,omitempty"`
 	Stream bool `json:"stream,omitempty"`
 }
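The hunk above adds MaxTokensToSample alongside the existing MaxTokens. Anthropic's legacy Text Completions API (the prompt-based format used for AWS Bedrock Claude complete requests) expects max_tokens_to_sample, while the newer Messages API uses max_tokens; keeping both fields with omitempty lets the same struct serve either endpoint, since the zero-valued field is dropped during marshaling. A minimal sketch of that behavior, with the struct trimmed to the two token-limit fields and example values assumed:

package main

import (
	"encoding/json"
	"fmt"
)

// Trimmed copy of the two token-limit fields from ClaudeRequest above,
// shown only to illustrate how omitempty keeps the unused key out of the payload.
type tokenFields struct {
	MaxTokens         uint `json:"max_tokens,omitempty"`
	MaxTokensToSample uint `json:"max_tokens_to_sample,omitempty"`
}

func main() {
	messagesStyle, _ := json.Marshal(tokenFields{MaxTokens: 1024})         // Messages API style
	completeStyle, _ := json.Marshal(tokenFields{MaxTokensToSample: 1024}) // Text Completions style
	fmt.Println(string(messagesStyle)) // {"max_tokens":1024}
	fmt.Println(string(completeStyle)) // {"max_tokens_to_sample":1024}
}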
@@ -30,15 +30,14 @@ func RequestOpenAI2ClaudeComplete(textRequest dto.GeneralOpenAIRequest) *ClaudeRequest {
 	claudeRequest := ClaudeRequest{
 		Model:         textRequest.Model,
 		Prompt:        "",
-		MaxTokens:     textRequest.MaxTokens,
 		StopSequences: nil,
 		Temperature:   textRequest.Temperature,
 		TopP:          textRequest.TopP,
 		TopK:          textRequest.TopK,
 		Stream:        textRequest.Stream,
 	}
-	if claudeRequest.MaxTokens == 0 {
-		claudeRequest.MaxTokens = 4096
+	if claudeRequest.MaxTokensToSample == 0 {
+		claudeRequest.MaxTokensToSample = 4096
 	}
 	prompt := ""
 	for _, message := range textRequest.Messages {
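In this second hunk, RequestOpenAI2ClaudeComplete no longer copies textRequest.MaxTokens into the struct literal, so MaxTokensToSample starts at its zero value and the guard that follows assigns the 4096 default before the prompt is assembled. A minimal sketch of the payload this produces for the complete-style endpoint, trimmed to fields that appear in the hunk, with the model id and prompt values assumed for illustration:

package main

import (
	"encoding/json"
	"fmt"
)

// Sketch only: a cut-down request mirroring the fields the hunk touches.
type claudeCompleteRequest struct {
	Model             string `json:"model"`
	Prompt            string `json:"prompt,omitempty"`
	MaxTokensToSample uint   `json:"max_tokens_to_sample,omitempty"`
	Stream            bool   `json:"stream,omitempty"`
}

func main() {
	req := claudeCompleteRequest{
		Model:  "anthropic.claude-v2", // assumed example model id
		Prompt: "\n\nHuman: Hello\n\nAssistant:",
	}
	// Same default the commit introduces for the complete-style request.
	if req.MaxTokensToSample == 0 {
		req.MaxTokensToSample = 4096
	}
	body, _ := json.Marshal(req)
	fmt.Println(string(body))
	// {"model":"anthropic.claude-v2","prompt":"\n\nHuman: Hello\n\nAssistant:","max_tokens_to_sample":4096}
}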