fix: claude max_tokens

CaIon 2024-04-23 12:19:23 +08:00
parent 9294127686
commit 53e8790024
3 changed files with 20 additions and 21 deletions


@@ -24,16 +24,15 @@ type ClaudeMessage struct {
 }
 
 type ClaudeRequest struct {
 	Model             string          `json:"model"`
 	Prompt            string          `json:"prompt,omitempty"`
 	System            string          `json:"system,omitempty"`
 	Messages          []ClaudeMessage `json:"messages,omitempty"`
-	MaxTokensToSample uint            `json:"max_tokens_to_sample,omitempty"`
 	MaxTokens         uint            `json:"max_tokens,omitempty"`
 	StopSequences     []string        `json:"stop_sequences,omitempty"`
 	Temperature       float64         `json:"temperature,omitempty"`
 	TopP              float64         `json:"top_p,omitempty"`
 	TopK              int             `json:"top_k,omitempty"`
 	//ClaudeMetadata `json:"metadata,omitempty"`
 	Stream            bool            `json:"stream,omitempty"`
 }

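Note: with this change only the max_tokens field is left to serialize. Below is a minimal, self-contained sketch of the resulting JSON payload; the ClaudeMessage shape and the model name are simplified stand-ins for illustration, not the full types from the repository.

package main

import (
	"encoding/json"
	"fmt"
)

// Simplified stand-ins for the types in the diff above, kept only to make
// this sketch self-contained and runnable.
type ClaudeMessage struct {
	Role    string `json:"role"`
	Content string `json:"content"`
}

type ClaudeRequest struct {
	Model     string          `json:"model"`
	Messages  []ClaudeMessage `json:"messages,omitempty"`
	MaxTokens uint            `json:"max_tokens,omitempty"`
	Stream    bool            `json:"stream,omitempty"`
}

func main() {
	req := ClaudeRequest{
		Model:     "claude-3-haiku-20240307",
		Messages:  []ClaudeMessage{{Role: "user", Content: "hello"}},
		MaxTokens: 4096,
	}
	out, _ := json.Marshal(req)
	// Prints a payload containing "max_tokens"; there is no longer a
	// "max_tokens_to_sample" field to emit.
	fmt.Println(string(out))
}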

@@ -28,17 +28,17 @@ func stopReasonClaude2OpenAI(reason string) string {
 
 func RequestOpenAI2ClaudeComplete(textRequest dto.GeneralOpenAIRequest) *ClaudeRequest {
 	claudeRequest := ClaudeRequest{
 		Model:             textRequest.Model,
 		Prompt:            "",
-		MaxTokensToSample: textRequest.MaxTokens,
+		MaxTokens:         textRequest.MaxTokens,
 		StopSequences:     nil,
 		Temperature:       textRequest.Temperature,
 		TopP:              textRequest.TopP,
 		TopK:              textRequest.TopK,
 		Stream:            textRequest.Stream,
 	}
-	if claudeRequest.MaxTokensToSample == 0 {
-		claudeRequest.MaxTokensToSample = 1000000
+	if claudeRequest.MaxTokens == 0 {
+		claudeRequest.MaxTokens = 4096
 	}
 	prompt := ""
 	for _, message := range textRequest.Messages {

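Note: the fallback default also changes here. When the incoming OpenAI-style request leaves max_tokens unset (zero), the converted Claude request now defaults to 4096 instead of 1,000,000, likely because the Claude API rejects values beyond a model's output limit. A minimal sketch of just that fallback follows, using an illustrative stand-in type rather than the real dto.GeneralOpenAIRequest.

package main

import "fmt"

// Illustrative stand-in for the request type in the diff above.
type ClaudeRequest struct {
	MaxTokens uint `json:"max_tokens,omitempty"`
}

// applyMaxTokensDefault mirrors the fallback added in this commit:
// a zero MaxTokens is replaced with 4096 before the request is sent.
func applyMaxTokensDefault(req *ClaudeRequest) {
	if req.MaxTokens == 0 {
		req.MaxTokens = 4096
	}
}

func main() {
	req := ClaudeRequest{} // caller did not set max_tokens
	applyMaxTokensDefault(&req)
	fmt.Println(req.MaxTokens) // 4096
}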

@@ -26,7 +26,7 @@ export const CHANNEL_OPTIONS = [
     key: 33,
     text: 'AWS Claude',
     value: 33,
-    color: 'black',
+    color: 'indigo',
     label: 'AWS Claude',
   },
   {