fix: claude max_tokens

This commit is contained in:
CaIon 2024-04-23 12:19:23 +08:00
parent 9294127686
commit 53e8790024
3 changed files with 20 additions and 21 deletions

View File

@@ -28,7 +28,6 @@ type ClaudeRequest struct {
Prompt string `json:"prompt,omitempty"` Prompt string `json:"prompt,omitempty"`
System string `json:"system,omitempty"` System string `json:"system,omitempty"`
Messages []ClaudeMessage `json:"messages,omitempty"` Messages []ClaudeMessage `json:"messages,omitempty"`
MaxTokensToSample uint `json:"max_tokens_to_sample,omitempty"`
MaxTokens uint `json:"max_tokens,omitempty"` MaxTokens uint `json:"max_tokens,omitempty"`
StopSequences []string `json:"stop_sequences,omitempty"` StopSequences []string `json:"stop_sequences,omitempty"`
Temperature float64 `json:"temperature,omitempty"` Temperature float64 `json:"temperature,omitempty"`

View File

@@ -30,15 +30,15 @@ func RequestOpenAI2ClaudeComplete(textRequest dto.GeneralOpenAIRequest) *ClaudeR
claudeRequest := ClaudeRequest{ claudeRequest := ClaudeRequest{
Model: textRequest.Model, Model: textRequest.Model,
Prompt: "", Prompt: "",
MaxTokensToSample: textRequest.MaxTokens, MaxTokens: textRequest.MaxTokens,
StopSequences: nil, StopSequences: nil,
Temperature: textRequest.Temperature, Temperature: textRequest.Temperature,
TopP: textRequest.TopP, TopP: textRequest.TopP,
TopK: textRequest.TopK, TopK: textRequest.TopK,
Stream: textRequest.Stream, Stream: textRequest.Stream,
} }
if claudeRequest.MaxTokensToSample == 0 { if claudeRequest.MaxTokens == 0 {
claudeRequest.MaxTokensToSample = 1000000 claudeRequest.MaxTokens = 4096
} }
prompt := "" prompt := ""
for _, message := range textRequest.Messages { for _, message := range textRequest.Messages {

View File

@@ -26,7 +26,7 @@ export const CHANNEL_OPTIONS = [
key: 33, key: 33,
text: 'AWS Claude', text: 'AWS Claude',
value: 33, value: 33,
color: 'black', color: 'indigo',
label: 'AWS Claude', label: 'AWS Claude',
}, },
{ {