diff --git a/app/client/platforms/anthropic.ts b/app/client/platforms/anthropic.ts
index 6747221a8..2cf1d852b 100644
--- a/app/client/platforms/anthropic.ts
+++ b/app/client/platforms/anthropic.ts
@@ -40,6 +40,10 @@ export interface AnthropicChatRequest {
   top_k?: number; // Only sample from the top K options for each subsequent token.
   metadata?: object; // An object describing metadata about the request.
   stream?: boolean; // Whether to incrementally stream the response using server-sent events.
+  thinking?: {
+    type: "enabled" | "disabled";
+    budget_tokens: number;
+  };
 }
 
 export interface ChatRequest {
@@ -189,6 +193,10 @@ export class ClaudeApi implements LLMApi {
       top_p: modelConfig.top_p,
       // top_k: modelConfig.top_k,
       top_k: 5,
+      thinking: {
+        type: "enabled",
+        budget_tokens: modelConfig.max_tokens - 1, // Default value from example
+      },
     };
 
     const path = this.path(Anthropic.ChatPath);
diff --git a/app/constant.ts b/app/constant.ts
index c52d46821..57da263a1 100644
--- a/app/constant.ts
+++ b/app/constant.ts
@@ -535,6 +535,7 @@ const anthropicModels = [
   "claude-3-5-sonnet-20240620",
   "claude-3-5-sonnet-20241022",
   "claude-3-5-sonnet-latest",
+  "claude-3-7-sonnet-20250219",
 ];
 
 const baiduModels = [
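
For context on the `thinking` field this diff introduces: Anthropic's extended-thinking documentation requires `budget_tokens` to be at least 1024 and strictly less than `max_tokens`, and only reasoning-capable models (such as `claude-3-7-sonnet-20250219`) accept the block at all; the diff as written sends it unconditionally with `max_tokens - 1`. Below is a minimal sketch, not part of this diff, of how the field could instead be gated per model and budget. The `buildThinking` helper, the `claude-3-7` prefix check, and the half-of-`max_tokens` budget heuristic are illustrative assumptions, not NextChat code.

```ts
// Sketch only: build the `thinking` block conditionally, so the key is
// omitted for models or budgets that cannot use it.
interface ThinkingConfig {
  type: "enabled" | "disabled";
  budget_tokens: number;
}

function buildThinking(
  model: string,
  maxTokens: number,
): ThinkingConfig | undefined {
  // Assumption: only claude-3-7-* models accept extended thinking;
  // older Claude 3.5 models would reject the field.
  if (!model.startsWith("claude-3-7")) return undefined;

  // Documented constraints: budget_tokens >= 1024 and < max_tokens.
  // Heuristic here: spend half the output budget on thinking.
  const budget = Math.floor(maxTokens / 2);
  if (budget < 1024) return undefined;

  return { type: "enabled", budget_tokens: budget };
}

// Usage: spread the result so `thinking` is absent when not applicable.
const modelConfig = { model: "claude-3-7-sonnet-20250219", max_tokens: 8192 };
const thinking = buildThinking(modelConfig.model, modelConfig.max_tokens);
const requestBody = {
  model: modelConfig.model,
  max_tokens: modelConfig.max_tokens,
  ...(thinking ? { thinking } : {}),
};
console.log(requestBody);
```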