feat(bedrock): Integrate AWS Bedrock as a new LLM provider

Adds support for using models hosted on AWS Bedrock, specifically Anthropic Claude models.

Key changes:
- Added '@aws-sdk/client-bedrock-runtime' dependency.
- Updated constants, server config, and auth logic for Bedrock.
- Implemented a backend API handler to communicate with the Bedrock API, handling streaming and non-streaming responses and formatting output to be OpenAI-compatible.
- Updated the dynamic API router to dispatch requests to the Bedrock handler.
- Created a frontend client for Bedrock and updated the client factory.
- Updated the environment configuration template with the necessary Bedrock environment variables (AWS access keys, region, enable flag) and an example of using the custom-models setting to alias Bedrock models.
This commit is contained in:
AC
2025-04-06 00:41:56 +08:00
parent 48469bd8ca
commit fc9688a1f7
9 changed files with 443 additions and 19 deletions

View File

@@ -14,6 +14,7 @@ import { handle as deepseekHandler } from "../../deepseek";
import { handle as siliconflowHandler } from "../../siliconflow";
import { handle as xaiHandler } from "../../xai";
import { handle as chatglmHandler } from "../../glm";
import { handle as bedrockHandler } from "../../bedrock";
import { handle as proxyHandler } from "../../proxy";
async function handle(
@@ -50,6 +51,8 @@ async function handle(
return chatglmHandler(req, { params });
case ApiPath.SiliconFlow:
return siliconflowHandler(req, { params });
case ApiPath.Bedrock:
return bedrockHandler(req, { params });
case ApiPath.OpenAI:
return openaiHandler(req, { params });
default: