feat: claude function call

Author: Hk-Gosuto
Date: 2024-08-11 06:32:52 +00:00
parent a0fc9bd316
commit ad5d81d76a
13 changed files with 292 additions and 167 deletions

View File

@@ -1,10 +1,10 @@
+import { BaseLanguageModel } from "@langchain/core/language_models/base";
+import { Embeddings } from "@langchain/core/embeddings";
import { ArxivAPIWrapper } from "@/app/api/langchain-tools/arxiv";
import { DallEAPIWrapper } from "@/app/api/langchain-tools/dalle_image_generator";
import { StableDiffusionWrapper } from "@/app/api/langchain-tools/stable_diffusion_image_generator";
-import { BaseLanguageModel } from "langchain/dist/base_language";
-import { Calculator } from "langchain/tools/calculator";
+import { Calculator } from "@langchain/community/tools/calculator";
import { WebBrowser } from "langchain/tools/webbrowser";
-import { Embeddings } from "langchain/dist/embeddings/base.js";
import { WolframAlphaTool } from "@/app/api/langchain-tools/wolframalpha";
import { BilibiliVideoInfoTool } from "./bilibili_vid_info";
import { BilibiliVideoSearchTool } from "./bilibili_vid_search";

View File

@@ -1,8 +1,8 @@
import { Tool } from "@langchain/core/tools";
import { CallbackManagerForToolRun } from "@langchain/core/callbacks/manager";
-import { BaseLanguageModel } from "langchain/dist/base_language";
+import { BaseLanguageModel } from "@langchain/core/language_models/base";
+import { Embeddings } from "@langchain/core/embeddings";
import { formatDocumentsAsString } from "langchain/util/document";
-import { Embeddings } from "langchain/dist/embeddings/base.js";
import { getServerSideConfig } from "@/app/config/server";
import { SupabaseVectorStore } from "@langchain/community/vectorstores/supabase";
import { createClient } from "@supabase/supabase-js";

View File

@@ -1,10 +1,10 @@
-import { BaseLanguageModel } from "langchain/dist/base_language";
+import { BaseLanguageModel } from "@langchain/core/language_models/base";
+import { Embeddings } from "@langchain/core/embeddings";
import { PDFBrowser } from "@/app/api/langchain-tools/pdf_browser";
-import { Embeddings } from "langchain/dist/embeddings/base.js";
import { ArxivAPIWrapper } from "@/app/api/langchain-tools/arxiv";
import { DallEAPINodeWrapper } from "@/app/api/langchain-tools/dalle_image_generator_node";
import { StableDiffusionNodeWrapper } from "@/app/api/langchain-tools/stable_diffusion_image_generator_node";
-import { Calculator } from "langchain/tools/calculator";
+import { Calculator } from "@langchain/community/tools/calculator";
import { WebBrowser } from "langchain/tools/webbrowser";
import { WolframAlphaTool } from "@/app/api/langchain-tools/wolframalpha";
import { BilibiliVideoInfoTool } from "./bilibili_vid_info";

View File

@@ -8,9 +8,9 @@ import {
} from "langchain/text_splitter";
import { CallbackManagerForToolRun } from "@langchain/core/callbacks/manager";
-import { BaseLanguageModel } from "langchain/dist/base_language";
+import { BaseLanguageModel } from "@langchain/core/language_models/base";
+import { Embeddings } from "@langchain/core/embeddings";
import { formatDocumentsAsString } from "langchain/util/document";
-import { Embeddings } from "langchain/dist/embeddings/base.js";
import { RunnableSequence } from "@langchain/core/runnables";
import { StringOutputParser } from "@langchain/core/output_parsers";

View File

@@ -21,7 +21,7 @@ import S3FileStorage from "@/app/utils/s3_file_storage";
import { OllamaEmbeddings } from "@langchain/community/embeddings/ollama";
import { SupabaseVectorStore } from "@langchain/community/vectorstores/supabase";
import { createClient } from "@supabase/supabase-js";
-import { Embeddings } from "langchain/dist/embeddings/base";
+import { Embeddings } from "@langchain/core/embeddings";
interface RequestBody {
sessionId: string;

View File

@@ -104,6 +104,7 @@ export class AgentApi {
var controller = this.controller;
return BaseCallbackHandler.fromMethods({
async handleLLMNewToken(token: string) {
+console.log(token);
if (token && !controller.signal.aborted) {
var response = new ResponseBody();
response.message = token;
@@ -220,13 +221,14 @@ export class AgentApi {
baseUrl = reqBaseUrl;
if (!baseUrl.endsWith("/v1"))
baseUrl = baseUrl.endsWith("/") ? `${baseUrl}v1` : `${baseUrl}/v1`;
-console.log("[baseUrl]", baseUrl);
+console.log("[openai baseUrl]", baseUrl);
return baseUrl;
}
getLLM(reqBody: RequestBody, apiKey: string, baseUrl: string) {
const serverConfig = getServerSideConfig();
-if (reqBody.isAzure || serverConfig.isAzure)
+if (reqBody.isAzure || serverConfig.isAzure) {
+console.log("[use Azure ChatOpenAI]");
return new ChatOpenAI({
temperature: reqBody.temperature,
streaming: reqBody.stream,
@@ -240,7 +242,9 @@ export class AgentApi {
azureOpenAIApiDeploymentName: reqBody.model,
azureOpenAIBasePath: baseUrl,
});
-if (reqBody.provider === ServiceProvider.OpenAI)
+}
+if (reqBody.provider === ServiceProvider.OpenAI) {
+console.log("[use ChatOpenAI]");
return new ChatOpenAI(
{
modelName: reqBody.model,
@@ -253,7 +257,9 @@ export class AgentApi {
},
{ basePath: baseUrl },
);
-if (reqBody.provider === ServiceProvider.Anthropic)
+}
+if (reqBody.provider === ServiceProvider.Anthropic) {
+console.log("[use ChatAnthropic]");
return new ChatAnthropic({
model: reqBody.model,
apiKey: apiKey,
@@ -265,6 +271,7 @@ export class AgentApi {
baseURL: baseUrl,
},
});
+}
throw new Error("Unsupported model providers");
}
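
A note on the new branch: getLLM now returns a ChatAnthropic instance when the request's provider is Anthropic. A minimal standalone sketch of constructing the Claude model the way this branch appears to, with placeholder model name, key, and base URL; the clientOptions wrapper around baseURL is an assumption, since that line falls outside the hunk:

import { ChatAnthropic } from "@langchain/anthropic";

// Placeholder values; in getLLM these come from reqBody and the server config.
const llm = new ChatAnthropic({
  model: "claude-3-5-sonnet-20240620",
  apiKey: process.env.ANTHROPIC_API_KEY,
  temperature: 0.7,
  streaming: true,
  clientOptions: {
    // Assumption: forwarded to the underlying Anthropic SDK client.
    baseURL: "https://api.anthropic.com",
  },
});
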
@@ -294,7 +301,10 @@ export class AgentApi {
) {
baseUrl = reqBody.baseUrl;
}
-if (!isAzure && !baseUrl.endsWith("/v1")) {
+if (
+reqBody.provider === ServiceProvider.OpenAI &&
+!baseUrl.endsWith("/v1")
+) {
baseUrl = baseUrl.endsWith("/") ? `${baseUrl}v1` : `${baseUrl}/v1`;
}
if (!reqBody.isAzure && serverConfig.isAzure) {
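
The "/v1" suffix is now appended only for the OpenAI provider, so Anthropic base URLs pass through untouched. An illustrative helper (not part of the codebase) that mirrors the same normalization rule:

// Illustrative only; mirrors the provider check and endsWith("/v1") logic above.
const normalizeBaseUrl = (base: string, isOpenAI: boolean): string =>
  isOpenAI && !base.endsWith("/v1")
    ? base.endsWith("/")
      ? `${base}v1`
      : `${base}/v1`
    : base;

console.log(normalizeBaseUrl("https://api.openai.com", true)); // "https://api.openai.com/v1"
console.log(normalizeBaseUrl("https://api.anthropic.com", false)); // unchanged
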
@@ -408,8 +418,7 @@ export class AgentApi {
typeof lastMessageContent === "string"
? new HumanMessage(lastMessageContent)
: new HumanMessage({ content: lastMessageContent });
-const agent = await createToolCallingAgent({
+const agent = createToolCallingAgent({
llm,
tools,
prompt,
@@ -423,7 +432,7 @@ export class AgentApi {
{
input: lastMessageContent,
chat_history: pastMessages,
-signal: this.controller.signal,
+// signal: this.controller.signal,
},
{ callbacks: [handler] },
)
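
The agent is now created without await, and the abort signal passed to the run is commented out; aborts appear to be handled by the controller check inside the callback handler instead. A self-contained sketch of the tool-calling agent flow these hunks rely on, using a placeholder model, a single tool, and a placeholder prompt (the real code wires in reqBody, the plugin tools, and its custom BaseCallbackHandler):

import { AgentExecutor, createToolCallingAgent } from "langchain/agents";
import {
  ChatPromptTemplate,
  MessagesPlaceholder,
} from "@langchain/core/prompts";
import { Calculator } from "@langchain/community/tools/calculator";
import { ChatAnthropic } from "@langchain/anthropic";

async function runAgentDemo() {
  // Placeholder model; in the route getLLM supplies ChatOpenAI or ChatAnthropic.
  const llm = new ChatAnthropic({
    model: "claude-3-5-sonnet-20240620",
    temperature: 0,
  });
  const tools = [new Calculator()];
  const prompt = ChatPromptTemplate.fromMessages([
    ["system", "You are a helpful assistant."],
    new MessagesPlaceholder("chat_history"),
    ["human", "{input}"],
    new MessagesPlaceholder("agent_scratchpad"),
  ]);

  // Used synchronously here, matching the dropped `await` above.
  const agent = createToolCallingAgent({ llm, tools, prompt });
  const executor = new AgentExecutor({ agent, tools });

  const result = await executor.invoke(
    { input: "What is 12 * 7?", chat_history: [] },
    { callbacks: [] }, // the route passes its streaming handler here
  );
  console.log(result.output);
}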

View File

@@ -4,8 +4,8 @@ import { auth } from "@/app/api/auth";
import { NodeJSTool } from "@/app/api/langchain-tools/nodejs_tools";
import { ModelProvider } from "@/app/constant";
import { ChatOpenAI, OpenAIEmbeddings } from "@langchain/openai";
-import { Embeddings } from "langchain/dist/embeddings/base";
import { OllamaEmbeddings } from "@langchain/community/embeddings/ollama";
+import { Embeddings } from "@langchain/core/embeddings";
async function handle(req: NextRequest) {
if (req.method === "OPTIONS") {

View File

@@ -227,7 +227,7 @@ export class ClientApi {
}
}
-export function getHeaders(ignoreHeaders?: boolean) {
+export function getHeaders(ignoreHeaders?: boolean, isFunctionCall?: boolean) {
const accessStore = useAccessStore.getState();
const chatStore = useChatStore.getState();
let headers: Record<string, string> = {};
@@ -285,6 +285,7 @@ export function getHeaders(ignoreHeaders?: boolean) {
}
function getAuthHeader(): string {
+if (isFunctionCall) return "Authorization";
return isAzure ? "api-key" : isAnthropic ? "x-api-key" : "Authorization";
}
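
With the extra flag, requests sent to the internal LangChain agent routes keep the standard Authorization header even when the active provider is Anthropic. A rough usage sketch; anything beyond what the hunk shows (the example body, the exact header set returned) is an assumption:

import { getHeaders } from "@/app/client/api";

async function callAgentRoute() {
  // isFunctionCall = true: the credential goes under "Authorization"
  // rather than Anthropic's "x-api-key".
  const headers = getHeaders(false, true);
  return fetch("/api/langchain/tool/agent/edge", {
    method: "POST",
    headers,
    body: JSON.stringify({ chatSessionId: "demo", messages: [] }), // partial payload, for illustration
  });
}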

View File

@@ -1,4 +1,10 @@
-import { ACCESS_CODE_PREFIX, Anthropic, ApiPath } from "@/app/constant";
+import {
+ACCESS_CODE_PREFIX,
+Anthropic,
+ApiPath,
+REQUEST_TIMEOUT_MS,
+ServiceProvider,
+} from "@/app/constant";
import {
AgentChatOptions,
ChatOptions,
@@ -88,9 +94,164 @@ export class ClaudeApi implements LLMApi {
transcription(options: TranscriptionOptions): Promise<string> {
throw new Error("Method not implemented.");
}
-toolAgentChat(options: AgentChatOptions): Promise<void> {
-throw new Error("Method not implemented.");
+async toolAgentChat(options: AgentChatOptions) {
+const visionModel = isVisionModel(options.config.model);
+const messages: AgentChatOptions["messages"] = [];
+for (const v of options.messages) {
+const content = visionModel
+? await preProcessImageContent(v.content)
+: getMessageTextContent(v);
+messages.push({ role: v.role, content });
+}
+const modelConfig = {
+...useAppConfig.getState().modelConfig,
+...useChatStore.getState().currentSession().mask.modelConfig,
+...{
+model: options.config.model,
+},
+};
+const accessStore = useAccessStore.getState();
+let baseUrl = accessStore.anthropicUrl;
+const requestPayload = {
+chatSessionId: options.chatSessionId,
+messages,
+isAzure: false,
+azureApiVersion: accessStore.azureApiVersion,
+stream: options.config.stream,
+model: modelConfig.model,
+temperature: modelConfig.temperature,
+presence_penalty: modelConfig.presence_penalty,
+frequency_penalty: modelConfig.frequency_penalty,
+top_p: modelConfig.top_p,
+baseUrl: baseUrl,
+maxIterations: options.agentConfig.maxIterations,
+returnIntermediateSteps: options.agentConfig.returnIntermediateSteps,
+useTools: options.agentConfig.useTools,
+provider: ServiceProvider.Anthropic,
+};
+console.log("[Request] anthropic payload: ", requestPayload);
+const shouldStream = true;
+const controller = new AbortController();
+options.onController?.(controller);
+try {
+let path = "/api/langchain/tool/agent/";
+const enableNodeJSPlugin = !!process.env.NEXT_PUBLIC_ENABLE_NODEJS_PLUGIN;
+path = enableNodeJSPlugin ? path + "nodejs" : path + "edge";
+const chatPayload = {
+method: "POST",
+body: JSON.stringify(requestPayload),
+signal: controller.signal,
+headers: getHeaders(false, true),
+};
+// make a fetch request
+const requestTimeoutId = setTimeout(
+() => controller.abort(),
+REQUEST_TIMEOUT_MS,
+);
+// console.log("shouldStream", shouldStream);
+if (shouldStream) {
+let responseText = "";
+let finished = false;
+const finish = () => {
+if (!finished) {
+options.onFinish(responseText);
+finished = true;
+}
+};
+controller.signal.onabort = finish;
+fetchEventSource(path, {
+...chatPayload,
+async onopen(res) {
+clearTimeout(requestTimeoutId);
+const contentType = res.headers.get("content-type");
+console.log(
+"[Anthropic] request response content type: ",
+contentType,
+);
+if (contentType?.startsWith("text/plain")) {
+responseText = await res.clone().text();
+return finish();
+}
+if (
+!res.ok ||
+!res.headers
+.get("content-type")
+?.startsWith(EventStreamContentType) ||
+res.status !== 200
+) {
+const responseTexts = [responseText];
+let extraInfo = await res.clone().text();
+console.warn(`extraInfo: ${extraInfo}`);
+if (res.status === 401) {
+responseTexts.push(Locale.Error.Unauthorized);
+}
+if (extraInfo) {
+responseTexts.push(extraInfo);
+}
+responseText = responseTexts.join("\n\n");
+return finish();
+}
+},
+onmessage(msg) {
+let response = JSON.parse(msg.data);
+if (!response.isSuccess) {
+console.error("[Request]", msg.data);
+responseText = msg.data;
+throw Error(response.message);
+}
+if (msg.data === "[DONE]" || finished) {
+return finish();
+}
+try {
+if (response && !response.isToolMessage) {
+responseText += response.message;
+options.onUpdate?.(responseText, response.message);
+} else {
+options.onToolUpdate?.(response.toolName!, response.message);
+}
+} catch (e) {
+console.error("[Request] parse error", response, msg);
+}
+},
+onclose() {
+finish();
+},
+onerror(e) {
+options.onError?.(e);
+throw e;
+},
+openWhenHidden: true,
+});
+} else {
+const res = await fetch(path, chatPayload);
+clearTimeout(requestTimeoutId);
+const resJson = await res.json();
+const message = this.extractMessage(resJson);
+options.onFinish(message);
+}
+} catch (e) {
+console.log("[Request] failed to make a chat request", e);
+options.onError?.(e as Error);
+}
}
createRAGStore(options: CreateRAGStoreOptions): Promise<string> {
throw new Error("Method not implemented.");
}
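
The streaming branch above parses each event's data as JSON and routes it to onUpdate for model tokens or onToolUpdate for tool progress. A sketch of the per-event shape this client assumes, inferred only from the fields read in onmessage (the authoritative ResponseBody type lives in the LangChain agent API code):

// Inferred event shape; field meanings follow the onmessage handler above.
interface AgentStreamEvent {
  isSuccess: boolean; // false: `message` carries an error description
  isToolMessage: boolean; // true: progress from a tool call rather than the LLM
  message: string; // token delta, tool output, or error text
  toolName?: string; // present when isToolMessage is true
}

function routeEvent(
  ev: AgentStreamEvent,
  state: { responseText: string },
  onUpdate: (full: string, delta: string) => void,
  onToolUpdate: (tool: string, message: string) => void,
) {
  if (!ev.isSuccess) throw new Error(ev.message);
  if (ev.isToolMessage) {
    onToolUpdate(ev.toolName ?? "", ev.message);
  } else {
    state.responseText += ev.message;
    onUpdate(state.responseText, ev.message);
  }
}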

View File

@@ -69,6 +69,7 @@ import {
isVisionModel,
isFirefox,
isSupportRAGModel,
+isFunctionCallModel,
} from "../utils";
import { uploadImage as uploadImageRemote } from "@/app/utils/chat";
@@ -636,19 +637,17 @@ export function ChatActions(props: {
text={Locale.Chat.InputActions.Masks}
icon={<MaskIcon />}
/>
-{config.pluginConfig.enable &&
-/^gpt(?!.*03\d{2}$).*$/.test(currentModel) &&
-currentModel != "gpt-4-vision-preview" && (
-<ChatAction
-onClick={switchUsePlugins}
-text={
-usePlugins
-? Locale.Chat.InputActions.DisablePlugins
-: Locale.Chat.InputActions.EnablePlugins
-}
-icon={usePlugins ? <EnablePluginIcon /> : <DisablePluginIcon />}
-/>
-)}
+{config.pluginConfig.enable && isFunctionCallModel(currentModel) && (
+<ChatAction
+onClick={switchUsePlugins}
+text={
+usePlugins
+? Locale.Chat.InputActions.DisablePlugins
+: Locale.Chat.InputActions.EnablePlugins
+}
+icon={usePlugins ? <EnablePluginIcon /> : <DisablePluginIcon />}
+/>
+)}
<ChatAction
onClick={() => setShowModelSelector(true)}
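
Both this component and the chat store below now gate plugin/tool use on isFunctionCallModel instead of the old GPT-only regex. The helper itself is outside this diff; a hypothetical sketch of what it could look like, assuming it accepts GPT and Claude 3 models and still excludes gpt-4-vision-preview (the real implementation in app/utils.ts may use a different model list):

// Hypothetical sketch only; not the actual helper referenced by this commit.
export function isFunctionCallModel(model: string): boolean {
  if (model === "gpt-4-vision-preview") return false; // no tool calling
  return model.startsWith("gpt") || model.startsWith("claude-3");
}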

View File

@@ -1,4 +1,8 @@
-import { trimTopic, getMessageTextContent } from "../utils";
+import {
+trimTopic,
+getMessageTextContent,
+isFunctionCallModel,
+} from "../utils";
import Locale, { getLang } from "../locales";
import { showToast } from "../components/ui-lib";
@@ -403,8 +407,7 @@ export const useChatStore = createPersistStore(
config.pluginConfig.enable &&
session.mask.usePlugins &&
(allPlugins.length > 0 || isEnableRAG) &&
-modelConfig.model.startsWith("gpt") &&
-modelConfig.model != "gpt-4-vision-preview"
+isFunctionCallModel(modelConfig.model)
) {
console.log("[ToolAgent] start");
let pluginToolNames = allPlugins.map((m) => m.toolName);