Mirror of https://github.com/hwchase17/langchain.git, synced 2025-05-16 12:32:06 +00:00
AnthropicFunctions function_call compatibility (#13901)
- **Description:** Updates to `AnthropicFunctions` to be compatible with the OpenAI `function_call` functionality.
- **Issue:** The functionality to indicate `auto`, `none`, and a forced function_call was not completely implemented in the existing code.
- **Dependencies:** None
- **Tag maintainer:** @baskaryan, and any of the other maintainers if needed.
- **Twitter handle:** None

I have specifically tested this functionality via AWS Bedrock with the Claude-2 and Claude-Instant models.
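For context, a hedged usage sketch of what the three `function_call` modes look like from the caller's side. The import paths, the Bedrock model id, and the `get_weather` schema are illustrative assumptions, not part of this commit:

```python
# Illustrative only: exercising the OpenAI-style `function_call` modes against
# AnthropicFunctions. Import paths, model id, and the get_weather schema are
# assumptions for this sketch.
from langchain.chat_models import BedrockChat
from langchain.schema import HumanMessage
from langchain_experimental.llms.anthropic_functions import AnthropicFunctions

functions = [
    {
        "name": "get_weather",  # hypothetical tool
        "description": "Get the current weather for a location.",
        "parameters": {
            "type": "object",
            "properties": {"location": {"type": "string"}},
            "required": ["location"],
        },
    }
]

model = AnthropicFunctions(llm=BedrockChat(model_id="anthropic.claude-v2"))
messages = [HumanMessage(content="What's the weather in Paris?")]

# "auto": inject the tool prompt and let the model decide whether to call a tool.
auto_result = model.predict_messages(messages, functions=functions, function_call="auto")

# "none": skip the tool prompt and return a plain completion.
plain_result = model.predict_messages(messages, functions=functions, function_call="none")

# Forced: always call the named function.
forced_result = model.predict_messages(
    messages, functions=functions, function_call={"name": "get_weather"}
)
print(forced_result.additional_kwargs.get("function_call"))
```

The forced form matches OpenAI's `{"name": ...}` convention; as the diff below shows, it is implemented by seeding the conversation with a `<tool>` prefix so Claude completes the `<tool_input>` block.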
parent 14cc907d35
commit fc40bd4cdb
```diff
@@ -144,19 +144,30 @@ class AnthropicFunctions(BaseChatModel):
         forced = False
         function_call = ""
         if "functions" in kwargs:
-            content = prompt.format(tools=json.dumps(kwargs["functions"], indent=2))
-            system = SystemMessage(content=content)
-            messages = [system] + messages
+            # get the function call method
+            if "function_call" in kwargs:
+                function_call = kwargs["function_call"]
+                del kwargs["function_call"]
+            else:
+                function_call = "auto"
+
+            # should function calling be used
+            if function_call != "none":
+                content = prompt.format(tools=json.dumps(kwargs["functions"], indent=2))
+                system = SystemMessage(content=content)
+                messages = [system] + messages
+
+            # is the function call a dictionary (forced function calling)
+            if isinstance(function_call, dict):
+                forced = True
+                function_call_name = function_call["name"]
+                messages.append(AIMessage(content=f"<tool>{function_call_name}</tool>"))
+
             del kwargs["functions"]
             if stop is None:
                 stop = ["</tool_input>"]
             else:
                 stop.append("</tool_input>")
-            if "function_call" in kwargs:
-                forced = True
-                function_call = kwargs["function_call"]["name"]
-                AIMessage(content=f"<tool>{function_call}</tool>")
-                del kwargs["function_call"]
         else:
             if "function_call" in kwargs:
                 raise ValueError(
```
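In short, the new dispatch accepts the same three shapes OpenAI does (the `get_weather` name below is a made-up example):

```python
# The three OpenAI-style values handled by the new branch; "get_weather" is a
# made-up tool name used only for illustration.
function_call = "auto"                   # default: add the tool prompt, let Claude decide
function_call = "none"                   # skip the tool prompt entirely
function_call = {"name": "get_weather"}  # force this tool: "<tool>get_weather</tool>" is
                                         # appended as an AIMessage so Claude continues
                                         # straight into the <tool_input> block
```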
```diff
@@ -168,12 +179,19 @@ class AnthropicFunctions(BaseChatModel):
         completion = response.content
         if forced:
             tag_parser = TagParser()
-            tag_parser.feed(completion.strip() + "</tool_input>")
-            v1 = tag_parser.parse_data["tool_input"][0]
+
+            if "<tool_input>" in completion:
+                tag_parser.feed(completion.strip() + "</tool_input>")
+                v1 = tag_parser.parse_data["tool_input"][0]
+                arguments = json.dumps(_destrip(v1))
+            else:
+                v1 = completion
+                arguments = ""
+
             kwargs = {
                 "function_call": {
-                    "name": function_call,
-                    "arguments": json.dumps(_destrip(v1)),
+                    "name": function_call_name,
+                    "arguments": arguments,
                 }
             }
             message = AIMessage(content="", additional_kwargs=kwargs)
```
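For illustration, a simplified stand-in for what the forced branch now does with the raw completion. The real code uses the internal `TagParser` and `_destrip` helpers; the completion string and tool name below are invented:

```python
import json
import re

# Simplified stand-in for the forced-call parsing above (not the library's
# TagParser). With a forced call, Claude is expected to reply with only the
# arguments inside <tool_input>...</tool_input>.
completion = '<tool_input>{"location": "Paris, FR"}</tool_input>'  # invented output

match = re.search(r"<tool_input>(.*?)</tool_input>", completion, re.DOTALL)
if match:
    arguments = match.group(1).strip()
else:
    arguments = ""  # mirrors the new else branch: no <tool_input> in the completion

function_call = {"name": "get_weather", "arguments": arguments}
print(json.dumps({"function_call": function_call}, indent=2))
```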
```diff
@@ -181,7 +199,7 @@ class AnthropicFunctions(BaseChatModel):
         elif "<tool>" in completion:
             tag_parser = TagParser()
             tag_parser.feed(completion.strip() + "</tool_input>")
-            msg = completion.split("<tool>")[0]
+            msg = completion.split("<tool>")[0].strip()
             v1 = tag_parser.parse_data["tool_input"][0]
             kwargs = {
                 "function_call": {
```
```diff
@@ -192,6 +210,7 @@ class AnthropicFunctions(BaseChatModel):
             message = AIMessage(content=msg, additional_kwargs=kwargs)
             return ChatResult(generations=[ChatGeneration(message=message)])
         else:
+            response.content = response.content.strip()
             return ChatResult(generations=[ChatGeneration(message=response)])

     @property
```
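Downstream, the resulting message can be consumed exactly like an OpenAI function call. A hedged sketch, with a hand-built `AIMessage` standing in for a real forced-call response and a made-up `get_weather` implementation:

```python
import json

from langchain.schema import AIMessage

# Hand-built stand-in for a forced-call response; the additional_kwargs shape
# mirrors what the forced branch above attaches to its AIMessage.
result = AIMessage(
    content="",
    additional_kwargs={
        "function_call": {"name": "get_weather", "arguments": '{"location": "Paris"}'}
    },
)

def get_weather(location: str) -> str:
    """Made-up local tool implementation for the sketch."""
    return f"It is 18°C and cloudy in {location}."

call = result.additional_kwargs.get("function_call")
if call and call.get("name") == "get_weather":
    args = json.loads(call["arguments"]) if call["arguments"] else {}
    print(get_weather(**args))
else:
    print(result.content)  # no function call: plain Claude answer
```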