"""Source code for ``langchain_experimental.llms.anthropic_functions``."""
import json
from collections import defaultdict
from html.parser import HTMLParser
from typing import Any, DefaultDict, Dict, List, Optional, cast

from langchain.schema import (
    ChatGeneration,
    ChatResult,
)
from langchain_community.chat_models.anthropic import ChatAnthropic
from langchain_core._api.deprecation import deprecated
from langchain_core.callbacks.manager import (
    CallbackManagerForLLMRun,
)
from langchain_core.language_models import BaseChatModel
from langchain_core.messages import (
    AIMessage,
    BaseMessage,
    SystemMessage,
)
from pydantic import model_validator

# System-prompt template that teaches the model an XML-ish tool-calling
# syntax. ``{tools}`` is filled in with a JSON dump of the function
# schemas (see ``AnthropicFunctions._generate``); the example at the end
# shows the exact <tool>/<tool_input> shape that ``TagParser`` parses back.
prompt = """In addition to responding, you can use tools. \
You have access to the following tools.

{tools}

In order to use a tool, you can use <tool></tool> to specify the name, \
and the <tool_input></tool_input> tags to specify the parameters. \
Each parameter should be passed in as <$param_name>$value</$param_name>, \
Where $param_name is the name of the specific parameter, and $value \
is the value for that parameter.

You will then get back a response in the form <observation></observation>
For example, if you have a tool called 'search' that accepts a single \
parameter 'query' that could run a google search, in order to search \
for the weather in SF you would respond:

<tool>search</tool><tool_input><query>weather in SF</query></tool_input>
<observation>64 degrees</observation>"""
class TagParser(HTMLParser):
    """Parser for the tool tags."""

    def __init__(self) -> None:
        """Set up an empty parse tree and the tracking state.

        A heavy-handed solution: an HTML parser is reused to handle a
        limited grammar of the form::

            INPUT  -> JUNK? VALUE*
            JUNK   -> whitespace or commas between values
            VALUE  -> <IDENTIFIER>DATA</IDENTIFIER> | OBJECT
            OBJECT -> <IDENTIFIER>VALUE+</IDENTIFIER>

        Tags may repeat and nest arbitrarily, which allows complex
        (recursive) values to be represented. Fast for prototyping;
        could be re-implemented later with a stricter, more efficient
        grammar.
        """
        super().__init__()
        # Root of the parse tree: tag name -> list of parsed values.
        self.parse_data: DefaultDict[str, List[Any]] = defaultdict(list)
        # Stack of open containers; the root dict is always at the bottom.
        self.stack: List[DefaultDict[str, List[str]]] = [self.parse_data]
        # Flipped to False when stray text appears outside any tag.
        self.success = True
        # Current tag-nesting depth (0 == top level).
        self.depth = 0
        # Most recent text seen inside the currently open tag, if any.
        self.data: Optional[str] = None

    def handle_starttag(self, tag: str, attrs: Any) -> None:
        """Hook when a new tag is encountered."""
        self.depth += 1
        self.stack.append(defaultdict(list))
        self.data = None

    def handle_endtag(self, tag: str) -> None:
        """Hook when a tag is closed."""
        self.depth -= 1
        # The container opened for this tag is finished; detach it.
        finished = dict(self.stack.pop(-1))
        # A tag that captured text is a leaf and contributes that text;
        # otherwise it contributes its (possibly empty) child mapping.
        # Values nest indefinitely, so this is hard to type for mypy.
        pending_text = self.data
        self.stack[-1][tag].append(  # type: ignore
            finished if pending_text is None else pending_text
        )
        # Clear the pending text so a run of consecutive closing tags
        # does not re-attach a leaf's text to an enclosing tag.
        self.data = None

    def handle_data(self, data: str) -> None:
        """Hook when handling data."""
        text = data.strip()
        # At the top level only separators (whitespace / commas) are
        # allowed; anything else marks the whole parse as invalid.
        if self.depth == 0 and text not in (",", ""):
            self.success = False
        # Whitespace-only chunks are ignored entirely.
        if text:
            self.data = text
@deprecated(
    since="0.0.54",
    removal="1.0",
    alternative_import="langchain_anthropic.experimental.ChatAnthropicTools",
)
class AnthropicFunctions(BaseChatModel):
    """Chat model for interacting with Anthropic functions."""

    # Underlying chat model; defaults to a ChatAnthropic built from the
    # constructor kwargs (see validate_environment).
    llm: BaseChatModel

    @model_validator(mode="before")
    @classmethod
    def validate_environment(cls, values: Dict) -> Any:
        """Fall back to a ChatAnthropic built from the remaining kwargs
        when no explicit ``llm`` was supplied."""
        values["llm"] = values.get("llm") or ChatAnthropic(**values)
        return values

    @property
    def model(self) -> BaseChatModel:
        """For backwards compatibility."""
        return self.llm

    def _generate(
        self,
        messages: List[BaseMessage],
        stop: Optional[List[str]] = None,
        run_manager: Optional[CallbackManagerForLLMRun] = None,
        **kwargs: Any,
    ) -> ChatResult:
        """Generate a chat result, emulating OpenAI-style function calling.

        When ``functions`` is passed in ``kwargs``, the tool schemas are
        injected as a system prompt (see module-level ``prompt``) and the
        completion is parsed with ``TagParser`` to recover a
        ``function_call`` dict in ``additional_kwargs``; otherwise the
        call is forwarded to the wrapped model unchanged.

        Raises:
            ValueError: if ``function_call`` is given without ``functions``.
        """
        forced = False
        function_call = ""
        if "functions" in kwargs:
            # get the function call method
            if "function_call" in kwargs:
                function_call = kwargs["function_call"]
                del kwargs["function_call"]
            else:
                function_call = "auto"

            # should function calling be used
            if function_call != "none":
                # Prepend the tool-usage instructions as a system message.
                content = prompt.format(
                    tools=json.dumps(kwargs["functions"], indent=2)
                )
                system = SystemMessage(content=content)
                messages = [system] + messages

            # is the function call a dictionary (forced function calling)
            if isinstance(function_call, dict):
                forced = True
                function_call_name = function_call["name"]
                # Pre-seed the assistant turn so the model must complete
                # the <tool_input> for the forced tool.
                messages.append(
                    AIMessage(content=f"<tool>{function_call_name}</tool>")
                )

            del kwargs["functions"]
            # Stop on the closing tool-input tag so the completion ends
            # right after the tool arguments.
            # NOTE(review): appends to the caller-supplied ``stop`` list
            # in place — confirm callers do not reuse it.
            if stop is None:
                stop = ["</tool_input>"]
            else:
                stop.append("</tool_input>")
        else:
            if "function_call" in kwargs:
                raise ValueError(
                    "if `function_call` provided, `functions` must also be"
                )
        response = self.model.invoke(
            messages, stop=stop, callbacks=run_manager, **kwargs
        )
        completion = cast(str, response.content)
        if forced:
            # Forced mode: the completion is just the tool input (the
            # <tool> tag was pre-seeded above).
            tag_parser = TagParser()
            if "<tool_input>" in completion:
                # Re-append the closing tag consumed by the stop sequence.
                tag_parser.feed(completion.strip() + "</tool_input>")
                v1 = tag_parser.parse_data["tool_input"][0]
                arguments = json.dumps(_destrip(v1))
            else:
                v1 = completion
                arguments = ""
            kwargs = {
                "function_call": {
                    "name": function_call_name,  # type: ignore[has-type]
                    "arguments": arguments,
                }
            }
            message = AIMessage(content="", additional_kwargs=kwargs)
            return ChatResult(generations=[ChatGeneration(message=message)])
        elif "<tool>" in completion:
            # Auto mode: the model chose a tool itself; text before the
            # <tool> tag is kept as the message content.
            tag_parser = TagParser()
            tag_parser.feed(completion.strip() + "</tool_input>")
            msg = completion.split("<tool>")[0].strip()
            v1 = tag_parser.parse_data["tool_input"][0]
            kwargs = {
                "function_call": {
                    "name": tag_parser.parse_data["tool"][0],
                    "arguments": json.dumps(_destrip(v1)),
                }
            }
            message = AIMessage(content=msg, additional_kwargs=kwargs)
            return ChatResult(generations=[ChatGeneration(message=message)])
        else:
            # Plain completion: no tool invocation detected.
            response.content = cast(str, response.content).strip()
            return ChatResult(generations=[ChatGeneration(message=response)])

    @property
    def _llm_type(self) -> str:
        return "anthropic_functions"