Source code for langchain_core.utils.function_calling
"""Methods for creating function specs in the style of OpenAI Functions."""from__future__importannotationsimportcollectionsimportinspectimportloggingimporttypesimporttypingimportuuidfromtypingimport(TYPE_CHECKING,Annotated,Any,Callable,Literal,Optional,Union,cast,)frompydanticimportBaseModelfrompydantic.v1importBaseModelasBaseModelV1fromtyping_extensionsimportTypedDict,get_args,get_origin,is_typeddictfromlangchain_core._apiimportbeta,deprecatedfromlangchain_core.messagesimportAIMessage,BaseMessage,HumanMessage,ToolMessagefromlangchain_core.utils.json_schemaimportdereference_refsfromlangchain_core.utils.pydanticimportis_basemodel_subclassifTYPE_CHECKING:fromlangchain_core.toolsimportBaseToollogger=logging.getLogger(__name__)PYTHON_TO_JSON_TYPES={"str":"string","int":"integer","float":"number","bool":"boolean",}
class FunctionDescription(TypedDict):
    """Representation of a callable function to send to an LLM."""

    name: str
    """The name of the function."""
    description: str
    """A description of the function."""
    parameters: dict
    """The parameters of the function."""
class ToolDescription(TypedDict):
    """Representation of a callable function to the OpenAI API."""

    type: Literal["function"]
    """The type of the tool."""
    function: FunctionDescription
    """The function description."""
def _rm_titles(kv: dict, prev_key: str = "") -> dict:
    """Recursively remove "title" fields from a JSON schema dictionary.

    Remove "title" fields from the input JSON schema dictionary, except when a
    "title" appears within a property definition under "properties".

    Args:
        kv (dict): The input JSON schema as a dictionary.
        prev_key (str): The key from the parent dictionary, used to identify context.

    Returns:
        dict: A new dictionary with appropriate "title" fields removed.
    """
    new_kv = {}

    for k, v in kv.items():
        if k == "title":
            # If the value is a nested dict and part of a property under "properties",
            # preserve the title but continue recursion
            if isinstance(v, dict) and prev_key == "properties":
                new_kv[k] = _rm_titles(v, k)
            else:
                # Otherwise, remove this "title" key
                continue
        elif isinstance(v, dict):
            # Recurse into nested dictionaries
            new_kv[k] = _rm_titles(v, k)
        else:
            # Leave non-dict values untouched
            new_kv[k] = v

    return new_kv


def _convert_json_schema_to_openai_function(
    schema: dict,
    *,
    name: Optional[str] = None,
    description: Optional[str] = None,
    rm_titles: bool = True,
) -> FunctionDescription:
    """Convert a JSON schema to a function description for the OpenAI API.

    Args:
        schema: The JSON schema to convert.
        name: The name of the function. If not provided, the title of the schema
            will be used.
        description: The description of the function. If not provided, the
            description of the schema will be used.
        rm_titles: Whether to remove titles from the schema. Defaults to True.

    Returns:
        The function description.
    """
    schema = dereference_refs(schema)
    if "definitions" in schema:  # pydantic 1
        schema.pop("definitions", None)
    if "$defs" in schema:  # pydantic 2
        schema.pop("$defs", None)
    title = schema.pop("title", "")
    default_description = schema.pop("description", "")
    return {
        "name": name or title,
        "description": description or default_description,
        "parameters": _rm_titles(schema) if rm_titles else schema,
    }


def _convert_pydantic_to_openai_function(
    model: type,
    *,
    name: Optional[str] = None,
    description: Optional[str] = None,
    rm_titles: bool = True,
) -> FunctionDescription:
    """Convert a Pydantic model to a function description for the OpenAI API.

    Args:
        model: The Pydantic model to convert.
        name: The name of the function. If not provided, the title of the schema
            will be used.
        description: The description of the function. If not provided, the
            description of the schema will be used.
        rm_titles: Whether to remove titles from the schema. Defaults to True.

    Returns:
        The function description.
    """
    if hasattr(model, "model_json_schema"):
        schema = model.model_json_schema()  # Pydantic 2
    elif hasattr(model, "schema"):
        schema = model.schema()  # Pydantic 1
    else:
        msg = "Model must be a Pydantic model."
        raise TypeError(msg)
    return _convert_json_schema_to_openai_function(
        schema, name=name, description=description, rm_titles=rm_titles
    )


convert_pydantic_to_openai_function = deprecated(
    "0.1.16",
    alternative="langchain_core.utils.function_calling.convert_to_openai_function()",
    removal="1.0",
)(_convert_pydantic_to_openai_function)
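# Usage sketch (illustrative only, not part of the module's public API): converting
# a simple Pydantic model with `_convert_pydantic_to_openai_function`. The `Multiply`
# model below is hypothetical.
def _example_pydantic_conversion() -> FunctionDescription:
    from pydantic import Field

    class Multiply(BaseModel):
        """Multiply two integers."""

        a: int = Field(..., description="First factor")
        b: int = Field(..., description="Second factor")

    # Roughly produces:
    # {
    #     "name": "Multiply",
    #     "description": "Multiply two integers.",
    #     "parameters": {
    #         "type": "object",
    #         "properties": {
    #             "a": {"description": "First factor", "type": "integer"},
    #             "b": {"description": "Second factor", "type": "integer"},
    #         },
    #         "required": ["a", "b"],
    #     },
    # }
    # Note that the schema "title" keys are stripped by _rm_titles and the model's
    # title/docstring become the function name and description.
    return _convert_pydantic_to_openai_function(Multiply)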
[docs]@deprecated("0.1.16",alternative="langchain_core.utils.function_calling.convert_to_openai_tool()",removal="1.0",)defconvert_pydantic_to_openai_tool(model:type[BaseModel],*,name:Optional[str]=None,description:Optional[str]=None,)->ToolDescription:"""Converts a Pydantic model to a function description for the OpenAI API. Args: model: The Pydantic model to convert. name: The name of the function. If not provided, the title of the schema will be used. description: The description of the function. If not provided, the description of the schema will be used. Returns: The tool description. """function=_convert_pydantic_to_openai_function(model,name=name,description=description)return{"type":"function","function":function}
def _get_python_function_name(function: Callable) -> str:
    """Get the name of a Python function."""
    return function.__name__


def _convert_python_function_to_openai_function(
    function: Callable,
) -> FunctionDescription:
    """Convert a Python function to an OpenAI function-calling API compatible dict.

    Assumes the Python function has type hints and a docstring with a description.
    If the docstring has Google Python style argument descriptions, these will be
    included as well.

    Args:
        function: The Python function to convert.

    Returns:
        The OpenAI function description.
    """
    from langchain_core.tools.base import create_schema_from_function

    func_name = _get_python_function_name(function)
    model = create_schema_from_function(
        func_name,
        function,
        filter_args=(),
        parse_docstring=True,
        error_on_invalid_docstring=False,
        include_injected=False,
    )
    return _convert_pydantic_to_openai_function(
        model,
        name=func_name,
        description=model.__doc__,
    )


convert_python_function_to_openai_function = deprecated(
    "0.1.16",
    alternative="langchain_core.utils.function_calling.convert_to_openai_function()",
    removal="1.0",
)(_convert_python_function_to_openai_function)


def _convert_typed_dict_to_openai_function(typed_dict: type) -> FunctionDescription:
    visited: dict = {}
    model = cast(
        "type[BaseModel]",
        _convert_any_typed_dicts_to_pydantic(typed_dict, visited=visited),
    )
    return _convert_pydantic_to_openai_function(model)  # type: ignore


_MAX_TYPED_DICT_RECURSION = 25


def _convert_any_typed_dicts_to_pydantic(
    type_: type,
    *,
    visited: dict,
    depth: int = 0,
) -> type:
    from pydantic.v1 import Field as Field_v1
    from pydantic.v1 import create_model as create_model_v1

    if type_ in visited:
        return visited[type_]
    elif depth >= _MAX_TYPED_DICT_RECURSION:
        return type_
    elif is_typeddict(type_):
        typed_dict = type_
        docstring = inspect.getdoc(typed_dict)
        annotations_ = typed_dict.__annotations__
        description, arg_descriptions = _parse_google_docstring(
            docstring, list(annotations_)
        )
        fields: dict = {}
        for arg, arg_type in annotations_.items():
            if get_origin(arg_type) is Annotated:
                annotated_args = get_args(arg_type)
                new_arg_type = _convert_any_typed_dicts_to_pydantic(
                    annotated_args[0], depth=depth + 1, visited=visited
                )
                field_kwargs = dict(zip(("default", "description"), annotated_args[1:]))
                if (field_desc := field_kwargs.get("description")) and not isinstance(
                    field_desc, str
                ):
                    msg = (
                        f"Invalid annotation for field {arg}. Third argument to "
                        f"Annotated must be a string description, received value of "
                        f"type {type(field_desc)}."
                    )
                    raise ValueError(msg)
                elif arg_desc := arg_descriptions.get(arg):
                    field_kwargs["description"] = arg_desc
                else:
                    pass
                fields[arg] = (new_arg_type, Field_v1(**field_kwargs))
            else:
                new_arg_type = _convert_any_typed_dicts_to_pydantic(
                    arg_type, depth=depth + 1, visited=visited
                )
                field_kwargs = {"default": ...}
                if arg_desc := arg_descriptions.get(arg):
                    field_kwargs["description"] = arg_desc
                fields[arg] = (new_arg_type, Field_v1(**field_kwargs))
        model = create_model_v1(typed_dict.__name__, **fields)
        model.__doc__ = description
        visited[typed_dict] = model
        return model
    elif (origin := get_origin(type_)) and (type_args := get_args(type_)):
        subscriptable_origin = _py_38_safe_origin(origin)
        type_args = tuple(
            _convert_any_typed_dicts_to_pydantic(arg, depth=depth + 1, visited=visited)
            for arg in type_args  # type: ignore[index]
        )
        return subscriptable_origin[type_args]  # type: ignore[index]
    else:
        return type_


def _format_tool_to_openai_function(tool: BaseTool) -> FunctionDescription:
    """Format tool into the OpenAI function API.

    Args:
        tool: The tool to format.

    Returns:
        The function description.
    """
    from langchain_core.tools import simple

    is_simple_oai_tool = isinstance(tool, simple.Tool) and not tool.args_schema
    if tool.tool_call_schema and not is_simple_oai_tool:
        if isinstance(tool.tool_call_schema, dict):
            return _convert_json_schema_to_openai_function(
                tool.tool_call_schema, name=tool.name, description=tool.description
            )
        elif issubclass(tool.tool_call_schema, (BaseModel, BaseModelV1)):
            return _convert_pydantic_to_openai_function(
                tool.tool_call_schema, name=tool.name, description=tool.description
            )
        else:
            error_msg = (
                f"Unsupported tool call schema: {tool.tool_call_schema}. "
                "Tool call schema must be a JSON schema dict or a Pydantic model."
            )
            raise ValueError(error_msg)
    else:
        return {
            "name": tool.name,
            "description": tool.description,
            "parameters": {
                # This is a hack to get around the fact that some tools
                # do not expose an args_schema, and expect an argument
                # which is a string.
                # And Open AI does not support an array type for the
                # parameters.
                "properties": {
                    "__arg1": {"title": "__arg1", "type": "string"},
                },
                "required": ["__arg1"],
                "type": "object",
            },
        }


format_tool_to_openai_function = deprecated(
    "0.1.16",
    alternative="langchain_core.utils.function_calling.convert_to_openai_function()",
    removal="1.0",
)(_format_tool_to_openai_function)
[docs]@deprecated("0.1.16",alternative="langchain_core.utils.function_calling.convert_to_openai_tool()",removal="1.0",)defformat_tool_to_openai_tool(tool:BaseTool)->ToolDescription:"""Format tool into the OpenAI function API. Args: tool: The tool to format. Returns: The tool description. """function=_format_tool_to_openai_function(tool)return{"type":"function","function":function}
def convert_to_openai_function(
    function: Union[dict[str, Any], type, Callable, BaseTool],
    *,
    strict: Optional[bool] = None,
) -> dict[str, Any]:
    """Convert a raw function/class to an OpenAI function.

    Args:
        function:
            A dictionary, Pydantic BaseModel class, TypedDict class, a LangChain
            Tool object, or a Python function. If a dictionary is passed in, it is
            assumed to already be a valid OpenAI function, a JSON schema with
            top-level 'title' key specified, an Anthropic format tool, or an
            Amazon Bedrock Converse format tool.
        strict:
            If True, model output is guaranteed to exactly match the JSON Schema
            provided in the function definition. If None, ``strict`` argument will
            not be included in function definition.

    Returns:
        A dict version of the passed in function which is compatible with the OpenAI
        function-calling API.

    Raises:
        ValueError: If function is not in a supported format.

    .. versionchanged:: 0.2.29

        ``strict`` arg added.

    .. versionchanged:: 0.3.13

        Support for Anthropic format tools added.

    .. versionchanged:: 0.3.14

        Support for Amazon Bedrock Converse format tools added.

    .. versionchanged:: 0.3.16

        'description' and 'parameters' keys are now optional. Only 'name' is
        required and guaranteed to be part of the output.
    """
    from langchain_core.tools import BaseTool

    # an Anthropic format tool
    if isinstance(function, dict) and all(
        k in function for k in ("name", "input_schema")
    ):
        oai_function = {
            "name": function["name"],
            "parameters": function["input_schema"],
        }
        if "description" in function:
            oai_function["description"] = function["description"]
    # an Amazon Bedrock Converse format tool
    elif isinstance(function, dict) and "toolSpec" in function:
        oai_function = {
            "name": function["toolSpec"]["name"],
            "parameters": function["toolSpec"]["inputSchema"]["json"],
        }
        if "description" in function["toolSpec"]:
            oai_function["description"] = function["toolSpec"]["description"]
    # already in OpenAI function format
    elif isinstance(function, dict) and "name" in function:
        oai_function = {
            k: v
            for k, v in function.items()
            if k in ("name", "description", "parameters", "strict")
        }
    # a JSON schema with title and description
    elif isinstance(function, dict) and "title" in function:
        function_copy = function.copy()
        oai_function = {"name": function_copy.pop("title")}
        if "description" in function_copy:
            oai_function["description"] = function_copy.pop("description")
        if function_copy and "properties" in function_copy:
            oai_function["parameters"] = function_copy
    elif isinstance(function, type) and is_basemodel_subclass(function):
        oai_function = cast("dict", _convert_pydantic_to_openai_function(function))
    elif is_typeddict(function):
        oai_function = cast(
            "dict", _convert_typed_dict_to_openai_function(cast("type", function))
        )
    elif isinstance(function, BaseTool):
        oai_function = cast("dict", _format_tool_to_openai_function(function))
    elif callable(function):
        oai_function = cast(
            "dict", _convert_python_function_to_openai_function(function)
        )
    else:
        msg = (
            f"Unsupported function\n\n{function}\n\nFunctions must be passed in"
            " as Dict, pydantic.BaseModel, or Callable. If they're a dict they must"
            " either be in OpenAI function format or valid JSON schema with top-level"
            " 'title' and 'description' keys."
        )
        raise ValueError(msg)

    if strict is not None:
        if "strict" in oai_function and oai_function["strict"] != strict:
            msg = (
                f"Tool/function already has a 'strict' key with value "
                f"{oai_function['strict']} which is different from the explicit "
                f"`strict` arg received {strict=}."
            )
            raise ValueError(msg)
        oai_function["strict"] = strict
        if strict:
            # As of 08/06/24, OpenAI requires that additionalProperties be supplied
            # and set to False if strict is True.
            # All properties layer needs 'additionalProperties=False'
            oai_function["parameters"] = _recursive_set_additional_properties_false(
                oai_function["parameters"]
            )
    return oai_function
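# Usage sketch (illustrative only, not part of the module's public API): converting
# a plain Python function with a Google-style docstring. The `multiply` function
# below is hypothetical.
def _example_function_conversion() -> dict[str, Any]:
    def multiply(a: int, b: int) -> int:
        """Multiply two integers.

        Args:
            a: First factor.
            b: Second factor.
        """
        return a * b

    # Roughly produces a dict with name "multiply", the first docstring block as the
    # description, and an object schema whose "a" and "b" properties carry the
    # argument descriptions parsed from the Args: section. Because strict=True is
    # passed, the result also gets "strict": True and "additionalProperties": False
    # on the parameters schema.
    return convert_to_openai_function(multiply, strict=True)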
def convert_to_openai_tool(
    tool: Union[dict[str, Any], type[BaseModel], Callable, BaseTool],
    *,
    strict: Optional[bool] = None,
) -> dict[str, Any]:
    """Convert a tool-like object to an OpenAI tool schema.

    OpenAI tool schema reference:
    https://platform.openai.com/docs/api-reference/chat/create#chat-create-tools

    Args:
        tool:
            Either a dictionary, a pydantic.BaseModel class, Python function, or
            BaseTool. If a dictionary is passed in, it is assumed to already be a
            valid OpenAI function, a JSON schema with top-level 'title' key
            specified, an Anthropic format tool, or an Amazon Bedrock Converse
            format tool.
        strict:
            If True, model output is guaranteed to exactly match the JSON Schema
            provided in the function definition. If None, ``strict`` argument will
            not be included in tool definition.

    Returns:
        A dict version of the passed in tool which is compatible with the
        OpenAI tool-calling API.

    .. versionchanged:: 0.2.29

        ``strict`` arg added.

    .. versionchanged:: 0.3.13

        Support for Anthropic format tools added.

    .. versionchanged:: 0.3.14

        Support for Amazon Bedrock Converse format tools added.

    .. versionchanged:: 0.3.16

        'description' and 'parameters' keys are now optional. Only 'name' is
        required and guaranteed to be part of the output.

    .. versionchanged:: 0.3.44

        Return OpenAI Responses API-style tools unchanged. This includes
        any dict with "type" in "file_search", "function",
        "computer_use_preview", "web_search_preview".
    """
    if isinstance(tool, dict):
        if tool.get("type") in ("function", "file_search", "computer_use_preview"):
            return tool
        # As of 03.12.25 can be "web_search_preview" or "web_search_preview_2025_03_11"
        if (tool.get("type") or "").startswith("web_search_preview"):
            return tool
    oai_function = convert_to_openai_function(tool, strict=strict)
    return {"type": "function", "function": oai_function}
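# Usage sketch (illustrative only, not part of the module's public API): dicts that
# already look like OpenAI Responses API tools pass through unchanged, everything
# else is wrapped as {"type": "function", "function": ...}. The tool dicts below
# are hypothetical.
def _example_tool_conversion() -> dict[str, Any]:
    web_search = {"type": "web_search_preview"}
    assert convert_to_openai_tool(web_search) is web_search  # returned as-is

    anthropic_tool = {
        "name": "get_weather",
        "description": "Get the current weather for a location.",
        "input_schema": {
            "type": "object",
            "properties": {"location": {"type": "string"}},
            "required": ["location"],
        },
    }
    # Roughly produces:
    # {"type": "function",
    #  "function": {"name": "get_weather",
    #               "description": "Get the current weather for a location.",
    #               "parameters": {...the input_schema above...}}}
    return convert_to_openai_tool(anthropic_tool)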
def convert_to_json_schema(
    schema: Union[dict[str, Any], type[BaseModel], Callable, BaseTool],
    *,
    strict: Optional[bool] = None,
) -> dict[str, Any]:
    """Convert a schema representation to a JSON schema."""
    openai_tool = convert_to_openai_tool(schema, strict=strict)
    if (
        not isinstance(openai_tool, dict)
        or "function" not in openai_tool
        or "name" not in openai_tool["function"]
    ):
        error_message = "Input must be a valid OpenAI-format tool."
        raise ValueError(error_message)

    openai_function = openai_tool["function"]
    json_schema = {}
    json_schema["title"] = openai_function["name"]

    if "description" in openai_function:
        json_schema["description"] = openai_function["description"]

    if "parameters" in openai_function:
        parameters = openai_function["parameters"].copy()
        json_schema.update(parameters)

    return json_schema
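# Usage sketch (illustrative only, not part of the module's public API): converting
# a Pydantic model to a plain JSON schema. The `Multiply` model below is
# hypothetical.
def _example_json_schema_conversion() -> dict[str, Any]:
    from pydantic import Field

    class Multiply(BaseModel):
        """Multiply two integers."""

        a: int = Field(..., description="First factor")
        b: int = Field(..., description="Second factor")

    # Roughly produces:
    # {"title": "Multiply", "description": "Multiply two integers.",
    #  "type": "object", "properties": {...}, "required": ["a", "b"]}
    # i.e. the OpenAI "function" wrapper is stripped and the name/description are
    # mapped back to the JSON schema "title"/"description" keys.
    return convert_to_json_schema(Multiply)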
@beta()
def tool_example_to_messages(
    input: str,
    tool_calls: list[BaseModel],
    tool_outputs: Optional[list[str]] = None,
    *,
    ai_response: Optional[str] = None,
) -> list[BaseMessage]:
    """Convert an example into a list of messages that can be fed into an LLM.

    This code is an adapter that converts a single example to a list of messages
    that can be fed into a chat model.

    The list of messages per example by default corresponds to:

    1) HumanMessage: contains the content from which content should be extracted.
    2) AIMessage: contains the extracted information from the model
    3) ToolMessage: contains confirmation to the model that the model requested a
       tool correctly.

    If `ai_response` is specified, there will be a final AIMessage with that response.

    The ToolMessage is required because some chat models are hyper-optimized for
    agents rather than for an extraction use case.

    Arguments:
        input: string, the user input
        tool_calls: List[BaseModel], a list of tool calls represented as Pydantic
            BaseModels
        tool_outputs: Optional[List[str]], a list of tool call outputs.
            Does not need to be provided. If not provided, a placeholder value
            will be inserted. Defaults to None.
        ai_response: Optional[str], if provided, content for a final AIMessage.

    Returns:
        A list of messages

    Examples:

        .. code-block:: python

            from typing import List, Optional
            from pydantic import BaseModel, Field
            from langchain_openai import ChatOpenAI

            class Person(BaseModel):
                '''Information about a person.'''
                name: Optional[str] = Field(..., description="The name of the person")
                hair_color: Optional[str] = Field(
                    ..., description="The color of the person's hair if known"
                )
                height_in_meters: Optional[str] = Field(
                    ..., description="Height in METERs"
                )

            examples = [
                (
                    "The ocean is vast and blue. It's more than 20,000 feet deep.",
                    Person(name=None, height_in_meters=None, hair_color=None),
                ),
                (
                    "Fiona traveled far from France to Spain.",
                    Person(name="Fiona", height_in_meters=None, hair_color=None),
                ),
            ]

            messages = []

            for txt, tool_call in examples:
                messages.extend(
                    tool_example_to_messages(txt, [tool_call])
                )
    """
    messages: list[BaseMessage] = [HumanMessage(content=input)]
    openai_tool_calls = []
    for tool_call in tool_calls:
        openai_tool_calls.append(
            {
                "id": str(uuid.uuid4()),
                "type": "function",
                "function": {
                    # The name of the function right now corresponds to the name
                    # of the pydantic model. This is implicit in the API right now,
                    # and will be improved over time.
                    "name": tool_call.__class__.__name__,
                    "arguments": tool_call.model_dump_json(),
                },
            }
        )
    messages.append(
        AIMessage(content="", additional_kwargs={"tool_calls": openai_tool_calls})
    )
    tool_outputs = tool_outputs or ["You have correctly called this tool."] * len(
        openai_tool_calls
    )
    for output, tool_call_dict in zip(tool_outputs, openai_tool_calls):
        messages.append(ToolMessage(content=output, tool_call_id=tool_call_dict["id"]))  # type: ignore
    if ai_response:
        messages.append(AIMessage(content=ai_response))
    return messages
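# Usage sketch (illustrative only, not part of the module's public API): the message
# sequence produced for a single extraction example, without binding to any chat
# model. The `Person` model below is hypothetical.
def _example_tool_messages() -> list[BaseMessage]:
    class Person(BaseModel):
        """Information about a person."""

        name: Optional[str] = None
        hair_color: Optional[str] = None

    messages = tool_example_to_messages(
        "Fiona traveled far from France to Spain.",
        [Person(name="Fiona", hair_color=None)],
        ai_response="Extraction complete.",
    )
    # Roughly:
    # [HumanMessage("Fiona traveled far from France to Spain."),
    #  AIMessage(content="", additional_kwargs={"tool_calls": [...]}),
    #  ToolMessage("You have correctly called this tool.", tool_call_id=...),
    #  AIMessage("Extraction complete.")]
    return messages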
def _parse_google_docstring(
    docstring: Optional[str],
    args: list[str],
    *,
    error_on_invalid_docstring: bool = False,
) -> tuple[str, dict]:
    """Parse the function and argument descriptions from the docstring of a function.

    Assumes the function docstring follows Google Python style guide.
    """
    if docstring:
        docstring_blocks = docstring.split("\n\n")
        if error_on_invalid_docstring:
            filtered_annotations = {
                arg for arg in args if arg not in ("run_manager", "callbacks", "return")
            }
            if filtered_annotations and (
                len(docstring_blocks) < 2
                or not any(block.startswith("Args:") for block in docstring_blocks[1:])
            ):
                msg = "Found invalid Google-Style docstring."
                raise ValueError(msg)
        descriptors = []
        args_block = None
        past_descriptors = False
        for block in docstring_blocks:
            if block.startswith("Args:"):
                args_block = block
                break
            elif block.startswith(("Returns:", "Example:")):
                # Don't break in case Args come after
                past_descriptors = True
            elif not past_descriptors:
                descriptors.append(block)
            else:
                continue
        description = " ".join(descriptors)
    else:
        if error_on_invalid_docstring:
            msg = "Found invalid Google-Style docstring."
            raise ValueError(msg)
        description = ""
        args_block = None
    arg_descriptions = {}
    if args_block:
        arg = None
        for line in args_block.split("\n")[1:]:
            if ":" in line:
                arg, desc = line.split(":", maxsplit=1)
                arg = arg.strip()
                arg_name, _, _annotations = arg.partition(" ")
                if _annotations.startswith("(") and _annotations.endswith(")"):
                    arg = arg_name
                arg_descriptions[arg] = desc.strip()
            elif arg:
                arg_descriptions[arg] += " " + line.strip()
    return description, arg_descriptions


def _py_38_safe_origin(origin: type) -> type:
    origin_union_type_map: dict[type, Any] = (
        {types.UnionType: Union} if hasattr(types, "UnionType") else {}
    )
    origin_map: dict[type, Any] = {
        dict: dict,
        list: list,
        tuple: tuple,
        set: set,
        collections.abc.Iterable: typing.Iterable,
        collections.abc.Mapping: typing.Mapping,
        collections.abc.Sequence: typing.Sequence,
        collections.abc.MutableMapping: typing.MutableMapping,
        **origin_union_type_map,
    }
    return cast("type", origin_map.get(origin, origin))


def _recursive_set_additional_properties_false(
    schema: dict[str, Any],
) -> dict[str, Any]:
    if isinstance(schema, dict):
        # Check if 'required' is a key at the current level or if the schema is empty,
        # in which case additionalProperties still needs to be specified.
        if "required" in schema or (
            "properties" in schema and not schema["properties"]
        ):
            schema["additionalProperties"] = False

        # Recursively check 'properties' and 'items' if they exist
        if "properties" in schema:
            for value in schema["properties"].values():
                _recursive_set_additional_properties_false(value)
        if "items" in schema:
            _recursive_set_additional_properties_false(schema["items"])

    return schema
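# Usage sketch (illustrative only, not part of the module's public API): what
# _parse_google_docstring extracts from a Google-style docstring.
def _example_docstring_parsing() -> tuple[str, dict]:
    docstring = (
        "Multiply two integers.\n"
        "\n"
        "Args:\n"
        "    a: First factor.\n"
        "    b: Second factor."
    )
    # Roughly returns:
    # ("Multiply two integers.", {"a": "First factor.", "b": "Second factor."})
    return _parse_google_docstring(docstring, ["a", "b"])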