Source code for langchain.chains.router.llm_router
"""Base classes for LLM-powered router chains."""from__future__importannotationsfromtypingimportAny,Dict,List,Optional,Type,castfromlangchain_core._apiimportdeprecatedfromlangchain_core.callbacksimport(AsyncCallbackManagerForChainRun,CallbackManagerForChainRun,)fromlangchain_core.exceptionsimportOutputParserExceptionfromlangchain_core.language_modelsimportBaseLanguageModelfromlangchain_core.output_parsersimportBaseOutputParserfromlangchain_core.promptsimportBasePromptTemplatefromlangchain_core.pydantic_v1importroot_validatorfromlangchain_core.utils.jsonimportparse_and_check_json_markdownfromlangchain.chainsimportLLMChainfromlangchain.chains.router.baseimportRouterChain
@deprecated(
    since="0.2.12",
    removal="1.0",
    message=(
        "Use RunnableLambda to select from multiple prompt templates. See example "
        "in API reference: "
        "https://api.python.langchain.com/en/latest/chains/langchain.chains.router.llm_router.LLMRouterChain.html"  # noqa: E501
    ),
)
class LLMRouterChain(RouterChain):
    """A router chain that uses an LLM chain to perform routing.

    This class is deprecated. See below for a replacement, which offers several
    benefits, including streaming and batch support.

    Below is an example implementation:

        .. code-block:: python

            from operator import itemgetter
            from typing import Literal
            from typing_extensions import TypedDict

            from langchain_core.output_parsers import StrOutputParser
            from langchain_core.prompts import ChatPromptTemplate
            from langchain_core.runnables import RunnableLambda, RunnablePassthrough
            from langchain_openai import ChatOpenAI

            llm = ChatOpenAI(model="gpt-4o-mini")

            prompt_1 = ChatPromptTemplate.from_messages(
                [
                    ("system", "You are an expert on animals."),
                    ("human", "{query}"),
                ]
            )
            prompt_2 = ChatPromptTemplate.from_messages(
                [
                    ("system", "You are an expert on vegetables."),
                    ("human", "{query}"),
                ]
            )

            chain_1 = prompt_1 | llm | StrOutputParser()
            chain_2 = prompt_2 | llm | StrOutputParser()

            route_system = "Route the user's query to either the animal or vegetable expert."
            route_prompt = ChatPromptTemplate.from_messages(
                [
                    ("system", route_system),
                    ("human", "{query}"),
                ]
            )


            class RouteQuery(TypedDict):
                \"\"\"Route query to destination.\"\"\"

                destination: Literal["animal", "vegetable"]


            route_chain = (
                route_prompt
                | llm.with_structured_output(RouteQuery)
                | itemgetter("destination")
            )

            chain = {
                "destination": route_chain,  # "animal" or "vegetable"
                "query": lambda x: x["query"],  # pass through input query
            } | RunnableLambda(
                # if animal, chain_1. otherwise, chain_2.
                lambda x: chain_1 if x["destination"] == "animal" else chain_2,
            )

            chain.invoke({"query": "what color are carrots"})
    """  # noqa: E501

    llm_chain: LLMChain
    """LLM chain used to perform routing."""

    @root_validator(pre=False, skip_on_failure=True)
    def validate_prompt(cls, values: dict) -> dict:
        prompt = values["llm_chain"].prompt
        if prompt.output_parser is None:
            raise ValueError(
                "LLMRouterChain requires base llm_chain prompt to have an output"
                " parser that converts LLM text output to a dictionary with keys"
                " 'destination' and 'next_inputs'. Received a prompt with no output"
                " parser."
            )
        return values

    @property
    def input_keys(self) -> List[str]:
        """Will be whatever keys the LLM chain prompt expects.

        :meta private:
        """
        return self.llm_chain.input_keys

    def _validate_outputs(self, outputs: Dict[str, Any]) -> None:
        super()._validate_outputs(outputs)
        if not isinstance(outputs["next_inputs"], dict):
            raise ValueError("Expected 'next_inputs' to be a dict.")

    def _call(
        self,
        inputs: Dict[str, Any],
        run_manager: Optional[CallbackManagerForChainRun] = None,
    ) -> Dict[str, Any]:
        _run_manager = run_manager or CallbackManagerForChainRun.get_noop_manager()
        callbacks = _run_manager.get_child()

        # Predict raw LLM text, then parse it into a routing dict with
        # 'destination' and 'next_inputs' keys via the prompt's output parser.
        prediction = self.llm_chain.predict(callbacks=callbacks, **inputs)
        output = cast(
            Dict[str, Any],
            self.llm_chain.prompt.output_parser.parse(prediction),
        )
        return output

    async def _acall(
        self,
        inputs: Dict[str, Any],
        run_manager: Optional[AsyncCallbackManagerForChainRun] = None,
    ) -> Dict[str, Any]:
        _run_manager = (
            run_manager or AsyncCallbackManagerForChainRun.get_noop_manager()
        )
        callbacks = _run_manager.get_child()
        output = cast(
            Dict[str, Any],
            await self.llm_chain.apredict_and_parse(callbacks=callbacks, **inputs),
        )
        return output

    @classmethod
    def from_llm(
        cls,
        llm: BaseLanguageModel,
        prompt: BasePromptTemplate,
        **kwargs: Any,
    ) -> LLMRouterChain:
        """Convenience constructor that wraps the llm and prompt in an LLMChain."""
        llm_chain = LLMChain(llm=llm, prompt=prompt)
        return cls(llm_chain=llm_chain, **kwargs)
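# ---------------------------------------------------------------------------
# Illustrative sketch, not part of the upstream module: wiring an
# LLMRouterChain by hand. The routing prompt must carry an output parser that
# turns raw LLM text into a dict with 'destination' and 'next_inputs' keys,
# otherwise validate_prompt above rejects it. FakeListLLM (assumed available
# from langchain_core.language_models.fake) stands in for a real model so the
# demo is deterministic, and _DemoRouterParser is a hypothetical minimal
# stand-in for the RouterOutputParser defined later in this module.
def _demo_llm_router_chain() -> None:
    from langchain_core.language_models.fake import FakeListLLM
    from langchain_core.prompts import PromptTemplate

    class _DemoRouterParser(BaseOutputParser[Dict[str, Any]]):
        """Parse a markdown JSON block into a routing dict."""

        def parse(self, text: str) -> Dict[str, Any]:
            parsed = parse_and_check_json_markdown(
                text, ["destination", "next_inputs"]
            )
            # Destination chains expect a mapping, so nest the raw string.
            parsed["next_inputs"] = {"input": parsed["next_inputs"]}
            return parsed

    prompt = PromptTemplate(
        template="Route this query: {input}",
        input_variables=["input"],
        output_parser=_DemoRouterParser(),
    )
    llm = FakeListLLM(
        responses=[
            '```json\n{"destination": "animal",'
            ' "next_inputs": "what do pandas eat"}\n```'
        ]
    )
    router = LLMRouterChain.from_llm(llm, prompt)
    # The result carries the routing decision, e.g. destination 'animal' and
    # next_inputs {'input': 'what do pandas eat'}, alongside the input.
    print(router.invoke({"input": "what do pandas eat"}))
# ---------------------------------------------------------------------------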
class RouterOutputParser(BaseOutputParser[Dict[str, str]]):
    """Parser for output of router chain in the multi-prompt chain."""

    default_destination: str = "DEFAULT"
    next_inputs_type: Type = str
    next_inputs_inner_key: str = "input"

    def parse(self, text: str) -> Dict[str, Any]:
        try:
            expected_keys = ["destination", "next_inputs"]
            parsed = parse_and_check_json_markdown(text, expected_keys)
            if not isinstance(parsed["destination"], str):
                raise ValueError("Expected 'destination' to be a string.")
            if not isinstance(parsed["next_inputs"], self.next_inputs_type):
                raise ValueError(
                    f"Expected 'next_inputs' to be {self.next_inputs_type}."
                )
            # Nest the raw next_inputs value under the inner key so that
            # destination chains receive a mapping of inputs.
            parsed["next_inputs"] = {self.next_inputs_inner_key: parsed["next_inputs"]}
            if (
                parsed["destination"].strip().lower()
                == self.default_destination.lower()
            ):
                # The default destination is signalled to callers as None.
                parsed["destination"] = None
            else:
                parsed["destination"] = parsed["destination"].strip()
            return parsed
        except Exception as e:
            raise OutputParserException(
                f"Parsing text\n{text}\n raised following error:\n{e}"
            ) from e
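# ---------------------------------------------------------------------------
# Illustrative sketch, not part of the upstream module: what
# RouterOutputParser accepts and produces. It expects a markdown ```json```
# block containing 'destination' and 'next_inputs'; the raw next_inputs value
# is re-nested under next_inputs_inner_key, and a destination equal to
# default_destination (case-insensitive) is mapped to None so callers can
# fall back to a default chain.
if __name__ == "__main__":
    parser = RouterOutputParser()

    routed = parser.parse(
        '```json\n{"destination": "animal",'
        ' "next_inputs": "what do pandas eat"}\n```'
    )
    assert routed == {
        "destination": "animal",
        "next_inputs": {"input": "what do pandas eat"},
    }

    # Any casing of the default destination signals "no specific route".
    fallback = parser.parse(
        '```json\n{"destination": "default", "next_inputs": "hi"}\n```'
    )
    assert fallback["destination"] is None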