# Source code for langchain.chains.qa_with_sources.loading
"""Load question answering with sources chains."""

from __future__ import annotations

from typing import Any, Mapping, Optional, Protocol

from langchain_core._api import deprecated
from langchain_core.language_models import BaseLanguageModel
from langchain_core.prompts import BasePromptTemplate

from langchain.chains.combine_documents.base import BaseCombineDocumentsChain
from langchain.chains.combine_documents.map_reduce import MapReduceDocumentsChain
from langchain.chains.combine_documents.map_rerank import MapRerankDocumentsChain
from langchain.chains.combine_documents.reduce import ReduceDocumentsChain
from langchain.chains.combine_documents.refine import RefineDocumentsChain
from langchain.chains.combine_documents.stuff import StuffDocumentsChain
from langchain.chains.llm import LLMChain
from langchain.chains.qa_with_sources import (
    map_reduce_prompt,
    refine_prompts,
    stuff_prompt,
)
from langchain.chains.question_answering.map_rerank_prompt import (
    PROMPT as MAP_RERANK_PROMPT,
)
class LoadingCallable(Protocol):
    """Interface for loading the combine documents chain."""

    def __call__(
        self, llm: BaseLanguageModel, **kwargs: Any
    ) -> BaseCombineDocumentsChain:
        """Callable to load the combine documents chain."""
def _load_map_rerank_chain(
    llm: BaseLanguageModel,
    prompt: BasePromptTemplate = MAP_RERANK_PROMPT,
    verbose: bool = False,
    document_variable_name: str = "context",
    rank_key: str = "score",
    answer_key: str = "answer",
    **kwargs: Any,
) -> MapRerankDocumentsChain:
    """Build a map-rerank chain: answer per document, keep the best-scored one.

    Args:
        llm: Language model used to answer over each document.
        prompt: Prompt that must ask the model for both an answer and a score.
        verbose: Whether the inner LLM chain runs in verbose mode.
        document_variable_name: Prompt variable that receives each document.
        rank_key: Output key holding the score used for ranking.
        answer_key: Output key holding the candidate answer.
        **kwargs: Extra arguments forwarded to ``MapRerankDocumentsChain``.
    """
    llm_chain = LLMChain(llm=llm, prompt=prompt, verbose=verbose)
    return MapRerankDocumentsChain(
        llm_chain=llm_chain,
        rank_key=rank_key,
        answer_key=answer_key,
        document_variable_name=document_variable_name,
        **kwargs,
    )


def _load_stuff_chain(
    llm: BaseLanguageModel,
    prompt: BasePromptTemplate = stuff_prompt.PROMPT,
    document_prompt: BasePromptTemplate = stuff_prompt.EXAMPLE_PROMPT,
    document_variable_name: str = "summaries",
    verbose: Optional[bool] = None,
    **kwargs: Any,
) -> StuffDocumentsChain:
    """Build a stuff chain: concatenate all documents into a single prompt.

    Args:
        llm: Language model that answers over the stuffed context.
        prompt: Prompt receiving the combined document text.
        document_prompt: Prompt used to format each individual document.
        document_variable_name: Prompt variable that receives the documents.
        verbose: Whether chains run in verbose mode.
        **kwargs: Extra arguments forwarded to ``StuffDocumentsChain``.
    """
    llm_chain = LLMChain(llm=llm, prompt=prompt, verbose=verbose)  # type: ignore[arg-type]
    return StuffDocumentsChain(
        llm_chain=llm_chain,
        document_variable_name=document_variable_name,
        document_prompt=document_prompt,
        verbose=verbose,  # type: ignore[arg-type]
        **kwargs,
    )


def _load_map_reduce_chain(
    llm: BaseLanguageModel,
    question_prompt: BasePromptTemplate = map_reduce_prompt.QUESTION_PROMPT,
    combine_prompt: BasePromptTemplate = map_reduce_prompt.COMBINE_PROMPT,
    document_prompt: BasePromptTemplate = map_reduce_prompt.EXAMPLE_PROMPT,
    combine_document_variable_name: str = "summaries",
    map_reduce_document_variable_name: str = "context",
    collapse_prompt: Optional[BasePromptTemplate] = None,
    reduce_llm: Optional[BaseLanguageModel] = None,
    collapse_llm: Optional[BaseLanguageModel] = None,
    verbose: Optional[bool] = None,
    token_max: int = 3000,
    **kwargs: Any,
) -> MapReduceDocumentsChain:
    """Build a map-reduce chain: answer per document, then combine the answers.

    Args:
        llm: Language model for the map step (and, by default, the rest).
        question_prompt: Prompt applied to each document in the map step.
        combine_prompt: Prompt that combines the mapped answers.
        document_prompt: Prompt used to format each individual document.
        combine_document_variable_name: Variable name in ``combine_prompt``.
        map_reduce_document_variable_name: Variable name in ``question_prompt``.
        collapse_prompt: Optional prompt for collapsing intermediate results
            that exceed ``token_max``; required if ``collapse_llm`` is given.
        reduce_llm: Optional separate model for the reduce step.
        collapse_llm: Optional separate model for the collapse step.
        verbose: Whether chains run in verbose mode.
        token_max: Token budget that triggers collapsing before the reduce.
        **kwargs: Extra arguments forwarded to ``MapReduceDocumentsChain``.

    Raises:
        ValueError: If ``collapse_llm`` is provided without ``collapse_prompt``.
    """
    map_chain = LLMChain(llm=llm, prompt=question_prompt, verbose=verbose)  # type: ignore[arg-type]
    # Fall back to the map model when no dedicated reduce model is given.
    _reduce_llm = reduce_llm or llm
    reduce_chain = LLMChain(llm=_reduce_llm, prompt=combine_prompt, verbose=verbose)  # type: ignore[arg-type]
    combine_documents_chain = StuffDocumentsChain(
        llm_chain=reduce_chain,
        document_variable_name=combine_document_variable_name,
        document_prompt=document_prompt,
        verbose=verbose,  # type: ignore[arg-type]
    )
    if collapse_prompt is None:
        collapse_chain = None
        # A collapse model without a collapse prompt is a configuration error.
        if collapse_llm is not None:
            raise ValueError(
                "collapse_llm provided, but collapse_prompt was not: please "
                "provide one or stop providing collapse_llm."
            )
    else:
        _collapse_llm = collapse_llm or llm
        collapse_chain = StuffDocumentsChain(
            llm_chain=LLMChain(
                llm=_collapse_llm,
                prompt=collapse_prompt,
                verbose=verbose,  # type: ignore[arg-type]
            ),
            document_variable_name=combine_document_variable_name,
            document_prompt=document_prompt,
        )
    reduce_documents_chain = ReduceDocumentsChain(
        combine_documents_chain=combine_documents_chain,
        collapse_documents_chain=collapse_chain,
        token_max=token_max,
        verbose=verbose,  # type: ignore[arg-type]
    )
    return MapReduceDocumentsChain(
        llm_chain=map_chain,
        reduce_documents_chain=reduce_documents_chain,
        document_variable_name=map_reduce_document_variable_name,
        verbose=verbose,  # type: ignore[arg-type]
        **kwargs,
    )


def _load_refine_chain(
    llm: BaseLanguageModel,
    question_prompt: BasePromptTemplate = refine_prompts.DEFAULT_TEXT_QA_PROMPT,
    refine_prompt: BasePromptTemplate = refine_prompts.DEFAULT_REFINE_PROMPT,
    document_prompt: BasePromptTemplate = refine_prompts.EXAMPLE_PROMPT,
    document_variable_name: str = "context_str",
    initial_response_name: str = "existing_answer",
    refine_llm: Optional[BaseLanguageModel] = None,
    verbose: Optional[bool] = None,
    **kwargs: Any,
) -> RefineDocumentsChain:
    """Build a refine chain: draft an answer, then refine it per document.

    Args:
        llm: Language model for the initial answer (and refines by default).
        question_prompt: Prompt producing the initial answer.
        refine_prompt: Prompt that refines the running answer with a document.
        document_prompt: Prompt used to format each individual document.
        document_variable_name: Prompt variable that receives each document.
        initial_response_name: Prompt variable holding the running answer.
        refine_llm: Optional separate model for the refine step.
        verbose: Whether chains run in verbose mode.
        **kwargs: Extra arguments forwarded to ``RefineDocumentsChain``.
    """
    initial_chain = LLMChain(llm=llm, prompt=question_prompt, verbose=verbose)  # type: ignore[arg-type]
    # Fall back to the initial model when no dedicated refine model is given.
    _refine_llm = refine_llm or llm
    refine_chain = LLMChain(llm=_refine_llm, prompt=refine_prompt, verbose=verbose)  # type: ignore[arg-type]
    return RefineDocumentsChain(
        initial_llm_chain=initial_chain,
        refine_llm_chain=refine_chain,
        document_variable_name=document_variable_name,
        initial_response_name=initial_response_name,
        document_prompt=document_prompt,
        verbose=verbose,  # type: ignore[arg-type]
        **kwargs,
    )
@deprecated(
    since="0.2.13",
    removal="1.0",
    message=(
        "This function is deprecated. Refer to this guide on retrieval and question "
        "answering with sources: "
        "https://python.langchain.com/docs/how_to/qa_sources/"
        "\nSee also the following migration guides for replacements "
        "based on `chain_type`:\n"
        "stuff: https://python.langchain.com/docs/versions/migrating_chains/stuff_docs_chain\n"  # noqa: E501
        "map_reduce: https://python.langchain.com/docs/versions/migrating_chains/map_reduce_chain\n"  # noqa: E501
        "refine: https://python.langchain.com/docs/versions/migrating_chains/refine_chain\n"  # noqa: E501
        "map_rerank: https://python.langchain.com/docs/versions/migrating_chains/map_rerank_docs_chain\n"  # noqa: E501
    ),
)
def load_qa_with_sources_chain(
    llm: BaseLanguageModel,
    chain_type: str = "stuff",
    verbose: Optional[bool] = None,
    **kwargs: Any,
) -> BaseCombineDocumentsChain:
    """Load a question answering with sources chain.

    Args:
        llm: Language Model to use in the chain.
        chain_type: Type of document combining chain to use. Should be one of
            "stuff", "map_reduce", "refine" and "map_rerank".
        verbose: Whether chains should be run in verbose mode or not. Note that
            this applies to all chains that make up the final chain.

    Returns:
        A chain to use for question answering with sources.

    Raises:
        ValueError: If ``chain_type`` is not one of the supported types.
    """
    # Dispatch table from chain type to its private loader.
    loader_mapping: Mapping[str, LoadingCallable] = {
        "stuff": _load_stuff_chain,
        "map_reduce": _load_map_reduce_chain,
        "refine": _load_refine_chain,
        "map_rerank": _load_map_rerank_chain,
    }
    if chain_type not in loader_mapping:
        raise ValueError(
            f"Got unsupported chain type: {chain_type}. "
            f"Should be one of {loader_mapping.keys()}"
        )
    _func: LoadingCallable = loader_mapping[chain_type]
    return _func(llm, verbose=verbose, **kwargs)