@deprecated(
    since="0.3.22",
    removal="1.0",
    message=(
        "This class is deprecated. Please see the docstring below or at the link"
        " for a replacement option: "
        "https://python.langchain.com/api_reference/core/prompts/langchain_core.prompts.pipeline.PipelinePromptTemplate.html"
    ),
)
class PipelinePromptTemplate(BasePromptTemplate):
    """Prompt template that composes several prompt templates into one.

    .. deprecated::
        Chain the individual prompts together in your own code instead,
        e.g. with a plain loop:

        .. code-block:: python

            my_input = {"key": "value"}
            for name, prompt in pipeline_prompts:
                my_input[name] = prompt.invoke(my_input).to_string()
            my_output = final_prompt.invoke(my_input)

    Useful for reusing parts of prompts. A pipeline prompt has two parts:

    - ``final_prompt``: the prompt that is ultimately returned.
    - ``pipeline_prompts``: a list of ``(name, prompt_template)`` tuples.
      Each template is formatted in order, and its output is passed to all
      later templates as a variable named ``name``.
    """

    final_prompt: BasePromptTemplate
    """The final prompt that is returned."""

    pipeline_prompts: list[tuple[str, BasePromptTemplate]]
    """A list of tuples, consisting of a string (`name`) and a Prompt Template."""

    @classmethod
    def get_lc_namespace(cls) -> list[str]:
        """Get the namespace of the langchain object."""
        return ["langchain", "prompts", "pipeline"]

    @model_validator(mode="before")
    @classmethod
    def get_input_variables(cls, values: dict) -> Any:
        """Derive ``input_variables`` from the pipeline stages.

        The template's inputs are every variable required by any stage,
        minus the variables the pipeline itself produces (one per stage
        name).
        """
        produced = {name for name, _ in values["pipeline_prompts"]}
        required: set = set()
        for _, template in values["pipeline_prompts"]:
            required.update(template.input_variables)
        values["input_variables"] = list(required - produced)
        return values
def format_prompt(self, **kwargs: Any) -> PromptValue:
    """Format the prompt with the inputs.

    Each pipeline stage is rendered in order; its output is injected back
    into ``kwargs`` under the stage's name so later stages (and the final
    prompt) can consume it.

    Args:
        kwargs: Any arguments to be passed to the prompt template.

    Returns:
        A formatted string.
    """
    for name, template in self.pipeline_prompts:
        stage_inputs = _get_inputs(kwargs, template.input_variables)
        if isinstance(template, BaseChatPromptTemplate):
            rendered = template.format_messages(**stage_inputs)
        else:
            rendered = template.format(**stage_inputs)
        kwargs[name] = rendered
    final_inputs = _get_inputs(kwargs, self.final_prompt.input_variables)
    return self.final_prompt.format_prompt(**final_inputs)
async def aformat_prompt(self, **kwargs: Any) -> PromptValue:
    """Async format the prompt with the inputs.

    Async twin of ``format_prompt``: renders each pipeline stage in order,
    feeding every stage's output back into ``kwargs`` under its name.

    Args:
        kwargs: Any arguments to be passed to the prompt template.

    Returns:
        A formatted string.
    """
    for name, template in self.pipeline_prompts:
        stage_inputs = _get_inputs(kwargs, template.input_variables)
        if isinstance(template, BaseChatPromptTemplate):
            rendered = await template.aformat_messages(**stage_inputs)
        else:
            rendered = await template.aformat(**stage_inputs)
        kwargs[name] = rendered
    final_inputs = _get_inputs(kwargs, self.final_prompt.input_variables)
    return await self.final_prompt.aformat_prompt(**final_inputs)
def format(self, **kwargs: Any) -> str:
    """Format the prompt with the inputs.

    Args:
        kwargs: Any arguments to be passed to the prompt template.

    Returns:
        A formatted string.
    """
    prompt_value = self.format_prompt(**kwargs)
    return prompt_value.to_string()
async def aformat(self, **kwargs: Any) -> str:
    """Async format the prompt with the inputs.

    Args:
        kwargs: Any arguments to be passed to the prompt template.

    Returns:
        A formatted string.
    """
    prompt_value = await self.aformat_prompt(**kwargs)
    return prompt_value.to_string()