class BasePromptTemplate(
    RunnableSerializable[Dict, PromptValue], Generic[FormatOutputType], ABC
):
    """Base class for all prompt templates, returning a prompt."""

    input_variables: List[str]
    """A list of the names of the variables whose values are required as inputs
    to the prompt."""
    optional_variables: List[str] = Field(default=[])
    """A list of the names of the variables that are optional, e.g. for a
    placeholder or MessagesPlaceholder. These variables are inferred
    automatically from the prompt, and the user need not provide them."""
    input_types: Dict[str, Any] = Field(default_factory=dict, exclude=True)
    """A dictionary of the types of the variables the prompt template expects.
    If not provided, all variables are assumed to be strings."""
    output_parser: Optional[BaseOutputParser] = None
    """How to parse the output of calling an LLM on this formatted prompt."""
    partial_variables: Mapping[str, Any] = Field(default_factory=dict)
    """A dictionary of the partial variables the prompt template carries.

    Partial variables populate the template so that you don't need to pass them
    in every time you call the prompt."""
    metadata: Optional[Dict[str, Any]] = None
    """Metadata to be used for tracing."""
    tags: Optional[List[str]] = None
    """Tags to be used for tracing."""

    @root_validator(pre=False, skip_on_failure=True)
    def validate_variable_names(cls, values: Dict) -> Dict:
        """Validate that variable names do not include restricted names."""
        if "stop" in values["input_variables"]:
            raise ValueError(
                "Cannot have an input variable named 'stop', as it is used "
                "internally, please rename."
            )
        if "stop" in values["partial_variables"]:
            raise ValueError(
                "Cannot have a partial variable named 'stop', as it is used "
                "internally, please rename."
            )
        overall = set(values["input_variables"]).intersection(
            values["partial_variables"]
        )
        if overall:
            raise ValueError(
                f"Found overlapping input and partial variables: {overall}"
            )
        return values

    @classmethod
    def get_lc_namespace(cls) -> List[str]:
        """Get the namespace of the langchain object.

        Returns ["langchain", "schema", "prompt_template"].
        """
        return ["langchain", "schema", "prompt_template"]

    @classmethod
    def is_lc_serializable(cls) -> bool:
        """Return whether this class is serializable. Returns True."""
        return True

    class Config:
        arbitrary_types_allowed = True

    @property
    def OutputType(self) -> Any:
        """Return the output type of the prompt."""
        return Union[StringPromptValue, ChatPromptValueConcrete]

    def get_input_schema(
        self, config: Optional[RunnableConfig] = None
    ) -> Type[BaseModel]:
        """Get the input schema for the prompt.

        Args:
            config: RunnableConfig, configuration for the prompt.

        Returns:
            Type[BaseModel]: The input schema for the prompt.
        """
        # This is correct, but pydantic typings/mypy don't think so.
        required_input_variables = {
            k: (self.input_types.get(k, str), ...) for k in self.input_variables
        }
        optional_input_variables = {
            k: (self.input_types.get(k, str), None) for k in self.optional_variables
        }
        return create_model(
            "PromptInput",
            **{**required_input_variables, **optional_input_variables},
        )

    def _validate_input(self, inner_input: Any) -> Dict:
        if not isinstance(inner_input, dict):
            if len(self.input_variables) == 1:
                var_name = self.input_variables[0]
                inner_input = {var_name: inner_input}
            else:
                raise TypeError(
                    f"Expected mapping type as input to {self.__class__.__name__}. "
                    f"Received {type(inner_input)}."
                )
        missing = set(self.input_variables).difference(inner_input)
        if missing:
            msg = (
                f"Input to {self.__class__.__name__} is missing variables {missing}. "
                f" Expected: {self.input_variables}"
                f" Received: {list(inner_input.keys())}"
            )
            example_key = missing.pop()
            msg += (
                f"\nNote: if you intended {{{example_key}}} to be part of the string"
                " and not a variable, please escape it with double curly braces like: "
                f"'{{{{{example_key}}}}}'."
            )
            raise KeyError(msg)
        return inner_input

    def _format_prompt_with_error_handling(self, inner_input: Dict) -> PromptValue:
        _inner_input = self._validate_input(inner_input)
        return self.format_prompt(**_inner_input)

    async def _aformat_prompt_with_error_handling(
        self, inner_input: Dict
    ) -> PromptValue:
        _inner_input = self._validate_input(inner_input)
        return await self.aformat_prompt(**_inner_input)
    def invoke(
        self, input: Dict, config: Optional[RunnableConfig] = None
    ) -> PromptValue:
        """Invoke the prompt.

        Args:
            input: Dict, input to the prompt.
            config: RunnableConfig, configuration for the prompt.

        Returns:
            PromptValue: The output of the prompt.
        """
        config = ensure_config(config)
        if self.metadata:
            config["metadata"] = {**config["metadata"], **self.metadata}
        if self.tags:
            config["tags"] = config["tags"] + self.tags
        return self._call_with_config(
            self._format_prompt_with_error_handling,
            input,
            config,
            run_type="prompt",
            serialized=dumpd(self),
        )
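    # Illustrative usage sketch (not part of the original module); it assumes a
    # concrete subclass such as PromptTemplate, and the template text is made up.
    # Because _validate_input coerces a non-mapping input when there is exactly
    # one input variable, both calls below are equivalent:
    #
    #     from langchain_core.prompts import PromptTemplate
    #
    #     prompt = PromptTemplate.from_template("Tell me a joke about {topic}.")
    #     prompt.invoke({"topic": "bears"})   # explicit mapping
    #     prompt.invoke("bears")              # coerced to {"topic": "bears"}
    #
    # As the KeyError message in _validate_input notes, literal braces in a
    # template must be escaped by doubling them, e.g. '{{"score": 1}}', so they
    # are not mistaken for variables.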
    async def ainvoke(
        self, input: Dict, config: Optional[RunnableConfig] = None, **kwargs: Any
    ) -> PromptValue:
        """Async invoke the prompt.

        Args:
            input: Dict, input to the prompt.
            config: RunnableConfig, configuration for the prompt.

        Returns:
            PromptValue: The output of the prompt.
        """
        config = ensure_config(config)
        if self.metadata:
            config["metadata"].update(self.metadata)
        if self.tags:
            config["tags"].extend(self.tags)
        return await self._acall_with_config(
            self._aformat_prompt_with_error_handling,
            input,
            config,
            run_type="prompt",
            serialized=dumpd(self),
        )
    @abstractmethod
    def format_prompt(self, **kwargs: Any) -> PromptValue:
        """Create Prompt Value.

        Args:
            kwargs: Any arguments to be passed to the prompt template.

        Returns:
            PromptValue: The output of the prompt.
        """
    async def aformat_prompt(self, **kwargs: Any) -> PromptValue:
        """Async create Prompt Value.

        Args:
            kwargs: Any arguments to be passed to the prompt template.

        Returns:
            PromptValue: The output of the prompt.
        """
        return self.format_prompt(**kwargs)
    def partial(self, **kwargs: Union[str, Callable[[], str]]) -> BasePromptTemplate:
        """Return a partial of the prompt template.

        Args:
            kwargs: Union[str, Callable[[], str]], partial variables to set.

        Returns:
            BasePromptTemplate: A partial of the prompt template.
        """
        prompt_dict = self.__dict__.copy()
        prompt_dict["input_variables"] = list(
            set(self.input_variables).difference(kwargs)
        )
        prompt_dict["partial_variables"] = {**self.partial_variables, **kwargs}
        return type(self)(**prompt_dict)
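    # Illustrative usage sketch (not part of the original module); the template
    # and variable names are made up. Partial values may be plain strings or
    # zero-argument callables, which are evaluated at format time by
    # _merge_partial_and_user_variables below:
    #
    #     from datetime import date
    #     from langchain_core.prompts import PromptTemplate
    #
    #     prompt = PromptTemplate.from_template(
    #         "Today is {today}. Tell me about {topic}."
    #     )
    #     partial_prompt = prompt.partial(today=lambda: date.today().isoformat())
    #     partial_prompt.format(topic="bears")  # only "topic" is still required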
    def _merge_partial_and_user_variables(self, **kwargs: Any) -> Dict[str, Any]:
        # Get partial params:
        partial_kwargs = {
            k: v if not callable(v) else v()
            for k, v in self.partial_variables.items()
        }
        return {**partial_kwargs, **kwargs}
    @abstractmethod
    def format(self, **kwargs: Any) -> FormatOutputType:
        """Format the prompt with the inputs.

        Args:
            kwargs: Any arguments to be passed to the prompt template.

        Returns:
            A formatted string.

        Example:

            .. code-block:: python

                prompt.format(variable1="foo")
        """
    async def aformat(self, **kwargs: Any) -> FormatOutputType:
        """Async format the prompt with the inputs.

        Args:
            kwargs: Any arguments to be passed to the prompt template.

        Returns:
            A formatted string.

        Example:

            .. code-block:: python

                await prompt.aformat(variable1="foo")
        """
        return self.format(**kwargs)
    @property
    def _prompt_type(self) -> str:
        """Return the prompt type key."""
        raise NotImplementedError

    def dict(self, **kwargs: Any) -> Dict:
        """Return dictionary representation of prompt.

        Args:
            kwargs: Any additional arguments to pass to the dictionary.

        Returns:
            Dict: Dictionary representation of the prompt.

        Raises:
            NotImplementedError: If the prompt type is not implemented.
        """
        prompt_dict = super().dict(**kwargs)
        try:
            prompt_dict["_type"] = self._prompt_type
        except NotImplementedError:
            pass
        return prompt_dict
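    # Illustrative sketch (not part of the original module): for a concrete
    # subclass that implements _prompt_type, dict() adds a "_type" key, which
    # save() and the prompt loaders use to round-trip the prompt. The exact keys
    # vary by subclass; the output below is abbreviated:
    #
    #     from langchain_core.prompts import PromptTemplate
    #
    #     prompt = PromptTemplate.from_template("Tell me about {topic}.")
    #     prompt.dict()
    #     # -> {"input_variables": ["topic"], "template": "Tell me about {topic}.",
    #     #     ..., "_type": "prompt"}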
    def save(self, file_path: Union[Path, str]) -> None:
        """Save the prompt.

        Args:
            file_path: Path to the file to save the prompt to.

        Raises:
            ValueError: If the prompt has partial variables.
            ValueError: If the file path is not json or yaml.
            NotImplementedError: If the prompt type is not implemented.

        Example:

            .. code-block:: python

                prompt.save(file_path="path/prompt.yaml")
        """
        if self.partial_variables:
            raise ValueError("Cannot save prompt with partial variables.")

        # Fetch dictionary to save
        prompt_dict = self.dict()
        if "_type" not in prompt_dict:
            raise NotImplementedError(f"Prompt {self} does not support saving.")

        # Convert file to Path object.
        if isinstance(file_path, str):
            save_path = Path(file_path)
        else:
            save_path = file_path

        directory_path = save_path.parent
        directory_path.mkdir(parents=True, exist_ok=True)

        if save_path.suffix == ".json":
            with open(file_path, "w") as f:
                json.dump(prompt_dict, f, indent=4)
        elif save_path.suffix.endswith((".yaml", ".yml")):
            with open(file_path, "w") as f:
                yaml.dump(prompt_dict, f, default_flow_style=False)
        else:
            raise ValueError(f"{save_path} must be json or yaml")
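# Illustrative save/load round trip (not part of the original module); the file
# name is made up and the load_prompt import path is an assumption that may vary
# between versions:
#
#     from langchain_core.prompts import PromptTemplate
#     from langchain_core.prompts.loading import load_prompt
#
#     prompt = PromptTemplate.from_template("Tell me about {topic}.")
#     prompt.save("prompt.yaml")             # suffix selects JSON or YAML output
#     reloaded = load_prompt("prompt.yaml")  # reconstructed via the "_type" key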
def _get_document_info(doc: Document, prompt: BasePromptTemplate[str]) -> Dict:
    base_info = {"page_content": doc.page_content, **doc.metadata}
    missing_metadata = set(prompt.input_variables).difference(base_info)
    if len(missing_metadata) > 0:
        required_metadata = [
            iv for iv in prompt.input_variables if iv != "page_content"
        ]
        raise ValueError(
            f"Document prompt requires documents to have metadata variables: "
            f"{required_metadata}. Received document with missing metadata: "
            f"{list(missing_metadata)}."
        )
    return {k: base_info[k] for k in prompt.input_variables}
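# Illustrative note (not part of the original module; the document and template
# are made up): if the prompt references metadata the Document does not carry,
# _get_document_info raises a ValueError naming the missing keys, so
# format_document below fails early rather than producing a partial string:
#
#     from langchain_core.documents import Document
#     from langchain_core.prompts import PromptTemplate
#
#     doc = Document(page_content="This is a joke")  # no "page" metadata
#     prompt = PromptTemplate.from_template("Page {page}: {page_content}")
#     format_document(doc, prompt)  # raises ValueError about the missing "page" key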
def format_document(doc: Document, prompt: BasePromptTemplate[str]) -> str:
    """Format a document into a string based on a prompt template.

    First, this pulls information from the document from two sources:

    1. page_content:
        This takes the information from the `document.page_content`
        and assigns it to a variable named `page_content`.
    2. metadata:
        This takes information from `document.metadata` and assigns
        it to variables of the same name.

    Those variables are then passed into the `prompt` to produce a formatted string.

    Args:
        doc: Document, the page_content and metadata will be used to create
            the final string.
        prompt: BasePromptTemplate, will be used to format the page_content
            and metadata into the final string.

    Returns:
        string of the document formatted.

    Example:

        .. code-block:: python

            from langchain_core.documents import Document
            from langchain_core.prompts import PromptTemplate

            doc = Document(page_content="This is a joke", metadata={"page": "1"})
            prompt = PromptTemplate.from_template("Page {page}: {page_content}")
            format_document(doc, prompt)
            >>> "Page 1: This is a joke"
    """
    return prompt.format(**_get_document_info(doc, prompt))
async def aformat_document(doc: Document, prompt: BasePromptTemplate[str]) -> str:
    """Async format a document into a string based on a prompt template.

    First, this pulls information from the document from two sources:

    1. page_content:
        This takes the information from the `document.page_content`
        and assigns it to a variable named `page_content`.
    2. metadata:
        This takes information from `document.metadata` and assigns
        it to variables of the same name.

    Those variables are then passed into the `prompt` to produce a formatted string.

    Args:
        doc: Document, the page_content and metadata will be used to create
            the final string.
        prompt: BasePromptTemplate, will be used to format the page_content
            and metadata into the final string.

    Returns:
        string of the document formatted.
    """
    return await prompt.aformat(**_get_document_info(doc, prompt))
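# Illustrative async usage sketch (not part of the original module); the document
# and template are made up:
#
#     import asyncio
#     from langchain_core.documents import Document
#     from langchain_core.prompts import PromptTemplate
#
#     doc = Document(page_content="This is a joke", metadata={"page": "1"})
#     prompt = PromptTemplate.from_template("Page {page}: {page_content}")
#     asyncio.run(aformat_document(doc, prompt))
#     # -> "Page 1: This is a joke"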