class BasePromptTemplate(
    RunnableSerializable[dict, PromptValue], Generic[FormatOutputType], ABC
):
    """Base class for all prompt templates, returning a prompt."""

    input_variables: list[str]
    """A list of the names of the variables whose values are required as inputs to the
    prompt."""
    optional_variables: list[str] = Field(default=[])
    """optional_variables: A list of the names of the variables for placeholder
    or MessagePlaceholder that are optional. These variables are auto inferred
    from the prompt and user need not provide them."""
    input_types: typing.Dict[str, Any] = Field(default_factory=dict, exclude=True)  # noqa: UP006
    """A dictionary of the types of the variables the prompt template expects.
    If not provided, all variables are assumed to be strings."""
    output_parser: Optional[BaseOutputParser] = None
    """How to parse the output of calling an LLM on this formatted prompt."""
    partial_variables: Mapping[str, Any] = Field(default_factory=dict)
    """A dictionary of the partial variables the prompt template carries.

    Partial variables populate the template so that you don't need to pass them
    in every time you call the prompt."""
    metadata: Optional[typing.Dict[str, Any]] = None  # noqa: UP006
    """Metadata to be used for tracing."""
    tags: Optional[list[str]] = None
    """Tags to be used for tracing."""

    @model_validator(mode="after")
    def validate_variable_names(self) -> Self:
        """Validate variable names do not include restricted names."""
        # "stop" is reserved: it is passed through to the LLM call internally.
        if "stop" in self.input_variables:
            msg = (
                "Cannot have an input variable named 'stop', as it is used internally,"
                " please rename."
            )
            raise ValueError(
                create_message(message=msg, error_code=ErrorCode.INVALID_PROMPT_INPUT)
            )
        if "stop" in self.partial_variables:
            msg = (
                "Cannot have an partial variable named 'stop', as it is used "
                "internally, please rename."
            )
            raise ValueError(
                create_message(message=msg, error_code=ErrorCode.INVALID_PROMPT_INPUT)
            )
        # A variable cannot be both required input and a pre-filled partial.
        overall = set(self.input_variables).intersection(self.partial_variables)
        if overall:
            msg = f"Found overlapping input and partial variables: {overall}"
            raise ValueError(
                create_message(message=msg, error_code=ErrorCode.INVALID_PROMPT_INPUT)
            )
        return self

    @classmethod
    def get_lc_namespace(cls) -> list[str]:
        """Get the namespace of the langchain object.

        Returns ["langchain", "schema", "prompt_template"].
        """
        return ["langchain", "schema", "prompt_template"]

    @classmethod
    def is_lc_serializable(cls) -> bool:
        """Return whether this class is serializable.

        Returns True.
        """
        return True

    model_config = ConfigDict(
        arbitrary_types_allowed=True,
    )

    @cached_property
    def _serialized(self) -> dict[str, Any]:
        # Cached so tracing callbacks don't re-serialize the template per call.
        return dumpd(self)

    @property
    @override
    def OutputType(self) -> Any:
        """Return the output type of the prompt."""
        return Union[StringPromptValue, ChatPromptValueConcrete]

    def get_input_schema(
        self, config: Optional[RunnableConfig] = None
    ) -> type[BaseModel]:
        """Get the input schema for the prompt.

        Args:
            config: RunnableConfig, configuration for the prompt.

        Returns:
            Type[BaseModel]: The input schema for the prompt.
        """
        # This is correct, but pydantic typings/mypy don't think so.
        required_input_variables = {
            k: (self.input_types.get(k, str), ...) for k in self.input_variables
        }
        optional_input_variables = {
            k: (self.input_types.get(k, str), None) for k in self.optional_variables
        }
        return create_model_v2(
            "PromptInput",
            field_definitions={**required_input_variables, **optional_input_variables},
        )

    def _validate_input(self, inner_input: Any) -> dict:
        """Coerce/validate raw input into the dict of variables the template needs.

        A bare (non-dict) value is accepted only when the template has exactly
        one input variable; otherwise a TypeError is raised. Missing required
        variables raise a KeyError with an escaping hint.
        """
        if not isinstance(inner_input, dict):
            if len(self.input_variables) == 1:
                var_name = self.input_variables[0]
                inner_input = {var_name: inner_input}
            else:
                msg = (
                    f"Expected mapping type as input to {self.__class__.__name__}. "
                    f"Received {type(inner_input)}."
                )
                raise TypeError(
                    create_message(
                        message=msg, error_code=ErrorCode.INVALID_PROMPT_INPUT
                    )
                )
        missing = set(self.input_variables).difference(inner_input)
        if missing:
            msg = (
                f"Input to {self.__class__.__name__} is missing variables {missing}. "
                f" Expected: {self.input_variables}"
                f" Received: {list(inner_input.keys())}"
            )
            example_key = missing.pop()
            msg += (
                f"\nNote: if you intended {{{example_key}}} to be part of the string"
                " and not a variable, please escape it with double curly braces like: "
                f"'{{{{{example_key}}}}}'."
            )
            raise KeyError(
                create_message(message=msg, error_code=ErrorCode.INVALID_PROMPT_INPUT)
            )
        return inner_input

    def _format_prompt_with_error_handling(self, inner_input: dict) -> PromptValue:
        _inner_input = self._validate_input(inner_input)
        return self.format_prompt(**_inner_input)

    async def _aformat_prompt_with_error_handling(
        self, inner_input: dict
    ) -> PromptValue:
        _inner_input = self._validate_input(inner_input)
        return await self.aformat_prompt(**_inner_input)

    def invoke(
        self, input: dict, config: Optional[RunnableConfig] = None, **kwargs: Any
    ) -> PromptValue:
        """Invoke the prompt.

        Args:
            input: Dict, input to the prompt.
            config: RunnableConfig, configuration for the prompt.

        Returns:
            PromptValue: The output of the prompt.
        """
        config = ensure_config(config)
        # Merge template-level metadata/tags into fresh objects so the
        # caller-supplied config is never mutated in place.
        if self.metadata:
            config["metadata"] = {**config["metadata"], **self.metadata}
        if self.tags:
            config["tags"] = config["tags"] + self.tags
        return self._call_with_config(
            self._format_prompt_with_error_handling,
            input,
            config,
            run_type="prompt",
            serialized=self._serialized,
        )

    async def ainvoke(
        self, input: dict, config: Optional[RunnableConfig] = None, **kwargs: Any
    ) -> PromptValue:
        """Async invoke the prompt.

        Args:
            input: Dict, input to the prompt.
            config: RunnableConfig, configuration for the prompt.

        Returns:
            PromptValue: The output of the prompt.
        """
        config = ensure_config(config)
        # Build fresh merged objects instead of mutating config["metadata"] /
        # config["tags"] in place: in-place .update()/.extend() leaked the
        # template's metadata and tags into a caller's reused config dict.
        # This mirrors the merge in `invoke`.
        if self.metadata:
            config["metadata"] = {**config["metadata"], **self.metadata}
        if self.tags:
            config["tags"] = config["tags"] + self.tags
        return await self._acall_with_config(
            self._aformat_prompt_with_error_handling,
            input,
            config,
            run_type="prompt",
            serialized=self._serialized,
        )

    @abstractmethod
    def format_prompt(self, **kwargs: Any) -> PromptValue:
        """Create Prompt Value.

        Args:
            kwargs: Any arguments to be passed to the prompt template.

        Returns:
            PromptValue: The output of the prompt.
        """

    async def aformat_prompt(self, **kwargs: Any) -> PromptValue:
        """Async create Prompt Value.

        Args:
            kwargs: Any arguments to be passed to the prompt template.

        Returns:
            PromptValue: The output of the prompt.
        """
        return self.format_prompt(**kwargs)

    def partial(self, **kwargs: Union[str, Callable[[], str]]) -> BasePromptTemplate:
        """Return a partial of the prompt template.

        Args:
            kwargs: Union[str, Callable[[], str]], partial variables to set.

        Returns:
            BasePromptTemplate: A partial of the prompt template.
        """
        prompt_dict = self.__dict__.copy()
        # Newly partialed variables are no longer required inputs.
        prompt_dict["input_variables"] = list(
            set(self.input_variables).difference(kwargs)
        )
        prompt_dict["partial_variables"] = {**self.partial_variables, **kwargs}
        return type(self)(**prompt_dict)

    def _merge_partial_and_user_variables(self, **kwargs: Any) -> dict[str, Any]:
        # Get partial params; callables are evaluated lazily at format time.
        partial_kwargs = {
            k: v if not callable(v) else v()
            for k, v in self.partial_variables.items()
        }
        # User-provided kwargs take precedence over partials.
        return {**partial_kwargs, **kwargs}

    @abstractmethod
    def format(self, **kwargs: Any) -> FormatOutputType:
        """Format the prompt with the inputs.

        Args:
            kwargs: Any arguments to be passed to the prompt template.

        Returns:
            A formatted string.

        Example:

            .. code-block:: python

                prompt.format(variable1="foo")
        """

    async def aformat(self, **kwargs: Any) -> FormatOutputType:
        """Async format the prompt with the inputs.

        Args:
            kwargs: Any arguments to be passed to the prompt template.

        Returns:
            A formatted string.

        Example:

            .. code-block:: python

                await prompt.aformat(variable1="foo")
        """
        return self.format(**kwargs)

    @property
    def _prompt_type(self) -> str:
        """Return the prompt type key."""
        raise NotImplementedError

    def dict(self, **kwargs: Any) -> dict:
        """Return dictionary representation of prompt.

        Args:
            kwargs: Any additional arguments to pass to the dictionary.

        Returns:
            Dict: Dictionary representation of the prompt.

        Raises:
            NotImplementedError: If the prompt type is not implemented.
        """
        prompt_dict = super().model_dump(**kwargs)
        # Subclasses without a `_prompt_type` simply omit the "_type" key.
        with contextlib.suppress(NotImplementedError):
            prompt_dict["_type"] = self._prompt_type
        return prompt_dict

    def save(self, file_path: Union[Path, str]) -> None:
        """Save the prompt.

        Args:
            file_path: Path to directory to save prompt to.

        Raises:
            ValueError: If the prompt has partial variables.
            ValueError: If the file path is not json or yaml.
            NotImplementedError: If the prompt type is not implemented.

        Example:

            .. code-block:: python

                prompt.save(file_path="path/prompt.yaml")
        """
        # Partials may hold callables, which cannot be serialized faithfully.
        if self.partial_variables:
            msg = "Cannot save prompt with partial variables."
            raise ValueError(msg)

        # Fetch dictionary to save
        prompt_dict = self.dict()
        if "_type" not in prompt_dict:
            msg = f"Prompt {self} does not support saving."
            raise NotImplementedError(msg)

        # Convert file to Path object.
        save_path = Path(file_path)
        directory_path = save_path.parent
        directory_path.mkdir(parents=True, exist_ok=True)

        if save_path.suffix == ".json":
            with save_path.open("w") as f:
                json.dump(prompt_dict, f, indent=4)
        elif save_path.suffix.endswith((".yaml", ".yml")):
            with save_path.open("w") as f:
                yaml.dump(prompt_dict, f, default_flow_style=False)
        else:
            msg = f"{save_path} must be json or yaml"
            raise ValueError(msg)
def_get_document_info(doc:Document,prompt:BasePromptTemplate[str])->dict:base_info={"page_content":doc.page_content,**doc.metadata}missing_metadata=set(prompt.input_variables).difference(base_info)iflen(missing_metadata)>0:required_metadata=[ivforivinprompt.input_variablesifiv!="page_content"]msg=(f"Document prompt requires documents to have metadata variables: "f"{required_metadata}. Received document with missing metadata: "f"{list(missing_metadata)}.")raiseValueError(create_message(message=msg,error_code=ErrorCode.INVALID_PROMPT_INPUT))return{k:base_info[k]forkinprompt.input_variables}
def format_document(doc: Document, prompt: BasePromptTemplate[str]) -> str:
    """Format a document into a string based on a prompt template.

    Two sources feed the template: ``doc.page_content`` is bound to the
    variable ``page_content``, and each entry of ``doc.metadata`` is bound to
    a variable of the same name. The resulting mapping is passed to ``prompt``
    to produce the formatted string.

    Args:
        doc: Document whose page_content and metadata will be used to create
            the final string.
        prompt: BasePromptTemplate used to format the page_content and
            metadata into the final string.

    Returns:
        string of the document formatted.

    Example:

        .. code-block:: python

            from langchain_core.documents import Document
            from langchain_core.prompts import PromptTemplate

            doc = Document(page_content="This is a joke", metadata={"page": "1"})
            prompt = PromptTemplate.from_template("Page {page}: {page_content}")
            format_document(doc, prompt)
            >>> "Page 1: This is a joke"
    """
    document_info = _get_document_info(doc, prompt)
    return prompt.format(**document_info)
async def aformat_document(doc: Document, prompt: BasePromptTemplate[str]) -> str:
    """Async format a document into a string based on a prompt template.

    Two sources feed the template: ``doc.page_content`` is bound to the
    variable ``page_content``, and each entry of ``doc.metadata`` is bound to
    a variable of the same name. The resulting mapping is passed to ``prompt``
    to produce the formatted string.

    Args:
        doc: Document whose page_content and metadata will be used to create
            the final string.
        prompt: BasePromptTemplate used to format the page_content and
            metadata into the final string.

    Returns:
        string of the document formatted.
    """
    document_info = _get_document_info(doc, prompt)
    return await prompt.aformat(**document_info)