"""Prompt template that contains few shot examples."""from__future__importannotationsfrompathlibimportPathfromtypingimportAny,Dict,List,Literal,Optional,Unionfromlangchain_core.example_selectorsimportBaseExampleSelectorfromlangchain_core.messagesimportBaseMessage,get_buffer_stringfromlangchain_core.prompts.chatimport(BaseChatPromptTemplate,BaseMessagePromptTemplate,)fromlangchain_core.prompts.promptimportPromptTemplatefromlangchain_core.prompts.stringimport(DEFAULT_FORMATTER_MAPPING,StringPromptTemplate,check_valid_template,get_template_variables,)fromlangchain_core.pydantic_v1importBaseModel,Extra,Field,root_validatorclass_FewShotPromptTemplateMixin(BaseModel):"""Prompt template that contains few shot examples."""examples:Optional[List[dict]]=None"""Examples to format into the prompt. Either this or example_selector should be provided."""example_selector:Optional[BaseExampleSelector]=None"""ExampleSelector to choose the examples to format into the prompt. Either this or examples should be provided."""classConfig:arbitrary_types_allowed=Trueextra=Extra.forbid@root_validator(pre=True)defcheck_examples_and_selector(cls,values:Dict)->Dict:"""Check that one and only one of examples/example_selector are provided. Args: values: The values to check. Returns: The values if they are valid. Raises: ValueError: If neither or both examples and example_selector are provided. ValueError: If both examples and example_selector are provided. """examples=values.get("examples",None)example_selector=values.get("example_selector",None)ifexamplesandexample_selector:raiseValueError("Only one of 'examples' and 'example_selector' should be provided")ifexamplesisNoneandexample_selectorisNone:raiseValueError("One of 'examples' and 'example_selector' should be provided")returnvaluesdef_get_examples(self,**kwargs:Any)->List[dict]:"""Get the examples to use for formatting the prompt. Args: **kwargs: Keyword arguments to be passed to the example selector. Returns: List of examples. 
Raises: ValueError: If neither examples nor example_selector are provided. """ifself.examplesisnotNone:returnself.exampleselifself.example_selectorisnotNone:returnself.example_selector.select_examples(kwargs)else:raiseValueError("One of 'examples' and 'example_selector' should be provided")asyncdef_aget_examples(self,**kwargs:Any)->List[dict]:"""Async get the examples to use for formatting the prompt. Args: **kwargs: Keyword arguments to be passed to the example selector. Returns: List of examples. Raises: ValueError: If neither examples nor example_selector are provided. """ifself.examplesisnotNone:returnself.exampleselifself.example_selectorisnotNone:returnawaitself.example_selector.aselect_examples(kwargs)else:raiseValueError("One of 'examples' and 'example_selector' should be provided")
class FewShotPromptTemplate(_FewShotPromptTemplateMixin, StringPromptTemplate):
    """Prompt template that contains few shot examples."""

    @classmethod
    def is_lc_serializable(cls) -> bool:
        """Return whether or not the class is serializable."""
        return False

    validate_template: bool = False
    """Whether or not to try validating the template."""

    example_prompt: PromptTemplate
    """PromptTemplate used to format an individual example."""

    suffix: str
    """A prompt template string to put after the examples."""

    example_separator: str = "\n\n"
    """String separator used to join the prefix, the examples, and suffix."""

    prefix: str = ""
    """A prompt template string to put before the examples."""

    template_format: Literal["f-string", "jinja2"] = "f-string"
    """The format of the prompt template. Options are: 'f-string', 'jinja2'."""

    def __init__(self, **kwargs: Any) -> None:
        """Initialize the few shot prompt template."""
        # Default the input variables from the example prompt when the caller
        # did not supply them explicitly.
        if "input_variables" not in kwargs and "example_prompt" in kwargs:
            kwargs["input_variables"] = kwargs["example_prompt"].input_variables
        super().__init__(**kwargs)

    @root_validator(pre=False, skip_on_failure=True)
    def template_is_valid(cls, values: Dict) -> Dict:
        """Check that prefix, suffix, and input variables are consistent."""
        if values["validate_template"]:
            # Strict mode: the declared variables must match the template text.
            check_valid_template(
                values["prefix"] + values["suffix"],
                values["template_format"],
                values["input_variables"] + list(values["partial_variables"]),
            )
        elif values.get("template_format"):
            # Lenient mode: infer input variables from the template itself,
            # excluding anything already bound as a partial variable.
            values["input_variables"] = [
                var
                for var in get_template_variables(
                    values["prefix"] + values["suffix"], values["template_format"]
                )
                if var not in values["partial_variables"]
            ]
        return values

    class Config:
        arbitrary_types_allowed = True
        extra = Extra.forbid
[docs]defformat(self,**kwargs:Any)->str:"""Format the prompt with inputs generating a string. Use this method to generate a string representation of a prompt. Args: **kwargs: keyword arguments to use for formatting. Returns: A string representation of the prompt. """kwargs=self._merge_partial_and_user_variables(**kwargs)# Get the examples to use.examples=self._get_examples(**kwargs)examples=[{k:e[k]forkinself.example_prompt.input_variables}foreinexamples]# Format the examples.example_strings=[self.example_prompt.format(**example)forexampleinexamples]# Create the overall template.pieces=[self.prefix,*example_strings,self.suffix]template=self.example_separator.join([pieceforpieceinpiecesifpiece])# Format the template with the input variables.returnDEFAULT_FORMATTER_MAPPING[self.template_format](template,**kwargs)
[docs]asyncdefaformat(self,**kwargs:Any)->str:"""Async format the prompt with inputs generating a string. Use this method to generate a string representation of a prompt. Args: **kwargs: keyword arguments to use for formatting. Returns: A string representation of the prompt. """kwargs=self._merge_partial_and_user_variables(**kwargs)# Get the examples to use.examples=awaitself._aget_examples(**kwargs)examples=[{k:e[k]forkinself.example_prompt.input_variables}foreinexamples]# Format the examples.example_strings=[awaitself.example_prompt.aformat(**example)forexampleinexamples]# Create the overall template.pieces=[self.prefix,*example_strings,self.suffix]template=self.example_separator.join([pieceforpieceinpiecesifpiece])# Format the template with the input variables.returnDEFAULT_FORMATTER_MAPPING[self.template_format](template,**kwargs)
@propertydef_prompt_type(self)->str:"""Return the prompt type key."""return"few_shot"
[docs]defsave(self,file_path:Union[Path,str])->None:"""Save the prompt template to a file. Args: file_path: The path to save the prompt template to. Raises: ValueError: If example_selector is provided. """ifself.example_selector:raiseValueError("Saving an example selector is not currently supported")returnsuper().save(file_path)
class FewShotChatMessagePromptTemplate(
    BaseChatPromptTemplate, _FewShotPromptTemplateMixin
):
    """Chat prompt template that supports few-shot examples.

    The high level structure produced by this prompt template is a list of messages
    consisting of prefix message(s), example message(s), and suffix message(s).

    This structure enables creating a conversation with intermediate examples like:

        System: You are a helpful AI Assistant
        Human: What is 2+2?
        AI: 4
        Human: What is 2+3?
        AI: 5
        Human: What is 4+4?

    This prompt template can be used to generate a fixed list of examples or else
    to dynamically select examples based on the input.

    Examples:

        Prompt template with a fixed list of examples (matching the sample
        conversation above):

        .. code-block:: python

            from langchain_core.prompts import (
                FewShotChatMessagePromptTemplate,
                ChatPromptTemplate
            )

            examples = [
                {"input": "2+2", "output": "4"},
                {"input": "2+3", "output": "5"},
            ]

            example_prompt = ChatPromptTemplate.from_messages(
                [('human', '{input}'), ('ai', '{output}')]
            )

            few_shot_prompt = FewShotChatMessagePromptTemplate(
                examples=examples,
                # This is a prompt template used to format each individual example.
                example_prompt=example_prompt,
            )

            final_prompt = ChatPromptTemplate.from_messages(
                [
                    ('system', 'You are a helpful AI Assistant'),
                    few_shot_prompt,
                    ('human', '{input}'),
                ]
            )
            final_prompt.format(input="What is 4+4?")

        Prompt template with dynamically selected examples:

        .. code-block:: python

            from langchain_core.prompts import SemanticSimilarityExampleSelector
            from langchain_core.embeddings import OpenAIEmbeddings
            from langchain_core.vectorstores import Chroma

            examples = [
                {"input": "2+2", "output": "4"},
                {"input": "2+3", "output": "5"},
                {"input": "2+4", "output": "6"},
                # ...
            ]

            to_vectorize = [
                " ".join(example.values())
                for example in examples
            ]
            embeddings = OpenAIEmbeddings()
            vectorstore = Chroma.from_texts(
                to_vectorize, embeddings, metadatas=examples
            )
            example_selector = SemanticSimilarityExampleSelector(
                vectorstore=vectorstore
            )

            from langchain_core import SystemMessage
            from langchain_core.prompts import HumanMessagePromptTemplate
            from langchain_core.prompts.few_shot import FewShotChatMessagePromptTemplate

            few_shot_prompt = FewShotChatMessagePromptTemplate(
                # Which variable(s) will be passed to the example selector.
                input_variables=["input"],
                example_selector=example_selector,
                # Define how each example will be formatted.
                # In this case, each example will become 2 messages:
                # 1 human, and 1 AI
                example_prompt=(
                    HumanMessagePromptTemplate.from_template("{input}")
                    + AIMessagePromptTemplate.from_template("{output}")
                ),
            )

            # Define the overall prompt.
            final_prompt = (
                SystemMessagePromptTemplate.from_template(
                    "You are a helpful AI Assistant"
                )
                + few_shot_prompt
                + HumanMessagePromptTemplate.from_template("{input}")
            )
            # Show the prompt
            print(final_prompt.format_messages(input="What's 3+3?"))  # noqa: T201

            # Use within an LLM
            from langchain_core.chat_models import ChatAnthropic
            chain = final_prompt | ChatAnthropic(model="claude-3-haiku-20240307")
            chain.invoke({"input": "What's 3+3?"})
    """

    input_variables: List[str] = Field(default_factory=list)
    """A list of the names of the variables the prompt template will use
    to pass to the example_selector, if provided."""

    example_prompt: Union[BaseMessagePromptTemplate, BaseChatPromptTemplate]
    """The class to format each example."""

    @classmethod
    def is_lc_serializable(cls) -> bool:
        """Return whether or not the class is serializable."""
        return False

    class Config:
        arbitrary_types_allowed = True
        extra = Extra.forbid
[docs]defformat_messages(self,**kwargs:Any)->List[BaseMessage]:"""Format kwargs into a list of messages. Args: **kwargs: keyword arguments to use for filling in templates in messages. Returns: A list of formatted messages with all template variables filled in. """# Get the examples to use.examples=self._get_examples(**kwargs)examples=[{k:e[k]forkinself.example_prompt.input_variables}foreinexamples]# Format the examples.messages=[messageforexampleinexamplesformessageinself.example_prompt.format_messages(**example)]returnmessages
[docs]asyncdefaformat_messages(self,**kwargs:Any)->List[BaseMessage]:"""Async format kwargs into a list of messages. Args: **kwargs: keyword arguments to use for filling in templates in messages. Returns: A list of formatted messages with all template variables filled in. """# Get the examples to use.examples=awaitself._aget_examples(**kwargs)examples=[{k:e[k]forkinself.example_prompt.input_variables}foreinexamples]# Format the examples.messages=[messageforexampleinexamplesformessageinawaitself.example_prompt.aformat_messages(**example)]returnmessages
[docs]defformat(self,**kwargs:Any)->str:"""Format the prompt with inputs generating a string. Use this method to generate a string representation of a prompt consisting of chat messages. Useful for feeding into a string-based completion language model or debugging. Args: **kwargs: keyword arguments to use for formatting. Returns: A string representation of the prompt """messages=self.format_messages(**kwargs)returnget_buffer_string(messages)
[docs]asyncdefaformat(self,**kwargs:Any)->str:"""Async format the prompt with inputs generating a string. Use this method to generate a string representation of a prompt consisting of chat messages. Useful for feeding into a string-based completion language model or debugging. Args: **kwargs: keyword arguments to use for formatting. Returns: A string representation of the prompt """messages=awaitself.aformat_messages(**kwargs)returnget_buffer_string(messages)
[docs]defpretty_repr(self,html:bool=False)->str:"""Return a pretty representation of the prompt template. Args: html: Whether or not to return an HTML formatted string. Returns: A pretty representation of the prompt template. """raiseNotImplementedError()