# Imports required by this excerpt. The langchain_core paths below follow the
# library's current layout and may differ slightly between versions.
from __future__ import annotations

from collections.abc import Sequence
from typing import Any, Optional, Union, cast

from pydantic import ConfigDict, Field, field_validator

from langchain_core.load.serializable import Serializable
from langchain_core.utils import get_bolded_text
from langchain_core.utils._merge import merge_dicts, merge_lists


class BaseMessage(Serializable):
    """Base abstract message class.

    Messages are the inputs and outputs of ChatModels.
    """

    content: Union[str, list[Union[str, dict]]]
    """The string contents of the message."""

    additional_kwargs: dict = Field(default_factory=dict)
    """Reserved for additional payload data associated with the message.

    For example, for a message from an AI, this could include tool calls as
    encoded by the model provider.
    """

    response_metadata: dict = Field(default_factory=dict)
    """Response metadata. For example: response headers, logprobs, token counts."""

    type: str
    """The type of the message. Must be a string that is unique to the message type.

    The purpose of this field is to allow for easy identification of the message type
    when deserializing messages.
    """

    name: Optional[str] = None
    """An optional name for the message.

    This can be used to provide a human-readable name for the message.

    Usage of this field is optional, and whether it's used or not is up to the
    model implementation.
    """

    id: Optional[str] = None
    """An optional unique identifier for the message. This should ideally be
    provided by the provider/model which created the message."""

    model_config = ConfigDict(
        extra="allow",
    )

    @field_validator("id", mode="before")
    def cast_id_to_str(cls, id_value: Any) -> Optional[str]:
        if id_value is not None:
            return str(id_value)
        else:
            return id_value

    def __init__(
        self, content: Union[str, list[Union[str, dict]]], **kwargs: Any
    ) -> None:
        """Pass in content as positional arg.

        Args:
            content: The string contents of the message.
            kwargs: Additional fields to pass to the message.
        """
        super().__init__(content=content, **kwargs)

    @classmethod
    def is_lc_serializable(cls) -> bool:
        """Return whether this class is serializable.

        This is used to determine whether the class should be included in the
        langchain schema.

        Returns:
            True if the class is serializable, False otherwise.
        """
        return True

    @classmethod
    def get_lc_namespace(cls) -> list[str]:
        """Get the namespace of the langchain object.

        Default is ["langchain", "schema", "messages"].
        """
        return ["langchain", "schema", "messages"]
    def text(self) -> str:
        """Get the text content of the message.

        Returns:
            The text content of the message.
        """
        if isinstance(self.content, str):
            return self.content

        # must be a list
        blocks = [
            block
            for block in self.content
            if isinstance(block, str)
            or (block.get("type") == "text" and isinstance(block.get("text"), str))
        ]
        return "".join(
            block if isinstance(block, str) else block["text"] for block in blocks
        )
    def __add__(self, other: Any) -> ChatPromptTemplate:
        """Concatenate this message with another message."""
        from langchain_core.prompts.chat import ChatPromptTemplate

        prompt = ChatPromptTemplate(messages=[self])  # type: ignore[call-arg]
        return prompt + other
    def pretty_repr(self, html: bool = False) -> str:
        """Get a pretty representation of the message.

        Args:
            html: Whether to format the message as HTML. If True, the message will be
                formatted with HTML tags. Default is False.

        Returns:
            A pretty representation of the message.
        """
        title = get_msg_title_repr(self.type.title() + " Message", bold=html)
        # TODO: handle non-string content.
        if self.name is not None:
            title += f"\nName: {self.name}"
        return f"{title}\n\n{self.content}"
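# ---------------------------------------------------------------------------
# Usage sketch (illustrative, not part of the original module). It assumes
# HumanMessage and AIMessage, the concrete BaseMessage subclasses exported by
# langchain_core.messages, and is meant to be run as a standalone script.
from langchain_core.messages import AIMessage, HumanMessage
from langchain_core.prompts import ChatPromptTemplate

# The "id" validator above coerces any non-None id to a string.
msg = HumanMessage(content="Why is the sky blue?", id=42, name="alice")
assert msg.id == "42"

# text() concatenates plain-string blocks and "text"-type content blocks,
# skipping anything else (e.g. image blocks).
multimodal = HumanMessage(
    content=[
        {"type": "text", "text": "Describe this image."},
        {"type": "image_url", "image_url": {"url": "https://example.com/cat.png"}},
    ]
)
assert multimodal.text() == "Describe this image."

# pretty_repr() frames the message with an "=" title line (see
# get_msg_title_repr below) and appends the name and content.
print(msg.pretty_repr())

# __add__ above wraps the message in a ChatPromptTemplate; adding a second
# message is assumed here to append it to that prompt.
prompt = msg + AIMessage(content="Because of Rayleigh scattering.")
assert isinstance(prompt, ChatPromptTemplate)
# ---------------------------------------------------------------------------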
def merge_content(
    first_content: Union[str, list[Union[str, dict]]],
    *contents: Union[str, list[Union[str, dict]]],
) -> Union[str, list[Union[str, dict]]]:
    """Merge multiple message contents.

    Args:
        first_content: The first content. Can be a string or a list.
        contents: The other contents. Can be a string or a list.

    Returns:
        The merged content.
    """
    merged = first_content
    for content in contents:
        # If current is a string
        if isinstance(merged, str):
            # If the next chunk is also a string, then merge them naively
            if isinstance(content, str):
                merged = cast(str, merged) + content
            # If the next chunk is a list, add the current to the start of the list
            else:
                merged = [merged] + content  # type: ignore
        elif isinstance(content, list):
            # If both are lists
            merged = merge_lists(cast(list, merged), content)  # type: ignore
        # If the first content is a list, and the second content is a string
        else:
            # If the last element of the first content is a string
            # Add the second content to the last element
            if merged and isinstance(merged[-1], str):
                merged[-1] += content
            # If second content is an empty string, treat as a no-op
            elif content == "":
                pass
            else:
                # Otherwise, add the second content as a new element of the list
                merged.append(content)
    return merged
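# Quick sketch of merge_content with illustrative values (uses the function
# defined directly above):
assert merge_content("Hello", ", ", "world") == "Hello, world"

# A string followed by a list is promoted to a list.
assert merge_content("Hello", [{"type": "text", "text": " world"}]) == [
    "Hello",
    {"type": "text", "text": " world"},
]

# When the running result is a list and the next chunk is a non-empty string,
# it is appended to a trailing string element if there is one.
assert merge_content(["Hello"], " world") == ["Hello world"]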
class BaseMessageChunk(BaseMessage):
    """Message chunk, which can be concatenated with other Message chunks."""

    @classmethod
    def get_lc_namespace(cls) -> list[str]:
        """Get the namespace of the langchain object.

        Default is ["langchain", "schema", "messages"].
        """
        return ["langchain", "schema", "messages"]

    def __add__(self, other: Any) -> BaseMessageChunk:  # type: ignore
        """Message chunks support concatenation with other message chunks.

        This functionality is useful to combine message chunks yielded from
        a streaming model into a complete message.

        Args:
            other: Another message chunk to concatenate with this one.

        Returns:
            A new message chunk that is the concatenation of this message chunk
            and the other message chunk.

        Raises:
            TypeError: If the other object is not a message chunk.

        For example,

        `AIMessageChunk(content="Hello") + AIMessageChunk(content=" World")`

        will give `AIMessageChunk(content="Hello World")`
        """
        if isinstance(other, BaseMessageChunk):
            # If both are (subclasses of) BaseMessageChunk,
            # concat into a single BaseMessageChunk
            return self.__class__(  # type: ignore[call-arg]
                id=self.id,
                type=self.type,
                content=merge_content(self.content, other.content),
                additional_kwargs=merge_dicts(
                    self.additional_kwargs, other.additional_kwargs
                ),
                response_metadata=merge_dicts(
                    self.response_metadata, other.response_metadata
                ),
            )
        elif isinstance(other, list) and all(
            isinstance(o, BaseMessageChunk) for o in other
        ):
            content = merge_content(self.content, *(o.content for o in other))
            additional_kwargs = merge_dicts(
                self.additional_kwargs, *(o.additional_kwargs for o in other)
            )
            response_metadata = merge_dicts(
                self.response_metadata, *(o.response_metadata for o in other)
            )
            return self.__class__(  # type: ignore[call-arg]
                id=self.id,
                content=content,
                additional_kwargs=additional_kwargs,
                response_metadata=response_metadata,
            )
        else:
            msg = (
                'unsupported operand type(s) for +: "'
                f"{self.__class__.__name__}"
                f'" and "{other.__class__.__name__}"'
            )
            raise TypeError(msg)
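# Usage sketch (illustrative, not part of the original module). AIMessageChunk
# is the concrete BaseMessageChunk subclass exported by langchain_core.messages;
# it may refine __add__, but the merged content matches the docstring above.
from langchain_core.messages import AIMessageChunk

chunk = AIMessageChunk(content="Hello") + AIMessageChunk(content=" World")
assert isinstance(chunk, AIMessageChunk)
assert chunk.content == "Hello World"

# A list of chunks on the right-hand side is folded in a single step.
combined = AIMessageChunk(content="a") + [
    AIMessageChunk(content="b"),
    AIMessageChunk(content="c"),
]
assert combined.content == "abc"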
def message_to_dict(message: BaseMessage) -> dict:
    """Convert a Message to a dictionary.

    Args:
        message: Message to convert.

    Returns:
        Message as a dict. The dict will have a "type" key with the message type
        and a "data" key with the message data as a dict.
    """
    return {"type": message.type, "data": message.model_dump()}
def messages_to_dict(messages: Sequence[BaseMessage]) -> list[dict]:
    """Convert a sequence of Messages to a list of dictionaries.

    Args:
        messages: Sequence of messages (as BaseMessages) to convert.

    Returns:
        List of messages as dicts.
    """
    return [message_to_dict(m) for m in messages]
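# Usage sketch (illustrative): dict representations of a short chat history,
# using message_to_dict/messages_to_dict defined above together with the
# HumanMessage and AIMessage classes exported by langchain_core.messages.
from langchain_core.messages import AIMessage, HumanMessage

as_dicts = messages_to_dict(
    [HumanMessage(content="Hi there!"), AIMessage(content="Hello!")]
)
assert as_dicts[0]["type"] == "human"
assert as_dicts[0]["data"]["content"] == "Hi there!"
assert as_dicts[1]["type"] == "ai"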
def get_msg_title_repr(title: str, *, bold: bool = False) -> str:
    """Get a title representation for a message.

    Args:
        title: The title.
        bold: Whether to bold the title. Default is False.

    Returns:
        The title representation.
    """
    padded = " " + title + " "
    sep_len = (80 - len(padded)) // 2
    sep = "=" * sep_len
    second_sep = sep + "=" if len(padded) % 2 else sep
    if bold:
        padded = get_bolded_text(padded)
    return f"{sep}{padded}{second_sep}"
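# Layout sketch (uses the function defined directly above): the padded title is
# centered between "=" runs so the unbolded separator line is exactly 80
# characters wide.
line = get_msg_title_repr("Human Message")
assert len(line) == 80
assert line.strip("=") == " Human Message "

# bold=True additionally passes the padded title through get_bolded_text, as
# done by BaseMessage.pretty_repr(html=True).
print(get_msg_title_repr("Ai Message", bold=True))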