from typing import Any, Dict, List, Type, Union

from langchain_core.language_models import BaseLanguageModel
from langchain_core.messages import BaseMessage, SystemMessage, get_buffer_string
from langchain_core.prompts import BasePromptTemplate
from pydantic import Field

from langchain_community.graphs import NetworkxEntityGraph
from langchain_community.graphs.networkx_graph import (
    KnowledgeTriple,
    get_entities,
    parse_triples,
)

try:
    from langchain.chains.llm import LLMChain
    from langchain.memory.chat_memory import BaseChatMemory
    from langchain.memory.prompt import (
        ENTITY_EXTRACTION_PROMPT,
        KNOWLEDGE_TRIPLE_EXTRACTION_PROMPT,
    )
    from langchain.memory.utils import get_prompt_input_key

    class ConversationKGMemory(BaseChatMemory):
        """Knowledge graph conversation memory.

        Integrates with external knowledge graph to store and retrieve
        information about knowledge triples in the conversation.
        """

        k: int = 2
        """Number of previous utterances to include in the context."""
        human_prefix: str = "Human"
        ai_prefix: str = "AI"
        kg: NetworkxEntityGraph = Field(default_factory=NetworkxEntityGraph)
        knowledge_extraction_prompt: BasePromptTemplate = (
            KNOWLEDGE_TRIPLE_EXTRACTION_PROMPT
        )
        entity_extraction_prompt: BasePromptTemplate = ENTITY_EXTRACTION_PROMPT
        llm: BaseLanguageModel
        summary_message_cls: Type[BaseMessage] = SystemMessage
        memory_key: str = "history"  #: :meta private:
        def load_memory_variables(self, inputs: Dict[str, Any]) -> Dict[str, Any]:
            """Return history buffer."""
            entities = self._get_current_entities(inputs)

            summary_strings = []
            for entity in entities:
                knowledge = self.kg.get_entity_knowledge(entity)
                if knowledge:
                    summary = f"On {entity}: {'. '.join(knowledge)}."
                    summary_strings.append(summary)

            context: Union[str, List]
            if not summary_strings:
                context = [] if self.return_messages else ""
            elif self.return_messages:
                context = [
                    self.summary_message_cls(content=text)
                    for text in summary_strings
                ]
            else:
                context = "\n".join(summary_strings)

            return {self.memory_key: context}
        @property
        def memory_variables(self) -> List[str]:
            """Will always return list of memory variables.

            :meta private:
            """
            return [self.memory_key]

        def _get_prompt_input_key(self, inputs: Dict[str, Any]) -> str:
            """Get the input key for the prompt."""
            if self.input_key is None:
                return get_prompt_input_key(inputs, self.memory_variables)
            return self.input_key

        def _get_prompt_output_key(self, outputs: Dict[str, Any]) -> str:
            """Get the output key for the prompt."""
            if self.output_key is None:
                if len(outputs) != 1:
                    raise ValueError(f"One output key expected, got {outputs.keys()}")
                return list(outputs.keys())[0]
            return self.output_key
        def _get_current_entities(self, inputs: Dict[str, Any]) -> List[str]:
            """Get the current entities in the conversation."""
            prompt_input_key = self._get_prompt_input_key(inputs)
            return self.get_current_entities(inputs[prompt_input_key])
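        # The public `get_current_entities` helper is referenced above but not shown
        # in this excerpt. A minimal sketch of what it plausibly does, assuming the
        # imported `LLMChain`, `get_buffer_string`, and `get_entities` helpers and
        # the last `k * 2` chat messages as conversational context:
        def get_current_entities(self, input_string: str) -> List[str]:
            """Extract entities mentioned in the latest input (hedged sketch)."""
            chain = LLMChain(llm=self.llm, prompt=self.entity_extraction_prompt)
            # Render the most recent turns as a plain-text transcript for the prompt.
            buffer_string = get_buffer_string(
                self.chat_memory.messages[-self.k * 2 :],
                human_prefix=self.human_prefix,
                ai_prefix=self.ai_prefix,
            )
            output = chain.predict(history=buffer_string, input=input_string)
            # `get_entities` parses the entity list emitted by the LLM.
            return get_entities(output)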
        def _get_and_update_kg(self, inputs: Dict[str, Any]) -> None:
            """Get and update knowledge graph from the conversation history."""
            prompt_input_key = self._get_prompt_input_key(inputs)
            knowledge = self.get_knowledge_triplets(inputs[prompt_input_key])
            for triple in knowledge:
                self.kg.add_triple(triple)
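        # `get_knowledge_triplets` is likewise referenced but not shown here. A hedged
        # sketch, assuming the imported `KnowledgeTriple` and `parse_triples` helpers
        # and the same windowed transcript as above:
        def get_knowledge_triplets(self, input_string: str) -> List[KnowledgeTriple]:
            """Extract knowledge triples from the latest input (hedged sketch)."""
            chain = LLMChain(llm=self.llm, prompt=self.knowledge_extraction_prompt)
            buffer_string = get_buffer_string(
                self.chat_memory.messages[-self.k * 2 :],
                human_prefix=self.human_prefix,
                ai_prefix=self.ai_prefix,
            )
            output = chain.predict(history=buffer_string, input=input_string)
            # `parse_triples` turns the LLM's "(subject, predicate, object)" lines
            # into KnowledgeTriple instances that the graph can store.
            return parse_triples(output)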
        def save_context(
            self, inputs: Dict[str, Any], outputs: Dict[str, str]
        ) -> None:
            """Save context from this conversation to buffer."""
            super().save_context(inputs, outputs)
            self._get_and_update_kg(inputs)
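    # --- Usage sketch (not part of the original module; names are illustrative) ---
    # Any language model accepted as `llm` works; `ChatOpenAI` below is just an
    # assumed choice, and the extracted triples and summary text depend entirely
    # on that model's output:
    #
    #   from langchain_openai import ChatOpenAI
    #
    #   memory = ConversationKGMemory(llm=ChatOpenAI(temperature=0))
    #   memory.save_context(
    #       {"input": "Sam is my friend and Sam lives in Berlin."},
    #       {"output": "Good to know!"},
    #   )
    #   memory.load_memory_variables({"input": "Where does Sam live?"})
    #   # e.g. {"history": "On Sam: ..."}, built from the triples stored about Sam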