class CohereRagRetriever(BaseRetriever):
    """Cohere Chat API with RAG."""

    connectors: List[Dict] = Field(default_factory=lambda: [{"id": "web-search"}])
    """
    When specified, the model's reply will be enriched with information found by
    querying each of the connectors (RAG). These will be returned as langchain
    documents.

    Currently only accepts {"id": "web-search"}.
    """

    llm: BaseChatModel
    """Cohere ChatModel to use."""

    class Config:
        """Configuration for this pydantic object."""

        arbitrary_types_allowed = True
        """Allow arbitrary types."""

    def _get_relevant_documents(
        self,
        query: str,
        *,
        run_manager: CallbackManagerForRetrieverRun,
        documents: Optional[List[Dict[str, str]]] = None,
        **kwargs: Any,
    ) -> List[Document]:
        messages: List[List[BaseMessage]] = [[HumanMessage(content=query)]]
        res = self.llm.generate(
            messages,
            connectors=self.connectors if documents is None else None,
            documents=documents,
            callbacks=run_manager.get_child(),
            **kwargs,
        ).generations[0][0]
        return _get_docs(res)

    async def _aget_relevant_documents(
        self,
        query: str,
        *,
        run_manager: AsyncCallbackManagerForRetrieverRun,
        documents: Optional[List[Dict[str, str]]] = None,
        **kwargs: Any,
    ) -> List[Document]:
        messages: List[List[BaseMessage]] = [[HumanMessage(content=query)]]
        res = (
            await self.llm.agenerate(
                messages,
                connectors=self.connectors if documents is None else None,
                documents=documents,
                callbacks=run_manager.get_child(),
                **kwargs,
            )
        ).generations[0][0]
        return _get_docs(res)
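
# --- Example usage (a minimal sketch, not part of the original class) ---
# Assumptions: ChatCohere is importable from langchain_community.chat_models,
# COHERE_API_KEY is set in the environment, and the standard retriever entry
# point get_relevant_documents() forwards extra keyword arguments (such as
# `documents`) to _get_relevant_documents(). Import paths and the preferred
# entry point (invoke vs. get_relevant_documents) vary across langchain
# releases, so treat this as illustrative rather than canonical.
if __name__ == "__main__":
    from langchain_community.chat_models import ChatCohere

    rag = CohereRagRetriever(llm=ChatCohere())

    # With no `documents` argument, the default {"id": "web-search"} connector
    # grounds the reply and the cited sources come back as Document objects.
    web_docs = rag.get_relevant_documents("What is retrieval-augmented generation?")
    for doc in web_docs:
        print(doc.metadata.get("title"), "->", doc.page_content[:80])

    # Passing `documents` grounds the reply on your own snippets instead;
    # the connector is then skipped (connectors=None in the methods above).
    own_docs = rag.get_relevant_documents(
        "Summarize the provided note.",
        documents=[{"title": "note", "snippet": "Cohere's Chat API supports RAG."}],
    )
    for doc in own_docs:
        print(doc.page_content)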