class VectorStoreIndexWrapper(BaseModel):
    """Wrapper around a vectorstore for easy access."""

    vectorstore: VectorStore

    class Config:
        arbitrary_types_allowed = True
        extra = "forbid"

    def query(
        self,
        question: str,
        llm: Optional[BaseLanguageModel] = None,
        retriever_kwargs: Optional[Dict[str, Any]] = None,
        **kwargs: Any,
    ) -> str:
        """Query the vectorstore."""
        if llm is None:
            raise NotImplementedError(
                "This API has been changed to require an LLM. "
                "Please provide an llm to use for querying the vectorstore.\n"
                "For example,\n"
                "from langchain_openai import OpenAI\n"
                "llm = OpenAI(temperature=0)"
            )
        retriever_kwargs = retriever_kwargs or {}
        chain = RetrievalQA.from_chain_type(
            llm, retriever=self.vectorstore.as_retriever(**retriever_kwargs), **kwargs
        )
        return chain.invoke({chain.input_key: question})[chain.output_key]

    async def aquery(
        self,
        question: str,
        llm: Optional[BaseLanguageModel] = None,
        retriever_kwargs: Optional[Dict[str, Any]] = None,
        **kwargs: Any,
    ) -> str:
        """Query the vectorstore."""
        if llm is None:
            raise NotImplementedError(
                "This API has been changed to require an LLM. "
                "Please provide an llm to use for querying the vectorstore.\n"
                "For example,\n"
                "from langchain_openai import OpenAI\n"
                "llm = OpenAI(temperature=0)"
            )
        retriever_kwargs = retriever_kwargs or {}
        chain = RetrievalQA.from_chain_type(
            llm, retriever=self.vectorstore.as_retriever(**retriever_kwargs), **kwargs
        )
        return (await chain.ainvoke({chain.input_key: question}))[chain.output_key]

    def query_with_sources(
        self,
        question: str,
        llm: Optional[BaseLanguageModel] = None,
        retriever_kwargs: Optional[Dict[str, Any]] = None,
        **kwargs: Any,
    ) -> dict:
        """Query the vectorstore and get back sources."""
        if llm is None:
            raise NotImplementedError(
                "This API has been changed to require an LLM. "
                "Please provide an llm to use for querying the vectorstore.\n"
                "For example,\n"
                "from langchain_openai import OpenAI\n"
                "llm = OpenAI(temperature=0)"
            )
        retriever_kwargs = retriever_kwargs or {}
        chain = RetrievalQAWithSourcesChain.from_chain_type(
            llm, retriever=self.vectorstore.as_retriever(**retriever_kwargs), **kwargs
        )
        return chain.invoke({chain.question_key: question})

    async def aquery_with_sources(
        self,
        question: str,
        llm: Optional[BaseLanguageModel] = None,
        retriever_kwargs: Optional[Dict[str, Any]] = None,
        **kwargs: Any,
    ) -> dict:
        """Query the vectorstore and get back sources."""
        if llm is None:
            raise NotImplementedError(
                "This API has been changed to require an LLM. "
                "Please provide an llm to use for querying the vectorstore.\n"
                "For example,\n"
                "from langchain_openai import OpenAI\n"
                "llm = OpenAI(temperature=0)"
            )
        retriever_kwargs = retriever_kwargs or {}
        chain = RetrievalQAWithSourcesChain.from_chain_type(
            llm, retriever=self.vectorstore.as_retriever(**retriever_kwargs), **kwargs
        )
        return await chain.ainvoke({chain.question_key: question})
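

# A minimal usage sketch for the wrapper above (assumes an ``index`` built by
# ``VectorstoreIndexCreator`` below; the ``langchain_openai`` import and the model
# settings are illustrative assumptions, not part of this module):
#
#     from langchain_openai import OpenAI
#
#     llm = OpenAI(temperature=0)
#     answer = index.query("What is the document about?", llm=llm)
#     cited = index.query_with_sources("What is the document about?", llm=llm)
#     print(cited["answer"], cited["sources"])
#
# The async variants mirror this: ``await index.aquery(...)`` and
# ``await index.aquery_with_sources(...)``.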


def _get_in_memory_vectorstore() -> Type[VectorStore]:
    """Get the InMemoryVectorStore."""
    import warnings

    try:
        from langchain_community.vectorstores.inmemory import InMemoryVectorStore
    except ImportError:
        raise ImportError(
            "Please install langchain-community to use the InMemoryVectorStore."
        )
    warnings.warn(
        "Using InMemoryVectorStore as the default vectorstore. "
        "This memory store won't persist data. You should explicitly "
        "specify a vectorstore when using VectorstoreIndexCreator."
    )
    return InMemoryVectorStore
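

# The in-memory default above requires the ``langchain-community`` package and keeps
# vectors only for the lifetime of the process. A minimal sketch of overriding it on
# the ``VectorstoreIndexCreator`` defined just below (``FAISS`` and
# ``OpenAIEmbeddings`` here are assumptions; any ``VectorStore`` class plus an
# ``Embeddings`` instance works):
#
#     from langchain_community.vectorstores import FAISS
#     from langchain_openai import OpenAIEmbeddings
#
#     index_creator = VectorstoreIndexCreator(
#         vectorstore_cls=FAISS,
#         embedding=OpenAIEmbeddings(),
#     )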


class VectorstoreIndexCreator(BaseModel):
    """Logic for creating indexes."""

    vectorstore_cls: Type[VectorStore] = Field(
        default_factory=_get_in_memory_vectorstore
    )
    embedding: Embeddings
    text_splitter: TextSplitter = Field(default_factory=_get_default_text_splitter)
    vectorstore_kwargs: dict = Field(default_factory=dict)

    class Config:
        arbitrary_types_allowed = True
        extra = "forbid"

    def from_loaders(self, loaders: List[BaseLoader]) -> VectorStoreIndexWrapper:
        """Create a vectorstore index from loaders."""
        docs = []
        for loader in loaders:
            docs.extend(loader.load())
        return self.from_documents(docs)

    async def afrom_loaders(
        self, loaders: List[BaseLoader]
    ) -> VectorStoreIndexWrapper:
        """Create a vectorstore index from loaders."""
        docs = []
        for loader in loaders:
            async for doc in loader.alazy_load():
                docs.append(doc)
        return await self.afrom_documents(docs)

    def from_documents(self, documents: List[Document]) -> VectorStoreIndexWrapper:
        """Create a vectorstore index from documents."""
        sub_docs = self.text_splitter.split_documents(documents)
        vectorstore = self.vectorstore_cls.from_documents(
            sub_docs, self.embedding, **self.vectorstore_kwargs
        )
        return VectorStoreIndexWrapper(vectorstore=vectorstore)

    async def afrom_documents(
        self, documents: List[Document]
    ) -> VectorStoreIndexWrapper:
        """Create a vectorstore index from documents."""
        sub_docs = self.text_splitter.split_documents(documents)
        vectorstore = await self.vectorstore_cls.afrom_documents(
            sub_docs, self.embedding, **self.vectorstore_kwargs
        )
        return VectorStoreIndexWrapper(vectorstore=vectorstore)
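

# An end-to-end usage sketch for the creator above (the loader, the file path, and
# the ``langchain_openai`` classes are illustrative assumptions, not part of this
# module):
#
#     from langchain_community.document_loaders import TextLoader
#     from langchain_openai import OpenAI, OpenAIEmbeddings
#
#     loader = TextLoader("state_of_the_union.txt")
#     creator = VectorstoreIndexCreator(embedding=OpenAIEmbeddings())
#     index = creator.from_loaders([loader])
#     answer = index.query("What did the speech cover?", llm=OpenAI(temperature=0))
#
# ``from_documents`` accepts already-loaded ``Document`` objects instead of loaders,
# and the async variants (``afrom_loaders`` / ``afrom_documents``) mirror the same
# flow with ``await``.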