import os
from typing import List, Literal, Optional, overload

import nomic
from langchain_core.embeddings import Embeddings
from nomic import embed


class NomicEmbeddings(Embeddings):
    """NomicEmbeddings embedding model.

    Example:
        .. code-block:: python

            from langchain_nomic import NomicEmbeddings

            model = NomicEmbeddings()
    """

    @overload
    def __init__(
        self,
        *,
        model: str,
        nomic_api_key: Optional[str] = ...,
        dimensionality: Optional[int] = ...,
        inference_mode: Literal["remote"] = ...,
    ):
        ...

    @overload
    def __init__(
        self,
        *,
        model: str,
        nomic_api_key: Optional[str] = ...,
        dimensionality: Optional[int] = ...,
        inference_mode: Literal["local", "dynamic"],
        device: Optional[str] = ...,
    ):
        ...

    @overload
    def __init__(
        self,
        *,
        model: str,
        nomic_api_key: Optional[str] = ...,
        dimensionality: Optional[int] = ...,
        inference_mode: str,
        device: Optional[str] = ...,
    ):
        ...
    def __init__(
        self,
        *,
        model: str,
        nomic_api_key: Optional[str] = None,
        dimensionality: Optional[int] = None,
        inference_mode: str = "remote",
        device: Optional[str] = None,
        vision_model: Optional[str] = None,
    ):
        """Initialize NomicEmbeddings model.

        Args:
            model: model name
            nomic_api_key: optionally, set the Nomic API key. Uses the
                NOMIC_API_KEY environment variable by default.
            dimensionality: The embedding dimension, for use with
                Matryoshka-capable models. Defaults to full-size.
            inference_mode: How to generate embeddings. One of `remote`,
                `local` (Embed4All), or `dynamic` (automatic). Defaults to
                `remote`.
            device: The device to use for local embeddings. Choices include
                `cpu`, `gpu`, `nvidia`, `amd`, or a specific device name. See
                the docstring for `GPT4All.__init__` for more info. Typically
                defaults to CPU. Do not use on macOS.
            vision_model: optionally, the name of the vision model to use
                when embedding images.
        """
        _api_key = nomic_api_key or os.environ.get("NOMIC_API_KEY")
        if _api_key:
            nomic.login(_api_key)
        self.model = model
        self.dimensionality = dimensionality
        self.inference_mode = inference_mode
        self.device = device
        self.vision_model = vision_model
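    # Construction sketch for the three inference modes. This is illustrative
    # only: it assumes the `nomic` package is installed, NOMIC_API_KEY is set
    # for remote mode, and that "nomic-embed-text-v1.5" is an available
    # Matryoshka-capable model name.
    #
    #     remote = NomicEmbeddings(model="nomic-embed-text-v1.5")
    #     truncated = NomicEmbeddings(
    #         model="nomic-embed-text-v1.5",
    #         dimensionality=256,  # truncate Matryoshka embeddings to 256 dims
    #     )
    #     local = NomicEmbeddings(
    #         model="nomic-embed-text-v1.5",
    #         inference_mode="local",  # embed on-device via Embed4All
    #         device="cpu",
    #     )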
    def embed(self, texts: List[str], *, task_type: str) -> List[List[float]]:
        """Embed texts.

        Args:
            texts: list of texts to embed
            task_type: the task type to use when embedding. One of
                `search_query`, `search_document`, `classification`,
                `clustering`
        """
        output = embed.text(
            texts=texts,
            model=self.model,
            task_type=task_type,
            dimensionality=self.dimensionality,
            inference_mode=self.inference_mode,
            device=self.device,
        )
        return output["embeddings"]
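    # Usage sketch for the low-level `embed` method with an explicit task
    # type (hypothetical input; assumes an `embedder` constructed as above):
    #
    #     vectors = embedder.embed(
    #         ["group these sentences by topic"],
    #         task_type="clustering",
    #     )
    #     # `vectors` is a list with one embedding (a list of floats) per text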
    def embed_documents(self, texts: List[str]) -> List[List[float]]:
        """Embed search docs.

        Args:
            texts: list of texts to embed as documents
        """
        return self.embed(
            texts=texts,
            task_type="search_document",
        )
    def embed_query(self, text: str) -> List[float]:
        """Embed query text.

        Args:
            text: query text
        """
        return self.embed(
            texts=[text],
            task_type="search_query",
        )[0]
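# End-to-end sketch of the standard LangChain Embeddings interface. Because
# `embed_documents` uses task_type="search_document" and `embed_query` uses
# task_type="search_query", documents and queries land in the same embedding
# space and can be compared directly. Assumes NOMIC_API_KEY is set and that
# "nomic-embed-text-v1.5" is an available model name:
#
#     embedder = NomicEmbeddings(model="nomic-embed-text-v1.5")
#     doc_vectors = embedder.embed_documents(["doc one", "doc two"])
#     query_vector = embedder.embed_query("which doc mentions one?")
#     assert len(doc_vectors) == 2
#     assert len(query_vector) == len(doc_vectors[0])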