class BigdlLLM(IpexLLM):
    """Wrapper around the BigdlLLM model

    Example:
        .. code-block:: python

            from langchain_community.llms import BigdlLLM

            llm = BigdlLLM.from_model_id(model_id="THUDM/chatglm-6b")
    """
    @classmethod
    def from_model_id(
        cls,
        model_id: str,
        model_kwargs: Optional[dict] = None,
        *,
        tokenizer_id: Optional[str] = None,
        load_in_4bit: bool = True,
        load_in_low_bit: Optional[str] = None,
        **kwargs: Any,
    ) -> LLM:
        """
        Construct object from model_id

        Args:
            model_id: Path for the huggingface repo id to be downloaded or
                      the huggingface checkpoint folder.
            tokenizer_id: Path for the huggingface repo id to be downloaded or
                      the huggingface checkpoint folder which contains the
                      tokenizer.
            model_kwargs: Keyword arguments to pass to the model and tokenizer.
            kwargs: Extra arguments to pass to the model and tokenizer.

        Returns:
            An object of BigdlLLM.
        """
        logger.warning("BigdlLLM was deprecated. Please use IpexLLM instead.")

        try:
            from bigdl.llm.transformers import (
                AutoModel,
                AutoModelForCausalLM,
            )
            from transformers import AutoTokenizer, LlamaTokenizer
        except ImportError:
            raise ImportError(
                "Could not import bigdl-llm or transformers. "
                "Please install it with "
                "`pip install --pre --upgrade bigdl-llm[all]`."
            )

        if load_in_low_bit is not None:
            logger.warning(
                """`load_in_low_bit` option is not supported in BigdlLLM and
                is ignored. For more data types support with `load_in_low_bit`,
                use IpexLLM instead."""
            )

        if not load_in_4bit:
            raise ValueError(
                "BigdlLLM only supports loading in 4-bit mode, "
                "i.e. load_in_4bit = True. "
                "Please install it with "
                "`pip install --pre --upgrade bigdl-llm[all]`."
            )

        _model_kwargs = model_kwargs or {}
        _tokenizer_id = tokenizer_id or model_id

        try:
            tokenizer = AutoTokenizer.from_pretrained(_tokenizer_id, **_model_kwargs)
        except Exception:
            tokenizer = LlamaTokenizer.from_pretrained(_tokenizer_id, **_model_kwargs)

        try:
            model = AutoModelForCausalLM.from_pretrained(
                model_id, load_in_4bit=True, **_model_kwargs
            )
        except Exception:
            model = AutoModel.from_pretrained(
                model_id, load_in_4bit=True, **_model_kwargs
            )

        if "trust_remote_code" in _model_kwargs:
            _model_kwargs = {
                k: v for k, v in _model_kwargs.items() if k != "trust_remote_code"
            }

        return cls(
            model_id=model_id,
            model=model,
            tokenizer=tokenizer,
            model_kwargs=_model_kwargs,
            **kwargs,
        )
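    # A minimal usage sketch for `from_model_id` (illustrative, not part of
    # this module). The model id matches the class docstring example; the
    # `trust_remote_code` flag is an assumption for repos with custom code,
    # and `invoke` is the standard LangChain Runnable entry point.
    #
    #     from langchain_community.llms import BigdlLLM
    #
    #     llm = BigdlLLM.from_model_id(
    #         model_id="THUDM/chatglm-6b",
    #         model_kwargs={"trust_remote_code": True},
    #     )
    #     print(llm.invoke("What is AI?"))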
    @classmethod
    def from_model_id_low_bit(
        cls,
        model_id: str,
        model_kwargs: Optional[dict] = None,
        *,
        tokenizer_id: Optional[str] = None,
        **kwargs: Any,
    ) -> LLM:
        """
        Construct low_bit object from model_id

        Args:
            model_id: Path for the bigdl-llm transformers low-bit model folder.
            tokenizer_id: Path for the huggingface repo id or local model folder
                      which contains the tokenizer.
            model_kwargs: Keyword arguments to pass to the model and tokenizer.
            kwargs: Extra arguments to pass to the model and tokenizer.

        Returns:
            An object of BigdlLLM.
        """
        logger.warning("BigdlLLM was deprecated. Please use IpexLLM instead.")

        try:
            from bigdl.llm.transformers import (
                AutoModel,
                AutoModelForCausalLM,
            )
            from transformers import AutoTokenizer, LlamaTokenizer
        except ImportError:
            raise ImportError(
                "Could not import bigdl-llm or transformers. "
                "Please install it with "
                "`pip install --pre --upgrade bigdl-llm[all]`."
            )

        _model_kwargs = model_kwargs or {}
        _tokenizer_id = tokenizer_id or model_id

        try:
            tokenizer = AutoTokenizer.from_pretrained(_tokenizer_id, **_model_kwargs)
        except Exception:
            tokenizer = LlamaTokenizer.from_pretrained(_tokenizer_id, **_model_kwargs)

        try:
            model = AutoModelForCausalLM.load_low_bit(model_id, **_model_kwargs)
        except Exception:
            model = AutoModel.load_low_bit(model_id, **_model_kwargs)

        if "trust_remote_code" in _model_kwargs:
            _model_kwargs = {
                k: v for k, v in _model_kwargs.items() if k != "trust_remote_code"
            }

        return cls(
            model_id=model_id,
            model=model,
            tokenizer=tokenizer,
            model_kwargs=_model_kwargs,
            **kwargs,
        )
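    # A sketch of the save-then-reload flow this method targets (paths are
    # hypothetical). It assumes bigdl-llm models expose `save_low_bit` and
    # that this wrapper keeps the loaded model/tokenizer on `llm.model` /
    # `llm.tokenizer`; reloading with `from_model_id_low_bit` then skips
    # re-quantization.
    #
    #     llm = BigdlLLM.from_model_id(model_id="THUDM/chatglm-6b")
    #     llm.model.save_low_bit("./chatglm-6b-low-bit")
    #     llm.tokenizer.save_pretrained("./chatglm-6b-low-bit")
    #
    #     llm_lowbit = BigdlLLM.from_model_id_low_bit(
    #         model_id="./chatglm-6b-low-bit",
    #     )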