# Source code for langchain_community.chat_models.everlyai
"""EverlyAI Endpoints chat wrapper. Relies heavily on ChatOpenAI."""from__future__importannotationsimportloggingimportsysimportwarningsfromtypingimport(TYPE_CHECKING,Any,Callable,Dict,Optional,Sequence,Set,Type,Union,)fromlangchain_core.messagesimportBaseMessagefromlangchain_core.toolsimportBaseToolfromlangchain_core.utilsimportconvert_to_secret_str,get_from_dict_or_envfrompydanticimportField,model_validatorfromlangchain_community.adapters.openaiimportconvert_message_to_dictfromlangchain_community.chat_models.openaiimport(ChatOpenAI,_import_tiktoken,)ifTYPE_CHECKING:importtiktokenlogger=logging.getLogger(__name__)DEFAULT_API_BASE="https://everlyai.xyz/hosted"DEFAULT_MODEL="meta-llama/Llama-2-7b-chat-hf"
class ChatEverlyAI(ChatOpenAI):
    """`EverlyAI` Chat large language models.

    To use, you should have the ``openai`` python package installed, and the
    environment variable ``EVERLYAI_API_KEY`` set with your API key.
    Alternatively, you can use the everlyai_api_key keyword argument.

    Any parameters that are valid to be passed to the `openai.create` call
    can be passed in, even if not explicitly saved on this class.

    Example:
        .. code-block:: python

            from langchain_community.chat_models import ChatEverlyAI
            chat = ChatEverlyAI(model_name="meta-llama/Llama-2-7b-chat-hf")
    """

    @property
    def _llm_type(self) -> str:
        """Return type of chat model."""
        return "everlyai-chat"

    @property
    def lc_secrets(self) -> Dict[str, str]:
        # Maps the constructor kwarg to the environment variable that holds it,
        # so serialization masks the key instead of embedding it.
        return {"everlyai_api_key": "EVERLYAI_API_KEY"}

    @classmethod
    def is_lc_serializable(cls) -> bool:
        # Explicitly opt out of LangChain's serialization support.
        return False

    everlyai_api_key: Optional[str] = None
    """EverlyAI Endpoints API keys."""
    model_name: str = Field(default=DEFAULT_MODEL, alias="model")
    """Model name to use."""
    everlyai_api_base: str = DEFAULT_API_BASE
    """Base URL path for API requests."""
    available_models: Optional[Set[str]] = None
    """Available models from EverlyAI API."""

    @staticmethod
    def get_available_models() -> Set[str]:
        """Get available models from EverlyAI API."""
        # EverlyAI doesn't yet support dynamically query for available models.
        return set(
            [
                "meta-llama/Llama-2-7b-chat-hf",
                "meta-llama/Llama-2-13b-chat-hf-quantized",
            ]
        )

    @model_validator(mode="before")
    @classmethod
    def validate_environment_override(cls, values: dict) -> Any:
        """Validate that api key and python package exists in environment."""
        # Resolve the EverlyAI key (kwarg or EVERLYAI_API_KEY env var) and hand
        # it to the inherited ChatOpenAI machinery under its expected name.
        values["openai_api_key"] = convert_to_secret_str(
            get_from_dict_or_env(
                values,
                "everlyai_api_key",
                "EVERLYAI_API_KEY",
            )
        )
        # NOTE(review): always forces the default endpoint; a user-supplied
        # `everlyai_api_base` field value appears to be ignored here — confirm
        # whether that is intentional.
        values["openai_api_base"] = DEFAULT_API_BASE
        try:
            import openai
        except ImportError as e:
            raise ImportError(
                "Could not import openai python package. "
                "Please install it with `pip install openai`.",
            ) from e
        try:
            # Legacy (pre-1.0) openai SDK interface; absence of the attribute
            # is reported as a version problem below.
            values["client"] = openai.ChatCompletion  # type: ignore[attr-defined]
        except AttributeError as exc:
            raise ValueError(
                "`openai` has no `ChatCompletion` attribute, this is likely "
                "due to an old version of the openai package. Try upgrading it "
                "with `pip install --upgrade openai`.",
            ) from exc
        # Fill in the default model before checking it against the allow-list.
        if "model_name" not in values.keys():
            values["model_name"] = DEFAULT_MODEL
        model_name = values["model_name"]
        available_models = cls.get_available_models()
        if model_name not in available_models:
            raise ValueError(
                f"Model name {model_name} not found in available models: "
                f"{available_models}.",
            )
        values["available_models"] = available_models
        return values

    def _get_encoding_model(self) -> tuple[str, tiktoken.Encoding]:
        """Resolve the (model name, tiktoken encoding) pair for token counting."""
        tiktoken_ = _import_tiktoken()
        # Prefer an explicit tiktoken model override (inherited ChatOpenAI
        # field), otherwise use this instance's model name.
        if self.tiktoken_model_name is not None:
            model = self.tiktoken_model_name
        else:
            model = self.model_name
        # Returns the number of tokens used by a list of messages.
        # NOTE(review): the encoding is always looked up for
        # "gpt-3.5-turbo-0301" regardless of `model`, so Llama messages are
        # counted with an OpenAI tokenizer — counts are approximate.
        try:
            encoding = tiktoken_.encoding_for_model("gpt-3.5-turbo-0301")
        except KeyError:
            logger.warning("Warning: model not found. Using cl100k_base encoding.")
            model = "cl100k_base"
            encoding = tiktoken_.get_encoding(model)
        return model, encoding

    def get_num_tokens_from_messages(
        self,
        messages: list[BaseMessage],
        tools: Optional[
            Sequence[Union[Dict[str, Any], Type, Callable, BaseTool]]
        ] = None,
    ) -> int:
        """Calculate num tokens with tiktoken package.

        Official documentation: https://github.com/openai/openai-cookbook/blob/
        main/examples/How_to_format_inputs_to_ChatGPT_models.ipynb"""
        # Tool schemas are not included in the count; warn so callers know.
        if tools is not None:
            warnings.warn(
                "Counting tokens in tool schemas is not yet supported. Ignoring tools."
            )
        # NOTE(review): checks only the minor version (assumes Python 3.x);
        # tiktoken requires >= 3.8, so older interpreters fall back to the
        # parent class's approximate counting.
        if sys.version_info[1] <= 7:
            return super().get_num_tokens_from_messages(messages)
        model, encoding = self._get_encoding_model()
        # Per-message/per-name overheads matching the OpenAI cookbook's
        # accounting for gpt-3.5-turbo-style chat formatting.
        tokens_per_message = 3
        tokens_per_name = 1
        num_tokens = 0
        messages_dict = [convert_message_to_dict(m) for m in messages]
        for message in messages_dict:
            num_tokens += tokens_per_message
            for key, value in message.items():
                # Cast str(value) in case the message value is not a string
                # This occurs with function messages
                num_tokens += len(encoding.encode(str(value)))
                if key == "name":
                    num_tokens += tokens_per_name
        # every reply is primed with <im_start>assistant
        num_tokens += 3
        return num_tokens