Source code for langchain_community.chat_models.octoai
"""OctoAI Endpoints chat wrapper. Relies heavily on ChatOpenAI."""fromtypingimport(Any,Callable,Dict,Literal,Optional,Sequence,Type,Union,)fromlangchain_core.language_modelsimportLanguageModelInputfromlangchain_core.messagesimportBaseMessagefromlangchain_core.runnablesimportRunnablefromlangchain_core.toolsimportBaseToolfromlangchain_core.utilsimportconvert_to_secret_str,get_from_dict_or_env,pre_initfromlangchain_core.utils.function_callingimportconvert_to_openai_toolfrompydanticimportField,SecretStrfromlangchain_community.chat_models.openaiimportChatOpenAIfromlangchain_community.utils.openaiimportis_openai_v1DEFAULT_API_BASE="https://text.octoai.run/v1/"DEFAULT_MODEL="llama-2-13b-chat"
class ChatOctoAI(ChatOpenAI):
    """OctoAI Chat large language models.

    See https://octo.ai/ for information about OctoAI.

    To use, you should have the ``openai`` python package installed and the
    environment variable ``OCTOAI_API_TOKEN`` set with your API token.
    Alternatively, you can use the octoai_api_token keyword argument.

    Any parameters that are valid to be passed to the `openai.create` call
    can be passed in, even if not explicitly saved on this class.

    Example:
        .. code-block:: python

            from langchain_community.chat_models import ChatOctoAI

            chat = ChatOctoAI(model_name="mixtral-8x7b-instruct")
    """

    octoai_api_base: str = Field(default=DEFAULT_API_BASE)
    octoai_api_token: SecretStr = Field(default=SecretStr(""), alias="api_key")
    model_name: str = Field(default=DEFAULT_MODEL, alias="model")

    @property
    def _llm_type(self) -> str:
        """Return type of chat model."""
        return "octoai-chat"

    @property
    def lc_secrets(self) -> Dict[str, str]:
        return {"octoai_api_token": "OCTOAI_API_TOKEN"}

    @classmethod
    def is_lc_serializable(cls) -> bool:
        return False
    @pre_init
    def validate_environment(cls, values: Dict) -> Dict:
        """Validate that the API key and python package exist in the environment."""
        values["octoai_api_base"] = get_from_dict_or_env(
            values,
            "octoai_api_base",
            "OCTOAI_API_BASE",
            default=DEFAULT_API_BASE,
        )
        values["octoai_api_token"] = convert_to_secret_str(
            get_from_dict_or_env(values, "octoai_api_token", "OCTOAI_API_TOKEN")
        )
        values["model_name"] = get_from_dict_or_env(
            values,
            "model_name",
            "MODEL_NAME",
            default=DEFAULT_MODEL,
        )

        try:
            import openai

            if is_openai_v1():
                client_params = {
                    "api_key": values["octoai_api_token"].get_secret_value(),
                    "base_url": values["octoai_api_base"],
                }
                if not values.get("client"):
                    values["client"] = openai.OpenAI(**client_params).chat.completions
                if not values.get("async_client"):
                    values["async_client"] = openai.AsyncOpenAI(
                        **client_params
                    ).chat.completions
            else:
                values["openai_api_base"] = values["octoai_api_base"]
                values["openai_api_key"] = values[
                    "octoai_api_token"
                ].get_secret_value()
                values["client"] = openai.ChatCompletion  # type: ignore[attr-defined]
        except ImportError:
            raise ImportError(
                "Could not import openai python package. "
                "Please install it with `pip install openai`."
            )
        return values
    def bind_tools(
        self,
        tools: Sequence[Union[Dict[str, Any], Type, Callable, BaseTool]],
        *,
        tool_choice: Optional[
            Union[dict, str, Literal["auto", "none", "required", "any"], bool]
        ] = None,
        strict: Optional[bool] = None,
        **kwargs: Any,
    ) -> Runnable[LanguageModelInput, BaseMessage]:
        """Imitate the bind_tools method from langchain_openai.ChatOpenAI."""
        formatted_tools = [
            convert_to_openai_tool(tool, strict=strict) for tool in tools
        ]
        if tool_choice:
            if isinstance(tool_choice, str):
                # tool_choice is a tool/function name
                if tool_choice not in ("auto", "none", "any", "required"):
                    tool_choice = {
                        "type": "function",
                        "function": {"name": tool_choice},
                    }
                # 'any' is not natively supported by the OpenAI API.
                # We support 'any' since other models use this instead of 'required'.
                if tool_choice == "any":
                    tool_choice = "required"
            elif isinstance(tool_choice, bool):
                tool_choice = "required"
            elif isinstance(tool_choice, dict):
                tool_names = [
                    formatted_tool["function"]["name"]
                    for formatted_tool in formatted_tools
                ]
                if not any(
                    tool_name == tool_choice["function"]["name"]
                    for tool_name in tool_names
                ):
                    raise ValueError(
                        f"Tool choice {tool_choice} was specified, but the only "
                        f"provided tools were {tool_names}."
                    )
            else:
                raise ValueError(
                    f"Unrecognized tool_choice type. Expected str, bool or dict. "
                    f"Received: {tool_choice}"
                )
            kwargs["tool_choice"] = tool_choice
        return super().bind(tools=formatted_tools, **kwargs)
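

# ---------------------------------------------------------------------------
# Illustrative usage sketch; not part of the library module above. It assumes
# the OCTOAI_API_TOKEN environment variable is set and that the model name
# "mixtral-8x7b-instruct" (taken from the class docstring example) is served
# by the configured OctoAI text endpoint.

def _example_basic_chat() -> None:
    """Minimal sketch of instantiating ChatOctoAI and sending one message."""
    chat = ChatOctoAI(model_name="mixtral-8x7b-instruct", max_tokens=256)
    # invoke() accepts a plain string (or a list of BaseMessage objects) and
    # returns an AIMessage whose text is available on .content.
    reply = chat.invoke("In one sentence, what is an OctoAI endpoint?")
    print(reply.content)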
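

# Illustrative tool-calling sketch; also not part of the module above. The
# ``multiply`` tool and the model name are assumptions for demonstration, and
# tool calling requires a model on the endpoint that actually supports it.

def _example_bind_tools() -> None:
    """Minimal sketch of bind_tools() with a single function tool."""
    from langchain_core.tools import tool

    @tool
    def multiply(a: int, b: int) -> int:
        """Multiply two integers."""
        return a * b

    chat = ChatOctoAI(model_name="mixtral-8x7b-instruct")
    # tool_choice="multiply" is rewritten by bind_tools() into the OpenAI-style
    # {"type": "function", "function": {"name": "multiply"}} payload.
    chat_with_tools = chat.bind_tools([multiply], tool_choice="multiply")
    message = chat_with_tools.invoke("What is 6 times 7?")
    # Any parsed tool calls are exposed on AIMessage.tool_calls.
    print(message.tool_calls)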