class ChatKonko(ChatOpenAI):
    """`ChatKonko` Chat large language models API.

    To use, you should have the ``konko`` python package installed, and the
    environment variable ``KONKO_API_KEY`` and ``OPENAI_API_KEY`` set with
    your API key.

    Any parameters that are valid to be passed to the konko.create call
    can be passed in, even if not explicitly saved on this class.

    Example:
        .. code-block:: python

            from langchain_community.chat_models import ChatKonko
            llm = ChatKonko(model="meta-llama/Llama-2-13b-chat-hf")
    """

    @property
    def lc_secrets(self) -> Dict[str, str]:
        """Map constructor arg names to the env vars that hold their secrets."""
        return {
            "konko_api_key": "KONKO_API_KEY",
            "openai_api_key": "OPENAI_API_KEY",
        }

    @classmethod
    def is_lc_serializable(cls) -> bool:
        """Return whether this model can be serialized by Langchain."""
        return False

    client: Any = None  #: :meta private:
    model: str = Field(default=DEFAULT_MODEL, alias="model")
    """Model name to use."""
    temperature: float = 0.7
    """What sampling temperature to use."""
    model_kwargs: Dict[str, Any] = Field(default_factory=dict)
    """Holds any model parameters valid for `create` call not explicitly specified."""
    openai_api_key: Optional[str] = None
    konko_api_key: Optional[str] = None
    max_retries: int = 6
    """Maximum number of retries to make when generating."""
    streaming: bool = False
    """Whether to stream the results or not."""
    n: int = 1
    """Number of chat completions to generate for each prompt."""
    max_tokens: int = 20
    """Maximum number of tokens to generate."""

    @pre_init
    def validate_environment(cls, values: Dict) -> Dict:
        """Validate that api key and python package exists in environment.

        Resolves ``konko_api_key`` from the values dict or the environment,
        binds the konko client (v1 vs. legacy API), and validates the ``n``
        / ``streaming`` combination.

        Raises:
            ImportError: If the ``konko`` package is not installed.
            ValueError: If the konko package is too old, or if ``n`` is
                invalid (must be >= 1, and exactly 1 when streaming).
        """
        values["konko_api_key"] = convert_to_secret_str(
            get_from_dict_or_env(values, "konko_api_key", "KONKO_API_KEY")
        )
        try:
            import konko

        except ImportError as e:
            # Chain the original error so the traceback distinguishes a
            # missing package from a broken install.
            raise ImportError(
                "Could not import konko python package. "
                "Please install it with `pip install konko`."
            ) from e
        try:
            if is_openai_v1():
                values["client"] = konko.chat.completions
            else:
                values["client"] = konko.ChatCompletion
        except AttributeError as e:
            raise ValueError(
                "`konko` has no `ChatCompletion` attribute, this is likely "
                "due to an old version of the konko package. Try upgrading it "
                "with `pip install --upgrade konko`."
            ) from e
        if not hasattr(konko, "_is_legacy_openai"):
            warnings.warn(
                "You are using an older version of the 'konko' package. "
                "Please consider upgrading to access new features."
            )
        if values["n"] < 1:
            raise ValueError("n must be at least 1.")
        if values["n"] > 1 and values["streaming"]:
            raise ValueError("n must be 1 when streaming.")
        return values

    @property
    def _default_params(self) -> Dict[str, Any]:
        """Get the default parameters for calling Konko API."""
        return {
            "model": self.model,
            "max_tokens": self.max_tokens,
            "stream": self.streaming,
            "n": self.n,
            "temperature": self.temperature,
            **self.model_kwargs,
        }
[docs]@staticmethoddefget_available_models(konko_api_key:Union[str,SecretStr,None]=None,openai_api_key:Union[str,SecretStr,None]=None,konko_api_base:str=DEFAULT_API_BASE,)->Set[str]:"""Get available models from Konko API."""# Try to retrieve the OpenAI API key if it's not passed as an argumentifnotopenai_api_key:try:openai_api_key=convert_to_secret_str(os.environ["OPENAI_API_KEY"])exceptKeyError:pass# It's okay if it's not set, we just won't use itelifisinstance(openai_api_key,str):openai_api_key=convert_to_secret_str(openai_api_key)# Try to retrieve the Konko API key if it's not passed as an argumentifnotkonko_api_key:try:konko_api_key=convert_to_secret_str(os.environ["KONKO_API_KEY"])exceptKeyError:raiseValueError("Konko API key must be passed as keyword argument or ""set in environment variable KONKO_API_KEY.")elifisinstance(konko_api_key,str):konko_api_key=convert_to_secret_str(konko_api_key)models_url=f"{konko_api_base}/models"headers={"Authorization":f"Bearer {konko_api_key.get_secret_value()}",}ifopenai_api_key:headers["X-OpenAI-Api-Key"]=cast(SecretStr,openai_api_key).get_secret_value()models_response=requests.get(models_url,headers=headers)ifmodels_response.status_code!=200:raiseValueError(f"Error getting models from {models_url}: "f"{models_response.status_code}")return{model["id"]formodelinmodels_response.json()["data"]}
def_stream(self,messages:List[BaseMessage],stop:Optional[List[str]]=None,run_manager:Optional[CallbackManagerForLLMRun]=None,**kwargs:Any,)->Iterator[ChatGenerationChunk]:message_dicts,params=self._create_message_dicts(messages,stop)params={**params,**kwargs,"stream":True}default_chunk_class=AIMessageChunkforchunkinself.completion_with_retry(messages=message_dicts,run_manager=run_manager,**params):iflen(chunk["choices"])==0:continuechoice=chunk["choices"][0]chunk=_convert_delta_to_message_chunk(choice["delta"],default_chunk_class)finish_reason=choice.get("finish_reason")generation_info=(dict(finish_reason=finish_reason)iffinish_reasonisnotNoneelseNone)default_chunk_class=chunk.__class__cg_chunk=ChatGenerationChunk(message=chunk,generation_info=generation_info)ifrun_manager:run_manager.on_llm_new_token(cg_chunk.text,chunk=cg_chunk)yieldcg_chunkdef_generate(self,messages:List[BaseMessage],stop:Optional[List[str]]=None,run_manager:Optional[CallbackManagerForLLMRun]=None,stream:Optional[bool]=None,**kwargs:Any,)->ChatResult:should_stream=streamifstreamisnotNoneelseself.streamingifshould_stream:stream_iter=self._stream(messages,stop=stop,run_manager=run_manager,**kwargs)returngenerate_from_stream(stream_iter)message_dicts,params=self._create_message_dicts(messages,stop)params={**params,**kwargs}response=self.completion_with_retry(messages=message_dicts,run_manager=run_manager,**params)returnself._create_chat_result(response)def_create_message_dicts(self,messages:List[BaseMessage],stop:Optional[List[str]])->Tuple[List[Dict[str,Any]],Dict[str,Any]]:params=self._client_paramsifstopisnotNone:if"stop"inparams:raiseValueError("`stop` found in both the input and default params.")params["stop"]=stopmessage_dicts=[convert_message_to_dict(m)forminmessages]returnmessage_dicts,params@propertydef_identifying_params(self)->Dict[str,Any]:"""Get the identifying parameters."""return{**{"model_name":self.model},**self._default_params}@propertydef_client_params(self)->Dict[str,Any]:"""Get 
the parameters used for the konko client."""return{**self._default_params}def_get_invocation_params(self,stop:Optional[List[str]]=None,**kwargs:Any)->Dict[str,Any]:"""Get the parameters used to invoke the model."""return{"model":self.model,**super()._get_invocation_params(stop=stop),**self._default_params,**kwargs,}@propertydef_llm_type(self)->str:"""Return type of chat model."""return"konko-chat"