class Mlflow(LLM):
    """MLflow LLM service.

    To use, you should have the `mlflow[genai]` python package installed.
    For more information, see https://mlflow.org/docs/latest/llms/deployments.

    Example:
        .. code-block:: python

            from langchain_community.llms import Mlflow

            completions = Mlflow(
                target_uri="http://localhost:5000",
                endpoint="test",
                temperature=0.1,
            )
    """

    endpoint: str
    """The endpoint to use."""

    target_uri: str
    """The target URI to use."""

    temperature: float = 0.0
    """The sampling temperature."""

    n: int = 1
    """The number of completion choices to generate."""

    stop: Optional[List[str]] = None
    """The stop sequence."""

    max_tokens: Optional[int] = None
    """The maximum number of tokens to generate."""

    extra_params: Dict[str, Any] = Field(default_factory=dict)
    """Any extra parameters to pass to the endpoint."""

    # Deployment client created lazily in __init__; kept private so pydantic
    # does not treat it as a model field.
    _client: Any = PrivateAttr()

    def __init__(self, **kwargs: Any):
        """Validate the target URI and create the MLflow deployment client.

        Raises:
            ImportError: If `mlflow[genai]` is not installed.
            ValueError: If ``target_uri`` has an unsupported scheme.
        """
        super().__init__(**kwargs)
        self._validate_uri()
        try:
            from mlflow.deployments import get_deploy_client

            self._client = get_deploy_client(self.target_uri)
        except ImportError as e:
            raise ImportError(
                "Failed to create the client. "
                "Please run `pip install mlflow[genai]` to install "
                "required dependencies."
            ) from e

    def _validate_uri(self) -> None:
        """Ensure ``target_uri`` is a scheme MLflow deployments understands."""
        # Bare "databricks" is a special-cased target with no URI scheme.
        if self.target_uri == "databricks":
            return
        allowed = ["http", "https", "databricks"]
        if urlparse(self.target_uri).scheme not in allowed:
            raise ValueError(
                f"Invalid target URI: {self.target_uri}. "
                f"The scheme must be one of {allowed}."
            )

    @property
    def _default_params(self) -> Dict[str, Any]:
        """Default parameters describing this LLM configuration."""
        return {
            "target_uri": self.target_uri,
            "endpoint": self.endpoint,
            "temperature": self.temperature,
            "n": self.n,
            "stop": self.stop,
            "max_tokens": self.max_tokens,
            "extra_params": self.extra_params,
        }

    @property
    def _identifying_params(self) -> Mapping[str, Any]:
        """Parameters used to identify/cache this LLM instance."""
        return self._default_params

    def _call(
        self,
        prompt: str,
        stop: Optional[List[str]] = None,
        run_manager: Optional[CallbackManagerForLLMRun] = None,
        **kwargs: Any,
    ) -> str:
        """Send ``prompt`` to the endpoint and return the first completion text."""
        data: Dict[str, Any] = {
            "prompt": prompt,
            "temperature": self.temperature,
            "n": self.n,
            **self.extra_params,
            **kwargs,
        }
        # The instance-level stop sequence takes precedence over the
        # call-time one; only send the key when a stop sequence exists.
        if stop := self.stop or stop:
            data["stop"] = stop
        if self.max_tokens is not None:
            data["max_tokens"] = self.max_tokens
        resp = self._client.predict(endpoint=self.endpoint, inputs=data)
        return resp["choices"][0]["text"]

    @property
    def _llm_type(self) -> str:
        return "mlflow"