import logging
from typing import Any, Dict, List, Mapping, Optional

import requests
from langchain_core.callbacks import CallbackManagerForLLMRun
from langchain_core.language_models.llms import LLM
from langchain_core.utils.pydantic import get_fields
from pydantic import ConfigDict, Field, model_validator

from langchain_community.llms.utils import enforce_stop_tokens

logger = logging.getLogger(__name__)


class Modal(LLM):
    """Modal large language models.

    To use, you should have the ``modal-client`` python package installed.

    Any parameters that are valid to be passed to the call can be passed
    in, even if not explicitly saved on this class.

    Example:
        .. code-block:: python

            from langchain_community.llms import Modal
            modal = Modal(endpoint_url="")
    """

    endpoint_url: str = ""
    """Model endpoint to use."""

    model_kwargs: Dict[str, Any] = Field(default_factory=dict)
    """Holds any model parameters valid for the `create` call that are not
    explicitly specified."""

    model_config = ConfigDict(
        extra="forbid",
    )

    @model_validator(mode="before")
    @classmethod
    def build_extra(cls, values: Dict[str, Any]) -> Any:
        """Build extra kwargs from additional params that were passed in."""
        all_required_field_names = {field.alias for field in get_fields(cls).values()}

        extra = values.get("model_kwargs", {})
        for field_name in list(values):
            if field_name not in all_required_field_names:
                if field_name in extra:
                    raise ValueError(f"Found {field_name} supplied twice.")
                logger.warning(
                    f"{field_name} was transferred to model_kwargs. "
                    f"Please confirm that {field_name} is what you intended."
                )
                # Move unrecognized keyword arguments into model_kwargs so they
                # are forwarded to the endpoint instead of failing validation.
                extra[field_name] = values.pop(field_name)
        values["model_kwargs"] = extra
        return values

    @property
    def _identifying_params(self) -> Mapping[str, Any]:
        """Get the identifying parameters."""
        return {
            **{"endpoint_url": self.endpoint_url},
            **{"model_kwargs": self.model_kwargs},
        }

    @property
    def _llm_type(self) -> str:
        """Return type of llm."""
        return "modal"

    def _call(
        self,
        prompt: str,
        stop: Optional[List[str]] = None,
        run_manager: Optional[CallbackManagerForLLMRun] = None,
        **kwargs: Any,
    ) -> str:
        """Call to Modal endpoint."""
        params = self.model_kwargs or {}
        params = {**params, **kwargs}
        response = requests.post(
            url=self.endpoint_url,
            headers={
                "Content-Type": "application/json",
            },
            json={"prompt": prompt, **params},
        )
        try:
            # The endpoint is expected to return the generated text under the
            # "prompt" key of its JSON response.
            response_json = response.json()
            text = response_json["prompt"]
        except KeyError:
            raise KeyError("LangChain requires 'prompt' key in response.")
        if stop is not None:
            # Stop tokens are not enforced by the model parameters,
            # so truncate the generated text here.
            text = enforce_stop_tokens(text, stop)
        return text
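
# Illustrative usage sketch (an assumption, not part of the original module):
# the endpoint URL and the `max_length` argument below are placeholders for
# whatever web endpoint and model parameters your Modal deployment actually
# exposes. Unrecognized keyword arguments such as `max_length` are moved into
# `model_kwargs` by `build_extra` and forwarded in the JSON payload of each call.
if __name__ == "__main__":
    llm = Modal(
        endpoint_url="https://example--text-gen.modal.run",  # hypothetical endpoint
        max_length=200,  # hypothetical model parameter, captured in model_kwargs
    )
    print(llm.invoke("Tell me a joke.", stop=["\n\n"]))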