from typing import Any, Dict, List, Mapping, Optional

import requests
from langchain_core.callbacks import CallbackManagerForLLMRun
from langchain_core.language_models.llms import LLM
from pydantic import ConfigDict

from langchain_community.llms.utils import enforce_stop_tokens


class ContentHandlerAmazonAPIGateway:
    """Adapter to prepare the inputs from LangChain into the format
    that the LLM model expects.

    It also provides a helper function to extract
    the generated text from the model response."""
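    # Illustrative sketch (not part of this excerpt): `AmazonAPIGateway._call`
    # below relies on `transform_input` and `transform_output`. The request and
    # response shapes shown here assume a Hugging Face-style text-generation
    # endpoint behind the gateway; adapt them to whatever schema your deployed
    # model actually uses.
    @classmethod
    def transform_input(
        cls, prompt: str, model_kwargs: Dict[str, Any]
    ) -> Dict[str, Any]:
        # Wrap the prompt and generation parameters into the request body.
        return {"inputs": prompt, "parameters": model_kwargs}

    @classmethod
    def transform_output(cls, response: Any) -> str:
        # Pull the generated text out of the JSON response.
        return response.json()[0]["generated_text"]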
class AmazonAPIGateway(LLM):
    """Amazon API Gateway to access LLM models hosted on AWS."""

    api_url: str
    """API Gateway URL"""

    headers: Optional[Dict] = None
    """API Gateway HTTP headers to send, e.g. for authentication"""

    model_kwargs: Optional[Dict] = None
    """Keyword arguments to pass to the model."""

    content_handler: ContentHandlerAmazonAPIGateway = ContentHandlerAmazonAPIGateway()
    """The content handler class that provides the input and output
    transform functions to handle formats between the LLM and the endpoint.
    """

    model_config = ConfigDict(
        extra="forbid",
    )

    @property
    def _identifying_params(self) -> Mapping[str, Any]:
        """Get the identifying parameters."""
        _model_kwargs = self.model_kwargs or {}
        return {
            **{"api_url": self.api_url, "headers": self.headers},
            **{"model_kwargs": _model_kwargs},
        }

    @property
    def _llm_type(self) -> str:
        """Return type of llm."""
        return "amazon_api_gateway"

    def _call(
        self,
        prompt: str,
        stop: Optional[List[str]] = None,
        run_manager: Optional[CallbackManagerForLLMRun] = None,
        **kwargs: Any,
    ) -> str:
        """Call out to the Amazon API Gateway model.

        Args:
            prompt: The prompt to pass into the model.
            stop: Optional list of stop words to use when generating.

        Returns:
            The string generated by the model.

        Example:
            .. code-block:: python

                response = llm("Tell me a joke.")
        """
        _model_kwargs = self.model_kwargs or {}
        payload = self.content_handler.transform_input(prompt, _model_kwargs)

        try:
            # POST the transformed payload to the API Gateway endpoint and
            # let the content handler extract the generated text.
            response = requests.post(
                self.api_url,
                headers=self.headers,
                json=payload,
            )
            text = self.content_handler.transform_output(response)

        except Exception as error:
            raise ValueError(f"Error raised by the service: {error}")

        if stop is not None:
            # Truncate the output at the first occurrence of any stop token.
            text = enforce_stop_tokens(text, stop)

        return text
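# Usage sketch (not part of the original module). The endpoint URL, header,
# and model_kwargs below are placeholders; substitute the invoke URL of your
# own deployed API Gateway stage and the parameters your model accepts.
if __name__ == "__main__":
    llm = AmazonAPIGateway(
        api_url="https://<api-id>.execute-api.<region>.amazonaws.com/prod/generate",
        headers={"Authorization": "Bearer <token>"},
        model_kwargs={"max_new_tokens": 100, "temperature": 0.7},
    )
    # The string prompt is transformed by the content handler, sent to the
    # gateway, and the generated text is returned.
    print(llm.invoke("Tell me a joke."))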