# Source code for langchain_experimental.llm_bash.base
"""Chain that interprets a prompt and executes bash operations."""from__future__importannotationsimportloggingimportwarningsfromtypingimportAny,Dict,List,Optionalfromlangchain.chains.baseimportChainfromlangchain.chains.llmimportLLMChainfromlangchain.schemaimportBasePromptTemplate,OutputParserExceptionfromlangchain_core.callbacks.managerimportCallbackManagerForChainRunfromlangchain_core.language_modelsimportBaseLanguageModelfrompydanticimportConfigDict,Field,model_validatorfromlangchain_experimental.llm_bash.bashimportBashProcessfromlangchain_experimental.llm_bash.promptimportPROMPTlogger=logging.getLogger(__name__)
class LLMBashChain(Chain):
    """Chain that interprets a prompt and executes bash operations.

    The wrapped ``llm_chain`` turns a natural-language question into bash
    commands (via its prompt's output parser), which are then executed by
    ``bash_process``.

    Example:
        .. code-block:: python

            from langchain.chains import LLMBashChain
            from langchain_community.llms import OpenAI

            llm_bash = LLMBashChain.from_llm(OpenAI())
    """

    llm_chain: LLMChain
    llm: Optional[BaseLanguageModel] = None
    """[Deprecated] LLM wrapper to use."""
    input_key: str = "question"  #: :meta private:
    output_key: str = "answer"  #: :meta private:
    prompt: BasePromptTemplate = PROMPT
    """[Deprecated]"""
    bash_process: BashProcess = Field(default_factory=BashProcess)  #: :meta private:

    model_config = ConfigDict(
        arbitrary_types_allowed=True,
        extra="forbid",
    )

    @model_validator(mode="before")
    @classmethod
    def raise_deprecation(cls, values: Dict) -> Any:
        """Warn on deprecated ``llm`` usage and build ``llm_chain`` from it."""
        if "llm" in values:
            warnings.warn(
                "Directly instantiating an LLMBashChain with an llm is deprecated. "
                "Please instantiate with llm_chain or using the from_llm "
                "class method."
            )
            # Only synthesize llm_chain when the caller did not provide one.
            if "llm_chain" not in values and values["llm"] is not None:
                prompt = values.get("prompt", PROMPT)
                values["llm_chain"] = LLMChain(llm=values["llm"], prompt=prompt)
        return values

    @model_validator(mode="before")
    @classmethod
    def validate_prompt(cls, values: Dict) -> Any:
        """Ensure the prompt can parse the LLM output into commands."""
        # NOTE(review): assumes "llm_chain" is present (possibly set by
        # raise_deprecation); a missing key raises KeyError here.
        if values["llm_chain"].prompt.output_parser is None:
            raise ValueError(
                "The prompt used by llm_chain is expected to have an output_parser."
            )
        return values

    @property
    def input_keys(self) -> List[str]:
        """Expect input key.

        :meta private:
        """
        return [self.input_key]

    @property
    def output_keys(self) -> List[str]:
        """Expect output key.

        :meta private:
        """
        return [self.output_key]

    def _call(
        self,
        inputs: Dict[str, Any],
        run_manager: Optional[CallbackManagerForChainRun] = None,
    ) -> Dict[str, str]:
        """Predict bash commands for the question, run them, return the output."""
        _run_manager = run_manager or CallbackManagerForChainRun.get_noop_manager()
        _run_manager.on_text(inputs[self.input_key], verbose=self.verbose)

        t = self.llm_chain.predict(
            question=inputs[self.input_key], callbacks=_run_manager.get_child()
        )
        _run_manager.on_text(t, color="green", verbose=self.verbose)
        t = t.strip()
        try:
            # output_parser is guaranteed non-None by validate_prompt.
            parser = self.llm_chain.prompt.output_parser
            command_list = parser.parse(t)  # type: ignore[union-attr]
        except OutputParserException as e:
            # Surface parse failures to callbacks before propagating.
            _run_manager.on_chain_error(e, verbose=self.verbose)
            raise e
        if self.verbose:
            _run_manager.on_text("\nCode: ", verbose=self.verbose)
            _run_manager.on_text(
                str(command_list), color="yellow", verbose=self.verbose
            )
        output = self.bash_process.run(command_list)
        _run_manager.on_text("\nAnswer: ", verbose=self.verbose)
        _run_manager.on_text(output, color="yellow", verbose=self.verbose)
        return {self.output_key: output}

    @property
    def _chain_type(self) -> str:
        return "llm_bash_chain"