class DynamicRunnable(RunnableSerializable[Input, Output]):
    """Serializable Runnable that can be dynamically configured.

    A DynamicRunnable should be initiated using the `configurable_fields` or
    `configurable_alternatives` method of a Runnable.

    Parameters:
        default: The default Runnable to use.
        config: The configuration to use.
    """

    # The Runnable actually invoked once `prepare()` has resolved the
    # configuration.
    default: RunnableSerializable[Input, Output]

    # Stored config that `prepare()` merges with the caller-supplied config.
    config: Optional[RunnableConfig] = None

    class Config:
        # `default` may be an arbitrary (non-pydantic) object.
        arbitrary_types_allowed = True

    @classmethod
    def is_lc_serializable(cls) -> bool:
        """Return True: this class is serializable by langchain."""
        return True

    @classmethod
    def get_lc_namespace(cls) -> List[str]:
        """Get the namespace of the langchain object."""
        return ["langchain", "schema", "runnable"]

    @property
    def InputType(self) -> Type[Input]:
        # Delegates to the default Runnable.
        return self.default.InputType

    @property
    def OutputType(self) -> Type[Output]:
        # Delegates to the default Runnable.
        return self.default.OutputType

    def get_input_schema(
        self, config: Optional[RunnableConfig] = None
    ) -> Type[BaseModel]:
        """Return the input schema of the Runnable selected by `config`."""
        runnable, config = self.prepare(config)
        return runnable.get_input_schema(config)

    def get_output_schema(
        self, config: Optional[RunnableConfig] = None
    ) -> Type[BaseModel]:
        """Return the output schema of the Runnable selected by `config`."""
        runnable, config = self.prepare(config)
        return runnable.get_output_schema(config)

    def get_graph(self, config: Optional[RunnableConfig] = None) -> Graph:
        """Return the graph of the Runnable selected by `config`."""
        runnable, config = self.prepare(config)
        return runnable.get_graph(config)

    def with_config(
        self,
        config: Optional[RunnableConfig] = None,
        # Sadly Unpack is not well supported by mypy so this will have to be untyped
        **kwargs: Any,
    ) -> Runnable[Input, Output]:
        """Return a copy of self with `config` (and kwargs) merged into its
        stored config."""
        return self.__class__(
            **{**self.__dict__, "config": ensure_config(merge_configs(config, kwargs))}  # type: ignore[arg-type]
        )
[docs]defprepare(self,config:Optional[RunnableConfig]=None)->Tuple[Runnable[Input,Output],RunnableConfig]:"""Prepare the Runnable for invocation. Args: config: The configuration to use. Defaults to None. Returns: Tuple[Runnable[Input, Output], RunnableConfig]: The prepared Runnable and configuration. """runnable:Runnable[Input,Output]=selfwhileisinstance(runnable,DynamicRunnable):runnable,config=runnable._prepare(merge_configs(runnable.config,config))returnrunnable,cast(RunnableConfig,config)
[docs]defbatch(self,inputs:List[Input],config:Optional[Union[RunnableConfig,List[RunnableConfig]]]=None,*,return_exceptions:bool=False,**kwargs:Optional[Any],)->List[Output]:configs=get_config_list(config,len(inputs))prepared=[self.prepare(c)forcinconfigs]ifall(pisself.defaultforp,_inprepared):returnself.default.batch(inputs,[cfor_,cinprepared],return_exceptions=return_exceptions,**kwargs,)ifnotinputs:return[]definvoke(prepared:Tuple[Runnable[Input,Output],RunnableConfig],input:Input,)->Union[Output,Exception]:bound,config=preparedifreturn_exceptions:try:returnbound.invoke(input,config,**kwargs)exceptExceptionase:returneelse:returnbound.invoke(input,config,**kwargs)# If there's only one input, don't bother with the executoriflen(inputs)==1:returncast(List[Output],[invoke(prepared[0],inputs[0])])withget_executor_for_config(configs[0])asexecutor:returncast(List[Output],list(executor.map(invoke,prepared,inputs)))
class RunnableConfigurableFields(DynamicRunnable[Input, Output]):
    """Runnable that can be dynamically configured.

    A RunnableConfigurableFields should be initiated using the
    `configurable_fields` method of a Runnable.

    Parameters:
        fields: The configurable fields to use.

    Here is an example of using a RunnableConfigurableFields with LLMs:

        .. code-block:: python

            from langchain_core.prompts import PromptTemplate
            from langchain_core.runnables import ConfigurableField
            from langchain_openai import ChatOpenAI

            model = ChatOpenAI(temperature=0).configurable_fields(
                temperature=ConfigurableField(
                    id="temperature",
                    name="LLM Temperature",
                    description="The temperature of the LLM",
                )
            )
            # This creates a RunnableConfigurableFields for a chat model.

            # When invoking the created RunnableSequence, you can pass in the
            # value for your ConfigurableField's id which in this case
            # will be change in temperature

            prompt = PromptTemplate.from_template("Pick a random number above {x}")
            chain = prompt | model

            chain.invoke({"x": 0})
            chain.invoke({"x": 0}, config={"configurable": {"temperature": 0.9}})

    Here is an example of using a RunnableConfigurableFields with HubRunnables:

        .. code-block:: python

            from langchain_core.prompts import PromptTemplate
            from langchain_core.runnables import ConfigurableField
            from langchain_openai import ChatOpenAI
            from langchain.runnables.hub import HubRunnable

            prompt = HubRunnable("rlm/rag-prompt").configurable_fields(
                owner_repo_commit=ConfigurableField(
                    id="hub_commit",
                    name="Hub Commit",
                    description="The Hub commit to pull from",
                )
            )

            prompt.invoke({"question": "foo", "context": "bar"})

            # Invoking prompt with `with_config` method

            prompt.invoke(
                {"question": "foo", "context": "bar"},
                config={"configurable": {"hub_commit": "rlm/rag-prompt-llama"}},
            )
    """

    # Maps a pydantic field name on `self.default` to the ConfigurableField
    # (or single/multi option variant) describing how it may be overridden.
    fields: Dict[str, AnyConfigurableField]

    @classmethod
    def get_lc_namespace(cls) -> List[str]:
        """Get the namespace of the langchain object."""
        return ["langchain", "schema", "runnable"]

    @property
    def config_specs(self) -> List[ConfigurableFieldSpec]:
        """Get the configuration specs for the RunnableConfigurableFields.

        Returns:
            List[ConfigurableFieldSpec]: The configuration specs.
        """
        return get_unique_config_specs(
            [
                (
                    ConfigurableFieldSpec(
                        id=spec.id,
                        name=spec.name,
                        # Fall back to the pydantic field's own description /
                        # annotation when the ConfigurableField omits them.
                        description=spec.description
                        or self.default.__fields__[field_name].field_info.description,
                        annotation=spec.annotation
                        or self.default.__fields__[field_name].annotation,
                        default=getattr(self.default, field_name),
                        is_shared=spec.is_shared,
                    )
                    if isinstance(spec, ConfigurableField)
                    # Option variants get an enum-valued spec built for them.
                    else make_options_spec(
                        spec, self.default.__fields__[field_name].field_info.description
                    )
                )
                for field_name, spec in self.fields.items()
            ]
            + list(self.default.config_specs)
        )

    def configurable_fields(
        self, **kwargs: AnyConfigurableField
    ) -> RunnableSerializable[Input, Output]:
        """Get a new RunnableConfigurableFields with the specified
        configurable fields."""
        # New kwargs take precedence over the existing field mapping.
        return self.default.configurable_fields(**{**self.fields, **kwargs})
class RunnableConfigurableAlternatives(DynamicRunnable[Input, Output]):
    """Runnable that can be dynamically configured.

    A RunnableConfigurableAlternatives should be initiated using the
    `configurable_alternatives` method of a Runnable or can be
    initiated directly as well.

    Here is an example of using a RunnableConfigurableAlternatives that uses
    alternative prompts to illustrate its functionality:

        .. code-block:: python

            from langchain_core.runnables import ConfigurableField
            from langchain_openai import ChatOpenAI

            # This creates a RunnableConfigurableAlternatives for Prompt Runnable
            # with two alternatives.
            prompt = PromptTemplate.from_template(
                "Tell me a joke about {topic}"
            ).configurable_alternatives(
                ConfigurableField(id="prompt"),
                default_key="joke",
                poem=PromptTemplate.from_template("Write a short poem about {topic}")
            )

            # When invoking the created RunnableSequence, you can pass in the
            # value for your ConfigurableField's id which in this case will either be
            # `joke` or `poem`.
            chain = prompt | ChatOpenAI(model="gpt-3.5-turbo-0125", temperature=0)

            # The `with_config` method brings in the desired Prompt Runnable in your
            # Runnable Sequence.
            chain.with_config(configurable={"prompt": "poem"}).invoke({"topic": "bears"})


    Equivalently, you can initialize RunnableConfigurableAlternatives directly
    and use in LCEL in the same way:

        .. code-block:: python

            from langchain_core.runnables import ConfigurableField
            from langchain_core.runnables.configurable import RunnableConfigurableAlternatives
            from langchain_openai import ChatOpenAI

            prompt = RunnableConfigurableAlternatives(
                which=ConfigurableField(id='prompt'),
                default=PromptTemplate.from_template("Tell me a joke about {topic}"),
                default_key='joke',
                prefix_keys=False,
                alternatives={"poem":PromptTemplate.from_template("Write a short poem about {topic}")}
            )
            chain = prompt | ChatOpenAI(model="gpt-3.5-turbo-0125", temperature=0)
            chain.with_config(configurable={"prompt": "poem"}).invoke({"topic": "bears"})
    """  # noqa: E501

    which: ConfigurableField
    """The ConfigurableField to use to choose between alternatives."""

    alternatives: Dict[
        str,
        Union[Runnable[Input, Output], Callable[[], Runnable[Input, Output]]],
    ]
    """The alternatives to choose from."""

    default_key: str = "default"
    """The enum value to use for the default option. Defaults to "default"."""

    prefix_keys: bool
    """Whether to prefix configurable fields of each alternative with a namespace
    of the form <which.id>==<alternative_key>, eg. a key named "temperature" used by
    the alternative named "gpt3" becomes "model==gpt3/temperature"."""

    @classmethod
    def get_lc_namespace(cls) -> List[str]:
        """Get the namespace of the langchain object."""
        return ["langchain", "schema", "runnable"]

    @property
    def config_specs(self) -> List[ConfigurableFieldSpec]:
        """Build the config specs: one enum-valued spec for `which`, plus the
        (optionally prefixed) specs of the default and each alternative."""
        # Reuse a cached StrEnum of the alternative keys for `self.which`;
        # the cache is shared module state, hence the lock.
        with _enums_for_spec_lock:
            if which_enum := _enums_for_spec.get(self.which):
                pass
            else:
                which_enum = StrEnum(  # type: ignore[call-overload]
                    self.which.name or self.which.id,
                    ((v, v) for v in list(self.alternatives.keys()) + [self.default_key]),
                )
                _enums_for_spec[self.which] = cast(Type[StrEnum], which_enum)
        return get_unique_config_specs(
            # which alternative
            [
                ConfigurableFieldSpec(
                    id=self.which.id,
                    name=self.which.name,
                    description=self.which.description,
                    annotation=which_enum,
                    default=self.default_key,
                    is_shared=self.which.is_shared,
                ),
            ]
            # config specs of the default option
            + (
                [
                    prefix_config_spec(s, f"{self.which.id}=={self.default_key}")
                    for s in self.default.config_specs
                ]
                if self.prefix_keys
                else self.default.config_specs
            )
            # config specs of the alternatives
            + [
                (
                    prefix_config_spec(s, f"{self.which.id}=={alt_key}")
                    if self.prefix_keys
                    else s
                )
                for alt_key, alt in self.alternatives.items()
                if isinstance(alt, RunnableSerializable)
                for s in alt.config_specs
            ]
        )

    def _prepare(
        self, config: Optional[RunnableConfig] = None
    ) -> Tuple[Runnable[Input, Output], RunnableConfig]:
        """Resolve the alternative selected by `config` (or the default).

        Raises:
            ValueError: If the selected key matches no known alternative.
        """
        config = ensure_config(config)
        which = config.get("configurable", {}).get(self.which.id, self.default_key)
        # remap configurable keys for the chosen alternative
        if self.prefix_keys:
            config = cast(
                RunnableConfig,
                {
                    **config,
                    "configurable": {
                        _strremoveprefix(k, f"{self.which.id}=={which}/"): v
                        for k, v in config.get("configurable", {}).items()
                    },
                },
            )
        # return the chosen alternative
        if which == self.default_key:
            return (self.default, config)
        elif which in self.alternatives:
            alt = self.alternatives[which]
            if isinstance(alt, Runnable):
                return (alt, config)
            else:
                # Alternative supplied as a factory: construct it lazily.
                return (alt(), config)
        else:
            raise ValueError(f"Unknown alternative: {which}")
def_strremoveprefix(s:str,prefix:str)->str:"""str.removeprefix() is only available in Python 3.9+."""returns.replace(prefix,"",1)ifs.startswith(prefix)elses
def prefix_config_spec(
    spec: ConfigurableFieldSpec, prefix: str
) -> ConfigurableFieldSpec:
    """Prefix the id of a ConfigurableFieldSpec.

    This is useful when a RunnableConfigurableAlternatives is used as a
    ConfigurableField of another RunnableConfigurableAlternatives.

    Args:
        spec: The ConfigurableFieldSpec to prefix.
        prefix: The prefix to add.

    Returns:
        ConfigurableFieldSpec: The prefixed ConfigurableFieldSpec.
    """
    # Shared specs are returned unchanged; only non-shared ids get prefixed.
    if spec.is_shared:
        return spec
    return ConfigurableFieldSpec(
        id=f"{prefix}/{spec.id}",
        name=spec.name,
        description=spec.description,
        annotation=spec.annotation,
        default=spec.default,
        is_shared=spec.is_shared,
    )
def make_options_spec(
    spec: Union[ConfigurableFieldSingleOption, ConfigurableFieldMultiOption],
    description: Optional[str],
) -> ConfigurableFieldSpec:
    """Make a ConfigurableFieldSpec for a ConfigurableFieldSingleOption or
    ConfigurableFieldMultiOption.

    Args:
        spec: The ConfigurableFieldSingleOption or ConfigurableFieldMultiOption.
        description: The description to use if the spec does not have one.

    Returns:
        The ConfigurableFieldSpec.
    """
    # Reuse a cached StrEnum of the option keys; the cache is shared module
    # state, hence the lock. (Cached values are enum classes, always truthy.)
    with _enums_for_spec_lock:
        enum = _enums_for_spec.get(spec)
        if enum is None:
            enum = StrEnum(  # type: ignore[call-overload]
                spec.name or spec.id,
                ((v, v) for v in list(spec.options.keys())),
            )
            _enums_for_spec[spec] = cast(Type[StrEnum], enum)
    # Single option -> the enum itself; multi option -> a sequence of it.
    if isinstance(spec, ConfigurableFieldSingleOption):
        annotation: Any = enum
    else:
        annotation = Sequence[enum]  # type: ignore[valid-type]
    return ConfigurableFieldSpec(
        id=spec.id,
        name=spec.name,
        description=spec.description or description,
        annotation=annotation,
        default=spec.default,
        is_shared=spec.is_shared,
    )