Source code for langchain_community.chat_models.fake
"""Fake ChatModel for testing purposes."""importasyncioimporttimefromtypingimportAny,AsyncIterator,Dict,Iterator,List,Optional,Unionfromlangchain_core.callbacksimport(AsyncCallbackManagerForLLMRun,CallbackManagerForLLMRun,)fromlangchain_core.language_models.chat_modelsimportBaseChatModel,SimpleChatModelfromlangchain_core.messagesimportAIMessageChunk,BaseMessagefromlangchain_core.outputsimportChatGeneration,ChatGenerationChunk,ChatResult
class FakeMessagesListChatModel(BaseChatModel):
    """Fake ChatModel for testing purposes."""

    responses: List[BaseMessage]
    sleep: Optional[float] = None
    i: int = 0

    def _generate(
        self,
        messages: List[BaseMessage],
        stop: Optional[List[str]] = None,
        run_manager: Optional[CallbackManagerForLLMRun] = None,
        **kwargs: Any,
    ) -> ChatResult:
        response = self.responses[self.i]
        if self.i < len(self.responses) - 1:
            self.i += 1
        else:
            self.i = 0
        generation = ChatGeneration(message=response)
        return ChatResult(generations=[generation])

    @property
    def _llm_type(self) -> str:
        return "fake-messages-list-chat-model"
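A minimal usage sketch (not part of the source above): the class cycles through the `responses` list, returning one message per call and wrapping back to the start. The message imports below assume the standard `langchain_core` types.

from langchain_core.messages import AIMessage, HumanMessage
from langchain_community.chat_models.fake import FakeMessagesListChatModel

# Queue up the exact messages the fake model should return, in order.
chat = FakeMessagesListChatModel(
    responses=[AIMessage(content="first"), AIMessage(content="second")]
)

print(chat.invoke([HumanMessage(content="hi")]).content)  # "first"
print(chat.invoke([HumanMessage(content="hi")]).content)  # "second"
print(chat.invoke([HumanMessage(content="hi")]).content)  # wraps back to "first"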
class FakeListChatModel(SimpleChatModel):
    """Fake ChatModel for testing purposes."""

    responses: List
    sleep: Optional[float] = None
    i: int = 0

    @property
    def _llm_type(self) -> str:
        return "fake-list-chat-model"

    def _call(
        self,
        messages: List[BaseMessage],
        stop: Optional[List[str]] = None,
        run_manager: Optional[CallbackManagerForLLMRun] = None,
        **kwargs: Any,
    ) -> str:
        """First try to lookup in queries, else return 'foo' or 'bar'."""
        response = self.responses[self.i]
        if self.i < len(self.responses) - 1:
            self.i += 1
        else:
            self.i = 0
        return response

    def _stream(
        self,
        messages: List[BaseMessage],
        stop: Union[List[str], None] = None,
        run_manager: Union[CallbackManagerForLLMRun, None] = None,
        **kwargs: Any,
    ) -> Iterator[ChatGenerationChunk]:
        response = self.responses[self.i]
        if self.i < len(self.responses) - 1:
            self.i += 1
        else:
            self.i = 0
        for c in response:
            if self.sleep is not None:
                time.sleep(self.sleep)
            yield ChatGenerationChunk(message=AIMessageChunk(content=c))

    async def _astream(
        self,
        messages: List[BaseMessage],
        stop: Union[List[str], None] = None,
        run_manager: Union[AsyncCallbackManagerForLLMRun, None] = None,
        **kwargs: Any,
    ) -> AsyncIterator[ChatGenerationChunk]:
        response = self.responses[self.i]
        if self.i < len(self.responses) - 1:
            self.i += 1
        else:
            self.i = 0
        for c in response:
            if self.sleep is not None:
                await asyncio.sleep(self.sleep)
            yield ChatGenerationChunk(message=AIMessageChunk(content=c))

    @property
    def _identifying_params(self) -> Dict[str, Any]:
        return {"responses": self.responses}
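A minimal usage sketch (not part of the source above): `FakeListChatModel` takes plain strings, cycles through them on each call, and its streaming methods yield the current response one character at a time, optionally pausing `sleep` seconds between chunks.

from langchain_core.messages import HumanMessage
from langchain_community.chat_models.fake import FakeListChatModel

chat = FakeListChatModel(responses=["hello", "goodbye"])

print(chat.invoke([HumanMessage(content="hi")]).content)  # "hello"
print(chat.invoke([HumanMessage(content="hi")]).content)  # "goodbye"

# Streaming emits the next queued response character by character.
for chunk in chat.stream([HumanMessage(content="hi")]):
    print(chunk.content, end="")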