from collections import deque
from collections.abc import Generator, Iterable, Iterator
from contextlib import AbstractContextManager
from itertools import islice
from typing import Any, Generic, Literal, Optional, TypeVar

T = TypeVar("T")


class NoLock:
    """Dummy lock that provides the proper interface but no protection."""

    def __enter__(self) -> None:
        pass

    def __exit__(self, exc_type: Any, exc_val: Any, exc_tb: Any) -> Literal[False]:
        return False
def tee_peer(
    iterator: Iterator[T],
    # the buffer specific to this peer
    buffer: deque[T],
    # the buffers of all peers, including our own
    peers: list[deque[T]],
    lock: AbstractContextManager[Any],
) -> Generator[T, None, None]:
    """An individual iterator of a :py:func:`~.tee`.

    This function is a generator that yields items from the shared
    ``iterator``. It buffers items until the least advanced iterator has
    yielded them as well. The buffer is shared with all other peers.

    Args:
        iterator: The shared iterator.
        buffer: The buffer for this peer.
        peers: The buffers of all peers.
        lock: The lock to synchronise access to the shared buffers.

    Yields:
        The next item from the shared iterator.
    """
    try:
        while True:
            if not buffer:
                with lock:
                    # Another peer produced an item while we were waiting for the lock.
                    # Proceed with the next loop iteration to yield the item.
                    if buffer:
                        continue
                    try:
                        item = next(iterator)
                    except StopIteration:
                        break
                    else:
                        # Append to all buffers, including our own. We'll fetch our
                        # item from the buffer again, instead of yielding it directly.
                        # This ensures the proper item ordering if any of our peers
                        # are fetching items concurrently. They may have buffered their
                        # item already.
                        for peer_buffer in peers:
                            peer_buffer.append(item)
            yield buffer.popleft()
    finally:
        with lock:
            # this peer is done – remove its buffer
            for idx, peer_buffer in enumerate(peers):  # pragma: no branch
                if peer_buffer is buffer:
                    peers.pop(idx)
                    break
            # if we are the last peer, try and close the iterator
            if not peers and hasattr(iterator, "close"):
                iterator.close()
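
# --- Illustrative sketch (not part of the original module) -----------------
# Wiring two peers by hand shows how the shared buffers keep the iterators in
# sync: an item fetched by one peer is appended to every peer's buffer. The
# helper name ``_tee_peer_demo`` is hypothetical; the ``Tee`` class below
# performs this wiring for you.
def _tee_peer_demo() -> None:
    source = iter(range(5))
    buffers: list[deque[int]] = [deque(), deque()]
    lock = NoLock()  # one lock shared by all peers, as ``Tee.__init__`` does
    first, second = (
        tee_peer(iterator=source, buffer=buf, peers=buffers, lock=lock)
        for buf in buffers
    )
    assert next(first) == 0              # 0 is also buffered for ``second``
    assert list(second) == [0, 1, 2, 3, 4]
    assert list(first) == [1, 2, 3, 4]   # ``first`` already consumed 0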
class Tee(Generic[T]):
    """Create ``n`` separate iterators over ``iterable``.

    This splits a single ``iterable`` into multiple iterators, each providing
    the same items in the same order. All child iterators may advance
    separately but share the same items from ``iterable`` -- when the most
    advanced iterator retrieves an item, it is buffered until the least
    advanced iterator has yielded it as well. A ``tee`` works lazily and can
    handle an infinite ``iterable``, provided that all iterators advance.

    .. code-block:: python3

        def derivative(sensor_data):
            previous, current = tee(sensor_data, n=2)
            next(previous)  # advance one iterator
            return map(operator.sub, previous, current)

    Unlike :py:func:`itertools.tee`, :py:func:`~.tee` returns a custom type
    instead of a :py:class:`tuple`. Like a tuple, it can be indexed, iterated
    and unpacked to get the child iterators. In addition, its
    :py:meth:`~.tee.close` method immediately closes all children, and it can
    be used in a ``with`` context for the same effect.

    If ``iterable`` is an iterator and read elsewhere, ``tee`` will *not*
    provide these items. Also, ``tee`` must internally buffer each item until
    the last iterator has yielded it; if the most and least advanced iterators
    are far apart, using a :py:class:`list` is more efficient (but not lazy).

    If the underlying iterable is concurrency safe (``next`` may be called
    concurrently) the resulting iterators are concurrency safe as well.
    Otherwise, the iterators are safe if there is only ever one single "most
    advanced" iterator. To enforce sequential use of ``next``, provide a
    ``lock`` - e.g. a :py:class:`threading.Lock` instance in a multi-threaded
    application - and access is automatically synchronised.
    """
    def __init__(
        self,
        iterable: Iterator[T],
        n: int = 2,
        *,
        lock: Optional[AbstractContextManager[Any]] = None,
    ):
        """Create a new ``tee``.

        Args:
            iterable: The iterable to split.
            n: The number of iterators to create. Defaults to 2.
            lock: The lock to synchronise access to the shared buffers.
                Defaults to None.
        """
        self._iterator = iter(iterable)
        self._buffers: list[deque[T]] = [deque() for _ in range(n)]
        self._children = tuple(
            tee_peer(
                iterator=self._iterator,
                buffer=buffer,
                peers=self._buffers,
                lock=lock if lock is not None else NoLock(),
            )
            for buffer in self._buffers
        )
# Why this is needed https://stackoverflow.com/a/44638570
safetee = Tee
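
# --- Illustrative sketch (not part of the original module) -----------------
# Typical use of ``Tee``: split one iterator into two children that advance
# independently. Unpacking relies on the indexing/iteration support described
# in the class docstring (not shown in this excerpt). ``_tee_demo`` is a
# hypothetical name; pass ``lock=threading.Lock()`` if the children are
# consumed from several threads.
def _tee_demo() -> None:
    first, second = Tee(iter(range(4)), n=2)
    assert next(first) == 0             # ``first`` advances; 0 stays buffered for ``second``
    assert list(second) == [0, 1, 2, 3]
    assert list(first) == [1, 2, 3]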
def batch_iterate(size: Optional[int], iterable: Iterable[T]) -> Iterator[list[T]]:
    """Utility batching function.

    Args:
        size: The size of the batch. If None, returns a single batch.
        iterable: The iterable to batch.

    Yields:
        The batches of the iterable.
    """
    it = iter(iterable)
    while True:
        chunk = list(islice(it, size))
        if not chunk:
            return
        yield chunk
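
# --- Illustrative sketch (not part of the original module) -----------------
# ``batch_iterate`` chunks any iterable into lists of at most ``size`` items;
# with ``size=None`` everything arrives in a single batch. ``_batch_iterate_demo``
# is a hypothetical name used only for this example.
def _batch_iterate_demo() -> None:
    assert list(batch_iterate(2, range(5))) == [[0, 1], [2, 3], [4]]
    assert list(batch_iterate(None, range(3))) == [[0, 1, 2]]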