@@ -3,8 +3,9 @@ import warnings
from collections import Counter, defaultdict, deque, abc
from collections.abc import Sequence
+from contextlib import suppress
from functools import cached_property, partial, reduce, wraps
-from heapq import heapify, heapreplace, heappop
+from heapq import heapify, heapreplace
from itertools import (
    chain,
    combinations,
@@ -21,10 +22,10 @@ from itertools import (
    zip_longest,
    product,
)
-from math import comb, e, exp, factorial, floor, fsum, log, perm, tau
+from math import comb, e, exp, factorial, floor, fsum, log, log1p, perm, tau
from queue import Empty, Queue
-from random import random, randrange, uniform
-from operator import itemgetter, mul, sub, gt, lt, ge, le
+from random import random, randrange, shuffle, uniform
+from operator import itemgetter, mul, sub, gt, lt, le
from sys import hexversion, maxsize
from time import monotonic
@@ -34,7 +35,6 @@ from .recipes import (
    UnequalIterablesError,
    consume,
    flatten,
-    pairwise,
    powerset,
    take,
    unique_everseen,
@@ -473,12 +473,10 @@ def ilen(iterable):
    This consumes the iterable, so handle with care.

    """
-    # This approach was selected because benchmarks showed it's likely the
-    # fastest of the known implementations at the time of writing.
-    # See GitHub tracker: #236, #230.
-    counter = count()
-    deque(zip(iterable, counter), maxlen=0)
-    return next(counter)
+    # This is the "most beautiful of the fast variants" of this function.
+    # If you think you can improve on it, please ensure that your version
+    # is both 10x faster and 10x more beautiful.
+    return sum(compress(repeat(1), zip(iterable)))
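For reference, the new one-liner counts one ``1`` per element: ``zip(iterable)`` wraps each item in a 1-tuple (always truthy), ``compress`` then lets one ``1`` from ``repeat(1)`` through per item, and ``sum`` totals them. A plain-Python sketch of the same behavior, assuming a finite iterable:

    def ilen_equivalent(iterable):
        # Same count as the compress/repeat version, but with a
        # Python-level loop instead of C-level iteration.
        return sum(1 for _ in iterable)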


def iterate(func, start):
@@ -666,9 +664,9 @@ def distinct_permutations(iterable, r=None):
    >>> sorted(distinct_permutations([1, 0, 1]))
    [(0, 1, 1), (1, 0, 1), (1, 1, 0)]

-    Equivalent to ``set(permutations(iterable))``, except duplicates are not
-    generated and thrown away. For larger input sequences this is much more
-    efficient.
+    Equivalent to yielding from ``set(permutations(iterable))``, except
+    duplicates are not generated and thrown away. For larger input sequences
+    this is much more efficient.

    Duplicate permutations arise when there are duplicated elements in the
    input iterable. The number of items returned is
@@ -683,6 +681,25 @@ def distinct_permutations(iterable, r=None):
    >>> sorted(distinct_permutations(range(3), r=2))
    [(0, 1), (0, 2), (1, 0), (1, 2), (2, 0), (2, 1)]

+    *iterable* need not be sortable, but note that using equal (``x == y``)
+    but non-identical (``id(x) != id(y)``) elements may produce surprising
+    behavior. For example, ``1`` and ``True`` are equal but non-identical:
+
+    >>> list(distinct_permutations([1, True, '3'])) # doctest: +SKIP
+    [
+        (1, True, '3'),
+        (1, '3', True),
+        ('3', 1, True)
+    ]
+    >>> list(distinct_permutations([1, 2, '3'])) # doctest: +SKIP
+    [
+        (1, 2, '3'),
+        (1, '3', 2),
+        (2, 1, '3'),
+        (2, '3', 1),
+        ('3', 1, 2),
+        ('3', 2, 1)
+    ]
    """

    # Algorithm: https://w.wiki/Qai
@@ -749,14 +766,44 @@ def distinct_permutations(iterable, r=None):
            i += 1
        head[i:], tail[:] = tail[: r - i], tail[r - i :]

-    items = sorted(iterable)
+    items = list(iterable)
+
+    try:
+        items.sort()
+        sortable = True
+    except TypeError:
+        sortable = False
+
+    indices_dict = defaultdict(list)
+
+    for item in items:
+        indices_dict[items.index(item)].append(item)
+
+    indices = [items.index(item) for item in items]
+    indices.sort()
+
+    equivalent_items = {k: cycle(v) for k, v in indices_dict.items()}
+
+    def permuted_items(permuted_indices):
+        return tuple(
+            next(equivalent_items[index]) for index in permuted_indices
+        )

    size = len(items)
    if r is None:
        r = size

+    # Select the full- or partial-length algorithm; partial() pre-binds r
+    algorithm = _full if (r == size) else partial(_partial, r=r)
+
    if 0 < r <= size:
-        return _full(items) if (r == size) else _partial(items, r)
+        if sortable:
+            return algorithm(items)
+        else:
+            return (
+                permuted_items(permuted_indices)
+                for permuted_indices in algorithm(indices)
+            )

    return iter(() if r else ((),))
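The unsortable branch above permutes positions rather than values: equal items share an index, and ``cycle`` hands the stored originals back out. A small usage sketch with an unorderable element type (complex numbers have no ``<``), assuming the patched module is importable as ``more_itertools``:

    from more_itertools import distinct_permutations

    # sorted([1j, 1j, 2j]) would raise TypeError, but the index-based
    # fallback still collapses the duplicate 1j values.
    print(list(distinct_permutations([1j, 1j, 2j])))
    # e.g. [(1j, 1j, 2j), (1j, 2j, 1j), (2j, 1j, 1j)]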
@@ -1743,7 +1790,9 @@ def zip_offset(*iterables, offsets, longest=False, fillvalue=None):
    return zip(*staggered)


-def sort_together(iterables, key_list=(0,), key=None, reverse=False):
+def sort_together(
+    iterables, key_list=(0,), key=None, reverse=False, strict=False
+):
    """Return the input iterables sorted together, with *key_list* as the
    priority for sorting. All iterables are trimmed to the length of the
    shortest one.
@@ -1782,6 +1831,10 @@ def sort_together(iterables, key_list=(0,), key=None, reverse=False):
    >>> sort_together([(1, 2, 3), ('c', 'b', 'a')], reverse=True)
    [(3, 2, 1), ('a', 'b', 'c')]

+    If the *strict* keyword argument is ``True``, then
+    ``UnequalIterablesError`` will be raised if any of the iterables have
+    different lengths.
+
    """
    if key is None:
        # if there is no key function, the key argument to sorted is an
@@ -1804,8 +1857,9 @@ def sort_together(iterables, key_list=(0,), key=None, reverse=False):
            *get_key_items(zipped_items)
        )

+    zipper = zip_equal if strict else zip
    return list(
-        zip(*sorted(zip(*iterables), key=key_argument, reverse=reverse))
+        zipper(*sorted(zipper(*iterables), key=key_argument, reverse=reverse))
    )
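A quick sketch of the new *strict* behavior, assuming the patched module: equal-length inputs sort as before, while mismatched lengths now raise instead of being silently truncated.

    from more_itertools import sort_together

    print(sort_together([(3, 1, 2), ('c', 'a', 'b')], strict=True))
    # [(1, 2, 3), ('a', 'b', 'c')]

    try:
        sort_together([(1, 2, 3), ('a', 'b')], strict=True)
    except Exception as exc:  # UnequalIterablesError from .recipes
        print(type(exc).__name__)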
@@ -2747,8 +2801,6 @@ class seekable:
        >>> it.seek(0)
        >>> next(it), next(it), next(it)
        ('0', '1', '2')
-        >>> next(it)
-        '3'

        You can also seek forward:
@@ -2756,15 +2808,29 @@ class seekable:
        >>> it.seek(10)
        >>> next(it)
        '10'
-        >>> it.relative_seek(-2) # Seeking relative to the current position
-        >>> next(it)
-        '9'
        >>> it.seek(20) # Seeking past the end of the source isn't a problem
        >>> list(it)
        []
        >>> it.seek(0) # Resetting works even after hitting the end
+        >>> next(it)
+        '0'
+
+        Call :meth:`relative_seek` to seek relative to the source iterator's
+        current position.
+
+        >>> it = seekable((str(n) for n in range(20)))
        >>> next(it), next(it), next(it)
        ('0', '1', '2')
+        >>> it.relative_seek(2)
+        >>> next(it)
+        '5'
+        >>> it.relative_seek(-3) # Source is at '6', we move back to '3'
+        >>> next(it)
+        '3'
+        >>> it.relative_seek(-3) # Source is at '4', we move back to '1'
+        >>> next(it)
+        '1'
+

        Call :meth:`peek` to look ahead one item without advancing the iterator:
@@ -2873,8 +2939,10 @@ class seekable:
            consume(self, remainder)

    def relative_seek(self, count):
-        index = len(self._cache)
-        self.seek(max(index + count, 0))
+        if self._index is None:
+            self._index = len(self._cache)
+
+        self.seek(max(self._index + count, 0))
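The fix above bases relative seeks on the seekable's current read position (``self._index``) instead of the cache length, so consecutive relative seeks compose. A rough usage sketch, assuming the patched class:

    from more_itertools import seekable

    it = seekable(str(n) for n in range(20))
    next(it), next(it), next(it)  # '0', '1', '2'
    it.relative_seek(-2)          # two back from the current position
    print(next(it))               # '1'
    it.relative_seek(-2)          # relative to the new position, not the cache end
    print(next(it))               # '0'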


class run_length:
@@ -2903,7 +2971,7 @@ class run_length:

    @staticmethod
    def decode(iterable):
-        return chain.from_iterable(repeat(k, n) for k, n in iterable)
+        return chain.from_iterable(starmap(repeat, iterable))
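``starmap(repeat, iterable)`` unpacks each ``(value, count)`` pair into ``repeat(value, count)``, so the rewrite is behavior-preserving. A tiny equivalence check using only ``itertools``:

    from itertools import chain, repeat, starmap

    encoded = [('a', 3), ('b', 1)]
    old = list(chain.from_iterable(repeat(k, n) for k, n in encoded))
    new = list(chain.from_iterable(starmap(repeat, encoded)))
    assert old == new == ['a', 'a', 'a', 'b']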


def exactly_n(iterable, n, predicate=bool):
@@ -2924,14 +2992,34 @@ def exactly_n(iterable, n, predicate=bool):
    return len(take(n + 1, filter(predicate, iterable))) == n


-def circular_shifts(iterable):
-    """Return a list of circular shifts of *iterable*.
+def circular_shifts(iterable, steps=1):
+    """Yield the circular shifts of *iterable*.

-    >>> circular_shifts(range(4))
+    >>> list(circular_shifts(range(4)))
    [(0, 1, 2, 3), (1, 2, 3, 0), (2, 3, 0, 1), (3, 0, 1, 2)]
+
+    Set *steps* to the number of places to rotate to the left
+    (or to the right if negative). Defaults to 1.
+
+    >>> list(circular_shifts(range(4), 2))
+    [(0, 1, 2, 3), (2, 3, 0, 1)]
+
+    >>> list(circular_shifts(range(4), -1))
+    [(0, 1, 2, 3), (3, 0, 1, 2), (2, 3, 0, 1), (1, 2, 3, 0)]
+
    """
-    lst = list(iterable)
-    return take(len(lst), windowed(cycle(lst), len(lst)))
+    buffer = deque(iterable)
+    if steps == 0:
+        raise ValueError('Steps should be a non-zero integer')
+
+    buffer.rotate(steps)
+    steps = -steps
+    n = len(buffer)
+    n //= math.gcd(n, steps)
+
+    for __ in repeat(None, n):
+        buffer.rotate(steps)
+        yield tuple(buffer)
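The generator above rotates a ``deque`` in place and stops after ``len(buffer) // gcd(len(buffer), steps)`` rotations, i.e. just before the shifts would start repeating. A rough slicing-based sketch that should produce the same output (``circular_shifts_sketch`` is a hypothetical stand-in, not part of the patch):

    from math import gcd

    def circular_shifts_sketch(iterable, steps=1):
        seq = list(iterable)
        if steps == 0:
            raise ValueError('Steps should be a non-zero integer')
        n = len(seq)
        # One shift per multiple of *steps* until the pattern cycles.
        for i in range(n // gcd(n, steps)):
            k = (i * steps) % n
            yield tuple(seq[k:] + seq[:k])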


def make_decorator(wrapping_func, result_index=0):
@@ -3191,7 +3279,7 @@ def partitions(iterable):
        yield [sequence[i:j] for i, j in zip((0,) + i, i + (n,))]


-def set_partitions(iterable, k=None):
+def set_partitions(iterable, k=None, min_size=None, max_size=None):
    """
    Yield the set partitions of *iterable* into *k* parts. Set partitions are
    not order-preserving.
@@ -3215,6 +3303,20 @@ def set_partitions(iterable, k=None):
    ['b', 'ac']
    ['a', 'b', 'c']

+    If *min_size* and/or *max_size* are given, the minimum and/or maximum
+    size per block in the partition is set.
+
+    >>> iterable = 'abc'
+    >>> for part in set_partitions(iterable, min_size=2):
+    ...     print([''.join(p) for p in part])
+    ['abc']
+    >>> for part in set_partitions(iterable, max_size=2):
+    ...     print([''.join(p) for p in part])
+    ['a', 'bc']
+    ['ab', 'c']
+    ['b', 'ac']
+    ['a', 'b', 'c']
+
    """
    L = list(iterable)
    n = len(L)
@@ -3226,6 +3328,11 @@ def set_partitions(iterable, k=None):
    elif k > n:
        return

+    min_size = min_size if min_size is not None else 0
+    max_size = max_size if max_size is not None else n
+    if min_size > max_size:
+        return
+
    def set_partitions_helper(L, k):
        n = len(L)
        if k == 1:
@@ -3242,9 +3349,15 @@ def set_partitions(iterable, k=None):
    if k is None:
        for k in range(1, n + 1):
-            yield from set_partitions_helper(L, k)
+            yield from filter(
+                lambda z: all(min_size <= len(bk) <= max_size for bk in z),
+                set_partitions_helper(L, k),
+            )
    else:
-        yield from set_partitions_helper(L, k)
+        yield from filter(
+            lambda z: all(min_size <= len(bk) <= max_size for bk in z),
+            set_partitions_helper(L, k),
+        )
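A short usage sketch of the new size bounds, assuming the patched module; partitions whose blocks fall outside ``[min_size, max_size]`` are filtered out after generation:

    from more_itertools import set_partitions

    # Of the seven 2-block partitions of 'abcd', only the 2 + 2 splits survive.
    for part in set_partitions('abcd', k=2, max_size=2):
        print([''.join(p) for p in part])
    # e.g. ['ab', 'cd'], ['ac', 'bd'], ['ad', 'bc'] (ordering may differ)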


class time_limited:
@@ -3535,32 +3648,27 @@ def map_if(iterable, pred, func, func_else=lambda x: x):
        yield func(item) if pred(item) else func_else(item)


-def _sample_unweighted(iterable, k):
-    # Implementation of "Algorithm L" from the 1994 paper by Kim-Hung Li:
+def _sample_unweighted(iterator, k, strict):
+    # Algorithm L in the 1994 paper by Kim-Hung Li:
    # "Reservoir-Sampling Algorithms of Time Complexity O(n(1+log(N/n)))".

-    # Fill up the reservoir (collection of samples) with the first `k` samples
-    reservoir = take(k, iterable)
-
-    # Generate random number that's the largest in a sample of k U(0,1) numbers
-    # Largest order statistic: https://en.wikipedia.org/wiki/Order_statistic
-    W = exp(log(random()) / k)
+    reservoir = list(islice(iterator, k))
+    if strict and len(reservoir) < k:
+        raise ValueError('Sample larger than population')
+    W = 1.0

-    # The number of elements to skip before changing the reservoir is a random
-    # number with a geometric distribution. Sample it using random() and logs.
-    next_index = k + floor(log(random()) / log(1 - W))
-
-    for index, element in enumerate(iterable, k):
-        if index == next_index:
-            reservoir[randrange(k)] = element
-            # The new W is the largest in a sample of k U(0, `old_W`) numbers
+    with suppress(StopIteration):
+        while True:
            W *= exp(log(random()) / k)
-            next_index += floor(log(random()) / log(1 - W)) + 1
+            skip = floor(log(random()) / log1p(-W))
+            element = next(islice(iterator, skip, None))
+            reservoir[randrange(k)] = element

+    shuffle(reservoir)
    return reservoir
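In Algorithm L, ``W`` tracks the largest of the ``k`` uniform keys seen so far, and the number of items to pass over before the next replacement is geometrically distributed with success probability ``W``; ``log1p(-W)`` is just a numerically safer ``log(1 - W)``. The skip draw in isolation (``geometric_skip`` is a hypothetical helper, not part of the patch):

    from math import floor, log, log1p
    from random import random

    def geometric_skip(W):
        # How many elements to skip before the next reservoir
        # replacement, for 0 < W < 1.
        return floor(log(random()) / log1p(-W))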


-def _sample_weighted(iterable, k, weights):
+def _sample_weighted(iterator, k, weights, strict):
    # Implementation of "A-ExpJ" from the 2006 paper by Efraimidis et al. :
    # "Weighted random sampling with a reservoir".
@@ -3569,7 +3677,10 @@ def _sample_weighted(iterable, k, weights):
    # Fill up the reservoir (collection of samples) with the first `k`
    # weight-keys and elements, then heapify the list.
-    reservoir = take(k, zip(weight_keys, iterable))
+    reservoir = take(k, zip(weight_keys, iterator))
+    if strict and len(reservoir) < k:
+        raise ValueError('Sample larger than population')
+
    heapify(reservoir)

    # The number of jumps before changing the reservoir is a random variable
@@ -3577,7 +3688,7 @@ def _sample_weighted(iterable, k, weights):
    smallest_weight_key, _ = reservoir[0]
    weights_to_skip = log(random()) / smallest_weight_key

-    for weight, element in zip(weights, iterable):
+    for weight, element in zip(weights, iterator):
        if weight >= weights_to_skip:
            # The notation here is consistent with the paper, but we store
            # the weight-keys in log-space for better numerical stability.
@@ -3591,44 +3702,103 @@ def _sample_weighted(iterable, k, weights):
        else:
            weights_to_skip -= weight

-    # Equivalent to [element for weight_key, element in sorted(reservoir)]
-    return [heappop(reservoir)[1] for _ in range(k)]
+    ret = [element for weight_key, element in reservoir]
+    shuffle(ret)
+    return ret
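With ``heappop`` gone, the reservoir is no longer drained in weight-key order; the selected elements are pulled out directly and shuffled, so the result is a sample in random order rather than a weight-ordered permutation. A usage sketch, assuming the patched module:

    from more_itertools import sample

    population = range(100)
    weights = [i + 1 for i in population]  # later items are more likely
    chosen = sample(population, k=5, weights=weights)
    print(sorted(chosen))  # five distinct values, biased toward the high end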
+
+
+def _sample_counted(population, k, counts, strict):
+    element = None
+    remaining = 0
-def sample(iterable, k, weights=None):
+    def feed(i):
+        # Advance *i* steps ahead and consume an element
+        nonlocal element, remaining
+
+        while i + 1 > remaining:
+            i = i - remaining
+            element = next(population)
+            remaining = next(counts)
+        remaining -= i + 1
+        return element
+
+    with suppress(StopIteration):
+        reservoir = []
+        for _ in range(k):
+            reservoir.append(feed(0))
+        if strict and len(reservoir) < k:
+            raise ValueError('Sample larger than population')
+
+        W = 1.0
+        while True:
+            W *= exp(log(random()) / k)
+            skip = floor(log(random()) / log1p(-W))
+            element = feed(skip)
+            reservoir[randrange(k)] = element
+
+    shuffle(reservoir)
+    return reservoir
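``feed(i)`` walks ``population`` and ``counts`` together as one virtual expanded sequence, so sampling with *counts* should match sampling the explicitly repeated population. A deterministic check for the degenerate full-sample case, assuming the patched module:

    from collections import Counter
    from more_itertools import sample

    with_counts = sample('ab', k=7, counts=[3, 4])
    expanded = sample(['a'] * 3 + ['b'] * 4, k=7)
    assert Counter(with_counts) == Counter(expanded) == Counter(a=3, b=4)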
+
+
+def sample(iterable, k, weights=None, *, counts=None, strict=False):
    """Return a *k*-length list of elements chosen (without replacement)
-    from the *iterable*. Like :func:`random.sample`, but works on iterables
-    of unknown length.
+    from the *iterable*. Similar to :func:`random.sample`, but works on
+    iterables of unknown length.

    >>> iterable = range(100)
    >>> sample(iterable, 5) # doctest: +SKIP
    [81, 60, 96, 16, 4]

-    An iterable with *weights* may also be given:
+    For iterables with repeated elements, you may supply *counts* to
+    indicate the repeats.
+
+    >>> iterable = ['a', 'b']
+    >>> counts = [3, 4] # Equivalent to 'a', 'a', 'a', 'b', 'b', 'b', 'b'
+    >>> sample(iterable, k=3, counts=counts) # doctest: +SKIP
+    ['a', 'a', 'b']
+
+    An iterable with *weights* may be given:

    >>> iterable = range(100)
    >>> weights = (i * i + 1 for i in range(100))
    >>> sampled = sample(iterable, 5, weights=weights) # doctest: +SKIP
    [79, 67, 74, 66, 78]

-    The algorithm can also be used to generate weighted random permutations.
-    The relative weight of each item determines the probability that it
-    appears late in the permutation.
+    Weighted selections are made without replacement.
+    After an element is selected, it is removed from the pool and the
+    relative weights of the other elements increase (this
+    does not match the behavior of :func:`random.sample`'s *counts*
+    parameter). Note that *weights* may not be used with *counts*.
+
+    If the length of *iterable* is less than *k*, ``ValueError`` is raised
+    when *strict* is ``True``; otherwise, all elements are returned
+    (in shuffled order).

-    >>> data = "abcdefgh"
-    >>> weights = range(1, len(data) + 1)
-    >>> sample(data, k=len(data), weights=weights) # doctest: +SKIP
-    ['c', 'a', 'b', 'e', 'g', 'd', 'h', 'f']
+    By default, the `Algorithm L <https://w.wiki/ANrM>`__ reservoir sampling
+    technique is used. When *weights* are provided,
+    `Algorithm A-ExpJ <https://w.wiki/ANrS>`__ is used.
    """
+    iterator = iter(iterable)
+
+    if k < 0:
+        raise ValueError('k must be non-negative')
+
    if k == 0:
        return []

-    iterable = iter(iterable)
-    if weights is None:
-        return _sample_unweighted(iterable, k)
-    else:
+    if weights is not None and counts is not None:
+        raise TypeError('weights and counts are mutually exclusive')
+
+    elif weights is not None:
        weights = iter(weights)
-        return _sample_weighted(iterable, k, weights)
+        return _sample_weighted(iterator, k, weights, strict)
+
+    elif counts is not None:
+        counts = iter(counts)
+        return _sample_counted(iterator, k, counts, strict)
+
+    else:
+        return _sample_unweighted(iterator, k, strict)
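A sketch of the new dispatch from the caller's side, assuming the patched module: *counts* expands repeats lazily, *strict* controls the short-population behavior, and combining *weights* with *counts* is rejected.

    from more_itertools import sample

    print(sample(['a', 'b'], k=3, counts=[3, 4]))  # e.g. ['b', 'a', 'b']
    print(sample(range(10), k=12))                 # all ten items, shuffled
    try:
        sample(range(5), k=10, strict=True)
    except ValueError as exc:
        print(exc)                                 # Sample larger than population
    try:
        sample(range(5), k=3, weights=[1] * 5, counts=[1] * 5)
    except TypeError as exc:
        print(exc)                                 # weights and counts are mutually exclusive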


def is_sorted(iterable, key=None, reverse=False, strict=False):
@@ -3650,12 +3820,16 @@ def is_sorted(iterable, key=None, reverse=False, strict=False):
    False

    The function returns ``False`` after encountering the first out-of-order
-    item. If there are no out-of-order items, the iterable is exhausted.
+    item, which means it may produce results that differ from the built-in
+    :func:`sorted` function for objects with unusual comparison dynamics.
+    If there are no out-of-order items, the iterable is exhausted.
    """
+    compare = le if strict else lt
+    it = iterable if (key is None) else map(key, iterable)
+    it_1, it_2 = tee(it)
+    next(it_2 if reverse else it_1, None)

-    compare = (le if reverse else ge) if strict else (lt if reverse else gt)
-    it = iterable if key is None else map(key, iterable)
-    return not any(starmap(compare, pairwise(it)))
+    return not any(map(compare, it_1, it_2))
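The rewrite advances one of two tee'd copies by a single element and compares each item with its successor in one ``map`` pass; *reverse* flips which copy is advanced, and *strict* swaps ``lt`` for ``le`` so ties also count as out of order. A few worked calls, assuming the patched module:

    from more_itertools import is_sorted

    print(is_sorted([1, 2, 2, 3]))               # True
    print(is_sorted([1, 2, 2, 3], strict=True))  # False: the tie trips le()
    print(is_sorted([3, 2, 1], reverse=True))    # True
    print(is_sorted('cba', key=ord, reverse=True))  # True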


class AbortThread(BaseException):