How to use the try_shrinking_blocks method in hypothesis

Best Python code snippets using hypothesis
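try_shrinking_blocks is an internal method of Hypothesis's Shrinker class, the engine that minimizes failing test cases. Given a list of block indices and a replacement byte string b, it tries to substitute b into all of those blocks at once and, when that changes the size of the test case, additionally attempts to delete trailing data to compensate. The annotated source below shows the implementation; the test file after it exercises the method directly. As a minimal sketch of where the method lives (an internal module path, not public API, which may change between Hypothesis releases):

# Internal import path matching the versions of the listing below;
# treat this as an assumption, not a stable interface.
from hypothesis.internal.conjecture.shrinker import Shrinker

help(Shrinker.try_shrinking_blocks)  # prints the docstring shown below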

shrinker.py

Source: shrinker.py (GitHub)


...
        self.__all_changed_blocks = set()
        self.__last_checked_changed_at = new_target
        self.shrink_target = new_target
        self.__derived_values = {}

    def try_shrinking_blocks(self, blocks, b):
        """Attempts to replace each block in the blocks list with b. Returns
        True if it succeeded (which may include some additional modifications
        to shrink_target).

        In current usage it is expected that each of the blocks currently have
        the same value, although this is not essential. Note that b must be
        < the block at min(blocks) or this is not a valid shrink.

        This method will attempt to do some small amount of work to delete data
        that occurs after the end of the blocks. This is useful for cases where
        there is some size dependency on the value of a block.
        """
        initial_attempt = bytearray(self.shrink_target.buffer)
        for i, block in enumerate(blocks):
            if block >= len(self.blocks):
                blocks = blocks[:i]
                break
            u, v = self.blocks[block].bounds
            n = min(self.blocks[block].length, len(b))
            initial_attempt[v - n : v] = b[-n:]

        if not blocks:
            return False

        start = self.shrink_target.blocks[blocks[0]].start
        end = self.shrink_target.blocks[blocks[-1]].end

        initial_data = self.cached_test_function(initial_attempt)

        if initial_data is self.shrink_target:
            self.lower_common_block_offset()
            return True

        # If this produced something completely invalid we ditch it
        # here rather than trying to persevere.
        if initial_data.status < Status.VALID:
            return False

        # We've shrunk inside our group of blocks, so we have no way to
        # continue. (This only happens when shrinking more than one block at
        # a time).
        if len(initial_data.buffer) < v:
            return False

        lost_data = len(self.shrink_target.buffer) - len(initial_data.buffer)

        # If this did not in fact cause the data size to shrink we
        # bail here because it's not worth trying to delete stuff from
        # the remainder.
        if lost_data <= 0:
            return False

        # We now look for contiguous regions to delete that might help fix up
        # this failed shrink. We only look for contiguous regions of the right
        # lengths because doing anything more than that starts to get very
        # expensive. See minimize_individual_blocks for where we
        # try to be more aggressive.
        regions_to_delete = {(end, end + lost_data)}

        for j in (blocks[-1] + 1, blocks[-1] + 2):
            if j >= min(len(initial_data.blocks), len(self.blocks)):
                continue
            # We look for a block very shortly after the last one that has
            # lost some of its size, and try to delete from the beginning so
            # that it retains the same integer value. This is a bit of a hyper
            # specific trick designed to make our integers() strategy shrink
            # well.
            r1, s1 = self.shrink_target.blocks[j].bounds
            r2, s2 = initial_data.blocks[j].bounds
            lost = (s1 - r1) - (s2 - r2)
            # Apparently a coverage bug? An assert False in the body of this
            # will reliably fail, but it shows up as uncovered.
            if lost <= 0 or r1 != r2:  # pragma: no cover
                continue
            regions_to_delete.add((r1, r1 + lost))

        for ex in self.shrink_target.examples:
            if ex.start > start:
                continue
            if ex.end <= end:
                continue

            replacement = initial_data.examples[ex.index]

            in_original = [c for c in ex.children if c.start >= end]
            in_replaced = [c for c in replacement.children if c.start >= end]

            if len(in_replaced) >= len(in_original) or not in_replaced:
                continue

            # We've found an example where some of the children went missing
            # as a result of this change, and just replacing it with the data
            # it would have had and removing the spillover didn't work. This
            # means that some of its children towards the right must be
            # important, so we try to arrange it so that it retains its
            # rightmost children instead of its leftmost.
            regions_to_delete.add(
                (in_original[0].start, in_original[-len(in_replaced)].start)
            )

        for u, v in sorted(regions_to_delete, key=lambda x: x[1] - x[0], reverse=True):
            try_with_deleted = bytearray(initial_attempt)
            del try_with_deleted[u:v]
            if self.incorporate_new_buffer(try_with_deleted):
                return True
        return False

    def remove_discarded(self):
        """Try removing all bytes marked as discarded.

        This is primarily to deal with data that has been ignored while
        doing rejection sampling - e.g. as a result of an integer range, or a
        filtered strategy.

        Such data will also be handled by the adaptive_example_deletion pass,
        but that pass is necessarily more conservative and will try deleting
        each interval individually. The common case is that all data drawn and
        rejected can just be thrown away immediately in one block, so this pass
        will be much faster than trying each one individually when it works.

        returns False if there is discarded data and removing it does not work,
        otherwise returns True.
        """
        while self.shrink_target.has_discards:
            discarded = []

            for ex in self.shrink_target.examples:
                if (
                    ex.length > 0
                    and ex.discarded
                    and (not discarded or ex.start >= discarded[-1][-1])
                ):
                    discarded.append((ex.start, ex.end))

            # This can happen if we have discards but they are all of
            # zero length. This shouldn't happen very often so it's
            # faster to check for it here than at the point of example
            # generation.
            if not discarded:
                break

            attempt = bytearray(self.shrink_target.buffer)
            for u, v in reversed(discarded):
                del attempt[u:v]

            if not self.incorporate_new_buffer(attempt):
                return False
        return True

    @defines_shrink_pass()
    def adaptive_example_deletion(self, chooser):
        """Attempts to delete every example from the test case.

        That is, it is logically equivalent to trying ``self.buffer[:ex.start] +
        self.buffer[ex.end:]`` for every example ``ex``. The order in which
        examples are tried is randomized, and when deletion is successful it
        will attempt to adapt to delete more than one example at a time.
        """
        example = chooser.choose(self.examples)

        if not self.incorporate_new_buffer(
            self.buffer[: example.start] + self.buffer[example.end :]
        ):
            return

        # If we successfully deleted one example there may be a useful
        # deletable region around here.
        original = self.shrink_target
        endpoints = set()
        for ex in original.examples:
            if ex.depth <= example.depth:
                endpoints.add(ex.start)
                endpoints.add(ex.end)

        partition = sorted(endpoints)
        j = partition.index(example.start)

        def delete_region(a, b):
            assert a <= j <= b
            if a < 0 or b >= len(partition) - 1:
                return False
            return self.consider_new_buffer(
                original.buffer[: partition[a]] + original.buffer[partition[b] :]
            )

        to_right = find_integer(lambda n: delete_region(j, j + n))
        find_integer(lambda n: delete_region(j - n, j + to_right))

    def try_zero_example(self, ex):
        u = ex.start
        v = ex.end
        attempt = self.cached_test_function(
            self.buffer[:u] + bytes(v - u) + self.buffer[v:]
        )

        if attempt is Overrun:
            return False

        in_replacement = attempt.examples[ex.index]
        used = in_replacement.length

        if attempt is not self.shrink_target:
            if in_replacement.end < len(attempt.buffer) and used < ex.length:
                self.incorporate_new_buffer(
                    self.buffer[:u] + bytes(used) + self.buffer[v:]
                )

        return self.examples[ex.index].trivial

    @defines_shrink_pass()
    def zero_examples(self, chooser):
        """Attempt to replace each example with a minimal version of itself."""
        ex = chooser.choose(self.examples, lambda ex: not ex.trivial)

        # If the example is already trivial, assume there's nothing to do here.
        # We could attempt to use it as an adaptive replacement for other
        # similar examples, but that seems to be ineffective, resulting mostly
        # in redundant work rather than helping.
        if not self.try_zero_example(ex):
            return

        # If we zeroed the example we need to get the new one that replaced it.
        ex = self.examples[ex.index]

        original = self.shrink_target
        group = self.examples_by_label[ex.label]
        i = group.index(ex)
        replacement = self.buffer[ex.start : ex.end]

        # We first expand to cover the trivial region surrounding this group.
        # This avoids a situation where the adaptive phase "succeeds" a lot by
        # virtue of not doing anything and then goes into a galloping phase
        # where it does a bunch of useless work.
        def all_trivial(a, b):
            if a < 0 or b > len(group):
                return False
            return all(e.trivial for e in group[a:b])

        start, end = expand_region(all_trivial, i, i + 1)

        # If we've got multiple trivial examples of different lengths then
        # this isn't going to work as a replacement for all of them and so we
        # skip out early.
        if any(e.length != len(replacement) for e in group[start:end]):
            return

        def can_zero(a, b):
            if a < 0 or b > len(group):
                return False
            regions = []
            for e in group[a:b]:
                t = (e.start, e.end, replacement)
                if not regions or t[0] >= regions[-1][1]:
                    regions.append(t)
            return self.consider_new_buffer(replace_all(original.buffer, regions))

        expand_region(can_zero, start, end)

    @derived_value
    def blocks_by_non_zero_suffix(self):
        """Returns a list of blocks grouped by their non-zero suffix,
        as a list of (suffix, indices) pairs, skipping all groupings
        where there is only one index.

        This is only used for the arguments of minimize_duplicated_blocks.
        """
        duplicates = defaultdict(list)
        for block in self.blocks:
            duplicates[non_zero_suffix(self.buffer[block.start : block.end])].append(
                block.index
            )
        return duplicates

    @derived_value
    def duplicated_block_suffixes(self):
        return sorted(self.blocks_by_non_zero_suffix)

    @defines_shrink_pass()
    def minimize_duplicated_blocks(self, chooser):
        """Find blocks that have been duplicated in multiple places and attempt
        to minimize all of the duplicates simultaneously.

        This lets us handle cases where two values can't be shrunk
        independently of each other but can easily be shrunk together.
        For example if we had something like:

        ls = data.draw(lists(integers()))
        y = data.draw(integers())
        assert y not in ls

        Suppose we drew y = 3 and after shrinking we have ls = [3]. If we were
        to replace both 3s with 0, this would be a valid shrink, but if we were
        to replace either 3 with 0 on its own the test would start passing.

        It is also useful for when that duplication is accidental and the value
        of the blocks doesn't matter very much because it allows us to replace
        more values at once.
        """
        block = chooser.choose(self.duplicated_block_suffixes)
        targets = self.blocks_by_non_zero_suffix[block]
        if len(targets) <= 1:
            return

        Lexical.shrink(
            block,
            lambda b: self.try_shrinking_blocks(targets, b),
            random=self.random,
            full=False,
        )

    @defines_shrink_pass()
    def minimize_floats(self, chooser):
        """Some shrinks that we employ that only really make sense for our
        specific floating point encoding that are hard to discover from any
        sort of reasonable general principle. This allows us to make
        transformations like replacing a NaN with an Infinity or replacing
        a float with its nearest integers that we would otherwise not be
        able to due to them requiring very specific transformations of
        the bit sequence.

        We only apply these transformations to blocks that "look like" our
        standard float encodings because they are only really meaningful
        there. The logic for detecting this is reasonably precise, but
        it doesn't matter if it's wrong. These are always valid
        transformations to make, they just don't necessarily correspond to
        anything particularly meaningful for non-float values.
        """
        ex = chooser.choose(
            self.examples,
            lambda ex: (
                ex.label == DRAW_FLOAT_LABEL
                and len(ex.children) == 2
                and ex.children[0].length == 8
            ),
        )

        u = ex.children[0].start
        v = ex.children[0].end
        buf = self.shrink_target.buffer
        b = buf[u:v]
        f = lex_to_float(int_from_bytes(b))
        b2 = int_to_bytes(float_to_lex(f), 8)
        if b == b2 or self.consider_new_buffer(buf[:u] + b2 + buf[v:]):
            Float.shrink(
                f,
                lambda x: self.consider_new_buffer(
                    self.shrink_target.buffer[:u]
                    + int_to_bytes(float_to_lex(x), 8)
                    + self.shrink_target.buffer[v:]
                ),
                random=self.random,
            )

    @defines_shrink_pass()
    def minimize_individual_blocks(self, chooser):
        """Attempt to minimize each block in sequence.

        This is the pass that ensures that e.g. each integer we draw is a
        minimum value. So it's the part that guarantees that if we e.g. do

        x = data.draw(integers())
        assert x < 10

        then in our shrunk example, x = 10 rather than say 97.

        If we are unsuccessful at minimizing a block of interest we then
        check if that's because it's changing the size of the test case and,
        if so, we also make an attempt to delete parts of the test case to
        see if that fixes it.

        We handle most of the common cases in try_shrinking_blocks which is
        pretty good at clearing out large contiguous blocks of dead space,
        but it fails when there is data that has to stay in particular places
        in the list.
        """
        block = chooser.choose(self.blocks, lambda b: not b.trivial)

        initial = self.shrink_target
        u, v = block.bounds
        i = block.index
        Lexical.shrink(
            self.shrink_target.buffer[u:v],
            lambda b: self.try_shrinking_blocks((i,), b),
            random=self.random,
            full=False,
        )

        if self.shrink_target is not initial:
            return

        lowered = (
            self.buffer[: block.start]
            + int_to_bytes(
                int_from_bytes(self.buffer[block.start : block.end]) - 1, block.length
            )
            + self.buffer[block.end :]
        )
        attempt = self.cached_test_function(lowered)
        if (...
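
The minimize_duplicated_blocks docstring above explains why equal blocks must be shrunk together. A short, self-contained way to observe that behaviour through the public API is a property that fails exactly when two independent draws collide, so neither value can be lowered on its own. The sketch below is adapted from that docstring and uses only public Hypothesis functions; the minimal failure Hypothesis typically reports is ls=[0], y=0.

# Adapted from the minimize_duplicated_blocks docstring: the test fails
# whenever y appears in ls, so the shrinker must lower both draws at once
# (internally, via try_shrinking_blocks on the duplicated blocks).
from hypothesis import given, strategies as st

@given(st.lists(st.integers()), st.integers())
def test_value_not_in_list(ls, y):
    assert y not in ls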


test_shrinker.py

Source: test_shrinker.py (GitHub)


...
    )
    monkeypatch.setattr(
        Shrinker,
        "shrink",
        lambda self: self.try_shrinking_blocks((0, 1, 5), bytes([2])),
    )

    @run_to_buffer
    def x(data):
        n1 = data.draw_bits(8)
        data.draw_bits(8)
        if n1 == 3:
            data.draw_bits(8)
        k = data.draw_bits(8)
        if k == 1:
            data.mark_interesting()

    assert list(x) == [2, 2, 1]


def test_dependent_block_pairs_is_up_to_shrinking_integers():
    # Unit test extracted from a failure in tests/nocover/test_integers.py
    distribution = Sampler([4.0, 8.0, 1.0, 1.0, 0.5])

    sizes = [8, 16, 32, 64, 128]

    @shrinking_from(b"\x03\x01\x00\x00\x00\x00\x00\x01\x00\x02\x01")
    def shrinker(data):
        size = sizes[distribution.sample(data)]
        result = data.draw_bits(size)
        sign = (-1) ** (result & 1)
        result = (result >> 1) * sign
        cap = data.draw_bits(8)

        if result >= 32768 and cap == 1:
            data.mark_interesting()

    shrinker.fixate_shrink_passes(["minimize_individual_blocks"])
    assert list(shrinker.shrink_target.buffer) == [1, 1, 0, 1, 0, 0, 1]


def test_finding_a_minimal_balanced_binary_tree():
    # Tests iteration while the shape of the thing being iterated over can
    # change. In particular the current example can go from trivial to
    # non-trivial.

    def tree(data):
        # Returns height of a binary tree and whether it is height balanced.
        data.start_example("tree")
        n = data.draw_bits(1)
        if n == 0:
            result = (1, True)
        else:
            h1, b1 = tree(data)
            h2, b2 = tree(data)
            result = (1 + max(h1, h2), b1 and b2 and abs(h1 - h2) <= 1)
        data.stop_example("tree")
        return result

    # Starting from an unbalanced tree of depth six
    @shrinking_from([1, 1, 1, 1, 1, 0, 0, 0, 0, 0, 0])
    def shrinker(data):
        _, b = tree(data)
        if not b:
            data.mark_interesting()

    shrinker.shrink()
    assert list(shrinker.shrink_target.buffer) == [1, 0, 1, 0, 1, 0, 0]


def test_float_shrink_can_run_when_canonicalisation_does_not_work(monkeypatch):
    # This should be an error when called
    monkeypatch.setattr(Float, "shrink", None)

    base_buf = int_to_bytes(flt.base_float_to_lex(1000.0), 8) + bytes(1)

    @shrinking_from(base_buf)
    def shrinker(data):
        flt.draw_float(data)
        if bytes(data.buffer) == base_buf:
            data.mark_interesting()

    shrinker.fixate_shrink_passes(["minimize_floats"])
    assert shrinker.shrink_target.buffer == base_buf


def test_try_shrinking_blocks_out_of_bounds():
    @shrinking_from(bytes([1]))
    def shrinker(data):
        data.draw_bits(1)
        data.mark_interesting()

    assert not shrinker.try_shrinking_blocks((1,), bytes([1]))


def test_block_programs_are_adaptive():
    @shrinking_from(bytes(1000) + bytes([1]))
    def shrinker(data):
        while not data.draw_bits(1):
            pass
        data.mark_interesting()

    p = shrinker.add_new_pass(block_program("X"))
    shrinker.fixate_shrink_passes([p.name])

    assert len(shrinker.shrink_target.buffer) == 1
    assert shrinker.calls <= 60


def test_zero_examples_with_variable_min_size():
    @shrinking_from(bytes([255]) * 100)
    def shrinker(data):
        any_nonzero = False
...
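
The shrinking_from and run_to_buffer helpers used above come from Hypothesis's own test suite rather than the installed package, so they are not importable in ordinary user code. To see the same passes at work without internals, an ordinary failing property is enough; per the minimize_individual_blocks docstring, the counterexample below shrinks to exactly x = 10.

# Public-API sketch: running this test lets the shrinker (including
# minimize_individual_blocks, which calls try_shrinking_blocks) reduce
# the failing input to the minimal counterexample x = 10.
from hypothesis import given, strategies as st

@given(st.integers())
def test_integers_are_small(x):
    assert x < 10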


