Best Python code snippet using hypothesis
__init__.py
Source: __init__.py
...
                queue.append(k)

        for j in explored:
            cache[j] = j in marked_live

        return cache[state]

    def all_matching_strings_of_length(self, k):
        """Yields all matching strings whose length is ``k``, in ascending
        lexicographic order."""
        if k == 0:
            if self.is_accepting(self.start):
                yield b""
            return

        if not self.has_strings(self.start, k):
            return

        # This tracks a path through the DFA. We alternate between growing
        # it until it has length ``k`` and is in an accepting state, then
        # yielding that as a result, then modifying it so that the next
        # time we do that it will yield the lexicographically next matching
        # string.
        path = bytearray()

        # Tracks the states that are visited by following ``path`` from the
        # starting point.
        states = [self.start]

        while True:
            # First we build up our current best prefix to the lexicographically
            # first string starting with it.
            while len(path) < k:
                state = states[-1]
                for c, j in self.transitions(state):
                    if self.has_strings(j, k - len(path) - 1):
                        states.append(j)
                        path.append(c)
                        break
                else:  # pragma: no cover
                    assert False

            assert self.is_accepting(states[-1])
            assert len(states) == len(path) + 1

            yield bytes(path)

            # Now we want to replace this string with the prefix that will
            # cause us to extend to its lexicographic successor. This can
            # be thought of as just repeatedly moving to the next lexicographic
            # successor until we find a matching string, but we're able to
            # use our length counts to jump over long sequences where there
            # cannot be a match.
            while True:
                # As long as we are in this loop we are trying to move to
                # the successor of the current string.

                # If we've removed the entire prefix then we're done - no
                # successor is possible.
                if not path:
                    return

                if path[-1] == 255:
                    # If our last element is maximal then we have to "carry
                    # the one" - our lexicographic successor must be incremented
                    # earlier than this.
                    path.pop()
                    states.pop()
                else:
                    # Otherwise increment by one.
                    path[-1] += 1
                    states[-1] = self.transition(states[-2], path[-1])

                    # If there are no strings of the right length starting from
                    # this prefix we need to keep going. Otherwise, this is
                    # the right place to be and we break out of our loop of
                    # trying to find the successor because it starts here.
                    if self.count_strings(states[-1], k - len(path)) > 0:
                        break

    def all_matching_strings(self, min_length=0):
        """Iterate over all strings matched by this automaton
        in shortlex-ascending order."""
        # max_length might be infinite, hence the while loop
        max_length = self.max_length(self.start)
        length = min_length
        while length <= max_length:
            yield from self.all_matching_strings_of_length(length)
            length += 1

    def raw_transitions(self, i):
        for c in self.alphabet:
            j = self.transition(i, c)
            yield c, j

    def canonicalise(self):
        """Return a canonical version of ``self`` as a ConcreteDFA.

        The DFA is not minimized, but nodes are sorted and relabelled
        and dead nodes are pruned, so two minimized DFAs for the same
        language will end up with identical canonical representatives.
        This is mildly important because it means that the output of
        L* should produce the same canonical DFA regardless of what
        order we happen to have run it in.
        """
        ...
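The inner ``while True`` loop above is the heart of the enumerator: ``path`` is treated as a fixed-length base-256 number, and its lexicographic successor is found by incrementing the last byte, "carrying" (popping) whenever a byte is already 255, and then consulting ``count_strings`` so that prefixes which cannot be completed to a match of length ``k`` are skipped entirely. The standalone sketch below shows just the carry step, with the DFA-based pruning left out; ``successor_prefix`` is an illustrative name, not part of Hypothesis.

# A minimal standalone sketch of the "carry the one" step used above, assuming
# no DFA pruning: it trims/increments ``path`` in place so that it becomes the
# shortest prefix of the lexicographically next candidate string.
def successor_prefix(path: bytearray) -> bool:
    """Returns False when no successor of the original length exists,
    i.e. every byte was already 0xFF."""
    while path:
        if path[-1] == 0xFF:
            # Maximal byte: carry - the successor must differ earlier on.
            path.pop()
        else:
            path[-1] += 1
            return True
    return False

p = bytearray(b"\x00\xff")
print(successor_prefix(p), bytes(p))  # True b'\x01'  (later re-grown to b'\x01\x00')

p = bytearray(b"\xff\xff")
print(successor_prefix(p), bytes(p))  # False b''  (no successor of length 2 exists)

In the method above, ``states`` is popped and updated in lockstep with ``path``, and the loop only breaks once ``count_strings(states[-1], k - len(path))`` is positive, so whole subtrees containing no matches are jumped over before the outer loop grows the prefix back to length ``k``.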
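``all_matching_strings`` promises shortlex-ascending order: candidates are grouped by length, shortest first, and each length group is emitted in ascending lexicographic order. The brute-force standalone sketch below illustrates that ordering with a plain predicate standing in for the DFA's acceptance test and a hand-chosen bound instead of ``max_length()``; the names are illustrative only, not Hypothesis API.

import itertools

# Enumerate by length first, then lexicographically within each length - the
# same contract as all_matching_strings(min_length=...), minus the DFA.
def shortlex_matches(accepts, alphabet, max_len, min_length=0):
    for k in range(min_length, max_len + 1):
        for chars in itertools.product(sorted(alphabet), repeat=k):
            s = bytes(chars)
            if accepts(s):
                yield s

# Toy language: byte strings over b"ab" that contain the substring b"ab".
print(list(shortlex_matches(lambda s: b"ab" in s, b"ab", 3)))
# [b'ab', b'aab', b'aba', b'abb', b'bab']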
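The body of ``canonicalise`` is truncated in this listing, so the sketch below is not Hypothesis's implementation; it is an independent illustration of the relabelling idea described in the docstring: walk the states in a fixed order from the start state, hand out fresh indices in discovery order, and keep only the states reached that way. (The real method also prunes dead states, which this sketch does not.) The transition-table format and the function name are assumptions made for the example.

from collections import deque

def canonical_relabelling(transitions, start):
    """``transitions`` maps state -> {character: state}.  Returns a relabelled
    table whose states are 0, 1, 2, ... in breadth-first discovery order from
    ``start``, following characters in ascending order."""
    order = {start: 0}
    queue = deque([start])
    while queue:
        state = queue.popleft()
        for c in sorted(transitions[state]):
            target = transitions[state][c]
            if target not in order:
                order[target] = len(order)
                queue.append(target)
    # Re-emit the table under the new labels, one dict per new state index.
    return [
        {c: order[t] for c, t in sorted(transitions[s].items())}
        for s in sorted(order, key=order.get)
    ]

# Two copies of the same automaton that merely use different state labels
# canonicalise to identical tables.
a = {10: {0: 11}, 11: {0: 11, 1: 10}}
b = {"x": {0: "y"}, "y": {0: "y", 1: "x"}}
print(canonical_relabelling(a, 10) == canonical_relabelling(b, "x"))  # True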
