Best Python code snippet using lisa_python
bonus.py
Source:bonus.py  
...
    Inherits from DecoderWithoutAttention to avoid repeated code.

    Reference: http://www.cs.toronto.edu/~rgrosse/courses/csc421_2019/slides/lec16.pdf
    '''

    def init_submodules(self):
        # Hints:
        # 1. Same as the case without attention, you must initialize the
        #    following submodules: self.embedding, self.cell, self.ff
        # 2. You will need these object attributes:
        #    self.target_vocab_size, self.word_embedding_size, self.pad_id,
        #    self.hidden_state_size, self.cell_type.
        # 3. cell_type will be one of: ['lstm', 'gru', 'rnn']
        # 4. Relevant pytorch modules:
        #    torch.nn.{Embedding, Linear, LSTMCell, RNNCell, GRUCell}
        # 5. The implementation of this function should be different from
        #    DecoderWithoutAttention.init_submodules.

        cells = {"lstm": nn.LSTMCell, "gru": nn.GRUCell, "rnn": nn.RNNCell}

        # Enlarge the input size to accommodate the context vector
        self.cell = cells[self.cell_type](
            input_size=self.word_embedding_size + self.hidden_state_size,
            hidden_size=self.hidden_state_size)

        self.embedding = nn.Embedding(num_embeddings=self.target_vocab_size,
                                      embedding_dim=self.word_embedding_size,
                                      padding_idx=self.pad_id)
        self.ff = nn.Linear(in_features=self.hidden_state_size,
                            out_features=self.target_vocab_size)

    def get_energy_scores(self, htilde_t, h):
        # The scores are scaled by 1/sqrt(hidden state size)
        # Recall:
        #   htilde_t is of shape (M, 2 * H)
        #   h is of shape (S, M, 2 * H)
        #   e_t (output) is of shape (S, M)

        # Transform htilde_t to shape (M, 1, 2 * H)
        if self.cell_type == "lstm":
            htilde_t_temp = htilde_t[0].unsqueeze(1)
        else:
            htilde_t_temp = htilde_t.unsqueeze(1)

        scale = 1 / (h.shape[2] ** 0.5)
        h_temp = h.permute(1, 2, 0)

        e_t = torch.matmul(htilde_t_temp, h_temp) * scale
        return e_t.squeeze(1).transpose(0, 1)


class DecoderMultiDotProductHeadAttention(DecoderSingleDotProductAttention):
    '''A decoder, this time with multi-head scaled dot product attention

    Inherits from DecoderSingleDotProductAttention to avoid repeated code.
    '''

    def __init__(self, *args, **kwargs):
        super().__init__(*args, **kwargs)
        assert self.W is not None, 'initialize W!'
        assert self.Wtilde is not None, 'initialize Wtilde!'
        assert self.Q is not None, 'initialize Q!'

    def init_submodules(self):
        super().init_submodules()  # Do not modify this line

        # Hints:
        # 1. The above line should ensure self.ff, self.embedding, self.cell are
        #    initialized
        # 2. You need to initialize these submodules:
        #    self.W, self.Wtilde, self.Q
        # 3. You will need these object attributes:
        #    self.hidden_state_size
        # 4. self.W, self.Wtilde, and self.Q should process all heads at once.
        #    They should not be lists!
        # 5. You do *NOT* need self.heads at this point
        # 6. Relevant pytorch module: torch.nn.Linear (note: set bias=False!)

        # Each submodule is H * H (the size does not depend on the number of heads)
        self.W = nn.Linear(self.hidden_state_size, self.hidden_state_size, bias=False)
        self.Wtilde = nn.Linear(self.hidden_state_size, self.hidden_state_size, bias=False)
        self.Q = nn.Linear(self.hidden_state_size, self.hidden_state_size, bias=False)

    def attend(self, htilde_t, h, F_lens):
        # Hints:
        # 1. You can use super().attend to call the regular attention function.
        # 2. Relevant pytorch functions:
        #    tensor().repeat_interleave, tensor().view
        # 3. Fun fact:
        #    tensor([1,2,3,4]).repeat(2) will output tensor([1,2,3,4,1,2,3,4]).
        #    tensor([1,2,3,4]).repeat_interleave(2) will output
        #    tensor([1,1,2,2,3,3,4,4]), just like numpy.repeat.
        # 4. You *WILL* need self.heads at this point

        # Case LSTM: apply Wtilde only to the hidden state (the first element of
        # the tuple); the cell state is merely reshaped.
        # Let K represent the number of heads.
        # Transformation: htilde_t_n = MK * H/K, after applying the linear layer.
        # To ensure that the energy score function is computed correctly, we can
        # only expand along the batch dimension.
        if self.cell_type == "lstm":
            htilde_t_n = (self.Wtilde(htilde_t[0]).view(h.shape[1] * self.heads, h.shape[2] // self.heads),
                          htilde_t[1].view(h.shape[1] * self.heads, h.shape[2] // self.heads))
        else:
            htilde_t_n = self.Wtilde(htilde_t).view(h.shape[1] * self.heads, h.shape[2] // self.heads)

        # Transformation: h_n = S * MK * H/K, after applying the linear layer
        h_n = self.W(h).view(h.shape[0], h.shape[1] * self.heads, h.shape[2] // self.heads)

        # Transformation: F_lens_n = MK
        # This ensures that padding is applied correctly, since F_lens_n keeps
        # track of the last element of each sequence
        F_lens_n = F_lens.repeat_interleave(self.heads)

        # Call attend (for a single head) and transform the MK * H/K context
        # vectors back to an M * H context vector
        c_t_n = super().attend(htilde_t_n, h_n, F_lens_n).view(h.shape[1], h.shape[2])

        # Apply a final linear layer
        return self.Q(c_t_n)


class DecoderMultiplicativeAttention(DecoderWithAttention):
    '''A decoder using multiplicative attention

    Reference: https://ruder.io/deep-learning-nlp-best-practices/
    '''

    def __init__(self, *args, **kwargs):
        super().__init__(*args, **kwargs)
        assert self.Wm is not None, 'initialize Wm!'

    def init_submodules(self):
        super().init_submodules()  # Do not modify this line

        # Hints:
        # 1. Same as the case without attention, you must initialize the
        #    following submodules: self.embedding, self.cell, self.ff
        # 2. You will need these object attributes:
        #    self.target_vocab_size, self.word_embedding_size, self.pad_id,
        #    self.hidden_state_size, self.cell_type.
        # 3. cell_type will be one of: ['lstm', 'gru', 'rnn']
        # 4. Relevant pytorch modules:
        #    torch.nn.{Embedding, Linear, LSTMCell, RNNCell, GRUCell}
        # 5. The implementation of this function should be different from
        #    DecoderWithoutAttention.init_submodules.

        self.Wm = nn.Linear(self.hidden_state_size, self.hidden_state_size, bias=False)
...
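As a quick sanity check on the shapes used in get_energy_scores above, the following standalone sketch runs the same scaled dot-product computation on dummy tensors. The sizes S=5, M=3 and 2H=8 are arbitrary and only for illustration; they are not part of the original module.

import torch

S, M, H2 = 5, 3, 8                  # source length, batch size, 2 * H
htilde_t = torch.randn(M, H2)       # decoder hidden state
h = torch.randn(S, M, H2)           # encoder hidden states

scale = 1 / (h.shape[2] ** 0.5)     # 1 / sqrt(2 * H)
# (M, 1, 2H) @ (M, 2H, S) -> (M, 1, S)
e_t = torch.matmul(htilde_t.unsqueeze(1), h.permute(1, 2, 0)) * scale
e_t = e_t.squeeze(1).transpose(0, 1)

print(e_t.shape)                    # torch.Size([5, 3]), i.e. (S, M)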
git.py
Source:git.py  
import os
import shutil
import subprocess
import tempfile

from . import Plugin


class Git(Plugin):
    def __init__(self, **kwargs):
        super(Git, self).__init__()
        self._git = kwargs.get('git-binary')
        self._sh = kwargs.get('sh-binary')

    def check(self):
        if self._git is None:
            self._git = shutil.which('git')
        if self._git is None:
            raise AssertionError('plugin \'Git\' requires \'git\' to be installed')
        if self._sh is None:
            for sh in ['sh', 'bash', 'zsh']:
                self._sh = shutil.which(sh)
                if self._sh is not None:
                    break
        if self._sh is None:
            raise AssertionError('plugin \'Git\' requires \'sh\' to be installed')

    def define(self):
        @self.rule
        def git_repository(name, remote, commit=None, branch=None,
                           init_submodules=False, sha256=None, tag=None,
                           output=None):
            def info(is_on_running, is_finish_successful=False, **kwargs):
                if is_on_running is True:
                    if is_finish_successful is True:
                        print('Finish cloning: %s' % remote)
                    else:
                        print('Clone: %s' % remote)
                else:
                    print('Remove: %s' % remote.split('.git')[0].split('/')[-1])

            def callback(root, **kwargs):
                fd, shscript = tempfile.mkstemp(suffix='.sh')
                repo_name = remote.split('.git')[0].split('/')[-1]
                cmd = [self._sh, shscript]
                git = [self._git, 'clone']

                # @NOTE: sometimes we need to clone a repo into an exact
                # directory, so we must instruct 'builder' to do it
                if output is not None:
                    for dir_name in output.split('/'):
                        root = '%s/%s' % (root, dir_name)
                        if os.path.exists(root) is False:
                            os.mkdir(root, 0o777)

                # @NOTE: clone the repo if it's needed
                if init_submodules is True:
                    git.append('--recurse-submodules')
                    git.append('-j%d' % os.cpu_count())
                if branch is not None:
                    git.append('--single-branch')
                    git.append('-b')
                    git.append(branch)
                git.append(remote)

                script = 'cd %s;%s' % (root, ' '.join(git))
                os.write(fd, script.encode())
                os.close(fd)
                clone = subprocess.Popen(cmd, stdout=subprocess.PIPE,
                                         stderr=subprocess.PIPE)
                error = clone.stderr.read()
                os.remove(shscript)

                expected_msg = "Cloning into '%s'...\n" % repo_name
                if len(error) > 0 and error.decode('utf-8') != expected_msg:
                    raise AssertionError('there is an error while cloning '
                                         'repo %s: %s'
                                         % (remote, error.decode('utf-8')))

                # @NOTE: jump to the exact commit of this repo if it's requested
                if (commit is not None) or (sha256 is not None) or (tag is not None):
                    fd, shscript = tempfile.mkstemp(suffix='.sh')
                    cmd = [self._sh, shscript]
                    if commit is not None:
                        script = 'cd %s;cd %s;%s reset --hard %s' % (root, repo_name, self._git, commit)
                    elif sha256 is not None:
                        script = 'cd %s;cd %s;%s reset --hard %s' % (root, repo_name, self._git, sha256)
                    elif tag is not None:
                        script = 'cd %s;cd %s;%s checkout tags/%s' % (root, repo_name, self._git, tag)
                    os.write(fd, script.encode())
                    os.close(fd)
                    changing = subprocess.Popen(cmd, stdout=subprocess.PIPE,
                                                stderr=subprocess.PIPE)
                    error = changing.stderr.read()
                    os.remove(shscript)
                    if len(error) > 0:
                        raise AssertionError('there is an error while changing '
                                             'repo %s to the exact commit you requested: %s'
                                             % (remote, error.decode('utf-8')))
                return True

            def teardown(root, **kwargs):
                fd, shscript = tempfile.mkstemp(suffix='.sh')
                repo_name = remote.split('.git')[0].split('/')[-1]
                cmd = [self._sh, shscript]
                if output is None:
                    script = 'rm -fr %s/%s' % (root, repo_name)
                else:
                    script = 'rm -fr %s/%s/%s' % (root, output, repo_name)

                os.write(fd, script.encode())
                os.close(fd)
                removing = subprocess.Popen(cmd, stdout=subprocess.PIPE,
                                            stderr=subprocess.PIPE)
                error = removing.stderr.read()
                os.remove(shscript)
                if len(error) > 0:
                    raise AssertionError('there is an error while removing '
                                         'repo %s: %s' % (remote, error.decode('utf-8')))
                return True

            node = {
                'callback': callback,
                'info': info,
                'teardown': teardown,
                'remote': remote
            }
            self._manager.add_to_dependency_tree(name, node, None)

        @self.rule
        def new_git_repository(name, remote, commit=None,
                               init_submodules=False, sha256=None, tag=None):
...
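The callback and teardown helpers above all follow the same pattern: write a one-off shell script to a temporary .sh file, run it through the configured shell, and treat unexpected stderr output as a failure. Below is a stripped-down sketch of that pattern; run_script and the repository URL are illustrative only and not part of the plugin.

import os
import shutil
import subprocess
import tempfile


def run_script(script, sh=None):
    """Write `script` to a temporary .sh file, run it, and return its stderr."""
    sh = sh or shutil.which('sh')
    fd, shscript = tempfile.mkstemp(suffix='.sh')
    try:
        os.write(fd, script.encode())
        os.close(fd)
        proc = subprocess.Popen([sh, shscript],
                                stdout=subprocess.PIPE,
                                stderr=subprocess.PIPE)
        _, error = proc.communicate()
        return error
    finally:
        os.remove(shscript)


# Example: clone a repository into /tmp (the URL is a placeholder).
# Note that, as in the plugin, a caller has to tolerate git's normal
# "Cloning into ..." progress message, which also goes to stderr.
error = run_script('cd /tmp;git clone https://example.com/some/repo.git')
if error:
    print(error.decode('utf-8'))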
git.bzl
Source:git.bzl
# Copyright 2015 The Bazel Authors. All rights reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
#    http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.

"""Rules for cloning external git repositories."""

def _clone_or_update(ctx):
  if ((ctx.attr.tag == "" and ctx.attr.commit == "") or
      (ctx.attr.tag != "" and ctx.attr.commit != "")):
    ctx.fail("Exactly one of commit and tag must be provided")
  if ctx.attr.commit != "":
    ref = ctx.attr.commit
  else:
    ref = "tags/" + ctx.attr.tag
  st = ctx.execute(["bash", '-c', """
set -ex
( cd {working_dir} &&
    if ! ( cd '{dir}' && git rev-parse --git-dir ) >/dev/null 2>&1; then
      rm -rf '{dir}'
      git clone '{remote}' '{dir}'
    fi
    cd '{dir}'
    git reset --hard {ref} || (git fetch && git reset --hard {ref})
    git clean -xdf )
  """.format(
      working_dir=ctx.path(".").dirname,
      dir=ctx.path("."),
      remote=ctx.attr.remote,
      ref=ref,
  )])
  if st.return_code != 0:
    fail("error cloning %s:\n%s" % (ctx.name, st.stderr))
  if ctx.attr.init_submodules:
    st = ctx.execute(["bash", '-c', """
set -ex
(   cd '{dir}'
    git submodule update --init --checkout --force )
  """.format(
      dir=ctx.path("."),
    )])
    if st.return_code != 0:
      fail("error updating submodules %s:\n%s" % (ctx.name, st.stderr))

def _new_git_repository_implementation(ctx):
  if ((ctx.attr.build_file == None and ctx.attr.build_file_content == '') or
      (ctx.attr.build_file != None and ctx.attr.build_file_content != '')):
    ctx.fail("Exactly one of build_file and build_file_content must be provided.")
  _clone_or_update(ctx)
  ctx.file('WORKSPACE', "workspace(name = \"{name}\")\n".format(name=ctx.name))
  if ctx.attr.build_file:
    ctx.symlink(ctx.attr.build_file, 'BUILD')
  else:
    ctx.file('BUILD', ctx.attr.build_file_content)

def _git_repository_implementation(ctx):
  _clone_or_update(ctx)

_common_attrs = {
  "remote": attr.string(mandatory=True),
  "commit": attr.string(default=""),
  "tag": attr.string(default=""),
  "init_submodules": attr.bool(default=False),
}

new_git_repository = repository_rule(
  implementation=_new_git_repository_implementation,
  attrs=_common_attrs + {
    "build_file": attr.label(),
    "build_file_content": attr.string(),
  }
)
"""Clone an external git repository.

Clones a Git repository, checks out the specified tag, or commit, and
makes its targets available for binding.

Args:
  name: A unique name for this rule.
  build_file: The file to use as the BUILD file for this repository.
    Either build_file or build_file_content must be specified.
    This attribute is a label relative to the main workspace. The file
    does not need to be named BUILD, but can be (something like
    BUILD.new-repo-name may work well for distinguishing it from the
    repository's actual BUILD files.
  build_file_content: The content for the BUILD file for this repository.
    Either build_file or build_file_content must be specified.
  init_submodules: Whether to clone submodules in the repository.
  remote: The URI of the remote Git repository.
"""

git_repository = repository_rule(
  implementation=_git_repository_implementation,
  attrs=_common_attrs,
)
"""Clone an external git repository.

Clones a Git repository, checks out the specified tag, or commit, and
makes its targets available for binding.

Args:
  name: A unique name for this rule.
  init_submodules: Whether to clone submodules in the repository.
  remote: The URI of the remote Git repository.
...
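For contrast with the Python plugin above, this is roughly how the rules defined in git.bzl are used from a WORKSPACE file (Starlark, which follows Python syntax). The load label, names, URLs, tag, and commit below are placeholders, and the exact path to git.bzl depends on the Bazel version in use.

load("@bazel_tools//tools/build_defs/repo:git.bzl", "git_repository", "new_git_repository")

git_repository(
    name = "some_dep",                             # a unique name for this rule
    remote = "https://example.com/some/repo.git",  # URI of the remote Git repository
    tag = "v1.2.3",                                # exactly one of tag or commit
    init_submodules = True,                        # also clone submodules
)

new_git_repository(
    name = "other_dep",
    remote = "https://example.com/other/repo.git",
    commit = "0123456789abcdef0123456789abcdef01234567",  # placeholder commit
    build_file = "//third_party:other_dep.BUILD",  # exactly one of build_file
                                                   # or build_file_content
)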