How to use the _module_exists method in avocado

Best Python code snippet using avocado_python
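This page does not show avocado's own source for _module_exists, so as a baseline it helps to know what such a helper usually looks like: a thin wrapper around Python's import machinery that reports whether a module can be imported. The sketch below is a hypothetical, minimal version built on importlib.util.find_spec; the name, signature, and behaviour are assumptions for illustration, not avocado's actual implementation.

# Hypothetical sketch of a _module_exists-style helper (not avocado's actual code).
from importlib.util import find_spec


def _module_exists(module_name):
    """Return True if `module_name` can be imported, without importing it."""
    try:
        return find_spec(module_name) is not None
    except (ImportError, ValueError):
        # find_spec() raises ValueError for empty names and ModuleNotFoundError
        # (a subclass of ImportError) when a parent package is missing.
        return False


if __name__ == "__main__":
    print(_module_exists("json"))          # True on any standard install
    print(_module_exists("no_such_pkg"))   # False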

test_jasper_block.py

Source: test_jasper_block.py (GitHub)



    ...
            se=False,
        )
        base.update(kwargs)
        return base

    def check_module_exists(self, module, cls):
        global _MODULE_EXISTS
        _MODULE_EXISTS = 0

        def _traverse(m):
            if isinstance(m, cls):
                global _MODULE_EXISTS
                _MODULE_EXISTS += 1

        module.apply(_traverse)
        assert _MODULE_EXISTS > 0

    @pytest.mark.unit
    def test_basic_block(self):
        config = self.jasper_base_config(residual=False)
        act = jasper.jasper_activations.get(config.pop('activation'))()
        block = jasper.JasperBlock(**config, activation=act)
        x = torch.randn(1, 16, 131)
        xlen = torch.tensor([131])
        y, ylen = block(([x], xlen))
        assert isinstance(block, jasper.JasperBlock)
        assert y[0].shape == torch.Size([1, config['planes'], 131])
        assert ylen[0] == 131

    @pytest.mark.unit
    def test_residual_block(self):
        config = self.jasper_base_config(residual=True)
        act = jasper.jasper_activations.get(config.pop('activation'))()
        block = jasper.JasperBlock(**config, activation=act)
        x = torch.randn(1, 16, 131)
        xlen = torch.tensor([131])
        y, ylen = block(([x], xlen))
        assert isinstance(block, jasper.JasperBlock)
        assert y[0].shape == torch.Size([1, config['planes'], 131])
        assert ylen[0] == 131

    @pytest.mark.unit
    def test_basic_block_repeat(self):
        config = self.jasper_base_config(residual=False, repeat=3)
        act = jasper.jasper_activations.get(config.pop('activation'))()
        block = jasper.JasperBlock(**config, activation=act)
        x = torch.randn(1, 16, 131)
        xlen = torch.tensor([131])
        y, ylen = block(([x], xlen))
        assert isinstance(block, jasper.JasperBlock)
        assert y[0].shape == torch.Size([1, config['planes'], 131])
        assert ylen[0] == 131
        assert len(block.mconv) == 3 * 3 + 1  # (3 repeats x {1 conv + 1 norm + 1 dropout} + final conv)

    @pytest.mark.unit
    def test_basic_block_repeat_stride(self):
        config = self.jasper_base_config(residual=False, repeat=3, stride=[2])
        act = jasper.jasper_activations.get(config.pop('activation'))()
        block = jasper.JasperBlock(**config, activation=act)
        x = torch.randn(1, 16, 131)
        xlen = torch.tensor([131])
        y, ylen = block(([x], xlen))
        assert isinstance(block, jasper.JasperBlock)
        assert y[0].shape == torch.Size([1, config['planes'], 17])  # 131 // (stride ^ repeats)
        assert ylen[0] == 17  # 131 // (stride ^ repeats)
        assert len(block.mconv) == 3 * 3 + 1  # (3 repeats x {1 conv + 1 norm + 1 dropout} + final conv)

    @pytest.mark.unit
    def test_basic_block_repeat_stride_last(self):
        config = self.jasper_base_config(residual=False, repeat=3, stride=[2], stride_last=True)
        act = jasper.jasper_activations.get(config.pop('activation'))()
        block = jasper.JasperBlock(**config, activation=act)
        x = torch.randn(1, 16, 131)
        xlen = torch.tensor([131])
        y, ylen = block(([x], xlen))
        assert isinstance(block, jasper.JasperBlock)
        assert y[0].shape == torch.Size([1, config['planes'], 66])  # 131 // stride
        assert ylen[0] == 66  # 131 // stride
        assert len(block.mconv) == 3 * 3 + 1  # (3 repeats x {1 conv + 1 norm + 1 dropout} + final conv)

    @pytest.mark.unit
    def test_basic_block_repeat_separable(self):
        config = self.jasper_base_config(residual=False, repeat=3, separable=True)
        act = jasper.jasper_activations.get(config.pop('activation'))()
        block = jasper.JasperBlock(**config, activation=act)
        x = torch.randn(1, 16, 131)
        xlen = torch.tensor([131])
        y, ylen = block(([x], xlen))
        assert isinstance(block, jasper.JasperBlock)
        assert y[0].shape == torch.Size([1, config['planes'], 131])
        assert ylen[0] == 131
        assert len(block.mconv) == 3 * 4 + 1  # (3 repeats x {1 dconv + 1 pconv + 1 norm + 1 dropout} + final conv)

    @pytest.mark.unit
    def test_basic_block_stride(self):
        config = self.jasper_base_config(stride=[2], residual=False)
        act = jasper.jasper_activations.get(config.pop('activation'))()
        print(config)
        block = jasper.JasperBlock(**config, activation=act)
        x = torch.randn(1, 16, 131)
        xlen = torch.tensor([131])
        y, ylen = block(([x], xlen))
        assert isinstance(block, jasper.JasperBlock)
        assert y[0].shape == torch.Size([1, config['planes'], 66])
        assert ylen[0] == 66

    @pytest.mark.unit
    def test_residual_block_stride(self):
        config = self.jasper_base_config(stride=[2], residual=True, residual_mode='stride_add')
        act = jasper.jasper_activations.get(config.pop('activation'))()
        print(config)
        block = jasper.JasperBlock(**config, activation=act)
        x = torch.randn(1, 16, 131)
        xlen = torch.tensor([131])
        y, ylen = block(([x], xlen))
        assert isinstance(block, jasper.JasperBlock)
        assert y[0].shape == torch.Size([1, config['planes'], 66])
        assert ylen[0] == 66

    @pytest.mark.unit
    def test_residual_block_activations(self):
        for activation in jasper.jasper_activations.keys():
            config = self.jasper_base_config(activation=activation)
            act = jasper.jasper_activations.get(config.pop('activation'))()
            block = jasper.JasperBlock(**config, activation=act)
            x = torch.randn(1, 16, 131)
            xlen = torch.tensor([131])
            y, ylen = block(([x], xlen))
            self.check_module_exists(block, act.__class__)
            assert isinstance(block, jasper.JasperBlock)
            assert y[0].shape == torch.Size([1, config['planes'], 131])
            assert ylen[0] == 131

    @pytest.mark.unit
    def test_residual_block_normalizations(self):
        NORMALIZATIONS = ["batch", "layer", "group"]
        for normalization in NORMALIZATIONS:
            config = self.jasper_base_config(normalization=normalization)
            act = jasper.jasper_activations.get(config.pop('activation'))()
            block = jasper.JasperBlock(**config, activation=act)
            x = torch.randn(1, 16, 131)
            xlen = torch.tensor([131])
            y, ylen = block(([x], xlen))
            assert isinstance(block, jasper.JasperBlock)
            assert y[0].shape == torch.Size([1, config['planes'], 131])
            assert ylen[0] == 131

    @pytest.mark.unit
    def test_residual_block_se(self):
        config = self.jasper_base_config(se=True, se_reduction_ratio=8)
        act = jasper.jasper_activations.get(config.pop('activation'))()
        block = jasper.JasperBlock(**config, activation=act)
        x = torch.randn(1, 16, 131)
        xlen = torch.tensor([131])
        y, ylen = block(([x], xlen))
        self.check_module_exists(block, jasper.SqueezeExcite)
        assert isinstance(block, jasper.JasperBlock)
        assert y[0].shape == torch.Size([1, config['planes'], 131])
        assert ylen[0] == 131

    @pytest.mark.unit
    def test_residual_block_asymmetric_pad_future_contexts(self):
        # test future contexts at various values
        # 0 = no future context
        # 2 = limited future context
        # 5 = symmetric context
        # 8 = excess future context (more future context than present or past context)
        future_contexts = [0, 2, 5, 8]
        for future_context in future_contexts:
            print(future_context)
            config = self.jasper_base_config(future_context=future_context)
            act = jasper.jasper_activations.get(config.pop('activation'))()
            block = jasper.JasperBlock(**config, activation=act)
            x = torch.randn(1, 16, 131)
            xlen = torch.tensor([131])
            y, ylen = block(([x], xlen))
            self.check_module_exists(block, torch.nn.ConstantPad1d)
            self.check_module_exists(block, jasper.MaskedConv1d)
            assert isinstance(block, jasper.JasperBlock)
            assert y[0].shape == torch.Size([1, config['planes'], 131])
            assert ylen[0] == 131
            assert block.mconv[0].pad_layer is not None
            assert block.mconv[0]._padding == (config['kernel_size'][0] - 1 - future_context, future_context)

    @pytest.mark.unit
    def test_residual_block_asymmetric_pad_future_context_fallback(self):
        # test future contexts at various values
        # 15 = K < FC; fall back to symmetric context
        future_context = 15
        print(future_context)
        config = self.jasper_base_config(future_context=future_context)
        act = jasper.jasper_activations.get(config.pop('activation'))()
        block = jasper.JasperBlock(**config, activation=act)
        x = torch.randn(1, 16, 131)
        xlen = torch.tensor([131])
        y, ylen = block(([x], xlen))
        self.check_module_exists(block, jasper.MaskedConv1d)
        assert isinstance(block, jasper.JasperBlock)
        assert y[0].shape == torch.Size([1, config['planes'], 131])
        assert ylen[0] == 131
        assert block.mconv[0].pad_layer is None
        assert block.mconv[0]._padding == config['kernel_size'][0] // 2

    @pytest.mark.unit
    def test_padding_size_conv1d(self):
        input_channels = 1
        output_channels = 1
        kernel_sizes = [3, 7, 11]
        dilation_sizes = [2, 3, 4]
        stride = 1
        inp = torch.rand(2, 1, 40)
        for kernel_size in kernel_sizes:
            ...
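The check_module_exists helper above walks a JasperBlock with nn.Module.apply and counts submodules of a given class through a module-level _MODULE_EXISTS counter. The same check can be written as a small standalone function; the sketch below assumes only that PyTorch is installed and swaps apply() for nn.Module.modules(), which iterates the module tree directly.

# Minimal standalone sketch of the same check; assumes only PyTorch is installed.
# JasperBlock is not required to demonstrate the idea.
import torch


def check_module_exists(module: torch.nn.Module, cls: type) -> None:
    # nn.Module.modules() yields the module and every submodule recursively,
    # so counting isinstance() hits replaces the global counter used above.
    count = sum(1 for m in module.modules() if isinstance(m, cls))
    assert count > 0, f"no {cls.__name__} submodule found"


if __name__ == "__main__":
    block = torch.nn.Sequential(torch.nn.Conv1d(16, 32, 3), torch.nn.ReLU())
    check_module_exists(block, torch.nn.ReLU)            # passes
    # check_module_exists(block, torch.nn.BatchNorm1d)   # would raise AssertionError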


startproject.py

Source: startproject.py (GitHub)



    ...
        return "<project_name>"

    def short_desc(self):
        return "Create new project"

    def _is_valid_name(self, project_name):
        def _module_exists(module_name):
            try:
                import_module(module_name)
                return True
            except ImportError:
                return False

        if not re.search(r'^[_a-zA-Z]\w*$', project_name):
            print('Error: Project names must begin with a letter and contain'
                  ' only\nletters, numbers and underscores')
        elif exists(project_name):
            print('Error: Directory %r already exists' % project_name)
        elif _module_exists(project_name):
            print('Error: Module %r already exists' % project_name)
        else:
            return True
        return False

    def run(self, args, opts):
        if len(args) != 1:
            raise UsageError()
        project_name = args[0]
        if not self._is_valid_name(project_name):
            self.exitcode = 1
            return
        moduletpl = join(TEMPLATES_PATH, 'module')
        copytree(moduletpl, join(project_name, project_name), ignore=IGNORE)
        shutil.copy(join(TEMPLATES_PATH, 'scrapy.cfg'), project_name)
    ...
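Here _module_exists is a nested helper inside Scrapy's startproject command: it simply tries to import the candidate project name and treats an ImportError as "does not exist", so a new project cannot shadow a module that is already importable. Lifted out of the command class, the same helper can be reused on its own; the version below is an illustrative sketch, not part of the original file.

# Standalone version of the nested helper above (illustrative sketch only).
from importlib import import_module


def _module_exists(module_name):
    try:
        # Note: import_module() really imports the module as a side effect;
        # that is acceptable here because only the True/False answer is needed.
        import_module(module_name)
        return True
    except ImportError:
        return False


if __name__ == "__main__":
    for name in ("re", "definitely_not_installed_project"):
        print(name, "->", _module_exists(name))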


