Best Python code snippet using autotest_python
transformer_scaffold_test.py
Source:transformer_scaffold_test.py  
# Copyright 2019 The TensorFlow Authors. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
#     http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# ==============================================================================
"""Tests for Keras-based transformer block layer."""
from __future__ import absolute_import
from __future__ import division
from __future__ import print_function

import json
import numpy as np
import tensorflow as tf

from tensorflow.python.keras import keras_parameterized  # pylint: disable=g-direct-tensorflow-import
from official.nlp.modeling.layers import attention
from official.nlp.modeling.layers import transformer_scaffold


# Test class that wraps a standard attention layer. If this layer is called
# at any point, the list passed to the config object will be filled with a
# boolean 'True'. We register this class as a Keras serializable so we can
# test serialization below.
# @tf.keras.utils.register_keras_serializable(package='TestOnly')
class ValidatedAttentionLayer(attention.Attention):

  def __init__(self, call_list, **kwargs):
    super(ValidatedAttentionLayer, self).__init__(**kwargs)
    self.list = call_list

  def call(self, inputs):
    self.list.append(True)
    return super(ValidatedAttentionLayer, self).call(inputs)

  def get_config(self):
    config = super(ValidatedAttentionLayer, self).get_config()
    config['call_list'] = []
    return config


# This decorator runs the test in V1, V2-Eager, and V2-Functional mode. It
# guarantees forward compatibility of this code for the V2 switchover.
@keras_parameterized.run_all_keras_modes
class TransformerLayerTest(keras_parameterized.TestCase):

  def test_layer_creation(self):
    sequence_length = 21
    width = 80
    call_list = []
    attention_layer_cfg = {
        'num_heads': 10,
        'head_size': 8,
        'call_list': call_list,
    }
    test_layer = transformer_scaffold.TransformerScaffold(
        attention_cls=ValidatedAttentionLayer,
        attention_cfg=attention_layer_cfg,
        num_attention_heads=10,
        intermediate_size=2048,
        intermediate_activation='relu')

    # Create a 3-dimensional input (the first dimension is implicit).
    data_tensor = tf.keras.Input(shape=(sequence_length, width))
    output_tensor = test_layer(data_tensor)
    # The default output of a transformer layer should be the same as the input.
    self.assertEqual(data_tensor.shape.as_list(), output_tensor.shape.as_list())
    # If call_list[0] exists and is True, the passed layer class was
    # instantiated from the given config properly.
    self.assertNotEmpty(call_list)
    self.assertTrue(call_list[0], "The passed layer class wasn't instantiated.")

  def test_layer_creation_with_mask(self):
    sequence_length = 21
    width = 80
    call_list = []
    attention_layer_cfg = {
        'num_heads': 10,
        'head_size': 8,
        'call_list': call_list,
    }
    test_layer = transformer_scaffold.TransformerScaffold(
        attention_cls=ValidatedAttentionLayer,
        attention_cfg=attention_layer_cfg,
        num_attention_heads=10,
        intermediate_size=2048,
        intermediate_activation='relu')

    # Create a 3-dimensional input (the first dimension is implicit).
    data_tensor = tf.keras.Input(shape=(sequence_length, width))
    # Create a 2-dimensional input (the first dimension is implicit).
    mask_tensor = tf.keras.Input(shape=(sequence_length, sequence_length))
    output_tensor = test_layer([data_tensor, mask_tensor])
    # The default output of a transformer layer should be the same as the input.
    self.assertEqual(data_tensor.shape.as_list(), output_tensor.shape.as_list())
    # If call_list[0] exists and is True, the passed layer class was
    # instantiated from the given config properly.
    self.assertNotEmpty(call_list)
    self.assertTrue(call_list[0], "The passed layer class wasn't instantiated.")

  def test_layer_creation_with_incorrect_mask_fails(self):
    sequence_length = 21
    width = 80
    call_list = []
    attention_layer_cfg = {
        'num_heads': 10,
        'head_size': 8,
        'call_list': call_list,
    }
    test_layer = transformer_scaffold.TransformerScaffold(
        attention_cls=ValidatedAttentionLayer,
        attention_cfg=attention_layer_cfg,
        num_attention_heads=10,
        intermediate_size=2048,
        intermediate_activation='relu')

    # Create a 3-dimensional input (the first dimension is implicit).
    data_tensor = tf.keras.Input(shape=(sequence_length, width))
    # Create a 2-dimensional input (the first dimension is implicit).
    mask_tensor = tf.keras.Input(shape=(sequence_length, sequence_length - 3))
    with self.assertRaisesRegex(ValueError, 'When passing a mask tensor.*'):
      _ = test_layer([data_tensor, mask_tensor])

  def test_layer_invocation(self):
    sequence_length = 21
    width = 80
    call_list = []
    attention_layer_cfg = {
        'num_heads': 10,
        'head_size': 8,
        'call_list': call_list,
    }
    test_layer = transformer_scaffold.TransformerScaffold(
        attention_cls=ValidatedAttentionLayer,
        attention_cfg=attention_layer_cfg,
        num_attention_heads=10,
        intermediate_size=2048,
        intermediate_activation='relu')

    # Create a 3-dimensional input (the first dimension is implicit).
    data_tensor = tf.keras.Input(shape=(sequence_length, width))
    output_tensor = test_layer(data_tensor)

    # Create a model from the test layer.
    model = tf.keras.Model(data_tensor, output_tensor)

    # Invoke the model on test data. We can't validate the output data itself
    # (the NN is too complex) but this will rule out structural runtime errors.
    batch_size = 6
    input_data = 10 * np.random.random_sample(
        (batch_size, sequence_length, width))
    _ = model.predict(input_data)
    # If call_list[0] exists and is True, the passed layer class was
    # instantiated from the given config properly.
    self.assertNotEmpty(call_list)
    self.assertTrue(call_list[0], "The passed layer class wasn't instantiated.")

  def test_layer_invocation_with_mask(self):
    sequence_length = 21
    width = 80
    call_list = []
    attention_layer_cfg = {
        'num_heads': 10,
        'head_size': 8,
        'call_list': call_list,
    }
    test_layer = transformer_scaffold.TransformerScaffold(
        attention_cls=ValidatedAttentionLayer,
        attention_cfg=attention_layer_cfg,
        num_attention_heads=10,
        intermediate_size=2048,
        intermediate_activation='relu')

    # Create a 3-dimensional input (the first dimension is implicit).
    data_tensor = tf.keras.Input(shape=(sequence_length, width))
    # Create a 2-dimensional input (the first dimension is implicit).
    mask_tensor = tf.keras.Input(shape=(sequence_length, sequence_length))
    output_tensor = test_layer([data_tensor, mask_tensor])

    # Create a model from the test layer.
    model = tf.keras.Model([data_tensor, mask_tensor], output_tensor)

    # Invoke the model on test data. We can't validate the output data itself
    # (the NN is too complex) but this will rule out structural runtime errors.
    batch_size = 6
    input_data = 10 * np.random.random_sample(
        (batch_size, sequence_length, width))
    # The attention mask should be of shape (batch, from_seq_len, to_seq_len),
    # which here is (batch, sequence_length, sequence_length)
    mask_data = np.random.randint(
        2, size=(batch_size, sequence_length, sequence_length))
    _ = model.predict([input_data, mask_data])
    # If call_list[0] exists and is True, the passed layer class was
    # instantiated from the given config properly.
    self.assertNotEmpty(call_list)
    self.assertTrue(call_list[0], "The passed layer class wasn't instantiated.")

  def test_layer_invocation_with_float16_dtype(self):
    sequence_length = 21
    width = 80
    call_list = []
    attention_layer_cfg = {
        'num_heads': 10,
        'head_size': 8,
        'call_list': call_list,
    }
    test_layer = transformer_scaffold.TransformerScaffold(
        attention_cls=ValidatedAttentionLayer,
        attention_cfg=attention_layer_cfg,
        num_attention_heads=10,
        intermediate_size=2048,
        intermediate_activation='relu',
        dtype='float16')

    # Create a 3-dimensional input (the first dimension is implicit).
    data_tensor = tf.keras.Input(
        shape=(sequence_length, width), dtype=tf.float16)
    # Create a 2-dimensional input (the first dimension is implicit).
    mask_tensor = tf.keras.Input(shape=(sequence_length, sequence_length))
    output_tensor = test_layer([data_tensor, mask_tensor])

    # Create a model from the test layer.
    model = tf.keras.Model([data_tensor, mask_tensor], output_tensor)

    # Invoke the model on test data. We can't validate the output data itself
    # (the NN is too complex) but this will rule out structural runtime errors.
    batch_size = 6
    input_data = (10 * np.random.random_sample(
        (batch_size, sequence_length, width))).astype(np.float16)
    # The attention mask should be of shape (batch, from_seq_len, to_seq_len),
    # which here is (batch, sequence_length, sequence_length)
    mask_data = np.random.randint(
        2, size=(batch_size, sequence_length, sequence_length))
    _ = model.predict([input_data, mask_data])
    # If call_list[0] exists and is True, the passed layer class was
    # instantiated from the given config properly.
    self.assertNotEmpty(call_list)
    self.assertTrue(call_list[0], "The passed layer class wasn't instantiated.")

  def test_transform_with_initializer(self):
    sequence_length = 21
    width = 80
    call_list = []
    attention_layer_cfg = {
        'num_heads': 10,
        'head_size': 8,
        'call_list': call_list,
    }
    test_layer = transformer_scaffold.TransformerScaffold(
        attention_cls=ValidatedAttentionLayer,
        attention_cfg=attention_layer_cfg,
        num_attention_heads=10,
        intermediate_size=2048,
        intermediate_activation='relu',
        kernel_initializer=tf.keras.initializers.TruncatedNormal(stddev=0.02))

    # Create a 3-dimensional input (the first dimension is implicit).
    data_tensor = tf.keras.Input(shape=(sequence_length, width))
    output = test_layer(data_tensor)
    # The default output of a transformer layer should be the same as the input.
    self.assertEqual(data_tensor.shape.as_list(), output.shape.as_list())
    # If call_list[0] exists and is True, the passed layer class was
    # instantiated from the given config properly.
    self.assertNotEmpty(call_list)
    self.assertTrue(call_list[0])

  def test_layer_restoration_from_config(self):
    sequence_length = 21
    width = 80
    call_list = []
    attention_layer_cfg = {
        'num_heads': 10,
        'head_size': 8,
        'call_list': call_list,
        'name': 'test_layer',
    }
    test_layer = transformer_scaffold.TransformerScaffold(
        attention_cls=ValidatedAttentionLayer,
        attention_cfg=attention_layer_cfg,
        num_attention_heads=10,
        intermediate_size=2048,
        intermediate_activation='relu')

    # Create a 3-dimensional input (the first dimension is implicit).
    data_tensor = tf.keras.Input(shape=(sequence_length, width))
    # Create a 2-dimensional input (the first dimension is implicit).
    mask_tensor = tf.keras.Input(shape=(sequence_length, sequence_length))
    output_tensor = test_layer([data_tensor, mask_tensor])

    # Create a model from the test layer.
    model = tf.keras.Model([data_tensor, mask_tensor], output_tensor)

    # Invoke the model on test data. We can't validate the output data itself
    # (the NN is too complex) but this will rule out structural runtime errors.
    batch_size = 6
    input_data = 10 * np.random.random_sample(
        (batch_size, sequence_length, width))
    # The attention mask should be of shape (batch, from_seq_len, to_seq_len),
    # which here is (batch, sequence_length, sequence_length)
    mask_data = np.random.randint(
        2, size=(batch_size, sequence_length, sequence_length))
    pre_serialization_output = model.predict([input_data, mask_data])

    # Serialize the model config. Pass the serialized data through json to
    # ensure that we can serialize this layer to disk.
    serialized_data = json.dumps(model.get_config())
    post_string_serialized_data = json.loads(serialized_data)

    # Create a new model from the old config, and copy the weights. These models
    # should have identical outputs.
    new_model = tf.keras.Model.from_config(post_string_serialized_data)
    new_model.set_weights(model.get_weights())
    output = new_model.predict([input_data, mask_data])

    self.assertAllClose(pre_serialization_output, output)

    # If the layer was configured correctly, it should have a list attribute
    # (since it should have the custom class and config passed to it).
    new_model.summary()
    new_call_list = new_model.get_layer(
        name='transformer_scaffold')._attention_layer.list
    self.assertNotEmpty(new_call_list)
    self.assertTrue(new_call_list[0],
                    "The passed layer class wasn't instantiated.")


if __name__ == '__main__':...
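For readers who want to try the layer outside of the test harness, here is a minimal sketch of the same pattern the tests above exercise: plugging an attention class into TransformerScaffold and running a forward pass. It uses only the constructor arguments and tensor shapes that appear in the tests, assumes the official.nlp.modeling.layers modules are importable, and is an illustration rather than part of the original test file.

import numpy as np
import tensorflow as tf
from official.nlp.modeling.layers import attention
from official.nlp.modeling.layers import transformer_scaffold

seq_len, width = 21, 80
# Plug a (custom or stock) attention class into the scaffold, mirroring the tests above.
scaffold = transformer_scaffold.TransformerScaffold(
    attention_cls=attention.Attention,
    attention_cfg={'num_heads': 10, 'head_size': 8},
    num_attention_heads=10,
    intermediate_size=2048,
    intermediate_activation='relu')

inputs = tf.keras.Input(shape=(seq_len, width))
mask = tf.keras.Input(shape=(seq_len, seq_len))
outputs = scaffold([inputs, mask])   # output shape matches the input shape
model = tf.keras.Model([inputs, mask], outputs)
model.predict([np.zeros((2, seq_len, width)),
               np.ones((2, seq_len, seq_len))])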
defer_test.py
Source:defer_test.py
#!/usr/bin/python2.4
# Copyright 2009, Google Inc.
# All rights reserved.
#
# Redistribution and use in source and binary forms, with or without
# modification, are permitted provided that the following conditions are
# met:
#
#     * Redistributions of source code must retain the above copyright
# notice, this list of conditions and the following disclaimer.
#     * Redistributions in binary form must reproduce the above
# copyright notice, this list of conditions and the following disclaimer
# in the documentation and/or other materials provided with the
# distribution.
#     * Neither the name of Google Inc. nor the names of its
# contributors may be used to endorse or promote products derived from
# this software without specific prior written permission.
#
# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
# "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
# LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
# A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
# OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
# SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
# LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
# DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
# THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
# (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
# OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.

"""Test for defer.  These are SMALL tests."""

import unittest
import SCons.Errors
import TestFramework

#------------------------------------------------------------------------------

class DeferTests(unittest.TestCase):
  """Tests for defer module."""

  def setUp(self):
    """Per-test setup."""
    self.call_list = []
    self.env = self.root_env.Clone()

  def testSimpleDefer(self):
    """Simple defer, passing function pointers."""
    def Sub1(env):
      self.call_list.append(1)
      # Somewhat counter-intuitively, defer does NOT make a copy of the
      # environment, so VAR1 will actually be 'cherry' here.  Should we change
      # this?
      self.assertEqual(env['VAR1'], 'cherry')
    def Sub2(env):
      env = env
      self.call_list.append(2)
    def Sub3(env):
      env = env
      self.call_list.append(3)
    env = self.env
    env['VAR1'] = 'apple'
    env.Defer(Sub1)
    env['VAR1'] = 'cherry'
    env.Defer(Sub2, after=Sub1)
    env.Defer(Sub3)
    # Now add relationships between Sub3 and other methods.  Note that while
    # after can refer to a function directly or by name, the function we're
    # deferring needs to be referenced by name, since otherwise it'll add
    # another instance of that function to the list.
    env.Defer('Sub3', after=Sub1)
    env.Defer('Sub2', after='Sub3')
    # Functions are not called until ExecuteDefer()
    self.assertEqual(self.call_list, [])
    env.ExecuteDefer()
    self.assertEqual(self.call_list, [1, 3, 2])
    # Calling ExecuteDefer() again won't do anything, since the previous call
    # consumed the deferred functions.
    env.ExecuteDefer()
    self.assertEqual(self.call_list, [1, 3, 2])

  def testDeferGroups(self):
    """Test defer groups."""
    def Sub1a(env):
      env = env
      self.call_list.append(1)
    def Sub1b(env):
      env = env
      # Append the same thing as Sub1a; order within defer groups is not defined.
      self.call_list.append(1)
    def Sub2(env):
      env = env
      self.call_list.append(2)
    def Sub3(env):
      env = env
      self.call_list.append(3)
    def Sub4(env):
      env = env
      self.call_list.append(4)
    def Sub5(env):
      env = env
      self.call_list.append(5)
    def Sub6(env):
      env = env
      self.call_list.append(6)
    env = self.env
    # Note that we can set up the relationships by name before any functions
    # are actually deferred.  Also note that each function is implicitly in a
    # group with its function name.
    env.Defer('GroupA', after='Sub4')
    # Can defer after multiple groups/functions, by either name or reference.
    # Note that Sub6 is not actually deferred; just passing it in after, or by
    # name only, doesn't cause it to be called.
    env.Defer('Sub6')
    env.Defer('GroupB', after=['GroupC', Sub5, Sub6])
    env.Defer(Sub1a, 'GroupC', after='GroupA')
    env.Defer(Sub1b, 'GroupC')
    env.Defer(Sub2, 'GroupA')
    env.Defer(Sub3, 'GroupB')
    env.Defer(Sub4)
    env.Defer(Sub5)
    env.ExecuteDefer()
    self.assertEqual(self.call_list, [4, 5, 2, 1, 1, 3])

  def testDeferNotString(self):
    """Test attempts to defer after things that aren't strings or functions."""
    env = self.env
    # Can only defer after strings and functions
    self.assertRaises(ValueError, env.Defer, 'GroupB', after=42)

  def testDeferInheritance(self):
    """Test defer inheritance."""
    def Sub1(env):
      env = env
      self.call_list.append(1)
    def Sub2(env):
      env = env
      self.call_list.append(2)
    def Sub3(env):
      env = env
      self.call_list.append(3)
    env = self.env
    env.Defer(Sub1)
    # Permitted (but not required) to forward-declare defer-after relationships
    env.Defer('GroupA', after=Sub1)
    env_child1 = env.Clone()
    env_child1.Defer('GroupA', Sub2)
    env_child2 = env.Clone()
    env_child2.Defer('GroupA', Sub3)
    env.ExecuteDefer()              # Should execute Sub1 but not Sub2
    self.assertEqual(self.call_list, [1])
    self.call_list = []
    env_child1.ExecuteDefer()       # Should execute Sub1 (again) and Sub2
    self.assertEqual(self.call_list, [1, 2])
    self.call_list = []
    env_child2.ExecuteDefer()       # Should execute Sub1 (again) and Sub3
    self.assertEqual(self.call_list, [1, 3])

  def testDeferRoot(self):
    """Test defer root."""
    # If SetDeferRoot() is used, deferrals are executed by the root's
    # ExecuteDefer().  Deferrals inherited from parent environments are brought
    # down to the new root environment.  Subsequent deferrals from children of
    # the root keep their environments as specified by Defer().
    def Sub1(env):
      self.call_list.append(1)
      # Defer from child of root is passed the child environment
      self.assertEqual(env['VAR1'], 'child')
    def Sub2(env):
      self.call_list.append(2)
      # Defer from parent of root is passed the root environment
      self.assertEqual(env['VAR2'], 'defer_root')
    env_parent = self.env.Clone(VAR2='parent')
    env_parent.Defer(Sub2)
    env_defer_root = env_parent.Clone(VAR2='defer_root')
    env_defer_root.SetDeferRoot()
    env_child = env_defer_root.Clone(VAR1='child')
    env_child.Defer(Sub1, after=Sub2)
    # Since child is now the root, calling ExecuteDefer() from one of its
    # children does nothing.
    env_child.ExecuteDefer()
    self.assertEqual(self.call_list, [])
    env_defer_root.ExecuteDefer()       # Should run Sub1
    self.assertEqual(self.call_list, [2, 1])
    # Environments above the one calling GetDeferRoot() keep their own roots.
    self.assertEqual(env_parent.GetDeferRoot(), env_parent)
    # Environments at or beneath should see the context for GetDeferRoot().
    self.assertEqual(env_defer_root.GetDeferRoot(), env_defer_root)
    self.assertEqual(env_child.GetDeferRoot(), env_defer_root)

  def testDeferReentrancy(self):
    """Test re-entrant calls to ExecuteDefer()."""
    def Sub1(env):
      env.ExecuteDefer()
    env = self.env
    env.Defer(Sub1)
    self.assertRaises(SCons.Errors.UserError, env.ExecuteDefer)

  def testDeferNested(self):
    """Test nested calls to ExecuteDefer()."""
    def Sub1(env):
      env = env
      self.call_list.append(1)
    def Sub2(env):
      env = env
      self.call_list.append(2)
      env.Defer(Sub1)
    def Sub3(env):
      env = env
      self.call_list.append(3)
    env = self.env
    env.Defer(Sub2)
    env.Defer(Sub3, after=Sub2)
    # Make sure PrintDefer() at least doesn't crash.
    env.PrintDefer()
    env.ExecuteDefer()
    self.assertEqual(self.call_list, [2, 1, 3])

#------------------------------------------------------------------------------

def TestSConstruct(scons_globals):
  """Test SConstruct file.
  Args:
    scons_globals: Global variables dict from the SConscript file.
  """
  # Get globals from SCons
  Environment = scons_globals['Environment']
  env = Environment(tools=['environment_tools', 'defer'])
  # Run unit tests
  TestFramework.RunUnitTests(DeferTests, root_env=env)

def main():
  test = TestFramework.TestFramework()
  test.subdir('defer')
  base = 'defer/'
  test.WriteSConscript(base + 'SConstruct', TestSConstruct)
  test.run(chdir=base, stderr=None)
  test.pass_test()

if __name__ == '__main__':...
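The defer tool itself is not shown on this page. As a rough mental model of the ordering behavior these tests check (callables registered under names or groups, "after" constraints recorded up front, and each deferred function executed exactly once), here is a small self-contained sketch. The MiniDefer class and its methods are hypothetical illustrations and are not the SCons tool's actual API.

class MiniDefer(object):
  """Hypothetical illustration of the deferral pattern, not the real tool."""

  def __init__(self):
    self.groups = {}   # group/function name -> list of deferred callables
    self.after = {}    # group/function name -> set of names it must run after

  def defer(self, name, func=None, after=()):
    # Registering a name without a callable only sets up ordering; nothing
    # runs for that name unless a callable is eventually attached to it.
    self.groups.setdefault(name, [])
    if func is not None:
      self.groups[name].append(func)
    self.after.setdefault(name, set()).update(
        a if isinstance(a, str) else a.__name__ for a in after)

  def execute(self):
    done = set()
    def run(name):
      if name in done or name not in self.groups:
        return
      done.add(name)
      for dep in self.after.get(name, ()):
        run(dep)                      # run prerequisites first
      for func in self.groups.pop(name):
        func()                        # each deferred callable runs exactly once
    for name in list(self.groups):
      run(name)

# Mirrors the ordering in testSimpleDefer: Sub2 runs after Sub3, which runs after Sub1.
calls = []
d = MiniDefer()
d.defer('Sub1', lambda: calls.append(1))
d.defer('Sub2', lambda: calls.append(2), after=['Sub3'])
d.defer('Sub3', lambda: calls.append(3), after=['Sub1'])
d.execute()
assert calls == [1, 3, 2]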
update_call_list.py
Source:update_call_list.py
from django.db.models import get_model
from config.celery import app
from edc.constants import NEW, CLOSED, OPEN


@app.task
def update_call_list(label, verbose=True):
    """Adds information from SubjectConsent instances from the specified survey to the
    CallList model for the current survey.
    If there is no SubjectLocator or subject_locator.may_follow='No', the subject is not added
    to the call list.
    If needed, the household member for the next survey will be created.
    See (HouseholdStructure manager method add_household_members_from_survey).
    """
    CallList = get_model('mpepu_maternal', 'CallList')
    MaternalConsent = get_model('mpepu_maternal', 'MaternalConsent')
    MaternalLocator = get_model('mpepu_maternal', 'MaternalLocator')
    options = {}
    n = 0
    total = MaternalConsent.objects.all().count()
    print 'Pulled {} consents.'.format(total)...
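A minimal usage sketch for the Celery task above, assuming a configured broker and a running worker registered with config.celery.app; the import path and the survey label below are hypothetical.

# Hypothetical import path for the task defined above.
from mpepu_maternal.tasks import update_call_list

# Queue the task asynchronously via Celery's standard delay() shortcut ...
update_call_list.delay('mpepu-survey-1')
# ... or call it synchronously in-process while debugging.
update_call_list('mpepu-survey-1', verbose=True)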