How to use check_attributes method in localstack

Best Python code snippets using localstack_python, showing how a check_attributes helper is used across several open-source test suites.

test_datasets.py

Source: test_datasets.py (GitHub)


...
            'test': (test_examples, num_features),
        }
    )

    check_attributes(
        data_dir, 'adult', {
            'task': 'classification',
            'num_classes': 2,
        }
    )

    validate_dataset_file(data_dir / 'adult.npz')


@pytest.mark.dataset
def test_amazon(data_dir):
    num_features = 9
    train_examples = 26_215
    test_examples = 6_554

    check_split_sizes(
        data_dir, 'amazon', {
            'train': (train_examples, num_features),
            'test': (test_examples, num_features),
        }
    )

    check_attributes(
        data_dir, 'amazon', {
            'task': 'classification',
            'num_classes': 2,
        }
    )

    validate_dataset_file(data_dir / 'amazon.npz')


@pytest.mark.dataset
def test_arcene(data_dir):
    train_test_shape = (100, 10_000)

    check_split_sizes(
        data_dir, 'arcene', {
            'train': train_test_shape,
            'test': train_test_shape,  # arcene's "official" validation set used as test set
        }
    )

    check_attributes(
        data_dir, 'arcene', {
            'task': 'classification',
            'num_classes': 2,
        }
    )

    validate_dataset_file(data_dir / 'arcene.npz')


@pytest.mark.dataset
@pytest.mark.large
def test_cifar10(data_dir):
    num_features = 32 * 32 * 3

    check_split_sizes(
        data_dir, 'cifar10', {
            'train': (50_000, num_features),
            'test': (10_000, num_features),
        }
    )

    check_attributes(
        data_dir, 'cifar10', {
            'task': 'classification',
            'num_classes': 10,
        }
    )

    validate_dataset_file(data_dir / 'cifar10.npz')


@pytest.mark.dataset
def test_covertype(data_dir):
    num_features = 54

    check_split_sizes(
        data_dir, 'covertype', {
            'train': (11_340, num_features),
            'valid': (3_780, num_features),
            'test': (565_892, num_features),
        }
    )

    check_attributes(
        data_dir, 'covertype', {
            'task': 'classification',
            'num_classes': 7,
        }
    )

    validate_dataset_file(data_dir / 'covertype.npz')


@pytest.mark.dataset
@pytest.mark.large
def test_duolingo_original(data_dir):
    num_features = 10

    check_split_sizes(
        data_dir, 'duolingo-original', {
            'train': (10_275_881, num_features),
            'test': (2_578_345, num_features),
        }
    )

    check_attributes(
        data_dir, 'duolingo-original', {
            'task': 'regression',
        }
    )

    validate_dataset_file(data_dir / 'duolingo-original.npz')


@pytest.mark.dataset
@pytest.mark.large
def test_duolingo_categorical(data_dir):
    num_features = 10

    check_split_sizes(
        data_dir, 'duolingo-categorical', {
            'train': (10_275_881, num_features),
            'test': (2_578_345, num_features),
        }
    )

    check_attributes(
        data_dir, 'duolingo-categorical', {
            'task': 'regression',
        }
    )

    validate_dataset_file(data_dir / 'duolingo-categorical.npz')


@pytest.mark.dataset
@pytest.mark.large
def test_higgs(data_dir):
    num_features = 28

    check_split_sizes(
        data_dir, 'higgs', {
            'train': (10_500_000, num_features),
            'test': (500_000, num_features),
        }
    )

    check_attributes(
        data_dir, 'higgs', {
            'task': 'classification',
            'num_classes': 2,
        }
    )

    validate_dataset_file(data_dir / 'higgs.npz')


@pytest.mark.dataset
def test_musk(data_dir):
    num_features = 166
    train_examples = 5548
    test_examples = 1050

    check_split_sizes(
        data_dir, 'musk', {
            'train': (train_examples, num_features),
            'test': (test_examples, num_features),
        },
    )

    check_attributes(
        data_dir, 'musk', {
            'task': 'classification',
            'num_outputs': 1,
            'num_classes': 2,
        }
    )

    validate_dataset_file(data_dir / 'musk.npz')


@pytest.mark.dataset
def test_parkinsons(data_dir):
    num_features = 16
    num_outputs = 2
    train_examples = 4646
    test_examples = 1229

    check_split_sizes(
        data_dir, 'parkinsons', {
            'train': (train_examples, num_features, num_outputs),
            'test': (test_examples, num_features, num_outputs),
        },
    )

    check_attributes(
        data_dir, 'parkinsons', {
            'task': 'regression',
            'num_outputs': 2,
        }
    )

    validate_dataset_file(data_dir / 'parkinsons.npz')


@pytest.mark.dataset
def test_poker(data_dir):
    num_features = 10

    check_split_sizes(
        data_dir, 'poker', {
            'train': (25_010, num_features),
            'test': (1_000_000, num_features),
        }
    )

    check_attributes(
        data_dir, 'poker', {
            'task': 'classification',
            'num_classes': 10,
        }
    )

    validate_dataset_file(data_dir / 'poker.npz')


@pytest.mark.dataset
def test_rossman(data_dir):
    num_features = 18
    train_examples = 814_688
    test_examples = 202_521

    check_split_sizes(
        data_dir, 'rossman', {
            'train': (train_examples, num_features),
            'test': (test_examples, num_features),
        }
    )

    check_attributes(
        data_dir, 'rossman', {
            'task': 'regression',
        }
    )
...
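In this file, check_split_sizes, check_attributes and validate_dataset_file are imported test helpers whose bodies are not part of the snippet. Purely for illustration, here is a minimal sketch of what such helpers could look like, assuming each dataset ships a hypothetical <name>.json metadata file next to its <name>.npz archive and that the archive stores one array per split named X_train, X_valid, X_test:

import json
from pathlib import Path

import numpy as np


def check_attributes(data_dir: Path, name: str, expected: dict) -> None:
    # Hypothetical helper (the real one is not shown in the snippet).
    # Assumes dataset metadata is stored in '<name>.json' next to the archive.
    info = json.loads((data_dir / f'{name}.json').read_text())
    for key, value in expected.items():
        assert info[key] == value, f'{name}: {key} is {info[key]!r}, expected {value!r}'


def check_split_sizes(data_dir: Path, name: str, expected: dict) -> None:
    # Hypothetical helper. Assumes the .npz archive stores one array per split.
    with np.load(data_dir / f'{name}.npz', allow_pickle=False) as data:
        for split, shape in expected.items():
            assert data[f'X_{split}'].shape == tuple(shape)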


test_models.py

Source: test_models.py (GitHub)


...
nd = 200
X = np.random.uniform(low=0, high=30, size=(nx, nd)).astype(np.float32)
sf = np.ones(nx)
cond = np.random.randint(3, size=nx).astype(np.float32)


def check_attributes(model, ce=False, cd=False, c=False, sf=False):
    assert model._conditional_encoder() == ce
    assert model._conditional_decoder() == cd
    assert model._use_conditions() == c
    assert model._use_sf() == sf


def test_autoencoder():
    lat_dim = 18
    ae = Autoencoder(x_dim=X.shape[1], latent_dim=lat_dim)
    ae.compile(optimizer="adam", loss="mse", run_eagerly=False)
    check_attributes(ae)
    ae.fit(X, batch_size=50, epochs=1)
    lat = ae.transform(X)
    assert X.shape[0] == lat.shape[0]
    assert lat.shape[1] == lat_dim
    rec = ae.predict(X)
    assert rec.shape == X.shape


def test_conditional_autoencoder():
    ae = Autoencoder(x_dim=X.shape[1], conditional="all")
    ae.compile(optimizer="adam", loss="mse", run_eagerly=True)
    check_attributes(ae, ce=True, cd=True, c=True)
    ae.fit([X, cond], batch_size=50, epochs=1)
    lat = ae.transform([X, cond])
    assert X.shape[0] == lat.shape[0]
    rec = ae.predict([X, cond])
    assert rec.shape == X.shape
    ae = Autoencoder(x_dim=X.shape[1], conditional="first")
    ae.compile(optimizer="adam", loss="mse", run_eagerly=False)
    check_attributes(ae, ce=True, cd=True, c=True)
    ae.fit([X, cond], batch_size=50, epochs=1)
    lat = ae.transform([X, cond])
    assert X.shape[0] == lat.shape[0]
    rec = ae.predict([X, cond])
    assert rec.shape == X.shape


def test_poisson_autoencoder():
    ae = PoissonAutoencoder(x_dim=X.shape[1])
    ae.compile(optimizer="adam", loss="mse", run_eagerly=False)
    check_attributes(ae, sf=True)
    ae.fit([X, sf], batch_size=50, epochs=1)
    lat = ae.transform(X)
    assert X.shape[0] == lat.shape[0]
    rec = ae.predict([X, sf])
    assert rec.shape == X.shape


def test_nb_autoencoder():
    ae = NegativeBinomialAutoencoder(x_dim=X.shape[1], dispersion="gene")
    ae.compile(optimizer="adam", run_eagerly=False)
    check_attributes(ae, sf=True)
    ae.fit([X, sf], batch_size=50, epochs=1)
    lat = ae.transform(X)
    assert X.shape[0] == lat.shape[0]
    rec = ae.predict([X, sf])
    assert rec.shape == X.shape


def test_conditional_zinb_autoencoder():
    ae = NegativeBinomialAutoencoder(
        x_dim=X.shape[1], dispersion="constant", conditional="all"
    )
    ae.compile(optimizer="adam", run_eagerly=True)
    check_attributes(ae, ce=True, cd=True, c=True, sf=True)
    ae.fit([X, cond, sf], batch_size=50, epochs=1)
    lat = ae.transform([X, cond])
    assert X.shape[0] == lat.shape[0]
    rec = ae.predict([X, cond, sf])
    assert rec.shape == X.shape


def test_topological_autoencoder():
    ae = TopologicalAutoencoder(x_dim=X.shape[1])
    ae.compile(optimizer="adam", run_eagerly=True)
    check_attributes(ae)
    ae.fit(X, batch_size=50, epochs=1)
    lat = ae.transform(X)
    assert X.shape[0] == lat.shape[0]
    rec = ae.predict(X)
    assert rec.shape == X.shape


def test_variational_autoencoder():
    ae = VariationalAutoencoder(x_dim=X.shape[1])
    ae.compile(optimizer="adam", run_eagerly=False)
    check_attributes(ae)
    ae.fit(X, batch_size=50, epochs=1)
    lat = ae.transform(X)
    assert X.shape[0] == lat.shape[0]
    rec = ae.predict(X)
    assert rec.shape == X.shape
    ae = VariationalAutoencoder(x_dim=X.shape[1], latent_dist="multivariate")
    ae.compile(optimizer="adam", run_eagerly=False)
    check_attributes(ae)
    ae.fit(X, batch_size=50, epochs=1)
    lat = ae.transform(X)
    assert X.shape[0] == lat.shape[0]
    rec = ae.predict(X)
    assert rec.shape == X.shape
    ae = VariationalAutoencoder(x_dim=X.shape[1], prior="iaf", iaf_units=[128, 128])
    ae.compile(optimizer="adam", run_eagerly=False)
    check_attributes(ae)
    ae.fit(X, batch_size=50, epochs=1)
    lat = ae.transform(X)
    assert X.shape[0] == lat.shape[0]
    rec = ae.predict(X)
    assert rec.shape == X.shape
    ae = VariationalAutoencoder(x_dim=X.shape[1], prior="vamp", n_pseudoinputs=30)
    ae.compile(optimizer="adam", run_eagerly=False)
    check_attributes(ae)
    ae.fit(X, batch_size=50, epochs=1)
    lat = ae.transform(X)
    assert X.shape[0] == lat.shape[0]
    rec = ae.predict(X)
    assert rec.shape == X.shape


def test_conditional_variational_autoencoder():
    ae = VariationalAutoencoder(x_dim=X.shape[1], conditional="all")
    ae.compile(optimizer="adam", run_eagerly=True)
    check_attributes(ae, ce=True, cd=True, c=True)
    ae.fit([X, cond], batch_size=50, epochs=1)
    lat = ae.transform([X, cond])
    assert X.shape[0] == lat.shape[0]
    rec = ae.predict([X, cond])
    assert rec.shape == X.shape


def test_negative_binomial_variational_autoencoder():
    ae = NegativeBinomialVAE(x_dim=X.shape[1], dispersion="cell-gene")
    ae.compile(optimizer="adam", run_eagerly=False)
    check_attributes(ae, sf=True)
    ae.fit([X, sf], batch_size=50, epochs=1)
    lat = ae.transform(X)
    assert X.shape[0] == lat.shape[0]
    rec = ae.predict([X, sf])
...
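Unlike the dataset tests above, this module defines check_attributes locally: it asserts four boolean capability flags (conditional encoder, conditional decoder, use of conditions, use of size factors) on an autoencoder model before training. The same pattern can be exercised without the real Autoencoder classes; the DummyModel below is a hypothetical stand-in used only to show how the flags drive the assertions:

class DummyModel:
    # Hypothetical stand-in for the Autoencoder classes used in the snippet.
    def __init__(self, conditional=False, size_factors=False):
        self.conditional = conditional
        self.size_factors = size_factors

    def _conditional_encoder(self):
        return self.conditional

    def _conditional_decoder(self):
        return self.conditional

    def _use_conditions(self):
        return self.conditional

    def _use_sf(self):
        return self.size_factors


def check_attributes(model, ce=False, cd=False, c=False, sf=False):
    # Same assertions as the helper defined in the snippet above.
    assert model._conditional_encoder() == ce
    assert model._conditional_decoder() == cd
    assert model._use_conditions() == c
    assert model._use_sf() == sf


check_attributes(DummyModel())                                            # every flag defaults to False
check_attributes(DummyModel(conditional=True), ce=True, cd=True, c=True)  # conditional model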


user_config_reader.py

Source: user_config_reader.py (GitHub)


...
        empty_string_check(self._config_dict['@id'])

        # Query Generator
        empty_string_check(self._config_dict['queryGenerator']['@class'])
        check_attributes(self._config_dict['queryGenerator'])

        # Snippet Classifier
        empty_string_check(self._config_dict['textClassifiers']['snippetClassifier']['@class'])
        check_attributes(self._config_dict['textClassifiers']['snippetClassifier'])

        # Document Classifier
        empty_string_check(self._config_dict['textClassifiers']['documentClassifier']['@class'])
        check_attributes(self._config_dict['textClassifiers']['documentClassifier'])

        # Stopping Decision Maker
        empty_string_check(self._config_dict['stoppingDecisionMaker']['@class'])
        check_attributes(self._config_dict['stoppingDecisionMaker'])

        # Logger
        empty_string_check(self._config_dict['logger']['@class'])
        check_attributes(self._config_dict['logger'])

        # Search Context
        empty_string_check(self._config_dict['searchContext']['@class'])
        check_attributes(self._config_dict['searchContext'])

        # SERP Impression
        empty_string_check(self._config_dict['serpImpression']['@class'])
...
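Here check_attributes validates one component of the parsed user configuration after the corresponding @class string has been checked for emptiness. Its implementation is not part of the snippet; the sketch below is a hypothetical reconstruction that assumes each component may carry an attributes block whose attribute entries must provide non-empty '@name', '@type' and '@value' fields:

def check_attributes(component_config):
    # Hypothetical reconstruction; the real helper is defined elsewhere in the project.
    attributes = component_config.get('attributes')
    if attributes is None:
        return  # a component without extra attributes is acceptable

    entries = attributes.get('attribute', [])
    if isinstance(entries, dict):  # a single attribute is not wrapped in a list
        entries = [entries]

    for entry in entries:
        for field in ('@name', '@type', '@value'):
            if not str(entry.get(field, '')).strip():
                raise ValueError(f"Attribute entry is missing a value for '{field}'")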


validation_test.py

Source: validation_test.py (GitHub)


...
import re


class Test_check_attributes:
    def test_None_inputs(self):
        with pytest.raises(ValueError, match=none_arg_msg):
            check_attributes(None, None)
        with pytest.raises(ValueError, match=none_arg_msg):
            check_attributes([], None)

    def test_incompatible_dims(self):
        X = np.array([[1, 2, 3], [11, 21, 31]])
        with pytest.raises(
            ValueError,
            match=re.escape(
                f"Incompatible dimension for X and e matrices. X and e should have the same feature dimension: X.shape[0] = {X.shape[0]} while e.shape[0] = {X.T.shape[0]}."
            ),
        ):
            check_attributes(X, X.T)

    def test_invalid_iterations(self):
        X = np.array([[1, 2, 3], [None, 21, 31]])
        str_input = "cake"
        neg_input = -1
        with pytest.raises(
            ValueError,
            match=f"iterations has incorrect type or less than 2. iterations: {str_input}",
        ):
            check_attributes(X, X, str_input)
        with pytest.raises(
            ValueError,
            match=f"iterations has incorrect type or less than 2. iterations: {neg_input}",
        ):
            check_attributes(X, X, neg_input)

    def test_invalid_n_jobs(self):
        X = np.array([[1, 2, 3], [11, 21, 31]])
        str_input = "cake"
        neg_input = -1
        with pytest.raises(
            ValueError,
            match=f"n_jobs is incorrect type or less than 1. n_jobs: {str_input}",
        ):
            check_attributes(X, X, n_jobs=str_input)
        with pytest.raises(
            ValueError,
            match=f"n_jobs is incorrect type or less than 1. n_jobs: {neg_input}",
        ):
...
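These tests pin down the error contract of check_attributes(X, e, iterations, n_jobs) without showing its body. A hypothetical implementation that would satisfy exactly these assertions is sketched below; none_arg_msg is assumed to be the error-message constant the test module imports, and the packaged function may differ in detail:

import numpy as np

# Assumed error-message constant; in the real test module it is imported
# alongside check_attributes.
none_arg_msg = "X and e cannot be None or empty"


def check_attributes(X, e, iterations=10, n_jobs=1):
    # Hypothetical implementation reconstructed from the assertions above.
    if X is None or e is None or len(X) == 0 or len(e) == 0:
        raise ValueError(none_arg_msg)

    X, e = np.asarray(X), np.asarray(e)
    if X.shape[0] != e.shape[0]:
        raise ValueError(
            "Incompatible dimension for X and e matrices. X and e should have "
            f"the same feature dimension: X.shape[0] = {X.shape[0]} while "
            f"e.shape[0] = {e.shape[0]}."
        )

    if not isinstance(iterations, int) or iterations < 2:
        raise ValueError(
            f"iterations has incorrect type or less than 2. iterations: {iterations}"
        )

    if not isinstance(n_jobs, int) or n_jobs < 1:
        raise ValueError(f"n_jobs is incorrect type or less than 1. n_jobs: {n_jobs}")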


Automation Testing Tutorials

Learn to execute automation testing from scratch with the LambdaTest Learning Hub. It covers everything from setting up the prerequisites and running your first automation test to following best practices and diving into advanced test scenarios. The Learning Hub compiles step-by-step guides to help you become proficient with different test automation frameworks such as Selenium, Cypress, and TestNG.

LambdaTest Learning Hubs:

YouTube

You can also refer to the video tutorials on the LambdaTest YouTube channel for step-by-step demonstrations from industry experts.

Run localstack automation tests on LambdaTest cloud grid

Perform automation testing on 3000+ real desktop and mobile devices online.

Try LambdaTest now and get 100 minutes of automation testing for free.
