How to use the check_s3 method in localstack

Best Python code snippet using localstack_python

s3mysqldump_tests.py

Source: s3mysqldump_tests.py (GitHub)


...
        # I suspect it's better to do this in reverse order, though
        # it may not matter
        for obj, field, real_value in reversed(self._monkey_patches):
            setattr(obj, field, real_value)

    def check_s3(self, bucket, key, args):
        """Check that mysqldump was run with the given args, and the
        contents copied to S3.
        """
        assert_in(bucket, self.mock_s3_fs)
        assert_in(key, self.mock_s3_fs[bucket])
        contents = self.mock_s3_fs[bucket][key]
        # we run "echo" in place of mysqldump, so the key's contents
        # should just be the arguments we passed to echo
        assert_equal(contents.rstrip(), args)


class TestTablesAndDatabases(MockS3AndMysqldumpTestCase):
    """Tests of specifying databases and the %D and %T options"""

    def test_basic_case(self):
        s3mysqldump.main(['foo', 's3://walrus/foo.sql'])
        self.check_s3('walrus', 'foo.sql', '--tables -- foo')

    def test_percent_D(self):
        s3mysqldump.main(['foo', 's3://walrus/%D.sql'])
        self.check_s3('walrus', 'foo.sql', '--tables -- foo')

    def no_percent_T_without_tables(self):
        assert_raises(SystemExit,
                      s3mysqldump.main, ['foo', 's3://walrus/%D/%T.sql'])

    def test_one_table(self):
        s3mysqldump.main(['foo', 'bar', 's3://walrus/foo.sql'])
        self.check_s3('walrus', 'foo.sql', '--tables -- foo bar')

    def test_percent_T_on_one_table(self):
        s3mysqldump.main(['foo', 'bar', 's3://walrus/%T.sql'])
        self.check_s3('walrus', 'bar.sql', '--tables -- foo bar')

    def test_percent_D_and_T_on_one_table(self):
        s3mysqldump.main(['foo', 'bar', 's3://walrus/%D/%T.sql'])
        self.check_s3('walrus', 'foo/bar.sql', '--tables -- foo bar')

    def test_many_tables(self):
        s3mysqldump.main(['foo', 'bar', 'baz', 'qux',
                          's3://walrus/foo.sql'])
        self.check_s3('walrus', 'foo.sql', '--tables -- foo bar baz qux')

    def test_percent_T_on_many_tables(self):
        s3mysqldump.main(['foo', 'bar', 'baz', 'qux',
                          's3://walrus/%T.sql'])
        self.check_s3('walrus', 'bar.sql', '--tables -- foo bar')
        self.check_s3('walrus', 'baz.sql', '--tables -- foo baz')
        self.check_s3('walrus', 'qux.sql', '--tables -- foo qux')

    def test_percent_D_and_T_on_many_tables(self):
        s3mysqldump.main(['foo', 'bar', 'baz', 'qux',
                          's3://walrus/%D/%T.sql'])
        self.check_s3('walrus', 'foo/bar.sql', '--tables -- foo bar')
        self.check_s3('walrus', 'foo/baz.sql', '--tables -- foo baz')
        self.check_s3('walrus', 'foo/qux.sql', '--tables -- foo qux')

    def test_one_database(self):
        s3mysqldump.main(['-B', 'foo', 's3://walrus/foo.sql'])
        self.check_s3('walrus', 'foo.sql', '--databases -- foo')

    def test_percent_D_with_one_database(self):
        s3mysqldump.main(['-B', 'foo', 's3://walrus/%D.sql'])
        self.check_s3('walrus', 'foo.sql', '--databases -- foo')

    def test_no_percent_T_with_databases_mode(self):
        assert_raises(SystemExit,
                      s3mysqldump.main, ['-B', 'foo', 's3://walrus/%D/%T.sql'])

    def test_many_databases(self):
        s3mysqldump.main(['-B', 'foo1', 'foo2', 'foo3', 's3://walrus/foo.sql'])
        self.check_s3('walrus', 'foo.sql', '--databases -- foo1 foo2 foo3')

    def test_percent_D_with_many_databases(self):
        s3mysqldump.main(['-B', 'foo1', 'foo2', 'foo3', 's3://walrus/%D.sql'])
        self.check_s3('walrus', 'foo1.sql', '--databases -- foo1')
        self.check_s3('walrus', 'foo2.sql', '--databases -- foo2')
        self.check_s3('walrus', 'foo3.sql', '--databases -- foo3')

    def test_all_databases(self):
        s3mysqldump.main(['-A', 's3://walrus/dbs.sql'])
        self.check_s3('walrus', 'dbs.sql', '--all-databases')

    def test_no_names_with_all_databases(self):
        assert_raises(SystemExit,
                      s3mysqldump.main, ['-A', 'foo', 's3://walrus/foo.sql'])

    def test_no_percent_T_with_all_databases(self):
        assert_raises(SystemExit,
                      s3mysqldump.main, ['-A', 's3://walrus/%T.sql'])

    def test_no_percent_D_with_all_databases(self):
        assert_raises(SystemExit,
                      s3mysqldump.main, ['-A', 's3://walrus/%D.sql'])


class TestInterpolation(MockS3AndMysqldumpTestCase):

    @setup
    def set_now(self):
        self.set_now_to(datetime.datetime(2010, 6, 6, 4, 26))
        self.set_utcnow_to(datetime.datetime(2010, 6, 6, 11, 26))

    def test_date_interpolation(self):
        s3mysqldump.main(['foo', 's3://walrus/%Y/%m/%d/foo.sql'])
        self.check_s3('walrus', '2010/06/06/foo.sql', '--tables -- foo')

    def test_time_interpolation(self):
        s3mysqldump.main(['foo', 's3://walrus/%Y/%m/%d/%H:%M/foo.sql'])
        self.check_s3('walrus', '2010/06/06/04:26/foo.sql', '--tables -- foo')

    def test_utc(self):
        s3mysqldump.main(
            ['foo', '--utc', 's3://walrus/%Y/%m/%d/%H:%M/foo.sql'])
        self.check_s3('walrus', '2010/06/06/11:26/foo.sql', '--tables -- foo')

    def test_date_and_percent_D_and_T(self):
        s3mysqldump.main(['foo', 'bar', 'baz', 'qux',
                          's3://walrus/%Y/%m/%d/%D/%T.sql'])
        self.check_s3(
            'walrus', '2010/06/06/foo/bar.sql', '--tables -- foo bar')
        self.check_s3(
            'walrus', '2010/06/06/foo/baz.sql', '--tables -- foo baz')
        self.check_s3(
            'walrus', '2010/06/06/foo/qux.sql', '--tables -- foo qux')

    def test_percent_escaping(self):
        # %D, %T aren't allowed with -A, so check that we don't
        # interpret %%D and %%T as these fields
        s3mysqldump.main(['-A', 's3://walrus/%%Y%%m%%d/%Y/%m/%d/%%D%%T.sql'])
        self.check_s3(
            'walrus', '%Y%m%d/2010/06/06/%D%T.sql', '--all-databases')


class TestBotoConfig(MockS3AndMysqldumpTestCase):

    @setup
    def make_boto_cfg(self):
        _, self.boto_cfg = tempfile.mkstemp(prefix='boto.cfg')
        with open(self.boto_cfg, 'w') as f:
            f.write('[Credentials]\n')
            f.write('aws_access_key_id = 12345678910\n')
            f.write('aws_secret_access_key = ABCDEFGHIJKLMNOPQRSTUVWXYZ\n')

    @teardown
    def rm_boto_cfg(self):
        os.unlink(self.boto_cfg)

    def test_no_boto_cfg(self):
        s3mysqldump.main(['foo', 's3://walrus/foo.sql'])
...
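In this test suite, check_s3 asserts against an in-memory mock of S3 (self.mock_s3_fs) rather than a real endpoint. If you wanted to make the same assertion against a LocalStack-backed bucket, a boto3 client pointed at LocalStack's edge endpoint can do the equivalent check. A minimal sketch, assuming LocalStack is listening on the default http://localhost:4566; the standalone check_s3 function and the dummy credentials below are illustrative, not part of s3mysqldump:

import boto3

def check_s3(bucket, key, expected_args,
             endpoint_url="http://localhost:4566"):
    """Assert that the dump landed in a LocalStack-backed bucket with the
    expected contents. Hypothetical helper for illustration only."""
    s3 = boto3.client(
        "s3",
        endpoint_url=endpoint_url,      # point boto3 at LocalStack, not AWS
        aws_access_key_id="test",       # LocalStack accepts dummy credentials
        aws_secret_access_key="test",
        region_name="us-east-1",
    )
    body = s3.get_object(Bucket=bucket, Key=key)["Body"].read().decode()
    # mysqldump is replaced by "echo" in these tests, so the object body
    # should be exactly the arguments that were passed through
    assert body.rstrip() == expected_args

# usage mirroring the assertions in the tests above:
# check_s3('walrus', 'foo.sql', '--tables -- foo')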


kid_news_wordcount.py

Source: kid_news_wordcount.py (GitHub)


from airflow.models import DAG
from airflow.models import Variable
from airflow.contrib.operators.ssh_operator import SSHOperator
from airflow.operators.python import ShortCircuitOperator
import boto3
from datetime import datetime, timedelta

default_args = {
    'owner': 'admin',
    'depends_on_past': False,
    'start_date': datetime(2022, 3, 29),
    'catchup': False,
    'email_on_failure': True,
    'email_on_retry': False,
    'retries': 2,
    'retry_delay': timedelta(minutes=1),
}

templated_bash_command_scrapy = """
    su - {{params.user}}
    cd {{params.scrapy_env_dir}}
    source ./.venv/bin/activate
    cd {{params.scrapy_project_dir}}
    scrapy crawl {{params.spider}}
"""

def s3_data_load():
    """check whether data was crawled"""
    s3_client = boto3.client(
        "s3",
        aws_access_key_id=Variable.get("AWS_ACCESS_KEY_ID"),
        aws_secret_access_key=Variable.get("AWS_SECRET_ACCESS_KEY")
    )
    today = datetime.strftime(datetime.now() + timedelta(hours=9), "%Y-%m-%d")
    Bucket, path = Variable.get("Bucket"), f'{Variable.get("kid_news_dir")}/{today}'
    res = s3_client.list_objects_v2(Bucket=Bucket, Prefix=path, MaxKeys=1)
    return 'Contents' in res

templated_bash_command_pyspark = """
    {{params.spark_submit}} \
    --master {{params.master}} \
    --deploy-mode {{params.deploy_mode}} \
    --num-executors {{params.num_executors}} \
    --executor-cores {{params.executor_cores}} \
    --executor-memory {{params.executor_memory}} \
    --conf {{params.conf1}} \
    --conf {{params.conf2}} \
    --conf {{params.conf3}} \
    --jars {{params.jars}} \
    {{params.application}}
"""

with DAG(
    "kid_news_wordcount",
    schedule_interval="30 14 * * *",
    default_args=default_args,
    catchup=False,
    params={
        # params for scrapy
        "user": "scrapy",
        "scrapy_env_dir": "/home/scrapy/crawling",
        "scrapy_project_dir": "/home/scrapy/crawling/kidnewscrawling/kidnewscrawling",
        "spider": "kidNewsSpiderCurrentAffairs",
        # params for spark
        "spark_submit": "/opt/spark/bin/spark-submit",
        "master": "yarn",
        "deploy_mode": "client",
        "num_executors": "2",
        "executor_cores": "2",
        "executor_memory": "2048m",
        "conf1": "spark.hadoop.fs.s3a.impl=org.apache.hadoop.fs.s3a.S3AFileSystem",
        "conf2": f'spark.hadoop.fs.s3a.access.key={Variable.get("AWS_ACCESS_KEY_ID")}',
        "conf3": f'spark.hadoop.fs.s3a.secret.key={Variable.get("AWS_SECRET_ACCESS_KEY")}',
        "jars": Variable.get("wordcount_jars"),
        "application": "/opt/workspace/src/kid_word_count_batch.py"
    }
) as dag:
    # task for kid news scraping
    kid_news_scrapy = SSHOperator(
        task_id="kid_news_scrapy",
        ssh_conn_id="ssh_scrapy",
        command=templated_bash_command_scrapy,
        dag=dag
    )

    # task to check whether data exists
    check_s3 = ShortCircuitOperator(
        task_id="check_s3",
        python_callable=s3_data_load,
        dag=dag)

    # task for kid wordcount
    kid_wordcount = SSHOperator(
        task_id="kid_wordcount",
        ssh_conn_id="ssh_spark_yarn",
        command=templated_bash_command_pyspark,
        dag=dag
    )
...
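Here check_s3 is a ShortCircuitOperator: its callable, s3_data_load, returns True only when list_objects_v2 finds at least one object under today's prefix, and a False return skips the downstream kid_wordcount task. To exercise that existence check locally against LocalStack instead of real S3, you could seed a bucket and run the same list_objects_v2 call against the LocalStack endpoint. A rough sketch, assuming LocalStack's default edge endpoint; the bucket name and prefix below are illustrative, not the DAG's Airflow Variables:

import boto3
from datetime import datetime, timedelta

ENDPOINT = "http://localhost:4566"  # default LocalStack edge endpoint (assumption)

def seed_and_check(bucket="kid-news-test", prefix="kid_news"):
    s3 = boto3.client(
        "s3",
        endpoint_url=ENDPOINT,
        aws_access_key_id="test",       # dummy credentials are fine for LocalStack
        aws_secret_access_key="test",
        region_name="us-east-1",
    )
    s3.create_bucket(Bucket=bucket)
    today = datetime.strftime(datetime.now() + timedelta(hours=9), "%Y-%m-%d")
    # seed one object under today's prefix, as the crawler would
    s3.put_object(Bucket=bucket, Key=f"{prefix}/{today}/part-0000.json", Body=b"{}")
    # same existence check the ShortCircuitOperator callable performs
    res = s3.list_objects_v2(Bucket=bucket, Prefix=f"{prefix}/{today}", MaxKeys=1)
    return 'Contents' in res

print(seed_and_check())  # True once the seeded object is visible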


