How to use the s3_bucket_name method in localstack

Best Python code snippets using localstack_python
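All three snippets below ultimately pass an S3 bucket name to a boto3 or boto client, so the quickest way to try them against LocalStack is to build a boto3 S3 client that targets LocalStack's edge endpoint (http://localhost:4566 on recent versions) instead of AWS. A minimal sketch; the bucket name demo-bucket and the test/test credentials are placeholders, not part of the snippets:

import boto3

# LocalStack accepts arbitrary credentials; "test"/"test" is the usual convention.
s3_client = boto3.client(
    's3',
    endpoint_url='http://localhost:4566',  # LocalStack edge endpoint (default port)
    aws_access_key_id='test',
    aws_secret_access_key='test',
    region_name='us-east-1',
)

s3_bucket_name = 'demo-bucket'  # placeholder bucket name
s3_client.create_bucket(Bucket=s3_bucket_name)
print([bucket['Name'] for bucket in s3_client.list_buckets()['Buckets']])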

s3_encryption.py

Source: s3_encryption.py (GitHub)


# Copyright 2017 Insurance Australia Group Limited
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
"""Facilitates checking S3 bucket policies for encryption"""
import json

from botocore.exceptions import ClientError


class S3Encryption(object):
    """Provides the list of S3 buckets that are compliant.

    Attributes:
        b3_s3_client: Boto3 S3 client.
    """

    def __init__(self, b3_s3_client):
        """Constructor"""
        self.client = b3_s3_client
        self.s3_bucket_list = self.client.list_buckets()['Buckets']

    def get_s3_bucket_policy_statement(self, s3_bucket_name):
        """Retrieves the policy attached to the specified S3 bucket.

        Args:
            s3_bucket_name: S3 bucket name
        Returns:
            Policy statement
        """
        try:
            s3_bucket_policy = self.client.get_bucket_policy(Bucket=s3_bucket_name)
        except ClientError:  # The bucket has no policy attached
            s3_bucket_policy = []
        if s3_bucket_policy == []:
            policy_statement = []
        else:
            policy_statement = json.loads(s3_bucket_policy['Policy'])['Statement']
        return policy_statement

    def get_default_encr_bucket_list(self, s3_bucket_name):
        """Verifies whether the bucket has default encryption enabled.

        Args:
            s3_bucket_name: Name of the S3 bucket
        Returns:
            Name of the S3 bucket if it has default encryption enabled; None
            when no default encryption exists on the bucket.
        """
        try:
            default_encryption = self.client.get_bucket_encryption(Bucket=s3_bucket_name)
            return s3_bucket_name if 'ServerSideEncryptionConfiguration' in default_encryption else None
        except ClientError as e:  # Buckets with no default encryption raise this error
            if e.response['Error']['Code'] == 'ServerSideEncryptionConfigurationNotFoundError':
                return None
            else:
                print("Unexpected error: %s" % e)

    def get_encr_policy_bucket_list(self, s3_bucket_name, policy_statements):
        """Verifies whether the bucket has an encryption policy enabled.

        Args:
            s3_bucket_name: Name of the S3 bucket
            policy_statements: Policy attached to the specified S3 bucket
        Returns:
            Name of the S3 bucket if it has encryption enabled.
        """
        if policy_statements != []:
            for policy_statement in policy_statements:
                try:
                    if 's3:PutObject' in policy_statement['Action'] and \
                            'Deny' in policy_statement['Effect'] and \
                            policy_statement['Condition']['StringNotEquals']['s3:x-amz-server-side-encryption'] == 'AES256':
                        return s3_bucket_name
                    elif 's3:PutObject' in policy_statement['Action'] and \
                            'Allow' in policy_statement['Effect'] and \
                            policy_statement['Condition']['StringEquals']['s3:x-amz-server-side-encryption'] == 'AES256':
                        return s3_bucket_name
                except KeyError:  # Statement lacks the expected Action/Effect/Condition keys
                    return None
        else:
            return None

    def get_encryp_comp_s3_bucket_list(self):
        """Get the list of compliant S3 buckets.

        Returns:
            List of S3 buckets
        """
        compliant_s3_bucket_list = []
        for s3_bucket_name in self.s3_bucket_list:
            policy_statements = self.get_s3_bucket_policy_statement(s3_bucket_name['Name'])
            compliant_s3_bucket_list.append(
                self.get_encr_policy_bucket_list(s3_bucket_name['Name'], policy_statements)
            )
            compliant_s3_bucket_list.append(
                self.get_default_encr_bucket_list(s3_bucket_name['Name'])
            )
...
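S3Encryption only depends on the boto3 S3 client passed to its constructor, so it can be pointed at LocalStack unchanged. A hedged usage sketch, reusing the s3_client from the intro and assuming the truncated get_encryp_comp_s3_bucket_list method ends by returning compliant_s3_bucket_list:

# The constructor calls list_buckets(), so the client must already be configured.
encryption_check = S3Encryption(s3_client)

# Non-compliant buckets come back as None entries, so filter them out.
compliant = encryption_check.get_encryp_comp_s3_bucket_list()
print([name for name in compliant if name])

Note that each bucket is appended twice (once per check), so a bucket that passes both the policy check and the default-encryption check appears twice in the result.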


fabfile.py

Source: fabfile.py (GitHub)


from fabric.api import *
from fabric.contrib.files import exists, contains, append
from fabric.api import settings
from boto.s3.key import Key
import boto
import os

# Constants
ENVIRONMENT = None
AWS_ACCESS_KEY_ID = os.environ.get('AWS_ACCESS_KEY_ID')
AWS_ACCESS_KEY_SECRET = os.environ.get('AWS_ACCESS_KEY_SECRET')
S3_BUCKET_NAME = None
SOURCE_DIR = 'docs/'
DEST_DIR = 'assets/docs/'
URL = None

@task
def prod():
    global ENVIRONMENT
    global S3_BUCKET_NAME
    global URL
    ENVIRONMENT = 'prod'
    S3_BUCKET_NAME = 'co-static-preview'
    URL = 'https://pstatic01.usejargon.com/'

@task
def stage():
    global ENVIRONMENT
    global S3_BUCKET_NAME
    global URL
    ENVIRONMENT = 'stage'
    S3_BUCKET_NAME = 'co-static-stage'
    URL = 'https://s3-us-west-1.amazonaws.com/co-static-stage/'

@task
def dev():
    global ENVIRONMENT
    global S3_BUCKET_NAME
    global URL
    ENVIRONMENT = 'dev'
    S3_BUCKET_NAME = 'co-static-dev'
    URL = 'https://s3-us-west-1.amazonaws.com/co-static-dev/'

@task
def upload_docs():
    conn = boto.connect_s3(AWS_ACCESS_KEY_ID, AWS_ACCESS_KEY_SECRET)
    bucket = conn.get_bucket(S3_BUCKET_NAME)
    # Collect the paths of all files under the source directory
    file_paths = []
    for (source_dir, dname, fnames) in os.walk(SOURCE_DIR):
        for fname in fnames:
            file_paths.append(source_dir + '/' + fname)
    for path in file_paths:
        dest_path = os.path.join(DEST_DIR + path.replace(SOURCE_DIR, ''))
        print('Uploading %s to S3 bucket %s at %s' %
              (path, S3_BUCKET_NAME, dest_path))
        k = boto.s3.key.Key(bucket)
        k.key = dest_path
        k.set_contents_from_filename(path)
    print('Done')
    print('---------------------')
    print('UPLOAD: [OK]')
...
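The fabfile above talks to real AWS through the legacy boto library, which has no simple endpoint override. To run the same upload flow against LocalStack, a boto3 rewrite of upload_docs is the usual route; this is a hypothetical equivalent (bucket and directory names copied from the fabfile, s3_client configured as in the intro), not part of the original project:

import os

S3_BUCKET_NAME = 'co-static-dev'
SOURCE_DIR = 'docs/'
DEST_DIR = 'assets/docs/'

def upload_docs(s3_client):
    # Walk the source tree and upload every file under DEST_DIR.
    for source_dir, _dirs, fnames in os.walk(SOURCE_DIR):
        for fname in fnames:
            path = os.path.join(source_dir, fname)
            dest_path = DEST_DIR + os.path.relpath(path, SOURCE_DIR)
            print('Uploading %s to S3 bucket %s at %s' % (path, S3_BUCKET_NAME, dest_path))
            s3_client.upload_file(path, S3_BUCKET_NAME, dest_path)
    print('UPLOAD: [OK]')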


initialize.py

Source: initialize.py (GitHub)


from config import conn, s3_bucket_name, model_name, dataset_name
import tarfile
import os
import io
import requests

def get_archive_and_extract(path, file_name, s3_bucket_name, extension='.tar.gz'):
    if not os.path.exists(path):
        # os.makedirs(path)
        full_path = os.path.join(path, file_name + extension).strip('./')
        print('Downloading: ' + s3_bucket_name + full_path)
        try:
            s3_data = conn.get(full_path, s3_bucket_name)
            print('Extracting: ' + str(file_name + extension) + ' to ' + str(path))
            tar_file_like = io.BytesIO(s3_data.content)
            tar_obj = tarfile.open(fileobj=tar_file_like)
            # Note: extracts under the module-level dataset_name, even for checkpoints
            tar_obj.extractall(path=os.path.join(path, dataset_name))
        except requests.exceptions.HTTPError as e:
            print(e)
            print('No such resource on S3. Starting over.')
            os.makedirs(path)
    else:
        print(path + ' already exists.')

def initialize(dataset_name, model_name, s3_bucket_name):
    get_archive_and_extract('./datasets', dataset_name, s3_bucket_name)
    get_archive_and_extract('./checkpoints', model_name, s3_bucket_name)

def initialize_env():
    initialize(dataset_name, model_name, s3_bucket_name)

def main():
    initialize(dataset_name, model_name, s3_bucket_name)

if __name__ == '__main__':
    main()
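initialize.py imports conn, s3_bucket_name, model_name, and dataset_name from a config module that is not shown. Whatever conn is, it must expose a get(key, bucket) method that returns a requests-style response (the code reads .content and catches requests.exceptions.HTTPError). A hypothetical config.py satisfying that contract against LocalStack's path-style S3 URLs; every name here is an illustrative assumption, not the original project's config:

import requests

S3_ENDPOINT = 'http://localhost:4566'  # LocalStack edge endpoint (default port)

class S3Conn(object):
    """Minimal stand-in for the connection object initialize.py expects."""

    def get(self, key, bucket):
        # Path-style URL: <endpoint>/<bucket>/<key> works against LocalStack.
        response = requests.get('%s/%s/%s' % (S3_ENDPOINT, bucket, key))
        response.raise_for_status()  # requests.exceptions.HTTPError on 4xx/5xx
        return response

conn = S3Conn()
s3_bucket_name = 'demo-bucket'  # placeholder
model_name = 'model'            # placeholder
dataset_name = 'dataset'        # placeholder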


Automation Testing Tutorials

Learn to execute automation testing from scratch with the LambdaTest Learning Hub, from setting up the prerequisites and running your first automation test to following best practices and diving into advanced test scenarios. The LambdaTest Learning Hub compiles step-by-step guides to help you become proficient with different test automation frameworks, e.g. Selenium, Cypress, and TestNG.

YouTube

You can also refer to the video tutorials on the LambdaTest YouTube channel for step-by-step demonstrations from industry experts.

Run localstack automation tests on LambdaTest cloud grid

Perform automation testing on 3000+ real desktop and mobile devices online.

