Best Python code snippet using localstack_python
test_sampleLambda.py
Source: test_sampleLambda.py
# Copyright Amazon.com, Inc. or its affiliates. All Rights Reserved.
# SPDX-License-Identifier: MIT-0
# Start of unit test code: tests/unit/src/test_sampleLambda.py
import os
import json
from typing import Any, Dict
from unittest import TestCase
from unittest.mock import MagicMock, patch

import boto3
import moto

from src.sampleLambda.app import LambdaResources
from src.sampleLambda.app import create_letter_in_s3
from src.sampleLambda.app import lambda_handler
from src.sampleLambda import schemas
from aws_lambda_powertools.utilities.validation import validate


# [1] Mock all AWS services in use
@moto.mock_dynamodb
@moto.mock_s3
class TestSampleLambda(TestCase):
    # Test set up
    def setUp(self) -> None:
        # [2] Mock environment
        self.test_ddb_table_name = "unit_test_ddb"
        self.test_s3_bucket_name = "unit_test_s3_bucket"
        os.environ["DYNAMODB_TABLE_NAME"] = self.test_ddb_table_name
        os.environ["S3_BUCKET_NAME"] = self.test_s3_bucket_name

        # [3] Set up the services: construct a (mocked!) DynamoDB table
        dynamodb = boto3.resource("dynamodb", region_name="us-east-1")
        dynamodb.create_table(
            TableName=self.test_ddb_table_name,
            KeySchema=[{"AttributeName": "PK", "KeyType": "HASH"}],
            AttributeDefinitions=[{"AttributeName": "PK", "AttributeType": "S"}],
            BillingMode="PAY_PER_REQUEST",
        )

        # [3] Set up the services: construct a (mocked!) S3 bucket
        s3_client = boto3.client("s3", region_name="us-east-1")
        s3_client.create_bucket(Bucket=self.test_s3_bucket_name)

        # [4] Establish the "GLOBAL" environment for use in tests.
        self.test_LAMBDA_GLOBAL = LambdaResources(initialize_resources=True)

    def test_create_letter_in_s3(self) -> None:
        # [5] Post test items to a mocked database
        self.test_LAMBDA_GLOBAL.ddb_table.put_item(Item={"PK": "D#UnitTestDoc",
                                                         "data": "Unit Test Doc Corpi"})
        self.test_LAMBDA_GLOBAL.ddb_table.put_item(Item={"PK": "C#UnitTestCust",
                                                         "data": "Unit Test Customer"})

        # [6] Run the DynamoDB-to-S3 file function
        create_letter_in_s3(env=self.test_LAMBDA_GLOBAL,
                            doc_type="UnitTestDoc",
                            cust_id="UnitTestCust")

        # [7] Ensure the data was written to S3 with the correct contents
        body = self.test_LAMBDA_GLOBAL.s3_bucket.Object("UnitTestCust/UnitTestDoc.txt").get()["Body"].read()

        # Test
        self.assertEqual(body.decode("ascii"), "Dear Unit Test Customer;\nUnit Test Doc Corpi")

    # [8] Load and validate test events from the file system
    def load_test_event(self, test_event_file_name: str) -> Dict[str, Any]:
        with open(f"tests/events/{test_event_file_name}.json") as f:
            event = json.load(f)
            validate(event=event, schema=schemas.INPUT)
            return event

    # [9] Patch the global class and any function calls
    @patch("src.sampleLambda.app._LAMBDA_GLOBAL_RESOURCES")
    @patch("src.sampleLambda.app.create_letter_in_s3")
    def test_lambda_handler(self,
                            mock_create_letter_in_s3: MagicMock,
                            mock_lambda_global_resources: MagicMock):
        # [10] Test setup - return a mock for the global var LAMBDA_GLOBAL
        mock_lambda_global_resources.return_value = self.test_LAMBDA_GLOBAL
        mock_create_letter_in_s3.return_value = {"statusCode": 200, "body": "OK"}

        # [11] Run test using a test event from /tests/events/*.json
        test_event = self.load_test_event("sampleEvent1")
        ret_val = lambda_handler(event=test_event, context=None)

        # [12] Validate the function was called with the mocked globals
        # and event values
        mock_create_letter_in_s3.assert_called_once_with(env=mock_lambda_global_resources,
                                                         doc_type=test_event["pathParameters"]["docType"],
                                                         cust_id=test_event["pathParameters"]["customerId"])
        self.assertEqual(ret_val, mock_create_letter_in_s3.return_value)

    def tearDown(self) -> None:
        # [13] Remove (mocked!) S3 objects and bucket
        s3 = boto3.resource("s3", region_name="us-east-1")
        bucket = s3.Bucket(self.test_s3_bucket_name)
        for key in bucket.objects.all():
            key.delete()
        bucket.delete()

        # [14] Remove (mocked!) DynamoDB table
        dynamodb = boto3.client("dynamodb", region_name="us-east-1")
        dynamodb.delete_table(TableName=self.test_ddb_table_name)

        # [15] Remove the GLOBAL settings
        self.test_LAMBDA_GLOBAL = None
        ...
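The module under test, src/sampleLambda/app.py, is not shown on this page. For orientation, here is a minimal sketch of what it could look like, inferred from the assertions above; LambdaResources, the module-level _LAMBDA_GLOBAL_RESOURCES, the environment-variable lookups, and the letter format are assumptions, not the actual AWS sample code.

# Hypothetical sketch of src/sampleLambda/app.py, reconstructed from the test.
import os
import boto3

class LambdaResources:
    """Holds the boto3 handles the handler needs (DynamoDB table, S3 bucket)."""
    def __init__(self, initialize_resources: bool = False):
        self.ddb_table = None
        self.s3_bucket = None
        if initialize_resources:
            region = os.environ.get("AWS_DEFAULT_REGION", "us-east-1")
            self.ddb_table = boto3.resource("dynamodb", region_name=region).Table(
                os.environ["DYNAMODB_TABLE_NAME"])
            self.s3_bucket = boto3.resource("s3", region_name=region).Bucket(
                os.environ["S3_BUCKET_NAME"])

# Module-level global; the test replaces it wholesale via
# @patch("src.sampleLambda.app._LAMBDA_GLOBAL_RESOURCES").
_LAMBDA_GLOBAL_RESOURCES = LambdaResources(initialize_resources=True)

def create_letter_in_s3(env: LambdaResources, doc_type: str, cust_id: str) -> dict:
    # Fetch the document body and the customer name from DynamoDB...
    doc = env.ddb_table.get_item(Key={"PK": f"D#{doc_type}"})["Item"]["data"]
    cust = env.ddb_table.get_item(Key={"PK": f"C#{cust_id}"})["Item"]["data"]
    # ...and write the assembled letter to S3, matching the body that
    # test_create_letter_in_s3 asserts on.
    env.s3_bucket.put_object(Key=f"{cust_id}/{doc_type}.txt",
                             Body=f"Dear {cust};\n{doc}".encode("ascii"))
    return {"statusCode": 200, "body": "OK"}

def lambda_handler(event: dict, context) -> dict:
    # Delegate to create_letter_in_s3 with values from the API Gateway path.
    return create_letter_in_s3(env=_LAMBDA_GLOBAL_RESOURCES,
                               doc_type=event["pathParameters"]["docType"],
                               cust_id=event["pathParameters"]["customerId"])

Keeping the boto3 handles behind a single module-level object is what makes step [9] cheap: the test swaps the whole global for a MagicMock instead of intercepting individual boto3 calls.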
test_main.py
Source: test_main.py
from moto import mock_s3, mock_sqs
import boto3
import os
from unittest import TestCase, mock

from radarprocessor.main import handler
from radarprocessor.tests.aws_test_helper import S3BucketFile, s3_event_bucket_uploaded

AWS_DEFAULT_REGION = 'us-east-1'
TEST_S3_BUCKET_NAME = 'my-test-bucket'
TEST_SQS_QUEUE_NAME = 'ferje-ais-importer-test-pathtaker-source'

dir_path = os.path.dirname(os.path.realpath(__file__))


def _read_testdata(name):
    with open(f'{dir_path}/testdata/{name}', 'r') as f:
        return f.read()


@mock_s3
@mock_sqs
class IngestAisData(TestCase):
    s3 = None
    sqs = None

    def setUp(self) -> None:
        """
        Creates our mocked S3 bucket, which radarprocessor.main.handler will automatically connect to.
        """
        # Ensure test setup uses the correct test credentials
        os.environ['AWS_ACCESS_KEY_ID'] = 'testing'
        os.environ['AWS_SECRET_ACCESS_KEY'] = 'testing'
        os.environ['AWS_SECURITY_TOKEN'] = 'testing'
        os.environ['AWS_SESSION_TOKEN'] = 'testing'
        os.environ['AWS_DEFAULT_REGION'] = AWS_DEFAULT_REGION

        # Initialize S3 test bucket
        s3 = boto3.resource('s3', region_name=AWS_DEFAULT_REGION)
        s3.create_bucket(Bucket=TEST_S3_BUCKET_NAME)
        self.s3 = boto3.client('s3', region_name=AWS_DEFAULT_REGION)

        # Initialize SQS test queue
        sqs = boto3.resource('sqs', region_name=AWS_DEFAULT_REGION)
        test_queue = sqs.create_queue(QueueName=TEST_SQS_QUEUE_NAME, Attributes={'DelaySeconds': '0'})
        self.sqs = boto3.client('sqs')

        environment_patcher = mock.patch.dict(os.environ, {
            'SQS_QUEUE_URL': test_queue.url,
            # Ensure our system looks for resources in the correct region
            'AWS_DEFAULT_REGION': AWS_DEFAULT_REGION,
            # Prevent any use of non-test credentials
            'AWS_ACCESS_KEY_ID': 'testing',
            'AWS_SECRET_ACCESS_KEY': 'testing',
            'AWS_SECURITY_TOKEN': 'testing',
            'AWS_SESSION_TOKEN': 'testing',
        })
        environment_patcher.start()
        self.addCleanup(environment_patcher.stop)

    def test_import_success(self):
        """
        Verifies that uploaded files are processed correctly
        and removed from S3 when completed.
        """
        # Files we are using in this test
        uploaded_files = [
            S3BucketFile(
                object_key='ScenarioLatLon.csv',
                content=_read_testdata('ScenarioLatLon.csv'),
            ),
        ]

        # Upload the data to the mocked instance of S3
        for file in uploaded_files:
            self.s3.put_object(Bucket=TEST_S3_BUCKET_NAME, Key=file.object_key, Body=file.content)
        event = s3_event_bucket_uploaded([uploaded_files[0]])

        # Run our event handler
        handler(event, {})

        # The bucket should now be empty: list_objects_v2 omits the
        # 'Contents' key entirely when no objects remain.
        list_response = self.s3.list_objects_v2(Bucket=TEST_S3_BUCKET_NAME)
        self.assertNotIn('Contents', list_response)

        # # Assert the outcome is correct
        # objects_in_s3 = {content['Key'] for content in self.s3.list_objects_v2(Bucket=TEST_S3_BUCKET_NAME)['Contents']}
        #
        # # TODO This will correctly fail, because handler has not been correctly imported yet
        # # All processed files should have been deleted from S3
        # for file in uploaded_files:
        ...
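The helper module radarprocessor.tests.aws_test_helper is imported above but not included in the snippet. A plausible minimal sketch follows, assuming S3BucketFile is a plain dataclass and s3_event_bucket_uploaded fabricates a standard S3 ObjectCreated:Put notification event; the default bucket name mirroring TEST_S3_BUCKET_NAME is likewise an assumption.

# Hypothetical sketch of radarprocessor/tests/aws_test_helper.py -- the real
# helper is not shown, so this event shape is inferred from the standard
# S3 notification format rather than taken from the project.
from dataclasses import dataclass
from typing import List


@dataclass
class S3BucketFile:
    """A test file destined for the mocked bucket."""
    object_key: str
    content: str


def s3_event_bucket_uploaded(files: List[S3BucketFile],
                             bucket_name: str = 'my-test-bucket') -> dict:
    """Builds the Lambda event S3 would emit after the given files are uploaded."""
    return {
        'Records': [
            {
                'eventSource': 'aws:s3',
                'eventName': 'ObjectCreated:Put',
                's3': {
                    'bucket': {'name': bucket_name},
                    'object': {'key': file.object_key},
                },
            }
            for file in files
        ]
    }

Hand-building the event keeps the test independent of any real S3 notification wiring; only the fields the handler actually reads need to be present.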
