How to use kinesis_stream_name method in localstack

Best Python code snippet using localstack_python

cli.py

Source: cli.py (GitHub)


import click
import boto3
import sys
import os
from io import BytesIO
import zipfile
from .iam import create_firehoser_role
from .iam import put_firehoser_role_policy

lambda_client = boto3.client('lambda')
iam_client = boto3.client('iam')
kinesis_client = boto3.client('kinesis')


def compress_file(path, mode=zipfile.ZIP_DEFLATED):
    with open(path, 'r') as zip_file:
        lambda_file_content = zip_file.read()
    lambda_io_buffer = BytesIO()
    zipFile = zipfile.ZipFile(lambda_io_buffer, 'w')
    zipFile.writestr('lambda.py', lambda_file_content, compress_type=mode)
    zipFile.close()
    lambda_io_buffer.seek(0)
    return lambda_io_buffer.read()


@click.group()
def firehoser():
    """Forwards records from Kinesis to Firehose."""
    pass


@firehoser.command()
@click.argument('kinesis_stream_name')
@click.argument('firehose_stream_name')
@click.option('--record_delimiter', '-d', default='\n')
def link(kinesis_stream_name, firehose_stream_name, record_delimiter):
    """Set up a Lambda to forward events."""
    # Check the stream exists and grab information
    try:
        response = kinesis_client.describe_stream(StreamName=kinesis_stream_name)
    except kinesis_client.exceptions.ResourceNotFoundException:
        click.echo('Error: Stream {} not found'.format(kinesis_stream_name))
        sys.exit(-1)
    stream_arn = response['StreamDescription']['StreamARN']
    # Create Firehose role
    response = create_firehoser_role()
    role_arn = response['Role']['Arn']
    # Attach required policy
    response = put_firehoser_role_policy()
    # Read lambda source
    directory = os.path.dirname(__file__)
    lambda_path = os.path.join(directory, 'lambda.py')
    # Compress in memory
    lambda_zip_file = compress_file(lambda_path)
    lambda_function_name = '{}_backup'.format(kinesis_stream_name)
    try:
        lambda_client.create_function(
            FunctionName=lambda_function_name,
            Runtime='python3.6',
            Role=role_arn,
            Handler='lambda.handler',
            Code={
                'ZipFile': lambda_zip_file,
            },
            Description='Lambda to backup {} Kinesis Stream'.format(kinesis_stream_name),
            Timeout=20,
            MemorySize=1024,
            Environment={
                'Variables': {
                    'FIREHOSE_STREAM_NAME': firehose_stream_name,
                    'RECORD_DELIMITER': record_delimiter
                }
            }
        )
    except lambda_client.exceptions.ResourceConflictException:
        click.echo('Error: A function named {} already exists!'.format(lambda_function_name))
        sys.exit(-1)
    # Add event source
    response = lambda_client.create_event_source_mapping(
        EventSourceArn=stream_arn,
        FunctionName=lambda_function_name,
        Enabled=True,
        BatchSize=500,
        StartingPosition='LATEST'
    )


@firehoser.command()
@click.argument('kinesis_stream_name')
@click.argument('firehose_stream_name')
def unlink(kinesis_stream_name, firehose_stream_name):
    """Destroy the Lambda created with link."""
    function_name = '{}_backup'.format(kinesis_stream_name)
    lambda_arn = None
    # Grab Lambda
    try:
        response = lambda_client.get_function(FunctionName=function_name)
        lambda_arn = response['Configuration']['FunctionArn']
    except lambda_client.exceptions.ResourceNotFoundException:
        click.echo('Warning: Lambda {} does not exist'.format(function_name))
        sys.exit(-1)  # nothing to tear down
    event_source_mappings = lambda_client.list_event_source_mappings()['EventSourceMappings']
    event_source_mapping_uuid = list(filter(lambda x: x.get('FunctionArn') == lambda_arn, event_source_mappings))[0]['UUID']
    response = lambda_client.delete_event_source_mapping(UUID=event_source_mapping_uuid)
    lambda_client.delete_function(FunctionName=function_name)


firehoser.add_command(link)...
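The module-level boto3 clients above use the default AWS endpoints, so running the link command against LocalStack means pointing those clients at the LocalStack edge endpoint. The sketch below shows that idea for the Kinesis client only and pre-creates the stream that link expects; the endpoint URL (http://localhost:4566), the dummy credentials, and the stream name are assumptions for illustration, and the lambda_client and iam_client would need the same endpoint_url treatment.

# A minimal sketch, assuming LocalStack is listening on its default edge endpoint.
# The stream name 'my_kinesis_stream' is a placeholder, not part of the snippet above.
import boto3

LOCALSTACK_ENDPOINT = 'http://localhost:4566'  # assumed default LocalStack edge port

kinesis_client = boto3.client(
    'kinesis',
    endpoint_url=LOCALSTACK_ENDPOINT,
    region_name='us-east-1',
    aws_access_key_id='test',        # LocalStack accepts dummy credentials
    aws_secret_access_key='test',
)

# Create the stream that `link` will describe, and wait until it is ACTIVE.
kinesis_client.create_stream(StreamName='my_kinesis_stream', ShardCount=1)
kinesis_client.get_waiter('stream_exists').wait(StreamName='my_kinesis_stream')

# describe_stream now returns the StreamARN that `link` uses for the event source mapping.
stream_arn = kinesis_client.describe_stream(StreamName='my_kinesis_stream')['StreamDescription']['StreamARN']
print(stream_arn)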

trigger_image_auto_tagger.py

Source: trigger_image_auto_tagger.py (GitHub)


#!/usr/bin/env python3
# -*- encoding: utf-8 -*-
# vim: tabstop=2 shiftwidth=2 softtabstop=2 expandtab
import sys
import json
import os
import urllib.parse
import traceback
import datetime
import boto3

DRY_RUN = (os.getenv('DRY_RUN', 'false') == 'true')
AWS_REGION = os.getenv('REGION_NAME', 'us-east-1')
KINESIS_STREAM_NAME = os.getenv('KINESIS_STREAM_NAME', 'november-photo')


def write_records_to_kinesis(kinesis_client, kinesis_stream_name, records):
    import random
    random.seed(47)

    def gen_records():
        record_list = []
        for rec in records:
            payload = json.dumps(rec, ensure_ascii=False)
            partition_key = 'part-{:05}'.format(random.randint(1, 1024))
            record_list.append({'Data': payload, 'PartitionKey': partition_key})
        return record_list

    MAX_RETRY_COUNT = 3
    record_list = gen_records()
    for _ in range(MAX_RETRY_COUNT):
        try:
            response = kinesis_client.put_records(Records=record_list, StreamName=kinesis_stream_name)
            print("[DEBUG] try to write_records_to_kinesis", response, file=sys.stderr)
            break
        except Exception as ex:
            import time
            traceback.print_exc()
            time.sleep(2)
    else:
        raise RuntimeError('[ERROR] Failed to put_records into kinesis stream: {}'.format(kinesis_stream_name))


def lambda_handler(event, context):
    kinesis_client = boto3.client('kinesis', region_name=AWS_REGION)
    for record in event['Records']:
        try:
            bucket = record['s3']['bucket']['name']
            key = urllib.parse.unquote_plus(record['s3']['object']['key'], encoding='utf-8')
            record = {'s3_bucket': bucket, 's3_key': key}
            print("[INFO] object created: ", record, file=sys.stderr)
            write_records_to_kinesis(kinesis_client, KINESIS_STREAM_NAME, [record])
        except Exception as ex:
            traceback.print_exc()


if __name__ == '__main__':
    s3_event = '''{
  "Records": [
    {
      "eventVersion": "2.0",
      "eventSource": "aws:s3",
      "awsRegion": "us-east-1",
      "eventTime": "1970-01-01T00:00:00.000Z",
      "eventName": "ObjectCreated:Put",
      "userIdentity": {
        "principalId": "EXAMPLE"
      },
      "requestParameters": {
        "sourceIPAddress": "127.0.0.1"
      },
      "responseElements": {
        "x-amz-request-id": "EXAMPLE123456789",
        "x-amz-id-2": "EXAMPLE123/5678abcdefghijklambdaisawesome/mnopqrstuvwxyzABCDEFGH"
      },
      "s3": {
        "s3SchemaVersion": "1.0",
        "configurationId": "testConfigRule",
        "bucket": {
          "name": "november-photo",
          "ownerIdentity": {
            "principalId": "EXAMPLE"
          },
          "arn": "arn:aws:s3:::november-photo"
        },
        "object": {
          "key": "raw-image/20191120_122332.jpg",
          "size": 4300,
          "eTag": "bca44a2aac2c789bc77b5eb13bcb04e2",
          "sequencer": "0A1B2C3D4E5F678901"
        }
      }
    }
  ]
}'''
    event = json.loads(s3_event)...
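Because write_records_to_kinesis receives its Kinesis client as a parameter, it can be exercised without deploying the Lambda by handing it a client configured for LocalStack. The sketch below is a rough harness under those assumptions: it runs in the same module as the snippet above, LocalStack is on its default edge endpoint, and the credentials and sample record are illustrative only.

# Hypothetical local harness for write_records_to_kinesis; the endpoint, credentials,
# and sample record are assumptions, not part of the original script.
import boto3

kinesis = boto3.client(
    'kinesis',
    endpoint_url='http://localhost:4566',  # assumed LocalStack edge endpoint
    region_name='us-east-1',
    aws_access_key_id='test',
    aws_secret_access_key='test',
)

# The handler writes to the stream named by KINESIS_STREAM_NAME, so create it first.
kinesis.create_stream(StreamName='november-photo', ShardCount=1)
kinesis.get_waiter('stream_exists').wait(StreamName='november-photo')

# Feed the function the same record shape lambda_handler builds from an S3 event.
write_records_to_kinesis(
    kinesis,
    'november-photo',
    [{'s3_bucket': 'november-photo', 's3_key': 'raw-image/20191120_122332.jpg'}],
)

Note that lambda_handler itself still builds its client without endpoint_url, so invoking the handler directly would only reach LocalStack if that call is adjusted the same way.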

random_stream_2_kensis.py

Source: random_stream_2_kensis.py (GitHub)


import time
import json
import boto3
import random
import calendar
from datetime import datetime
from pprint import pprint
import os

KINESIS_STREAM_NAME = os.environ['KINESIS_STREAM_NAME']
REGION_NAME = os.environ['REGION_NAME']


def write_to_stream(event_id, event, region_name, stream_name):
    """Write streaming event to specified Kinesis Stream within specified region.

    Parameters
    ----------
    event_id: str
        The unique identifier for the event which will be needed in partitioning.
    event: dict
        The actual payload including all the details of the event.
    region_name: str
        AWS region identifier, e.g., "ap-northeast-1".
    stream_name: str
        Kinesis Stream name to write.

    Returns
    -------
    res: Response returned by `put_record` func defined in boto3.client('kinesis')
    """
    client = boto3.client('kinesis', region_name=region_name)
    res = client.put_record(
        StreamName=stream_name,
        Data=json.dumps(event) + '\n',
        PartitionKey=event_id
    )
    return res


if __name__ == '__main__':
    # simulate streaming data generation
    while True:
        event = {
            "event_id": str(random.randint(1, 100000)),
            "event_type": random.choice(['open_app', 'close_app', 'make_comments']),
            "user_id": str(random.randint(1, 30)),
            "timestamp": calendar.timegm(datetime.utcnow().timetuple())
        }
        pprint(event)
        # send to Kinesis Stream
        event_id = event['event_id']
        write_to_stream(event_id, event, REGION_NAME, KINESIS_STREAM_NAME)...
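To confirm that the producer loop is actually landing records, they can be read back from the stream's shard. The sketch below assumes the producer was pointed at a LocalStack stream (for example by adding endpoint_url to its boto3.client call) and that KINESIS_STREAM_NAME was set to 'my_kinesis_stream'; the endpoint, stream name, and credentials are assumptions.

# Hypothetical consumer-side check against LocalStack; names and endpoint are assumptions.
import json
import boto3

kinesis = boto3.client(
    'kinesis',
    endpoint_url='http://localhost:4566',  # assumed LocalStack edge endpoint
    region_name='us-east-1',
    aws_access_key_id='test',
    aws_secret_access_key='test',
)

stream_name = 'my_kinesis_stream'  # placeholder for whatever KINESIS_STREAM_NAME held

# Read from the first (and here only) shard, starting at the oldest record.
shard_id = kinesis.describe_stream(StreamName=stream_name)['StreamDescription']['Shards'][0]['ShardId']
iterator = kinesis.get_shard_iterator(
    StreamName=stream_name,
    ShardId=shard_id,
    ShardIteratorType='TRIM_HORIZON',
)['ShardIterator']

response = kinesis.get_records(ShardIterator=iterator, Limit=25)
for record in response['Records']:
    # Each Data blob is the JSON payload written by write_to_stream (plus a trailing newline).
    print(json.loads(record['Data']))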
