How to use the create_kinesis_stream method in LocalStack

Best Python code snippets using localstack_python

test_integration.py

Source:test_integration.py Github

copy

Full Screen

...65 sns = aws_stack.connect_to_service('sns')66 LOGGER.info('Creating test streams...')67 run_safe(lambda: dynamodb_service.delete_table(68 TableName=TEST_STREAM_NAME + ddb_lease_table_suffix), print_error=False)69 aws_stack.create_kinesis_stream(TEST_STREAM_NAME, delete=True)70 aws_stack.create_kinesis_stream(TEST_LAMBDA_SOURCE_STREAM_NAME)71 # subscribe to inbound Kinesis stream72 def process_records(records, shard_id):73 EVENTS.extend(records)74 # start the KCL client process in the background75 kinesis_connector.listen_to_kinesis(TEST_STREAM_NAME, listener_func=process_records,76 wait_until_started=True, ddb_lease_table_suffix=ddb_lease_table_suffix)77 LOGGER.info("Kinesis consumer initialized.")78 # create table with stream forwarding config79 testutil.create_dynamodb_table(TEST_TABLE_NAME, partition_key=PARTITION_KEY,80 stream_view_type='NEW_AND_OLD_IMAGES')81 # list DDB streams and make sure the table stream is there82 streams = dynamodbstreams.list_streams()83 ddb_event_source_arn = None84 for stream in streams['Streams']:85 if stream['TableName'] == TEST_TABLE_NAME:86 ddb_event_source_arn = stream['StreamArn']87 assert ddb_event_source_arn88 # deploy test lambda connected to DynamoDB Stream89 zip_file = testutil.create_lambda_archive(load_file(TEST_LAMBDA_PYTHON), get_content=True,90 libs=TEST_LAMBDA_LIBS, runtime=LAMBDA_RUNTIME_PYTHON27)91 testutil.create_lambda_function(func_name=TEST_LAMBDA_NAME_DDB,92 zip_file=zip_file, event_source_arn=ddb_event_source_arn, runtime=LAMBDA_RUNTIME_PYTHON27)93 # make sure we cannot create Lambda with same name twice94 assert_raises(Exception, testutil.create_lambda_function, func_name=TEST_LAMBDA_NAME_DDB,95 zip_file=zip_file, event_source_arn=ddb_event_source_arn, runtime=LAMBDA_RUNTIME_PYTHON27)96 # deploy test lambda connected to Kinesis Stream97 kinesis_event_source_arn = kinesis.describe_stream(98 StreamName=TEST_LAMBDA_SOURCE_STREAM_NAME)['StreamDescription']['StreamARN']99 
testutil.create_lambda_function(func_name=TEST_LAMBDA_NAME_STREAM,100 zip_file=zip_file, event_source_arn=kinesis_event_source_arn, runtime=LAMBDA_RUNTIME_PYTHON27)101 # put items to table102 num_events_ddb = 10103 LOGGER.info('Putting %s items to table...' % num_events_ddb)104 table = dynamodb.Table(TEST_TABLE_NAME)105 for i in range(0, num_events_ddb - 3):106 table.put_item(Item={107 PARTITION_KEY: 'testId%s' % i,108 'data': 'foobar123'109 })110 dynamodb.batch_write_item(RequestItems={TEST_TABLE_NAME: [111 {'PutRequest': {'Item': {PARTITION_KEY: short_uid(), 'data': 'foobar123'}}},112 {'PutRequest': {'Item': {PARTITION_KEY: short_uid(), 'data': 'foobar123'}}},113 {'PutRequest': {'Item': {PARTITION_KEY: short_uid(), 'data': 'foobar123'}}}114 ]})115 # put items to stream116 num_events_kinesis = 10117 LOGGER.info('Putting %s items to stream...' % num_events_kinesis)118 kinesis.put_records(119 Records=[120 {121 'Data': '{}',122 'PartitionKey': 'testId%s' % i123 } for i in range(0, num_events_kinesis)124 ], StreamName=TEST_LAMBDA_SOURCE_STREAM_NAME125 )126 # put 1 item to stream that will trigger an error in the Lambda127 kinesis.put_record(Data='{"%s": 1}' % lambda_integration.MSG_BODY_RAISE_ERROR_FLAG,128 PartitionKey='testIderror', StreamName=TEST_LAMBDA_SOURCE_STREAM_NAME)129 # create SNS topic, connect it to the Lambda, publish test message130 num_events_sns = 3131 response = sns.create_topic(Name=TEST_TOPIC_NAME)132 sns.subscribe(TopicArn=response['TopicArn'], Protocol='lambda',133 Endpoint=aws_stack.lambda_function_arn(TEST_LAMBDA_NAME_STREAM))134 for i in range(0, num_events_sns):135 sns.publish(TopicArn=response['TopicArn'], Message='test message %s' % i)136 # get latest records137 latest = aws_stack.kinesis_get_latest_records(TEST_LAMBDA_SOURCE_STREAM_NAME,138 shard_id='shardId-000000000000', count=10)139 assert len(latest) == 10140 LOGGER.info("Waiting some time before finishing test.")141 time.sleep(2)142 num_events = num_events_ddb + num_events_kinesis + 
num_events_sns143 if len(EVENTS) != num_events:144 LOGGER.warning('DynamoDB and Kinesis updates retrieved (actual/expected): %s/%s' % (len(EVENTS), num_events))145 assert len(EVENTS) == num_events146 # check cloudwatch notifications147 stats1 = get_lambda_metrics(TEST_LAMBDA_NAME_STREAM)148 assert len(stats1['Datapoints']) == 2 + num_events_sns149 stats2 = get_lambda_metrics(TEST_LAMBDA_NAME_STREAM, 'Errors')150 assert len(stats2['Datapoints']) == 1151 stats3 = get_lambda_metrics(TEST_LAMBDA_NAME_DDB)152 assert len(stats3['Datapoints']) == 10153def test_kinesis_lambda_forward_chain():154 kinesis = aws_stack.connect_to_service('kinesis')155 s3 = aws_stack.connect_to_service('s3')156 aws_stack.create_kinesis_stream(TEST_CHAIN_STREAM1_NAME, delete=True)157 aws_stack.create_kinesis_stream(TEST_CHAIN_STREAM2_NAME, delete=True)158 s3.create_bucket(Bucket=TEST_BUCKET_NAME)159 # deploy test lambdas connected to Kinesis streams160 zip_file = testutil.create_lambda_archive(load_file(TEST_LAMBDA_PYTHON), get_content=True,161 libs=TEST_LAMBDA_LIBS, runtime=LAMBDA_RUNTIME_PYTHON27)162 testutil.create_lambda_function(func_name=TEST_CHAIN_LAMBDA1_NAME, zip_file=zip_file,163 event_source_arn=get_event_source_arn(TEST_CHAIN_STREAM1_NAME), runtime=LAMBDA_RUNTIME_PYTHON27)164 testutil.create_lambda_function(func_name=TEST_CHAIN_LAMBDA2_NAME, zip_file=zip_file,165 event_source_arn=get_event_source_arn(TEST_CHAIN_STREAM2_NAME), runtime=LAMBDA_RUNTIME_PYTHON27)166 # publish test record167 test_data = {'test_data': 'forward_chain_data_%s' % short_uid()}168 data = clone(test_data)169 data[lambda_integration.MSG_BODY_MESSAGE_TARGET] = 'kinesis:%s' % TEST_CHAIN_STREAM2_NAME170 kinesis.put_record(Data=to_bytes(json.dumps(data)), PartitionKey='testId', StreamName=TEST_CHAIN_STREAM1_NAME)171 # check results...

Full Screen

Full Screen

cdk_data_stream_stack.py

Source:cdk_data_stream_stack.py Github

copy

Full Screen

...5 )6class CdkDataStreamStack(core.Stack):7 def __init__(self, scope: core.Construct, id: str, **kwargs) -> None:8 super().__init__(scope, id, **kwargs)9 stream = self.create_kinesis_stream("StockTradeStream")10 self.create_iam(stream.stream_arn, "StockTradeStreamUser", "StockTradeStreamPolicy")11 12 def create_kinesis_stream(self, stream_name):13 kinesis_stream = kinesis.Stream(14 self,15 "stock_stream",16 stream_name=stream_name17 )18 return kinesis_stream19 def create_iam(self, stream_arn ,user_name, policy_name):20 user = iam.User(21 self,22 "iam_user_stock_stream",23 user_name=user_name 24 )25 26 policy = iam.Policy(...

Full Screen

Full Screen

create_kinesis_streams.py

Source:create_kinesis_streams.py Github

copy

Full Screen

# NOTE(review): this excerpt's leading import lines were truncated; the imports
# below are implied by the names the visible code uses — confirm against the
# original file.
import json
import os
import time
from pathlib import Path

import boto3

firehose = boto3.client("firehose")
kinesis = boto3.client("kinesis")
path = Path(os.path.realpath(__file__))


def create_kinesis_stream(file_name="kinesis_stream.json"):
    """Create a Kinesis data stream from a JSON request config.

    If a stream with the configured name already exists it is deleted first,
    then recreated. The config file is read from the ``config`` directory
    next to this script and passed verbatim to ``create_stream``.

    Returns the ``create_stream`` API response dict.
    """
    config_path = os.path.join(path.parent, "config", file_name)
    with open(config_path) as f:
        request = json.load(f)
    stream_name = request["StreamName"]
    if stream_name in kinesis.list_streams()["StreamNames"]:
        print("\n Stream already exists so deleting...")
        kinesis.delete_stream(StreamName=stream_name)
        # fix: wait for the deletion to actually finish instead of a fixed
        # time.sleep(10), which could race a slow deletion and make the
        # subsequent create_stream fail with ResourceInUseException.
        kinesis.get_waiter("stream_not_exists").wait(StreamName=stream_name)
    print(f"Creating new stream {stream_name}: \n")
    # fix: the original indexed request["StreamModeDetails"]["StreamMode"]
    # directly, raising KeyError for provisioned-mode configs that omit
    # StreamModeDetails entirely.
    stream_mode = request.get("StreamModeDetails", {}).get("StreamMode")
    if stream_mode == "ON_DEMAND":
        # ShardCount is rejected by the API in on-demand mode; pop with a
        # default so a config that never had the key doesn't KeyError.
        request.pop("ShardCount", None)
    response = kinesis.create_stream(**request)
    res_str = json.dumps(response, sort_keys=True, indent=4)
    print(res_str)
    return response


def create_firehose_delivery_stream(file_name="firehose_description.json"):
    """
    Creates firehose delivery stream. If this already exists, then deletes and
    creates new one with same name. Needs json config file, with stream name specified
    """
    config_path = os.path.join(path.parent, "config", file_name)
    with open(config_path) as f:
        request = json.load(f)
    stream_name = request["DeliveryStreamName"]
    if stream_name in firehose.list_delivery_streams()["DeliveryStreamNames"]:
        print("\n Firehose delivery stream already exists so deleting...")
        firehose.delete_delivery_stream(DeliveryStreamName=stream_name)
        # do this as print(client.waiter_names) returns [] so assuming no waiters for this client
        time.sleep(10)
    print(f"Creating new delivery stream {stream_name} with kinesis source \n")
    response = firehose.create_delivery_stream(**request)
    res_str = json.dumps(response, sort_keys=True, indent=4)
    print(res_str)
    return response


if __name__ == "__main__":
    create_kinesis_stream()
    time.sleep(10)
    # NOTE(review): the excerpt is truncated here; the original presumably
    # continues (likely with create_firehose_delivery_stream()) — confirm
    # against the full file.

Full Screen

Full Screen

Automation Testing Tutorials

Learn to execute automation testing from scratch with the LambdaTest Learning Hub — from setting up the prerequisites and running your first automation test to following best practices and diving deeper into advanced test scenarios. The LambdaTest Learning Hub compiles step-by-step guides to help you become proficient with different test automation frameworks, e.g., Selenium, Cypress, and TestNG.

LambdaTest Learning Hubs:

YouTube

You can also refer to the video tutorials on the LambdaTest YouTube channel to get step-by-step demonstrations from industry experts.

Run localstack automation tests on LambdaTest cloud grid

Perform automation testing on 3000+ real desktop and mobile devices online.

Try LambdaTest Now !!

Get 100 minutes of automation testing FREE!!

Next-Gen App & Browser Testing Cloud

Was this article helpful?

Helpful

Not Helpful