How to use the kinesis_api module in localstack

Best Python code snippet using localstack_python

consumer.py

Source:consumer.py Github

copy

Full Screen

"""
Author:
Date:
Python: 3.7.9

Consumer for AWS Kinesis Stock Data Stream.

Reads stock records from a Kinesis stream, parses them into row dicts,
and writes them into a DynamoDB table.
"""
from datetime import datetime
import time

from kinesis_api import KinesisAPI, DynamoDbAPI

KINESIS_STREAM_NAME = "stock-stream"
KINESIS_SHARD_PARTITION_KEY = "stock"
DYNAMO_DB_TABLE = "stock-stream-data"
DYNAMO_DB_PARTITION_KEY = "symbol"
DYNAMO_DB_SORT_KEY = "minute"
# Backward-compatible alias for the original (misspelled) constant name.
SYNAMO_DB_SORT_KEY = DYNAMO_DB_SORT_KEY


def push_data():
    """Push to front end (not implemented yet)."""
    pass


def insert_db(db_api, data):
    """Insert each row of *data* into DynamoDB via *db_api*."""
    print("Writing to dynamo DB")
    for row in data:
        db_api.put(row)


def parse_record(data):
    """Parse one consumed record into a list of 10 row dicts.

    *data* is column-oriented (``{column: {row_index: value}}``, the
    pandas ``DataFrame.to_dict()`` layout — TODO confirm against the
    producer).  Rows are bucketed by ``int(row_index) % 10``; cells not
    present default to the string ``'0'``.
    """
    required_keys = ["minute", "symbol", "open",
                     "high", "low", "close", "volume"]
    # Keep only the relevant columns.
    parsed_data = {key: data[key] for key in required_keys}
    # Pre-fill 10 placeholder rows.
    arrayed = [{key: '0' for key in required_keys} for _ in range(10)]
    # Unpack the column-oriented dict into row-oriented records.
    for key, value in parsed_data.items():
        for subkey in sorted(value):
            arrayed[int(subkey) % 10][key] = value[subkey]
    return arrayed


def _process_records(records, db_api, last_seq_num=""):
    """Parse and store *records*; return the last sequence number seen.

    If *records* is empty the incoming *last_seq_num* is returned
    unchanged, matching the original inline loops' behavior.
    """
    for record in records:
        data = record.get("data")
        last_seq_num = record.get("sequence_number")
        print("---------------------------")
        parsed_data = parse_record(data)
        print(parsed_data)
        insert_db(db_api, parsed_data)
        print("---------------------------")
    return last_seq_num


def consume():
    """Continuously consume stream records and store them in DynamoDB."""
    api = KinesisAPI(stream_name=KINESIS_STREAM_NAME)
    db_api = DynamoDbAPI(DYNAMO_DB_TABLE)
    # Initial read from the default iterator position.
    last_seq_num = _process_records(api.read_records(time_limit=1.0), db_api)
    starttime = time.time()
    while True:
        print("Retrieving...")
        # Resume right after the last record we processed.
        shard_iter = api.get_shard_iterator(
            iterator_type="AFTER_SEQUENCE_NUMBER",
            sequence_number=last_seq_num
        )
        last_seq_num = _process_records(
            api.read_records(time_limit=1.0, shard_iterator=shard_iter),
            db_api,
            last_seq_num,
        )
        print("Sleeping...")
        # Align wake-ups to a 60-second cadence from the start time.
        time.sleep(60.0 - ((time.time() - starttime) % 60.0))


if __name__ == "__main__":
    print("====================================")
    print("Stock Data Consumer for AWS Kinesis")
    print("====================================")
    # NOTE(review): source truncated here — presumably followed by consume().

Full Screen

Full Screen

server.py

Source:server.py Github

copy

Full Screen

"""Flask + Socket.IO server serving historical and live stock data."""
from os import urandom, path
from time import sleep

from flask import Flask, render_template, request, jsonify
from flask_socketio import SocketIO, emit
from flask_cors import CORS
import pandas as pd

from kinesis_api import DynamoDbAPI

CUR_DIR = path.realpath(path.dirname(__file__))
DYNAMO_DB_TABLE = "stock-stream-data"
DYNAMO_DB_PARTITION_KEY = "symbol"
DYNAMO_DB_SORT_KEY = "minute"
# Backward-compatible alias for the original (misspelled) constant name.
SYNAMO_DB_SORT_KEY = DYNAMO_DB_SORT_KEY

app = Flask(__name__)
# BUG FIX: the original assigned ``urandom(32).hex`` — the bound method
# object itself — instead of calling it, so SECRET_KEY was not a string.
app.config["SECRET_KEY"] = urandom(32).hex()
app.config["THREADED"] = True
app.config["DEBUG"] = True
CORS(app)
socketio = SocketIO(app, cors_allowed_origins="*")
db = DynamoDbAPI(table_name=DYNAMO_DB_TABLE)


@app.route("/")
def index():
    """Serve the single-page front end."""
    return render_template("index.html")


@app.route("/api/get_historical_data")
def get_historical_data():
    """Return historical rows for ``?stock=SYMBOL`` from a local CSV."""
    stock = request.args.get("stock")
    file_ = path.join(CUR_DIR, "data", f"hist-{stock}.csv")
    df = pd.read_csv(file_, index_col=0)
    return jsonify(df.to_dict(orient="records"))


@socketio.on("connect", namespace="/api/socket.io")
def on_connect():
    """Log new Socket.IO connections."""
    print("SocketIO: Connected!")


@socketio.on_error(namespace="/api/socket.io")
def error_handler(err):
    """Log Socket.IO errors for this namespace."""
    print(f"ERROR: {err}")


@socketio.on("get_live_data", namespace='/api/socket.io')
def get_live_data(symbol):
    """Emit the latest graph data for *symbol* to the requesting client."""
    print("Stock Symbol:", symbol)
    # "#m"/"#o" aliases sidestep DynamoDB reserved words (minute, open).
    resp = db.get_all(
        projection_expr="symbol, #m, #o",
        expr_attr_names={"#m": "minute", "#o": "open"},
        filter="symbol = :stock",
        expr_attr_values={":stock": symbol}
    )
    labels = []
    data = []
    for item in resp["items"]:
        labels.append(item["minute"]["S"])
        data.append(float(item["open"]["N"]))
    emit(
        "graph_data",
        {"symbol": symbol, "labels": labels, "data": data},
        json=True,
        namespace="/api/socket.io"
    )
    sleep(60.0)


@socketio.on("disconnect", namespace="/api/socket.io")
def on_disconnect():
    """Log Socket.IO disconnections."""
    print("SocketIO: Disconnected!")


if __name__ == "__main__":
    # NOTE(review): source truncated after the guard; the conventional
    # entry point for a Flask-SocketIO app is assumed here — confirm.
    socketio.run(app)

Full Screen

Full Screen

producer.py

Source:producer.py Github

copy

Full Screen

"""
Author: Maneesh Divana <maneeshd77@gmail.com>
Date: 2020-12-03
Python: 3.7.9

Reads stock data from CSV files and simulates stock streaming into an
AWS Kinesis Data Stream.
"""
from os import path
from datetime import datetime
from time import sleep

import pandas as pd

from kinesis_api import KinesisAPI

CUR_DIR = path.realpath(path.dirname(__file__))
BASE_DIR = path.dirname(CUR_DIR)
DATA_DIR_ROOT = path.join(BASE_DIR, "data")
CSV_FILENAME = "intraday-22-oct-merged.csv"
KINESIS_STREAM_NAME = "stock-stream"
KINESIS_SHARD_PARTITION_KEY = "stock"


def simulate():
    """Simulate a real-time stream and post to a Kinesis Data Stream shard.

    Sends one batch of 10 consecutive CSV rows (all companies) every
    minute until the DataFrame is exhausted.
    """
    # Load CSV into DataFrame
    df = pd.read_csv(path.join(DATA_DIR_ROOT, CSV_FILENAME))
    print(f"Shape of DataFrame: {df.shape}")
    print("DataFrame:")
    print(df, "\n")
    # Groups of 10 rows.  The original computed this groupby twice and
    # immediately shadowed the first result in the loop below; compute once.
    groups = df.groupby(df.index // 10)
    # Connect to Kinesis using API
    api = KinesisAPI(stream_name=KINESIS_STREAM_NAME)
    print("-" * 64, "\n")
    start = datetime.now()
    print(f"[{start.strftime('%Y-%m-%d %H:%M:%S')}] Starting Kinesis producer...\n")
    # Send records for a group of 10 companies every 1 minute
    for _, group in groups:
        now = datetime.now()
        print(f"[{now.strftime('%Y-%m-%d %H:%M:%S')}] Sending data:")
        print(group)
        api.write_record(
            data=group.to_dict(),
            partition_key=KINESIS_SHARD_PARTITION_KEY
        )
        print("")
        sleep(60.0)
    end = datetime.now()
    print(f"[{end.strftime('%Y-%m-%d %H:%M:%S')}] Finished producing. Exiting...\n")
    api.close()


if __name__ == "__main__":
    print("====================================")
    print("Stock Data Producer for AWS Kinesis")
    print("====================================")
    # NOTE(review): source truncated here — presumably followed by simulate().

Full Screen

Full Screen

Automation Testing Tutorials

Learn to execute automation testing from scratch with the LambdaTest Learning Hub. Right from setting up the prerequisites to run your first automation test, to following best practices and diving deeper into advanced test scenarios, the LambdaTest Learning Hub compiles a list of step-by-step guides to help you become proficient with different test automation frameworks, e.g. Selenium, Cypress, and TestNG.

LambdaTest Learning Hubs:

YouTube

You could also refer to video tutorials over LambdaTest YouTube channel to get step by step demonstration from industry experts.

Run localstack automation tests on LambdaTest cloud grid

Perform automation testing on 3000+ real desktop and mobile devices online.

Try LambdaTest Now !!

Get 100 automation test minutes FREE!!

Next-Gen App & Browser Testing Cloud

Was this article helpful?

Helpful

Not Helpful