Best Python code snippet using localstack_python
prepare_data.py
Source:prepare_data.py  
...
        self.get_word_mappings()
        print "Data is now ready."
        self.isprepared = True
        return

    def create_input_stream(self):
        self.infile = open(self.options.DEP_PATH, 'r')

    def get_batch(self, batch_size):
        if self.infile == None:
            self.create_input_stream()
        batch_words = []
        batch_contexts = []
        left = batch_size
        while left > 0:
            line = self.infile.readline()
            if len(line) == 0:  # EOF, start a new epoch
                self.create_input_stream()
                line = self.infile.readline()
                self.epoch += 1
            t = line.strip().split()
            if len(t) < 2: continue
            w, c = t[0], t[1]
            if w not in self.word2idx or c not in self.ctx2idx: continue
            batch_words.append(self.word2idx[w])
            batch_contexts.append(self.ctx2idx[c])
            left -= 1
        # reshape the context vector so that it is compatible with the model
        batch_contexts = np.asarray(batch_contexts).reshape([batch_size, 1])
        return (batch_words, batch_contexts)

if __name__ == '__main__':
    opt = DataOptions()
...
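The get_batch excerpt above streams whitespace-separated (word, context) pairs from a dependency file, maps them through word2idx/ctx2idx, skips unknown tokens, and reshapes the contexts to [batch_size, 1]. A minimal standalone sketch of the same batching idea is shown below; the function name iter_batches, the file path, and the toy vocabularies are hypothetical and not part of the original class.

import numpy as np

def iter_batches(path, word2idx, ctx2idx, batch_size):
    """Yield (words, contexts) index batches from a 'word context' pair file.

    Hypothetical standalone version of the get_batch logic above: unknown
    words/contexts are skipped, and the file is reopened at EOF (new epoch).
    """
    infile = open(path, 'r')
    while True:
        batch_words, batch_contexts = [], []
        while len(batch_words) < batch_size:
            line = infile.readline()
            if not line:                     # EOF: start a new epoch
                infile = open(path, 'r')
                line = infile.readline()
            parts = line.strip().split()
            if len(parts) < 2:
                continue
            w, c = parts[0], parts[1]
            if w not in word2idx or c not in ctx2idx:
                continue
            batch_words.append(word2idx[w])
            batch_contexts.append(ctx2idx[c])
        yield batch_words, np.asarray(batch_contexts).reshape([batch_size, 1])

# Example (hypothetical data): a two-word vocabulary and a batch of 4 pairs.
# words, ctxs = next(iter_batches("pairs.txt", {"dog": 0, "cat": 1},
#                                 {"nsubj_runs": 0, "dobj_chases": 1}, 4))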
aws_job_manager.py
Source:aws_job_manager.py
...
        """
        :param records: generator of S3 Records
        :return:
        """
        input_stream_name = self.create_input_stream()
        failed_records = []
        for index, record in enumerate(records):
            try:
                self.api.upload_record(
                    record=record,
                    input_stream_name=input_stream_name
                )
            except Exception as e:
                logging.error('Failed to upload record %s' % index)
                failed_records.append(index)
        self.upload_stream_to_s3(input_stream_name)
        if len(failed_records) > 0:
            logging.error('%s errors occurred while uploading records' % len(failed_records))
        else:
            logging.info('Uploaded successfully, no errors')
            # TODO: generate S3 link

    def create_input_stream(self):
        """
        The process does not take long, but could fail via an API timeout.
        TODO: test on a few more streams with other regions and shard-count parameters
        :return:
        """
        num_stream = len(self.input_streams)
        input_stream_name = self.api.create_new_kinesis_steam(num_stream)
        self.input_streams.append(input_stream_name)
        return input_stream_name

    def upload_stream_to_s3(self, input_stream_name):
        """
        Transfer the uploaded Kinesis data to S3 via the AWS client.
        :param input_stream_name:
        :return:
...
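The job manager above hides Kinesis behind a custom self.api wrapper (create_new_kinesis_steam, upload_record, upload_stream_to_s3). To try the same create-stream-and-put-records flow locally, one option is boto3 pointed at a LocalStack endpoint; the sketch below assumes LocalStack is running on its default edge port 4566, and the stream name and record payloads are hypothetical.

import boto3

# Assumption: LocalStack is running locally and exposes Kinesis on port 4566.
kinesis = boto3.client(
    "kinesis",
    endpoint_url="http://localhost:4566",
    region_name="us-east-1",
    aws_access_key_id="test",
    aws_secret_access_key="test",
)

stream_name = "input-stream-0"  # hypothetical name
kinesis.create_stream(StreamName=stream_name, ShardCount=1)
kinesis.get_waiter("stream_exists").wait(StreamName=stream_name)

# Upload a few records, mirroring the upload_record loop above.
for index, record in enumerate([b"first", b"second", b"third"]):
    kinesis.put_record(
        StreamName=stream_name,
        Data=record,
        PartitionKey=str(index),
    )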
HttpReceiver.py
Source:HttpReceiver.py
...
from pyspark.streaming import StreamingContext

session = SparkSession.builder.appName("py-streaming").getOrCreate()
sc = session.sparkContext
ssc = StreamingContext(sc, 10)

def create_input_stream(ssc):
    """:type ssc: py4j JavaStreamingContext"""
    return sc._jvm.com.dataorc.spark.streaming.HttpStream.createInputDStream(ssc)

def convert_to_dstream(ssc, javadstream):
    ser = PairDeserializer(NoOpSerializer(), NoOpSerializer())
    stream = DStream(javadstream, ssc, ser)
    return stream

inputStream = create_input_stream(ssc._jssc)
pystream = convert_to_dstream(ssc, inputStream)
pystream.map(lambda x: x[:6]).pprint()
ssc.start()
ssc.awaitTermination(1000)
...
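The HttpReceiver snippet wraps a custom Java receiver (com.dataorc.spark.streaming.HttpStream) into a Python DStream via py4j, using NoOpSerializer pairs for deserialization. For comparison, the same start/transform/print loop with Spark's built-in socket source needs no JVM wrapping; a minimal sketch, assuming a text server on localhost:9999 (hypothetical host and port):

from pyspark.sql import SparkSession
from pyspark.streaming import StreamingContext

session = SparkSession.builder.appName("py-streaming").getOrCreate()
ssc = StreamingContext(session.sparkContext, 10)

# Built-in socket source instead of a custom Java DStream.
lines = ssc.socketTextStream("localhost", 9999)  # hypothetical host/port
lines.map(lambda x: x[:6]).pprint()

ssc.start()
ssc.awaitTermination(1000)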
