How to use the update_job_status method in localstack

Best Python code snippets using localstack_python
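Each snippet below follows the same pattern: a long-running job reports its state by calling an update_job_status-style helper at each stage of the work. The following is a minimal sketch of that pattern; the in-memory store and the helper itself are illustrative assumptions, not LocalStack's public API.

# Minimal sketch of the job-status pattern shared by the snippets below.
# The in-memory store and helper are illustrative assumptions,
# not part of LocalStack's public API.
jobs = {}  # jid -> status

def update_job_status(jid, status):
    """Record the current status of job `jid`."""
    jobs[jid] = status

def run_job(jid):
    update_job_status(jid, "in progress")
    # ... fetch inputs, run the workflow, store results ...
    update_job_status(jid, "finished")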

wrapper.py

Source: wrapper.py (GitHub)


...
current_user = conn.get_current_user()
user_job = current_user
job = conn.get_job(user_job.job)
# job=666
job = conn.update_job_status(job, status=job.RUNNING, progress=0, status_comment="Loading images...")
# Get the list of images in the project
image_instances = ImageInstanceCollection()
image_instances.project = id_project
image_instances = conn.fetch(image_instances)
images = image_instances.data()
# Split the list of all images into a list of input images and
# a list of mask images representing the ground truth data.
inputImages = []
masks = []
for image in images:
    if "_lbl." in image.filename:
        masks.append(image)
    else:
        inputImages.append(image)
# Create the folder structure for the folders shared with docker
jobFolder = baseOutputFolder + str(job.id) + "/"
inDir = jobFolder + "in"
outDir = jobFolder + "out"
if not os.path.exists(inDir):
    os.makedirs(inDir)
if not os.path.exists(outDir):
    os.makedirs(outDir)
# Download the images
for image in inputImages:
    # URL format: CYTOMINEURL/api/imageinstance/$idOfMyImageInstance/download
    url = cytomine_host + "/api/imageinstance/" + str(image.id) + "/download"
    filename = str(image.id) + ".tif"
    conn.fetch_url_into_file(url, inDir + "/" + filename, True, True)
# Call the image analysis workflow in the docker image
shArgs = "data/in data/out " + radius + " " + tolerance
job = conn.update_job_status(job, status=job.RUNNING, progress=25, status_comment="Launching workflow...")
command = "docker run --rm -v " + jobFolder + ":/fiji/data neubiaswg5/spotdetection-imagej-fjlap " + shArgs
call(command, shell=True)  # waits for the subprocess to return
# Remove existing annotations, if any
for image in inputImages:
    annotations = conn.get_annotations(id_image=image.id)
    for annotation in annotations:
        conn.delete_annotation(annotation.id)
files = os.listdir(outDir)
job = conn.update_job_status(job, status=job.RUNNING, progress=50, status_comment="Extracting polygons...")
for image in inputImages:
    file = str(image.id) + ".tif.csv"
    path = outDir + "/" + file
    if os.path.isfile(path):
        (X, Y) = readcoords(path)
        for i in range(len(X)):
            circle = Point(X[i], image.height - Y[i])
            new_annotation = conn.add_annotation(circle.wkt, image.id)
    else:
        print(path + " does not exist")
# Cleanup - remove the downloaded images and the images created by the workflow
job = conn.update_job_status(job, status=job.TERMINATED, progress=90, status_comment="Cleaning up...")
for image in inputImages:
    file = str(image.id) + ".tif"
    path = outDir + "/" + file + ".csv"
    os.remove(path)
    path = inDir + "/" + file
    os.remove(path)
...
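One thing the script above does not do is report failure: if the Docker call raises, the Cytomine job stays in RUNNING. Below is a minimal sketch of how the same conn.update_job_status call could report an error; it assumes a FAILED status constant alongside the RUNNING and TERMINATED constants used above, and it is an illustrative addition, not part of the original script.

# Illustrative addition, not part of the original script: report a failure
# to Cytomine instead of leaving the job stuck in RUNNING.
# Assumes the same `conn`, `job`, and `command` objects as above, and a
# FAILED constant alongside the RUNNING/TERMINATED constants already used.
try:
    call(command, shell=True)
except Exception as e:
    job = conn.update_job_status(job, status=job.FAILED,
                                 status_comment="Workflow failed: %s" % e)
    raise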


vmix_db.py

Source: vmix_db.py (GitHub)


import logging.config
from os import path
import mysql.connector
from mysql.connector import errorcode

log_file_path = path.join(path.dirname(path.abspath(__file__)), 'logger.config')
logging.config.fileConfig(log_file_path)
logger = logging.getLogger('app_logger')

config = {
    'user': 'root',
    'password': 'password',
    'host': '127.0.0.1',
    'database': 'vmix_video',
    'raise_on_warnings': True
}

def record_file_info(file_info):
    try:
        cnx = mysql.connector.connect(**config)
        cursor = cnx.cursor()
        insert_job_record = ("INSERT INTO live_file(file_name, size, from_client, c_time, m_time) "
                             "VALUES (%s, %s, %s, %s, %s)")
        job_data = (file_info.file_name, file_info.size, file_info.from_client, file_info.c_time, file_info.m_time)
        cursor.execute(insert_job_record, job_data)
        file_id = cursor.lastrowid
        logger.info("live file record is inserted. %d %s" % (file_id, file_info.file_name))
        cnx.commit()
    except Exception as e:
        logger.error(e)
    finally:
        close_connect(cnx)

def record_upload_file_status(status, file_name, from_client, c_time):
    try:
        cnx = mysql.connector.connect(**config)
        cursor = cnx.cursor()
        update_job_status = ("UPDATE live_file SET status=%s "
                             "WHERE file_name=%s AND from_client=%s")
        status_param = (status, file_name, from_client)
        logger.info(update_job_status)
        cursor.execute(update_job_status, status_param)
        cnx.commit()
    except Exception as err:
        logger.error(err)
        raise err
    finally:
        close_connect(cnx)

def record_upload_job(by_client, files_amount):
    try:
        cnx = mysql.connector.connect(**config)
        cursor = cnx.cursor()
        insert_job_record = ("INSERT INTO upload_job(from_client, files_amount) "
                             "VALUES (%s, %s)")
        job_data = (by_client, files_amount)
        cursor.execute(insert_job_record, job_data)
        job_id = cursor.lastrowid
        logger.info("upload job id %d" % job_id)
        cnx.commit()
    except Exception as err:
        logger.error(err)
        raise err
    finally:
        close_connect(cnx)

def record_upload_job_status(job_id, status):
    try:
        cnx = mysql.connector.connect(**config)
        cursor = cnx.cursor()
        # Use a parameterized query; the original built the SQL with a
        # broken %-format that omitted the job_id placeholder.
        update_job_status = "UPDATE upload_job SET status=%s WHERE job_id=%s"
        logger.info(update_job_status)
        cursor.execute(update_job_status, (status, job_id))
        cnx.commit()
    except Exception as err:
        logger.error(err)
        raise err
    finally:
        close_connect(cnx)

def close_connect(cnx):
    try:
        cnx.close()
    except Exception as e:
        print(e)

def get_latest_upload_job(by_client):
    try:
        cnx = mysql.connector.connect(**config)
        cursor = cnx.cursor()
        query = "SELECT id FROM upload_job WHERE from_client=%s ORDER BY start_time DESC LIMIT 1"
        cursor.execute(query, (by_client,))
        record = cursor.fetchone()
        logger.info("job id %d " % record[0])
        return record[0]
    except Exception as err:
        logger.error(err)
        raise err
    finally:
        try:
            cnx.close()
        except Exception as e:
            ...
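The helpers above pair naturally: record_upload_job inserts the job row and record_upload_job_status updates it later. A short usage sketch follows; the client name, file count, and numeric status code are made-up example values, and it assumes the upload_job table from the snippet above.

# Hypothetical usage of the helpers above; values are illustrative only.
record_upload_job(by_client="camera-01", files_amount=3)
job_id = get_latest_upload_job(by_client="camera-01")
record_upload_job_status(job_id, status=2)  # e.g. 2 meaning "uploaded"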


worker.py

Source: worker.py (GitHub)


...
    Returns:
        None. This function continuously runs in the background.
    """
    update_job_status(jid, 'in progress')

    data = jdb.hgetall(f'job.{jid}')
    stored_data = json.loads(rd.get("data"))
    stored_data = stored_data[f'{data["LAT"]}, {data["LON"]}, {data["PARAMETER"]}, {data["YEAR"]}']
    xval = ['JAN', 'FEB', 'MAR', 'APR', 'MAY', 'JUN', 'JUL', 'AUG', 'SEP', 'OCT', 'NOV', 'DEC']
    yval = []
    for month in xval:
        yval.append(float(stored_data[month]))
    plt.xlabel("Months")
    plt.ylabel("Wind Speed")
    plt.title(f'Wind Speed in {data["YEAR"]} at Lat = {data["LAT"]} and Lon = {data["LON"]}')
    plt.plot(xval, yval)
    plt.savefig('/simple_line.png')
    with open('/simple_line.png', 'rb') as f:
        img = f.read()
    img_db.hset(f'job.{jid}', "image", img)
    jdb.hset(f'job.{jid}', 'status', 'finished')
    update_job_status(jid, 'finished')
    time.sleep(2)
    update_job_status(jid, 'complete')
...
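worker.py calls update_job_status, but its definition is not shown in the excerpt. Since the worker already writes job fields straight into the jdb Redis hash (jdb.hset(f'job.{jid}', 'status', ...)), a plausible sketch of the helper is the following; it is inferred from that usage, not taken from the repository.

# Assumed implementation, inferred from how worker.py stores job fields
# in the `jdb` Redis hash; not taken from the source repository.
def update_job_status(jid, status):
    """Set the 'status' field of the Redis hash for job `jid`."""
    jdb.hset(f'job.{jid}', 'status', status)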


Automation Testing Tutorials

Learn to execute automation testing from scratch with the LambdaTest Learning Hub, right from setting up the prerequisites and running your first automation test to following best practices and diving into advanced test scenarios. The LambdaTest Learning Hub compiles step-by-step guides to help you become proficient with different test automation frameworks such as Selenium, Cypress, and TestNG.

LambdaTest Learning Hubs:

YouTube

You can also refer to the video tutorials on the LambdaTest YouTube channel for step-by-step demonstrations from industry experts.

Run localstack automation tests on the LambdaTest cloud grid

Perform automation testing on 3000+ real desktop and mobile devices online.

