How to use the tasks method in fast-check-monorepo

Best JavaScript code snippets using fast-check-monorepo
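For orientation before the snippets: fast-check's documented entry points are fc.assert, fc.property and the arbitraries (fc.integer(), fc.array(), and so on); the snippets collected below are general task-management code from GitHub rather than calls into the fast-check API. A minimal property-based test sketch using only the public fast-check API looks like this (sortNumbers is a hypothetical function under test, not part of any snippet on this page):

const fc = require('fast-check');

// Hypothetical function under test.
const sortNumbers = (xs) => [...xs].sort((a, b) => a - b);

// Property: sorting never changes the length and always produces an ordered array.
fc.assert(
  fc.property(fc.array(fc.integer()), (xs) => {
    const sorted = sortNumbers(xs);
    if (sorted.length !== xs.length) return false;
    return sorted.every((v, i) => i === 0 || sorted[i - 1] <= v);
  })
);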

codcorrect_job_swarm.py

Source: codcorrect_job_swarm.py (GitHub)

...66 )67 self.task_map: Dict[str, Dict[str, PythonTask]] = defaultdict(dict)68 def construct_workflow(self) -> None:69 """Adds all of our tasks to our workflow."""70 self.create_cache_tasks()71 self.create_draw_validate_tasks()72 self.create_apply_correction_tasks()73 self.create_cause_aggregation_tasks()74 if gbd.measures.YLL in self.parameters.measure_ids:75 self.create_calculate_ylls_tasks()76 self.create_location_aggregation_tasks()77 self.create_append_shocks_tasks()78 self.create_summarize_tasks()79 self.create_upload_tasks()80 def run(self) -> None:81 """Runs the FauxCorrect workflow and checks for success or failure."""82 exit_code = self.workflow.run()83 if exit_code != DagExecutionStatus.SUCCEEDED:84 raise RuntimeError(85 f"Workflow dag_id: {self.workflow.dag_id} was unsuccessful, "86 "check the job_swarm database for more information."87 )88 def create_cache_tasks(self) -> None:89 """90 Adds up to five cache tasks that must occur at the beginning of each91 FauxCorrect run:92 1. envelope93 2. population94 3. spacetime restrictions95 4. regional scalars96 5. pred_ex97 These jobs kick off the CoDCorrect process and do not have any98 upstream dependencies.99 """100 # Create mortality inputs cache jobs.101 for mort_process, memory in zip(MortalityInputs.ALL_INPUTS,102 DAG.Tasks.Memory.CACHE_ALL_MORTALITY_INPUTS103 ):104 cache_mortality_task = PythonTask(105 script=os.path.join(106 self.code_dir, DAG.Executables.CACHE_MORT_INPUT107 ),108 args=[109 '--mort_process', mort_process,110 '--machine_process', self.parameters.process,111 '--version_id', self.parameters.version_id112 ],113 name=DAG.Tasks.Name.CACHE_MORTALITY.format(114 mort_process=mort_process115 ),116 num_cores=DAG.Tasks.Cores.CACHE_MORTALITY,117 m_mem_free=memory,118 max_runtime_seconds=DAG.Tasks.Runtime.CACHE_MORTALITY,119 queue=DAG.QUEUE,120 tag=DAG.Tasks.Type.CACHE121 )122 self.task_map[DAG.Tasks.Type.CACHE][cache_mortality_task.name] = (123 cache_mortality_task124 )125 self.workflow.add_task(cache_mortality_task)126 # Create cache_spacetime_restrictions job127 cache_spacetime_restrictions = PythonTask(128 script=os.path.join(129 self.code_dir, DAG.Executables.CORRECT130 ),131 args=[132 '--action', DAG.Tasks.Type.CACHE,133 '--parent_dir', self.parameters.parent_dir,134 '--gbd_round_id', self.parameters.gbd_round_id135 ],136 name=DAG.Tasks.Name.CACHE_SPACETIME,137 num_cores=DAG.Tasks.Cores.CACHE_SPACETIME_RESTRICTIONS,138 m_mem_free=DAG.Tasks.Memory.CACHE_SPACETIME_RESTRICTIONS,139 max_runtime_seconds=DAG.Tasks.Runtime.CACHE_SPACETIME_RESTRICTIONS,140 queue=DAG.QUEUE,141 tag=DAG.Tasks.Type.CACHE142 )143 self.task_map[DAG.Tasks.Type.CACHE][144 cache_spacetime_restrictions.name] = cache_spacetime_restrictions145 self.workflow.add_task(cache_spacetime_restrictions)146 # Create cache_regional_scalars job147 cache_regional_scalars = PythonTask(148 script=os.path.join(self.code_dir, DAG.Executables.LOC_AGG),149 args=[150 '--action', DAG.Tasks.Type.CACHE,151 '--parent_dir', self.parameters.parent_dir,152 '--gbd_round_id', self.parameters.gbd_round_id153 ],154 name=DAG.Tasks.Name.CACHE_REGIONAL_SCALARS,155 num_cores=DAG.Tasks.Cores.CACHE_REGIONAL_SCALARS,156 m_mem_free=DAG.Tasks.Memory.CACHE_REGIONAL_SCALARS,157 max_runtime_seconds=DAG.Tasks.Runtime.CACHE_REGIONAL_SCALARS,158 queue=DAG.QUEUE,159 tag=DAG.Tasks.Type.CACHE160 )161 self.task_map[DAG.Tasks.Type.CACHE][cache_regional_scalars.name] = (162 cache_regional_scalars163 )164 self.workflow.add_task(cache_regional_scalars)165 if gbd.measures.YLL in 
self.parameters.measure_ids:166 # Create cache_pred_ex job167 cache_pred_ex = PythonTask(168 script=os.path.join(self.code_dir, DAG.Executables.YLLS),169 args=[170 '--action', DAG.Tasks.Type.CACHE,171 '--parent_dir', self.parameters.parent_dir,172 '--gbd_round_id', self.parameters.gbd_round_id173 ],174 name=DAG.Tasks.Name.CACHE_PRED_EX,175 num_cores=DAG.Tasks.Cores.CACHE_PRED_EX,176 m_mem_free=DAG.Tasks.Memory.CACHE_PRED_EX,177 max_runtime_seconds=DAG.Tasks.Runtime.CACHE_PRED_EX,178 queue=DAG.QUEUE,179 tag=DAG.Tasks.Type.CACHE180 )181 self.task_map[DAG.Tasks.Type.CACHE][cache_pred_ex.name] = (182 cache_pred_ex183 )184 self.workflow.add_task(cache_pred_ex)185 def create_draw_validate_tasks(self) -> None:186 """187 Adds tasks to validate the input draws against expected hypercube188 Creates one job per model_version_id in our parameters of best models.189 Doesn't utilize any cached assets, so these jobs don't have any190 upstream dependencies.191 """192 for model_version_id in self.parameters.best_model_version_ids:193 draw_validate_task = PythonTask(194 script=os.path.join(195 self.code_dir, DAG.Executables.VALIDATE_DRAWS196 ),197 args=[198 '--version_id', self.parameters.version_id,199 '--machine_process', self.parameters.process,200 '--model_version_id', model_version_id201 ],202 name=DAG.Tasks.Name.VALIDATE_DRAWS.format(203 model_version_id=model_version_id204 ),205 num_cores=DAG.Tasks.Cores.VALIDATE_DRAWS,206 m_mem_free=DAG.Tasks.Memory.VALIDATE_DRAWS,207 max_runtime_seconds=DAG.Tasks.Runtime.VALIDATE_DRAWS,208 queue=DAG.QUEUE,209 tag=DAG.Tasks.Type.VALIDATE210 )211 self.task_map[DAG.Tasks.Type.VALIDATE][212 draw_validate_task.name213 ] = draw_validate_task214 self.workflow.add_task(draw_validate_task)215 def create_apply_correction_tasks(self) -> None:216 for sex in self.parameters.sex_ids:217 for location in self.parameters.most_detailed_location_ids:218 correct_task = PythonTask(219 script=os.path.join(220 self.code_dir, DAG.Executables.CORRECT221 ),222 args=[223 '--action', DAG.Tasks.Type.CORRECT,224 '--parent_dir', self.parameters.parent_dir,225 '--gbd_round_id', self.parameters.gbd_round_id,226 '--version_id', self.parameters.version_id,227 '--location_id', location,228 '--sex_id', sex,229 '--env_version_id', self.parameters.envelope_version_id230 ],231 name=DAG.Tasks.Name.APPLY_CORRECTION.format(232 location=location,233 sex=sex234 ),235 num_cores=DAG.Tasks.Cores.APPLY_CORRECTION,236 m_mem_free=DAG.Tasks.Memory.APPLY_CORRECTION,237 upstream_tasks=[238 self.task_map[DAG.Tasks.Type.CACHE][239 DAG.Tasks.Name.CACHE_MORTALITY.format(240 mort_process=MortalityInputs.ENVELOPE_DRAWS241 )242 ],243 self.task_map[DAG.Tasks.Type.CACHE][244 DAG.Tasks.Name.CACHE_MORTALITY.format(245 mort_process=(246 MortalityInputs.ENVELOPE_SUMMARY247 )248 )249 ]250 ],251 max_runtime_seconds=DAG.Tasks.Runtime.APPLY_CORRECTION,252 tag=DAG.Tasks.Type.CORRECT253 )254 for mvid in self.parameters.best_model_version_ids:255 upstream_name = DAG.Tasks.Name.VALIDATE_DRAWS.format(256 model_version_id=mvid257 )258 correct_task.add_upstream(259 self.task_map[DAG.Tasks.Type.VALIDATE][upstream_name]260 )261 self.task_map[DAG.Tasks.Type.CORRECT][262 correct_task.name263 ] = correct_task264 self.workflow.add_task(correct_task)265 def create_cause_aggregation_tasks(self) -> None:266 for sex in self.parameters.sex_ids:267 for location in self.parameters.most_detailed_location_ids:268 agg_cause_task = PythonTask(269 script=os.path.join(270 self.code_dir, DAG.Executables.CAUSE_AGG271 ),272 args=[273 '--version_id', 
self.parameters.version_id,274 '--parent_dir', self.parameters.parent_dir,275 '--location_id', location,276 '--sex_id', sex,277 ],278 name=DAG.Tasks.Name.CAUSE_AGGREGATION.format(279 location=location,280 sex=sex281 ),282 num_cores=DAG.Tasks.Cores.CAUSE_AGGREGATION,283 m_mem_free=DAG.Tasks.Memory.CAUSE_AGGREGATION,284 upstream_tasks=[285 self.task_map[DAG.Tasks.Type.CORRECT][286 DAG.Tasks.Name.APPLY_CORRECTION.format(287 location=location,288 sex=sex289 )290 ]291 ],292 max_runtime_seconds=DAG.Tasks.Runtime.CAUSE_AGGREGATION,293 tag=DAG.Tasks.Type.CAUSE_AGG294 )295 self.task_map[DAG.Tasks.Type.CAUSE_AGG][296 agg_cause_task.name297 ] = agg_cause_task298 self.workflow.add_task(agg_cause_task)299 def create_calculate_ylls_tasks(self) -> None:300 """301 Adds tasks to calculate YLLs from our scaled draws by location, sex,302 and year.303 Dependent on completed scalar application for respective locations,304 sexes, and years.305 """306 for sex in self.parameters.sex_ids:307 for location in self.parameters.most_detailed_location_ids:308 calc_ylls = PythonTask(309 script=os.path.join(310 self.code_dir, DAG.Executables.YLLS311 ),312 args=[313 '--action', DAG.Tasks.Type.CALCULATE,314 '--machine_process', self.parameters.process,315 '--parent_dir', self.parameters.parent_dir,316 '--location_id', location,317 '--sex_id', sex318 ],319 name=DAG.Tasks.Name.CALC_YLLS.format(320 location=location,321 sex=sex322 ),323 num_cores=DAG.Tasks.Cores.YLLS,324 m_mem_free=DAG.Tasks.Memory.YLLS,325 max_runtime_seconds=DAG.Tasks.Runtime.CALCULATE,326 upstream_tasks=[327 self.task_map[DAG.Tasks.Type.CACHE][328 DAG.Tasks.Name.CACHE_PRED_EX329 ],330 self.task_map[DAG.Tasks.Type.CAUSE_AGG][331 DAG.Tasks.Name.CAUSE_AGGREGATION.format(332 location=location,333 sex=sex334 )335 ]336 ],337 tag=DAG.Tasks.Type.CALCULATE338 )339 self.task_map[DAG.Tasks.Type.CALCULATE][340 calc_ylls.name341 ] = calc_ylls342 self.workflow.add_task(calc_ylls)343 def create_location_aggregation_tasks(self) -> None:344 """345 Adds tasks to aggregate up the location hierarchy for each location set346 id, sex, year, and measure in our FauxCorrect run.347 Dependent on each location, sex, and year specific group of scalar or348 calculate ylls tasks to be completed for their respective measure ids.349 """350 for location_set_id in self.parameters.location_set_ids:351 for year in self.parameters.year_ids:352 for measure in self.parameters.measure_ids:353 for loc_agg_type in LocationAggregation.Type.CODCORRECT:354 if (355 FilePaths.UNSCALED_DIR in loc_agg_type356 and357 measure == Measures.Ids.YLLS358 ):359 continue360 agg_task = PythonTask(361 script=os.path.join(362 self.code_dir,363 DAG.Executables.LOC_AGG364 ),365 args=[366 '--action', DAG.Tasks.Type.LOC_AGG,367 '--parent_dir', self.parameters.parent_dir,368 '--gbd_round_id', (369 self.parameters.gbd_round_id370 ),371 '--aggregation_type', loc_agg_type,372 '--location_set_id', location_set_id,373 '--year_id', year,374 '--measure_id', measure375 ],376 name=DAG.Tasks.Name.LOCATION_AGGREGATION.format(377 aggregation_type=(378 loc_agg_type.replace("/","_")379 ),380 location_set=location_set_id,381 measure=measure,382 year=year383 ),384 num_cores=DAG.Tasks.Cores.LOCATION_AGGREGATION,385 m_mem_free=(386 DAG.Tasks.Memory.LOCATION_AGGREGATION387 ),388 max_runtime_seconds=(389 DAG.Tasks.Runtime.LOCATION_AGGREGATION390 ),391 queue=DAG.QUEUE,392 upstream_tasks=[393 self.task_map[DAG.Tasks.Type.CACHE][394 DAG.Tasks.Name.CACHE_REGIONAL_SCALARS395 ]396 ],397 tag=DAG.Tasks.Type.LOC_AGG398 )399 # Attach upstream 
dependencies, all locations for400 # sex and year.401 for sex in self.parameters.sex_ids:402 for loc in (403 self.parameters.most_detailed_location_ids404 ):405 if measure == Measures.Ids.YLLS:406 # If aggregating measure 4 (YLLs), attach407 # calc ylls jobs as upstream408 agg_task.add_upstream(409 self.task_map[DAG.Tasks.Type.CALCULATE][410 DAG.Tasks.Name.CALC_YLLS.format(411 location=loc,412 sex=sex413 )414 ]415 )416 else:417 # If measure is not 4 (YLLs), then add418 # cause agg jobs as upstream.419 agg_task.add_upstream(420 self.task_map[DAG.Tasks.Type.CAUSE_AGG][421 (DAG.Tasks.Name.CAUSE_AGGREGATION422 .format(423 location=loc,424 sex=sex425 ))426 ]427 )428 if location_set_id not in [429 LocationSetId.OUTPUTS, LocationSetId.SDI,430 LocationSetId.STANDARD431 ]:432 # If location set is one of the special433 # sets that central computation only aggregates434 # at the end of a round, add the outputs location435 # set as upstream dependency so it finishes first.436 agg_task.add_upstream(437 self.task_map[DAG.Tasks.Type.LOC_AGG][438 DAG.Tasks.Name.LOCATION_AGGREGATION.format(439 aggregation_type=(440 loc_agg_type.replace("/","_")441 ),442 location_set=LocationSetId.OUTPUTS,443 measure=measure,444 year=year445 )446 ]447 )448 self.task_map[DAG.Tasks.Type.LOC_AGG][449 agg_task.name450 ] = agg_task451 self.workflow.add_task(agg_task)452 def create_append_shocks_tasks(self) -> None:453 """454 Adds tasks to append shocks to Deaths and YLLs by location, sex,455 and year.456 Dependent on completed correction application for respective locations,457 sexes, and years. Also dependent on location aggregation.458 """459 for sex in self.parameters.sex_ids:460 for location in self.parameters.location_ids:461 most_detailed_location = (462 location in self.parameters.most_detailed_location_ids463 )464 append_shocks = PythonTask(465 script=os.path.join(466 self.code_dir, DAG.Executables.APPEND_SHOCKS467 ),468 args=[469 '--parent_dir', self.parameters.parent_dir,470 '--machine_process', self.parameters.process,471 '--measure_ids',472 " ".join([str(x) for x in self.parameters.measure_ids]),473 '--location_id', location,474 '--most_detailed_location', most_detailed_location,475 '--sex_id', sex476 ],477 name=DAG.Tasks.Name.APPEND_SHOCKS.format(478 location=location, sex=sex479 ),480 num_cores=DAG.Tasks.Cores.APPEND_SHOCKS,481 m_mem_free=DAG.Tasks.Memory.APPEND_SHOCKS,482 max_runtime_seconds=DAG.Tasks.Runtime.APPEND,483 tag=DAG.Tasks.Type.APPEND484 )485 if (486 most_detailed_location487 and488 Measures.Ids.YLLS in self.parameters.measure_ids489 ):490 # attach calc ylls jobs as upstream491 append_shocks.add_upstream(492 self.task_map[DAG.Tasks.Type.CALCULATE][493 DAG.Tasks.Name.CALC_YLLS.format(494 location=location,495 sex=sex496 )497 ]498 )499 elif (500 most_detailed_location501 and502 Measures.Ids.YLLS not in self.parameters.measure_ids503 ):504 # add cause agg jobs as upstream.505 append_shocks.add_upstream(506 self.task_map[DAG.Tasks.Type.CAUSE_AGG][507 (DAG.Tasks.Name.CAUSE_AGGREGATION508 .format(509 location=location,510 sex=sex511 ))512 ]513 )514 else:515 # Add location aggregation tasks as upstream dependency.516 for location_set_id in self.parameters.location_set_ids:517 for loc_agg_year in self.parameters.year_ids:518 for loc_agg_measure in self.parameters.measure_ids:519 for loc_agg_type in (520 LocationAggregation.Type.CODCORRECT521 ):522 if (523 FilePaths.UNSCALED_DIR in loc_agg_type524 and525 loc_agg_measure == Measures.Ids.YLLS526 ):527 continue528 append_shocks.add_upstream(529 self.task_map[530 
DAG.Tasks.Type.LOC_AGG][(531 DAG.Tasks.Name.LOCATION_AGGREGATION532 .format(533 aggregation_type=(534 loc_agg_type535 .replace("/","_")536 ),537 location_set=location_set_id,538 measure=loc_agg_measure,539 year=loc_agg_year,540 )541 )]542 )543 self.task_map[DAG.Tasks.Type.APPEND][544 append_shocks.name545 ] = append_shocks546 self.workflow.add_task(append_shocks)547 def create_summarize_tasks(self) -> None:548 """549 Adds tasks to summarize draw level estimates for each location, year,550 and measure in our FauxCorrect run.551 Dependent on the mortality input caching tasks as well as the append552 shocks tasks.553 """554 for measure in self.parameters.measure_ids:555 for year in self.parameters.year_ids:556 for location in self.parameters.location_ids:557 # Create summarize tasks for gbd schema.558 summarize_gbd_task = PythonTask(559 script=os.path.join(560 self.code_dir,561 DAG.Executables.SUMMARIZE_GBD562 ),563 args=[564 '--parent_dir', self.parameters.parent_dir,565 '--gbd_round_id', self.parameters.gbd_round_id,566 '--location_id', location,567 '--year_id', year,568 '--measure_id', measure,569 '--machine_process', self.parameters.process570 ],571 name=DAG.Tasks.Name.SUMMARIZE_GBD.format(572 measure=measure, location=location, year=year573 ),574 num_cores=DAG.Tasks.Cores.SUMMARIZE,575 m_mem_free=DAG.Tasks.Memory.SUMMARIZE,576 max_runtime_seconds=DAG.Tasks.Runtime.SUMMARIZE,577 queue=DAG.QUEUE,578 upstream_tasks=[579 self.task_map[DAG.Tasks.Type.CACHE][580 DAG.Tasks.Name.CACHE_MORTALITY.format(581 mort_process=MortalityInputs.ENVELOPE_DRAWS582 )583 ],584 self.task_map[DAG.Tasks.Type.CACHE][585 DAG.Tasks.Name.CACHE_MORTALITY.format(586 mort_process=(587 MortalityInputs.ENVELOPE_SUMMARY588 )589 )590 ],591 self.task_map[DAG.Tasks.Type.CACHE][592 DAG.Tasks.Name.CACHE_MORTALITY.format(593 mort_process=MortalityInputs.POPULATION594 )595 ]596 ],597 tag=DAG.Tasks.Type.SUMMARIZE598 )599 for append_sex in self.parameters.sex_ids:600 summarize_gbd_task.add_upstream(601 self.task_map[DAG.Tasks.Type.APPEND][602 DAG.Tasks.Name.APPEND_SHOCKS.format(603 location=location,604 sex=append_sex605 )606 ]607 )608 self.task_map[DAG.Tasks.Type.SUMMARIZE][609 summarize_gbd_task.name610 ] = summarize_gbd_task611 self.workflow.add_task(summarize_gbd_task)612 if (613 measure == Measures.Ids.YLLS614 or615 DataBases.COD not in self.parameters.databases616 ):617 continue618 # Create summarize tasks for deaths and cod schema.619 summarize_cod_task = PythonTask(620 script=os.path.join(621 self.code_dir,622 DAG.Executables.SUMMARIZE_COD623 ),624 args=[625 '--version_id', self.parameters.version_id,626 '--parent_dir', self.parameters.parent_dir,627 '--gbd_round_id', self.parameters.gbd_round_id,628 '--location_id', location,629 '--year_id', year630 ],631 name=DAG.Tasks.Name.SUMMARIZE_COD.format(632 measure=Measures.Ids.DEATHS,633 location=location,634 year=year635 ),636 num_cores=DAG.Tasks.Cores.SUMMARIZE,637 m_mem_free=DAG.Tasks.Memory.SUMMARIZE,638 max_runtime_seconds=DAG.Tasks.Runtime.SUMMARIZE,639 queue=DAG.QUEUE,640 upstream_tasks=[641 self.task_map[DAG.Tasks.Type.CACHE][642 DAG.Tasks.Name.CACHE_MORTALITY.format(643 mort_process=MortalityInputs.ENVELOPE_DRAWS644 )645 ],646 self.task_map[DAG.Tasks.Type.CACHE][647 DAG.Tasks.Name.CACHE_MORTALITY.format(648 mort_process=(649 MortalityInputs.ENVELOPE_SUMMARY650 )651 )652 ],653 self.task_map[DAG.Tasks.Type.CACHE][654 DAG.Tasks.Name.CACHE_MORTALITY.format(655 mort_process=MortalityInputs.POPULATION656 )657 ]658 ],659 tag=DAG.Tasks.Type.SUMMARIZE660 )661 for append_sex in 
self.parameters.sex_ids:662 summarize_cod_task.add_upstream(663 self.task_map[DAG.Tasks.Type.APPEND][664 DAG.Tasks.Name.APPEND_SHOCKS.format(665 location=location,666 sex=append_sex667 )668 ]669 )670 self.task_map[DAG.Tasks.Type.SUMMARIZE][671 summarize_cod_task.name672 ] = summarize_cod_task673 self.workflow.add_task(summarize_cod_task)674 # pct_change summarization tasks675 if self.parameters.year_start_ids:676 for year_index in range(len(self.parameters.year_start_ids)):677 for pctc_location in self.parameters.location_ids:678 summarize_pct_change_task = PythonTask(679 script=os.path.join(680 self.code_dir,681 DAG.Executables.SUMMARIZE_PCT_CHANGE682 ),683 args=[684 '--parent_dir', self.parameters.parent_dir,685 '--gbd_round_id', self.parameters.gbd_round_id,686 '--location_id', pctc_location,687 '--year_start_id', self.parameters.year_start_ids[688 year_index],689 '--year_end_id', self.parameters.year_end_ids[690 year_index],691 '--measure_id', measure,692 '--machine_process', self.parameters.process693 ],694 name=DAG.Tasks.Name.SUMMARIZE_PCT_CHANGE.format(695 measure=measure, location=pctc_location,696 year_start=self.parameters.year_start_ids[year_index],697 year_end=self.parameters.year_end_ids[year_index]698 ),699 num_cores=DAG.Tasks.Cores.PCT_CHANGE,700 m_mem_free=DAG.Tasks.Memory.PCT_CHANGE,701 max_runtime_seconds=DAG.Tasks.Runtime.SUMMARIZE,702 queue=DAG.QUEUE,703 upstream_tasks=[704 self.task_map[DAG.Tasks.Type.CACHE][705 DAG.Tasks.Name.CACHE_MORTALITY.format(706 mort_process=MortalityInputs.ENVELOPE_DRAWS707 )708 ],709 self.task_map[DAG.Tasks.Type.CACHE][710 DAG.Tasks.Name.CACHE_MORTALITY.format(711 mort_process=(712 MortalityInputs.ENVELOPE_SUMMARY713 )714 )715 ],716 self.task_map[DAG.Tasks.Type.CACHE][717 DAG.Tasks.Name.CACHE_MORTALITY.format(718 mort_process=MortalityInputs.POPULATION719 )720 ]721 ],722 tag=DAG.Tasks.Type.SUMMARIZE723 )724 for append_sex in self.parameters.sex_ids:725 summarize_pct_change_task.add_upstream(726 self.task_map[DAG.Tasks.Type.APPEND][727 DAG.Tasks.Name.APPEND_SHOCKS.format(728 location=pctc_location,729 sex=append_sex730 )731 ]732 )733 self.task_map[DAG.Tasks.Type.SUMMARIZE][734 summarize_pct_change_task.name735 ] = summarize_pct_change_task736 self.workflow.add_task(summarize_pct_change_task)737 def create_upload_tasks(self) -> None:738 """739 Adds tasks to upload summaries to gbd and / or cod databases.740 """741 # Determine if percent change is part of workflow742 if self.parameters.year_start_ids is not None:743 upload_types = ['single', 'multi']744 else:745 upload_types = ['single']746 # gbd upload tasks747 for measure in self.parameters.measure_ids:748 for upload_type in upload_types:749 upload_gbd_task = PythonTask(750 script=os.path.join(751 self.code_dir,...
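The snippet above builds a workflow in which every PythonTask declares the tasks it depends on (upstream_tasks, add_upstream) before the whole DAG is run. A minimal, library-agnostic sketch of that same pattern in JavaScript is shown below; the Task and Workflow names are illustrative and are not part of the snippet above or of any workflow library:

// Minimal sketch of a task DAG with upstream dependencies (illustrative names).
class Task {
  constructor(name, run) {
    this.name = name;
    this.run = run;
    this.upstreams = [];
  }
  addUpstream(task) {
    this.upstreams.push(task);
  }
}

class Workflow {
  constructor() {
    this.tasks = [];
  }
  addTask(task) {
    this.tasks.push(task);
  }
  // Run each task only after all of its upstream tasks have finished.
  async run() {
    const started = new Map(); // task name -> Promise
    const runTask = (task) => {
      if (!started.has(task.name)) {
        started.set(
          task.name,
          Promise.all(task.upstreams.map(runTask)).then(() => task.run())
        );
      }
      return started.get(task.name);
    };
    await Promise.all(this.tasks.map(runTask));
  }
}

// Usage: the cache task runs first, the correction task depends on it.
const wf = new Workflow();
const cache = new Task('cache_mortality', async () => console.log('cache'));
const correct = new Task('apply_correction', async () => console.log('correct'));
correct.addUpstream(cache);
wf.addTask(cache);
wf.addTask(correct);
wf.run();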

TaskGroup.js

Source: TaskGroup.js (GitHub)

import AsyncTask from './AsyncTask';
const Maker = require('./Maker');
import { TimeOut } from './TimeOut';
/**
 * Class for working with groups of tasks
 */
class TaskGroup {
  constructor(taskGroupName, tasks) {
    this.tasks = new Map();
    this.timer = new TimeOut();
    this.taskGroupName = taskGroupName;
    if (Array.isArray(tasks) && tasks.length >= 1) {
      tasks.forEach((task) => {
        const name = task.name || Maker.uniqueId();
        const once = task.once || 0;
        const { func } = task;
        this.tasks.set(
          name,
          new AsyncTask({
            name,
            once,
            func
          })
        );
      });
    }
    return taskGroupName;
  }
  /**
   * Task group name
   * @returns {*}
   * @constructor
   */
  get Name() {
    return this.taskGroupName;
  }
  getTasksExcept(tasks, exceptTask) {
    tasks = tasks || this.tasks;
    const filteredTasks = [];
    tasks.forEach((tsk) => {
      if (exceptTask.Name !== tsk.Name) filteredTasks.push(tsk);
    });
    return filteredTasks;
  }
  /**
   *
   * @param tasks
   * @returns {string}
   */
  firstTask(tasks) {
    let result = null;
    let i = 1;
    tasks = tasks || this.tasks;
    tasks.forEach((task) => {
      if (i === 1) {
        result = task;
      }
      i += 1;
    });
    return result;
  }
  /**
   * Add a single task
   * @param name
   * @param once
   * @param func
   * @returns {*}
   */
  addTask(name, once, func) {
    name = name || Maker.uniqueId();
    this.tasks.set(
      name,
      new AsyncTask({
        name,
        once,
        func
      })
    );
    return name;
  }
  /**
   * Add an array of tasks to the group
   * @param tasks
   */
  addTasks(tasks) {
    if (Array.isArray(tasks) && tasks.length >= 1) {
      tasks.forEach((task) => {
        const name = task.name || Maker.uniqueId();
        const once = task.once || 0;
        const { func } = task;
        this.tasks.set(
          name,
          new AsyncTask({
            name,
            once,
            func
          })
        );
      });
    }
  }
  clear() {
    this.tasks.clear();
  }
  delete(taskName) {
    return this.tasks.delete(taskName);
  }
  /**
   * Checks whether a task with the given name exists
   * @param name
   * @returns {boolean}
   */
  taskExists(name) {
    return this.tasks.has(name);
  }
  /**
   * Run the task with the given name
   * @param name: task name
   * @param params
   * @returns {Promise<*>}
   */
  // eslint-disable-next-line require-await
  async runTaskByName(name, params = {}) {
    if (this.taskExists(name)) {
      return this.tasks.get(name).run(params);
    }
    return false;
  }
  // eslint-disable-next-line require-await
  async runTaskByNameDebounce(name, wait, params = {}) {
    if (this.taskExists(name)) {
      return this.tasks.get(name).runDebounce(params, wait);
    }
    return false;
  }
  // eslint-disable-next-line require-await
  async runTasksParallel(tasks) {
    if (Array.isArray(tasks) && tasks.length >= 1) {
      tasks.forEach((task) => {
        const { name, params } = task;
        this.runTaskByName(name, params);
      });
    }
    return true;
  }
  // eslint-disable-next-line require-await
  async runTasksParallelDebounce(tasks) {
    if (Array.isArray(tasks) && tasks.length >= 1) {
      tasks.forEach((task) => {
        const { name, params, wait } = task;
        this.runTaskByNameDebounce(name, wait, params);
      });
    }
    return true;
  }
  async runTasksParallelWithPause(tasks, pause) {
    if (Array.isArray(tasks) && tasks.length >= 1) {
      const { name, params } = tasks[0];
      const filtered = tasks.filter((task, index) => index !== 0);
      if (this.taskExists(name)) {
        await this.timer.pause(pause);
        this.runTaskByName(name, params);
        this.runTasksParallelWithPause(filtered, pause);
      }
    }
    return true;
  }
  /**
   * Runs an array of tasks one after another
   * @param tasks: array of tasks
   * @returns {Promise<boolean>}
   */
  async runTasksOneByOne(tasks) {
    if (Array.isArray(tasks) && tasks.length >= 1) {
      const { name, params } = tasks[0];
      const filtered = tasks.filter((task, index) => index !== 0);
      if (this.taskExists(name)) {
        await this.runTaskByName(name, params);
        this.runTasksOneByOne(filtered);
      }
    }
    return true;
  }
  /**
   * Runs an array of tasks one after another, honoring each task's wait delay
   * @param tasks: array of tasks
   * @returns {Promise<boolean>}
   */
  async runTasksOneByOneDebounce(tasks) {
    if (Array.isArray(tasks) && tasks.length >= 1) {
      const { name, params, wait } = tasks[0];
      const filtered = tasks.filter((task, index) => index !== 0);
      if (this.taskExists(name)) {
        await this.runTaskByNameDebounce(name, wait, params);
        this.runTasksOneByOneDebounce(filtered);
      }
    }
    return true;
  }
  /**
   * Runs an array of tasks one after another with the same pause for all of them
   * @param tasks: array of tasks
   * @param pause
   * @returns {Promise<boolean>}
   */
  async runTasksOneByOneWithPause(tasks, pause) {
    if (Array.isArray(tasks) && tasks.length >= 1) {
      const { name, params } = tasks[0];
      const filtered = tasks.filter((task, index) => index !== 0);
      if (this.taskExists(name)) {
        await this.timer.pause(pause);
        await this.runTaskByName(name, params);
        this.runTasksOneByOneWithPause(filtered, pause);
      }
    }
    return true;
  }
  /**
   * Runs tasks after the first given task has run.
   * If the first function completed successfully and returned true,
   * then the array of tasks is run.
   * @param name: name of the first task
   * @param params: parameters of the first task
   * @param tasks: array of tasks to run
   * @returns {Promise<boolean>}
   */
  async runTasksOneByOneAfter({ name, params = {} }, tasks) {
    if (this.taskExists(name)) {
      try {
        const result = await this.runTaskByName(name, params);
        if (result) this.runTasksOneByOne(tasks);
      } catch (e) {
        throw e;
      }
    }
    return true;
  }
  /**
   * Runs an array of tasks one by one, after running the first task
   * with a wait delay
   * @param name: name of the first task
   * @param wait: how long to wait before running the first task
   * @param params: parameters of the first task
   * @param tasks: array of tasks to run
   * @returns {Promise<boolean>}
   */
  async runTasksOneByOneAfterDebounce({ name, wait, params = {} }, tasks) {
    if (this.taskExists(name)) {
      try {
        const result = await this.runTaskByNameDebounce(name, wait, params);
        if (result) this.runTasksOneByOne(tasks);
      } catch (e) {
        throw e;
      }
    }
    return true;
  }
  /**
   * Runs an array of tasks one by one with a pause between them,
   * after running the first task with a wait delay
   * @param name: name of the first task
   * @param wait: how long to wait before running the first task
   * @param pause: waiting interval between the tasks
   * @param params: parameters of the first task
   * @param tasks: array of tasks to run
   * @returns {Promise<boolean>}
   */
  async runTasksOneByOneAfterDebounceWithPause(
    { name, wait, params = {}, pause },
    tasks
  ) {
    if (this.taskExists(name)) {
      try {
        const result = await this.runTaskByNameDebounce(name, wait, params);
        await this.timer.pause(pause || 4);
        if (result) this.runTasksOneByOne(tasks);
      } catch (e) {
        throw e;
      }
    }
    return true;
  }
  // eslint-disable-next-line require-await
  async runAllTasksParallel() {
    this.tasks.forEach((task) => {
      const name = task.Name;
      this.runTaskByName(name);
    });
    return true;
  }
  async runAllTasksParallelWithPause(pause, tasks) {
    tasks = tasks || this.tasks;
    if (Array.isArray(tasks) && tasks.length === 0) return true;
    const firstTask = this.firstTask(tasks);
    const filteredTasks = this.getTasksExcept(tasks, firstTask);
    await this.timer.pause(pause);
    this.runTaskByName(firstTask.Name);
    this.runAllTasksParallelWithPause(pause, filteredTasks);
    return true;
  }
  async runAllTasksOneByOne(tasks) {
    tasks = tasks || this.tasks;
    if (Array.isArray(tasks) && tasks.length === 0) return true;
    const firstTask = this.firstTask(tasks);
    const filteredTasks = this.getTasksExcept(tasks, firstTask);
    await this.runTaskByName(firstTask.Name);
    this.runAllTasksOneByOne(filteredTasks);
    return true;
  }
  async runAllTasksOneByOneWithPause(pause, tasks) {
    tasks = tasks || this.tasks;
    if (Array.isArray(tasks) && tasks.length === 0) return true;
    const firstTask = this.firstTask(tasks);
    const filteredTasks = this.getTasksExcept(tasks, firstTask);
    await this.timer.pause(pause);
    await this.runTaskByName(firstTask.Name);
    this.runAllTasksOneByOneWithPause(pause, filteredTasks);
    return true;
  }
}
...

doneTask.js

Source: doneTask.js (GitHub)

1export {voiceMessage};2const doneBtn = document.getElementById('doneTask');3const youSaidOutput = document.getElementById('youSaid');4const tableOut = document.getElementById('doneOut');5const clearDoneTasksBtn = document.getElementById('clearDoneTasks');6let SpeechRecognition = window.SpeechRecognition || window.webkitSpeechRecognition;7let recognition = new SpeechRecognition();8doneBtn.addEventListener('click' , (e) => {9 e.preventDefault();10 recognition.start();11});12//FUNKCIJA ZA VOICE MESSAGEOM13function voiceMessage(){14 let speech = new SpeechSynthesisUtterance();15 speech.text = "Speak";16 let synt = window.speechSynthesis;17 synt.speak(speech);18}19//GLAVNA INICIJACIJA S20recognition.onstart = function() {21 console.log('voice is activited, speak now!');22 voiceMessage();23}24 //event hold string a we talking about - sad imamo dostupno sve što kažemo25recognition.onresult = function(event) {26 //access to izgovorenu poruku27 let current = event.resultIndex;28 let transcript = event.results[current][0].transcript;29 //show what user said30 youSaidOutput.innerHTML = transcript;31 //filter control word32 DoneControlWord(transcript);33}34recognition.onend = function(event) {35 console.log('GOTOVO');36}37//FUNKCIJA KOJA PROVJERAVA DA LI IMA RIJEČ OK38function DoneControlWord(transcript){39 console.log(transcript);40 //PROVJERAVA DA LI U PORUCI IMA NEKA OD RIJEČI IZ CONDITIONSA, AKO DA VRAĆA TRUE!41 var conditions = ['ok'];42 var test = conditions.some(el => transcript.includes(el));43 //ako nema kontrolne riječi44 if(!test){45 alert(`You Said: "${transcript}" --- at the end use command words (OK)`);46 }47 //za dodavanje48 if(transcript.includes('ok')){49 checkTaskInLocalStorage(transcript);50 }51}52//FUNKCIJA KOJA MIČE IZ PENDING TASKA ZADATAK I STAVLJA GA U NOVU TABLICU DONE TASKS53function checkTaskInLocalStorage(transcript){54 let store = localStorage;55 let CurrentTasks = JSON.parse(store.getItem('tasks'));56 var conditions = ['one' , 1 , 'two' , 2 , 'three' , 3 , 'four' , 4 , 'five' , 5 , 'six' , 6 , 'seven' , 7 , 'eight' , 8 , 'nine' , 9 , 'ten' , 10 ];57 var test = conditions.some(el => transcript.includes(el));58 if(!test){59 alert(`You Said: "${transcript}" --- use number of undone task`);60 }61 if(transcript.includes('one') ||transcript.includes(1)){62 //SPREMA OBAVLJENI ZADATAK U L STORAGE63 let task = CurrentTasks[0].task;64 let DoneTask = {65 task : task66 }67 // console.log(DoneTask);68 if(store.getItem('DoneTasks') === null){69 let doneTasks = [];70 doneTasks.push(DoneTask);71 store.setItem('DoneTasks' , JSON.stringify(doneTasks));72 }else{73 let doneTasks = JSON.parse(store.getItem('DoneTasks'));74 doneTasks.push(DoneTask);75 store.setItem('DoneTasks' , JSON.stringify(doneTasks));76 }77 //obriši iz localStoragea iz tablice tasks78 let tasks = JSON.parse(store.getItem('tasks'));79 tasks.splice(0 , 1);80 store.setItem('tasks' , JSON.stringify(tasks));81 }82 if(transcript.includes('two') ||transcript.includes(2)){83 //SPREMA OBAVLJENI ZADATAK U L STORAGE84 let task = CurrentTasks[1].task;85 let DoneTask = {86 task : task87 }88 if(store.getItem('DoneTasks') === null){89 let doneTasks = [];90 doneTasks.push(DoneTask);91 store.setItem('DoneTasks' , JSON.stringify(doneTasks));92 }else{93 let doneTasks = JSON.parse(store.getItem('DoneTasks'));94 doneTasks.push(DoneTask);95 store.setItem('DoneTasks' , JSON.stringify(doneTasks));96 }97 //obriši iz localStoragea iz tablice tasks98 let tasks = JSON.parse(store.getItem('tasks'));99 tasks.splice(1 , 1);100 store.setItem('tasks' , 
JSON.stringify(tasks));101 }102 if(transcript.includes('three') ||transcript.includes(3)){103 //SPREMA OBAVLJENI ZADATAK U L STORAGE104 let task = CurrentTasks[2].task;105 let DoneTask = {106 task : task107 }108 if(store.getItem('DoneTasks') === null){109 let doneTasks = [];110 doneTasks.push(DoneTask);111 store.setItem('DoneTasks' , JSON.stringify(doneTasks));112 }else{113 let doneTasks = JSON.parse(store.getItem('DoneTasks'));114 doneTasks.push(DoneTask);115 store.setItem('DoneTasks' , JSON.stringify(doneTasks));116 }117 //obriši iz localStoragea iz tablice tasks118 let tasks = JSON.parse(store.getItem('tasks'));119 tasks.splice(2 , 1);120 store.setItem('tasks' , JSON.stringify(tasks));121 }122 if(transcript.includes('four') ||transcript.includes(4)){123 //SPREMA OBAVLJENI ZADATAK U L STORAGE124 let task = CurrentTasks[3].task;125 let DoneTask = {126 task : task127 }128 if(store.getItem('DoneTasks') === null){129 let doneTasks = [];130 doneTasks.push(DoneTask);131 store.setItem('DoneTasks' , JSON.stringify(doneTasks));132 }else{133 let doneTasks = JSON.parse(store.getItem('DoneTasks'));134 doneTasks.push(DoneTask);135 store.setItem('DoneTasks' , JSON.stringify(doneTasks));136 }137 //obriši iz localStoragea iz tablice tasks138 let tasks = JSON.parse(store.getItem('tasks'));139 tasks.splice(3 , 1);140 store.setItem('tasks' , JSON.stringify(tasks));141 }142 if(transcript.includes('five') ||transcript.includes(5)){143 //SPREMA OBAVLJENI ZADATAK U L STORAGE144 let task = CurrentTasks[4].task;145 let DoneTask = {146 task : task147 }148 if(store.getItem('DoneTasks') === null){149 let doneTasks = [];150 doneTasks.push(DoneTask);151 store.setItem('DoneTasks' , JSON.stringify(doneTasks));152 }else{153 let doneTasks = JSON.parse(store.getItem('DoneTasks'));154 doneTasks.push(DoneTask);155 store.setItem('DoneTasks' , JSON.stringify(doneTasks));156 }157 //obriši iz localStoragea iz tablice tasks158 let tasks = JSON.parse(store.getItem('tasks'));159 tasks.splice(4 , 1);160 store.setItem('tasks' , JSON.stringify(tasks));161 }162 if(transcript.includes('six') ||transcript.includes(6)){163 //SPREMA OBAVLJENI ZADATAK U L STORAGE164 let task = CurrentTasks[5].task;165 let DoneTask = {166 task : task167 }168 if(store.getItem('DoneTasks') === null){169 let doneTasks = [];170 doneTasks.push(DoneTask);171 store.setItem('DoneTasks' , JSON.stringify(doneTasks));172 }else{173 let doneTasks = JSON.parse(store.getItem('DoneTasks'));174 doneTasks.push(DoneTask);175 store.setItem('DoneTasks' , JSON.stringify(doneTasks));176 }177 //obriši iz localStoragea iz tablice tasks178 let tasks = JSON.parse(store.getItem('tasks'));179 tasks.splice(5 , 1);180 store.setItem('tasks' , JSON.stringify(tasks));181 }182 if(transcript.includes('seven') ||transcript.includes(7)){183 //SPREMA OBAVLJENI ZADATAK U L STORAGE184 let task = CurrentTasks[6].task;185 let DoneTask = {186 task : task187 }188 if(store.getItem('DoneTasks') === null){189 let doneTasks = [];190 doneTasks.push(DoneTask);191 store.setItem('DoneTasks' , JSON.stringify(doneTasks));192 }else{193 let doneTasks = JSON.parse(store.getItem('DoneTasks'));194 doneTasks.push(DoneTask);195 store.setItem('DoneTasks' , JSON.stringify(doneTasks));196 }197 //obriši iz localStoragea iz tablice tasks198 let tasks = JSON.parse(store.getItem('tasks'));199 tasks.splice(6 , 1);200 store.setItem('tasks' , JSON.stringify(tasks));201 }202 if(transcript.includes('eight') ||transcript.includes(8)){203 //SPREMA OBAVLJENI ZADATAK U L STORAGE204 let task = CurrentTasks[7].task;205 let DoneTask = 
{206 task : task207 }208 if(store.getItem('DoneTasks') === null){209 let doneTasks = [];210 doneTasks.push(DoneTask);211 store.setItem('DoneTasks' , JSON.stringify(doneTasks));212 }else{213 let doneTasks = JSON.parse(store.getItem('DoneTasks'));214 doneTasks.push(DoneTask);215 store.setItem('DoneTasks' , JSON.stringify(doneTasks));216 }217 //obriši iz localStoragea iz tablice tasks218 let tasks = JSON.parse(store.getItem('tasks'));219 tasks.splice(7 , 1);220 store.setItem('tasks' , JSON.stringify(tasks));221 }222 if(transcript.includes('nine') ||transcript.includes(9)){223 //SPREMA OBAVLJENI ZADATAK U L STORAGE224 let task = CurrentTasks[8].task;225 let DoneTask = {226 task : task227 }228 if(store.getItem('DoneTasks') === null){229 let doneTasks = [];230 doneTasks.push(DoneTask);231 store.setItem('DoneTasks' , JSON.stringify(doneTasks));232 }else{233 let doneTasks = JSON.parse(store.getItem('DoneTasks'));234 doneTasks.push(DoneTask);235 store.setItem('DoneTasks' , JSON.stringify(doneTasks));236 }237 //obriši iz localStoragea iz tablice tasks238 let tasks = JSON.parse(store.getItem('tasks'));239 tasks.splice(8 , 1);240 store.setItem('tasks' , JSON.stringify(tasks));241 }242 if(transcript.includes('ten') ||transcript.includes(10)){243 //SPREMA OBAVLJENI ZADATAK U L STORAGE244 let task = CurrentTasks[9].task;245 let DoneTask = {246 task : task247 }248 if(store.getItem('DoneTasks') === null){249 let doneTasks = [];250 doneTasks.push(DoneTask);251 store.setItem('DoneTasks' , JSON.stringify(doneTasks));252 }else{253 let doneTasks = JSON.parse(store.getItem('DoneTasks'));254 doneTasks.push(DoneTask);255 store.setItem('DoneTasks' , JSON.stringify(doneTasks));256 }257 //obriši iz localStoragea iz tablice tasks258 let tasks = JSON.parse(store.getItem('tasks'));259 tasks.splice(9 , 1);260 store.setItem('tasks' , JSON.stringify(tasks));261 }262}263//FUNKCIJA ZA NAPRAVITI NEW TABLE ROW I PRIKAZATI TASK264function showTaskOutput(){265 let store = localStorage;266 let doneTasks = JSON.parse(store.getItem('DoneTasks'));267 let i = 1;268 if(doneTasks === null){269 console.log('Izvršeni zadataci su trenutno prazni!')270 }else{271 doneTasks.forEach( (task) => {272 let row = document.createElement('tr');273 row.innerHTML = `274 <td>${i}</td>275 <td>${task.task}</td>276 <td style="font-weight: 600" class="green lighten-2 center white-text">DONE</td>277 `278 tableOut.appendChild(row);279 i++;280 });281 }282}283//show it from lStore to html284showTaskOutput();285//FUNKCIJA CLEAR DONE TASKS286clearDoneTasksBtn.addEventListener('click' , (e) => {287e.preventDefault();288localStorage.removeItem('DoneTasks');289location.reload();...
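The ten nearly identical if blocks in the listing above differ only in the spoken number they match and the index they remove from the pending list. A hedged sketch of how the same behaviour could be collapsed into one data-driven helper, assuming the same localStorage keys ('tasks' and 'DoneTasks') as above; markTaskDone and NUMBER_WORDS are illustrative names, not part of the original file:

// Map spoken words and digits to the zero-based index of the pending task.
const NUMBER_WORDS = ['one', 'two', 'three', 'four', 'five', 'six', 'seven', 'eight', 'nine', 'ten'];

function markTaskDone(transcript) {
  const tasks = JSON.parse(localStorage.getItem('tasks')) || [];
  const index = NUMBER_WORDS.findIndex(
    (word, i) => transcript.includes(word) || transcript.includes(String(i + 1))
  );
  if (index === -1 || index >= tasks.length) {
    alert(`You said: "${transcript}" --- use the number of an undone task`);
    return;
  }
  // Move the chosen task from 'tasks' to 'DoneTasks'.
  const doneTasks = JSON.parse(localStorage.getItem('DoneTasks')) || [];
  doneTasks.push({ task: tasks[index].task });
  localStorage.setItem('DoneTasks', JSON.stringify(doneTasks));
  tasks.splice(index, 1);
  localStorage.setItem('tasks', JSON.stringify(tasks));
}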

taskUtils.test.js

Source: taskUtils.test.js (GitHub)

import {
  groupLinkedTasks,
  tasksToIds,
} from '../taskUtils.js'
describe('taskUtils', () => {
  describe('groupLinkedTasks', () => {
    it('should group when tasks are ordered', () => {
      const tasks = [
        {
          '@id': '/api/tasks/1',
          id : 1,
          next: '/api/tasks/2',
        }, {
          '@id': '/api/tasks/2',
          id : 2,
          previous: '/api/tasks/1',
        }, {
          '@id': '/api/tasks/3',
          id : 3,
        }
      ]
      const groups = groupLinkedTasks(tasks)
      expect(groups).toEqual({
        '/api/tasks/1': [ '/api/tasks/1', '/api/tasks/2' ],
        '/api/tasks/2': [ '/api/tasks/1', '/api/tasks/2' ],
      })
    })
    it('should group when tasks are not ordered', () => {
      const tasks = [
        {
          '@id': '/api/tasks/2',
          id : 2,
          previous: '/api/tasks/1',
        }, {
          '@id': '/api/tasks/1',
          id : 1,
          next: '/api/tasks/2',
        }, {
          '@id': '/api/tasks/3',
          id : 3,
        }
      ]
      const groups = groupLinkedTasks(tasks)
      expect(groups).toEqual({
        '/api/tasks/1': [ '/api/tasks/1', '/api/tasks/2' ],
        '/api/tasks/2': [ '/api/tasks/1', '/api/tasks/2' ],
      })
    })
    it('should group when there are more than 2 tasks', () => {
      const tasks = [
        {
          '@id': '/api/tasks/1',
          id : 1,
          next: '/api/tasks/2',
        }, {
          '@id': '/api/tasks/2',
          id : 2,
          previous: '/api/tasks/1',
          next: '/api/tasks/3',
        }, {
          '@id': '/api/tasks/3',
          id : 3,
          previous: '/api/tasks/2',
        }
      ]
      const groups = groupLinkedTasks(tasks)
      expect(groups).toEqual({
        '/api/tasks/1': [ '/api/tasks/1', '/api/tasks/2', '/api/tasks/3' ],
        '/api/tasks/2': [ '/api/tasks/1', '/api/tasks/2', '/api/tasks/3' ],
        '/api/tasks/3': [ '/api/tasks/1', '/api/tasks/2', '/api/tasks/3' ],
      })
    })
    it('should group when there are more than 2 tasks without next', () => {
      const tasks = [
        {
          '@id': '/api/tasks/1',
          id : 1,
        }, {
          '@id': '/api/tasks/2',
          id : 2,
          previous: '/api/tasks/1',
        }, {
          '@id': '/api/tasks/3',
          id : 3,
          previous: '/api/tasks/1',
        }, {
          '@id': '/api/tasks/4',
          id : 4,
        }
      ]
      const groups = groupLinkedTasks(tasks)
      expect(groups).toEqual({
        '/api/tasks/1': [ '/api/tasks/1', '/api/tasks/2', '/api/tasks/3' ],
        '/api/tasks/2': [ '/api/tasks/1', '/api/tasks/2', '/api/tasks/3' ],
        '/api/tasks/3': [ '/api/tasks/1', '/api/tasks/2', '/api/tasks/3' ],
      })
    })
    it('should group when there are more than 2 tasks without next, not ordered', () => {
      const tasks = [
        {
          '@id': '/api/tasks/2',
          id : 2,
          previous: '/api/tasks/1',
        }, {
          '@id': '/api/tasks/3',
          id : 3,
          previous: '/api/tasks/1',
        }, {
          '@id': '/api/tasks/4',
          id : 4,
        },
        {
          '@id': '/api/tasks/1',
          id : 1,
        },
      ]
      const groups = groupLinkedTasks(tasks)
      expect(groups).toEqual({
        '/api/tasks/1': [ '/api/tasks/1', '/api/tasks/2', '/api/tasks/3' ],
        '/api/tasks/2': [ '/api/tasks/1', '/api/tasks/2', '/api/tasks/3' ],
        '/api/tasks/3': [ '/api/tasks/1', '/api/tasks/2', '/api/tasks/3' ],
      })
    })
    it('should group multiple', () => {
      const tasks = [
        {
          '@id': '/api/tasks/1',
          id : 1,
          next: '/api/tasks/2',
        }, {
          '@id': '/api/tasks/2',
          id : 2,
          previous: '/api/tasks/1',
        }, {
          '@id': '/api/tasks/3',
          id : 3,
        }, {
          '@id': '/api/tasks/4',
          id : 4,
          next: '/api/tasks/5',
        }, {
          '@id': '/api/tasks/5',
          id : 5,
          previous: '/api/tasks/4',
        }
      ]
      const groups = groupLinkedTasks(tasks)
      expect(groups).toEqual({
        '/api/tasks/1': [ '/api/tasks/1', '/api/tasks/2' ],
        '/api/tasks/2': [ '/api/tasks/1', '/api/tasks/2' ],
        '/api/tasks/4': [ '/api/tasks/4', '/api/tasks/5' ],
        '/api/tasks/5': [ '/api/tasks/4', '/api/tasks/5' ],
      })
    })
  })
  describe('tasksToIds', () => {
    it('should map tasks to task ids', () => {
      let tasks = [
        {
          '@id': '/api/tasks/1',
          id : 1,
        }, {
          '@id': '/api/tasks/2',
          id : 2,
        }
      ]
      let ids = tasksToIds(tasks)
      expect(ids).toEqual([
        '/api/tasks/1',
        '/api/tasks/2',
      ])
    })
    it('should map tasks to task ids with TaskCollectionItem', () => {
      let tasks = [
        {
          '@type': 'TaskCollectionItem',
          'task': '/api/tasks/1',
        }, {
          '@type': 'TaskCollectionItem',
          'task': '/api/tasks/2',
        }
      ]
      let ids = tasksToIds(tasks)
      expect(ids).toEqual([
        '/api/tasks/1',
        '/api/tasks/2',
      ])
    })
  })
...
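The tests above pin down the expected behaviour of groupLinkedTasks without showing its implementation. Below is one possible implementation consistent with these tests; it is a sketch, not the actual taskUtils.js source (which is not shown here). It links each task to its previous/next neighbour, drops singletons, and keys the resulting group by every member:

// A sketch of groupLinkedTasks consistent with the tests above (union-find over next/previous links).
export function groupLinkedTasks(tasks) {
  const byId = new Map(tasks.map((t) => [t['@id'], t]));
  const parent = new Map(tasks.map((t) => [t['@id'], t['@id']]));

  const find = (id) => {
    while (parent.get(id) !== id) id = parent.get(id);
    return id;
  };
  const union = (a, b) => {
    const ra = find(a);
    const rb = find(b);
    if (ra !== rb) parent.set(rb, ra);
  };

  // Link every task to its declared neighbours.
  tasks.forEach((t) => {
    if (t.previous && byId.has(t.previous)) union(t.previous, t['@id']);
    if (t.next && byId.has(t.next)) union(t['@id'], t.next);
  });

  // Collect members per root, drop singletons, and key the result by each member.
  const members = new Map();
  tasks.forEach((t) => {
    const root = find(t['@id']);
    if (!members.has(root)) members.set(root, []);
    members.get(root).push(t['@id']);
  });

  const groups = {};
  members.forEach((ids) => {
    if (ids.length < 2) return;
    const sorted = [...ids].sort((a, b) => byId.get(a).id - byId.get(b).id);
    sorted.forEach((id) => { groups[id] = sorted; });
  });
  return groups;
}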

todolist.py

Source: todolist.py (GitHub)

...
    def get_card_by_name(self, task_):
        return self.session_.query(TaskCard).filter(TaskCard.task == task_).all()

    def get_cards_by_date(self, date_):
        return self.session_.query(TaskCard).filter(TaskCard.deadline == date_).all()

    def get_range_tasks(self, start_, end_):
        return self.session_.query(TaskCard).filter(TaskCard.deadline.between(start_, end_)).order_by(TaskCard.deadline).all()

    def get_cards(self):
        return self.session_.query(TaskCard).order_by(TaskCard.deadline).all()

    def delete_card(self, task_):
        self.session_.delete(task_)
        self.session_.commit()
        print("The task has been deleted!")

    def get_before_date(self, date_):
        return self.session_.query(TaskCard).filter(TaskCard.deadline < date_).all()

    def today_tasks(self):
        tasks_ = self.get_cards_by_date(datetime.today().date())
        print(datetime.today().strftime("Today: %d %b:"))
        if len(tasks_) == 0:
            print("Nothing to do!")
        else:
            for n_ in range(len(tasks_)):
                print("{}. {}".format(n_ + 1, tasks_[n_].task))

    def add_task(self):
        print("Enter task")
        task_name_ = input()
        print("Enter deadline")
        deadline_ = datetime.strptime(input(), "%Y-%m-%d")
        print("dead", deadline_)
        self.add_card(task_name_, deadline_)

    def delete_task(self):
        tasks_ = self.get_cards()
        if len(tasks_) == 0:
            print("Nothing to delete")
        else:
            print("Choose the number of the task you want to delete:")
            for n_ in range(len(tasks_)):
                print("{}. {}. {}".format(n_ + 1, tasks_[n_].task, tasks_[n_].deadline.strftime("%d %b:")))
            task_number = input()
            try:
                if int(task_number) in range(1, len(tasks_) + 1):
                    self.delete_card(tasks_[int(task_number) - 1])
            except ValueError:
                print("Type error")

    def week_tasks(self):
        date_ = datetime.today()
        for n_ in range(0, 7):
            d_ = date_ + timedelta(days=n_)
            print(d_.strftime("%A %d %b:"))
            tasks_ = self.get_cards_by_date(d_.date())
            if len(tasks_) == 0:
                print("Nothing to do!")
                print()
            else:
                for x_ in range(len(tasks_)):
                    print("{}. {}".format(x_ + 1, tasks_[x_].task))
                print()

    def all_tasks(self):
        tasks_ = self.get_cards()
        if len(tasks_) == 0:
            print("Nothing to do!")
        else:
            print("All tasks:")
            for n_ in range(len(tasks_)):
                print("{}. {}. {}".format(n_ + 1, tasks_[n_].task, tasks_[n_].deadline.strftime("%d %b:")))

    def missed_task(self):
        print("Missed tasks:")
        tasks_ = self.get_before_date(datetime.today().date())
        if len(tasks_) == 0:
            print("Nothing is missed!")
        else:
            for n_ in range(len(tasks_)):
                print("{}. {}. {}".format(n_ + 1, tasks_[n_].task, tasks_[n_].deadline.strftime("%d %b:")))
            print()


def main_menu():
    while True:
        print("1) Today's tasks")
        print("2) Week's tasks")
        print("3) All tasks")
        print("4) Missed tasks")
        print("5) Add task")
        print("6) Delete task")
        print("0) Exit")
        choice_ = input()
        if choice_ == "1":
            service.today_tasks()
        elif choice_ == "2":
            service.week_tasks()
        elif choice_ == "3":
            service.all_tasks()
        elif choice_ == "4":
            service.missed_task()
        elif choice_ == "5":
            service.add_task()
        elif choice_ == "6":
            service.delete_task()
        elif choice_ == "0":
            print("Bye!")
            break
        else:
            print("{} is not an option".format(choice_))


engine = create_engine('sqlite:///todo.db?check_same_thread=False')
connection = engine.connect()
Session = sessionmaker(bind=engine)
...

cs_sort_task.py

Source: cs_sort_task.py (GitHub)

...
        loc_year_str = "{loc} : {year_id}".format(loc=location_id, year_id=year_id)
        if loc_year_str in self._us_tasks_by_location_year:
            return_tasks.append(self._us_tasks_by_location_year[loc_year_str])
        return return_tasks

    def _get_phase_tasks(self):
        all_tasks = self._task_dag.tasks.values()
        tasks = []
        tasks.extend([t for t in all_tasks
                      if isinstance(t, BashTask) and "most_detailed" in t.command])
        tasks.extend([t for t in all_tasks
                      if isinstance(t, BashTask) and "run_cleanup" in t.command])
        tasks.extend([t for t in all_tasks
                      if isinstance(t, BashTask) and "run_pct_change" in t.command])
        if not tasks:
            logger.debug("No upstream Tasks identified. Something is amiss in the Swarm definition")
            raise RuntimeError("No upstream Tasks identified. Something is "
                               "amiss in the Swarm definition")
        return tasks

    def _get_tasks_for_upstream(self):
        tasks_for_upstream = {}
        us_tasks = self._get_phase_tasks()
        for task in us_tasks:
            loc = self.task_loc_map[task.hash]
            year_id = self.task_years_map[task.hash]
            loc_year_str = "{loc} : {year_id}".format(loc=loc, year_id=year_id)
            if loc_year_str not in tasks_for_upstream:
                tasks_for_upstream[loc_year_str] = []
            tasks_for_upstream[loc_year_str].append(task)
...

Using AI Code Generation

const { tasks } = require('fast-check-monorepo');
const { tasks } = require('fast-check');
const { tasks } = require('fast-check-monorepo/tasks');
const { tasks } = require('fast-check/tasks');
const { tasks } = require('fast-check-monorepo/tasks');
const { tasks } = require('fast-check/tasks');
const { tasks } = require('fast-check-monorepo/tasks');
const { tasks } = require('fast-check/tasks');
const { tasks } = require('fast-check-monorepo/tasks');
const { tasks } = require('fast-check/tasks');
const { tasks } = require('fast-check-monorepo/tasks');
const { tasks } = require('fast-check/tasks');
const { tasks } = require('fast-check-monorepo/tasks');
const { tasks } = require('fast-check/tasks');
const { tasks } = require('fast-check-monorepo/tasks');
const { tasks } = require('fast-check/tasks');
const { tasks } = require('fast-check-monorepo/tasks');
const { tasks } = require('fast-check/tasks');
const { tasks } = require('fast-check-monorepo/tasks');

Using AI Code Generation

const { tasks } = require('fast-check-monorepo');
const { tasks } = require('fast-check');
const { tasks } = require('fast-check-monorepo');
const { tasks } = require('fast-check');
const { tasks } = require('fast-check-monorepo');
const { tasks } = require('fast-check');
const { tasks } = require('fast-check-monorepo');
const { tasks } = require('fast-check');
const { tasks } = require('fast-check-monorepo');
const { tasks } = require('fast-check');
const { tasks } = require('fast-check-monorepo');
const { tasks } = require('fast-check');
const { tasks } = require('fast-check-monorepo');
const { tasks } = require('fast-check');
const { tasks } = require('fast-check-monorepo');
const { tasks } = require('fast-check');
const { tasks } = require('fast-check-monorepo');
const { tasks } = require('fast-check');
const { tasks } = require('fast-check-monorepo');
const { tasks } = require('fast-check');
const { tasks } = require('fast-check-monorepo');
const { tasks } = require('fast-check');

Using AI Code Generation

const { tasks } = require('fast-check-monorepo');
tasks();
const { tasks } = require('fast-check');
tasks();
const { tasks } = require('fast-check-monorepo');
tasks();
const { tasks } = require('fast-check');
tasks();
const { tasks } = require('fast-check-monorepo');
tasks();
const { tasks } = require('fast-check');
tasks();
const { tasks } = require('fast-check-monorepo');
tasks();
const { tasks } = require('fast-check');
tasks();
const { tasks } = require('fast-check-monorepo');
tasks();
const { tasks } = require('fast-check');
tasks();
const { tasks } = require('fast-check-monorepo');
tasks();
const { tasks } = require('fast-check');
tasks();
const { tasks } = require('fast-check-monorepo');
tasks();
const { tasks } = require('fast-check');
tasks();
const { tasks } = require('fast-check-monorepo');
tasks();
const { tasks } = require('fast-check');
tasks();
const { tasks } = require('fast-check-monorepo');
tasks();
const { tasks } = require('fast-check');
tasks();

Using AI Code Generation

const fc = require('fast-check');
fc.tasks()
  .then(tasks => console.log(tasks))
{
  "scripts": {
  },
  "dependencies": {
  }
}

Using AI Code Generation

const fc = require("fast-check");
const { tasks } = require("fast-check-monorepo");
const { task } = tasks;
const sum = (a, b) => a + b;
const add = task("add", sum);
const add100 = add(100);
const add200 = add(200);
const add300 = add(300);
const add400 = add(400);
const add500 = add(500);
const add600 = add(600);
const add700 = add(700);
const add800 = add(800);
const add900 = add(900);
const add1000 = add(1000);
const add1100 = add(1100);
const add1200 = add(1200);
const add1300 = add(1300);
const add1400 = add(1400);
const add1500 = add(1500);
const add1600 = add(1600);
const add1700 = add(1700);
const add1800 = add(1800);
const add1900 = add(1900);
const add2000 = add(2000);
const add2100 = add(2100);
const add2200 = add(2200);
const add2300 = add(2300);
const add2400 = add(2400);
const add2500 = add(2500);
const add2600 = add(2600);
const add2700 = add(2700);
const add2800 = add(2800);
const add2900 = add(2900);
const add3000 = add(3000);
const add3100 = add(3100);
const add3200 = add(3200);
const add3300 = add(3300);
const add3400 = add(3400);
const add3500 = add(3500);
const add3600 = add(3600);
const add3700 = add(3700);
const add3800 = add(3800);
const add3900 = add(3900);
const add4000 = add(4000);
const add4100 = add(4100);
const add4200 = add(4200);
const add4300 = add(4300);
const add4400 = add(4400);
const add4500 = add(4500);
const add4600 = add(4600);

Using AI Code Generation

import * as fc from 'fast-check';
import * as fcMonorepo from 'fast-check-monorepo';
const { tasks } = fcMonorepo;
const myTask = tasks.task('myTask', () => {
  .integer()
  .noShrink()
  .filter((n) => n % 3 === 0)
  .map((n) => n / 3)
  .chain((n) => {
    if (n % 2 === 0) {
      return tasks.success(n);
    } else {
      return tasks.failure('not even');
    }
  });
});
const myTaskRunner = async () => {
  const result = await myTask.run();
  console.log('result', result);
};
myTaskRunner();
{
  "scripts": {
  },
  "devDependencies": {
  }
}
{
}

Using AI Code Generation

const { tasks } = require('fast-check-monorepo');
const task = tasks(['build', 'test', 'lint']);
task.run('build', 'test');
const { tasks } = require('fast-check-monorepo');
const task = tasks(['build', 'test', 'lint']);
task.run('build', 'test');

Automation Testing Tutorials

Learn to execute automation testing from scratch with the LambdaTest Learning Hub, right from setting up the prerequisites and running your first automation test to following best practices and diving deeper into advanced test scenarios. The LambdaTest Learning Hubs compile step-by-step guides to help you become proficient with different test automation frameworks such as Selenium, Cypress, and TestNG.

LambdaTest Learning Hubs:

YouTube

You can also refer to the video tutorials on the LambdaTest YouTube channel for step-by-step demonstrations from industry experts.

Run fast-check-monorepo automation tests on LambdaTest cloud grid

Perform automation testing on 3000+ real desktop and mobile devices online.

