How to use _get_outdir method in tappy

Best Python code snippets using tappy_python

plotting.py

Source: plotting.py (GitHub)

...
    def _y_err(self) -> np.ndarray:
        return np.array([np.abs(self.transient.y_err[1, :]), self.transient.y_err[0, :]])

    @property
    def _lightcurve_plot_outdir(self) -> str:
        return self._get_outdir(join(self.transient.directory_structure.directory_path, self.model.__name__))

    @property
    def _data_plot_outdir(self) -> str:
        return self._get_outdir(self.transient.directory_structure.directory_path)

    def _get_outdir(self, default: str) -> str:
        return self._get_kwarg_with_default(kwarg="outdir", default=default)

    def get_filename(self, default: str) -> str:
        return self._get_kwarg_with_default(kwarg="filename", default=default)

    def _get_kwarg_with_default(self, kwarg: str, default: Any) -> Any:
        return self.kwargs.get(kwarg, default) or default

    @property
    def _model_kwargs(self) -> dict:
        return self._get_kwarg_with_default("model_kwargs", dict())

    @property
    def _posterior(self) -> pd.DataFrame:
        posterior = self.kwargs.get("posterior", pd.DataFrame())
        if not self._posterior_sorted and posterior is not None:
            posterior.sort_values(by='log_likelihood', inplace=True)
            self._posterior_sorted = True
...
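The pattern above is easy to reproduce in isolation: _get_outdir is a thin wrapper around _get_kwarg_with_default, returning a caller-supplied "outdir" keyword argument when one is present and falling back to a default path otherwise. The following minimal, self-contained sketch illustrates that behaviour; the ExamplePlotter class and its constructor are hypothetical and not part of the listing above.

class ExamplePlotter:  # hypothetical class, for illustration only
    def __init__(self, **kwargs):
        self.kwargs = kwargs

    def _get_outdir(self, default: str) -> str:
        return self._get_kwarg_with_default(kwarg="outdir", default=default)

    def _get_kwarg_with_default(self, kwarg: str, default):
        # Fall back to the default when the kwarg is missing *or* falsy (e.g. None).
        return self.kwargs.get(kwarg, default) or default


print(ExamplePlotter()._get_outdir(default="plots/model"))                     # plots/model
print(ExamplePlotter(outdir="custom_dir")._get_outdir(default="plots/model"))  # custom_dir
print(ExamplePlotter(outdir=None)._get_outdir(default="plots/model"))          # plots/model

Note that the "or default" guard means an explicitly passed but empty/None outdir also resolves to the default, which is exactly what the plotting code above relies on.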

proto_compile.bzl

Source: proto_compile.bzl (GitHub)

...
    else:
        outdir = builder.get(lang.name + "_outdir", run.outdir)
    options = builder.get(lang.name + "_grpc_options", [])
    _build_plugin_out(name, outdir, options, builder)

def _get_outdir(ctx, data):
    execdir = data.execdir
    if data.sources_are_generated and data.output_to_workspace:
        fail("output_to_workspace is not supported for generated proto files")
    if ctx.attr.output_to_workspace:
        outdir = "."
    else:
        outdir = ctx.var["GENDIR"]
    path = _get_offset_path(execdir, outdir)
    # If we are building generated files, the execdir and outdir are the same
    if path == "":
        return "."
    if execdir != ".":
        path += "/" + execdir
    return path

def _get_external_root(ctx):
    gendir = ctx.var["GENDIR"] + "/"
    # Complete set of "external workspace roots" that the proto
    # sourcefiles belong to.
    external_roots = []
    for file in ctx.files.protos:
        path = file.path
        if path.startswith(gendir):
            path = path[len(gendir):]
        path = path.split("/")
        if path[0] == "external":
            external_roots += ["/".join(path[0:2])]
    # This set size must be 0 or 1. (all source files must exist in this
    # workspace or the same external workspace).
    roots = depset(external_roots)
    if (ctx.attr.verbose > 2):
        print("external roots: %r" % roots)
    n = len(roots)
    if n:
        if n > 1:
            fail(
                """
You are attempting simultaneous compilation of protobuf source files that span multiple workspaces (%s).
Decompose your library rules into smaller units having filesets that belong to only a single workspace at a time.
Note that it is OK to *import* across multiple workspaces, but not compile them as file inputs to protoc.
                """ % roots
            )
        else:
            return external_roots[0]
    else:
        return "."

def _update_import_paths(ctx, builder, data):
    """Updates import paths beginning with 'external' so that they point to external/."""
    execdir = data.execdir
    final_imports = []
    for i in builder["imports"]:
        final_i = i
        # Check for imports from external
        path = i.split("/")
        if path[0] == 'external':
            # Ensure that external imports start from root, as external/ does not exist when rule is being
            # built in an external project.
            final_i = _get_offset_path(execdir, i)
        final_imports.append(final_i)
    builder["imports"] = final_imports

def _compile(ctx, unit):
    execdir = unit.data.execdir
    protoc = _get_offset_path(execdir, unit.compiler.path)
    imports = ["--proto_path=" + i for i in unit.imports]
    srcs = [_get_offset_path(execdir, p.path) for p in unit.data.protos]
    protoc_cmd = [protoc] + list(unit.args) + imports + srcs
    manifest = [f.short_path for f in unit.outputs]
    transitive_units = depset()
    for u in unit.data.transitive_units:
        transitive_units = transitive_units | u.inputs
    inputs = list(unit.inputs | transitive_units) + [unit.compiler]
    outputs = list(unit.outputs)
    cmds = [cmd for cmd in unit.commands] + [" ".join(protoc_cmd)]
    if execdir != ".":
        cmds.insert(0, "cd %s" % execdir)
    if unit.data.output_to_workspace:
        print(
"""
>**************************************************************************
* - Generating files into the workspace... This is potentially            *
*   dangerous (may overwrite existing files) and violates bazel's         *
*   sandbox policy.                                                       *
* - Disregard "ERROR: output 'foo.pb.*' was not created." messages.       *
* - Build will halt following the "not all outputs were created" message. *
* - Output manifest is printed below.                                     *
**************************************************************************<
%s
>*************************************************************************<
""" % "\n".join(manifest)
        )
    if unit.data.verbose:
        print(
"""
************************************************************
cd $(bazel info execution_root)%s && \
%s
************************************************************
%s
************************************************************
""" % (
            "" if execdir == "." else "/" + execdir,
            " \\ \n".join(protoc_cmd),
            "\n".join(manifest))
        )
    if unit.data.verbose > 2:
        for i in range(len(protoc_cmd)):
            print(" > cmd%s: %s" % (i, protoc_cmd[i]))
        for i in range(len(inputs)):
            print(" > input%s: %s" % (i, inputs[i]))
        for i in range(len(outputs)):
            print(" > output%s: %s" % (i, outputs[i]))
    ctx.action(
        mnemonic = "ProtoCompile",
        command = " && ".join(cmds),
        inputs = inputs,
        outputs = outputs,
    )

def _check_if_protos_are_generated(ctx):
    generated_path = ctx.var["GENDIR"]
    all_generated = True
    all_source = True
    for f in ctx.files.protos:
        if not f.path.startswith(generated_path):
            all_generated = False
        if not f.is_source:
            all_source = False
    if all_source:
        return False
    if all_generated:
        return True
    fail(
        """
You are attempting simultaneous compilation of protobuf source files and generated protobuf files.
Decompose your library rules into smaller units having filesets that are only source files or only
generated files.
        """
    )

def _add_imports_for_transitive_units(ctx, data, builder):
    proto_paths = [ data.execdir ]
    for unit in data.transitive_units:
        if len(unit.data.protos) == 0:
            continue
        if unit.data.execdir not in proto_paths:
            builder["imports"].append(_get_offset_path(data.execdir, unit.data.execdir))
            proto_paths.append(unit.data.execdir)

def _proto_compile_impl(ctx):
    if ctx.attr.verbose > 1:
        print("proto_compile %s:%s" % (ctx.build_file_path, ctx.label.name))
    # Calculate list of external roots and return the base directory
    # we'll use for the protoc invocation. Usually this is '.', but if
    # not, its 'external/WORKSPACE'
    execdir = _get_external_root(ctx)
    # If we are building generated protos, run from gendir.
    sources_are_generated = _check_if_protos_are_generated(ctx)
    if sources_are_generated:
        external = "" if execdir == "." else "/" + execdir
        execdir = ctx.var["GENDIR"] + external
    # Propagate proto deps compilation units.
    transitive_units = []
    for dep in ctx.attr.deps:
        for unit in dep.proto_compile_result.transitive_units:
            transitive_units.append(unit)
    if ctx.attr.go_prefix:
        go_prefix = ctx.attr.go_prefix.go_prefix
    elif ctx.attr.go_importpath:
        go_prefix = ctx.attr.go_importpath
    else:
        go_prefix = ""
    # Make the proto list.
    # First include any protos that match cts.attr.includes.
    # Then exclude any protos that match ctx.attr.excludes.
    includes = []
    protos = []
    if ctx.attr.includes:
        for file in ctx.files.protos:
            if _file_endswith(file, ctx.attr.includes):
                includes.append(file)
            else:
                continue
    else:
        includes = ctx.files.protos
    if ctx.attr.excludes:
        for file in includes:
            if _file_endswith(file, ctx.attr.excludes):
                continue
            else:
                protos.append(file)
    else:
        protos = includes
    # Immutable global state for this compiler run.
    data = struct(
        label = ctx.label,
        workspace_name = ctx.workspace_name,
        go_prefix = go_prefix,
        go_package = ctx.attr.go_package,
        execdir = execdir,
        protos = protos,
        descriptor_set = ctx.outputs.descriptor_set,
        importmap = ctx.attr.importmap,
        pb_options = ctx.attr.pb_options,
        grpc_options = ctx.attr.grpc_options,
        verbose = ctx.attr.verbose,
        with_grpc = ctx.attr.with_grpc,
        transitive_units = transitive_units,
        output_to_workspace = ctx.attr.output_to_workspace,
        sources_are_generated = sources_are_generated,
    )
    # Mutable global state to be populated by the classes.
    builder = {
        "args": [],  # list of string
        "imports": ctx.attr.imports + ["."],
        "inputs": ctx.files.protos + ctx.files.inputs,
        "outputs": [],
        "commands": [],  # optional miscellaneous pre-protoc commands
    }
    _add_imports_for_transitive_units(ctx, data, builder)
    # Build a list of structs that will be processed in this compiler
    # run.
    runs = []
    for l in ctx.attr.langs:
        lang = l.proto_language
        exts = []
        if lang.supports_pb:
            exts += lang.pb_file_extensions
        if lang.supports_grpc and data.with_grpc:
            exts += lang.grpc_file_extensions
        pb_outputs = []
        if lang.supports_pb:
            pb_outputs += lang.pb_outputs
        runs.append(struct(
            ctx = ctx,
            outdir = _get_outdir(ctx, data),
            lang = lang,
            data = data,
            exts = exts,
            pb_outputs = pb_outputs,
            output_to_jar = lang.output_to_jar,
        ))
        builder["inputs"] += lang.pb_inputs + lang.grpc_inputs
        builder["imports"] += lang.pb_imports + lang.grpc_imports
        builder[lang.name + "_pb_options"] = lang.pb_options + data.pb_options
        builder[lang.name + "_grpc_options"] = lang.grpc_options + data.grpc_options
    _build_descriptor_set(data, builder)
    for run in runs:
        if run.lang.output_to_jar:
            _build_output_jar(run, builder)
...

outputter.py

Source: outputter.py (GitHub)

...
    - tensorboard/*
    """

    def __init__(self, config, basedir, force_outdir=None):
        self.config = config
        self.outdir = self._get_outdir(basedir, force_outdir)
        print('Output directory: {}'.format(self.outdir))
        # Dump the config to config.json
        dump_config(config, os.path.join(self.outdir, 'config.json'))
        # Tensorboard logger
        self.tb_logger = SummaryWriter(os.path.join(self.outdir, 'tensorboard'))

    def _get_outdir(self, basedir, force_outdir=None):
        assert os.path.isdir(basedir), \
            'basedir is not a directory: {}'.format(basedir)
        if force_outdir:
            outdir = os.path.join(
                basedir,
                force_outdir,
            )
            assert os.path.isdir(outdir), \
                'forced outdir is not a directory: {}'.format(outdir)
        else:
            execs = [
                int(filename.split('.')[0])
                for filename in os.listdir(basedir)
                if filename.split('.')[0].isdigit()
...
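Here _get_outdir resolves the run directory for an experiment: when force_outdir is given it only checks that basedir/force_outdir already exists and returns it, and otherwise it starts collecting the numeric prefixes of existing entries in basedir (the listing is cut off at that point, so the rest of that branch is not shown). Below is a small hedged sketch of just the behaviour that is visible above, using hypothetical directory names and a temporary basedir.

import os
import tempfile

basedir = tempfile.mkdtemp()
os.mkdir(os.path.join(basedir, "3.my-first-run"))

# force_outdir branch: the shown code only asserts the directory exists.
forced = os.path.join(basedir, "3.my-first-run")
assert os.path.isdir(forced)

# else branch (as far as the excerpt goes): gather numeric prefixes of
# entries such as "3.my-first-run" before the listing is truncated.
execs = [
    int(filename.split('.')[0])
    for filename in os.listdir(basedir)
    if filename.split('.')[0].isdigit()
]
print(execs)  # [3]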
