How to use paths method in storybook-root

Best JavaScript code snippet using storybook-root

find_cuda_config.py

Source: find_cuda_config.py (GitHub)

...
  return ""

def _cartesian_product(first, second):
  """Returns all path combinations of first and second."""
  return [os.path.join(f, s) for f in first for s in second]

def _get_ld_config_paths():
  """Returns all directories from 'ldconfig -p'."""
  if not _is_linux():
    return []
  ldconfig_path = which("ldconfig") or "/sbin/ldconfig"
  output = subprocess.check_output([ldconfig_path, "-p"])
  pattern = re.compile(".* => (.*)")
  result = set()
  for line in output.splitlines():
    try:
      match = pattern.match(line.decode("ascii"))
    except UnicodeDecodeError:
      match = False
    if match:
      result.add(os.path.dirname(match.group(1)))
  return sorted(list(result))

def _get_default_cuda_paths(cuda_version):
  if not cuda_version:
    cuda_version = "*"
  elif not "." in cuda_version:
    cuda_version = cuda_version + ".*"
  if _is_windows():
    return [
        os.environ.get(
            "CUDA_PATH",
            "C:\\Program Files\\NVIDIA GPU Computing Toolkit\\CUDA\\v%s\\" %
            cuda_version)
    ]
  return ["/usr/local/cuda-%s" % cuda_version, "/usr/local/cuda", "/usr",
          "/usr/local/cudnn"] + _get_ld_config_paths()

def _header_paths():
  """Returns hard-coded set of relative paths to look for header files."""
  return [
      "",
      "include",
      "include/cuda",
      "include/*-linux-gnu",
      "extras/CUPTI/include",
      "include/cuda/CUPTI",
  ]

def _library_paths():
  """Returns hard-coded set of relative paths to look for library files."""
  return [
      "",
      "lib64",
      "lib",
      "lib/*-linux-gnu",
      "lib/x64",
      "extras/CUPTI/*",
  ]

def _not_found_error(base_paths, relative_paths, filepattern):
  base_paths = "".join(["\n '%s'" % path for path in sorted(base_paths)])
  relative_paths = "".join(["\n '%s'" % path for path in relative_paths])
  return ConfigError(
      "Could not find any %s in any subdirectory:%s\nof:%s\n" %
      (filepattern, relative_paths, base_paths))

def _find_file(base_paths, relative_paths, filepattern):
  for path in _cartesian_product(base_paths, relative_paths):
    for file in glob.glob(os.path.join(path, filepattern)):
      return file
  raise _not_found_error(base_paths, relative_paths, filepattern)

def _find_library(base_paths, library_name, required_version):
  """Returns first valid path to the requested library."""
  if _is_windows():
    filepattern = library_name + ".lib"
  elif _is_macos():
    filepattern = "%s*.dylib" % (".".join(["lib" + library_name] +
                                          required_version.split(".")[:1]))
  else:
    filepattern = ".".join(["lib" + library_name, "so"] +
                           required_version.split(".")[:1]) + "*"
  return _find_file(base_paths, _library_paths(), filepattern)

def _find_versioned_file(base_paths, relative_paths, filepattern,
                         required_version, get_version):
  """Returns first valid path to a file that matches the requested version."""
  for path in _cartesian_product(base_paths, relative_paths):
    for file in glob.glob(os.path.join(path, filepattern)):
      actual_version = get_version(file)
      if _matches_version(actual_version, required_version):
        return file, actual_version
  raise _not_found_error(
      base_paths, relative_paths,
      filepattern + " matching version '%s'" % required_version)

def _find_header(base_paths, header_name, required_version, get_version):
  """Returns first valid path to a header that matches the requested version."""
  return _find_versioned_file(base_paths, _header_paths(), header_name,
                              required_version, get_version)

def _find_cuda_config(base_paths, required_version):

  def get_header_version(path):
    version = int(_get_header_version(path, "CUDA_VERSION"))
    if not version:
      return None
    return "%d.%d" % (version // 1000, version % 1000 // 10)

  cuda_header_path, header_version = _find_header(base_paths, "cuda.h",
                                                  required_version,
                                                  get_header_version)
  cuda_version = header_version  # x.y, see above.
  cuda_library_path = _find_library(base_paths, "cudart", cuda_version)

  def get_nvcc_version(path):
    pattern = "Cuda compilation tools, release \d+\.\d+, V(\d+\.\d+\.\d+)"
    for line in subprocess.check_output([path, "--version"]).splitlines():
      match = re.match(pattern, line.decode("ascii"))
      if match:
        return match.group(1)
    return None

  nvcc_name = "nvcc.exe" if _is_windows() else "nvcc"
  nvcc_path, nvcc_version = _find_versioned_file(base_paths, [
      "",
      "bin",
  ], nvcc_name, cuda_version, get_nvcc_version)
  nvvm_path = _find_file(base_paths, [
      "nvvm/libdevice",
      "share/cuda",
      "lib/nvidia-cuda-toolkit/libdevice",
  ], "libdevice*.10.bc")
  cupti_header_path = _find_file(base_paths, _header_paths(), "cupti.h")
  cupti_library_path = _find_library(base_paths, "cupti", required_version)
  cuda_binary_dir = os.path.dirname(nvcc_path)
  nvvm_library_dir = os.path.dirname(nvvm_path)
  # XLA requires the toolkit path to find ptxas and libdevice.
  # TODO(csigg): pass in both directories instead.
  cuda_toolkit_paths = (
      os.path.normpath(os.path.join(cuda_binary_dir, "..")),
      os.path.normpath(os.path.join(nvvm_library_dir, "../..")),
  )
  if cuda_toolkit_paths[0] != cuda_toolkit_paths[1]:
    raise ConfigError("Inconsistent CUDA toolkit path: %s vs %s" %
                      cuda_toolkit_paths)
  return {
      "cuda_version": cuda_version,
      "cuda_include_dir": os.path.dirname(cuda_header_path),
      "cuda_library_dir": os.path.dirname(cuda_library_path),
      "cuda_binary_dir": cuda_binary_dir,
      "nvvm_library_dir": nvvm_library_dir,
      "cupti_include_dir": os.path.dirname(cupti_header_path),
      "cupti_library_dir": os.path.dirname(cupti_library_path),
      "cuda_toolkit_path": cuda_toolkit_paths[0],
  }

def _find_cublas_config(base_paths, required_version, cuda_version):
  if _at_least_version(cuda_version, "10.1"):

    def get_header_version(path):
      version = (
          _get_header_version(path, name)
          for name in ("CUBLAS_VER_MAJOR", "CUBLAS_VER_MINOR",
                       "CUBLAS_VER_PATCH"))
      return ".".join(version)

    header_path, header_version = _find_header(base_paths, "cublas_api.h",
                                               required_version,
                                               get_header_version)
    # cuBLAS uses the major version only.
    cublas_version = header_version.split(".")[0]
    if not _matches_version(cuda_version, cublas_version):
      raise ConfigError("cuBLAS version %s does not match CUDA version %s" %
                        (cublas_version, cuda_version))
  else:
    # There is no version info available before CUDA 10.1, just find the file.
    header_path = _find_file(base_paths, _header_paths(), "cublas_api.h")
    # cuBLAS version is the same as CUDA version (x.y).
    cublas_version = required_version
  library_path = _find_library(base_paths, "cublas", cublas_version)
  return {
      "cublas_include_dir": os.path.dirname(header_path),
      "cublas_library_dir": os.path.dirname(library_path),
  }

def _find_cudnn_config(base_paths, required_version):

  def get_header_version(path):
    version = (
        _get_header_version(path, name)
        for name in ("CUDNN_MAJOR", "CUDNN_MINOR", "CUDNN_PATCHLEVEL"))
    return ".".join(version)

  header_path, header_version = _find_header(base_paths, "cudnn.h",
                                             required_version,
                                             get_header_version)
  cudnn_version = header_version.split(".")[0]
  library_path = _find_library(base_paths, "cudnn", cudnn_version)
  return {
      "cudnn_version": cudnn_version,
      "cudnn_include_dir": os.path.dirname(header_path),
      "cudnn_library_dir": os.path.dirname(library_path),
  }

def _find_nccl_config(base_paths, required_version):

  def get_header_version(path):
    version = (
        _get_header_version(path, name)
        for name in ("NCCL_MAJOR", "NCCL_MINOR", "NCCL_PATCH"))
    return ".".join(version)

  header_path, header_version = _find_header(base_paths, "nccl.h",
                                             required_version,
                                             get_header_version)
  nccl_version = header_version.split(".")[0]
  library_path = _find_library(base_paths, "nccl", nccl_version)
  return {
      "nccl_version": nccl_version,
      "nccl_include_dir": os.path.dirname(header_path),
      "nccl_library_dir": os.path.dirname(library_path),
  }

def _find_tensorrt_config(base_paths, required_version):

  def get_header_version(path):
    version = (
        _get_header_version(path, name)
        for name in ("NV_TENSORRT_MAJOR", "NV_TENSORRT_MINOR",
                     "NV_TENSORRT_PATCH"))
    # `version` is a generator object, so we convert it to a list before using
    # it (multiple times below).
    version = list(version)
    if not all(version):
      return None  # Versions not found, make _matches_version return False.
    return ".".join(version)

  try:
    header_path, header_version = _find_header(base_paths, "NvInfer.h",
                                               required_version,
                                               get_header_version)
  except ConfigError:
    # TensorRT 6 moved the version information to NvInferVersion.h.
    header_path, header_version = _find_header(base_paths, "NvInferVersion.h",
                                               required_version,
                                               get_header_version)
  tensorrt_version = header_version.split(".")[0]
  library_path = _find_library(base_paths, "nvinfer", tensorrt_version)
  return {
      "tensorrt_version": tensorrt_version,
      "tensorrt_include_dir": os.path.dirname(header_path),
      "tensorrt_library_dir": os.path.dirname(library_path),
  }

def _list_from_env(env_name, default=[]):
  """Returns comma-separated list from environment variable."""
  if env_name in os.environ:
    return os.environ[env_name].split(",")
  return default

def _get_legacy_path(env_name, default=[]):
  """Returns a path specified by a legacy environment variable.

  CUDNN_INSTALL_PATH, NCCL_INSTALL_PATH, TENSORRT_INSTALL_PATH set to
  '/usr/lib/x86_64-linux-gnu' would previously find both library and header
  paths. Detect those and return '/usr', otherwise forward to _list_from_env().
  """
  if env_name in os.environ:
    match = re.match("^(/[^/ ]*)+/lib/\w+-linux-gnu/?$", os.environ[env_name])
    if match:
      return [match.group(1)]
  return _list_from_env(env_name, default)

def _normalize_path(path):
  """Returns normalized path, with forward slashes on Windows."""
  path = os.path.realpath(path)
  if _is_windows():
    path = path.replace("\\", "/")
  return path

def find_cuda_config():
  """Returns a dictionary of CUDA library and header file paths."""
  libraries = [argv.lower() for argv in sys.argv[1:]]
  cuda_version = os.environ.get("TF_CUDA_VERSION", "")
  base_paths = _list_from_env("TF_CUDA_PATHS",
                              _get_default_cuda_paths(cuda_version))
  base_paths = [path for path in base_paths if os.path.exists(path)]
  result = {}
  if "cuda" in libraries:
    cuda_paths = _list_from_env("CUDA_TOOLKIT_PATH", base_paths)
    result.update(_find_cuda_config(cuda_paths, cuda_version))
    cuda_version = result["cuda_version"]
    cublas_paths = base_paths
    if tuple(int(v) for v in cuda_version.split(".")) < (10, 1):
      # Before CUDA 10.1, cuBLAS was in the same directory as the toolkit.
      cublas_paths = cuda_paths
    cublas_version = os.environ.get("TF_CUBLAS_VERSION", "")
    result.update(
        _find_cublas_config(cublas_paths, cublas_version, cuda_version))
  if "cudnn" in libraries:
...

write_project_headers.py

Source: write_project_headers.py (GitHub)

...
    p = p.replace('\\', '/')
    p = re.sub('/+', '/', p)
    return p
'''

PROJECT_PATHS_MATLAB_PREAMBLE = '''function[path] = project_paths(key,arg)
%Paths script for Matlab:
%Use command project_paths(key,arg) to obtain the absolute path to the
%directory *key* refers to, with the string in *arg* appended.
%
%For example:
%project_paths('OUT_TABLES','table1.tex') returns your equivalent of
%D:\\workspace\\project\\trunk\\bld\\out\\tables\\table1.tex
%
%project_paths('OUT_ANALYSIS','logs\\reg.log') returns your equivalent of
%D:\\workspace\\project\\trunk\\bld\\out\\analysis\\logs\\reg.log
%
%Note: *arg* is optional.
%
%File is created by waf. Do not change paths here, but in root wscript!
'''

PROJECT_PATHS_MATLAB_END = '''
%define arg if not given
if ~exist('arg','var')
    arg='';
end
path=[getfield(projectpaths, key) '/' arg];
'''

PROJECT_PATHS_R_COMMENT = '''#
# Header with path definitions for entire project.
#
# Automatically generated by Waf, do not change!
#
# If paths need adjustment, perform those in the root wscript file.
#\n\n\n'''

PROJECT_PATHS_PERL_COMMENT = '''#!/usr/bin/perl
# Header with path definitions for entire project.
#
# Automatically generated by Waf, do not change!
#
# If paths need adjustment, perform those in the root wscript file.
#\n\n\n
package project_paths;
use strict;
use warnings;
use Exporter 'import';
our @EXPORT_OK = qw(%project_paths);
our %project_paths = ();
'''

PROJECT_PATHS_STATA_COMMENT = '''//
// Header with path definitions for entire project.
//
// Automatically generated by Waf, do not change!
//
// If paths need adjustment, perform those in the root wscript file.
//
// Note that the paths are added to the top of the ado-path.
//\n\n\n'''

@Task.update_outputs
class WriteProjectPathsPython(Task.Task):
    r"""Autogenerate a header with paths for inclusion in Python scripts.

    For this to work, the ``PROJECT_PATHS`` dictionary must be set as an
    environmental variable of the context in the top-level wscript -- it will
    be copied to a dictionary ``project_paths``.

    **Example**::

        PROJECT_PATHS['OUT_FINAL'] = 'bld/out/final'

    leads to::

        project_paths['OUT_FINAL'] = "/abs/path/to/your_project/bld/out/final"

    or::

        project_paths['OUT_FINAL'] = "C:\\your_project\\bld\\out\\final"

    in the module ``project_paths.py``. Assuming that the root directory of
    the project is on your ``PYTHONPATH`` and this script is called from
    ``src/library/python``, you can import the project_paths dictionary using::

        from bld.src.library.python.project_paths import project_paths

    There also is a convenience function joining project paths and any
    arguments::

        from bld.src.library.python.project_paths import project_paths_join

    allows you to write:

        out_file = project_paths_join('OUT_FINAL', 'baseline', 'figure.eps')
    """

    def run(self):
        with open(self.outputs[0].abspath(), 'w') as out_file:
            out_file.write(PROJECT_PATHS_PYTHON_DOCSTRING_IMPORTS)
            out_file.write('project_paths = {}\n')
            for name in sorted(self.env.PROJECT_PATHS.keys()):
                val = self.env.PROJECT_PATHS[name]
                if isinstance(val, Node.Node):
                    out_file.write("project_paths['{n}'] = r'{p}'\n".format(
                        n=name,
                        p=val.abspath())
                    )
                else:
                    pass
            # Convenience function
            out_file.write(PROJECT_PATHS_PYTHON_JOIN_FUNCTION)
            out_file.write(PROJECT_PATHS_PYTHON_JOIN_LATEX_FUNCTION)

class WriteProjectPathsMatlab(Task.Task):
    r"""Autogenerate a function with paths for inclusion in Matlab scripts."""

    def run(self):
        with open(self.outputs[0].abspath(), 'w') as out_file:
            out_file.write(PROJECT_PATHS_MATLAB_PREAMBLE)
            for name in sorted(self.env.PROJECT_PATHS.keys()):
                val = self.env.PROJECT_PATHS[name]
                if isinstance(val, Node.Node):
                    out_file.write("projectpaths.{n} = '{p}';\n".format(
                        n=name,
                        p=val.abspath())
                    )
                else:
                    pass
            out_file.write(PROJECT_PATHS_MATLAB_END)

class WriteProjectPathsR(Task.Task):
    r"""Autogenerate a header with paths for inclusion in R scripts.

    For this to work, the ``PROJECT_PATHS`` dictionary must be set as an
    environmental variable of the context in the top-level wscript -- it will
    be copied to a dictionary ``project_paths``.

    **Example**::

        PROJECT_PATHS['OUT_FINAL'] = 'bld/out/final'

    leads to::

        PATH_OUT_FINAL <<- '/path/to/your_project/bld/out/final'

    or::

        PATH_OUT_FINAL <<- 'C:\your_project\bld\out\final'

    in the module ``project_paths.r``. Assuming that this task is called
    from a directory ``src/library/R``, you can import the project paths
    using::

        source("src/library/R/project_paths.R")
    """

    def run(self):
        with open(self.outputs[0].abspath(), 'w') as out_file:
            out_file.write(PROJECT_PATHS_R_COMMENT)
            for name in sorted(self.env.PROJECT_PATHS.keys()):
                val = self.env.PROJECT_PATHS[name]
                if isinstance(val, Node.Node):
                    out_file.write("PATH_{n} <<- '{p}'\n".format(
                        n=name,
                        p=val.abspath().replace('\\', '/'))
                    )
                else:
                    pass

@Task.update_outputs
class WriteProjectPathsPerl(Task.Task):
    r"""Autogenerate a header with paths for inclusion in Perl scripts.

    For this to work, the ``PROJECT_PATHS`` dictionary must be set as an
    environmental variable of the context in the top-level wscript -- it will
    be copied to a hash ``project_paths``.

    **Example**::

        PROJECT_PATHS['OUT_FINAL'] = 'bld/out/final'

    leads to::

        $project_paths{'OUT_FINAL'} = '/path/to/your_project/bld/out/final';

    or::

        $project_paths{'PATH_OUT_FINAL'} = 'C:\your_project\bld\out\final';

    in the module ``project_paths.pm``. Assuming that this task is called
    from a directory ``src/data_management/``, you can import the project paths
    using::

        use lib 'src/data_management/';
        use project_paths qw(%project_paths);
    """

    def run(self):
        with open(self.outputs[0].abspath(), 'w') as out_file:
            out_file.write(PROJECT_PATHS_PERL_COMMENT)
            for name in sorted(self.env.PROJECT_PATHS.keys()):
                val = self.env.PROJECT_PATHS[name]
                if isinstance(val, Node.Node):
                    out_file.write(
                        "$project_paths{{'{n}'}} = '{p}';\n".format(
                            n=name,
                            p=val.abspath().replace('\\', '/')
                        )
                    )
                else:
                    pass

@Task.update_outputs
class WriteProjectPathsStata(Task.Task):
    r"""Autogenerate a header with paths for inclusion in Stata do-files.

    For this to work, the ``PROJECT_PATHS`` dictionary must be set as an
    environmental variable of the build context in the top-level wscript --
    it will be copied to a dictionary ``project_paths``.

    **Example**::

        PROJECT_PATHS['OUT_FINAL'] = 'out/final'

    leads to:

    .. code-block:: none

        global PATH_OUT_FINAL = "/path/to/your_project/bld/out/final"

    or:

    .. code-block:: none

        global PATH_OUT_FINAL = "C:\your_project\out\final"

    in the file ``project_paths.do``. Ado-paths found in the
    ``PROJECT_PATHS['ADO']`` dictionary get a special treatment -- if there are
    keys ``PERSONAL`` or ``PLUS``, the respective system directories will be
    set to their values. Other entries will be prepended to Stata's ado search
    path.

    The file ``project_paths.do`` usually lives in the project's library
    (build) directory. You can then put the lines:

    .. code-block:: none

        include path/to/library/stata/project_paths

    in your Stata do-file and reference the path defined above by writing, for
    example:

    .. code-block:: none

        log using "${PATH_OUT_FINAL}/log/graph_main_results"
    """

    def _write_ado_paths(self, ado_paths, out_file):
        for name, val in ado_paths.items():
            if re.match('PERSONAL|PLUS', name):
                out_file.write('sysdir set {} "{}/"\n'.format(
                    name,
                    val.abspath())
                )
            else:
                out_file.write('adopath ++ "{}/"\n'.format(val.abspath()))
                out_file.write('adopath ++ "{}/"\n'.format(
                    val.get_bld().abspath())
                )
        out_file.write('\n')

    def run(self):
        with open(self.outputs[0].abspath(), 'w') as out_file:
            out_file.write(PROJECT_PATHS_STATA_COMMENT)
            for name in sorted(self.env.PROJECT_PATHS.keys()):
                val = self.env.PROJECT_PATHS[name]
                if isinstance(val, Node.Node):
                    out_file.write(
                        'global PATH_{n} "{p}/"\n'.format(
                            n=name,
                            p=val.abspath()
                        )
                    )
                elif name == 'ADO' and isinstance(val, dict):
                    self._write_ado_paths(val, out_file)

@TaskGen.feature('write_project_paths')
@TaskGen.before_method('process_source')
def apply_write_project_paths(tsk_g):
    """Task generator, customising the options etc. to output the project
    paths header file in the correct language.

    The function is passed a waflib.TaskGen.task_gen object.
    """
    # Get target nodes.
    tgt_nodes = [
        tsk_g.path.find_or_declare(t) for t in tsk_g.to_list(tsk_g.target)
    ]
    # Set top-level wscript as the only dependency (where paths are defined).
    src_node = tsk_g.bld.srcnode.find_resource('wscript')
    # Parse the nodes to get the correct type of output.
    for tgt_node in tgt_nodes:
        if tgt_node.name.endswith('.py'):
            task_str = 'WriteProjectPathsPython'
...

test_simple_paths.py

Source: test_simple_paths.py (GitHub)

...
    def test_multidigraph(self):
        G = nx.MultiDiGraph([(0, 1), (0, 1), (1, 0), (1, 0)])
        assert_true(nx.is_simple_path(G, [0, 1]))

# Tests for all_simple_paths
def test_all_simple_paths():
    G = nx.path_graph(4)
    paths = nx.all_simple_paths(G,0,3)
    assert_equal(set(tuple(p) for p in paths),{(0,1,2,3)})

def test_all_simple_paths_cutoff():
    G = nx.complete_graph(4)
    paths = nx.all_simple_paths(G,0,1,cutoff=1)
    assert_equal(set(tuple(p) for p in paths),{(0,1)})
    paths = nx.all_simple_paths(G,0,1,cutoff=2)
    assert_equal(set(tuple(p) for p in paths),{(0,1),(0,2,1),(0,3,1)})

def test_all_simple_paths_multigraph():
    G = nx.MultiGraph([(1,2),(1,2)])
    paths = nx.all_simple_paths(G,1,2)
    assert_equal(set(tuple(p) for p in paths),{(1,2),(1,2)})

def test_all_simple_paths_multigraph_with_cutoff():
    G = nx.MultiGraph([(1,2),(1,2),(1,10),(10,2)])
    paths = nx.all_simple_paths(G,1,2, cutoff=1)
    assert_equal(set(tuple(p) for p in paths),{(1,2),(1,2)})

def test_all_simple_paths_directed():
    G = nx.DiGraph()
    nx.add_path(G, [1, 2, 3])
    nx.add_path(G, [3, 2, 1])
    paths = nx.all_simple_paths(G,1,3)
    assert_equal(set(tuple(p) for p in paths),{(1,2,3)})

def test_all_simple_paths_empty():
    G = nx.path_graph(4)
    paths = nx.all_simple_paths(G,0,3,cutoff=2)
    assert_equal(list(list(p) for p in paths),[])

def hamiltonian_path(G,source):
    source = arbitrary_element(G)
    neighbors = set(G[source])-set([source])
    n = len(G)
    for target in neighbors:
        for path in nx.all_simple_paths(G,source,target):
            if len(path) == n:
                yield path

def test_hamiltonian_path():
    from itertools import permutations
    G=nx.complete_graph(4)
    paths = [list(p) for p in hamiltonian_path(G,0)]
    exact = [[0]+list(p) for p in permutations([1,2,3],3) ]
    assert_equal(sorted(paths),sorted(exact))

def test_cutoff_zero():
    G = nx.complete_graph(4)
    paths = nx.all_simple_paths(G,0,3,cutoff=0)
    assert_equal(list(list(p) for p in paths),[])
    paths = nx.all_simple_paths(nx.MultiGraph(G),0,3,cutoff=0)
    assert_equal(list(list(p) for p in paths),[])

@raises(nx.NodeNotFound)
def test_source_missing():
    G = nx.Graph()
    nx.add_path(G, [1, 2, 3])
    paths = list(nx.all_simple_paths(nx.MultiGraph(G),0,3))

@raises(nx.NodeNotFound)
def test_target_missing():
    G = nx.Graph()
    nx.add_path(G, [1, 2, 3])
    paths = list(nx.all_simple_paths(nx.MultiGraph(G),1,4))

# Tests for shortest_simple_paths
def test_shortest_simple_paths():
    G = cnlti(nx.grid_2d_graph(4, 4), first_label=1, ordering="sorted")
    paths = nx.shortest_simple_paths(G, 1, 12)
    assert_equal(next(paths), [1, 2, 3, 4, 8, 12])
    assert_equal(next(paths), [1, 5, 6, 7, 8, 12])
    assert_equal([len(path) for path in nx.shortest_simple_paths(G, 1, 12)],
                 sorted([len(path) for path in nx.all_simple_paths(G, 1, 12)]))

def test_shortest_simple_paths_directed():
    G = nx.cycle_graph(7, create_using=nx.DiGraph())
    paths = nx.shortest_simple_paths(G, 0, 3)
    assert_equal([path for path in paths], [[0, 1, 2, 3]])

def test_Greg_Bernstein():
    g1 = nx.Graph()
    g1.add_nodes_from(["N0", "N1", "N2", "N3", "N4"])
    g1.add_edge("N4", "N1", weight=10.0, capacity=50, name="L5")
    g1.add_edge("N4", "N0", weight=7.0, capacity=40, name="L4")
    g1.add_edge("N0", "N1", weight=10.0, capacity=45, name="L1")
    g1.add_edge("N3", "N0", weight=10.0, capacity=50, name="L0")
    g1.add_edge("N2", "N3", weight=12.0, capacity=30, name="L2")
    g1.add_edge("N1", "N2", weight=15.0, capacity=42, name="L3")
    solution = [['N1', 'N0', 'N3'], ['N1', 'N2', 'N3'], ['N1', 'N4', 'N0', 'N3']]
    result = list(nx.shortest_simple_paths(g1, 'N1', 'N3', weight='weight'))
    assert_equal(result, solution)

def test_weighted_shortest_simple_path():
    def cost_func(path):
        return sum(G.edge[u][v]['weight'] for (u, v) in zip(path, path[1:]))
    G = nx.complete_graph(5)
    weight = {(u, v): random.randint(1, 100) for (u, v) in G.edges()}
    nx.set_edge_attributes(G, 'weight', weight)
    cost = 0
    for path in nx.shortest_simple_paths(G, 0, 3, weight='weight'):
        this_cost = cost_func(path)
        assert_true(cost <= this_cost)
        cost = this_cost

def test_directed_weighted_shortest_simple_path():
    def cost_func(path):
        return sum(G.edge[u][v]['weight'] for (u, v) in zip(path, path[1:]))
    G = nx.complete_graph(5)
    G = G.to_directed()
    weight = {(u, v): random.randint(1, 100) for (u, v) in G.edges()}
    nx.set_edge_attributes(G, 'weight', weight)
    cost = 0
    for path in nx.shortest_simple_paths(G, 0, 3, weight='weight'):
        this_cost = cost_func(path)
        assert_true(cost <= this_cost)
        cost = this_cost

def test_weight_name():
    G = nx.cycle_graph(7)
    nx.set_edge_attributes(G, 'weight', 1)
    nx.set_edge_attributes(G, 'foo', 1)
    G.edge[1][2]['foo'] = 7
    paths = list(nx.shortest_simple_paths(G, 0, 3, weight='foo'))
    solution = [[0, 6, 5, 4, 3], [0, 1, 2, 3]]
    assert_equal(paths, solution)

@raises(nx.NodeNotFound)
def test_ssp_source_missing():
    G = nx.Graph()
    nx.add_path(G, [1, 2, 3])
    paths = list(nx.shortest_simple_paths(G, 0, 3))

@raises(nx.NodeNotFound)
def test_ssp_target_missing():
    G = nx.Graph()
    nx.add_path(G, [1, 2, 3])
    paths = list(nx.shortest_simple_paths(G, 1, 4))

@raises(nx.NetworkXNotImplemented)
def test_ssp_multigraph():
    G = nx.MultiGraph()
    nx.add_path(G, [1, 2, 3])
    paths = list(nx.shortest_simple_paths(G, 1, 4))

@raises(nx.NetworkXNoPath)
def test_ssp_source_missing():
    G = nx.Graph()
    nx.add_path(G, [0, 1, 2])
    nx.add_path(G, [3, 4, 5])
    paths = list(nx.shortest_simple_paths(G, 0, 3))

def test_bidirectional_shortest_path_restricted():
    grid = cnlti(nx.grid_2d_graph(4,4), first_label=1, ordering="sorted")
    cycle = nx.cycle_graph(7)
    directed_cycle = nx.cycle_graph(7, create_using=nx.DiGraph())
    length, path = _bidirectional_shortest_path(cycle, 0, 3)
    assert_equal(path, [0, 1, 2, 3])
    length, path = _bidirectional_shortest_path(cycle, 0, 3, ignore_nodes=[1])
    assert_equal(path, [0, 6, 5, 4, 3])
    length, path = _bidirectional_shortest_path(grid, 1, 12)
    assert_equal(path, [1, 2, 3, 4, 8, 12])
    length, path = _bidirectional_shortest_path(grid, 1, 12, ignore_nodes=[2])
    assert_equal(path, [1, 5, 6, 10, 11, 12])
    length, path = _bidirectional_shortest_path(grid, 1, 12, ignore_nodes=[2, 6])
    assert_equal(path, [1, 5, 9, 10, 11, 12])
...

output_init_files_test.py

Source: output_init_files_test.py (GitHub)

...
    contents = contents[start:end]
    file_paths = [
        file_path.strip().strip('"') for file_path in contents.split(',')]
    return set(file_path for file_path in file_paths if file_path)

def _module_to_paths(module):
  """Get all API __init__.py file paths for the given module.

  Args:
    module: Module to get file paths for.

  Returns:
    List of paths for the given module. For e.g. module foo.bar
    requires 'foo/__init__.py' and 'foo/bar/__init__.py'.
  """
  submodules = []
  module_segments = module.split('.')
  for i in range(len(module_segments)):
    submodules.append('.'.join(module_segments[:i+1]))
  paths = []
  for submodule in submodules:
    if not submodule:
      paths.append('__init__.py')
      continue
    paths.append('%s/__init__.py' % (submodule.replace('.', '/')))
  return paths

class OutputInitFilesTest(test.TestCase):
  """Test that verifies files that list paths for TensorFlow API."""

  def _validate_paths_for_modules(
      self, actual_paths, expected_paths, file_to_update_on_error):
    """Validates that actual_paths match expected_paths.

    Args:
      actual_paths: */__init__.py file paths listed in file_to_update_on_error.
      expected_paths: */__init__.py file paths that we need to create for
        TensorFlow API.
      file_to_update_on_error: File that contains list of */__init__.py files.
        We include it in error message printed if the file list needs to be
        updated.
    """
    self.assertTrue(actual_paths)
    self.assertTrue(expected_paths)
    missing_paths = expected_paths - actual_paths
    extra_paths = actual_paths - expected_paths
    # Surround paths with quotes so that they can be copy-pasted
    # from error messages as strings.
    missing_paths = ['\'%s\'' % path for path in missing_paths]
    extra_paths = ['\'%s\'' % path for path in extra_paths]
    self.assertFalse(
        missing_paths,
        'Please add %s to %s.' % (
            ',\n'.join(sorted(missing_paths)), file_to_update_on_error))
    self.assertFalse(
        extra_paths,
        'Redundant paths, please remove %s in %s.' % (
            ',\n'.join(sorted(extra_paths)), file_to_update_on_error))

  def test_V2_init_files(self):
    modules = _get_modules(
        'tensorflow', '_tf_api_names', '_tf_api_constants')
    file_path = resource_loader.get_path_to_datafile(
        'api_init_files.bzl')
    paths = _get_files_set(
        file_path, '# BEGIN GENERATED FILES', '# END GENERATED FILES')
    module_paths = set(
        f for module in modules for f in _module_to_paths(module))
    self._validate_paths_for_modules(
        paths, module_paths, file_to_update_on_error=file_path)

  def test_V1_init_files(self):
    modules = _get_modules(
        'tensorflow', '_tf_api_names_v1', '_tf_api_constants_v1')
    file_path = resource_loader.get_path_to_datafile(
        'api_init_files_v1.bzl')
    paths = _get_files_set(
        file_path, '# BEGIN GENERATED FILES', '# END GENERATED FILES')
    module_paths = set(
        f for module in modules for f in _module_to_paths(module))
    self._validate_paths_for_modules(
        paths, module_paths, file_to_update_on_error=file_path)

if __name__ == '__main__':
...

test_correlation.py

Source: test_correlation.py (GitHub)

from biobb_common.tools import test_fixtures as fx
from biobb_dna.interbp_correlations.interhpcorr import interhpcorr
from biobb_dna.interbp_correlations.interseqcorr import interseqcorr
from biobb_dna.interbp_correlations.interbpcorr import interbpcorr
from biobb_dna.intrabp_correlations.intrahpcorr import intrahpcorr
from biobb_dna.intrabp_correlations.intraseqcorr import intraseqcorr
from biobb_dna.intrabp_correlations.intrabpcorr import intrabpcorr

class TestInterHelparCorrelation():
    def setUp(self):
        fx.test_setup(self, 'interhpcorr')

    def tearDown(self):
        fx.test_teardown(self)

    def test_helparcorrelation(self):
        returncode = interhpcorr(**self.paths)
        assert fx.not_empty(self.paths['output_csv_path'])
        assert fx.not_empty(self.paths['output_jpg_path'])
        assert fx.exe_success(returncode)
        assert fx.equal(
            self.paths['output_csv_path'],
            self.paths['ref_csv_output'])
        assert fx.equal(
            self.paths['output_jpg_path'],
            self.paths['ref_jpg_output'])

class TestInterSequenceCorrelation():
    def setUp(self):
        fx.test_setup(self, 'interseqcorr')

    def tearDown(self):
        fx.test_teardown(self)

    def test_sequencecorrelation(self):
        returncode = interseqcorr(
            properties=self.properties,
            **self.paths)
        assert fx.not_empty(self.paths['output_csv_path'])
        assert fx.not_empty(self.paths['output_jpg_path'])
        assert fx.exe_success(returncode)
        assert fx.equal(
            self.paths['output_csv_path'],
            self.paths['ref_csv_output'])
        assert fx.equal(
            self.paths['output_jpg_path'],
            self.paths['ref_jpg_output'])

class TestInterBasepairCorrelation():
    def setUp(self):
        fx.test_setup(self, 'interbpcorr')

    def tearDown(self):
        fx.test_teardown(self)

    def test_basepaircorrelation(self):
        returncode = interbpcorr(
            properties=self.properties,
            **self.paths)
        assert fx.not_empty(self.paths['output_csv_path'])
        assert fx.not_empty(self.paths['output_jpg_path'])
        assert fx.exe_success(returncode)
        assert fx.equal(
            self.paths['output_csv_path'],
            self.paths['ref_csv_output'])
        assert fx.equal(
            self.paths['output_jpg_path'],
            self.paths['ref_jpg_output'])

class TestIntraHelparCorrelation():
    def setUp(self):
        fx.test_setup(self, 'intrahpcorr')

    def tearDown(self):
        fx.test_teardown(self)

    def test_helparcorrelation(self):
        returncode = intrahpcorr(**self.paths)
        assert fx.not_empty(self.paths['output_csv_path'])
        assert fx.not_empty(self.paths['output_jpg_path'])
        assert fx.exe_success(returncode)
        assert fx.equal(
            self.paths['output_csv_path'],
            self.paths['ref_csv_output'])
        assert fx.equal(
            self.paths['output_jpg_path'],
            self.paths['ref_jpg_output'])

class TestIntraSequenceCorrelation():
    def setUp(self):
        fx.test_setup(self, 'intraseqcorr')

    # def tearDown(self):
    #     fx.test_teardown(self)

    def test_sequencecorrelation(self):
        returncode = intraseqcorr(
            properties=self.properties,
            **self.paths)
        assert fx.not_empty(self.paths['output_csv_path'])
        assert fx.not_empty(self.paths['output_jpg_path'])
        assert fx.exe_success(returncode)
        assert fx.equal(
            self.paths['output_csv_path'],
            self.paths['ref_csv_output'])
        assert fx.equal(
            self.paths['output_jpg_path'],
            self.paths['ref_jpg_output'])

class TestIntraBasepairCorrelation():
    def setUp(self):
        fx.test_setup(self, 'intrabpcorr')

    def tearDown(self):
        fx.test_teardown(self)

    def test_basepaircorrelation(self):
        returncode = intrabpcorr(
            properties=self.properties,
            **self.paths)
        assert fx.not_empty(self.paths['output_csv_path'])
        assert fx.not_empty(self.paths['output_jpg_path'])
        assert fx.exe_success(returncode)
        assert fx.equal(
            self.paths['output_csv_path'],
            self.paths['ref_csv_output'])
        assert fx.equal(
            self.paths['output_jpg_path'],
...

merge_paths.py

Source: merge_paths.py (GitHub)

#!/usr/bin/env python
import json,sys
from geo_utils import GeodeticDistGreatCircle

def merge_paths(paths):
    old_len = len(paths)+1
    while len(paths)<old_len:
        old_len = len(paths)
        b = False
        for i in range(0,len(paths)):
            for j in range(0,len(paths)):
                if paths[i][-1] == paths[j][0]:
                    # merge j into i
                    paths[i].extend(paths[j])
                    del paths[j]
                    b = True
                    break
                if paths[i][0] == paths[j][-1]:
                    # merge i into j
                    paths[j].extend(paths[i])
                    del paths[i]
                    b = True
                    break
                #~ # find overlaping paths
                #~ begin_i = -1
                #~ k = 0
                #~ while k < len(paths[i]):
                #~     l = 0
                #~     while l < len(paths[j]):
                #~         #print k,l
                #~         if paths[i][k]==paths[j][l]:
                #~             begin_i = k
                #~             begin_j = l
                #~             #print 'common got',k,l
                #~             break
                #~         l += 1
                #~     if begin_i!=-1:
                #~         begin_i = 280 # special case for durance
                #~         m = 0
                #~         while begin_i+m < len(paths[i]) and begin_j+m < len(paths[j]) and paths[i][begin_i+m]==paths[j][begin_j+m]:
                #~             m+=1
                #~         if m>1:
                #~             print 'removing',begin_i,begin_j,m
                #~             del paths[i][begin_i:m]
                #~             #del paths[j][begin_j:m]
                #~             break
                #~     k += 1
            if b:
                break

if __name__=='__main__':
    paths = [
        [[43.4811089,6.2341841],[43.4791642,6.269513300000001]],
        [[43.4810078,6.2909265],[43.433249700000005,6.3642313]],
        [[43.433249700000005,6.3642313],[43.4301522,6.374260100000001]],
        [[43.4791642,6.269513300000001],[43.4810078,6.2909265]],
        [[43.4219713,6.385425000000001],[43.4473507,6.4570821]],
        [[43.484489700000005,6.2144158],[43.4811089,6.2341841]],
        [[43.482820100000005,6.2124755],[43.484489700000005,6.2144158]],
        [[43.448267,6.458647],[43.4473235,6.4624787]],
        [[43.4473507,6.4570821],[43.448267,6.458647]],
        [[43.4301522,6.374260100000001],[43.4219713,6.385425000000001]],
        [[43.4664475,6.568304800000001],[43.4694493,6.571972400000001]],
        [[43.4694493,6.571972400000001],[43.4097165,6.737053400000001]],
        [[43.4484951,6.5457362],[43.4664475,6.568304800000001]],
        [[43.465827100000006,6.116036500000001],[43.482820100000005,6.2124755]],
        [[43.5192727,5.9953338],[43.465827100000006,6.116036500000001]],
        [[43.4473235,6.4624787],[43.431793000000006,6.5228108]],
        [[43.431793000000006,6.5228108],[43.4484951,6.5457362]],
        [[43.5273686,5.984377800000001],[43.5192727,5.9953338]],
        [[43.494067300000005,5.9142226],[43.490663100000006,5.925452]],
        [[43.5028408,5.9086786],[43.494067300000005,5.9142226]],
        [[43.5131151,5.963614300000001],[43.5273686,5.984377800000001]],
        [[43.490663100000006,5.925452],[43.4991374,5.950354300000001]],
        [[43.4991374,5.950354300000001],[43.5131151,5.963614300000001]],
        [[43.504062700000006,5.9073365],[43.5028408,5.9086786]],
    ]
    print 'bf merge',len(paths)
    merge_paths(paths)
    print 'af merge',len(paths)
    import pymongo
    paths = pymongo.MongoClient().wwsupdb.rivers_merged2.find_one({"_id":"La Durance"})["osm"]["paths"]
    paths = [map(tuple,path) for path in paths]
    print 'bf merge',len(paths)
    merge_paths(paths)
...

params.py

Source: params.py (GitHub)

1"""2Parameters and settings3"""4from typing import Union5import os6from pathlib import Path7from dotenv import find_dotenv, load_dotenv8from powergenome import __file__9from powergenome.resource_clusters import ClusterBuilder10# Not convinced this is the best way to set folder paths but it works!11powergenome_path = Path(__file__).parent12project_path = powergenome_path.parent13load_dotenv(dotenv_path=powergenome_path / ".env")14DATA_PATHS = {}15DATA_PATHS["results"] = project_path / "results"16DATA_PATHS["powergenome"] = project_path / "powergenome"17DATA_PATHS["data"] = project_path / "data"18DATA_PATHS["atb_storage_costs"] = DATA_PATHS["data"] / "NREL_ATB_battery_costs.csv"19DATA_PATHS["ipm_shapefiles"] = DATA_PATHS["data"] / "IPM Regions v617 04-05-17"20DATA_PATHS["tests"] = project_path / "tests"21DATA_PATHS["test_data"] = DATA_PATHS["tests"] / "data"22DATA_PATHS["settings"] = project_path / "settings"23DATA_PATHS["eia"] = DATA_PATHS["data"] / "eia"24DATA_PATHS["eia_860m"] = DATA_PATHS["eia"] / "860m"25DATA_PATHS["cost_multipliers"] = DATA_PATHS["data"] / "cost_multipliers"26DATA_PATHS["additional_techs"] = DATA_PATHS["data"] / "additional_technologies"27DATA_PATHS["coal_fgd"] = DATA_PATHS["data"] / "coal_fgd" / "fgd_output.csv"28DATA_PATHS["cpi_data"] = DATA_PATHS["data"] / "cpi_data" / "cpi_data.csv"29IPM_SHAPEFILE_PATH = DATA_PATHS["ipm_shapefiles"] / "IPM_Regions_201770405.shp"30IPM_GEOJSON_PATH = DATA_PATHS["data"] / "ipm_regions_simple.geojson"31SETTINGS = {}32SETTINGS["PUDL_DB"] = os.environ.get("PUDL_DB")33SETTINGS["PG_DB"] = os.environ.get("PG_DB")34SETTINGS["EIA_API_KEY"] = os.environ.get("EIA_API_KEY")35SETTINGS["RESOURCE_GROUPS"] = os.environ.get("RESOURCE_GROUPS")36def build_resource_clusters(group_path: Union[str, Path] = None):37 if not group_path:38 group_path = SETTINGS.get("RESOURCE_GROUPS")39 if not group_path:40 cluster_builder = ClusterBuilder([])41 else:42 cluster_builder = ClusterBuilder.from_json(43 Path(group_path, ".").glob("**/*.json")44 )...

aggregate_path_weights.py

Source: aggregate_path_weights.py (GitHub)

...
from operator import itemgetter

BASE_DATA_DIR = os.path.join('/', 'N', 'dc2', 'projects', 'filter_bubble', 'projects', 'web-traffic', 'data')
DATA_DIR = os.path.join(BASE_DATA_DIR, 'paths')
DEST = os.path.join(BASE_DATA_DIR, 'all-paths.tsv')

def read_paths():
    paths = {}
    for filename in os.listdir(DATA_DIR):
        print('Reading {}'.format(filename))
        with open(os.path.join(DATA_DIR, filename), 'rb') as f:
            f.readline()  # skip header
            for line in f:
                row = [t.strip() for t in line.split(b'\t')]
                referrer = row[0]
                target = row[1]
                weight = int(row[2])
                if referrer not in paths:
                    paths[referrer] = {}
                if target not in paths[referrer]:
                    paths[referrer][target] = 0
                paths[referrer][target] += weight
    return paths

if __name__ == '__main__':
    print('Reading paths.')
    paths = read_paths()
    print(len(paths))
    print('Transforming paths.')
    tuples = []
    for referrer in paths:
        for target in paths[referrer]:
            tuples.append((referrer, target, paths[referrer][target]))
    print('Sorting paths.')
    sorted_paths = sorted(tuples, key=itemgetter(2), reverse=True)
    print('Writing paths.')
    with open(DEST, 'wb') as f:
        f.write(b'referrer\ttarget\tpath weight\n')
        for t in sorted_paths:
...

Using AI Code Generation

import { storiesOf } from '@storybook/react-native';
import { withKnobs } from '@storybook/addon-knobs';
import { withInfo } from '@storybook/addon-info';
import { withStoryRoot } from 'storybook-root';
import { View, Text } from 'react-native';
import React from 'react';
const stories = storiesOf('Test', module);
stories.addDecorator(withKnobs);
stories.addDecorator(withInfo);
stories.addDecorator(withStoryRoot);
stories.add('Test', () => (
));
import { configure } from '@storybook/react-native';
import { getStorybookUI, configureRoot } from 'storybook-root';
configureRoot(require.context('../', true, /\.js$/));
configure(() => {
  require('./test');
}, module);
const StorybookUIRoot = getStorybookUI({});
export default StorybookUIRoot;
import { storiesOf } from '@storybook/react-native';
import { withKnobs } from '@storybook/addon-knobs';
import { withInfo } from '@storybook/addon-info';
import { withStoryRoot } from 'storybook-root';
import { View, Text } from 'react-native';
import React from 'react';
const stories = storiesOf('Test', module);
stories.addDecorator(withKnobs);
stories.addDecorator(withInfo);
stories.addDecorator(withStoryRoot);
stories.add('Test', () => (
));
import { configure, addDecorator } from '@storybook/react-native';
import { withKnobs } from '@storybook/addon-knobs';
import { withInfo } from '@storybook/addon-info';
import { withStoryRoot } from 'storybook-root';
addDecorator(withKnobs);
addDecorator(withInfo);
addDecorator(withStoryRoot);
configureRoot(require.context('../', true, /\.js$/));
configure(() => {
  require('./stories/test');
}, module);
import '@storybook/addon-knobs/register';
import '@storybook/addon-info/register';

Using AI Code Generation

import { paths } from '@storybook/addon-storyshots-puppeteer/dist/paths';
import { storybookRoot } from '@storybook/addon-storyshots-puppeteer/dist/storybookRoot';
import { storybookUrl } from '@storybook/addon-storyshots-puppeteer/dist/storybookUrl';
import { getGotoOptions } from '@storybook/addon-storyshots-puppeteer/dist/getGotoOptions';
import imageSnapshot from '@storybook/addon-storyshots-puppeteer/dist/imageSnapshot';
import initStoryshots from '@storybook/addon-storyshots-puppeteer/dist/initStoryshots';
import multiSnapshotWithOptions from '@storybook/addon-storyshots-puppeteer/dist/multiSnapshotWithOptions';
import snapshotWithOptions from '@storybook/addon-storyshots-puppeteer/dist/snapshotWithOptions';
import storybook from '@storybook/addon-storyshots-puppeteer/dist/storybook';
import toMatchImageSnapshot from '@storybook/addon-storyshots-puppeteer/dist/toMatchImageSnapshot';
import toMatchSpecificImageSnapshot from '@storybook/addon-storyshots-puppeteer/dist/toMatchSpecificImageSnapshot';
import toMatchImageSnapshot from '@storybook/addon-storyshots-puppeteer/dist/toMatchImageSnapshot';
import toMatchSpecificImageSnapshot from '@storybook/addon-storyshots-puppeteer/dist/toMatchSpecificImageSnapshot';
import toMatchImageSnapshot from '@storybook/addon-storyshots-puppeteer/dist/toMatchImageSnapshot';
import toMatchSpecificImageSnapshot from '@storybook/addon-storyshots-puppeteer/dist/toMatchSpecificImageSnapshot';
import toMatchImageSnapshot from '@storybook/addon-storyshots-puppeteer/dist/toMatchImageSnapshot';
import toMatchSpecificImageSnapshot from '@storybook/addon-storyshots-puppeteer/dist/toMatchSpecificImageSnapshot';
import to

Using AI Code Generation

const path = require('path');
const rootPath = require('storybook-root').paths;
const config = {
  stories: [path.join(rootPath, 'stories/**/*.stories.js')],
};
module.exports = config;
"scripts": {
  },
const path = require('path');
const rootPath = require('storybook-root').paths;
module.exports = async ({ config, mode }) => {
  config.module.rules.push({
    {
    },
    {
    },
    {
      options: {
        includePaths: [path.join(rootPath, 'src')],
      },
    },
    include: path.resolve(__dirname, '../'),
  });
  return config;
};
const path = require('path');
const rootPath = require('storybook-root').paths;
module.exports = async ({ config, mode }) => {
  config.module.rules.push({
    {
    },
    {
    },
    {
      options: {
        includePaths: [path.join(rootPath, 'src')],
      },
    },
    include: path.resolve(__dirname, '../'),
  });
  return config;
};
const path = require('path');
const rootPath = require('storybook-root').paths;
module.exports = async ({ config, mode }) => {
  config.module.rules.push({
    {
    },
    {
    },
    {
      options: {

Using AI Code Generation

const path = require('path')
const root = require('storybook-root')
const { paths } = require('storybook-root/paths')
const { getPaths } = require('storybook-root/paths')
const { getPaths: getPaths2 } = require('storybook-root/paths')
const { getPaths: getPaths3 } = require('storybook-root/paths')
const { getPaths: getPaths4 } = require('storybook-root/paths')
const p = paths()
const p2 = getPaths()
const p3 = getPaths2()
const p4 = getPaths3()
const p5 = getPaths4()
console.log(p)
console.log(p2)
console.log(p3)
console.log(p4)
console.log(p5)
console.log(root)
console.log(path.resolve(__dirname, '..'))
const path = require('path')
const root = require('storybook-root')
const { paths } = require('storybook-root/paths')
const { getPaths } = require('storybook-root/paths')
const { getPaths: getPaths2 } = require('storybook-root/paths')
const { getPaths: getPaths3 } = require('storybook-root/paths')
const { getPaths: getPaths4 } = require('storybook-root/paths')
module.exports = {
}
const path = require('path')
const root = require('storybook-root')
const { paths } = require('storybook-root/paths')
const { getPaths } = require('storybook-root/paths')
const { getPaths: getPaths2 } = require('storybook-root/paths')
const { getPaths: getPaths3 } = require('storybook-root/paths')
const { getPaths: getPaths4 } = require('storybook-root/paths')
module.exports = {
}
{

Using AI Code Generation

import { paths } from 'storybook-root'
const path = paths('src/components/')
import { storiesOf } from '@storybook/react'
import { action } from '@storybook/addon-actions'
import { path } from 'test.js'
storiesOf('Button', module)
  .add('with text', () => (
    <Button onClick={action('clicked')}>Hello Button</Button>
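Pulling the pieces above together, a minimal sketch of the typical pattern looks like the following. It assumes, as the earlier snippets suggest, that storybook-root exposes a paths helper that resolves paths relative to the project root; the exact export shape differs between the examples (a paths property on the package root vs. a paths() function from 'storybook-root/paths'), so verify against your installed copy before relying on it.

// .storybook/main.js -- minimal sketch, not an authoritative API reference.
const path = require('path');
// Assumed import, mirroring the snippet above; the package may instead expose
// `require('storybook-root').paths` as a plain value.
const { paths } = require('storybook-root/paths');

// Resolve the project root once, then build all story globs from it so the
// config keeps working when the .storybook folder is moved.
const rootPath = paths();

module.exports = {
  stories: [path.join(rootPath, 'stories/**/*.stories.js')],
};

The point of the helper, in every variant shown on this page, is the same: compute the repository root in one place and join everything else onto it, instead of sprinkling relative '../..' segments through the Storybook config.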

Automation Testing Tutorials

Learn to execute automation testing from scratch with the LambdaTest Learning Hub, right from setting up the prerequisites and running your first automation test to following best practices and diving deeper into advanced test scenarios. The LambdaTest Learning Hubs compile step-by-step guides to help you become proficient with different test automation frameworks such as Selenium, Cypress, and TestNG.

YouTube

You can also refer to the video tutorials on the LambdaTest YouTube channel for step-by-step demonstrations from industry experts.
