How to use the canonical_filename method in autotest

Best Python code snippet using autotest_python

analytics_function.py

Source:analytics_function.py Github

copy

Full Screen

from pathlib import Path
from precipy.identifiers import FileType
from precipy.identifiers import GeneratedFile
from precipy.identifiers import hash_for_fn
from precipy.identifiers import hash_for_supplemental_file
from precipy.identifiers import metadata_filename
import os
import pickle
import shutil
import tempfile
import time
import inspect


class AnalyticsFunction(object):
    """
    Wraps a single analytics function: computes a caching hash from the
    function's source and arguments, runs the function, and manages its
    generated/cached files plus pickled run metadata.
    """

    # Attributes persisted to (and restored from) the pickled metadata file.
    metadata_keys = ["function_name", "function_source", "function_output",
                     "kwargs", "files", "function_elapsed_seconds"]

    def __init__(self, fn, kwargs, key=None, previous_functions=None,
                 storages=None, cachePath=None, constants=None, logger=None):
        """
        Arguments:
        fn - a function object representing the analytics function to be called
        kwargs - a dictionary of argument names and values to be passed to the function when called
        previous_functions - a dictionary of function keys:hashcodes for previously run functions
        cachePath - an optional Path object representing the Batch's cache path, can be blank for testing
        """
        self.logger = logger
        self.is_populated = False
        self.key = key or fn.__name__
        self.fn = fn
        # Inject shared constants into the wrapped function's module globals
        # so it can reference them by name.
        for k, v in (constants or {}).items():
            self.fn.__globals__[k] = v
        self.kwargs = kwargs
        self.args = self.kwargs  # legacy alias; both names share the same dict
        # Defaults to an empty dict (not a list): this mapping is indexed by
        # function key in generate_hash() and path_to_cached_file().
        self.previous_functions = previous_functions or {}
        self.generate_hash(self.fn, self.kwargs)
        self.set_cache_path(cachePath)
        self.setup_files()
        self.function_output = None
        self.storages = storages or []
        self.function_name = self.fn.__name__
        self.function_source = inspect.getsource(self.fn)

    def __repr__(self):
        return "<AnalyticsFunction %s> " % self.key

    def _debug(self, msg):
        # The logger is optional (defaults to None); only log when present.
        if self.logger is not None:
            self.logger.debug(msg)

    def generate_hash(self, fn, kwargs):
        """
        Set the .h attribute containing a caching hash which will be different
        if the function source code, arguments, or dependencies change.

        Note: deliberately removes the 'depends' entry from kwargs (and thus
        from self.kwargs) so it is not later passed to the wrapped function.
        """
        self.depends_function_hashes = None
        if 'depends' in kwargs:
            self.depends_function_keys = kwargs['depends']
            self.depends_function_hashes = [self.previous_functions[k]
                                            for k in self.depends_function_keys]
            del kwargs['depends']
        self.h = hash_for_fn(fn, kwargs, self.depends_function_hashes)
        self._debug("calculated hash for %s is %s" % (self.key, self.h))

    def set_cache_path(self, cachePath):
        """
        Utility for setting a safe cachePath when one is not supplied - intended for testing.
        """
        if cachePath is None:
            tempdir = tempfile.gettempdir()
            cachePath = Path(tempdir) / "precipy" / "cache"
        self.cachePath = cachePath

    def setup_files(self):
        # Initialize the file registry with the metadata entry for this run.
        self.files = {}
        if metadata_filename not in self.files:
            self.files[metadata_filename] = GeneratedFile(
                metadata_filename, self.h, FileType.METADATA,
                cache_filepath=self.metadata_cache_filepath())

    def cache_dir(self, h):
        """
        Returns a Path to the directory in which a cache file should be stored,
        creating the directory if it doesn't exist.
        """
        prefix = h[0:2]  # shard the cache by the first two hash characters
        parent_dir = self.cachePath / prefix
        os.makedirs(parent_dir, exist_ok=True)
        return parent_dir

    def call_function(self):
        # 'function_name' is bookkeeping, not an argument of the wrapped fn.
        kwargs = dict((k, v) for k, v in self.kwargs.items()
                      if k != 'function_name')
        return self.fn(self, **kwargs)

    def run_function(self):
        """Run the wrapped function, record elapsed time, persist metadata."""
        start_time = time.time()
        self.function_output = self.call_function()
        self.function_elapsed_seconds = time.time() - start_time
        self.save_metadata()
        return self.function_metadata()

    def upload_to_storages(self, canonical_filename, cache_filepath):
        for storage in self.storages:
            public_url = storage.upload_cache(cache_filepath)
            self.files[canonical_filename].public_urls.append(public_url)

    def download_from_storages(self, local_cache_filepath):
        """Try each storage in turn; True on the first successful download."""
        self._debug("in download_from_storages for %s" % local_cache_filepath)
        for storage in self.storages:
            self._debug("  from %r" % storage)
            if storage.download_cache(local_cache_filepath):
                self._debug("  success!")
                return True
        self._debug("  not found")
        return False

    def function_metadata(self):
        # Missing attributes are recorded as None rather than raising.
        return dict((k, getattr(self, k, None)) for k in self.metadata_keys)

    def metadata_cache_filename(self):
        return "%s.pkl" % self.h

    def metadata_cache_filepath(self):
        return self.cache_dir(self.h) / self.metadata_cache_filename()

    def metadata_path_exists(self):
        return os.path.exists(self.metadata_cache_filepath())

    def save_metadata(self):
        filepath = self.metadata_cache_filepath()
        self._debug("  saving metadata to %s" % filepath)
        with open(filepath, 'wb') as f:
            pickle.dump(self.function_metadata(), f)
        self.upload_to_storages(metadata_filename, filepath)

    def read_metadata(self):
        with open(self.metadata_cache_filepath(), 'rb') as f:
            return pickle.load(f)

    def load_metadata(self):
        """Load pickled metadata and copy each entry onto this instance."""
        meta = self.read_metadata()
        for k, v in meta.items():
            setattr(self, k, v)
        self.is_populated = True
        return meta

    def supplemental_file_hash(self, canonical_filename, fn_h=None):
        return hash_for_supplemental_file(canonical_filename, fn_h or self.h)

    def supplemental_file_cache_filepath(self, canonical_filename, fn_h=None):
        ext = os.path.splitext(canonical_filename)[1]
        h = self.supplemental_file_hash(canonical_filename, fn_h)
        cache_filename = "%s%s" % (h, ext)
        return self.cache_dir(h) / cache_filename

    def generate_file(self, canonical_filename, mode='w'):
        """Generator: yields an open cache file handle, then registers the file."""
        cache_filepath = self.supplemental_file_cache_filepath(canonical_filename)
        with open(cache_filepath, mode) as f:
            yield f
        self.append_generated_file(canonical_filename)

    def add_existing_file(self, filepath, canonical_filename=None, remove=False):
        """Copy an existing file into the cache, optionally removing the original."""
        if canonical_filename is None:
            canonical_filename = os.path.basename(filepath)
        cache_filepath = self.supplemental_file_cache_filepath(canonical_filename)
        shutil.copyfile(filepath, cache_filepath)
        self.append_generated_file(canonical_filename)
        if remove:
            os.remove(filepath)

    def path_to_cached_file(self, canonical_filename, fn_key=None):
        # When fn_key is given, resolve against a previously run function's hash.
        if fn_key:
            fn_h = self.previous_functions[fn_key]
        else:
            fn_h = self.h
        return self.supplemental_file_cache_filepath(canonical_filename, fn_h)

    def read_file(self, canonical_filename, fn_key=None, mode='r'):
        """Generator: yields an open handle on a cached file."""
        cache_filepath = self.path_to_cached_file(canonical_filename, fn_key)
        with open(cache_filepath, mode) as f:
            yield f

    def append_generated_file(self, canonical_filename):
        """
        Adds file to list of supplemental files.
        """
        # verify that file exists in cache already
        filepath = self.supplemental_file_cache_filepath(canonical_filename)
        assert os.path.exists(filepath), "file must be in cache before calling append_generated_file"
        # BUG FIX: arguments were swapped - supplemental_file_hash(self.h,
        # canonical_filename) - which produced a hash inconsistent with the
        # one used by supplemental_file_cache_filepath above.
        h = self.supplemental_file_hash(canonical_filename)
        self.files[canonical_filename] = GeneratedFile(canonical_filename, h,
                                                       cache_filepath=filepath)

Full Screen

Full Screen

PRESUBMIT.py

Source:PRESUBMIT.py Github

copy

Full Screen

# Copyright (c) 2012 The Native Client Authors. All rights reserved.
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.

# Documentation on PRESUBMIT.py can be found at:
# http://www.chromium.org/developers/how-tos/depottools/presubmit-scripts

import json
import hashlib
import os
import re
import gclient_utils


def CheckChange(input_api, message_constructor):
  """Checks for files with a modified contents.

  Some checking of validator happens on buildbots, but comprehensive
  enumeration tests must be run locally.

  There are two dangers:
    1. Source code for autogenerated files can be modified without
       regeneration of said files.
    2. Source of validator can be changed without running the aforementioned
       tests.

  This function catches the situation when source files for validator_x86_??.c
  are changed but files are not regenerated and it also catches the situation
  when code is changed without running the dfacheckvalidator tests.
  """
  errors = []
  changelist = input_api.change
  root_path = changelist.RepositoryRoot()
  if input_api.change.scm == 'svn':
    try:
      # With SVN you can decide to commit not all modified files but some of
      # them thus separate GetAllModifiedFiles() and GetModifiedFiles() lists
      # are provided.  We need to remove root_path from the name of file.
      assert all(filename.startswith(root_path + os.path.sep)
                 for filename in changelist.GetAllModifiedFiles())
      all_filenames = [filename[len(root_path + os.path.sep):]
                       for filename in changelist.GetAllModifiedFiles()]
      assert all(filename.startswith(root_path + os.path.sep)
                 for filename in changelist.GetModifiedFiles())
      modified_filenames = [filename[len(root_path + os.path.sep):]
                            for filename in changelist.GetModifiedFiles()]
    except Exception:
      # Narrowed from a bare "except:" so KeyboardInterrupt/SystemExit are
      # not swallowed.  If gcl is not available (which happens in CQ bots)
      # then we'll try to use AffectedFiles() instead of GetAllModifiedFiles().
      all_filenames = [af.LocalPath() for af in changelist.AffectedFiles()]
      modified_filenames = all_filenames
  else:
    # With GIT you must commit all modified files thus only AffectedFiles()
    # list is provided.
    all_filenames = [af.LocalPath() for af in changelist.AffectedFiles()]
    modified_filenames = all_filenames
  json_filename = os.path.join(
      'src', 'trusted', 'validator_ragel', 'gen', 'protected_files.json')
  protected_files = json.loads(
      gclient_utils.FileRead(os.path.join(root_path, json_filename)))
  need_dfagen = False
  need_dfacheckvalidator = False
  canonical_prefix = 'native_client/'
  for filename in sorted(all_filenames):
    canonical_filename = canonical_prefix + filename.replace('\\', '/')
    if canonical_filename in protected_files['validator']:
      # Protected validator sources must match their recorded sha512.
      file_contents = gclient_utils.FileRead(os.path.join(root_path, filename))
      sha512 = hashlib.sha512(file_contents).hexdigest()
      if sha512 != protected_files['validator'][canonical_filename]:
        errors.append(message_constructor(
            'Incorrect {0} hash:\n expected {1}\n got {2}'.format(
                canonical_filename,
                protected_files['validator'][canonical_filename],
                sha512)))
        need_dfacheckvalidator = True
    if canonical_filename in protected_files['generating']:
      # A generating source newer than any generated automaton means the
      # automata were not regenerated after the edit.
      for automaton_filename in protected_files['generated']:
        if (os.stat(os.path.join(root_path, filename)).st_mtime >
            os.stat(os.path.join(root_path,
                automaton_filename[len(canonical_prefix):])).st_mtime):
          errors.append(message_constructor(
              'File {0} is older than {1}'.format(
                  automaton_filename, canonical_filename)))
          need_dfagen = True
    if (canonical_filename in protected_files['validator'] or
        canonical_filename in protected_files['generating'] or
        filename == json_filename):
      if filename not in modified_filenames:
        errors.append(message_constructor(
            'File {0} is changed but is excluded from this CL'.format(
                canonical_filename)))
  if need_dfagen:
    errors.append(message_constructor(
        'Please run "./scons dfagen" before commit!'))
  if need_dfacheckvalidator:
    errors.append(message_constructor(
        'Please run "./scons dfacheckvalidator" before commit!'))
  return errors


def CheckChangeOnUpload(input_api, output_api):
  return CheckChange(input_api,
                     message_constructor=output_api.PresubmitPromptWarning)


def CheckChangeOnCommit(input_api, output_api):
  # NOTE(review): the visible source is truncated after "CheckChange(input_api,";
  # completing with the commit-time error constructor that mirrors the upload
  # hook above - confirm against the original file.
  return CheckChange(input_api,
                     message_constructor=output_api.PresubmitError)

Full Screen

Full Screen

filename.py

Source:filename.py Github

copy

Full Screen

...6_replace_re = re.compile(ur'(?:^[.])|[\x00-\x1f\x7f-\x9f\"/\\\[\]:;|=,*?]|(?:[.]$)',7re.UNICODE)8_prefix_re = re.compile(ur'^(?:\d|con|aux|(?:com|lpt)[1-4]|prn|nul|(?:rsrc)$|(?:$))',9re.UNICODE | re.IGNORECASE)10def canonical_filename(filename):11 """12 returns a canonicalized copy of filename which should be safe on13 most filesystems; generates a single path segment; the canonical14 filename is returned as a Unicode string; non-Unicode input is15 stringified and decoded as UTF-816 NOTE: this operation should be idempotent17 """18 if type(filename) is not type(u''):19 filename = str(filename).decode('utf-8')20 filename = unicodedata.normalize('NFC', filename)21 filename = filename.strip()22 filename = _replace_re.sub(u'_', filename)23 if _prefix_re.match(unicodedata.normalize('NFKC', filename)):24 filename = '_' + filename25 if len(filename) > 255:26 filename = filename[:127] + u'\N{horizontal ellipsis}' + filename[-127:]27 return filename28def urlify_filename(filename):29 return urllib.quote(to_utf8(canonical_filename(filename)))30def _test():31 for input, expected_output in (32 (u'coo\N{combining diaeresis}perative', u'co\N{latin small letter o with diaeresis}perative'),33 (u'COM1', u'_COM1'),34 (u'CON', u'_CON'),35 (u'rsrc', u'_rsrc'),36 (u'COM1:', u'_COM1_'),37 (u'lpt3.txt', u'_lpt3.txt'),38 (u'\N{fullwidth latin small letter a}\N{fullwidth latin small letter u}\N{fullwidth latin small letter x}',39 u'_\N{fullwidth latin small letter a}\N{fullwidth latin small letter u}\N{fullwidth latin small letter x}'),40 (u'', u'_'),41 (u'_', u'_'),42 (u'/etc/passwd', u'_etc_passwd'),43 (u'.', u'_'),44 (u'\N{TRADE MARK SIGN}', u'\N{TRADE MARK SIGN}'),45 (u'\t\n\x0b\x0c\r\x1c\x1d\x1e\x1f \x85\xa0\u1680\u2000\u2001\u2002\u2003\u2004\u2005\u2006\u2007\u2008\u2009\u200a\u200b\u2028\u2029\u202f\u205f\u3000 strip whitespace \t\n\x0b\x0c\r\x1c\x1d\x1e\x1f \x85\xa0\u1680\u2000\u2001\u2002\u2003\u2004\u2005\u2006\u2007\u2008\u2009\u200a\u200b\u2028\u2029\u202f\u205f\u3000',46 
u'strip whitespace'),47 (u''.join([ unichr(x) for x in xrange(0x00, 0x1c) ]),48 u''.join([ '_' for x in xrange(0x00, 0x1c) ])),49 (u''.join([ unichr(x) for x in xrange(0x7f, 0xa0) ]),50 u''.join([ '_' for x in xrange(0x7f, 0xa0) ])),51 ):52 try:53 output = canonical_filename(input)54 assert expected_output == output55 input = input.encode('utf-8')56 output = canonical_filename(input)57 assert expected_output == output58 except:59 import sys60 sys.stderr.write('canonical_filename(%r) should yield %r but yielded %r\n' % (input, expected_output, output))61 raise62 pass63_test()64if __name__ == '__main__':65 import sys66 filenames = sys.argv[1:]67 if not filenames:68 filenames = sys.stdin69 for filename in filenames:...

Full Screen

Full Screen

Automation Testing Tutorials

Learn to execute automation testing from scratch with the LambdaTest Learning Hub. Right from setting up the prerequisites to run your first automation test, to following best practices and diving deeper into advanced test scenarios, the LambdaTest Learning Hub compiles a list of step-by-step guides to help you become proficient with different test automation frameworks such as Selenium, Cypress, and TestNG.

LambdaTest Learning Hubs:

YouTube

You could also refer to video tutorials over LambdaTest YouTube channel to get step by step demonstration from industry experts.

Run autotest automation tests on LambdaTest cloud grid

Perform automation testing on 3000+ real desktop and mobile devices online.

Try LambdaTest Now !!

Get 100 automation test minutes FREE!!

Next-Gen App & Browser Testing Cloud

Was this article helpful?

Helpful

Not Helpful