How to use the update_properties method in toolium

Best Python code snippets using toolium
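Note that the snippets below do not call toolium directly: they come from projects that define their own `update_properties` (a parameter in Chromium's build recipes and a local dict in an AWS Lambda handler). In toolium itself, `update_properties` is a method on the config object (`ExtendedConfigParser`) that overrides values already loaded from `properties.cfg`, using `Section_option`-style keys. Here is a minimal sketch, assuming the current toolium API; the config file path and the property keys are illustrative, not taken from the snippets on this page:

# Minimal sketch, assuming toolium's ExtendedConfigParser API; the config
# file path and property keys below are illustrative, not from this page.
from toolium.config_parser import ExtendedConfigParser

# Load the test configuration (path is an assumption for this example).
config = ExtendedConfigParser.get_config_from_file('conf/properties.cfg')

# Each key has the form 'Section_option'; only options that already exist
# in the loaded config are overridden.
config.update_properties({
    'Driver_type': 'chrome',    # [Driver] type = chrome
    'Server_enabled': 'true',   # [Server] enabled = true
})

print(config.get('Driver', 'type'))  # -> 'chrome'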

api.py

Source: api.py (GitHub)

# Copyright 2013 The Chromium Authors. All rights reserved.
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.

import re
import sys

import manual_bisect_files
from recipe_engine import recipe_api

# Regular expression to identify a Git hash.
GIT_COMMIT_HASH_RE = re.compile(r'[a-zA-Z0-9]{40}')
# The Google Storage metadata key for the full commit position.
GS_COMMIT_POSITION_KEY = 'Cr-Commit-Position'
# The Google Storage metadata key for the commit position number.
GS_COMMIT_POSITION_NUMBER_KEY = 'Cr-Commit-Position-Number'
# The Google Storage metadata key for the Git commit hash.
GS_GIT_COMMIT_KEY = 'Cr-Git-Commit'


class ArchiveApi(recipe_api.RecipeApi):
  """Chromium specific module for zipping, uploading and downloading build
  artifacts implemented as a wrapper around zip_build.py script.

  If you need to upload or download build artifacts (or any other files) for
  something other than Chromium flavor, consider using 'zip' + 'gsutil' or
  'isolate' modules instead.
  """

  def zip_and_upload_build(
      self, step_name, target, build_url=None, src_dir=None,
      build_revision=None, cros_board=None, package_dsym_files=False,
      exclude_files=None, exclude_perf_test_files=False,
      update_properties=None, store_by_hash=True,
      platform=None, **kwargs):
    """Returns a step invoking zip_build.py to zip up a Chromium build.
    If build_url is specified, also uploads the build."""
    if not src_dir:
      src_dir = self.m.path['checkout']
    args = [
        '--target', target,
        '--gsutil-py-path', self.m.depot_tools.gsutil_py_path,
        '--staging-dir', self.m.path['cache'].join('chrome_staging'),
        '--src-dir', src_dir,
    ]
    args += self.m.build.slave_utils_args
    if 'build_archive_url' in self.m.properties:
      args.extend(['--use-build-url-name', '--build-url',
                   self.m.properties['build_archive_url']])
    elif build_url:
      args.extend(['--build-url', build_url])
    if build_revision:
      args.extend(['--build_revision', build_revision])
    if cros_board:
      args.extend(['--cros-board', cros_board])
    if package_dsym_files:
      args.append('--package-dsym-files')
    if exclude_files:
      args.extend(['--exclude-files', exclude_files])
    if 'gs_acl' in self.m.properties:
      args.extend(['--gs-acl', self.m.properties['gs_acl']])
    if exclude_perf_test_files and platform:
      include_bisect_file_list = (
          manual_bisect_files.CHROME_REQUIRED_FILES.get(platform))
      include_bisect_strip_list = (
          manual_bisect_files.CHROME_STRIP_LIST.get(platform))
      include_bisect_whitelist = (
          manual_bisect_files.CHROME_WHITELIST_FILES.get(platform))
      if include_bisect_file_list:
        inclusions = ','.join(include_bisect_file_list)
        args.extend(['--include-files', inclusions])
      if include_bisect_strip_list:
        strip_files = ','.join(include_bisect_strip_list)
        args.extend(['--strip-files', strip_files])
      if include_bisect_whitelist:
        args.extend(['--whitelist', include_bisect_whitelist])
      args.extend(['--exclude-extra'])
    # If update_properties is passed in and store_by_hash is False,
    # we store it with commit position number instead of a hash
    if update_properties and not store_by_hash:
      commit_position = self._get_commit_position(
          update_properties, None)
      cp_branch, cp_number = self.m.commit_position.parse(commit_position)
      args.extend(['--build_revision', cp_number])
    properties_json = self.m.json.dumps(self.m.properties.legacy())
    args.extend(['--factory-properties', properties_json,
                 '--build-properties', properties_json])
    kwargs['allow_subannotations'] = True
    self.m.build.python(
        step_name,
        self.package_repo_resource('scripts', 'slave', 'zip_build.py'),
        args,
        infra_step=True,
        **kwargs
    )

  def _get_commit_position(self, update_properties, primary_project):
    """Returns the commit position of the project (or the specified primary
    project).
    """
    if primary_project:
      key = 'got_%s_revision_cp' % primary_project
    else:
      key = 'got_revision_cp'
    return update_properties[key]

  def _get_git_commit(self, update_properties, primary_project):
    """Returns: (str/None) the git commit hash for a given project.

    Attempts to identify the git commit hash for a given project. If
    'primary_project' is None, or if there is no git commit hash for the
    specified primary project, the checkout-wide commit hash will be used.

    If none of the candidate configurations are present, the value None will be
    returned.
    """
    if primary_project:
      commit = update_properties.get('got_%s_revision_git' % primary_project)
      if commit:
        return commit
      commit = update_properties.get('got_%s_revision' % primary_project)
      if commit and GIT_COMMIT_HASH_RE.match(commit):
        return commit
    commit = update_properties.get('got_revision_git')
    if commit:
      return commit
    commit = update_properties.get('got_revision')
    if commit and GIT_COMMIT_HASH_RE.match(commit):
      return commit
    return None

  def _get_comparable_upload_path_for_sort_key(self, branch, number):
    """Returns a sortable string corresponding to the commit position."""
    if branch and branch != 'refs/heads/master':
      branch = branch.replace('/', '_')
      return '%s-%s' % (branch, number)
    return str(number)

  def clusterfuzz_archive(
      self, build_dir, update_properties, gs_bucket,
      archive_prefix, archive_subdir_suffix='', gs_acl=None,
      revision_dir=None, primary_project=None, **kwargs):
    # TODO(machenbach): Merge revision_dir and primary_project. The
    # revision_dir is only used for building the archive name while the
    # primary_project is authoritative for the commit position.
    """Archives and uploads a build to google storage.

    The build is filtered by a list of file exclusions and then zipped. It is
    uploaded to google storage with some metadata about the commit position
    and revision attached. The zip file follows the naming pattern used by
    clusterfuzz. The file pattern is:
    <archive name>-<platform>-<target><optional component>-<sort-key>.zip

    Example: cool-project-linux-release-refs_heads_b1-12345.zip
    The archive name is "cool-project" and there's no component build. The
    commit is on a branch called b1 at commit position number 12345.

    Example: cool-project-mac-debug-x10-component-234.zip
    The archive name is "cool-project" and the component's name is "x10". The
    component is checked out in branch master with commit position number 234.

    Args:
      build_dir: The absolute path to the build output directory, e.g.
                 [slave-build]/src/out/Release
      update_properties: The properties from the bot_update step (containing
                         commit information)
      gs_bucket: Name of the google storage bucket to upload to
      archive_prefix: Prefix of the archive zip file
      archive_subdir_suffix: Optional suffix to the google storage subdirectory
                             name that contains the archive files
      gs_acl: ACL used for the file on google storage
      revision_dir: Optional component name if the main revision for this
                    archive is a component revision
      primary_project: Optional project name for specifying the revision of the
                       checkout
    """
    target = self.m.path.split(build_dir)[-1]
    commit_position = self._get_commit_position(
        update_properties, primary_project)
    cp_branch, cp_number = self.m.commit_position.parse(commit_position)
    build_git_commit = self._get_git_commit(update_properties, primary_project)
    staging_dir = self.m.path.mkdtemp('chrome_staging')
    llvm_tools_to_copy = ['llvm-symbolizer', 'sancov']
    llvm_bin_dir = self.m.path['checkout'].join('third_party', 'llvm-build',
                                                'Release+Asserts', 'bin')
    ext = '.exe' if self.m.platform.is_win else ''
    for tool in llvm_tools_to_copy:
      tool_src = self.m.path.join(llvm_bin_dir, tool + ext)
      tool_dst = self.m.path.join(build_dir, tool + ext)
      if not self.m.path.exists(tool_src):
        continue
      try:
        self.m.file.copy('Copy ' + tool, tool_src, tool_dst)
      except self.m.step.StepFailure:  # pragma: no cover
        # On some builds, it appears that a soft/hard link of llvm-symbolizer
        # exists in the build directory, which causes shutil.copy to raise an
        # exception. Either way, this shouldn't cause the whole build to fail.
        pass

    # Build the list of files to archive.
    filter_result = self.m.python(
        'filter build_dir',
        self.resource('filter_build_files.py'),
        [
            '--dir', build_dir,
            '--platform', self.m.platform.name,
            '--output', self.m.json.output(),
        ],
        infra_step=True,
        step_test_data=lambda: self.m.json.test_api.output(['file1', 'file2']),
        **kwargs
    )
    zip_file_list = filter_result.json.output

    # Use the legacy platform name as Clusterfuzz has some expectations on
    # this (it only affects Windows, where it replaces 'win' with 'win32').
    pieces = [self.legacy_platform_name(), target.lower()]
    if archive_subdir_suffix:
      pieces.append(archive_subdir_suffix)
    subdir = '-'.join(pieces)

    # Components like v8 get a <name>-v8-component-<revision> infix.
    component = ''
    if revision_dir:
      component = '-%s-component' % revision_dir

    sortkey_path = self._get_comparable_upload_path_for_sort_key(
        cp_branch, cp_number)
    zip_file_base_name = '%s-%s-%s%s-%s' % (archive_prefix,
                                            self.legacy_platform_name(),
                                            target.lower(),
                                            component,
                                            sortkey_path)
    zip_file_name = '%s.zip' % zip_file_base_name

    self.m.python(
        'zipping',
        self.resource('zip_archive.py'),
        [
            staging_dir,
            zip_file_base_name,
            self.m.json.input(zip_file_list),
            build_dir,
        ],
        infra_step=True,
        **kwargs
    )

    zip_file = staging_dir.join(zip_file_name)

    gs_metadata = {
        GS_COMMIT_POSITION_NUMBER_KEY: cp_number,
    }
    if commit_position:
      gs_metadata[GS_COMMIT_POSITION_KEY] = commit_position
    if build_git_commit:
      gs_metadata[GS_GIT_COMMIT_KEY] = build_git_commit

    gs_args = []
    if gs_acl:
      gs_args.extend(['-a', gs_acl])
    self.m.gsutil.upload(
        zip_file,
        gs_bucket,
        "/".join([subdir, zip_file_name]),
        args=gs_args,
        metadata=gs_metadata,
        use_retry_wrapper=False,
    )
    self.m.file.remove(zip_file_name, zip_file)

  def download_and_unzip_build(
      self, step_name, target, build_url, src_dir=None,
      build_revision=None, build_archive_url=None, **kwargs):
    """Returns a step invoking extract_build.py to download and unzip
    a Chromium build."""
    if not src_dir:
      src_dir = self.m.path['checkout']
    args = [
        '--gsutil-py-path', self.m.depot_tools.gsutil_py_path,
        '--target', target,
        '--src-dir', src_dir,
    ]
    args += self.m.build.slave_utils_args
    if build_archive_url:
      args.extend(['--build-archive-url', build_archive_url])
    else:
      args.extend(['--build-url', build_url])
    if build_revision:
      args.extend(['--build_revision', build_revision])
    properties = (
        ('mastername', '--master-name'),
        ('buildnumber', '--build-number'),
        ('parent_builddir', '--parent-build-dir'),
        ('parentname', '--parent-builder-name'),
        ('parentslavename', '--parent-slave-name'),
        ('parent_buildnumber', '--parent-build-number'),
        ('webkit_dir', '--webkit-dir'),
        ('revision_dir', '--revision-dir'),
    )
    for property_name, switch_name in properties:
      if self.m.properties.get(property_name):
        args.extend([switch_name, self.m.properties[property_name]])
    # TODO(phajdan.jr): Always halt on missing build.
    if self.m.properties.get('halt_on_missing_build'):  # pragma: no cover
      args.append('--halt-on-missing-build')
    self.m.build.python(
        step_name,
        self.package_repo_resource('scripts', 'slave', 'extract_build.py'),
        args,
        infra_step=True,
        **kwargs
    )

  def legacy_platform_name(self):
    """Replicates the behavior of PlatformName() in chromium_utils.py."""
    if self.m.platform.is_win:
      return 'win32'
    return self.m.platform.name

  def _legacy_url(self, is_download, gs_bucket_name, extra_url_components):
    """Computes a build_url suitable for uploading a zipped Chromium
    build to Google Storage.

    The reason this is named 'legacy' is that there are a large number
    of dependencies on the exact form of this URL. The combination of
    zip_build.py, extract_build.py, slave_utils.py, and runtest.py
    require that:
      * The platform name be exactly one of 'win32', 'mac', or 'linux'
      * The upload URL only name the directory on GS into which the
        build goes (zip_build.py computes the name of the file)
      * The download URL contain the unversioned name of the zip archive
      * The revision on the builder and tester machines be exactly the
        same

    There were too many dependencies to tease apart initially, so this
    function simply emulates the form of the URL computed by the
    underlying scripts.

    extra_url_components, if specified, should be a string without a
    trailing '/' which is inserted in the middle of the URL.

    The builder_name, or parent_buildername, is always automatically
    inserted into the URL."""
    result = ('gs://' + gs_bucket_name)
    if extra_url_components:
      result += ('/' + extra_url_components)
    if is_download:
      result += ('/' + self.m.properties['parent_buildername'] + '/' +
                 'full-build-' + self.legacy_platform_name() +
                 '.zip')
    else:
      result += '/' + self.m.properties['buildername']
    return result

  def legacy_upload_url(self, gs_bucket_name, extra_url_components=None):
    """Returns a url suitable for uploading a Chromium build to Google
    Storage.

    extra_url_components, if specified, should be a string without a
    trailing '/' which is inserted in the middle of the URL.

    The builder_name, or parent_buildername, is always automatically
    inserted into the URL."""
    return self._legacy_url(False, gs_bucket_name, extra_url_components)

  def legacy_download_url(self, gs_bucket_name, extra_url_components=None):
    """Returns a url suitable for downloading a Chromium build from
    Google Storage.

    extra_url_components, if specified, should be a string without a
    trailing '/' which is inserted in the middle of the URL.

    The builder_name, or parent_buildername, is always automatically
    inserted into the URL."""
...
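In this recipe module, `update_properties` is simply the dict of `got_*` properties produced by the `bot_update` step. The short sketch below replays the `_get_commit_position` lookup outside the recipe engine to show which keys it reads; the dict values are illustrative, not real build output:

# Replaying ArchiveApi._get_commit_position outside the recipe engine;
# the update_properties values are illustrative bot_update output.
update_properties = {
    'got_revision': '5e3250aadda2b170692f8e762d43b7e8deadbeef',
    'got_revision_cp': 'refs/heads/master@{#123456}',
    'got_x10_revision_cp': 'refs/heads/master@{#234}',
}

def get_commit_position(update_properties, primary_project=None):
    # A component build reads 'got_<project>_revision_cp',
    # otherwise the checkout-wide 'got_revision_cp' key.
    key = 'got_%s_revision_cp' % primary_project if primary_project else 'got_revision_cp'
    return update_properties[key]

print(get_commit_position(update_properties))         # refs/heads/master@{#123456}
print(get_commit_position(update_properties, 'x10'))  # refs/heads/master@{#234}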

update_current_poll.py

Source: update_current_poll.py (GitHub)

import boto3
import hashlib
import time
import json
from botocore.exceptions import ClientError
from boto3.dynamodb.conditions import Key

dynamodb = boto3.resource('dynamodb')
responseHeaders = {'Access-Control-Allow-Origin': '*', 'Access-Control-Allow-Credentials': True}

def hash_password(password, salt):
    """Hashes the combination of password and salt with the sha256 algorithm, over 10 iterations.

    Parameters:
        password: Password.
        salt: Salt.

    Returns:
        Hashed password.
    """
    hashed_password = password
    for i in range(10):
        salted_password = (salt + hashed_password).encode('utf-8')
        hashed_password = hashlib.sha256(salted_password).hexdigest()
    return hashed_password

def get_item_admins(admin_name, second_attempt=False):
    """Tries up to 2 times to read the admin credentials (if any) from the admins table.

    Parameters:
        admin_name: Admin name/id.
        second_attempt: Flag for the second attempt.

    Returns:
        Admin credentials.
    """
    admins_table = dynamodb.Table('fp.admins')
    try:
        response = admins_table.get_item(
            Key={
                'name': admin_name
            }
        )
    except Exception:
        if second_attempt:
            raise Exception('Database error!')
        # tries again if the first attempt failed
        time.sleep(1)
        return get_item_admins(admin_name, True)

    if 'Item' not in response:
        return None

    return response['Item']

def get_current_poll_id(second_attempt=False):
    """Tries up to 2 times to read the current poll id from the config table.

    Parameters:
        second_attempt: Flag for the second attempt.

    Returns:
        Current poll id.
    """
    config_table = dynamodb.Table('fp.config')
    try:
        response = config_table.get_item(
            Key={
                'id': 'CurrentPoll'
            }
        )
    except Exception:
        if second_attempt:
            raise Exception('Database error!')
        # tries again if the first attempt failed
        time.sleep(1)
        return get_current_poll_id(True)

    return int(response['Item']['value'])

def query_participants(poll_id, last_evaluated_key=None, second_attempt=False):
    """Queries the participants table and returns all results for the given poll;
    if the first attempt fails or leaves unprocessed keys, tries again.

    Parameters:
        poll_id: Current poll id.
        last_evaluated_key: Last evaluated key, if some data is not read.
        second_attempt: Flag for the second attempt.

    Returns:
        List with participants.
    """
    result = []
    participants_table = dynamodb.Table('fp.participants')
    try:
        if last_evaluated_key:
            response = participants_table.query(
                KeyConditionExpression=Key('poll').eq(poll_id),
                ConsistentRead=True,
                ExclusiveStartKey=last_evaluated_key
            )
        else:
            response = participants_table.query(
                KeyConditionExpression=Key('poll').eq(poll_id),
                ConsistentRead=True
            )
    except Exception:
        if second_attempt:
            raise Exception('Database error!')

        # tries again if the first attempt failed
        time.sleep(1)
        return query_participants(poll_id, last_evaluated_key, True)

    if 'Items' in response:
        result = response['Items']
    if (not second_attempt) and ('LastEvaluatedKey' in response):
        # tries again if there are unprocessed keys
        try:
            time.sleep(1)
            second_result = query_participants(poll_id, response['LastEvaluatedKey'], True)
        except Exception:
            raise Exception('Database error!')

        # merge the second page of items into the flat result list
        result.extend(second_result)

    return result

def update_item_polls(poll_id, update_expression, expression_attributes, expression_names, second_attempt=False):
    """Tries up to 2 times to update the current poll.

    Parameters:
        poll_id: Current poll id.
        update_expression: How to update the polls table.
        expression_attributes: Values used in the update expression.
        expression_names: Names used in the update expression.
        second_attempt: Flag for the second attempt.

    Returns:
        Status of updating.
    """
    # check if need is smaller or equal to max
    participants_check = '(:need = :none OR (:max <> :none OR :need <= #max)) AND (:max = :none OR (:need <> :none OR :max >= #need))'
    # check that the new end and dt are bigger than start, and if there is no new end, that the new dt is smaller than the old end
    dates_check = '(:end = :none OR ((:dt <> :none OR :end >= #dt) AND (:end > #start))) AND (:dt = :none OR ((:end <> :none OR :dt <= #end) AND (:dt > #start)))'
    polls_table = dynamodb.Table('fp.polls')
    try:
        polls_table.update_item(
            Key={
                'id': poll_id
            },
            UpdateExpression=update_expression,
            ConditionExpression=dates_check + ' AND ' + participants_check,
            ExpressionAttributeValues=expression_attributes,
            ExpressionAttributeNames=expression_names
        )
    except ClientError as e:
        if e.response['Error']['Code'] == "ConditionalCheckFailedException":
            return {
                'statusCode': 400,
                'headers': responseHeaders,
                'body': json.dumps({'errorMessage': e.response['Error']['Message'] + '!'})
            }
        if second_attempt:
            raise Exception('Database error!')
        # tries again if the first attempt failed
        time.sleep(1)
        return update_item_polls(poll_id, update_expression, expression_attributes, expression_names, True)

    return {
        'statusCode': 200,
        'headers': responseHeaders,
        'body': json.dumps({'statusMessage': 'The current poll is successfully updated!'})
    }

def update_current_poll(event, context):
    """Updates the current poll; admin credentials are mandatory.

    Returns:
        Status of updating.
    """
    if event['body'] is None:
        return {
            'statusCode': 400,
            'headers': responseHeaders,
            'body': json.dumps({'errorMessage': 'No request body!'})
        }
    try:
        requestBody = json.loads(event['body'])
    except Exception:
        return {
            'statusCode': 400,
            'headers': responseHeaders,
            'body': json.dumps({'errorMessage': 'Bad request body!'})
        }

    if not isinstance(requestBody, dict):
        return {
            'statusCode': 400,
            'headers': responseHeaders,
            'body': json.dumps({'errorMessage': 'Bad request body!'})
        }

    # check admin credentials
    if ('admin_name' not in requestBody) or ('admin_password' not in requestBody):
        return {
            'statusCode': 403,
            'headers': responseHeaders,
            'body': json.dumps({'errorMessage': 'Access denied, missing admin_name and/or admin_password!'})
        }

    admin_name = requestBody['admin_name']
    admin_password = requestBody['admin_password']
    try:
        admin = get_item_admins(admin_name)
    except Exception:
        return {
            'statusCode': 500,
            'headers': responseHeaders,
            'body': json.dumps({'errorMessage': 'Database error!'})
        }
    if admin is None:
        return {
            'statusCode': 403,
            'headers': responseHeaders,
            'body': json.dumps({'errorMessage': 'Access denied, wrong credentials!'})
        }

    db_hashed_password = admin['password']
    db_salt = admin['salt']
    hashed_password = hash_password(admin_password, db_salt)
    if db_hashed_password != hashed_password:
        return {
            'statusCode': 403,
            'headers': responseHeaders,
            'body': json.dumps({'errorMessage': 'Access denied, wrong credentials!'})
        }

    # check properties for updating
    properties = ['title', 'note', 'locUrl', 'locDesc', 'max', 'need', 'end', 'dt']
    update_properties = {}
    found = False
    for prop in properties:
        update_properties[prop] = None
        if prop in requestBody:
            update_properties[prop] = requestBody[prop]
            found = True
    if not found:
        return {
            'statusCode': 400,
            'headers': responseHeaders,
            'body': json.dumps({'errorMessage': 'Nothing to update!'})
        }

    # check string lengths
    if (update_properties['title'] is not None) and (len(update_properties['title']) > 50):
        return {
            'statusCode': 400,
            'headers': responseHeaders,
            'body': json.dumps({'errorMessage': 'Too long title!'})
        }
    if (update_properties['note'] is not None) and (len(update_properties['note']) > 100):
        return {
            'statusCode': 400,
            'headers': responseHeaders,
            'body': json.dumps({'errorMessage': 'Too long note!'})
        }

    if (update_properties['locUrl'] is not None) and (len(update_properties['locUrl']) > 100):
        return {
            'statusCode': 400,
            'headers': responseHeaders,
            'body': json.dumps({'errorMessage': 'Too long locUrl!'})
        }

    if (update_properties['locDesc'] is not None) and (len(update_properties['locDesc']) > 50):
        return {
            'statusCode': 400,
            'headers': responseHeaders,
            'body': json.dumps({'errorMessage': 'Too long locDesc!'})
        }

    for prop in ['max', 'need', 'end', 'dt']:
        if prop in requestBody:
            try:
                update_properties[prop] = int(update_properties[prop])
            except Exception:
                return {
                    'statusCode': 400,
                    'headers': responseHeaders,
                    'body': json.dumps({'errorMessage': prop + ' value is not an integer number!'})
                }

    # check int values
    if (update_properties['dt'] is not None) and (update_properties['dt'] < 0):
        return {
            'statusCode': 400,
            'headers': responseHeaders,
            'body': json.dumps({'errorMessage': 'dt value shouldn\'t be negative!'})
        }
    if (update_properties['end'] is not None) and (update_properties['end'] < 0):
        return {
            'statusCode': 400,
            'headers': responseHeaders,
            'body': json.dumps({'errorMessage': 'end value shouldn\'t be negative!'})
        }
    if (update_properties['need'] is not None) and (update_properties['need'] < 1):
        return {
            'statusCode': 400,
            'headers': responseHeaders,
            'body': json.dumps({'errorMessage': 'need value should be bigger than 1!'})
        }
    if (update_properties['max'] is not None) and (update_properties['max'] < 1):
        return {
            'statusCode': 400,
            'headers': responseHeaders,
            'body': json.dumps({'errorMessage': 'max value should be bigger than 1!'})
        }

    # check if end is smaller than dt
    if (update_properties['end'] is not None) and (update_properties['dt'] is not None) and (update_properties['dt'] > update_properties['end']):
        return {
            'statusCode': 400,
            'headers': responseHeaders,
            'body': json.dumps({'errorMessage': 'dt value must be smaller or equal than end value!'})
        }
    # check if max is smaller than need
    if (update_properties['need'] is not None) and (update_properties['max'] is not None) and (update_properties['need'] > update_properties['max']):
        return {
            'statusCode': 400,
            'headers': responseHeaders,
            'body': json.dumps({'errorMessage': 'need value must be smaller or equal than max value!'})
        }

    # get current poll id
    try:
        current_poll_id = get_current_poll_id()
    except Exception:
        return {
            'statusCode': 500,
            'headers': responseHeaders,
            'body': json.dumps({'errorMessage': 'Database error!'})
        }

    # check if max value is smaller than the number of the current participants
    if update_properties['max'] is not None:
        # query participants
        try:
            participants = query_participants(current_poll_id)
        except Exception:
            return {
                'statusCode': 500,
                'headers': responseHeaders,
                'body': json.dumps({'errorMessage': 'Database error!'})
            }
        if len(participants) > update_properties['max']:
            return {
                'statusCode': 400,
                'headers': responseHeaders,
                'body': json.dumps({'errorMessage': 'There are more participants than the max value!'})
            }

    # prepare the expressions for the update query
    update_expression = "SET "
    expression_attributes = {}
    expression_names = {}
    for prop in properties:
        if update_properties[prop] is None:
            del update_properties[prop]
        else:
            prop_name = '#' + prop
            prop_value = ':' + prop
            update_expression += prop_name + '= ' + prop_value + ', '
            expression_names[prop_name] = prop  # avoid reserved words
            expression_attributes[prop_value] = update_properties[prop]
    expression_names['#end'] = 'end'
    expression_names['#start'] = 'start'
    expression_names['#dt'] = 'dt'
    expression_names['#need'] = 'need'
    expression_names['#max'] = 'max'
    update_expression = update_expression[:-2]
    expression_attributes[':none'] = 'None'
    expression_attributes[':end'] = 'None' if ('end' not in update_properties) else update_properties['end']
    expression_attributes[':dt'] = 'None' if ('dt' not in update_properties) else update_properties['dt']
    expression_attributes[':need'] = 'None' if ('need' not in update_properties) else update_properties['need']
    expression_attributes[':max'] = 'None' if ('max' not in update_properties) else update_properties['max']

    # update the current poll
    try:
        update_status = update_item_polls(current_poll_id, update_expression, expression_attributes, expression_names)
    except Exception:
        return {
            'statusCode': 500,
            'headers': responseHeaders,
            'body': json.dumps({'errorMessage': 'Database error!'})
        }
...
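The part of this handler most relevant to `update_properties` is how the validated dict is turned into a DynamoDB UpdateExpression with `#name`/`:value` placeholders. The same loop can be exercised in isolation; this is a standalone sketch, and the sample input values are illustrative:

# Standalone sketch of the expression-building loop from update_current_poll;
# the 'title' and 'max' values below are illustrative input.
properties = ['title', 'note', 'locUrl', 'locDesc', 'max', 'need', 'end', 'dt']
update_properties = {prop: None for prop in properties}
update_properties.update({'title': 'Friday football', 'max': 12})

update_expression = "SET "
expression_attributes = {}
expression_names = {}
for prop in properties:
    if update_properties[prop] is not None:
        update_expression += '#%s = :%s, ' % (prop, prop)
        expression_names['#' + prop] = prop  # '#' aliases avoid DynamoDB reserved words
        expression_attributes[':' + prop] = update_properties[prop]
update_expression = update_expression[:-2]  # drop the trailing ', '

print(update_expression)      # SET #title = :title, #max = :max
print(expression_names)       # {'#title': 'title', '#max': 'max'}
print(expression_attributes)  # {':title': 'Friday football', ':max': 12}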

example.py

Source: example.py (GitHub)

# Copyright 2015 The Chromium Authors. All rights reserved.
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.

DEPS = [
  'archive',
  'recipe_engine/json',
  'recipe_engine/path',
  'recipe_engine/platform',
  'recipe_engine/properties',
]

TEST_HASH_MAIN = '5e3250aadda2b170692f8e762d43b7e8deadbeef'
TEST_COMMIT_POSITON_MAIN = 'refs/heads/B1@{#123456}'
TEST_HASH_COMPONENT = 'deadbeefdda2b170692f8e762d43b7e8e7a96686'
TEST_COMMIT_POSITON_COMPONENT = 'refs/heads/master@{#234}'

def RunSteps(api):
  if 'build_archive_url' in api.properties:
    api.archive.zip_and_upload_build(
        step_name='zip build',
        target=api.path['checkout'].join('/Release/out'))
    return

  if 'no_llvm' not in api.properties:
    llvm_bin_dir = api.path['checkout'].join('third_party', 'llvm-build',
                                             'Release+Asserts', 'bin')
    api.path.mock_add_paths(api.path.join(llvm_bin_dir, 'llvm-symbolizer'))
    api.path.mock_add_paths(api.path.join(llvm_bin_dir, 'sancov'))

  build_dir = api.path['start_dir'].join('src', 'out', 'Release')
  api.archive.clusterfuzz_archive(
      build_dir=build_dir,
      update_properties=api.properties.get('update_properties'),
      gs_bucket='chromium',
      gs_acl=api.properties.get('gs_acl', ''),
      archive_prefix='chrome-asan',
      archive_subdir_suffix=api.properties.get('archive_subdir_suffix', ''),
      revision_dir=api.properties.get('revision_dir'),
      primary_project=api.properties.get('primary_project'),
  )

def GenTests(api):
  update_properties = {
      'got_revision': TEST_HASH_MAIN,
      'got_revision_cp': TEST_COMMIT_POSITON_MAIN,
  }
  for platform, build_files in (
      ('win', ['chrome', 'icu.dat', 'lib', 'file.obj']),
      ('mac', ['chrome', 'icu.dat', 'pdfsqueeze']),
      ('linux', ['chrome', 'icu.dat', 'lib.host']),
  ):
    yield (
        api.test('cf_archiving_%s' % platform) +
        api.platform(platform, 64) +
        api.properties(
            update_properties=update_properties,
            gs_acl='public-read',
            archive_subdir_suffix='subdir',
        ) +
        api.override_step_data('filter build_dir', api.json.output(build_files))
    )

  # An svn project with a separate git property.
  update_properties = {
      'got_revision': '123456',
      'got_revision_git': TEST_HASH_MAIN,
      'got_revision_cp': TEST_COMMIT_POSITON_MAIN,
  }
  yield (
      api.test('cf_archiving_svn_with_git') +
      api.platform('linux', 64) +
      api.properties(update_properties=update_properties) +
      api.override_step_data(
          'filter build_dir', api.json.output(['chrome']))
  )

  # An svn project without git hash.
  update_properties = {
      'got_revision': '123456',
      'got_revision_cp': TEST_COMMIT_POSITON_MAIN,
  }
  yield (
      api.test('cf_archiving_svn_no_git') +
      api.platform('linux', 64) +
      api.properties(update_properties=update_properties) +
      api.override_step_data(
          'filter build_dir', api.json.output(['chrome']))
  )

  # A component build with git.
  update_properties = {
      'got_x10_revision': TEST_HASH_COMPONENT,
      'got_x10_revision_cp': TEST_COMMIT_POSITON_COMPONENT,
  }
  yield (
      api.test('cf_archiving_component') +
      api.platform('linux', 64) +
      api.properties(
          update_properties=update_properties,
          revision_dir='x10',
          primary_project='x10',
      ) +
      api.override_step_data(
          'filter build_dir', api.json.output(['chrome', 'resources']))
  )

  # A component on svn with a separate git property.
  update_properties = {
      'got_x10_revision': '234',
      'got_x10_revision_git': TEST_HASH_COMPONENT,
      'got_x10_revision_cp': TEST_COMMIT_POSITON_COMPONENT,
  }
  yield (
      api.test('cf_archiving_component_svn_with_git') +
      api.platform('linux', 64) +
      api.properties(
          update_properties=update_properties,
          revision_dir='x10',
          primary_project='x10',
      ) +
      api.override_step_data(
          'filter build_dir', api.json.output(['chrome']))
  )

  update_properties = {
      'got_revision': TEST_HASH_MAIN,
      'got_revision_cp': TEST_COMMIT_POSITON_MAIN,
  }
  yield (
      api.test('cf_archiving_no_llvm') +
      api.platform('linux', 64) +
      api.properties(
          update_properties=update_properties,
          no_llvm=True,
      ) +
      api.override_step_data(
          'filter build_dir', api.json.output(['chrome']))
  )

  yield (
      api.test('zip_and_upload_custom_location') +
      api.platform('linux', 64) +
      api.properties(
          build_archive_url='gs://dummy-bucket/Linux Release/full-build.zip')
...
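These test cases show the only contract `clusterfuzz_archive` needs from `update_properties`: a `got[_<project>]_revision` value plus the matching `_cp` commit position. The sketch below shows how the sort key in the archive name falls out of those values; the parsing helper imitates the recipe engine's `commit_position.parse`, which is not shown on this page, so its exact behavior is an assumption:

import re

# Imitation of the recipe engine's commit_position.parse(); an assumption,
# since that module is not shown on this page.
def parse_commit_position(value):
    branch, number = re.match(r'^(refs/[^@]+)@\{#(\d+)\}$', value).groups()
    return branch, int(number)

def sort_key(branch, number):
    # Mirrors ArchiveApi._get_comparable_upload_path_for_sort_key above.
    if branch and branch != 'refs/heads/master':
        return '%s-%s' % (branch.replace('/', '_'), number)
    return str(number)

branch, number = parse_commit_position('refs/heads/B1@{#123456}')
print(sort_key(branch, number))  # refs_heads_B1-123456, from TEST_COMMIT_POSITON_MAIN above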

Automation Testing Tutorials

Learn to execute automation testing from scratch with the LambdaTest Learning Hub: from setting up the prerequisites and running your first automation test, to following best practices and diving deeper into advanced test scenarios. The LambdaTest Learning Hubs compile step-by-step guides to help you become proficient with different test automation frameworks, e.g. Selenium, Cypress, and TestNG.

YouTube

You can also refer to the video tutorials on the LambdaTest YouTube channel for step-by-step demonstrations from industry experts.

Run toolium automation tests on LambdaTest cloud grid

Perform automation testing on 3000+ real desktop and mobile devices online.

Try LambdaTest Now!

Get 100 minutes of automation testing FREE!
