How to use backend_version method in Pyscreenshot

Best Python code snippet using pyscreenshot_python

test_archive.py

Source:test_archive.py Github

copy

Full Screen

#!/usr/bin/env python3
# -*- coding: utf-8 -*-
#
# Copyright (C) 2015-2020 Bitergia
#
# This program is free software; you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation; either version 3 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
#
# Authors:
#     Valerio Cosentino <valcos@bitergia.com>
#     Santiago Dueñas <sduenas@bitergia.com>
#     Miguel Ángel Fernández <mafesan@bitergia.com>
#     Jesus M. Gonzalez-Barahona <jgb@gsyc.es>
#

import os
import pickle
import shutil
import sqlite3
import tempfile
import unittest
import unittest.mock

import httpretty
import requests

from grimoirelab_toolkit.datetime import datetime_utcnow, datetime_to_utc

from perceval.archive import Archive, ArchiveManager
from perceval.errors import ArchiveError, ArchiveManagerError


def count_number_rows(db, table_name):
    """Return the number of rows stored in `table_name` of the SQLite file `db`.

    :param db: path to the SQLite database file
    :param table_name: name of the table to count; trusted test constant,
        interpolated directly into the SQL (do not pass untrusted input)
    """
    conn = sqlite3.connect(db)
    try:
        cursor = conn.cursor()
        cursor.execute("SELECT COUNT(*) FROM " + table_name)
        nrows = cursor.fetchone()[0]
        cursor.close()
    finally:
        # Fix: the connection was previously leaked (only the cursor was closed)
        conn.close()
    return nrows


class TestArchive(unittest.TestCase):
    """Archive tests"""

    def setUp(self):
        self.test_path = tempfile.mkdtemp(prefix='perceval_')

    def tearDown(self):
        shutil.rmtree(self.test_path)

    def test_create(self):
        """Test that a new empty archive is created"""

        archive_path = os.path.join(self.test_path, 'myarchive')
        archive = Archive.create(archive_path)

        # Archive file was created
        self.assertEqual(archive.archive_path, archive_path)
        self.assertEqual(os.path.exists(archive.archive_path), True)

        # Properties are initialized
        self.assertEqual(archive.created_on, None)
        self.assertEqual(archive.origin, None)
        self.assertEqual(archive.backend_name, None)
        self.assertEqual(archive.backend_version, None)
        self.assertEqual(archive.category, None)
        self.assertEqual(archive.backend_params, None)

        # Tables are empty
        nrows = count_number_rows(archive_path, Archive.ARCHIVE_TABLE)
        self.assertEqual(nrows, 0)
        nrows = count_number_rows(archive_path, Archive.METADATA_TABLE)
        self.assertEqual(nrows, 0)

    def test_create_existing_archive(self):
        """Test if create method fails when the given archive path already exists"""

        archive_path = os.path.join(self.test_path, 'myarchive')
        Archive.create(archive_path)

        with self.assertRaisesRegex(ArchiveError, "archive %s already exists" % archive_path):
            Archive.create(archive_path)

    def test_init(self):
        """Test whether an archive is properly initialized"""

        archive_path = os.path.join(self.test_path, 'myarchive')
        _ = Archive.create(archive_path)

        archive = Archive(archive_path)
        self.assertEqual(archive.archive_path, archive_path)
        self.assertEqual(archive.created_on, None)
        self.assertEqual(archive.origin, None)
        self.assertEqual(archive.backend_name, None)
        self.assertEqual(archive.backend_version, None)
        self.assertEqual(archive.category, None)
        self.assertEqual(archive.backend_params, None)

    def test_init_not_existing_archive(self):
        """Test if an exception is raised when the given archive does not exist"""

        archive_path = os.path.join(self.test_path, 'myarchive')

        with self.assertRaisesRegex(ArchiveError, "archive %s does not exist" % archive_path):
            _ = Archive(archive_path)

    def test_init_not_valid_archive(self):
        """Test if an exception is raised when the file is an invalid archive"""

        archive_path = os.path.join(self.test_path, 'invalid_archive')

        with open(archive_path, 'w') as fd:
            fd.write("Invalid archive file")

        with self.assertRaisesRegex(ArchiveError, "invalid archive file"):
            _ = Archive(archive_path)

    def test_init_metadata(self):
        """Test whether metadata information is properly initialized"""

        archive_path = os.path.join(self.test_path, 'myarchive')
        archive = Archive.create(archive_path)

        before_dt = datetime_to_utc(datetime_utcnow())
        archive.init_metadata('marvel.com', 'marvel-comics-backend', '0.1.0',
                              'issue', {'from_date': before_dt})
        after_dt = datetime_to_utc(datetime_utcnow())

        archive_copy = Archive(archive_path)

        # Both copies should have the same parameters
        for arch in [archive, archive_copy]:
            self.assertEqual(arch.origin, 'marvel.com')
            self.assertEqual(arch.backend_name, 'marvel-comics-backend')
            self.assertEqual(arch.backend_version, '0.1.0')
            self.assertEqual(arch.category, 'issue')
            self.assertGreaterEqual(arch.created_on, before_dt)
            self.assertLessEqual(arch.created_on, after_dt)
            self.assertDictEqual(arch.backend_params, {'from_date': before_dt})

    @httpretty.activate
    def test_store(self):
        """Test whether data is properly stored in the archive"""

        data_requests = [
            ("https://example.com/", {'q': 'issues', 'date': '2017-01-10'}, {}),
            ("https://example.com/", {'q': 'issues', 'date': '2018-01-01'}, {}),
            ("https://example.com/tasks", {'task_id': 10}, {'Accept': 'application/json'}),
        ]

        httpretty.register_uri(httpretty.GET,
                               "https://example.com/",
                               body='{"hey": "there"}',
                               status=200)
        httpretty.register_uri(httpretty.GET,
                               "https://example.com/tasks",
                               body='{"task": "my task"}',
                               status=200)

        archive_path = os.path.join(self.test_path, 'myarchive')
        archive = Archive.create(archive_path)

        # Store data in the archive
        responses = []
        for dr in data_requests:
            response = requests.get(dr[0], params=dr[1], headers=dr[2])
            archive.store(dr[0], dr[1], dr[2], response)
            responses.append(response)

        db = sqlite3.connect(archive.archive_path)
        cursor = db.cursor()
        cursor.execute("SELECT hashcode, data, uri, payload, headers FROM archive")
        data_stored = cursor.fetchall()
        cursor.close()
        # Fix: close the connection (it was previously leaked)
        db.close()

        self.assertEqual(len(data_stored), len(data_requests))

        ds = data_stored[0]
        dr = data_requests[0]
        self.assertEqual(ds[0], '0fa4ce047340780f08efca92f22027514263521d')
        self.assertEqual(pickle.loads(ds[1]).url, responses[0].url)
        self.assertEqual(ds[2], dr[0])
        self.assertEqual(pickle.loads(ds[3]), dr[1])
        self.assertEqual(pickle.loads(ds[4]), dr[2])

        ds = data_stored[1]
        dr = data_requests[1]
        self.assertEqual(ds[0], '3879a6f12828b7ac3a88b7167333e86168f2f5d2')
        self.assertEqual(pickle.loads(ds[1]).url, responses[1].url)
        self.assertEqual(ds[2], dr[0])
        self.assertEqual(pickle.loads(ds[3]), dr[1])
        self.assertEqual(pickle.loads(ds[4]), dr[2])

        ds = data_stored[2]
        dr = data_requests[2]
        self.assertEqual(ds[0], 'ef38f574a0745b63a056e7befdb7a06e7cf1549b')
        self.assertEqual(pickle.loads(ds[1]).url, responses[2].url)
        self.assertEqual(ds[2], dr[0])
        self.assertEqual(pickle.loads(ds[3]), dr[1])
        self.assertEqual(pickle.loads(ds[4]), dr[2])

    @httpretty.activate
    def test_store_duplicate(self):
        """Test whether the insertion of duplicated data throws an error"""

        url = "https://example.com/tasks"
        payload = {'task_id': 10}
        headers = {'Accept': 'application/json'}

        httpretty.register_uri(httpretty.GET,
                               url,
                               body='{"hey": "there"}',
                               status=200)
        response = requests.get(url, params=payload, headers=headers)

        archive_path = os.path.join(self.test_path, 'myarchive')
        archive = Archive.create(archive_path)
        archive.store(url, payload, headers, response)

        # check the unique index filters duplicated API calls
        with self.assertRaisesRegex(ArchiveError, "duplicated entry"):
            archive.store(url, payload, headers, response)

    @httpretty.activate
    def test_retrieve(self):
        """Test whether data is properly retrieved from the archive"""

        url = "https://example.com/tasks"
        payload = {'task_id': 10}
        headers = {'Accept': 'application/json'}

        httpretty.register_uri(httpretty.GET,
                               url,
                               body='{"hey": "there"}',
                               status=200)
        response = requests.get(url, params=payload, headers=headers)

        archive_path = os.path.join(self.test_path, 'myarchive')
        archive = Archive.create(archive_path)
        archive.store(url, payload, headers, response)

        data = archive.retrieve(url, payload, headers)
        self.assertEqual(data.url, response.url)

    def test_retrieve_missing(self):
        """Test whether the retrieval of non archived data throws an error

        In the exceptional case of a failure in retrieving data from an archive (e.g., manual modification),
        an exception is thrown to stop the retrieval from the archive
        """
        archive_path = os.path.join(self.test_path, 'myarchive')
        archive = Archive.create(archive_path)

        with self.assertRaisesRegex(ArchiveError, "not found in archive"):
            _ = archive.retrieve("http://wrong", payload={}, headers={})


ARCHIVE_TEST_DIR = 'archivedir'


class MockUUID:
    """Minimal stand-in for a uuid4 result; exposes only the `hex` attribute."""

    def __init__(self, uuid):
        self.hex = uuid


class TestArchiveManager(unittest.TestCase):
    """Archive Manager tests"""

    def setUp(self):
        self.test_path = tempfile.mkdtemp(prefix='perceval_')

    def tearDown(self):
        shutil.rmtree(self.test_path)

    def test_struct(self):
        """Test whether the structure of an archive manager directory is created"""

        archive_mng_path = os.path.join(self.test_path, ARCHIVE_TEST_DIR)

        # Directory does not exist yet
        self.assertEqual(os.path.isdir(archive_mng_path), False)

        # Object and directory are created
        manager = ArchiveManager(archive_mng_path)
        self.assertEqual(manager.dirpath, archive_mng_path)
        self.assertEqual(os.path.isdir(archive_mng_path), True)

        # A new object using the same directory does not create
        # a new directory
        alt_manager = ArchiveManager(archive_mng_path)
        self.assertEqual(alt_manager.dirpath, archive_mng_path)
        self.assertEqual(os.path.isdir(archive_mng_path), True)

    @unittest.mock.patch('uuid.uuid4')
    def test_create_archive(self, mock_uuid):
        """Test if a new archive is created"""

        mock_uuid.return_value = MockUUID('AB0123456789')

        archive_mng_path = os.path.join(self.test_path, ARCHIVE_TEST_DIR)
        manager = ArchiveManager(archive_mng_path)

        archive = manager.create_archive()
        self.assertIsInstance(archive, Archive)

        # The first two hex chars become a subdirectory, the rest the file name
        expected = os.path.join(archive_mng_path, 'AB', '0123456789.sqlite3')
        self.assertEqual(archive.archive_path, expected)
        self.assertEqual(os.path.exists(archive.archive_path), True)

    @unittest.mock.patch('uuid.uuid4')
    def test_create_existing_archive(self, mock_uuid):
        """Test if an exception is raised when the archive to create exists"""

        mock_uuid.return_value = MockUUID('AB0123456789')

        archive_mng_path = os.path.join(self.test_path, ARCHIVE_TEST_DIR)
        manager = ArchiveManager(archive_mng_path)

        # First we create the archive
        archive = manager.create_archive()
        self.assertIsInstance(archive, Archive)

        expected = os.path.join(archive_mng_path, 'AB', '0123456789.sqlite3')
        self.assertEqual(archive.archive_path, expected)

        # The archive already exist so it must raise an exception
        with self.assertRaisesRegex(ArchiveManagerError, 'archive .+ already exists'):
            _ = manager.create_archive()

    def test_remove_archive(self):
        """Test if an archive is removed by the archive manager"""

        archive_mng_path = os.path.join(self.test_path, ARCHIVE_TEST_DIR)
        manager = ArchiveManager(archive_mng_path)

        archive = manager.create_archive()
        self.assertEqual(os.path.exists(archive.archive_path), True)

        manager.remove_archive(archive.archive_path)
        self.assertEqual(os.path.exists(archive.archive_path), False)

    def test_remove_archive_not_found(self):
        """Test if an exception is raised when the archive is not found"""

        archive_mng_path = os.path.join(self.test_path, ARCHIVE_TEST_DIR)
        manager = ArchiveManager(archive_mng_path)

        with self.assertRaisesRegex(ArchiveManagerError, 'archive mockarchive does not exist'):
            manager.remove_archive('mockarchive')

    def test_search(self):
        """Test if a set of archives is found based on the given criteria"""

        archive_mng_path = os.path.join(self.test_path, ARCHIVE_TEST_DIR)
        manager = ArchiveManager(archive_mng_path)

        dt = datetime_utcnow()

        metadata = [
            {
                'origin': 'https://example.com',
                'backend_name': 'git',
                'backend_version': '0.8',
                'category': 'commit',
                'backend_params': {},
            },
            {
                'origin': 'https://example.com',
                'backend_name': 'gerrit',
                'backend_version': '0.1',
                'category': 'changes',
                'backend_params': {}
            },
            {
                'origin': 'https://example.org',
                'backend_name': 'git',
                'backend_version': '0.1',
                'category': 'commit',
                'backend_params': {}
            },
            {
                'origin': 'https://example.com',
                'backend_name': 'git',
                'backend_version': '0.1',
                'category': 'commit',
                'backend_params': {}
            }
        ]

        for meta in metadata:
            archive = manager.create_archive()
            archive.init_metadata(**meta)
            meta['filepath'] = archive.archive_path

        archives = manager.search('https://example.com', 'git', 'commit', dt)

        expected = [metadata[0]['filepath'], metadata[3]['filepath']]
        self.assertListEqual(archives, expected)

    def test_search_archived_after(self):
        """Check if a set of archives created after a given date are searched"""

        archive_mng_path = os.path.join(self.test_path, ARCHIVE_TEST_DIR)
        manager = ArchiveManager(archive_mng_path)

        # First set of archives to create
        metadata = [
            {
                'origin': 'https://example.com',
                'backend_name': 'git',
                'backend_version': '0.8',
                'category': 'commit',
                'backend_params': {},
            },
            {
                'origin': 'https://example.com',
                'backend_name': 'gerrit',
                'backend_version': '0.1',
                'category': 'changes',
                'backend_params': {}
            },
        ]

        for meta in metadata:
            archive = manager.create_archive()
            archive.init_metadata(**meta)

        # Second set, archived after the date we'll use to search
        after_dt = datetime_utcnow()

        metadata = [
            {
                'origin': 'https://example.org',
                'backend_name': 'git',
                'backend_version': '0.1',
                'category': 'commit',
                'backend_params': {}
            },
            {
                'origin': 'https://example.com',
                'backend_name': 'git',
                'backend_version': '0.1',
                'category': 'commit',
                'backend_params': {}
            }
        ]

        for meta in metadata:
            archive = manager.create_archive()
            archive.init_metadata(**meta)
            meta['filepath'] = archive.archive_path

        archives = manager.search('https://example.com', 'git', 'commit',
                                  after_dt)

        expected = [metadata[1]['filepath']]
        self.assertListEqual(archives, expected)

    def test_search_no_match(self):
        """Check if an empty set of archives is returned when none match the criteria"""

        archive_mng_path = os.path.join(self.test_path, ARCHIVE_TEST_DIR)
        manager = ArchiveManager(archive_mng_path)

        dt = datetime_utcnow()

        metadata = [
            {
                'origin': 'https://example.com',
                'backend_name': 'git',
                'backend_version': '0.8',
                'category': 'commit',
                'backend_params': {},
            },
            {
                'origin': 'https://example.com',
                'backend_name': 'gerrit',
                'backend_version': '0.1',
                'category': 'changes',
                'backend_params': {}
            },
            {
                'origin': 'https://example.org',
                'backend_name': 'git',
                'backend_version': '0.1',
                'category': 'commit',
                'backend_params': {}
            },
            {
                'origin': 'https://example.com',
                'backend_name': 'git',
                'backend_version': '0.1',
                'category': 'commit',
                'backend_params': {}
            }
        ]

        for meta in metadata:
            archive = manager.create_archive()
            archive.init_metadata(**meta)
            meta['filepath'] = archive.archive_path

        archives = manager.search('https://example.com', 'bugzilla', 'commit', dt)
        self.assertListEqual(archives, [])


if __name__ == "__main__":
    # NOTE(review): the original source was truncated here; the conventional
    # unittest entry point is assumed — confirm against upstream.
    unittest.main()

Full Screen

Full Screen

uploadToDownloads.py

Source:uploadToDownloads.py Github

copy

Full Screen

# Copyright (c) 2012 The Chromium Authors. All rights reserved.
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.

# This script is expected to be called from build.xml uploadToDownloads target

import sys
import getpass

import googlecode_upload

# Positional CLI arguments supplied by the build.xml target.
resultDir = sys.argv[1]
user_name = sys.argv[2]
main_version = sys.argv[3]
backend_version = sys.argv[4]

pwd = getpass.getpass(prompt="Google SVN password: ")


def uploadFile(file, summary):
    """Upload `file` to the chromedevtools Google Code project.

    Raises an Exception when the server does not answer 201 (Created).
    Note: the parameter name `file` shadows the builtin; kept for
    backward compatibility with existing callers.
    """
    (http_status, http_reason, file_url) = googlecode_upload.upload(
        file=file, project_name="chromedevtools", user_name=user_name,
        password=pwd, summary=summary)
    if http_status != 201:
        raise Exception("Failed to upload file %s: %d '%s'" % (file, http_status, http_reason))
    # Fix: use the print() function — the original Python 2 print statement
    # is a SyntaxError under Python 3; this form works on both.
    print("Uploaded to %s" % file_url)


uploadFile("%s/org.chromium.sdk-wipbackends-%s-%s.tar" % (resultDir, main_version, backend_version),
           "ChromeDevTools SDK WIP backends v. %s/%s" % (main_version, backend_version))
uploadFile("%s/org.chromium.sdk-lib-%s.tar" % (resultDir, main_version),
           "ChromeDevTools SDK library as tar archive v. %s" % main_version)

Full Screen

Full Screen

Automation Testing Tutorials

Learn to execute automation testing from scratch with the LambdaTest Learning Hub. Right from setting up the prerequisites to run your first automation test, to following best practices and diving deeper into advanced test scenarios, the LambdaTest Learning Hubs compile step-by-step guides to help you become proficient with different test automation frameworks such as Selenium, Cypress, and TestNG.

LambdaTest Learning Hubs:

YouTube

You could also refer to video tutorials over LambdaTest YouTube channel to get step by step demonstration from industry experts.

Run Pyscreenshot automation tests on LambdaTest cloud grid

Perform automation testing on 3000+ real desktop and mobile devices online.

Try LambdaTest Now !!

Get 100 minutes of automation test minutes FREE!!

Next-Gen App & Browser Testing Cloud

Was this article helpful?

Helpful

Not Helpful