How to use upload_volume method in tempest

Best Python code snippet using tempest_python

test_uploaders.py

Source:test_uploaders.py Github

copy

Full Screen

# NOTE(review): partial snippet — the original file's lines 1-15 (imports of
# mock/pytest/Path, the uploader classes, the Mocked* test doubles, and the
# `volume`/`setup_uploader` fixtures) are not visible here, and the final test
# is cut off mid-body.

@mock.patch("scraper.uploaders.uploaders.Mega", MockedMega)
@mock.patch("scraper.uploaders.uploaders.dropbox.Dropbox", MockedDropbox)
def test_upload_fails(caplog, volume, uploader):
    """Uploading an already-existing volume logs a warning and returns None."""
    uploader = setup_uploader(uploader)
    response = uploader.upload_volume(volume)
    assert "already exists" in caplog.text
    assert response is None


@mock.patch("scraper.uploaders.uploaders.dropbox.Dropbox", MockedDropboxRealFile)
def test_dropbox_upload(caplog, volume):
    """A Dropbox upload of a real on-disk file yields a success response."""
    volume.file_path = "tests/test_files/mangakaka/dragonball_super_page.html"
    dbox = setup_uploader(DropboxUploader)
    response = dbox.upload_volume(volume)
    assert response.text == "success"


@mock.patch("scraper.uploaders.uploaders.Mega", MockedMegaNotFound)
def test_mega_upload(volume):
    """A Mega upload returns the mocked success payload."""
    mega = setup_uploader(MegaUploader)
    response = mega.upload_volume(volume)
    assert response == {"status": "success"}


@mock.patch("scraper.uploaders.uploaders.Mega", MockedMega)
def test_mega_set_dirname():
    # Expected dirname values ("start"/"two") come from the Mocked* doubles,
    # which are defined outside this snippet.
    manga = Manga("dragon-ball", "pdf")
    manga.add_volume(1)
    mega = setup_uploader(MegaUploader)
    mega.set_dirname(manga)
    assert mega.dirname == "start"


@mock.patch("scraper.uploaders.uploaders.Mega", MockedMegaNotFound)
def test_mega_set_dirname_if_dir_not_in_cloud():
    manga = Manga("dragon-ball", "pdf")
    manga.add_volume(1)
    mega = setup_uploader(MegaUploader)
    mega.set_dirname(manga)
    assert mega.dirname == "two"


@mock.patch("scraper.uploaders.uploaders.PyCloud", MockedPyCloud)
def test_pycloud_create_directory_if_not_present_in_the_cloud():
    pycloud = setup_uploader(PcloudUploader)
    response = pycloud.create_directory("/path")
    assert response == {"result": 0, "metadata": "path"}


def test_pycloud_create_directory_returns_nothing_if_dir_in_cloud():
    with mock.patch("scraper.uploaders.uploaders.PyCloud", MockedPyCloudFail) as mm:
        # Empty listing simulates the directory already existing in the cloud.
        mm.listed = {}
        pycloud = setup_uploader(PcloudUploader)
        response = pycloud.create_directory("/path")
        assert response == {}


@mock.patch("scraper.uploaders.uploaders.PyCloud", MockedPyCloud)
def test_pycloud_create_directories():
    file = Path("/path/to/file.txt")
    pycloud = setup_uploader(PcloudUploader)
    responses = pycloud.create_directories_recursively(file)
    # Two responses — presumably one per parent directory of the file
    # (/path and /path/to); confirm against the uploader implementation.
    assert len(responses) == 2
    assert responses[0] == {"result": 0, "metadata": "path"}


@mock.patch("scraper.uploaders.uploaders.PyCloud", MockedPyCloudFail)
def test_pycloud_create_directories_fail():
    file = Path("/path/to/file.txt")
    with mock.patch("scraper.uploaders.uploaders.PyCloud", MockedPyCloudFail) as mm:
        mm.listed = {"error": True, "result": 2005}
        pycloud = setup_uploader(PcloudUploader)
        with pytest.raises(IOError):
            pycloud.create_directories_recursively(file)


@mock.patch("scraper.uploaders.uploaders.PyCloud", MockedPyCloud)
def test_pycloud_upload_volume(volume):
    volume.file_path = Path("tests/test_files/jpgs/test-manga_1_1.jpg")
    pycloud = setup_uploader(PcloudUploader)
    response = pycloud.upload_volume(volume)
    assert response == {"status": "success"}


@mock.patch("scraper.uploaders.uploaders.PyCloud", MockedPyCloudFail)
def test_pycloud_upload_upload_failure(volume):
    volume.file_path = Path("tests/test_files/jpgs/test-manga_1_1.jpg")
    pycloud = setup_uploader(PcloudUploader)
    with pytest.raises(IOError):
        pycloud.upload_volume(volume)


@pytest.mark.parametrize(
    "to_mock,mock_obj,uploader",
    [
        ("PyCloud", MockedPyCloud, PcloudUploader),
        ("Mega", MockedMegaNotFound, MegaUploader),
    ],
)
def test_upload_calls(to_mock, mock_obj, uploader):
    with mock.patch(f"scraper.uploaders.uploaders.{to_mock}", mock_obj):
        manga = Manga("dragon-ball", "pdf")
        manga.add_volume(1)
        manga.add_volume(2)
        manga.volume[1].file_path = Path("tests/test_files/jpgs/test-manga_1_1.jpg")
        manga.volume[2].file_path = Path("tests/test_files/jpgs/test-manga_1_2.jpg")
        # (snippet truncated in source — remainder of this test is not visible)

Full Screen

Full Screen

api.py

Source:api.py Github

copy

Full Screen

# -*- coding: utf-8 -*-
#
# This file is part of Zenodo.
# Copyright (C) 2016-2021 CERN.
#
# Zenodo is free software; you can redistribute it
# and/or modify it under the terms of the GNU General Public License as
# published by the Free Software Foundation; either version 2 of the
# License, or (at your option) any later version.
#
# Zenodo is distributed in the hope that it will be
# useful, but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
# General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with Zenodo; if not, write to the
# Free Software Foundation, Inc., 59 Temple Place, Suite 330, Boston,
# MA 02111-1307, USA.
#
# In applying this license, CERN does not
# waive the privileges and immunities granted to it by virtue of its status
# as an Intergovernmental Organization or submit itself to any jurisdiction.
"""Zenodo Metrics API."""

from __future__ import absolute_import

import calendar
from datetime import datetime, timedelta

import requests
from elasticsearch_dsl import Search
from flask import current_app
from invenio_accounts.models import User
from invenio_communities.models import Community
from invenio_files_rest.models import FileInstance
from invenio_search import current_search_client
from invenio_search.utils import build_alias_name

from .proxies import current_metrics


class ZenodoMetric(object):
    """API class for Zenodo Metrics."""

    @staticmethod
    def get_data_transfer():
        """Get file transfer volume in TB.

        Sums the ``volume`` of download-stat events and the ``size`` of
        records created since ``current_metrics.metrics_start_date``.
        NOTE(review): the code returns the raw summed values — the "in TB"
        unit in this docstring cannot be confirmed from the visible fields;
        verify against the stats index mappings.
        """
        time_range = {'gte': current_metrics.metrics_start_date.isoformat()}
        search = Search(
            using=current_search_client,
            index=build_alias_name('stats-file-download-*')
        ).filter(
            'range', timestamp=time_range,
        ).filter(
            # Exclude parent events so download volume is not double-counted
            # (assumption from the is_parent flag — confirm with stats schema).
            'term', is_parent=False,
        )
        search.aggs.metric('download_volume', 'sum', field='volume')
        # [:0] requests zero hits — only the aggregation result is needed.
        result = search[:0].execute().aggregations.to_dict()
        download_volume = result.get('download_volume', {}).get('value', 0)
        search = Search(
            using=current_search_client,
            index=build_alias_name('records')
        ).filter('range', created=time_range)
        search.aggs.metric('upload_volume', 'sum', field='size')
        result = search[:0].execute().aggregations.to_dict()
        upload_volume = result.get('upload_volume', {}).get('value', 0)
        return int(download_volume + upload_volume)

    @staticmethod
    def get_visitors():
        """Get number of unique zenodo users."""
        time_range = {'gte': current_metrics.metrics_start_date.isoformat()}
        search = Search(
            using=current_search_client,
            index=build_alias_name('events-stats-*')
        ).filter('range', timestamp=time_range)
        # Cardinality over visitor_id approximates distinct visitors.
        search.aggs.metric(
            'visitors_count', 'cardinality', field='visitor_id'
        )
        result = search[:0].execute()
        # No aggregation in the response (e.g. no matching indices) => 0.
        if 'visitors_count' not in result.aggregations:
            return 0
        return int(result.aggregations.visitors_count.value)

    @staticmethod
    def get_uptime():
        """Get Zenodo uptime.

        Returns the average UptimeRobot uptime percentage of the configured
        monitors over the previous calendar month.
        """
        metrics = current_app.config['ZENODO_METRICS_UPTIME_ROBOT_METRIC_IDS']
        url = current_app.config['ZENODO_METRICS_UPTIME_ROBOT_URL']
        api_key = current_app.config['ZENODO_METRICS_UPTIME_ROBOT_API_KEY']
        # end = first instant of the current month; start = first instant of
        # the previous month (step back one day, then snap to day 1).
        end = datetime.utcnow().replace(
            day=1, hour=0, minute=0, second=0, microsecond=0)
        start = (end - timedelta(days=1)).replace(day=1)
        end_ts = calendar.timegm(end.utctimetuple())
        start_ts = calendar.timegm(start.utctimetuple())
        res = requests.post(url, json={
            'api_key': api_key,
            'custom_uptime_ranges': '{}_{}'.format(start_ts, end_ts),
        })
        # Average the per-monitor uptime over the configured monitor ids.
        return sum(
            float(d['custom_uptime_ranges'])
            for d in res.json()['monitors']
            if d['id'] in metrics
        ) / len(metrics)

    @staticmethod
    def get_researchers():
        """Get number of unique zenodo users."""
        # Confirmed and active accounts only.
        return User.query.filter(
            User.confirmed_at.isnot(None),
            User.active.is_(True),
        ).count()

    @staticmethod
    def get_files():
        """Get number of files."""
        return FileInstance.query.count()

    @staticmethod
    def get_communities():
        """Get number of active communities."""
        # (snippet truncated in source — the closing ).count() of this
        # query is not visible here)
        return Community.query.filter(
            Community.deleted_at.is_(None)

Full Screen

Full Screen

Upload.py

Source:Upload.py Github

copy

Full Screen

import random

import csv
import json

from datetime import datetime

from flask import Blueprint, request, session, g

from api.bigtable import get_bigtable
from google.cloud.bigtable import row_filters
from constant import *


def read_data(f):
    """Read a tab-separated file object into a list of rows."""
    return [row for row in csv.reader(f, delimiter='\t')]


# -----API Construction----- #
uploadApi = Blueprint('uploadApi', __name__)


# -----Routing Definition----- #
@uploadApi.route('upload', methods=['POST'])
def updatedbforreview():
    """Connect to the DB and store this one uploaded file.

    Each line of the posted file becomes a row in the 'annotation'
    Bigtable table, keyed by uploader, category tag and sentence hash.
    Afterwards the uploader's cumulative 'upload_amount' counter in the
    'auth' table is increased by the number of rows actually written.
    """
    table = get_bigtable('annotation')
    data = request.files['file']

    # TODO: check if we have this 2 columns; update: add tag
    uploader = request.args['user']
    tag = request.args['category']
    timestamp = datetime.utcnow()

    upload_volume = 0
    # Iterating the uploaded file yields raw bytes, one line at a time.
    for sentence in data:
        sentence = sentence.decode('utf-8')
        try:
            # NOTE(review): hash() is randomized per process
            # (PYTHONHASHSEED), so the same sentence produces a different
            # row key on every server run. If de-duplication across uploads
            # is intended, a stable digest (e.g. hashlib.sha1) is needed —
            # not changed here because existing rows already use these keys.
            row_key = f'{uploader}#{tag}#not_annotate#{hash(sentence)}'
            row = table.direct_row(row_key)
            row.set_cell('text', 'text', sentence, timestamp)
            row.set_cell('annotation', 'already_annotated', str(0), timestamp)
            row.commit()
            upload_volume += 1
        except UnicodeEncodeError:
            print('UNICODE ERROR:', sentence)

    # Update the uploader's cumulative upload count in the auth table.
    auth_table = get_bigtable('auth')
    row_read = auth_table.read_row(uploader)
    row_write = auth_table.direct_row(uploader)
    try:
        previous_num = int(
            row_read.cells['information'][b'upload_amount'][0].value.decode()
        )
        new_num = previous_num + upload_volume
    except KeyError:
        # First upload for this user: no 'upload_amount' cell exists yet.
        new_num = upload_volume

    row_write.set_cell('information', 'upload_amount', str(new_num), timestamp)
    row_write.commit()
    # (snippet truncated in source — the view's return value is not visible)

Full Screen

Full Screen

Automation Testing Tutorials

Learn to execute automation testing from scratch with the LambdaTest Learning Hub. Right from setting up the prerequisites to run your first automation test, to following best practices and diving deeper into advanced test scenarios, the LambdaTest Learning Hub compiles a list of step-by-step guides to help you become proficient with different test automation frameworks, i.e. Selenium, Cypress, TestNG, etc.

LambdaTest Learning Hubs:

YouTube

You can also refer to the video tutorials on the LambdaTest YouTube channel to get step-by-step demonstrations from industry experts.

Run tempest automation tests on LambdaTest cloud grid

Perform automation testing on 3000+ real desktop and mobile devices online.

Try LambdaTest Now !!

Get 100 automation test minutes FREE!!

Next-Gen App & Browser Testing Cloud

Was this article helpful?

Helpful

Not Helpful