How to use the output_should_be method in Robot Framework

Best Python code snippets using output_should_be, collected from open-source test suites.
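In the snippets below, output_should_be appears in two flavors: as a plain variable holding the expected result that the actual output is compared against (test_job.py, test_transformations.py), and as a test-helper method that asserts a command's captured stdout equals an expected string (test_trash_list.py, test_trash_put.py). Here is a minimal, self-contained sketch of the first flavor; the function and values are illustrative only and do not come from any of the projects below.

import unittest


def double_all(numbers):
    # function under test (illustrative only)
    return [2 * n for n in numbers]


class TestDoubleAll(unittest.TestCase):
    def test_output_matches_expectation(self):
        # build the expected result up front, name it output_should_be,
        # then compare it against the actual output in one assertion
        output_should_be = [2, 4, 6]
        output = double_all([1, 2, 3])
        self.assertEqual(output_should_be, output)


if __name__ == "__main__":
    unittest.main()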

test_job.py

Source: test_job.py (GitHub)


import json
import unittest

from opulence.common.facts import BaseFact
from opulence.common.fields import IntegerField, StringField
from opulence.common.job import Composable, Result, StatusCode
from opulence.common.jsonEncoder import decode, encode
from opulence.common.patterns import Composite


class FactA(BaseFact):
    _name_ = "x"
    _description_ = "xx"
    _author_ = "xxx"
    _version_ = 1

    def setup(self):
        self.a = IntegerField(mandatory=False)
        self.b = StringField(default="b")
        self.c = StringField(default="c")


class TestComposables(unittest.TestCase):
    def test_composable_eq(self):
        a = FactA(a=1)
        b = FactA(a=2)
        c = Composable(Composite(a, b))
        a1 = FactA(a=1)
        b1 = FactA(a=2)
        c1 = Composable(Composite(a1, b1))
        self.assertTrue(c == c1)

    def test_composable_eq_2(self):
        a = FactA(a=1)
        c = Composable(Composite(a))
        a1 = FactA(a=1)
        c1 = Composable(a1)
        self.assertTrue(c == c1)

    def test_composable_not_eq(self):
        a = FactA(a=1)
        b = FactA(a=2)
        c = Composable(Composite(a, b))
        a1 = FactA(a=1)
        self.assertTrue(c != a1)

    def test_composable_not_eq_2(self):
        a = FactA(a=1)
        b = FactA(a=2)
        c = Composable(Composite(a, b))
        a1 = FactA(a=1)
        b1 = FactA(a=4242)
        c1 = Composable(Composite(a1, b1))
        self.assertTrue(c != c1)


class TestJobResult(unittest.TestCase):
    def test_job_result_composite(self):
        a = FactA()
        b = FactA()
        j = Result(input=Composite(a, b))
        j.status = StatusCode.finished
        j_json = json.dumps(j, cls=encode)
        new_j = json.loads(j_json, object_hook=decode)
        self.assertEqual(
            False,
            StatusCode.is_errored(new_j.status["status"]),
            StatusCode.is_errored(j.status["status"]),
        )
        self.assertEqual(j.status, new_j.status)
        self.assertEqual("Finished", j.status["code"], new_j.status["code"])
        self.assertEqual(j.input, new_j.input)

    def test_job_result_error_msg(self):
        a = FactA()
        j = Result(input=a)
        j.status = StatusCode.finished, "this is an error"
        j_json = json.dumps(j, cls=encode)
        new_j = json.loads(j_json, object_hook=decode)
        self.assertEqual(
            False,
            StatusCode.is_errored(new_j.status["status"]),
            StatusCode.is_errored(j.status["status"]),
        )
        self.assertEqual(j.status, new_j.status)
        self.assertEqual("Finished", j.status["code"], new_j.status["code"])
        self.assertEqual("this is an error", j.status["error"], new_j.status["error"])

    def test_job_result_errored(self):
        a = FactA()
        j = Result(input=a)
        j.status = StatusCode.error
        j_json = json.dumps(j, cls=encode)
        new_j = json.loads(j_json, object_hook=decode)
        self.assertEqual(
            True,
            StatusCode.is_errored(new_j.status["status"]),
            StatusCode.is_errored(j.status["status"]),
        )
        self.assertEqual(j.status, new_j.status)

    def test_job_result_errored_bis(self):
        a = FactA()
        j = Result(input=a)
        j.status = StatusCode.rate_limited
        j_json = json.dumps(j, cls=encode)
        new_j = json.loads(j_json, object_hook=decode)
        self.assertEqual(
            True,
            StatusCode.is_errored(new_j.status["status"]),
            StatusCode.is_errored(j.status["status"]),
        )
        self.assertEqual(j.status, new_j.status)
        self.assertEqual(j.output, None)

    def test_job_result_output(self):
        a = FactA()
        b = FactA(a=1, b=2, c=3)
        j = Result(input=a, output=b)
        j_json = json.dumps(j, cls=encode)
        new_j = json.loads(j_json, object_hook=decode)
        self.assertEqual(new_j.output, j.output)
        self.assertEqual(False, StatusCode.is_errored(j.status["status"]))
        self.assertEqual(1, len(j.output))
        self.assertEqual(j.output[0], b)
        self.assertEqual(j.output[0].get_info(), b.get_info())

    def test_job_result_output2(self):
        r = Result()
        r.input = FactA(a=1, b=2)
        r.output = [FactA(a=10, b=20), FactA(a=12, b=22)]
        r_json = r.to_json()
        r_json2 = json.dumps(r, cls=encode)
        new_r = json.loads(r_json2, object_hook=decode)
        new_r_json = new_r.to_json()
        self.assertEqual(r_json["input"], new_r_json["input"])
        for a, b in zip(r_json["input"], new_r_json["input"]):
            self.assertTrue(a == b)
        self.assertEqual(r_json["output"], new_r_json["output"])
        for a, b in zip(r_json["output"], new_r_json["output"]):
            self.assertTrue(a == b)

    def test_job_result_output3(self):
        r = Result()
        r.input = Composite(FactA(a=1, b=2), FactA(a=10, b=20))
        r.output = FactA(a=10, b=20)
        r_json = r.to_json()
        output_should_be = [
            {
                "__class__": "FactA",
                "__module__": "tests.test_job",
                "fields": {
                    "a": {
                        "__class__": "IntegerField",
                        "__module__": "opulence.common.fields.fields",
                        "value": 10,
                        "default": None,
                        "mandatory": False,
                    },
                    "b": {
                        "__class__": "StringField",
                        "__module__": "opulence.common.fields.fields",
                        "value": "20",
                        "default": "b",
                        "mandatory": False,
                    },
                    "c": {
                        "__class__": "StringField",
                        "__module__": "opulence.common.fields.fields",
                        "value": "c",
                        "default": "c",
                        "mandatory": False,
                    },
                },
            }
        ]
        input_should_be = [
            {
                "__class__": "FactA",
                "__module__": "tests.test_job",
                "fields": {
                    "a": {
                        "__class__": "IntegerField",
                        "__module__": "opulence.common.fields.fields",
                        "value": 1,
                        "default": None,
                        "mandatory": False,
                    },
                    "b": {
                        "__class__": "StringField",
                        "__module__": "opulence.common.fields.fields",
                        "value": "2",
                        "default": "b",
                        "mandatory": False,
                    },
                    "c": {
                        "__class__": "StringField",
                        "__module__": "opulence.common.fields.fields",
                        "value": "c",
                        "default": "c",
                        "mandatory": False,
                    },
                },
            },
            {
                "__class__": "FactA",
                "__module__": "tests.test_job",
                "fields": {
                    "a": {
                        "__class__": "IntegerField",
                        "__module__": "opulence.common.fields.fields",
                        "value": 10,
                        "default": None,
                        "mandatory": False,
                    },
                    "b": {
                        "__class__": "StringField",
                        "__module__": "opulence.common.fields.fields",
                        "value": "20",
                        "default": "b",
                        "mandatory": False,
                    },
                    "c": {
                        "__class__": "StringField",
                        "__module__": "opulence.common.fields.fields",
                        "value": "c",
                        "default": "c",
                        "mandatory": False,
                    },
                },
            },
        ]
        self.assertEqual(r_json["output"], output_should_be)
        self.assertEqual(r_json["input"], input_should_be)
        new_r = Result.from_json(r_json)
        new_r_json = new_r.to_json()
        for a, b in zip(r.to_json()["input"], new_r_json["input"]):
            self.assertTrue(a == b)
        for a, b in zip(r.to_json()["output"], new_r_json["output"]):
            self.assertTrue(a == b)

    def test_job_empty(self):
        r = Result()
        r_json = r.to_json()
        new_r = Result.from_json(r_json)
        self.assertEqual(new_r.to_json(), r.to_json())

    def test_job_not_altered(self):
        r = Result()
        r.input = Composite(FactA(a=1, b=2), FactA(a=10, b=20))
        r.output = FactA(a=30, b=40)
        r_json = r.to_json()
        # r_1 = Result.from_json(r_json)
        # r_1_json = r_1.to_json()
        # r_2 = Result.from_json(r_1_json)
        # r_2_json = r_2.to_json()
        input_should_be = [
            {
                "__class__": "FactA",
                "__module__": "tests.test_job",
                "fields": {
                    "a": {
                        "__class__": "IntegerField",
                        "__module__": "opulence.common.fields.fields",
                        "value": 1,
                        "default": None,
                        "mandatory": False,
                    },
                    "b": {
                        "__class__": "StringField",
                        "__module__": "opulence.common.fields.fields",
                        "value": "2",
                        "default": "b",
                        "mandatory": False,
                    },
                    "c": {
                        "__class__": "StringField",
                        "__module__": "opulence.common.fields.fields",
                        "value": "c",
                        "default": "c",
                        "mandatory": False,
                    },
                },
            },
            {
                "__class__": "FactA",
                "__module__": "tests.test_job",
                "fields": {
                    "a": {
                        "__class__": "IntegerField",
                        "__module__": "opulence.common.fields.fields",
                        "value": 10,
                        "default": None,
                        "mandatory": False,
                    },
                    "b": {
                        "__class__": "StringField",
                        "__module__": "opulence.common.fields.fields",
                        "value": "20",
                        "default": "b",
                        "mandatory": False,
                    },
                    "c": {
                        "__class__": "StringField",
                        "__module__": "opulence.common.fields.fields",
                        "value": "c",
                        "default": "c",
                        "mandatory": False,
                    },
                },
            },
        ]
        output_should_be = [
            {
                "__class__": "FactA",
                "__module__": "tests.test_job",
                "fields": {
                    "a": {
                        "__class__": "IntegerField",
                        "__module__": "opulence.common.fields.fields",
                        "value": 30,
                        "default": None,
                        "mandatory": False,
                    },
                    "b": {
                        "__class__": "StringField",
                        "__module__": "opulence.common.fields.fields",
                        "value": "40",
                        "default": "b",
                        "mandatory": False,
                    },
                    "c": {
                        "__class__": "StringField",
                        "__module__": "opulence.common.fields.fields",
                        "value": "c",
                        "default": "c",
                        "mandatory": False,
                    },
                },
            }
        ]
        self.assertEqual(input_should_be, r_json["input"])
        self.assertEqual(output_should_be, r_json["output"])
        # self.assertEqual(input_should_be, r_1_json["input"])
        # self.assertEqual(output_should_be, r_1_json["output"])
        # self.assertEqual(input_should_be, r_2_json["input"])...


test_transformations.py

Source: test_transformations.py (GitHub)


1"""2MDSuite: A Zincwarecode package.3License4-------5This program and the accompanying materials are made available under the terms6of the Eclipse Public License v2.0 which accompanies this distribution, and is7available at https://www.eclipse.org/legal/epl-v20.html8SPDX-License-Identifier: EPL-2.09Copyright Contributors to the Zincwarecode Project.10Contact Information11-------------------12email: zincwarecode@gmail.com13github: https://github.com/zincware14web: https://zincwarecode.com/15Citation16--------17If you use this module please cite us with:18Summary19-------20In this file, we test all transformation operations21"""22import numpy as np23import tensorflow as tf24from mdsuite.database.mdsuite_properties import mdsuite_properties25from mdsuite.transformations import (26 integrated_heat_current,27 ionic_current,28 kinaci_integrated_heat_current,29 momentum_flux,30 scale_coordinates,31 thermal_flux,32)33from mdsuite.transformations import translational_dipole_moment as tdp34from mdsuite.transformations import (35 unwrap_coordinates,36 unwrap_via_indices,37 velocity_from_positions,38 wrap_coordinates,39)40from mdsuite.utils.testing import assertDeepAlmostEqual41dtype = tf.float6442def test_integrated_heat_current():43 trafo = integrated_heat_current.IntegratedHeatCurrent()44 assert trafo.output_property == mdsuite_properties.integrated_heat_current45 # TODO46def test_ionic_current():47 trafo = ionic_current.IonicCurrent()48 assert trafo.output_property == mdsuite_properties.ionic_current49 n_part = 550 n_step = 751 input = {}52 output_should_be = np.zeros((n_step, 3))53 for sp_name in ["Na", "Cl"]:54 vel = tf.convert_to_tensor(np.random.random((n_part, n_step, 3)), dtype=dtype)55 charge = tf.convert_to_tensor([[[np.random.random()]]], dtype=dtype)56 input[sp_name] = {57 mdsuite_properties.velocities.name: vel,58 mdsuite_properties.charge.name: charge,59 }60 output_should_be += np.sum(vel.numpy() * charge.numpy(), axis=0)61 output = trafo.transform_batch(input)62 assertDeepAlmostEqual(output, output_should_be)63def test_kinaci_integrated_heat_current():64 trafo = kinaci_integrated_heat_current.KinaciIntegratedHeatCurrent()65 assert trafo.output_property == mdsuite_properties.kinaci_heat_current66 # TODO67# todo map_molecules68def test_momentum_flux():69 trafo = momentum_flux.MomentumFlux()70 assert trafo.output_property == mdsuite_properties.momentum_flux71 # TODO72def test_scale_coordinates():73 trafo = scale_coordinates.ScaleCoordinates()74 assert trafo.output_property == mdsuite_properties.positions75 n_part = 576 n_step = 777 pos = tf.convert_to_tensor(np.random.random((n_part, n_step, 3)), dtype=dtype)78 pos /= np.max(pos)79 box_l = tf.convert_to_tensor([1.1, 2.2, 3.3], dtype=dtype)[None, None, :]80 input = {81 mdsuite_properties.scaled_positions.name: pos,82 mdsuite_properties.box_length.name: box_l,83 }84 output_should_be = pos * box_l85 output = trafo.transform_batch(input)86 assertDeepAlmostEqual(output, output_should_be)87def test_thermal_flux():88 trafo = thermal_flux.ThermalFlux()89 assert trafo.output_property == mdsuite_properties.thermal_flux90 # TODO91def test_translational_dipole_moment():92 trafo = tdp.TranslationalDipoleMoment()93 assert trafo.output_property == mdsuite_properties.translational_dipole_moment94 n_part = 595 n_step = 796 input = {}97 output_should_be = np.zeros((n_step, 3))98 for sp_name in ["Na", "Cl"]:99 pos = tf.convert_to_tensor(np.random.random((n_part, n_step, 3)), dtype=dtype)100 charge = tf.convert_to_tensor([[[np.random.random()]]], 
dtype=dtype)101 input[sp_name] = {102 mdsuite_properties.unwrapped_positions.name: pos,103 mdsuite_properties.charge.name: charge,104 }105 output_should_be += np.sum(pos.numpy() * charge.numpy(), axis=0)106 output = trafo.transform_batch(input)107 assertDeepAlmostEqual(output, output_should_be)108def test_unwrap_coordinates():109 trafo = unwrap_coordinates.CoordinateUnwrapper()110 assert trafo.output_property == mdsuite_properties.unwrapped_positions111 box_l = tf.convert_to_tensor([1.1, 2.2, 3.3], dtype=dtype)[None, None, :]112 # 1 particle, 4 time steps113 # x stays in box 0114 # y jumps 0 -> -1 -> -1 -> 0115 # z jumps 0 -> 1 -> 1 -> 2116 pos = np.array(117 [[[0.5, 0.1, 3.2]], [[0.6, 2.1, 0.9]], [[0.6, 2.1, 2.1]], [[0.6, 0.1, 0.1]]]118 )119 pos = np.swapaxes(pos, 0, 1)120 print(np.shape(pos))121 input = {122 mdsuite_properties.positions.name: tf.convert_to_tensor(pos, dtype=dtype),123 mdsuite_properties.box_length.name: box_l,124 }125 # previous carryover (same pos, but already image jumps in the last batch)126 last_carryover = {127 "last_pos": tf.convert_to_tensor([[0.5, 0.1, 3.2]], dtype=dtype),128 "last_image_box": tf.convert_to_tensor([[4, 0, 0]], dtype=dtype),129 }130 output, carryover = trafo.transform_batch(input, carryover=last_carryover)131 output_should_be = np.array(132 [133 [[4 * 1.1 + 0.5, 0.1, 3.2]],134 [[4 * 1.1 + 0.6, -0.1, 4.2]],135 [[4 * 1.1 + 0.6, -0.1, 5.4]],136 [[4 * 1.1 + 0.6, 0.1, 6.7]],137 ]138 )139 output_should_be = np.swapaxes(output_should_be, 0, 1)140 carryover_should_be = {"last_pos": [[0.6, 0.1, 0.1]], "last_image_box": [[4, 0, 2]]}141 assertDeepAlmostEqual(output.numpy(), output_should_be)142 assertDeepAlmostEqual(carryover["last_pos"], carryover_should_be["last_pos"])143 assertDeepAlmostEqual(144 carryover["last_image_box"], carryover_should_be["last_image_box"]145 )146def test_unwrap_via_indices():147 trafo = unwrap_via_indices.UnwrapViaIndices()148 assert trafo.output_property == mdsuite_properties.unwrapped_positions149 n_part = 5150 n_step = 7151 pos = tf.convert_to_tensor(np.random.random((n_part, n_step, 3)), dtype=dtype)152 box_im = tf.convert_to_tensor(153 np.random.randint(-10, 10, size=(n_part, n_step, 3)), dtype=dtype154 )155 box_l = tf.convert_to_tensor([1.1, 2.2, 3.3], dtype=dtype)[None, None, :]156 input = {157 mdsuite_properties.positions.name: pos,158 mdsuite_properties.box_images.name: box_im,159 mdsuite_properties.box_length.name: box_l,160 }161 output_should_be = pos + box_im * box_l162 output = trafo.transform_batch(input)163 assertDeepAlmostEqual(output, output_should_be)164def test_velocity_from_positions():165 trafo = velocity_from_positions.VelocityFromPositions()166 assert trafo.output_property == mdsuite_properties.velocities_from_positions167 n_part = 5168 n_step = 7169 pos = tf.convert_to_tensor(np.random.random((n_part, n_step, 3)), dtype=dtype)170 t_step = tf.convert_to_tensor([[[0.1]]], dtype=dtype)171 sample_rate = tf.convert_to_tensor([[[17]]], dtype=dtype)172 input = {173 mdsuite_properties.unwrapped_positions.name: pos,174 mdsuite_properties.time_step.name: t_step,175 mdsuite_properties.sample_rate.name: sample_rate,176 }177 output = trafo.transform_batch(input)178 vels = (pos[:, 1:, :] - pos[:, :-1, :]) / (t_step * sample_rate)179 last_vels = vels[:, -1, :]180 output_should_be = np.concatenate((vels, last_vels[:, None, :]), axis=1)181 assertDeepAlmostEqual(output, output_should_be)182def test_wrap_coordinates():183 trafo = wrap_coordinates.CoordinateWrapper(center_box=False)184 assert trafo.output_property == 
mdsuite_properties.positions185 n_part = 5186 n_step = 7187 pos = tf.convert_to_tensor(np.random.random((n_part, n_step, 3)), dtype=dtype)188 box_l = tf.convert_to_tensor([1.1, 2.2, 3.3], dtype=dtype)[None, None, :]189 input = {190 mdsuite_properties.unwrapped_positions.name: pos,191 mdsuite_properties.box_length.name: box_l,192 }193 output_should_be = pos - tf.floor(pos / box_l) * box_l194 output = trafo.transform_batch(input)195 assertDeepAlmostEqual(output, output_should_be)196 assert np.all(output > 0)...
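test_wrap_coordinates builds its expectation straight from the wrapping formula, output_should_be = pos - floor(pos / box_l) * box_l. The sketch below checks the same identity with NumPy only; the plain modulo stands in for MDSuite's CoordinateWrapper, which is an assumption made purely to keep the example free of the mdsuite and tensorflow dependencies.

import numpy as np


def wrap_positions(pos, box_l):
    # stand-in for the coordinate wrapper under test; for a positive box
    # length the modulo equals pos - floor(pos / box_l) * box_l
    return np.mod(pos, box_l)


rng = np.random.default_rng(seed=42)
n_part, n_step = 5, 7
pos = rng.random((n_part, n_step, 3)) * 10.0      # unwrapped positions
box_l = np.array([1.1, 2.2, 3.3])[None, None, :]  # box lengths, broadcastable shape

output_should_be = pos - np.floor(pos / box_l) * box_l
output = wrap_positions(pos, box_l)

np.testing.assert_allclose(output, output_should_be)
assert np.all(output >= 0)
print("wrapped positions match output_should_be")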


test_trash_list.py

Source: test_trash_list.py (GitHub)


...
class describe_trash_list_output:
    @istest
    def should_output_the_help_message(self):
        self.run('trash-list', '--help')
        self.output_should_be("""\
Usage: trash-list [OPTIONS...]
List trashed files
Options:
 --version show program's version number and exit
 -h, --help show this help message and exit
Report bugs to http://code.google.com/p/trash-cli/issues
""")

    @istest
    def should_output_nothing_if_no_files(self):
        self.run()
        self.output_should_be('')

    @istest
    def should_output_deletion_date_and_path_of_trash(self):
        self.add_trashinfo('/aboslute/path', '2001-02-03T23:55:59')
        self.run()
        self.output_should_be("2001-02-03 23:55:59 /aboslute/path\n")

    @istest
    def should_works_also_with_multiple_files(self):
        self.add_trashinfo("/file1", "2000-01-01T00:00:01")
        self.add_trashinfo("/file2", "2000-01-01T00:00:02")
        self.add_trashinfo("/file3", "2000-01-01T00:00:03")
        self.run()
        self.output_should_be("2000-01-01 00:00:01 /file1\n"
                              "2000-01-01 00:00:02 /file2\n"
                              "2000-01-01 00:00:03 /file3\n")

    @istest
    def should_output_question_mark_if_deletion_date_is_not_present(self):
        self.info_dir.having_file(a_trashinfo_without_date())
        self.run()
        self.output_should_be("????-??-?? ??:??:?? /path\n")

    @istest
    def should_output_question_marks_if_deletion_date_is_invalid(self):
        self.info_dir.having_file(a_trashinfo_with_invalid_date())
        self.run()
        self.output_should_be("????-??-?? ??:??:?? /path\n")

    @istest
    def should_warn_about_empty_trashinfos(self):
        self.info_dir.touch('empty.trashinfo')
        self.run()
        self.error_should_be(
            "Parse Error: XDG_DATA_HOME/Trash/info/empty.trashinfo: "
            "Unable to parse Path\n")

    @istest
    def should_warn_about_unreadable_trashinfo(self):
        self.info_dir.having_unreadable('unreadable.trashinfo')
        self.run()
        self.error_should_be(
            "[Errno 13] Permission denied: "
            "'XDG_DATA_HOME/Trash/info/unreadable.trashinfo'\n")

    @istest
    def should_warn_about_unexistent_path_entry(self):
        self.info_dir.having_file(a_trashinfo_without_path())
        self.run()
        self.error_should_be(
            "Parse Error: XDG_DATA_HOME/Trash/info/1.trashinfo: "
            "Unable to parse Path\n")
        self.output_should_be('')

    def setUp(self):
        self.XDG_DATA_HOME = 'XDG_DATA_HOME'
        require_empty_dir(self.XDG_DATA_HOME)
        self.info_dir = FakeInfoDir(self.XDG_DATA_HOME + '/Trash/info')
        self.add_trashinfo = self.info_dir.add_trashinfo
        runner = TrashListRunner(environ={'XDG_DATA_HOME': self.XDG_DATA_HOME})
        self.output_should_be = runner.output_should_be
        self.error_should_be = runner.error_should_be
        self.run = runner


@istest
class describe_trash_list_with_raw_option:
    def setup(self):
        from nose import SkipTest; raise SkipTest()
        self.having_XDG_DATA_HOME('XDG_DATA_HOME')
        self.running('trash-list', '--raw')

    @istest
    def output_should_contains_trashinfo_paths(self):
        from nose import SkipTest; raise SkipTest()
        self.having_trashinfo('foo.trashinfo')
        self.output_should_contain_line(
            'XDG_DATA_HOME/Trash/info/foo.trashinfo')

    @istest
    def output_should_contains_backup_copy_paths(self):
        from nose import SkipTest; raise SkipTest()
        self.having_trashinfo('foo.trashinfo')
        self.output_should_contain_line(
            'XDG_DATA_HOME/Trash/files/foo')

    def having_XDG_DATA_HOME(self, value):
        self.XDG_DATA_HOME = value

    def running(self, *argv):
        runner = TrashListRunner(environ={'XDG_DATA_HOME': self.XDG_DATA_HOME})
        runner.run(argv)
        self.output = runner.output()

    def output_should_contain_line(self, line):
        assert line in self.output_lines()

    def output_lines(self):
        return [line.rstrip('\n') for line in self.output.splitlines()]


@istest
class describe_list_trash_with_top_trash_directory_type_1:
    @istest
    def should_list_method_1_trashcan_contents(self):
        make_sticky_dir('topdir/.Trash')
        trashdir = FakeInfoDir('topdir/.Trash/123/info')
        trashdir.add_trashinfo('file1', '2000-01-01T00:00:00')
        self.run()
        self.output_should_be("2000-01-01 00:00:00 topdir/file1\n")

    @istest
    def should_ignore_contents_when_is_not_sticky(self):
        trashdir = FakeInfoDir('topdir/.Trash/123/info')
        trashdir.add_trashinfo('file1', '2000-01-01T00:00:00')
        ensure_non_sticky_dir('topdir/.Trash')
        self.run()
        self.output_should_be("")

    @istest
    def should_list_method2_trashcan_contents(self):
        trashdir = FakeInfoDir('topdir/.Trash-123/info')
        trashdir.add_trashinfo('file', '2000-01-01T00:00:00')
        self.run()
        self.output_should_be("2000-01-01 00:00:00 topdir/file\n")

    def setUp(self):
        require_empty_dir('topdir')
        runner = TrashListRunner()
        runner.set_fake_uid(123)
        runner.add_volume('topdir')
        self.run = runner
        self.output_should_be = runner.output_should_be


class FakeInfoDir:
    def __init__(self, path):
        self.path = path
        self.number = 1

    def touch(self, path_relative_to_info_dir):
        make_empty_file(self.join(path_relative_to_info_dir))

    def having_unreadable(self, path_relative_to_info_dir):
        path = self.join(path_relative_to_info_dir)
        make_unreadable_file(path)

    def join(self, path_relative_to_info_dir):
        import os
        return os.path.join(self.path, path_relative_to_info_dir)

    def having_file(self, contents):
        path = '%(info_dir)s/%(name)s.trashinfo' % {'info_dir': self.path,
                                                    'name': str(self.number)}
        write_file(path, contents)
        self.number += 1
        self.path_of_last_file_added = path

    def add_trashinfo(self, escaped_path_entry, formatted_deletion_date):
        self.having_file(a_trashinfo(escaped_path_entry, formatted_deletion_date))


class TrashListRunner:
    def __init__(self, environ={}):
        self.stdout = OutputCollector()
        self.stderr = OutputCollector()
        self.environ = environ
        self.fake_getuid = self.error
        self.volumes = []

    def __call__(self, *argv):
        self.run(argv)

    def run(self, argv):
        if argv == ():
            argv = 'trash-list'
        ListCmd(
            out=self.stdout,
            err=self.stderr,
            environ=self.environ,
            getuid=self.fake_getuid,
            list_volumes=lambda: self.volumes
        ).run(*argv)

    def set_fake_uid(self, uid):
        self.fake_getuid = lambda: uid

    def add_volume(self, mount_point):
        self.volumes.append(mount_point)

    def error(self):
        raise ValueError()

    def output_should_be(self, expected_value):
        self.stdout.assert_equal_to(expected_value)

    def error_should_be(self, expected_value):
        self.stderr.assert_equal_to(expected_value)

    def output(self):...
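In the trash-cli suite, output_should_be is not a framework keyword: it is a method of TrashListRunner that delegates to an OutputCollector holding everything the command wrote to stdout, and setUp binds it onto the test class so assertions read naturally. OutputCollector itself is defined elsewhere in the project, so the version below is an assumed minimal implementation of the same pattern, with a hypothetical GreeterRunner in place of ListCmd.

import unittest


class OutputCollector:
    # assumed minimal version of trash-cli's OutputCollector: behaves like a
    # write-only stream and can assert on everything written to it
    def __init__(self):
        self._chunks = []

    def write(self, text):
        self._chunks.append(text)

    def getvalue(self):
        return "".join(self._chunks)

    def assert_equal_to(self, expected):
        actual = self.getvalue()
        assert actual == expected, "expected %r, got %r" % (expected, actual)


class GreeterRunner:
    # hypothetical runner; the real tests drive ListCmd with fake stdout/stderr
    def __init__(self):
        self.stdout = OutputCollector()

    def run(self, name):
        self.stdout.write("Hello, %s\n" % name)

    def output_should_be(self, expected_value):
        self.stdout.assert_equal_to(expected_value)


class TestGreeter(unittest.TestCase):
    def setUp(self):
        runner = GreeterRunner()
        self.run_greeter = runner.run
        # bind the helper so tests read like the trash-cli examples above
        self.output_should_be = runner.output_should_be

    def test_greets_by_name(self):
        self.run_greeter("world")
        self.output_should_be("Hello, world\n")


if __name__ == "__main__":
    unittest.main()

Binding runner.output_should_be in setUp keeps each test to two lines: run the command, then state what its output should be.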


test_trash_put.py

Source: test_trash_put.py (GitHub)


...
    def it_should_remove_the_file(self):
        file_should_have_been_deleted('sandbox/foo')

    @istest
    def it_should_remove_it_silently(self):
        self.output_should_be('')

    def a_trashinfo_file_should_have_been_created(self):
        file('sandbox/XDG_DATA_HOME/Trash/info/foo.trashinfo').read()

    def setUp(self):
        require_empty_dir('sandbox')
        having_file('sandbox/foo')
        self.run_trashput = TrashPutRunner(
            environ={'XDG_DATA_HOME': 'sandbox/XDG_DATA_HOME'}
        )
        self.stderr_should_be = self.run_trashput.err.should_be
        self.output_should_be = self.run_trashput.out.should_be
        self.run_trashput('trash-put', 'sandbox/foo')

import os
...
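test_trash_put.py uses the same idea one level lower: output_should_be is simply an alias for the out collector's should_be method, so a silent command is checked with self.output_should_be(''). A short sketch of that aliasing, capturing real stdout with contextlib.redirect_stdout around a hypothetical quiet_command:

import contextlib
import io
import unittest


class CapturedStream:
    # wraps a StringIO and exposes the should_be assertion used above
    def __init__(self):
        self.buffer = io.StringIO()

    def should_be(self, expected_value):
        actual = self.buffer.getvalue()
        assert actual == expected_value, "expected %r, got %r" % (expected_value, actual)


def quiet_command():
    # hypothetical command under test: succeeds without printing anything
    pass


class TestQuietCommand(unittest.TestCase):
    def setUp(self):
        self.out = CapturedStream()
        # alias the assertion, mirroring self.output_should_be = runner.out.should_be
        self.output_should_be = self.out.should_be

    def test_it_runs_silently(self):
        with contextlib.redirect_stdout(self.out.buffer):
            quiet_command()
        self.output_should_be('')


if __name__ == "__main__":
    unittest.main()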


Automation Testing Tutorials

Learn to execute automation testing from scratch with the LambdaTest Learning Hub: from setting up the prerequisites and running your first automation test to following best practices and diving into advanced test scenarios. The Learning Hub compiles step-by-step guides to help you become proficient with different test automation frameworks such as Selenium, Cypress, and TestNG.


YouTube

You can also refer to the video tutorials on the LambdaTest YouTube channel for step-by-step demonstrations from industry experts.

Run Robot Framework automation tests on the LambdaTest cloud grid

Perform automation testing on 3000+ real desktop and mobile devices online.

