Best Python code snippet using localstack_python
test_pbs_queue.py
Source:test_pbs_queue.py  
#!/usr/bin/env python3
#
# Copyright (c) Bo Peng and the University of Texas MD Anderson Cancer Center
# Distributed under the terms of the 3-clause BSD License.
import os
import sys
import pytest
import random
import subprocess

from sos import execute_workflow

has_docker = True
try:
    subprocess.check_output('docker ps | grep test_sos', shell=True).decode()
except subprocess.CalledProcessError:
    subprocess.call('sh build_test_docker.sh', shell=True)
    try:
        subprocess.check_output(
            'docker ps | grep test_sos', shell=True).decode()
    except subprocess.CalledProcessError:
        print('Failed to set up a docker machine with sos')
        has_docker = False

@pytest.mark.skipif(not has_docker, reason="Docker container not usable")
def test_remote_execute(clear_now_and_after):
    clear_now_and_after('result.txt')
    execute_workflow(
        '''
        [10]
        output: 'result.txt'
        task:
        run:
        echo 'a' > 'result.txt'
        ''',
        options={
            'config_file': '~/docker.yml',
            'default_queue': 'ts',
            'sig_mode': 'force',
        })
    assert os.path.isfile('result.txt')
    with open('result.txt') as res:
        assert res.read() == 'a\n'

@pytest.mark.skipif(not has_docker, reason="Docker container not usable")
def test_task_spooler_with_force_sigmode(purge_tasks):
    execute_workflow(
        '''
        [10]
        input: for_each={'i': range(3)}
        task:
        run: expand=True
            echo I am spooler with force {i}
            sleep {10 + i*2}
        ''',
        options={
            'config_file': '~/docker.yml',
            'default_queue': 'ts',
            'sig_mode': 'force',
        })

# @pytest.mark.skipif(
#     sys.platform == 'win32' or not has_docker,
#     reason='No symbolic link problem under win32 or no docker')
# def test_to_host_rename(clear_now_and_after):
#     '''Test to_host with dictionary'''
#     clear_now_and_after('1.txt', '2.txt', '3.txt')
#     execute_workflow(
#         r'''
#         [1]
#         sh:
#         echo "1" > 1.txt
#         [10]
#         task: to_host={'1.txt': '2.txt'}, from_host={'3.txt': '2.txt'}
#         with open('2.txt', 'a') as t:
#             t.write('2\n')
#         ''',
#         options={
#             'config_file': '~/docker.yml',
#             'default_queue': 'ts',
#             'sig_mode': 'force',
#         })
#     assert os.path.isfile('3.txt')
#     with open('3.txt') as txt:
#         content = txt.read()
#         assert '1\n2\n' == content

@pytest.mark.skipif(
    sys.platform == 'win32' or not has_docker,
    reason='No symbolic link problem under win32 or no docker')
def test_send_symbolic_link(clear_now_and_after, temp_factory):
    '''Test to_host symbolic link or directories that contain symbolic link. #508'''
    # create a symbolic link
    temp_factory('ttt.py', content='something')
    clear_now_and_after('llink')
    subprocess.call('ln -s ttt.py llink', shell=True)
    execute_workflow(
        '''
        import os
        [10]
        task: to_host='llink'
        sz = os.path.getmtime('llink')
        ''',
        options={
            'config_file': '~/docker.yml',
            'default_queue': 'ts',
            'sig_mode': 'force'
        })

@pytest.mark.skipif(
    not os.path.exists(os.path.expanduser('~').upper()) or not has_docker,
    reason='Skip test for case sensitive file system')
def test_case_insensitive_local_path(clear_now_and_after, temp_factory):
    '''Test path_map from a case insensitive file system.'''
    clear_now_and_after('test_pbs_queue.py.bak', 'tt1.py.bak')
    temp_factory('tt1.py', content='something')
    execute_workflow(
        '''
[10]
output: 'tt1.py.bak'
task: to_host=r'{}'
import shutil
shutil.copy("tt1.py", f"{{_output}}")
'''.format(os.path.join(os.path.abspath('.').upper(), 'tt1.py')),
        options={
            'config_file': '~/docker.yml',
            'default_queue': 'ts',
            'sig_mode': 'force',
        })
    assert os.path.isfile('tt1.py.bak')
    # the files should be the same
    with open('tt1.py') as ori, open('tt1.py.bak') as bak:
        assert ori.read() == bak.read()

@pytest.mark.skipif(not has_docker, reason="Docker container not usable")
def test_sos_execute():
    '''Test sos execute'''
    subprocess.check_output(
        'sos purge -c ~/docker.yml -q docker --all', shell=True)
    execute_workflow(
        '''
        [10]
        input: for_each={'i': range(3)}
        task:
        run: expand=True
            echo Testing purge {i}
            sleep {i*2}
        ''',
        options={
            'config_file': '~/docker.yml',
            'default_queue': 'ts',
            'sig_mode': 'force',
        })

@pytest.mark.skipif(not has_docker, reason="Docker container not usable")
def test_sos_purge():
    '''Test purge tasks'''
    # purge all previous tasks
    subprocess.check_output('sos purge --all -c ~/docker.yml -q ts', shell=True)
    execute_workflow(
        '''
        [10]
        input: for_each={'i': range(3)}
        task:
        run: expand=True
            echo Testing purge {i}
            sleep {i*2}
    ''',
        options={
            'config_file': '~/docker.yml',
            'default_queue': 'ts',
            'sig_mode': 'force',
        })

@pytest.mark.skipif(not has_docker, reason="Docker container not usable")
def test_remote_input(clear_now_and_after):
    '''Test remote target'''
    clear_now_and_after('test_file.txt')
    # purge all previous tasks
    execute_workflow(
        '''
        [10]
        task:
        run:
            echo A file >> "test_file.txt"
        ''',
        options={
            'config_file': '~/docker.yml',
            'default_queue': 'ts',
            'sig_mode': 'force',
        })
    # this file is remote only
    assert not os.path.isfile('test_file.txt')

@pytest.mark.skipif(not has_docker, reason="Docker container not usable")
def test_remote_input_1(clear_now_and_after):
    #
    clear_now_and_after('test1.txt')
    execute_workflow(
        '''
        [10]
        input: remote('test_file.txt')
        output: 'test1.txt'
        task:
        run: expand=True
            echo {_input} >> {_output}
        ''',
        options={
            'config_file': '~/docker.yml',
            'default_queue': 'ts',
            'sig_mode': 'force',
        })
    #
    assert not os.path.isfile('test_file.txt')
    assert os.path.isfile('test1.txt')

@pytest.mark.skipif(not has_docker, reason="Docker container not usable")
def test_no_remote_input(clear_now_and_after):
    '''Test remote target'''
    clear_now_and_after('test_file_A.txt', 'test_file_B.txt')
    execute_workflow(
        '''
    [10]
    task:
    run:
        echo A file >> "test_file_A.txt"
        echo B file >> "test_file_B.txt"
    ''',
        options={
            'config_file': '~/docker.yml',
            'default_queue': 'ts',
            'sig_mode': 'force',
        })
    # this file is remote only
    assert not os.path.isfile('test_file_A.txt')
    assert not os.path.isfile('test_file_B.txt')

@pytest.mark.skipif(not has_docker, reason="Docker container not usable")
def test_multiple_remote_input(clear_now_and_after):
    #
    clear_now_and_after('test1.txt')
    execute_workflow(
        '''
        [10]
        A = 'test_file_A.txt'
        input: remote(A, ['test_file_B.txt'])
        output: 'test1.txt'
        task:
        run: expand=True
            cat {_input} >> {_output}
    ''',
        options={
            'config_file': '~/docker.yml',
            'default_queue': 'ts',
            'sig_mode': 'force',
        })
    #
    assert not os.path.isfile('test_file_A.txt')
    assert not os.path.isfile('test_file_B.txt')
    assert os.path.isfile('test1.txt')
    with open('test1.txt') as w:
        content = w.read()
        assert 'A file' in content
        assert 'B file' in content

@pytest.mark.skipif(not has_docker, reason="Docker container not usable")
def test_remote_output(clear_now_and_after):
    '''Test remote target'''
    # purge all previous tasks
    clear_now_and_after('test_file.txt', 'test_file1.txt')
    execute_workflow(
        '''
        [10]
        output: remote('test_file.txt'), 'test_file1.txt'
        task:
        run:
            echo A file >> "test_file.txt"
            echo A file >> "test_file1.txt"
        ''',
        options={
            'config_file': '~/docker.yml',
            'default_queue': 'ts',
            'sig_mode': 'force',
        })
    # this file is remote only
    assert not os.path.isfile('test_file.txt')
    assert os.path.isfile('test_file1.txt')

@pytest.mark.skipif(not has_docker, reason="Docker container not usable")
def test_from_host_option(clear_now_and_after):
    '''Test from_remote option'''
    clear_now_and_after('llp')
    execute_workflow(
        '''
        [10]
        task: from_host='llp'
        with open('llp', 'w') as llp:
            llp.write("LLP")
        ''',
        options={
            'config_file': '~/docker.yml',
            'default_queue': 'ts',
            'sig_mode': 'force',
        })
    assert os.path.isfile('llp')

@pytest.mark.skipif(not has_docker, reason="Docker container not usable")
def test_local_from_host_option(clear_now_and_after):
    '''Test from_remote option'''
    clear_now_and_after('llp')
    execute_workflow(
        '''
        [10]
        task: from_host='llp'
        with open('llp', 'w') as llp:
            llp.write("LLP")
        ''',
        options={
            'config_file': '~/docker.yml',
            'sig_mode': 'force',
            'default_queue': 'localhost',
        })
    assert os.path.isfile('llp')

def test_list_hosts():
    '''test list hosts using sos status -q'''
    for v in ['0', '1', '3', '4']:
        # ts of type pbs should be in output
        output = subprocess.check_output(
            ['sos', 'remote', 'list', '-c', '~/docker.yml', '-v', v]).decode()
        assert 'ts' in output

def test_sync_input_output(clear_now_and_after):
    '''Test sync input and output with remote host'''
    clear_now_and_after([f'test_{i}.txt' for i in range(4)],
                        [f'test_{i}.bak' for i in range(4)])
    val = random.randint(1, 10000)
    execute_workflow(
        '''
        parameter: g = 100
        [10]
        input: for_each=dict(i=range(4))
        output: f'test_{i}.txt'
        with open(f'test_{i}.txt', 'w') as tst:
            tst.write(f'test_{i}_{g}')
        [20]
        output: _input.with_suffix('.bak')
        task:
        with open(_input, 'r') as inf, open(_output, 'w') as outf:
            outf.write(inf.read() + '.bak')
    ''',
        args=['--g', str(val)],
        options={
            'config_file': '~/docker.yml',
            'default_queue': 'ts',
            'sig_mode': 'force',
        })
    # now check if
    for i in range(4):
        assert os.path.isfile(f'test_{i}.txt')
        with open(f'test_{i}.bak') as outf:
            assert outf.read() == f'test_{i}_{val}.bak'
        assert os.path.isfile(f'test_{i}.bak')
        with open(f'test_{i}.bak') as outf:
            assert outf.read() == f'test_{i}_{val}.bak'

def test_sync_master_task(clear_now_and_after):
    '''Test sync input and output with remote host with trunksize'''
    clear_now_and_after([f'test_{i}.txt' for i in range(4)],
                        [f'test_{i}.bak' for i in range(4)])
    val = random.randint(1, 10000)
    execute_workflow(
        '''
        parameter: g = 100
        [10]
        input: for_each=dict(i=range(4))
        output: f'test_{i}.txt'
        with open(f'test_{i}.txt', 'w') as tst:
            tst.write(f'test_{i}_{g}')
        [20]
        output: _input.with_suffix('.bak')
        task: trunk_size=2
        with open(_input, 'r') as inf, open(_output, 'w') as outf:
            outf.write(inf.read() + '.bak')
        ''',
        args=['--g', str(val)],
        options={
            'config_file': '~/docker.yml',
            'default_queue': 'ts',
            'sig_mode': 'force',
        })
    # now check if
    for i in range(4):
        assert os.path.isfile(f'test_{i}.txt')
        with open(f'test_{i}.bak') as outf:
            assert outf.read() == f'test_{i}_{val}.bak'
        assert os.path.isfile(f'test_{i}.bak')
        with open(f'test_{i}.bak') as outf:
            assert outf.read() == f'test_{i}_{val}.bak'

@pytest.mark.skipif(not has_docker, reason="Docker container not usable")
def test_delayed_interpolation(clear_now_and_after):
    '''Test delayed interpolation with expression involving remote objects'''
    # purge all previous tasks
    clear_now_and_after('test.py', 'test.py.bak')
    execute_workflow(
        '''
        [10]
        output: remote('test.py')
        task:
        run:
        touch test.py
        [20]
        output: remote(f"{_input:R}.bak")
        task:
        run: expand=True
        cp {_input} {_output}
        ''',
        options={
            'config_file': '~/docker.yml',
            'default_queue': 'ts',
            'sig_mode': 'force',
        })
    # this file is remote only
    assert not os.path.isfile('test.py')
    assert not os.path.isfile('test.py.bak')

@pytest.mark.skipif(not has_docker, reason="Docker container not usable")
def test_remote_execution(purge_tasks):
    execute_workflow(
        '''
        [10]
        input: for_each={'i': range(5)}
        task:
        run: expand=True
        echo I am {i}
        sleep {5+i}
        ''',
        options={
            'config_file': '~/docker.yml',
            'default_queue': 'ts',
            'max_running_jobs': 5,
            'sig_mode': 'force',
        })

@pytest.mark.skipif(not has_docker, reason="Docker container not usable")
def test_task_spooler(purge_tasks):
    '''Test task spooler PBS engine'''
    execute_workflow(
        '''
        [10]
        input: for_each={'i': range(3)}
        task:
        run: expand=True
        echo I am task spooler {i}
        sleep {5+i*2}
    ''',
        options={
            'config_file': '~/docker.yml',
            'default_queue': 'ts',
            'sig_mode': 'force',
...
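The tests above rely on pytest fixtures such as clear_now_and_after, temp_factory and purge_tasks that live in the project's conftest.py rather than in this file. As an illustration only, a cleanup fixture along these lines might look like the sketch below; the names and behaviour here are assumptions, not the project's actual implementation.

# Hypothetical sketch of a cleanup fixture like clear_now_and_after (for
# illustration only; the real fixture is defined in the project's conftest.py).
import os
import pytest

@pytest.fixture
def clear_now_and_after():
    targets = []

    def _clear(*files):
        # Accept filenames or lists of filenames; remove them now and remember
        # them so they are removed again after the test finishes.
        for item in files:
            names = item if isinstance(item, (list, tuple)) else [item]
            for name in names:
                targets.append(name)
                if os.path.isfile(name):
                    os.remove(name)

    yield _clear

    for name in targets:
        if os.path.isfile(name):
            os.remove(name)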
dockerd.chart.py
Source:dockerd.chart.py
# -*- coding: utf-8 -*-
# Description: docker netdata python.d module
# Author: Kévin Darcel (@tuxity)
try:
    import docker
    HAS_DOCKER = True
except ImportError:
    HAS_DOCKER = False

from distutils.version import StrictVersion

from bases.FrameworkServices.SimpleService import SimpleService

# charts order (can be overridden if you want less charts, or different order)
ORDER = [
    'running_containers',
    'healthy_containers',
    'unhealthy_containers'
]

CHARTS = {
    'running_containers': {
        'options': [None, 'Number of running containers', 'containers', 'running containers',
                    'docker.running_containers', 'line'],
        'lines': [
            ['running_containers', 'running']
        ]
    },
    'healthy_containers': {
        'options': [None, 'Number of healthy containers', 'containers', 'healthy containers',
                    'docker.healthy_containers', 'line'],
        'lines': [
            ['healthy_containers', 'healthy']
        ]
    },
    'unhealthy_containers': {
        'options': [None, 'Number of unhealthy containers', 'containers', 'unhealthy containers',
                    'docker.unhealthy_containers', 'line'],
        'lines': [
            ['unhealthy_containers', 'unhealthy']
        ]
    }
}

MIN_REQUIRED_VERSION = '3.2.0'

class Service(SimpleService):
    def __init__(self, configuration=None, name=None):
        SimpleService.__init__(self, configuration=configuration, name=name)
        self.order = ORDER
        self.definitions = CHARTS
        self.client = None

    def check(self):
        if not HAS_DOCKER:
            self.error("'docker' package is needed to use dockerd module")
            return False
        if StrictVersion(docker.__version__) < StrictVersion(MIN_REQUIRED_VERSION):
            self.error("installed 'docker' package version {0}, minimum required version {1}, please upgrade".format(
                docker.__version__,
                MIN_REQUIRED_VERSION,
            ))
            return False
        self.client = docker.DockerClient(base_url=self.configuration.get('url', 'unix://var/run/docker.sock'))
        try:
            self.client.ping()
        except docker.errors.APIError as error:
            self.error(error)
            return False
        return True

    def get_data(self):
        data = dict()
        data['running_containers'] = len(self.client.containers.list(sparse=True))
        data['healthy_containers'] = len(self.client.containers.list(filters={'health': 'healthy'}, sparse=True))
        data['unhealthy_containers'] = len(self.client.containers.list(filters={'health': 'unhealthy'}, sparse=True))
...
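Outside of netdata, the same counts can be collected directly with the docker SDK. The standalone sketch below simply mirrors the calls used in get_data() above; it assumes the docker package is installed and a daemon is reachable at the default Unix socket.

# Standalone sketch mirroring the module's get_data() logic; assumes the
# 'docker' package is installed and a daemon listens on the default socket.
import docker

client = docker.DockerClient(base_url='unix://var/run/docker.sock')
client.ping()  # raises docker.errors.APIError if the daemon is not reachable

metrics = {
    'running_containers': len(client.containers.list(sparse=True)),
    'healthy_containers': len(client.containers.list(filters={'health': 'healthy'}, sparse=True)),
    'unhealthy_containers': len(client.containers.list(filters={'health': 'unhealthy'}, sparse=True)),
}
print(metrics)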
