How to use execute_in_container method in localstack

Best Python code snippets using the localstack_python library

execute.py

Source:execute.py Github

copy

Full Screen

def _write_text_file(fname, txt):
    """Write the string `txt` to the file `fname` (overwriting it)."""
    # NOTE: param renamed from `str` — shadowing the builtin; callers pass it
    # positionally in this file.
    with open(fname, 'w') as f:
        f.write(txt)


def _execute_in_container(proc, X, *, container, tempdir, **kwargs):
    """Run processor `proc` (configured instance `X`) inside a singularity container.

    Builds a list of `-B src:dst` bind mounts for the kbucket cache, every
    realized input file, every output directory and the processor's source
    directory, generates a small driver script in `tempdir`, then invokes
    `singularity exec`.  Raises Exception on a non-zero container exit code.
    """
    singularity_opts = []
    kbucket_cache_dir = kbucket.getConfig()['local_cache_dir']
    singularity_opts.append('-B {}:{}'.format(kbucket_cache_dir, '/sha1-cache'))
    singularity_opts.append('-B /tmp:/tmp')

    # Bind-mount each local input file into the container and rewrite the
    # corresponding kwarg to the in-container path.  kbucket:// and sha1://
    # URIs are left untouched (resolved via the mounted sha1 cache).
    for input0 in proc.INPUTS:
        name0 = input0.name
        fname0 = getattr(X, name0)
        if fname0:
            if fname0.startswith('kbucket://') or fname0.startswith('sha1://'):
                pass
            else:
                fname0 = os.path.abspath(fname0)
                fname2 = '/execute_in_container/input_{}'.format(name0)
                kwargs[name0] = fname2
                singularity_opts.append('-B {}:{}'.format(fname0, fname2))

    # Outputs: mount the *directory* of each output so the container can
    # create the file, and point the kwarg at the in-container path.
    for output0 in proc.OUTPUTS:
        name0 = output0.name
        val = getattr(X, name0)
        if val:
            val = os.path.abspath(val)
            dirname = os.path.dirname(val)
            filename = os.path.basename(val)
            dirname2 = '/execute_in_container/outputdir_{}'.format(name0)
            kwargs[name0] = dirname2 + '/' + filename
            singularity_opts.append('-B {}:{}'.format(dirname, dirname2))

    # Render kwargs as python source for the generated driver script.
    expanded_kwargs_list = []
    for key in kwargs:
        val = kwargs[key]
        if type(val) == str:
            val = "'{}'".format(val)
        expanded_kwargs_list.append('{}={}'.format(key, val))
    expanded_kwargs = ', '.join(expanded_kwargs_list)

    processor_source_fname = inspect.getsourcefile(proc)
    # FIX: validate before use — the original computed dirname/basename from
    # this value and only afterwards checked whether it was empty.
    if not processor_source_fname:
        raise Exception('inspect.getsourcefile() returned empty for processor.')
    processor_source_dirname = os.path.dirname(processor_source_fname)
    processor_source_basename = os.path.basename(processor_source_fname)
    processor_source_basename_noext = os.path.splitext(processor_source_basename)[0]
    singularity_opts.append(
        '-B {}:/execute_in_container/processor_source'.format(processor_source_dirname))

    # Code generation for the in-container driver.  str.replace is used
    # deliberately instead of str.format because the generated code itself
    # must be free to contain braces.
    code = """
from processor_source.{processor_source_basename_noext} import {processor_name}
def main():
 {processor_name}.execute({expanded_kwargs})
if __name__ == "__main__":
 main()
 """
    code = code.replace('{processor_source_basename_noext}', processor_source_basename_noext)
    code = code.replace('{processor_name}', proc.__name__)
    code = code.replace('{expanded_kwargs}', expanded_kwargs)
    _write_text_file(tempdir + '/execute_in_container.py', code)
    singularity_opts.append(
        '-B {}:/execute_in_container/execute_in_container.py'.format(
            tempdir + '/execute_in_container.py'))

    # Forward selected environment variables into the container.
    # FIX: use a getattr default so processors without ENVIRONMENT_VARIABLES
    # don't raise AttributeError; also avoid shadowing the builtin `list`.
    env_vars = []
    for v in (getattr(proc, 'ENVIRONMENT_VARIABLES', None) or []):
        val = os.environ.get(v, '')
        if val:
            env_vars.append('{}={}'.format(v, val))

    singularity_cmd = (
        'singularity exec --contain -e {} {} bash -c '
        '"KBUCKET_CACHE_DIR=/sha1-cache {} python '
        '/execute_in_container/execute_in_container.py"').format(
            ' '.join(singularity_opts), container, ' '.join(env_vars))
    retcode = _run_command_and_print_output(singularity_cmd)
    if retcode != 0:
        raise Exception('Processor in singularity returned a non-zero exit code')


def _run_command_and_print_output(command):
    """Run `command`, echoing its stdout/stderr line by line; return the exit code.

    NOTE(review): reading stdout and stderr alternately with blocking
    readline() can deadlock if one pipe fills while the other is silent —
    consider merging stderr into stdout or draining with threads. Left as-is
    to preserve behavior.
    """
    print('RUNNING: ' + command)
    with Popen(shlex.split(command), stdout=PIPE, stderr=PIPE) as process:
        while True:
            output_stdout = process.stdout.readline()
            output_stderr = process.stderr.readline()
            if (not output_stdout) and (not output_stderr) and (process.poll() is not None):
                break
            if output_stdout:
                print(output_stdout.decode())
            if output_stderr:
                print(output_stderr.decode())
        rc = process.poll()
    return rc


def execute(proc, _cache=True, _force_run=None, _container=None, **kwargs):
    """Execute processor class `proc` with the given inputs/outputs/parameters.

    kwargs supply every INPUT and OUTPUT by name plus PARAMETERS (optional
    ones fall back to their defaults).  When `_cache` is true, previously
    computed outputs are looked up in pairio/kbucket and reused unless
    `_force_run` (or the MLPROCESSORS_FORCE_RUN env var) forces a re-run.
    When `_container` is set, the processor runs inside that singularity
    container.  Returns a ProcessorExecuteOutput with `.outputs` filled in.
    """
    if _force_run is None:
        _force_run = os.environ.get('MLPROCESSORS_FORCE_RUN', '') == 'TRUE'

    print('::::::::::::::::::::::::::::: ' + proc.NAME)
    X = proc()  # processor instance
    ret = ProcessorExecuteOutput()  # we will return this object

    # Attach inputs, outputs and parameters to the instance.
    for input0 in proc.INPUTS:
        name0 = input0.name
        if name0 not in kwargs:
            raise Exception('Missing input: {}'.format(name0))
        setattr(X, name0, kwargs[name0])
    for output0 in proc.OUTPUTS:
        name0 = output0.name
        if name0 not in kwargs:
            raise Exception('Missing output: {}'.format(name0))
        setattr(X, name0, kwargs[name0])
    for param0 in proc.PARAMETERS:
        name0 = param0.name
        if name0 not in kwargs:
            if param0.optional:
                val0 = param0.default
            else:
                raise Exception('Missing required parameter: {}'.format(name0))
        else:
            val0 = kwargs[name0]
        setattr(X, name0, val0)

    if _cache:
        # Look up each output's signature in pairio; if all are present and
        # the backing files can be found, we may skip the run entirely.
        outputs_all_in_pairio = True
        output_signatures = dict()
        output_sha1s = dict()
        cache_collections = set()
        for output0 in proc.OUTPUTS:
            name0 = output0.name
            signature0 = compute_processor_job_output_signature(X, name0)
            output_signatures[name0] = signature0
            output_sha1, output_collection = pairio.get(signature0, return_collection=True)
            if output_sha1 is not None:
                print('Found output "{}" in cache collection: {}'.format(name0, output_collection))
                cache_collections.add(output_collection)
                output_sha1s[name0] = output_sha1
                # Re-set even on a hit so a locally-known result also gets
                # propagated remotely (and vice versa).
                pairio.set(signature0, output_sha1)
            else:
                outputs_all_in_pairio = False

        output_files_all_found = False
        output_files = dict()
        if outputs_all_in_pairio:
            output_files_all_found = True
            for output0 in proc.OUTPUTS:
                # FIX: assign name0 from output0 *before* reading it — the
                # original read getattr(X, name0) with the stale name0 left
                # over from the previous loop iteration.
                name0 = output0.name
                out0 = getattr(X, name0)
                if out0:
                    ext0 = _get_output_ext(out0)
                    sha1 = output_sha1s[name0]
                    output_files[name0] = 'sha1://' + sha1 + '/' + name0 + ext0
                    fname = kbucket.findFile(sha1=sha1)
                    if not fname:
                        output_files_all_found = False
        if outputs_all_in_pairio and (not output_files_all_found):
            print('Found job in cache, but not all output files exist.')

        if output_files_all_found:
            if not _force_run:
                print('Using outputs from cache:', ','.join(list(cache_collections)))
                for output0 in proc.OUTPUTS:
                    name0 = output0.name
                    fname1 = output_files[name0]
                    fname2 = getattr(X, name0)
                    if type(fname2) == str:
                        # Caller asked for a concrete file: realize the cached
                        # copy and place it at the requested path.
                        fname1 = kbucket.realizeFile(fname1)
                        if fname1 != fname2:
                            if os.path.exists(fname2):
                                os.remove(fname2)
                            shutil.copyfile(fname1, fname2)
                        ret.outputs[name0] = fname2
                    else:
                        ret.outputs[name0] = fname1
                return ret
            else:
                print('Found outputs in cache, but forcing run...')

    # Realize every input file locally (directories are passed through).
    for input0 in proc.INPUTS:
        name0 = input0.name
        if hasattr(X, name0):
            val0 = getattr(X, name0)
            if input0.directory:
                val1 = val0
            else:
                val1 = kbucket.realizeFile(val0)
            if not val1:
                raise Exception('Unable to realize input file {}: {}'.format(name0, val0))
            setattr(X, name0, val1)

    # Outputs given as dict specs (not paths) get temporary files.
    temporary_output_files = set()
    for output0 in proc.OUTPUTS:
        name0 = output0.name
        val0 = getattr(X, name0)
        job_signature0 = compute_processor_job_output_signature(X, None)
        if type(val0) != str:
            # presumably val0 is an output spec dict with an 'ext' key — TODO confirm
            fname0 = job_signature0 + '_' + name0 + val0['ext']
            tmp_fname = create_temporary_file(fname0)
            temporary_output_files.add(tmp_fname)
            setattr(X, name0, tmp_fname)

    # Now it is time to execute.
    if not _container:
        try:
            print('MLPR EXECUTING::::::::::::::::::::::::::::: ' + proc.NAME)
            X.run()
            print('MLPR FINISHED ::::::::::::::::::::::::::::: ' + proc.NAME)
        except:  # bare on purpose: clean temp files on *any* failure, then re-raise
            print('Problem executing {}. Cleaning up files.'.format(proc.NAME))
            for fname in temporary_output_files:
                if os.path.exists(fname):
                    os.remove(fname)
            raise
    else:
        # In a container.  FIX: try/finally replaces the original's duplicated
        # rmtree in both the except handler and the fallthrough path.
        tempdir = tempfile.mkdtemp()
        try:
            _execute_in_container(proc, X, container=_container, tempdir=tempdir, **kwargs)
        finally:
            shutil.rmtree(tempdir)

    for output0 in proc.OUTPUTS:
        name0 = output0.name
        output_fname = getattr(X, name0)
        if output_fname in temporary_output_files:
            output_fname = kbucket.moveFileToCache(output_fname)
        ret.outputs[name0] = output_fname
        if _cache:
            output_sha1 = kbucket.computeFileSha1(output_fname)
            signature0 = output_signatures[name0]
            # NOTE(review): the scraped snippet is truncated here ("...368");
            # the remainder presumably records signature0 -> output_sha1 in
            # pairio and returns ret — not recoverable from this page.

Full Screen

Full Screen

test_paasta_execute_docker_command.py

Source:test_paasta_execute_docker_command.py Github

copy

Full Screen

import pytest
from paasta_tools.paasta_execute_docker_command import execute_in_container
from paasta_tools.paasta_execute_docker_command import main
from paasta_tools.paasta_execute_docker_command import TimeoutException


def test_execute_in_container():
    """A fresh exec is created with /bin/sh -c and (output, exit code) is returned."""
    fake_container_id = "fake_container_id"
    fake_return_code = 0
    fake_output = "fake_output"
    fake_command = "fake_cmd"
    mock_docker_client = mock.MagicMock(spec_set=docker.Client)
    mock_docker_client.exec_start.return_value = fake_output
    mock_docker_client.exec_inspect.return_value = {"ExitCode": fake_return_code}
    assert execute_in_container(
        mock_docker_client, fake_container_id, fake_command, 1
    ) == (fake_output, fake_return_code)
    expected_cmd = ["/bin/sh", "-c", fake_command]
    mock_docker_client.exec_create.assert_called_once_with(
        fake_container_id, expected_cmd
    )


def test_execute_in_container_reuses_exec():
    """An existing exec instance for the same command is reused (no exec_create)."""
    fake_container_id = "fake_container_id"
    fake_execid = "fake_execid"
    fake_return_code = 0
    fake_output = "fake_output"
    fake_command = "fake_cmd"
    mock_docker_client = mock.MagicMock(spec_set=docker.Client)
    mock_docker_client.inspect_container.return_value = {"ExecIDs": [fake_execid]}
    mock_docker_client.exec_start.return_value = fake_output
    mock_docker_client.exec_inspect.return_value = {
        "ExitCode": fake_return_code,
        "ProcessConfig": {"entrypoint": "/bin/sh", "arguments": ["-c", fake_command]},
    }
    assert execute_in_container(
        mock_docker_client, fake_container_id, fake_command, 1
    ) == (fake_output, fake_return_code)
    assert mock_docker_client.exec_create.call_count == 0
    mock_docker_client.exec_start.assert_called_once_with(fake_execid, stream=False)


def test_execute_in_container_reuses_only_valid_exec():
    """Only an exec whose ProcessConfig matches the requested command is reused."""
    fake_container_id = "fake_container_id"
    fake_execid = "fake_execid"
    fake_return_code = 0
    fake_output = "fake_output"
    fake_command = "fake_cmd"
    bad_exec = {
        "ExitCode": fake_return_code,
        "ProcessConfig": {
            "entrypoint": "/bin/sh",
            "arguments": ["-c", "some_other_command"],
        },
    }
    good_exec = {
        "ExitCode": fake_return_code,
        "ProcessConfig": {"entrypoint": "/bin/sh", "arguments": ["-c", fake_command]},
    }
    mock_docker_client = mock.MagicMock(spec_set=docker.Client)
    mock_docker_client.inspect_container.return_value = {
        "ExecIDs": ["fake_other_exec", fake_execid, "fake_other_exec"]
    }
    mock_docker_client.exec_start.return_value = fake_output
    # the last side effect is used to check the exit code of the command
    mock_docker_client.exec_inspect.side_effect = [
        bad_exec,
        good_exec,
        bad_exec,
        good_exec,
    ]
    assert execute_in_container(
        mock_docker_client, fake_container_id, fake_command, 1
    ) == (fake_output, fake_return_code)
    assert mock_docker_client.exec_create.call_count == 0
    mock_docker_client.exec_start.assert_called_once_with(fake_execid, stream=False)


# NOTE(review): the scraped snippet cuts off part-way through test_main
# (inside its stack of mock.patch context managers, "...96"); the remainder
# is not recoverable from this page and is therefore omitted rather than
# guessed at.

Full Screen

Full Screen

fuel_actions.py

Source:fuel_actions.py Github

copy

Full Screen

# NOTE(review): these classes appear to be nested inside an enclosing class
# (the super() calls reference FuelActions.Nailgun / FuelActions.Postgres)
# whose header lies outside the scraped snippet — confirm against the full
# fuel_actions.py before reuse.
class BaseActions(object):
    """Run shell commands inside Fuel master-node docker containers via dockerctl."""

    def __init__(self, admin_remote):
        self.admin_remote = admin_remote
        self.container = None  # subclasses set their default container name

    def execute_in_container(self, command, container=None, exit_code=None,
                             stdin=None):
        """Execute `command` inside `container` (default: self.container).

        If `stdin` is given it is piped into the command via echo; if
        `exit_code` is given, the actual exit code is asserted to match it.
        Returns the command's stdout joined and stripped.
        """
        if not container:
            container = self.container
        cmd = 'dockerctl shell {0} {1}'.format(container, command)
        if stdin is not None:
            cmd = 'echo "{0}" | {1}'.format(stdin, cmd)
        result = self.admin_remote.execute(cmd)
        if exit_code is not None:
            assert_equal(exit_code,
                         result['exit_code'],
                         ('Command {cmd} returned exit code "{e}", but '
                          'expected "{c}". Output: {out}; {err} ').format(
                             cmd=cmd,
                             e=result['exit_code'],
                             c=exit_code,
                             out=result['stdout'],
                             err=result['stderr']
                         ))
        return ''.join(result['stdout']).strip()


class Nailgun(BaseActions):
    """Actions executed inside the 'nailgun' container."""

    def __init__(self, admin_remote):
        super(FuelActions.Nailgun, self).__init__(admin_remote)
        self.container = 'nailgun'

    def update_nailgun_settings_once(self, settings):
        # temporary change Nailgun settings (until next container restart)
        cfg_file = '/etc/nailgun/settings.yaml'
        # SECURITY NOTE(review): yaml.load without an explicit Loader can
        # construct arbitrary objects; yaml.safe_load would be safer here.
        ng_settings = yaml.load(self.execute_in_container(
            'cat {0}'.format(cfg_file), exit_code=0))
        ng_settings.update(settings)
        logger.debug('Uploading new nailgun settings: {}'.format(
            ng_settings))
        self.execute_in_container('tee {0}'.format(cfg_file),
                                  stdin=yaml.dump(ng_settings),
                                  exit_code=0)

    def set_collector_address(self, host, port, ssl=False):
        """Point Nailgun's COLLECTOR_*_URL settings at host:port (http/https)."""
        cmd = ("awk '/COLLECTOR.*URL/' /usr/lib/python2.6"
               "/site-packages/nailgun/settings.yaml")
        protocol = 'http' if not ssl else 'https'
        parameters = {}
        for p in self.execute_in_container(cmd, exit_code=0).split('\n'):
            # [1:-1] strips the surrounding quotes from the YAML value
            parameters[p.split(': ')[0]] = re.sub(
                r'https?://\{collector_server\}',
                '{0}://{1}:{2}'.format(protocol, host, port),
                p.split(': ')[1])[1:-1]
        logger.debug('Custom collector parameters: {0}'.format(parameters))
        self.update_nailgun_settings_once(parameters)
        if ssl:
            # if test collector server doesn't have trusted SSL cert
            # installed we have to use this hack in order to disable cert
            # verification and allow using of self-signed SSL certificate
            cmd = ("sed -i '/elf.verify/ s/True/False/' /usr/lib/python2.6"
                   "/site-packages/requests/sessions.py")
            self.execute_in_container(cmd, exit_code=0)

    def force_fuel_stats_sending(self):
        """Restart statsenderd with fresh logs and assert no ERROR was logged."""
        log_file = '/var/log/nailgun/statsenderd.log'
        # Rotate logs on restart in order to get rid of old errors
        cmd = 'mv {0}{{,.backup_$(date +%s)}}'.format(log_file)
        self.execute_in_container(cmd)
        cmd = 'supervisorctl restart statsenderd'
        self.execute_in_container(cmd, exit_code=0)
        cmd = 'grep -sw "ERROR" {0}'.format(log_file)
        try:
            # grep exit code 1 == no ERROR lines found (success for us)
            self.execute_in_container(cmd, exit_code=1)
        except AssertionError:
            logger.error(("Fuel stats were sent with errors! Check its log"
                          "s in {0} for details.").format(log_file))
            raise


class Postgres(BaseActions):
    """Actions executed inside the 'postgres' container."""

    def __init__(self, admin_remote):
        super(FuelActions.Postgres, self).__init__(admin_remote)
        self.container = 'postgres'

    def run_query(self, db, query):
        """Run a single SQL `query` against database `db` as the postgres user."""
        cmd = "su - postgres -c 'psql -qt -d {0} -c \"{1};\"'".format(
            db, query)
        return self.execute_in_container(cmd, exit_code=0)

    def action_logs_contain(self, action, group=False,
                            table='action_logs'):
        """Return True if at least one log record matches `action` (by name or group)."""
        logger.info("Checking that '{0}' action was logged..".format(
            action))
        log_filter = "action_name" if not group else "action_group"
        # the '\"'\"' runs survive the nested su/psql shell quoting
        q = "select id from {0} where {1} = '\"'\"'{2}'\"'\"'".format(
            table, log_filter, action)
        logs = [i.strip() for i in self.run_query('nailgun', q).split('\n')
                if re.compile(r'\d+').match(i.strip())]
        logger.info("Found log records with ids: {0}".format(logs))
        return len(logs) > 0

    def count_sent_action_logs(self, table='action_logs'):
        """Return how many rows in `table` are flagged is_sent."""
        q = "select count(id) from {0} where is_sent = True".format(table)
        return int(self.run_query('nailgun', q))

Full Screen

Full Screen

Automation Testing Tutorials

Learn to execute automation testing from scratch with the LambdaTest Learning Hub — right from setting up the prerequisites to run your first automation test, to following best practices and diving deeper into advanced test scenarios. The LambdaTest Learning Hub compiles a list of step-by-step guides to help you become proficient with different test automation frameworks, e.g. Selenium, Cypress, and TestNG.

LambdaTest Learning Hubs:

YouTube

You can also refer to the video tutorials on the LambdaTest YouTube channel to get step-by-step demonstrations from industry experts.

Run localstack automation tests on LambdaTest cloud grid

Perform automation testing on 3000+ real desktop and mobile devices online.

Try LambdaTest Now !!

Get 100 automation test minutes FREE!!

Next-Gen App & Browser Testing Cloud

Was this article helpful?

Helpful

Not Helpful