How to use the put_object_retention method in LocalStack

Best Python code snippet using localstack_python

test_s3_object_lock_x.py

Source: test_s3_object_lock_x.py (GitHub)

copy

Full Screen

...225 key = 'file1'226 response = client.put_object(Bucket=bucket_name, Body='abc', Key=key)227 version_id = response['VersionId']228 retention = {'Mode': 'GOVERNANCE', 'RetainUntilDate': datetime.datetime(2030, 1, 1, tzinfo=pytz.UTC)}229 response = client.put_object_retention(Bucket=bucket_name, Key=key, Retention=retention)230 self.eq(response['ResponseMetadata']['HTTPStatusCode'], 200)231 client.delete_object(Bucket=bucket_name, Key=key, VersionId=version_id, BypassGovernanceRetention=True)232 def test_object_lock_put_obj_retention_invalid_bucket(self, s3cfg_global_unique):233 """234 (operation='Test put object retention with bucket object lock not enabled')235 (assertion='fails')236 """237 client = get_client(s3cfg_global_unique)238 bucket_name = self.get_new_bucket_name(s3cfg_global_unique)239 client.create_bucket(Bucket=bucket_name)240 key = 'file1'241 client.put_object(Bucket=bucket_name, Body='abc', Key=key)242 retention = {'Mode': 'GOVERNANCE', 'RetainUntilDate': datetime.datetime(2030, 1, 1, tzinfo=pytz.UTC)}243 e = assert_raises(ClientError, client.put_object_retention, Bucket=bucket_name, Key=key, Retention=retention)244 status, error_code = self.get_status_and_error_code(e.response)245 self.eq(status, 400)246 self.eq(error_code, 'InvalidRequest')247 def test_object_lock_put_obj_retention_invalid_mode(self, s3cfg_global_unique):248 """249 (operation='Test put object retention with invalid mode')250 (assertion='fails')251 """252 client = get_client(s3cfg_global_unique)253 bucket_name = self.get_new_bucket_name(s3cfg_global_unique)254 client.create_bucket(Bucket=bucket_name, ObjectLockEnabledForBucket=True)255 key = 'file1'256 client.put_object(Bucket=bucket_name, Body='abc', Key=key)257 retention = {'Mode': 'governance', 'RetainUntilDate': datetime.datetime(2030, 1, 1, tzinfo=pytz.UTC)}258 e = assert_raises(ClientError, client.put_object_retention, Bucket=bucket_name, Key=key, Retention=retention)259 status, error_code = 
self.get_status_and_error_code(e.response)260 self.eq(status, 400)261 self.eq(error_code, 'MalformedXML')262 retention = {'Mode': 'abc', 'RetainUntilDate': datetime.datetime(2030, 1, 1, tzinfo=pytz.UTC)}263 e = assert_raises(ClientError, client.put_object_retention, Bucket=bucket_name, Key=key, Retention=retention)264 status, error_code = self.get_status_and_error_code(e.response)265 self.eq(status, 400)266 self.eq(error_code, 'MalformedXML')267 def test_object_lock_get_obj_retention(self, s3cfg_global_unique):268 """269 (operation='Test get object retention')270 (assertion='success')271 """272 client = get_client(s3cfg_global_unique)273 bucket_name = self.get_new_bucket_name(s3cfg_global_unique)274 client.create_bucket(Bucket=bucket_name, ObjectLockEnabledForBucket=True)275 key = 'file1'276 response = client.put_object(Bucket=bucket_name, Body='abc', Key=key)277 version_id = response['VersionId']278 retention = {'Mode': 'GOVERNANCE', 'RetainUntilDate': datetime.datetime(2030, 1, 1, tzinfo=pytz.UTC)}279 client.put_object_retention(Bucket=bucket_name, Key=key, Retention=retention)280 response = client.get_object_retention(Bucket=bucket_name, Key=key)281 self.eq(response['Retention'], retention)282 client.delete_object(Bucket=bucket_name, Key=key, VersionId=version_id, BypassGovernanceRetention=True)283 def test_object_lock_get_obj_retention_iso8601(self, s3cfg_global_unique):284 """285 (operation='Test object retention date formatting')286 (assertion='success')287 """288 client = get_client(s3cfg_global_unique)289 bucket_name = self.get_new_bucket_name(s3cfg_global_unique)290 client.create_bucket(Bucket=bucket_name, ObjectLockEnabledForBucket=True)291 key = 'file1'292 response = client.put_object(Bucket=bucket_name, Body='abc', Key=key)293 version_id = response['VersionId']294 date = datetime.datetime.today() + datetime.timedelta(days=365)295 retention = {'Mode': 'GOVERNANCE', 'RetainUntilDate': date}296 http_response = None297 def get_http_response(**kwargs):298 
nonlocal http_response299 http_response = kwargs['http_response'].__dict__300 client.put_object_retention(Bucket=bucket_name, Key=key, Retention=retention)301 client.meta.events.register('after-call.s3.HeadObject', get_http_response)302 client.head_object(Bucket=bucket_name, VersionId=version_id, Key=key)303 retain_date = http_response['headers']['x-amz-object-lock-retain-until-date']304 isodate.parse_datetime(retain_date)305 client.delete_object(Bucket=bucket_name, Key=key, VersionId=version_id, BypassGovernanceRetention=True)306 def test_object_lock_get_obj_retention_invalid_bucket(self, s3cfg_global_unique):307 """308 (operation='Test get object retention with invalid bucket')309 (assertion='fails')310 """311 client = get_client(s3cfg_global_unique)312 bucket_name = self.get_new_bucket_name(s3cfg_global_unique)313 client.create_bucket(Bucket=bucket_name)314 key = 'file1'315 client.put_object(Bucket=bucket_name, Body='abc', Key=key)316 e = assert_raises(ClientError, client.get_object_retention, Bucket=bucket_name, Key=key)317 status, error_code = self.get_status_and_error_code(e.response)318 self.eq(status, 400)319 self.eq(error_code, 'InvalidRequest')320 def test_object_lock_put_obj_retention_version_id(self, s3cfg_global_unique):321 """322 (operation='Test put object retention with version id')323 (assertion='success')324 """325 client = get_client(s3cfg_global_unique)326 bucket_name = self.get_new_bucket_name(s3cfg_global_unique)327 client.create_bucket(Bucket=bucket_name, ObjectLockEnabledForBucket=True)328 key = 'file1'329 client.put_object(Bucket=bucket_name, Body='abc', Key=key)330 response = client.put_object(Bucket=bucket_name, Body='abc', Key=key)331 version_id = response['VersionId']332 retention = {'Mode': 'GOVERNANCE', 'RetainUntilDate': datetime.datetime(2030, 1, 1, tzinfo=pytz.UTC)}333 client.put_object_retention(Bucket=bucket_name, Key=key, VersionId=version_id, Retention=retention)334 response = client.get_object_retention(Bucket=bucket_name, 
Key=key, VersionId=version_id)335 self.eq(response['Retention'], retention)336 client.delete_object(Bucket=bucket_name, Key=key, VersionId=version_id, BypassGovernanceRetention=True)337 def test_object_lock_put_obj_retention_override_default_retention(self, s3cfg_global_unique):338 """339 (operation='Test put object retention to override default retention')340 (assertion='success')341 """342 client = get_client(s3cfg_global_unique)343 bucket_name = self.get_new_bucket_name(s3cfg_global_unique)344 client.create_bucket(Bucket=bucket_name, ObjectLockEnabledForBucket=True)345 conf = {'ObjectLockEnabled': 'Enabled',346 'Rule': {347 'DefaultRetention': {348 'Mode': 'GOVERNANCE',349 'Days': 1350 }351 }}352 client.put_object_lock_configuration(353 Bucket=bucket_name,354 ObjectLockConfiguration=conf)355 key = 'file1'356 response = client.put_object(Bucket=bucket_name, Body='abc', Key=key)357 version_id = response['VersionId']358 retention = {'Mode': 'GOVERNANCE', 'RetainUntilDate': datetime.datetime(2030, 1, 1, tzinfo=pytz.UTC)}359 client.put_object_retention(Bucket=bucket_name, Key=key, Retention=retention)360 response = client.get_object_retention(Bucket=bucket_name, Key=key)361 self.eq(response['Retention'], retention)362 client.delete_object(Bucket=bucket_name, Key=key, VersionId=version_id, BypassGovernanceRetention=True)363 def test_object_lock_put_obj_retention_increase_period(self, s3cfg_global_unique):364 """365 (operation='Test put object retention to increase retention period')366 (assertion='success')367 """368 client = get_client(s3cfg_global_unique)369 bucket_name = self.get_new_bucket_name(s3cfg_global_unique)370 client.create_bucket(Bucket=bucket_name, ObjectLockEnabledForBucket=True)371 key = 'file1'372 response = client.put_object(Bucket=bucket_name, Body='abc', Key=key)373 version_id = response['VersionId']374 retention1 = {'Mode': 'GOVERNANCE', 'RetainUntilDate': datetime.datetime(2030, 1, 1, tzinfo=pytz.UTC)}375 
client.put_object_retention(Bucket=bucket_name, Key=key, Retention=retention1)376 retention2 = {'Mode': 'GOVERNANCE', 'RetainUntilDate': datetime.datetime(2030, 1, 3, tzinfo=pytz.UTC)}377 client.put_object_retention(Bucket=bucket_name, Key=key, Retention=retention2)378 response = client.get_object_retention(Bucket=bucket_name, Key=key)379 self.eq(response['Retention'], retention2)380 client.delete_object(Bucket=bucket_name, Key=key, VersionId=version_id, BypassGovernanceRetention=True)381 def test_object_lock_put_obj_retention_shorten_period(self, s3cfg_global_unique):382 """383 (operation='Test put object retention to shorten period')384 (assertion='fails')385 """386 client = get_client(s3cfg_global_unique)387 bucket_name = self.get_new_bucket_name(s3cfg_global_unique)388 client.create_bucket(Bucket=bucket_name, ObjectLockEnabledForBucket=True)389 key = 'file1'390 response = client.put_object(Bucket=bucket_name, Body='abc', Key=key)391 version_id = response['VersionId']392 retention = {'Mode': 'GOVERNANCE', 'RetainUntilDate': datetime.datetime(2030, 1, 3, tzinfo=pytz.UTC)}393 client.put_object_retention(Bucket=bucket_name, Key=key, Retention=retention)394 retention = {'Mode': 'GOVERNANCE', 'RetainUntilDate': datetime.datetime(2030, 1, 1, tzinfo=pytz.UTC)}395 e = assert_raises(ClientError, client.put_object_retention, Bucket=bucket_name, Key=key, Retention=retention)396 status, error_code = self.get_status_and_error_code(e.response)397 self.eq(status, 403)398 self.eq(error_code, 'AccessDenied')399 client.delete_object(Bucket=bucket_name, Key=key, VersionId=version_id, BypassGovernanceRetention=True)400 def test_object_lock_put_obj_retention_shorten_period_bypass(self, s3cfg_global_unique):401 """402 (operation='Test put object retention to shorten period with bypass header')403 (assertion='success')404 """405 client = get_client(s3cfg_global_unique)406 bucket_name = self.get_new_bucket_name(s3cfg_global_unique)407 client.create_bucket(Bucket=bucket_name, 
ObjectLockEnabledForBucket=True)408 key = 'file1'409 response = client.put_object(Bucket=bucket_name, Body='abc', Key=key)410 version_id = response['VersionId']411 retention = {'Mode': 'GOVERNANCE', 'RetainUntilDate': datetime.datetime(2030, 1, 3, tzinfo=pytz.UTC)}412 client.put_object_retention(Bucket=bucket_name, Key=key, Retention=retention)413 retention = {'Mode': 'GOVERNANCE', 'RetainUntilDate': datetime.datetime(2030, 1, 1, tzinfo=pytz.UTC)}414 client.put_object_retention(Bucket=bucket_name, Key=key, Retention=retention, BypassGovernanceRetention=True)415 response = client.get_object_retention(Bucket=bucket_name, Key=key)416 self.eq(response['Retention'], retention)417 client.delete_object(Bucket=bucket_name, Key=key, VersionId=version_id, BypassGovernanceRetention=True)418 def test_object_lock_delete_object_with_retention(self, s3cfg_global_unique):419 """420 (operation='Test delete object with retention')421 (assertion='retention period make effects')422 """423 client = get_client(s3cfg_global_unique)424 bucket_name = self.get_new_bucket_name(s3cfg_global_unique)425 client.create_bucket(Bucket=bucket_name, ObjectLockEnabledForBucket=True)426 key = 'file1'427 response = client.put_object(Bucket=bucket_name, Body='abc', Key=key)428 retention = {'Mode': 'GOVERNANCE', 'RetainUntilDate': datetime.datetime(2030, 1, 1, tzinfo=pytz.UTC)}429 client.put_object_retention(Bucket=bucket_name, Key=key, Retention=retention)430 e = assert_raises(ClientError, client.delete_object, Bucket=bucket_name, Key=key,431 VersionId=response['VersionId'])432 status, error_code = self.get_status_and_error_code(e.response)433 self.eq(status, 403)434 self.eq(error_code, 'AccessDenied')435 response = client.delete_object(Bucket=bucket_name, Key=key, VersionId=response['VersionId'],436 BypassGovernanceRetention=True)437 self.eq(response['ResponseMetadata']['HTTPStatusCode'], 204)438 def test_object_lock_multi_delete_object_with_retention(self, s3cfg_global_unique):439 """440 
(operation='Test multi-delete object with retention')441 (assertion='retention period make effects')442 """443 client = get_client(s3cfg_global_unique)444 bucket_name = self.get_new_bucket_name(s3cfg_global_unique)445 client.create_bucket(Bucket=bucket_name, ObjectLockEnabledForBucket=True)446 key1 = 'file1'447 key2 = 'file2'448 response1 = client.put_object(Bucket=bucket_name, Body='abc', Key=key1)449 response2 = client.put_object(Bucket=bucket_name, Body='abc', Key=key2)450 version_id1 = response1['VersionId']451 version_id2 = response2['VersionId']452 # key1 is under retention, but key2 isn't.453 retention = {'Mode': 'GOVERNANCE', 'RetainUntilDate': datetime.datetime(2030, 1, 1, tzinfo=pytz.UTC)}454 client.put_object_retention(Bucket=bucket_name, Key=key1, Retention=retention)455 delete_response = client.delete_objects(456 Bucket=bucket_name,457 Delete={458 'Objects': [459 {460 'Key': key1,461 'VersionId': version_id1462 },463 {464 'Key': key2,465 'VersionId': version_id2466 }467 ]468 }469 )470 self.eq(len(delete_response['Deleted']), 1)471 self.eq(len(delete_response['Errors']), 1)472 failed_object = delete_response['Errors'][0]473 self.eq(failed_object['Code'], 'AccessDenied')474 self.eq(failed_object['Key'], key1)475 self.eq(failed_object['VersionId'], version_id1)476 deleted_object = delete_response['Deleted'][0]477 self.eq(deleted_object['Key'], key2)478 self.eq(deleted_object['VersionId'], version_id2)479 delete_response = client.delete_objects(480 Bucket=bucket_name,481 Delete={482 'Objects': [483 {484 'Key': key1,485 'VersionId': version_id1486 }487 ]488 },489 BypassGovernanceRetention=True490 )491 assert (('Errors' not in delete_response) or (len(delete_response['Errors']) == 0))492 self.eq(len(delete_response['Deleted']), 1)493 deleted_object = delete_response['Deleted'][0]494 self.eq(deleted_object['Key'], key1)495 self.eq(deleted_object['VersionId'], version_id1)496 def test_object_lock_put_legal_hold(self, s3cfg_global_unique):497 """498 
(operation='Test put legal hold')499 (assertion='success')500 """501 client = get_client(s3cfg_global_unique)502 bucket_name = self.get_new_bucket_name(s3cfg_global_unique)503 client.create_bucket(Bucket=bucket_name, ObjectLockEnabledForBucket=True)504 key = 'file1'505 client.put_object(Bucket=bucket_name, Body='abc', Key=key)506 legal_hold = {'Status': 'ON'}507 response = client.put_object_legal_hold(Bucket=bucket_name, Key=key, LegalHold=legal_hold)508 self.eq(response['ResponseMetadata']['HTTPStatusCode'], 200)509 response = client.put_object_legal_hold(Bucket=bucket_name, Key=key, LegalHold={'Status': 'OFF'})510 self.eq(response['ResponseMetadata']['HTTPStatusCode'], 200)511 def test_object_lock_put_legal_hold_invalid_bucket(self, s3cfg_global_unique):512 """513 (operation='Test put legal hold with invalid bucket')514 (assertion='fails')515 """516 client = get_client(s3cfg_global_unique)517 bucket_name = self.get_new_bucket_name(s3cfg_global_unique)518 client.create_bucket(Bucket=bucket_name)519 key = 'file1'520 client.put_object(Bucket=bucket_name, Body='abc', Key=key)521 legal_hold = {'Status': 'ON'}522 e = assert_raises(ClientError, client.put_object_legal_hold, Bucket=bucket_name, Key=key, LegalHold=legal_hold)523 status, error_code = self.get_status_and_error_code(e.response)524 self.eq(status, 400)525 self.eq(error_code, 'InvalidRequest')526 def test_object_lock_put_legal_hold_invalid_status(self, s3cfg_global_unique):527 """528 (operation='Test put legal hold with invalid status')529 (assertion='fails')530 """531 client = get_client(s3cfg_global_unique)532 bucket_name = self.get_new_bucket_name(s3cfg_global_unique)533 client.create_bucket(Bucket=bucket_name, ObjectLockEnabledForBucket=True)534 key = 'file1'535 client.put_object(Bucket=bucket_name, Body='abc', Key=key)536 legal_hold = {'Status': 'abc'}537 e = assert_raises(ClientError, client.put_object_legal_hold, Bucket=bucket_name, Key=key, LegalHold=legal_hold)538 status, error_code = 
self.get_status_and_error_code(e.response)539 self.eq(status, 400)540 self.eq(error_code, 'MalformedXML')541 def test_object_lock_get_legal_hold(self, s3cfg_global_unique):542 """543 (operation='Test get legal hold')544 (assertion='success')545 """546 client = get_client(s3cfg_global_unique)547 bucket_name = self.get_new_bucket_name(s3cfg_global_unique)548 client.create_bucket(Bucket=bucket_name, ObjectLockEnabledForBucket=True)549 key = 'file1'550 client.put_object(Bucket=bucket_name, Body='abc', Key=key)551 legal_hold = {'Status': 'ON'}552 client.put_object_legal_hold(Bucket=bucket_name, Key=key, LegalHold=legal_hold)553 response = client.get_object_legal_hold(Bucket=bucket_name, Key=key)554 self.eq(response['LegalHold'], legal_hold)555 legal_hold_off = {'Status': 'OFF'}556 client.put_object_legal_hold(Bucket=bucket_name, Key=key, LegalHold=legal_hold_off)557 response = client.get_object_legal_hold(Bucket=bucket_name, Key=key)558 self.eq(response['LegalHold'], legal_hold_off)559 def test_object_lock_get_legal_hold_invalid_bucket(self, s3cfg_global_unique):560 """561 (operation='Test get legal hold with invalid bucket')562 (assertion='fails')563 """564 client = get_client(s3cfg_global_unique)565 bucket_name = self.get_new_bucket_name(s3cfg_global_unique)566 client.create_bucket(Bucket=bucket_name)567 key = 'file1'568 client.put_object(Bucket=bucket_name, Body='abc', Key=key)569 e = assert_raises(ClientError, client.get_object_legal_hold, Bucket=bucket_name, Key=key)570 status, error_code = self.get_status_and_error_code(e.response)571 self.eq(status, 400)572 self.eq(error_code, 'InvalidRequest')573 def test_object_lock_delete_object_with_legal_hold_on(self, s3cfg_global_unique):574 """575 (operation='Test delete object with legal hold on')576 (assertion='fails')577 """578 client = get_client(s3cfg_global_unique)579 bucket_name = self.get_new_bucket_name(s3cfg_global_unique)580 client.create_bucket(Bucket=bucket_name, ObjectLockEnabledForBucket=True)581 key = 
'file1'582 response = client.put_object(Bucket=bucket_name, Body='abc', Key=key)583 client.put_object_legal_hold(Bucket=bucket_name, Key=key, LegalHold={'Status': 'ON'})584 e = assert_raises(ClientError, client.delete_object, Bucket=bucket_name, Key=key,585 VersionId=response['VersionId'])586 status, error_code = self.get_status_and_error_code(e.response)587 self.eq(status, 403)588 self.eq(error_code, 'AccessDenied')589 client.put_object_legal_hold(Bucket=bucket_name, Key=key, LegalHold={'Status': 'OFF'})590 def test_object_lock_delete_object_with_legal_hold_off(self, s3cfg_global_unique):591 """592 (operation='Test delete object with legal hold off')593 (assertion='fails')594 """595 client = get_client(s3cfg_global_unique)596 bucket_name = self.get_new_bucket_name(s3cfg_global_unique)597 client.create_bucket(Bucket=bucket_name, ObjectLockEnabledForBucket=True)598 key = 'file1'599 response = client.put_object(Bucket=bucket_name, Body='abc', Key=key)600 client.put_object_legal_hold(Bucket=bucket_name, Key=key, LegalHold={'Status': 'OFF'})601 response = client.delete_object(Bucket=bucket_name, Key=key, VersionId=response['VersionId'])602 self.eq(response['ResponseMetadata']['HTTPStatusCode'], 204)603 def test_object_lock_get_obj_metadata(self, s3cfg_global_unique):604 """605 (operation='Test get object metadata')606 (assertion='success')607 """608 client = get_client(s3cfg_global_unique)609 bucket_name = self.get_new_bucket_name(s3cfg_global_unique)610 client.create_bucket(Bucket=bucket_name, ObjectLockEnabledForBucket=True)611 key = 'file1'612 client.put_object(Bucket=bucket_name, Body='abc', Key=key)613 legal_hold = {'Status': 'ON'}614 client.put_object_legal_hold(Bucket=bucket_name, Key=key, LegalHold=legal_hold)615 retention = {'Mode': 'GOVERNANCE', 'RetainUntilDate': datetime.datetime(2030, 1, 1, tzinfo=pytz.UTC)}616 client.put_object_retention(Bucket=bucket_name, Key=key, Retention=retention)617 response = client.head_object(Bucket=bucket_name, Key=key)618 
self.eq(response['ObjectLockMode'], retention['Mode'])619 self.eq(response['ObjectLockRetainUntilDate'], retention['RetainUntilDate'])620 self.eq(response['ObjectLockLegalHoldStatus'], legal_hold['Status'])621 client.put_object_legal_hold(Bucket=bucket_name, Key=key, LegalHold={'Status': 'OFF'})622 client.delete_object(Bucket=bucket_name, Key=key, VersionId=response['VersionId'],623 BypassGovernanceRetention=True)624 def test_object_lock_uploading_obj(self, s3cfg_global_unique):625 """626 (operation='Test put legal hold and retention when uploading object')627 (assertion='success')628 """629 client = get_client(s3cfg_global_unique)630 bucket_name = self.get_new_bucket_name(s3cfg_global_unique)631 client.create_bucket(Bucket=bucket_name, ObjectLockEnabledForBucket=True)632 key = 'file1'633 client.put_object(Bucket=bucket_name, Body='abc', Key=key, ObjectLockMode='GOVERNANCE',634 ObjectLockRetainUntilDate=datetime.datetime(2030, 1, 1, tzinfo=pytz.UTC),635 ObjectLockLegalHoldStatus='ON')636 response = client.head_object(Bucket=bucket_name, Key=key)637 self.eq(response['ObjectLockMode'], 'GOVERNANCE')638 self.eq(response['ObjectLockRetainUntilDate'], datetime.datetime(2030, 1, 1, tzinfo=pytz.UTC))639 self.eq(response['ObjectLockLegalHoldStatus'], 'ON')640 client.put_object_legal_hold(Bucket=bucket_name, Key=key, LegalHold={'Status': 'OFF'})641 client.delete_object(Bucket=bucket_name, Key=key, VersionId=response['VersionId'],642 BypassGovernanceRetention=True)643 def test_object_lock_changing_mode_from_governance_with_bypass(self, s3cfg_global_unique):644 """645 (operation='Test changing object retention mode from GOVERNANCE to COMPLIANCE with bypass')646 (assertion='succeeds')647 """648 bucket_name = self.get_new_bucket_name(s3cfg_global_unique)649 client = get_client(s3cfg_global_unique)650 key = 'file1'651 client.create_bucket(Bucket=bucket_name, ObjectLockEnabledForBucket=True)652 # upload object with mode=GOVERNANCE653 retain_until = datetime.datetime.now(pytz.utc) 
+ datetime.timedelta(seconds=10)654 client.put_object(Bucket=bucket_name, Body='abc', Key=key, ObjectLockMode='GOVERNANCE',655 ObjectLockRetainUntilDate=retain_until)656 # change mode to COMPLIANCE657 retention = {'Mode': 'COMPLIANCE', 'RetainUntilDate': retain_until}658 client.put_object_retention(Bucket=bucket_name, Key=key, Retention=retention, BypassGovernanceRetention=True)659 def test_object_lock_changing_mode_from_governance_without_bypass(self, s3cfg_global_unique):660 """661 (operation='Test changing object retention mode from GOVERNANCE to COMPLIANCE without bypass')662 (assertion='fails')663 """664 bucket_name = self.get_new_bucket_name(s3cfg_global_unique)665 client = get_client(s3cfg_global_unique)666 key = 'file1'667 client.create_bucket(Bucket=bucket_name, ObjectLockEnabledForBucket=True)668 # upload object with mode=GOVERNANCE669 retain_until = datetime.datetime.now(pytz.utc) + datetime.timedelta(seconds=10)670 client.put_object(Bucket=bucket_name, Body='abc', Key=key, ObjectLockMode='GOVERNANCE',671 ObjectLockRetainUntilDate=retain_until)672 # try to change mode to COMPLIANCE...

Full Screen

Full Screen

fonctions.py

Source: fonctions.py (GitHub)

copy

Full Screen

...57 fileName = os.path.basename(path)58 try:59 s3_client.upload_file(Filename= path, Bucket=container, Key= fileName)60 if retention !=0 :61 s3_client.put_object_retention(Bucket=container, Key= fileName, 62 Retention={63 'Mode': 'GOVERNANCE',64 'RetainUntilDate': datetime.today()+timedelta(retention)65 }66 )67 except FileNotFoundError:68 print("No such file : "+path)69def test_connection( client):70 try:71 # use the JSON library's dump() method for indentation72 info = client.info()73 # pass client object to info() method74 print ("Elasticsearch client info():", info)75 except ConnectionError as err:76 # print ConnectionError for Elasticsearch77 print ("\nElasticsearch info() ERROR:", err)78 print ("\nThe client host:", config['elasticsearchDomain'], "is invalid or cluster is not running")79 # change the client's value to 'None' if ConnectionError80 client = None81#function to delete a file in the container82def deleteFileInContainer(container, fileName):83 """Function called to delete a certain file (thanks to its name) in a defined container"""84 try:85 s3_client.delete_object(Bucket=container, Key= fileName)86 except s3_client.exceptions.NoSuchBucket:87 print("No container named "+container+".")88#function to copy an entire folder from sever to the OpenIO container89def uploadFolder( container, folder_path, retention=0):90 """Function called to upload an entire folder (thanks to its path) from your computer to a defined container"""91 try:92 for file_name_ext in os.listdir(folder_path):93 file_path_ext=str(folder_path)+'/'+file_name_ext94 s3_client.upload_file(Filename= file_path_ext, Bucket= container, Key=file_name_ext)95 if retention !=0 :96 s3_client.put_object_retention(Bucket=container, Key=file_name_ext, 97 Retention={98 'Mode': 'GOVERNANCE',99 'RetainUntilDate': datetime.today()+timedelta(retention)100 }101 )102 except FileNotFoundError:103 print("No such directory : "+folder_path)104#function to list all containers105def listBuckets():106 
print(s3_client.list_buckets())107#function to list all data inside a container108def listDataForAGivenPeriod( container, period):109 """Function called to list all data that a container has for a given period of time in days"""110 objects = []111 t = timedelta(days=period)112 utc=pytz.UTC113 today = utc.localize(datetime.today())114 try:115 for element in s3_client.list_objects(Bucket=container)['Contents']:116 creationDate = element['LastModified'].replace(tzinfo=utc)117 duration = today - t118 if(creationDate >= duration):119 objects.append(element)120 print(objects)121 except s3_client.exceptions.NoSuchBucket:122 print("No container named "+container+".")123#function to retrieve all data from a container124def retrieveAllDataFromContainer(container):125 """Function called to retrieve and copy in the folder of the python executable all the files contained in a126 certain container """127 try:128 for element in s3_client.list_objects(Bucket=container)['Contents']:129 s3_client.download_file(Bucket=container, Key=element['Key'], Filename= element['Key'])130 except s3_client.exceptions.NoSuchBucket:131 print("No container named "+container+".")132#function to copy an entire folder from ElasticSearch to the OpenIO container133def elasticUploadFolder(container, index, retention=0):134 """Function called to copy an entire folder """135 if os.name == 'posix':136 slash = "/" # for Linux and macOS137 else:138 slash = chr(92) # '\' for Windows139 host = str(config['elasticsearchDomain']) + ":" + str(config['elasticsearchPort'])140 client = Elasticsearch(host)141 test_connection(client)142 response = client.search(index=index, body={}, size=100)143 elastic_docs = response["hits"]["hits"]144 for num, doc in enumerate(elastic_docs):145 # get _source data dict from document146 source_data = doc["_source"]147 s3_client.upload_file(Filename= source_data[config['elasticsearchFilenameAttribut']],Bucket=container, Key= source_data[config['elasticsearchFilenameAttribut']])148 if 
retention !=0 :149 s3_client.put_object_retention(Bucket=container, Key= source_data[config['elasticsearchFilenameAttribut']], 150 Retention={151 'Mode': 'GOVERNANCE',152 'RetainUntilDate': datetime.today()+timedelta(retention)153 }154 )155#function to create a container156def addContainer(container, newACL='private'):157 if newACL=='private' or newACL=='public-read' or newACL=='public-read-write' or newACL=='authenticated-read':158 try:159 s3_client.create_bucket(ACL=newACL, Bucket=container)160 except s3_client.exceptions.BucketAlreadyExists:161 print("Bucket "+container+" already exists!")162 except s3_client.exceptions.ClientError:163 print("Bucket name is not valid.")164 print("Bucket names must be between 3 and 63 characters long.")165 print("Bucket names can consist only of lowercase letters, numbers, dots (.), and hyphens (-).")166 print("Bucket names must begin and end with a letter or number.")167 print("Bucket names must not be formatted as an IP address (for example, 192.168.5.4).")168 print("Bucket names can't begin with xn-- (for buckets created after February 2020).")169 else:170 print("ACL argument is not valid. It must be 'private'|'public-read'|'public-read-write'|'authenticated-read' for a container.")171#function to get the Acess Control List policy of a container172def getBucketACL(container):173 try:174 bucket_acl = s3_client.get_bucket_acl(Bucket=container)175 print(bucket_acl)176 except s3_client.exceptions.NoSuchBucket:177 print("No container named "+container+".")178#function to modify the Acess Control List policy of a container179def putBucketACL(container,newACL):180 if newACL=='private' or newACL=='public-read' or newACL=='public-read-write' or newACL=='authenticated-read':181 try:182 bucket_acl = s3_client.put_bucket_acl(ACL=newACL, Bucket=container)183 except s3_client.exceptions.NoSuchBucket:184 print("No container named "+container+".")185 else:186 print("ACL argument is not valid. 
It must be 'private'|'public-read'|'public-read-write'|'authenticated-read' for a container.")187#function to get the Acess Control List policy of a file188def getObjectACL(container,filename):189 try:190 object_acl = s3_client.get_object_acl(Bucket=container, Key= filename)191 print(object_acl)192 except s3_client.exceptions.NoSuchBucket:193 print("No container named "+container+".")194 except s3_client.exceptions.NoSuchKey:195 print("No file named "+filename+" in the container "+container+".")196#function to modify the Acess Control List policy of a file197def putObjectACL(container,filename, newACL):198 if newACL=='private' or newACL=='public-read' or newACL=='public-read-write' or newACL=='authenticated-read' or newACL=='bucket-owner-read' or newACL=='bucket-owner-full-control':199 try:200 object_acl = s3_client.put_object_acl(ACL=newACL, Bucket=container, Key= filename)201 except s3_client.exceptions.NoSuchKey:202 print("No file named "+filename+" in the container "+container+".")203 except s3_client.exceptions.NoSuchBucket:204 print("No container named "+container+".")205 else:206 print("ACL argument is not valid. 
It must be 'private'|'public-read'|'public-read-write'|'authenticated-read'|'aws-exec-read'|'bucket-owner-read'|'bucket-owner-full-control' for an object.")207#function to get the retention policy of an object208def getRetention(container, filename):209 try:210 object_retention = s3_client.get_object_retention(Bucket=container, Key= filename)211 print(object_retention)212 except botocore.parsers.ResponseParserError:213 print("No retention policy for this object")214 except s3_client.exceptions.NoSuchBucket:215 print("No container named "+container+".")216 except s3_client.exceptions.NoSuchKey:217 print("No file named "+filename+" in the container "+container+".")218 219#function to modify the retention policy of an object220def putRetention(container, filename, retention):221 try:222 object_retention = s3_client.put_object_retention(Bucket=container, Key= filename,223 Retention={224 'Mode': 'GOVERNANCE',225 'RetainUntilDate': datetime.today()+timedelta(retention)226 }227 )228 except s3_client.exceptions.NoSuchBucket:229 print("No container named "+container+".")230 except s3_client.exceptions.NoSuchKey:...

Full Screen

Full Screen

Automation Testing Tutorials

Learn to execute automation testing from scratch with the LambdaTest Learning Hub — right from setting up the prerequisites to run your first automation test, to following best practices and diving deeper into advanced test scenarios. The LambdaTest Learning Hub compiles a list of step-by-step guides to help you become proficient with different test automation frameworks, i.e., Selenium, Cypress, TestNG, etc.

LambdaTest Learning Hubs:

YouTube

You can also refer to the video tutorials on the LambdaTest YouTube channel to get step-by-step demonstrations from industry experts.

Run localstack automation tests on LambdaTest cloud grid

Perform automation testing on 3000+ real desktop and mobile devices online.

Try LambdaTest Now !!

Get 100 minutes of automation testing FREE!!

Next-Gen App & Browser Testing Cloud

Was this article helpful?

Helpful

Not Helpful