How to use the is_api_enabled method in LocalStack

Best Python code snippets using localstack_python

gcp-units.py

Source: gcp-units.py (GitHub)



...
        print("fetched all existing permissions on account")
        with open(self.file_path, 'w') as f:
            # Write Header
            f.write("Resource Type, Unit Counted\n")

    def is_api_enabled(self, apis):
        for api in apis:
            if not self.existing_permissions.get(api, False):
                print(f"service-api {api} is disabled")
                return False
        return True

    def add_result(self, k, v):
        with open(self.file_path, 'a') as f:
            f.write(f'{k}, {v}\n')

    def count_all(self):
        self.add_result("Load Balancers", self.count_load_balancers())
        self.add_result("Compute Instances", self.count_compute_instances())
        self.add_result("Disks", self.count_disks())
        self.add_result("VPC Networks", self.count_vpc_networks())
        self.add_result("Firewalls", self.count_firewalls())
        self.add_result("Managed DNS Zones", self.count_managed_dns_zones())
        # All iam users also includes service accounts
        all_iam_users = self.count_all_iam_users()
        all_service_account_users = self.count_all_service_account_users()
        self.total_resource_count -= all_service_account_users
        self.add_result("IAM Users", all_iam_users - all_service_account_users)
        self.add_result("Service Account Users", all_service_account_users)
        self.add_result("Kubernetes Clusters", self.count_kubernetes_clusters())
        self.add_result("Alert Policies", self.count_alert_policies())
        self.add_result("Log Sinks", self.count_log_sinks())
        self.add_result("SQL Instances", self.count_sql_instances())
        self.add_result("Storage Buckets", self.count_storage_buckets())
        self.add_result("Pub Sub Topics", self.count_pub_sub_topics())
        self.add_result("Spanner Instances", self.count_spanner_instances())
        self.add_result("Cloud Functions", self.count_cloud_functions())
        big_query_datasets, big_query_tables = self.count_big_query_datasets_tables()
        self.add_result("Big Query Datasets", big_query_datasets)
        self.add_result("Big Query Tables", big_query_tables)
        self.add_result('TOTAL', self.total_resource_count)
        print("results stored at", self.file_path)

    def count_load_balancers(self):
        print('getting data for count_load_balancers')
        if not self.is_api_enabled(["compute.googleapis.com"]):
            return 0
        output = subprocess.check_output(
            "gcloud compute forwarding-rules list --format json",
            text=True, shell=True
        )
        j = json.loads(output)
        self.total_resource_count += len(j)
        return len(j)

    def count_compute_instances(self):
        print('getting data for count_compute_instances')
        if not self.is_api_enabled(["compute.googleapis.com"]):
            return 0
        output = subprocess.check_output(
            "gcloud compute instances list --format json",
            text=True, shell=True
        )
        j = json.loads(output)
        self.total_resource_count += len(j)
        return len(j)

    def count_disks(self):
        print('getting data for count_disks')
        if not self.is_api_enabled(["compute.googleapis.com"]):
            return 0
        output = subprocess.check_output(
            "gcloud compute disks list --format json",
            text=True, shell=True
        )
        j = json.loads(output)
        self.total_resource_count += len(j)
        return len(j)

    def count_vpc_networks(self):
        print('getting data for count_vpc_networks')
        if not self.is_api_enabled(["compute.googleapis.com"]):
            return 0
        output = subprocess.check_output(
            "gcloud compute networks list --format json",
            text=True, shell=True
        )
        j = json.loads(output)
        self.total_resource_count += len(j)
        return len(j)

    def count_firewalls(self):
        print('getting data for count_firewalls')
        if not self.is_api_enabled(["compute.googleapis.com"]):
            return 0
        output = subprocess.check_output(
            "gcloud compute firewall-rules list --format json",
            text=True, shell=True
        )
        j = json.loads(output)
        self.total_resource_count += len(j)
        return len(j)

    def count_managed_dns_zones(self):
        print('getting data for count_managed_dns_zones')
        if not self.is_api_enabled(["dns.googleapis.com"]):
            return 0
        output = subprocess.check_output(
            "gcloud dns managed-zones list --format json",
            text=True, shell=True
        )
        j = json.loads(output)
        self.total_resource_count += len(j)
        return len(j)

    def count_all_iam_users(self):
        print('getting data for count_all_iam_users')
        if not self.is_api_enabled(["iam.googleapis.com", "orgpolicy.googleapis.com"]):
            return 0
        output = subprocess.check_output(
            f"gcloud projects get-iam-policy {self.project_id} --flatten=\"bindings[].members\" --format json",
            text=True, shell=True
        )
        j = json.loads(output)
        self.total_resource_count += len(j)
        return len(j)

    def count_all_service_account_users(self):
        print('getting data for count_all_service_account_users')
        if not self.is_api_enabled(["iam.googleapis.com", "orgpolicy.googleapis.com"]):
            return 0
        output = subprocess.check_output(
            "gcloud iam service-accounts list --format json",
            text=True, shell=True
        )
        j = json.loads(output)
        self.total_resource_count += len(j)
        return len(j)

    def count_kubernetes_clusters(self):
        print('getting data for count_kubernetes_clusters')
        if not self.is_api_enabled(["container.googleapis.com"]):
            return 0
        output = subprocess.check_output(
            "gcloud container clusters list --format json",
            text=True, shell=True
        )
        j = json.loads(output)
        self.total_resource_count += len(j)
        return len(j)

    def count_alert_policies(self):
        print('getting data for count_alert_policies')
        if not self.is_api_enabled(["monitoring.googleapis.com"]):
            return 0
        output = subprocess.check_output(
            "gcloud alpha monitoring policies list --format json",
            text=True, shell=True
        )
        j = json.loads(output)
        self.total_resource_count += len(j)
        return len(j)

    def count_log_sinks(self):
        print('getting data for count_log_sinks')
        if not self.is_api_enabled(["logging.googleapis.com"]):
            return 0
        output = subprocess.check_output(
            "gcloud logging sinks list --format json",
            text=True, shell=True
        )
        j = json.loads(output)
        self.total_resource_count += len(j)
        return len(j)

    def count_sql_instances(self):
        print('getting data for count_sql_instances')
        if not self.is_api_enabled(["sqladmin.googleapis.com"]):
            return 0
        output = subprocess.check_output(
            "gcloud sql instances list --format json",
            text=True, shell=True
        )
        j = json.loads(output)
        self.total_resource_count += len(j)
        return len(j)

    def count_storage_buckets(self):
        print('getting data for count_storage_buckets')
        if not self.is_api_enabled(["storage.googleapis.com"]):
            return 0
        output = subprocess.check_output(
            f"gsutil ls -p {self.project_id}",
            text=True, shell=True
        )
        j = list(filter(lambda x: len(x.strip()) > 0, output.split('\n')))
        self.total_resource_count += len(j)
        return len(j)

    def count_pub_sub_topics(self):
        print('getting data for count_pub_sub_topics')
        if not self.is_api_enabled(["pubsub.googleapis.com"]):
            return 0
        output = subprocess.check_output(
            "gcloud pubsub topics list --format json",
            text=True, shell=True
        )
        j = json.loads(output)
        self.total_resource_count += len(j)
        return len(j)

    def count_spanner_instances(self):
        print('getting data for count_spanner_instances')
        if not self.is_api_enabled(["spanner.googleapis.com"]):
            return 0
        output = subprocess.check_output(
            "gcloud spanner instances list --format json",
            text=True, shell=True
        )
        j = json.loads(output)
        self.total_resource_count += len(j)
        return len(j)

    def count_cloud_functions(self):
        print('getting data for count_cloud_functions')
        if not self.is_api_enabled(["cloudfunctions.googleapis.com"]):
            return 0
        output = subprocess.check_output(
            f"gcloud functions list --regions={','.join(GCP_CF_LOCATIONS)} --format json",
            text=True, shell=True
        )
        j = json.loads(output)
        self.total_resource_count += len(j)
        return len(j)

    def count_big_query_datasets_tables(self):
        print('getting data for count_big_query_datasets_tables')
        if not self.is_api_enabled(["bigquery.googleapis.com"]):
            return 0, 0
        dataset_count, table_count = 0, 0
        output = subprocess.check_output(
            f"bq ls --project_id {self.project_id} --format json",
            text=True, shell=True
        )
        if len(output) == 0:
            output = "[]"
        datasets = json.loads(output)
        dataset_count = len(datasets)
        for dataset in datasets:
            output = subprocess.check_output(
                f"bq ls --project_id {self.project_id} --max_results 10000 --format json {dataset['id']}",
                text=True, shell=True
...
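
In gcp-units.py, is_api_enabled is a method the script defines for itself: it looks up each required Google service API in a dictionary of enabled APIs (self.existing_permissions), and every count_* method calls it first, so a disabled API produces a count of 0 instead of a failed gcloud call. The sketch below distills that pattern into a minimal, runnable form; the GcpUnitCounter class name and the use of gcloud services list --enabled to populate the dictionary are illustrative assumptions, not details taken from the original script.

import json
import subprocess


class GcpUnitCounter:
    """Illustrative reduction of the pattern above; the class name and
    constructor are assumptions, not part of the original gcp-units.py."""

    def __init__(self):
        # Build a map of enabled service APIs, assuming the JSON shape of
        # `gcloud services list --enabled`, where config.name holds the API id.
        output = subprocess.check_output(
            "gcloud services list --enabled --format json",
            text=True, shell=True
        )
        self.existing_permissions = {
            svc["config"]["name"]: True for svc in json.loads(output)
        }
        self.total_resource_count = 0

    def is_api_enabled(self, apis):
        # Return False (and log) as soon as any required API is not enabled.
        for api in apis:
            if not self.existing_permissions.get(api, False):
                print(f"service-api {api} is disabled")
                return False
        return True

    def count_disks(self):
        # Guard the gcloud call so a disabled API yields 0 instead of an error.
        if not self.is_api_enabled(["compute.googleapis.com"]):
            return 0
        output = subprocess.check_output(
            "gcloud compute disks list --format json", text=True, shell=True
        )
        count = len(json.loads(output))
        self.total_resource_count += count
        return count

Because the guard lives inside each count_* method, the report stays complete even in projects where some service APIs are turned off; those rows simply show 0.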


test_edge.py

Source: test_edge.py (GitHub)



...
class TestEdgeAPI(unittest.TestCase):
    def test_invoke_apis_via_edge(self):
        edge_port = config.EDGE_PORT_HTTP or config.EDGE_PORT
        edge_url = '%s://localhost:%s' % (get_service_protocol(), edge_port)
        if is_api_enabled('s3'):
            self._invoke_s3_via_edge(edge_url)
            self._invoke_s3_via_edge_multipart_form(edge_url)
        if is_api_enabled('kinesis'):
            self._invoke_kinesis_via_edge(edge_url)
        if is_api_enabled('dynamodbstreams'):
            self._invoke_dynamodbstreams_via_edge(edge_url)
        if is_api_enabled('firehose'):
            self._invoke_firehose_via_edge(edge_url)
        if is_api_enabled('stepfunctions'):
            self._invoke_stepfunctions_via_edge(edge_url)

    def _invoke_kinesis_via_edge(self, edge_url):
        client = aws_stack.connect_to_service('kinesis', endpoint_url=edge_url)
        result = client.list_streams()
        self.assertIn('StreamNames', result)

    def _invoke_dynamodbstreams_via_edge(self, edge_url):
        client = aws_stack.connect_to_service('dynamodbstreams', endpoint_url=edge_url)
        result = client.list_streams()
        self.assertIn('Streams', result)

    def _invoke_firehose_via_edge(self, edge_url):
        client = aws_stack.connect_to_service('firehose', endpoint_url=edge_url)
        result = client.list_delivery_streams()
        self.assertIn('DeliveryStreamNames', result)

    def _invoke_stepfunctions_via_edge(self, edge_url):
...
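
In test_edge.py, by contrast, is_api_enabled is LocalStack's own helper (exposed by localstack.config in the older LocalStack releases this test targets) and reports whether a service is part of the active SERVICES configuration, so each service is only exercised through the edge endpoint when it is actually started. Below is a minimal sketch of gating a test the same way; the SQS example, the TestSqsViaEdge class name, and the skip message are illustrative assumptions rather than code from the LocalStack test suite.

import unittest

# Assumption: as in the snippet above, is_api_enabled is the helper exposed by
# localstack.config in older LocalStack releases; it returns True when the
# given service is included in the SERVICES configuration.
from localstack import config
from localstack.config import is_api_enabled
from localstack.utils.aws import aws_stack


class TestSqsViaEdge(unittest.TestCase):
    def test_list_queues_via_edge(self):
        if not is_api_enabled('sqs'):
            # Skip instead of failing when the sqs API is not started.
            self.skipTest('sqs API is not enabled in this LocalStack instance')
        edge_url = 'http://localhost:%s' % config.EDGE_PORT
        client = aws_stack.connect_to_service('sqs', endpoint_url=edge_url)
        result = client.list_queues()
        # On a fresh instance the queue list may be empty; a well-formed
        # response is enough to show that edge routing works.
        self.assertIn('ResponseMetadata', result)


if __name__ == '__main__':
    unittest.main()

Using skipTest is a small variation on the plain if checks in test_edge.py: the result shows up as a skipped test rather than silently not running, which makes it easier to see which services were disabled in a given run.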


Automation Testing Tutorials

Learn to execute automation testing from scratch with the LambdaTest Learning Hub: from setting up the prerequisites and running your first automation test to following best practices and diving deeper into advanced test scenarios. The Learning Hub compiles step-by-step guides to help you become proficient with different test automation frameworks such as Selenium, Cypress, and TestNG.

LambdaTest Learning Hubs:

YouTube

You can also refer to the video tutorials on the LambdaTest YouTube channel for step-by-step demonstrations from industry experts.

Run LocalStack automation tests on the LambdaTest cloud grid

Perform automation testing on 3000+ real desktop and mobile devices online.

