How to use the _website_bucket_url method in LocalStack

The Python snippet below, taken from LocalStack's own test suite (localstack_python), shows _website_bucket_url in action: it builds the S3 static-website endpoint for a bucket so the tests can fetch published pages over plain HTTP.
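Before reading the full excerpt, here is a minimal, self-contained sketch of the idea. It is not the project's helper itself: the hostnames, port, and credentials are assumptions for a default LocalStack setup (edge port 4566, static websites served under s3-website.localhost.localstack.cloud, which resolves to 127.0.0.1). The real helper appears near the end of the excerpt below and additionally branches on the TEST_TARGET environment variable to hit real AWS.

import os

import boto3
import requests

# Assumed defaults for a local run; adjust if your LocalStack uses other values.
EDGE_URL = "http://localhost:4566"
S3_STATIC_WEBSITE_HOSTNAME = "s3-website.localhost.localstack.cloud"


def website_bucket_url(bucket_name: str) -> str:
    """Simplified stand-in for the test helper: build the website endpoint."""
    if os.environ.get("TEST_TARGET") == "AWS_CLOUD":
        # Against real AWS (us-east-1 style, dash before the region).
        return f"http://{bucket_name}.s3-website-us-east-1.amazonaws.com"
    # Against LocalStack: virtual-host style URL on the edge port.
    return f"http://{bucket_name}.{S3_STATIC_WEBSITE_HOSTNAME}:4566"


s3 = boto3.client(
    "s3",
    endpoint_url=EDGE_URL,
    aws_access_key_id="test",
    aws_secret_access_key="test",
    region_name="us-east-1",
)
s3.create_bucket(Bucket="my-site", ACL="public-read")
s3.put_object(
    Bucket="my-site",
    Key="index.html",
    Body="index",
    ContentType="text/html",
    ACL="public-read",
)
s3.put_bucket_website(
    Bucket="my-site",
    WebsiteConfiguration={"IndexDocument": {"Suffix": "index.html"}},
)

# Fetch the index document through the website endpoint.
response = requests.get(website_bucket_url("my-site"))
assert response.status_code == 200
assert response.text == "index"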

test_s3.py

Source: test_s3.py (GitHub)


# Excerpt from LocalStack's test_s3.py: static-website hosting tests, CORS tests,
# presigned-URL signature tests, the DEEP_ARCHIVE storage class test, and the URL
# helpers (_endpoint_url, _bucket_url, _website_bucket_url, _bucket_url_vhost).
...
            WebsiteConfiguration={
                "IndexDocument": {"Suffix": "index.html"},
            },
        )
        url = _website_bucket_url(bucket_name)
        response = requests.get(url, verify=False)
        assert 200 == response.status_code
        assert "index" == response.text

    @pytest.mark.aws_validated
    def test_s3_static_website_hosting(self, s3_client, s3_create_bucket):
        bucket_name = f"bucket-{short_uid()}"
        s3_create_bucket(Bucket=bucket_name, ACL="public-read")
        index_obj = s3_client.put_object(
            Bucket=bucket_name,
            Key="test/index.html",
            Body="index",
            ContentType="text/html",
            ACL="public-read",
        )
        error_obj = s3_client.put_object(
            Bucket=bucket_name,
            Key="test/error.html",
            Body="error",
            ContentType="text/html",
            ACL="public-read",
        )
        actual_key_obj = s3_client.put_object(
            Bucket=bucket_name,
            Key="actual/key.html",
            Body="key",
            ContentType="text/html",
            ACL="public-read",
        )
        with_content_type_obj = s3_client.put_object(
            Bucket=bucket_name,
            Key="with-content-type/key.js",
            Body="some js",
            ContentType="application/javascript; charset=utf-8",
            ACL="public-read",
        )
        s3_client.put_object(
            Bucket=bucket_name,
            Key="to-be-redirected.html",
            WebsiteRedirectLocation="/actual/key.html",
            ACL="public-read",
        )
        s3_client.put_bucket_website(
            Bucket=bucket_name,
            WebsiteConfiguration={
                "IndexDocument": {"Suffix": "index.html"},
                "ErrorDocument": {"Key": "test/error.html"},
            },
        )
        website_url = _website_bucket_url(bucket_name)
        # actual key
        url = f"{website_url}/actual/key.html"
        response = requests.get(url, verify=False)
        assert 200 == response.status_code
        assert "key" == response.text
        assert "content-type" in response.headers
        assert "text/html" == response.headers["content-type"]
        assert "etag" in response.headers
        assert actual_key_obj["ETag"] in response.headers["etag"]
        # If-None-Match and Etag
        response = requests.get(
            url, headers={"If-None-Match": actual_key_obj["ETag"]}, verify=False
        )
        assert 304 == response.status_code
        # key with specified content-type
        url = f"{website_url}/with-content-type/key.js"
        response = requests.get(url, verify=False)
        assert 200 == response.status_code
        assert "some js" == response.text
        assert "content-type" in response.headers
        assert "application/javascript; charset=utf-8" == response.headers["content-type"]
        assert "etag" in response.headers
        assert with_content_type_obj["ETag"] == response.headers["etag"]
        # index document
        url = f"{website_url}/test"
        response = requests.get(url, verify=False)
        assert 200 == response.status_code
        assert "index" == response.text
        assert "content-type" in response.headers
        assert "text/html" in response.headers["content-type"]
        assert "etag" in response.headers
        assert index_obj["ETag"] == response.headers["etag"]
        # root path test
        url = f"{website_url}/"
        response = requests.get(url, verify=False)
        assert 404 == response.status_code
        assert "error" == response.text
        assert "content-type" in response.headers
        assert "text/html" in response.headers["content-type"]
        assert "etag" in response.headers
        assert error_obj["ETag"] == response.headers["etag"]
        # error document
        url = f"{website_url}/something"
        response = requests.get(url, verify=False)
        assert 404 == response.status_code
        assert "error" == response.text
        assert "content-type" in response.headers
        assert "text/html" in response.headers["content-type"]
        assert "etag" in response.headers
        assert error_obj["ETag"] == response.headers["etag"]
        # redirect object
        url = f"{website_url}/to-be-redirected.html"
        response = requests.get(url, verify=False, allow_redirects=False)
        assert 301 == response.status_code
        assert "location" in response.headers
        assert "actual/key.html" in response.headers["location"]
        response = requests.get(url, verify=False)
        assert 200 == response.status_code
        assert actual_key_obj["ETag"] == response.headers["etag"]


class TestS3Cors:
    @pytest.mark.aws_validated
    # TODO x-amzn-requestid should be 'x-amz-request-id'
    # TODO "Vary" contains more in AWS, other params are added additional in LocalStack
    @pytest.mark.skip_snapshot_verify(
        paths=[
            "$..Access-Control-Allow-Headers",
            "$..Connection",
            "$..Location",
            "$..Vary",
            "$..Content-Type",
            "$..x-amzn-requestid",
            "$..last-modified",
            "$..Last-Modified",
        ]
    )
    def test_cors_with_allowed_origins(self, s3_client, s3_create_bucket, snapshot, monkeypatch):
        monkeypatch.setattr(config, "DISABLE_CUSTOM_CORS_S3", False)
        snapshot.add_transformer(self._get_cors_result_header_snapshot_transformer(snapshot))
        bucket_cors_config = {
            "CORSRules": [
                {
                    "AllowedOrigins": ["https://localhost:4200"],
                    "AllowedMethods": ["GET", "PUT"],
                    "MaxAgeSeconds": 3000,
                    "AllowedHeaders": ["*"],
                }
            ]
        }
        bucket_name = f"bucket-{short_uid()}"
        object_key = "424f6bae-c48f-42d8-9e25-52046aecc64d/document.pdf"
        s3_create_bucket(Bucket=bucket_name)
        s3_client.put_bucket_cors(Bucket=bucket_name, CORSConfiguration=bucket_cors_config)
        # create signed url
        url = s3_client.generate_presigned_url(
            ClientMethod="put_object",
            Params={
                "Bucket": bucket_name,
                "Key": object_key,
                "ContentType": "application/pdf",
                "ACL": "bucket-owner-full-control",
            },
            ExpiresIn=3600,
        )
        result = requests.put(
            url,
            data="something",
            verify=False,
            headers={
                "Origin": "https://localhost:4200",
                "Content-Type": "application/pdf",
            },
        )
        assert result.status_code == 200
        # result.headers is type CaseInsensitiveDict and needs to be converted first
        snapshot.match("raw-response-headers", dict(result.headers))
        bucket_cors_config = {
            "CORSRules": [
                {
                    "AllowedOrigins": [
                        "https://localhost:4200",
                        "https://localhost:4201",
                    ],
                    "AllowedMethods": ["GET", "PUT"],
                    "MaxAgeSeconds": 3000,
                    "AllowedHeaders": ["*"],
                }
            ]
        }
        s3_client.put_bucket_cors(Bucket=bucket_name, CORSConfiguration=bucket_cors_config)
        # create signed url
        url = s3_client.generate_presigned_url(
            ClientMethod="put_object",
            Params={
                "Bucket": bucket_name,
                "Key": object_key,
                "ContentType": "application/pdf",
                "ACL": "bucket-owner-full-control",
            },
            ExpiresIn=3600,
        )
        # mimic chrome behavior, sending OPTIONS request first for strict-origin-when-cross-origin
        result = requests.options(
            url,
            headers={
                "Origin": "https://localhost:4200",
                "Access-Control-Request-Method": "PUT",
            },
        )
        snapshot.match("raw-response-headers-2", dict(result.headers))
        result = requests.put(
            url,
            data="something",
            verify=False,
            headers={
                "Origin": "https://localhost:4200",
                "Content-Type": "application/pdf",
            },
        )
        assert result.status_code == 200
        snapshot.match("raw-response-headers-3", dict(result.headers))
        result = requests.put(
            url,
            data="something",
            verify=False,
            headers={
                "Origin": "https://localhost:4201",
                "Content-Type": "application/pdf",
            },
        )
        assert result.status_code == 200
        snapshot.match("raw-response-headers-4", dict(result.headers))

    @pytest.mark.aws_validated
    @pytest.mark.skip_snapshot_verify(
        paths=[
            "$..Access-Control-Allow-Headers",
            "$..Connection",
            "$..Location",
            "$..Vary",
            "$..Content-Type",
            "$..x-amzn-requestid",
            "$..last-modified",
            "$..accept-ranges",
            "$..content-language",
            "$..content-md5",
            "$..content-type",
            "$..x-amz-version-id",
            "$..Last-Modified",
            "$..Accept-Ranges",
            "$..raw-response-headers-2.Access-Control-Allow-Credentials",
        ]
    )
    def test_cors_configurations(self, s3_client, s3_create_bucket, monkeypatch, snapshot):
        monkeypatch.setattr(config, "DISABLE_CUSTOM_CORS_S3", False)
        snapshot.add_transformer(self._get_cors_result_header_snapshot_transformer(snapshot))
        bucket = f"test-cors-{short_uid()}"
        object_key = "index.html"
        url = "{}/{}".format(_bucket_url(bucket), object_key)
        BUCKET_CORS_CONFIG = {
            "CORSRules": [
                {
                    "AllowedOrigins": [config.get_edge_url()],
                    "AllowedMethods": ["GET", "PUT"],
                    "MaxAgeSeconds": 3000,
                    "AllowedHeaders": ["x-amz-tagging"],
                }
            ]
        }
        s3_create_bucket(Bucket=bucket, ACL="public-read")
        s3_client.put_bucket_cors(Bucket=bucket, CORSConfiguration=BUCKET_CORS_CONFIG)
        s3_client.put_object(
            Bucket=bucket, Key=object_key, Body="<h1>Index</html>", ACL="public-read"
        )
        response = requests.get(
            url, headers={"Origin": config.get_edge_url(), "Content-Type": "text/html"}
        )
        assert 200 == response.status_code
        snapshot.match("raw-response-headers", dict(response.headers))
        BUCKET_CORS_CONFIG = {
            "CORSRules": [
                {
                    "AllowedOrigins": ["https://anydomain.com"],
                    "AllowedMethods": ["GET", "PUT"],
                    "MaxAgeSeconds": 3000,
                    "AllowedHeaders": ["x-amz-tagging"],
                }
            ]
        }
        s3_client.put_bucket_cors(Bucket=bucket, CORSConfiguration=BUCKET_CORS_CONFIG)
        response = requests.get(
            url, headers={"Origin": config.get_edge_url(), "Content-Type": "text/html"}
        )
        assert 200 == response.status_code
        snapshot.match("raw-response-headers-2", dict(response.headers))

    def _get_cors_result_header_snapshot_transformer(self, snapshot):
        return [
            snapshot.transform.key_value("x-amz-id-2", "<id>", reference_replacement=False),
            snapshot.transform.key_value(
                "x-amz-request-id", "<request-id>", reference_replacement=False
            ),
            snapshot.transform.key_value("Date", "<date>", reference_replacement=False),
            snapshot.transform.key_value("Server", "<server>", reference_replacement=False),
            snapshot.transform.key_value("Last-Modified", "<date>", reference_replacement=False),
        ]

    @pytest.mark.parametrize(
        "signature_version, use_virtual_address",
        [
            ("s3", False),
            ("s3", True),
            ("s3v4", False),
            ("s3v4", True),
        ],
    )
    @pytest.mark.aws_validated
    def test_presigned_url_signature_authentication_multi_part(
        self,
        s3_client,
        s3_create_bucket,
        signature_version,
        use_virtual_address,
        monkeypatch,
    ):
        monkeypatch.setattr(config, "S3_SKIP_SIGNATURE_VALIDATION", False)
        bucket_name = f"presign-{short_uid()}"
        s3_endpoint_path_style = _endpoint_url()
        s3_create_bucket(Bucket=bucket_name)
        object_key = "temp.txt"
        s3_config = {"addressing_style": "virtual"} if use_virtual_address else {}
        client = _s3_client_custom_config(
            Config(signature_version=signature_version, s3=s3_config),
            endpoint_url=s3_endpoint_path_style,
        )
        upload_id = client.create_multipart_upload(
            Bucket=bucket_name,
            Key=object_key,
        )["UploadId"]
        data = to_bytes("hello this is a upload test")
        upload_file_object = BytesIO(data)
        signed_url = _generate_presigned_url(
            client,
            {
                "Bucket": bucket_name,
                "Key": object_key,
                "UploadId": upload_id,
                "PartNumber": 1,
            },
            expires=4,
            client_method="upload_part",
        )
        response = requests.put(signed_url, data=upload_file_object)
        assert response.status_code == 200
        multipart_upload_parts = [{"ETag": response.headers["ETag"], "PartNumber": 1}]
        response = client.complete_multipart_upload(
            Bucket=bucket_name,
            Key=object_key,
            MultipartUpload={"Parts": multipart_upload_parts},
            UploadId=upload_id,
        )
        assert 200 == response["ResponseMetadata"]["HTTPStatusCode"]
        simple_params = {"Bucket": bucket_name, "Key": object_key}
        response = requests.get(_generate_presigned_url(client, simple_params, 4))
        assert 200 == response.status_code
        assert response.content == data

    @pytest.mark.parametrize(
        "signature_version, use_virtual_address",
        [
            ("s3", False),
            ("s3", True),
            ("s3v4", False),
            ("s3v4", True),
        ],
    )
    @pytest.mark.aws_validated
    def test_presigned_url_signature_authentication_expired(
        self,
        s3_client,
        s3_create_bucket,
        signature_version,
        use_virtual_address,
        monkeypatch,
    ):
        monkeypatch.setattr(config, "S3_SKIP_SIGNATURE_VALIDATION", False)
        bucket_name = f"presign-{short_uid()}"
        s3_endpoint_path_style = _endpoint_url()
        s3_create_bucket(Bucket=bucket_name)
        object_key = "temp.txt"
        s3_client.put_object(Key=object_key, Bucket=bucket_name, Body="123")
        s3_config = {"addressing_style": "virtual"} if use_virtual_address else {}
        client = _s3_client_custom_config(
            Config(signature_version=signature_version, s3=s3_config),
            endpoint_url=s3_endpoint_path_style,
        )
        url = _generate_presigned_url(client, {"Bucket": bucket_name, "Key": object_key}, expires=1)
        time.sleep(1)
        assert 403 == requests.get(url).status_code

    @pytest.mark.parametrize(
        "signature_version, use_virtual_address",
        [
            ("s3", False),
            ("s3", True),
            ("s3v4", False),
            ("s3v4", True),
        ],
    )
    @pytest.mark.aws_validated
    def test_presigned_url_signature_authentication(
        self,
        s3_client,
        s3_create_bucket,
        signature_version,
        use_virtual_address,
        monkeypatch,
    ):
        monkeypatch.setattr(config, "S3_SKIP_SIGNATURE_VALIDATION", False)
        bucket_name = f"presign-{short_uid()}"
        s3_endpoint_path_style = _endpoint_url()
        s3_url = _bucket_url_vhost(bucket_name) if use_virtual_address else _bucket_url(bucket_name)
        s3_create_bucket(Bucket=bucket_name)
        object_key = "temp.txt"
        s3_client.put_object(Key=object_key, Bucket=bucket_name, Body="123")
        s3_config = {"addressing_style": "virtual"} if use_virtual_address else {}
        client = _s3_client_custom_config(
            Config(signature_version=signature_version, s3=s3_config),
            endpoint_url=s3_endpoint_path_style,
        )
        expires = 4
        # GET requests
        simple_params = {"Bucket": bucket_name, "Key": object_key}
        response = requests.get(_generate_presigned_url(client, simple_params, expires))
        assert 200 == response.status_code
        assert response.content == b"123"
        params = {
            "Bucket": bucket_name,
            "Key": object_key,
            "ResponseContentType": "text/plain",
            "ResponseContentDisposition": "attachment; filename=test.txt",
        }
        presigned = _generate_presigned_url(client, params, expires)
        response = requests.get(_generate_presigned_url(client, params, expires))
        assert 200 == response.status_code
        assert response.content == b"123"
        object_data = "this should be found in when you download {}.".format(object_key)
        # invalid requests
        # TODO check how much sense it makes to make this url "invalid"...
        assert (
            403
            == requests.get(
                _make_url_invalid(s3_url, object_key, presigned),
                data=object_data,
                headers={"Content-Type": "my-fake-content/type"},
            ).status_code
        )
        # put object valid
        assert (
            200
            == requests.put(
                _generate_presigned_url(client, simple_params, expires, client_method="put_object"),
                data=object_data,
            ).status_code
        )
        params = {
            "Bucket": bucket_name,
            "Key": object_key,
            "ContentType": "text/plain",
        }
        presigned_put_url = _generate_presigned_url(
            client, params, expires, client_method="put_object"
        )
        assert (
            200
            == requests.put(
                presigned_put_url,
                data=object_data,
                headers={"Content-Type": "text/plain"},
            ).status_code
        )
        # Invalid request
        response = requests.put(
            _make_url_invalid(s3_url, object_key, presigned_put_url),
            data=object_data,
            headers={"Content-Type": "my-fake-content/type"},
        )
        assert 403 == response.status_code
        # DELETE requests
        presigned_delete_url = _generate_presigned_url(
            client, simple_params, expires, client_method="delete_object"
        )
        response = requests.delete(presigned_delete_url)
        assert 204 == response.status_code


class TestS3DeepArchive:
    """
    Test to cover DEEP_ARCHIVE Storage Class functionality.
    """

    @pytest.mark.aws_validated
    def test_storage_class_deep_archive(self, s3_client, s3_resource, s3_bucket, tmpdir):
        key = "my-key"
        transfer_config = TransferConfig(multipart_threshold=5 * KB, multipart_chunksize=1 * KB)

        def upload_file(size_in_kb: int):
            file = tmpdir / f"test-file-{short_uid()}.bin"
            data = b"1" * (size_in_kb * KB)
            file.write(data=data, mode="w")
            s3_client.upload_file(
                Bucket=s3_bucket,
                Key=key,
                Filename=str(file.realpath()),
                ExtraArgs={"StorageClass": "DEEP_ARCHIVE"},
                Config=transfer_config,
            )

        upload_file(1)
        upload_file(9)
        upload_file(15)
        objects = s3_resource.Bucket(s3_bucket).objects.all()
        keys = []
        for obj in objects:
            keys.append(obj.key)
            assert obj.storage_class == "DEEP_ARCHIVE"


def _anon_client(service: str):
    conf = Config(signature_version=UNSIGNED)
    if os.environ.get("TEST_TARGET") == "AWS_CLOUD":
        return boto3.client(service, config=conf, region_name=None)
    return aws_stack.create_external_boto_client(service, config=conf)


def _s3_client_custom_config(conf: Config, endpoint_url: str):
    if os.environ.get("TEST_TARGET") == "AWS_CLOUD":
        return boto3.client("s3", config=conf, endpoint_url=endpoint_url)
    # TODO in future this should work with aws_stack.create_external_boto_client
    # currently it doesn't as authenticate_presign_url_signv2 requires the secret_key to be 'test'
    # return aws_stack.create_external_boto_client(
    #     "s3",
    #     config=conf,
    #     endpoint_url=endpoint_url,
    #     aws_access_key_id=TEST_AWS_ACCESS_KEY_ID,
    # )
    return boto3.client(
        "s3",
        endpoint_url=endpoint_url,
        config=conf,
        aws_access_key_id=TEST_AWS_ACCESS_KEY_ID,
        aws_secret_access_key=TEST_AWS_SECRET_ACCESS_KEY,
    )


def _endpoint_url(region: str = "", localstack_host: str = None) -> str:
    if not region:
        region = config.DEFAULT_REGION
    if os.environ.get("TEST_TARGET") == "AWS_CLOUD":
        if region == "us-east-1":
            return "https://s3.amazonaws.com"
        else:
            return f"http://s3.{region}.amazonaws.com"
    return f"{config.get_edge_url(localstack_hostname=localstack_host or S3_VIRTUAL_HOSTNAME)}"


def _bucket_url(bucket_name: str, region: str = "", localstack_host: str = None) -> str:
    return f"{_endpoint_url(region, localstack_host)}/{bucket_name}"


def _website_bucket_url(bucket_name: str):
    # TODO depending on region the syntax of the website URL varies (dot vs dash before region)
    if os.environ.get("TEST_TARGET") == "AWS_CLOUD":
        region = config.DEFAULT_REGION
        return f"http://{bucket_name}.s3-website-{region}.amazonaws.com"
    return _bucket_url_vhost(bucket_name, localstack_host=constants.S3_STATIC_WEBSITE_HOSTNAME)


def _bucket_url_vhost(bucket_name: str, region: str = "", localstack_host: str = None) -> str:
    if not region:
        region = config.DEFAULT_REGION
    if os.environ.get("TEST_TARGET") == "AWS_CLOUD":
        if region == "us-east-1":
            return f"https://{bucket_name}.s3.amazonaws.com"
        else:
            return f"https://{bucket_name}.s3.{region}.amazonaws.com"
    host = localstack_host or S3_VIRTUAL_HOSTNAME
...
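A note on the design: the helpers at the bottom of the excerpt (_endpoint_url, _bucket_url, _website_bucket_url, _bucket_url_vhost) all branch on the TEST_TARGET environment variable, so the same tests run against a local LocalStack instance by default and against real AWS when TEST_TARGET=AWS_CLOUD is set. The following is a rough illustration of flipping that switch; the test file path is an assumption and may differ in your LocalStack checkout.

import os
import subprocess

# Assumed path to the excerpted file; adjust to your checkout.
cmd = ["pytest", "tests/integration/test_s3.py", "-k", "static_website"]

# Default run: TEST_TARGET is unset, so the helpers build URLs that point at
# the LocalStack edge endpoint.
subprocess.run(cmd, check=False)

# Validation run against real AWS: the same helpers switch to *.amazonaws.com.
subprocess.run(cmd, env={**os.environ, "TEST_TARGET": "AWS_CLOUD"}, check=False)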

