How to use get_index_name method in autotest

Best Python code snippet using autotest_python

country_templates.py

Source:country_templates.py Github

copy

Full Screen

...19 scraper.source_website = "https://www.vg.no/spesial/2020/corona/"20 scraper.optimize_min_max_index_ratio = 0.121 scraper.website_height = 120022 #scraper.training_data = {"cases": "3752", "deaths":"19", "tested":"78036", "hospitalised": "302", "intensive_care":"76"}23 group_country_classes[scraper.get_index_name()] = scraper24 # Sweden25 scraper = NovelScraperAuto()26 scraper.country_name = "Sweden"27 scraper.province_name = "Sweden"28 scraper.iso_code = "SE"29 scraper.javascript_required = True30 scraper.wait_time = 1031 scraper.optimize_min_max_index_ratio = 0.132 scraper.website_height = 80033 scraper.has_auto = True34 scraper.source_website = "https://fohm.maps.arcgis.com/apps/opsdashboard/index.html#/68d4537bf2714e63b646c37f152f1392"35 scraper.report_website = "https://www.folkhalsomyndigheten.se/smittskydd-beredskap/utbrott/aktuella-utbrott/covid-19/aktuellt-epidemiologiskt-lage/"36 #scraper.training_data = {"cases": "3046", "deaths": "92", "intensive_care": "209"}37 group_country_classes[scraper.get_index_name()] = scraper38 # Denmark39 # Cases are divided up by regions, can't be parsed with auto40 scraper = NovelScraperDK()41 scraper.scroll_height = 60042 scraper.website_height = 120043 scraper.has_auto = True44 group_country_classes[scraper.get_index_name()] = scraper45 # Iceland46 scraper = NovelScraperAuto()47 scraper.country_name = "Iceland"48 scraper.province_name = "Iceland"49 scraper.iso_code = "IS"50 scraper.javascript_required = True51 scraper.has_hopkins = True52 scraper.adjust_scraped_deaths_from_sheet = True53 scraper.website_height = 70054 scraper.has_auto = True55 scraper.source_website = "https://e.infogram.com/7327507d-28f5-4e3c-b587-c1680bd790e6?src=embed"56 scraper.report_website = "https://www.covid.is/tolulegar-upplysingar"57 #scraper.training_data = {"cases": "890", "recovered": "97", "hospitalised":"18", "intensive_care":"6", "tested":"13613"}58 group_country_classes[scraper.get_index_name()] = scraper59 # Finland60 scraper = 
NovelScraperAuto()61 scraper.country_name = "Finland"62 scraper.province_name = "Finland"63 scraper.iso_code = "FI"64 scraper.javascript_required = True65 scraper.has_auto = True66 scraper.source_website = "https://korona.kans.io/"67 #scraper.training_data = {"cases": "1056", "deaths": "7", "recovered": "10"}68 group_country_classes[scraper.get_index_name()] = scraper69 # Estonia70 scraper = NovelScraperAuto()71 scraper.country_name = "Estonia" 72 scraper.province_name = "Estonia" 73 scraper.iso_code = "EE"74 scraper.source_website = "https://www.koroonakaart.ee/en"75 scraper.website_height = 50076 scraper.has_auto = True77 #scraper.training_data = {"cases": "640", "deaths": "1", "recovered":"20", "tested":"9364", "hospitalised": "48"}78 group_country_classes[scraper.get_index_name()] = scraper79 # Lithuania80 scraper = NovelScraperAuto()81 scraper.country_name = "Lithuania"82 scraper.province_name = "Lithuania" 83 scraper.iso_code = "LI"84 scraper.source_website = "https://sam.lrv.lt/lt/naujienos/koronavirusas"85 scraper.scroll_height= 40086 scraper.website_height = 120087 scraper.website_width = 192088 scraper.has_auto = True89 #scraper.training_data = {"cases": "382", "deaths": "5", "recovered":"1", "tested":"6900"}90 group_country_classes[scraper.get_index_name()] = scraper91 # Latvia92 scraper = NovelScraperAuto()93 scraper.country_name = "Latvia" 94 scraper.province_name = "Latvia" 95 scraper.iso_code = "LV"96 scraper.source_website = "https://arkartassituacija.gov.lv/"97 scraper.website_height = 120098 scraper.has_auto = True99 #scraper.training_data = {"cases": "280", "tested":"11702", "hospitalised": "21"}100 group_country_classes[scraper.get_index_name()] = scraper101 # Central Europe102 # The United Kingdom103 scraper = NovelScraperAuto()104 scraper.country_name = "UK"105 scraper.province_name = "UK"106 scraper.iso_code = "GB"107 scraper.javascript_required = True108 scraper.report_link = 
"https://www.gov.uk/government/publications/covid-19-track-coronavirus-cases"109 scraper.source_website = "https://www.arcgis.com/apps/opsdashboard/index.html#/f94c3c90da5b4e9f9a0b19484dd4bb14"110 #scraper.training_data = {"cases": "17089", "recovered":"135", "deaths": "1019"}111 group_country_classes[scraper.get_index_name()] = scraper112 # Ireland113 scraper = NovelScraperAuto()114 scraper.country_name = "Ireland"115 scraper.province_name = "Ireland"116 scraper.iso_code = "IE"117 scraper.javascript_required = True118 scraper.source_website = "https://www.gov.ie/en/news/7e0924-latest-updates-on-covid-19-coronavirus/"119 scraper.scroll_height = 500120 #scraper.training_data = {"cases": "2415", "deaths": "36"}121 group_country_classes[scraper.get_index_name()] = scraper122 # Germany123 scraper = NovelScraperAuto()124 scraper.country_name = "Germany"125 scraper.province_name = "Germany"126 scraper.iso_code = "DE"127 scraper.javascript_required = True128 scraper.source_website = "https://interaktiv.tagesspiegel.de/lab/karte-sars-cov-2-in-deutschland-landkreise/"129 #scraper.training_data = {"cases": "54268", "deaths": "398", "recovered":"3781"}130 group_country_classes[scraper.get_index_name()] = scraper131 # France132 # Can't scrape images. 
Should probably do a custom one133 scraper = NovelScraperAuto()134 scraper.country_name = "France"135 scraper.province_name = "France"136 scraper.iso_code = "FR"137 scraper.javascript_required = True138 scraper.source_website = "https://www.santepubliquefrance.fr/maladies-et-traumatismes/maladies-et-infections-respiratoires/infection-a-coronavirus/articles/infection-au-nouveau-coronavirus-sars-cov-2-covid-19-france-et-monde"139 scraper.scroll_height = 2000140 #scraper.training_data = {"cases": "54268", "deaths": "398", "recovered":"3781"}141 group_country_classes[scraper.get_index_name()] = scraper142 # Spain143 scraper = NovelScraperAuto()144 scraper.country_name = "Spain"145 scraper.province_name = "Spain"146 scraper.iso_code = "ES"147 scraper.javascript_required = True148 scraper.optimize_min_max_index_ratio = 0.1149 scraper.source_website = "https://www.rtve.es/noticias/20200328/mapa-del-coronavirus-espana/2004681.shtml"150 #scraper.training_data = {"cases": "73235", "deaths": "5982", "recovered":"12285", "intensive_care":"4575"}151 group_country_classes[scraper.get_index_name()] = scraper152 # Italy153 scraper = NovelScraperAuto()154 scraper.country_name = "Italy"155 scraper.province_name = "Italy"156 scraper.iso_code = "IT"157 scraper.javascript_required = True158 scraper.optimize_min_max_index_ratio = 0.05159 scraper.source_website = "https://datastudio.google.com/u/0/reporting/91350339-2c97-49b5-92b8-965996530f00/page/RdlHB"160 #scraper.training_data = {"cases": "92472", "deaths": "10023", "recovered":"12384", "intensive_care":"3856", "hospitalsied":"30532", "tested":"429526"}161 group_country_classes[scraper.get_index_name()] = scraper162 # Portugal163 scraper = NovelScraperAuto()164 scraper.country_name = "Portugal"165 scraper.province_name = "Portugal"166 scraper.iso_code = "PT"167 scraper.javascript_required = True168 scraper.report_website = "https://covid19.min-saude.pt/ponto-de-situacao-atual-em-portugal/"169 # Mobile 
https://esriportugal.maps.arcgis.com/apps/opsdashboard/index.html#/e9dd1dea8d1444b985d38e58076d197a170 scraper.source_website = "https://esriportugal.maps.arcgis.com/apps/opsdashboard/index.html#/acf023da9a0b4f9dbb2332c13f635829"171 #scraper.training_data = {"cases": "5170", "deaths": "100", "recovered":"43"}172 group_country_classes[scraper.get_index_name()] = scraper173 # The Netherlands174 scraper = NovelScraperAuto()175 scraper.country_name = "Netherlands"176 scraper.province_name = "Netherlands"177 scraper.iso_code = "NL"178 scraper.optimize_min_max_index_ratio = 0.2179 scraper.source_website = "https://www.rivm.nl/nieuws/actuele-informatie-over-coronavirus"180 #scraper.training_data = {"cases": "9762", "deaths": "639", "hospitalised":"2954"}181 group_country_classes[scraper.get_index_name()] = scraper182 # Bad, all the stuff are on different links. Best is https://datastudio.google.com/embed/reporting/c14a5cfc-cab7-4812-848c-0369173148ab/page/tpRKB183 scraper = NovelScraperAuto()184 scraper.country_name = "Belgium"185 scraper.province_name = "Belgium"186 scraper.iso_code = "BE"187 scraper.optimize_min_max_index_ratio = 0.2188 #scraper.source_website = "https://www.info-coronavirus.be/fr/2020/03/24/526-nouvelles-infections-au-covid-19/"189 #scraper.training_data = {"cases": "4269", "deaths": "122", "recovered":"410", "hospitalised":"1859", "intensive_care":"381"}190 group_country_classes[scraper.get_index_name()] = scraper191 # Switzerland192 scraper = NovelScraperAuto()193 scraper.country_name = "Switzerland"194 scraper.province_name = "Switzerland"195 scraper.iso_code = "CH"196 scraper.optimize_min_max_index_ratio = 0.2197 scraper.source_website = "https://www.bag.admin.ch/bag/en/home/krankheiten/ausbrueche-epidemien-pandemien/aktuelle-ausbrueche-epidemien/novel-cov/situation-schweiz-und-international.html"198 scraper.scroll_height = 300199 #scraper.training_data = {"cases": "13213", "deaths": "235"}200 group_country_classes[scraper.get_index_name()] = 
scraper201 # Austria202 scraper = NovelScraperAuto()203 scraper.country_name = "Austria"204 scraper.province_name = "Austria"205 scraper.iso_code = "AT"206 scraper.optimize_min_max_index_ratio = 0.2207 scraper.website_height = 3000208 scraper.website_width = 1800209 scraper.scroll_height = 200210 scraper.source_website = "https://www.sozialministerium.at/Informationen-zum-Coronavirus/Neuartiges-Coronavirus-(2019-nCov).html"211 #scraper.training_data = {"cases": "7995", "deaths": "68", "tested":"42750"}212 group_country_classes[scraper.get_index_name()] = scraper213 # Russia (RU)214 scraper = NovelScraperAuto()215 scraper.country_name = "Russia"216 scraper.province_name = "Russia"217 scraper.iso_code = "RU"218 scraper.source_website = "https://www.interfax.ru/chronicle/novyj-koronavirus-v-kitae.html"219 scraper.has_auto = True220 scraper.javascript_required = True221 group_country_classes[scraper.get_index_name()] = scraper222 # Poland (PL)223 scraper = NovelScraperAuto()224 scraper.country_name = "Poland"225 scraper.province_name = "Poland"226 scraper.iso_code = "PL"227 scraper.javascript_required = True228 scraper.wait_time = 5229 #scraper.source_website = "https://koronawirusunas.pl/"230 scraper.source_website = "https://pokazwirusa.pl/?fbclid=IwAR1bkCIad8H4F-2-2WgYv2pbrleRWgPeUCVDu97X-C2DV0GPfiqlxx7_z9s"231 scraper.region_source_website = "https://www.gov.pl/web/koronawirus/wykaz-zarazen-koronawirusem-sars-cov-2"232 scraper.javascript_objects = ["vueApp.$data.records"]233 scraper.key_mapping = {"cases": "Liczba", "deaths": "Liczba zgonów", "province": "Województwo"}234 group_country_classes[scraper.get_index_name()] = scraper235 # Czech Republic (CZ)236 scraper = NovelScraperAuto()237 scraper.country_name = "Czech-Republic"238 scraper.province_name = "Czech-Republic"239 scraper.iso_code = "CZ"240 scraper.source_website = "https://onemocneni-aktualne.mzcr.cz/covid-19"241 group_country_classes[scraper.get_index_name()] = scraper242 # Romania (RO)243 scraper = 
NovelScraperAuto()244 scraper.country_name = "Romania"245 scraper.province_name = "Romania"246 scraper.iso_code = "RO"247 # Secondary https://instnsp.maps.arcgis.com/apps/opsdashboard/index.html#/5eced796595b4ee585bcdba03e30c127248 scraper.source_website = "https://covid19.geo-spatial.org/dashboard/main"249 group_country_classes[scraper.get_index_name()] = scraper250 251 # Belarus (BY)252 scraper = NovelScraperAuto()253 scraper.country_name = "Belarus"254 scraper.province_name = "Belarus"255 scraper.iso_code = "BY"256 scraper.source_website = "http://stopcovid.belta.by/"257 scraper.has_auto = True258 scraper.javascript_required = True259 scraper.combine_text_numbers = False260 group_country_classes[scraper.get_index_name()] = scraper261 # Ukarine (UA) (CoronaCloud)262 scraper = NovelScraperAuto()263 scraper.country_name = "Ukraine"264 scraper.province_name = "Ukraine"265 scraper.iso_code = "UA"266 #scraper.source_website = "https://www.sozialministerium.at/Informationen-zum-Coronavirus/Neuartiges-Coronavirus-(2019-nCov).html"267 group_country_classes[scraper.get_index_name()] = scraper268 # Greece (GR) (CoronaCloud)269 scraper = NovelScraperAuto()270 scraper.country_name = "Greece"271 scraper.province_name = "Greece"272 scraper.iso_code = "GR"273 #scraper.source_website = "https://www.sozialministerium.at/Informationen-zum-Coronavirus/Neuartiges-Coronavirus-(2019-nCov).html"274 group_country_classes[scraper.get_index_name()] = scraper275 # Andorra276 scraper = NovelScraperAuto()277 scraper.country_name = "Andorra"278 scraper.province_name = "Andorra"279 scraper.iso_code = "AD"280 scraper.report_website = "https://www.govern.ad/coronavirus"281 scraper.source_website = "https://www.govern.ad/covid/taula.php"282 scraper.website_height = 200283 scraper.javascript_required = True284 scraper.has_auto = True285 group_country_classes[scraper.get_index_name()] = scraper 286 # Albania287 scraper = NovelScraperAuto()288 scraper.country_name = "Albania"289 
scraper.province_name = "Albania"290 scraper.iso_code = "AL"291 scraper.source_website = "https://coronavirus.al/statistika/"292 scraper.has_hopkins = True293 scraper.javascript_required = True294 scraper.has_auto = True295 group_country_classes[scraper.get_index_name()] = scraper296 # Bosnia and Herzegovina297 scraper = NovelScraperAuto()298 scraper.country_name = "Bosnia-and-Herzegovina"299 scraper.province_name = "Bosnia-and-Herzegovina"300 scraper.iso_code = "BA"301 scraper.report_website = "https://www.klix.ba/koronavirus-u-bih"302 scraper.source_website = "https://www.klix.ba/corona/"303 scraper.has_auto = True304 scraper.javascript_required = True305 group_country_classes[scraper.get_index_name()] = scraper306 # Bulgaria307 scraper = NovelScraperAuto()308 scraper.country_name = "Bulgaria"309 scraper.province_name = "Bulgaria"310 scraper.iso_code = "BG"311 scraper.source_website = "https://www.mh.government.bg/bg/informaciya-za-grazhdani/potvrdeni-sluchai-na-koronavirus-na-teritoriyata-na-r-blgariya/"312 scraper.has_auto = True313 group_country_classes[scraper.get_index_name()] = scraper314 # Croatia315 scraper = NovelScraperHR()316 scraper.has_auto = True317 group_country_classes[scraper.get_index_name()] = scraper318 # Cyprus319 scraper = NovelScraperAuto()320 scraper.country_name = "Cyprus"321 scraper.province_name = "Cyprus"322 scraper.iso_code = "CY"323 #scraper.source_website = "https://www.sozialministerium.at/Informationen-zum-Coronavirus/Neuartiges-Coronavirus-(2019-nCov).html"324 group_country_classes[scraper.get_index_name()] = scraper325 # Hungary326 scraper = NovelScraperHU()327 group_country_classes[scraper.get_index_name()] = scraper328 # Holy-see329 scraper = NovelScraperAuto()330 scraper.country_name = "Holy-see"331 scraper.province_name = "Holy-see"332 scraper.iso_code = "VA"333 #scraper.source_website = "https://www.sozialministerium.at/Informationen-zum-Coronavirus/Neuartiges-Coronavirus-(2019-nCov).html"334 
group_country_classes[scraper.get_index_name()] = scraper335 # Kosovo336 scraper = NovelScraperAuto()337 scraper.country_name = "Kosovo"338 scraper.province_name = "Kosovo"339 scraper.iso_code = "XK"340 scraper.source_website = "https://kosova.health/en/"341 scraper.scroll_height = 600342 scraper.has_auto = True343 group_country_classes[scraper.get_index_name()] = scraper344 # Liechtenstein345 scraper = NovelScraperAuto()346 scraper.country_name = "Liechtenstein"347 scraper.province_name = "Liechtenstein"348 scraper.iso_code = "LI"349 #scraper.source_website = "https://www.sozialministerium.at/Informationen-zum-Coronavirus/Neuartiges-Coronavirus-(2019-nCov).html"350 group_country_classes[scraper.get_index_name()] = scraper351 # Luxembourg352 scraper = NovelScraperAuto()353 scraper.country_name = "Luxembourg"354 scraper.province_name = "Luxembourg"355 scraper.iso_code = "LU"356 scraper.source_website = "https://msan.gouvernement.lu/en/dossiers/2020/corona-virus.html"357 scraper.has_auto = True358 scraper.scroll_height = 300359 group_country_classes[scraper.get_index_name()] = scraper360 # Malta361 scraper = NovelScraperAuto()362 scraper.country_name = "Malta"363 scraper.province_name = "Malta"364 scraper.iso_code = "MT"365 scraper.report_website = "https://www.maltatoday.com.mt/"366 scraper.source_website = "https://e.infogram.com/ca2bde8e-e60d-49a1-8faa-5beab8e542ab?parent_url=https%3A%2F%2Fwww.maltatoday.com.mt%2F&src=embed#async_embed"367 scraper.javascript_required = True368 scraper.has_auto = True369 scraper.website_height = 300370 scraper.combine_text_numbers = False371 scraper.overwrite_model_surrounding_numbers = True372 group_country_classes[scraper.get_index_name()] = scraper373 # Moldova374 scraper = NovelScraperAuto()375 scraper.country_name = "Moldova"376 scraper.province_name = "Moldova"377 scraper.iso_code = "MD"378 scraper.source_website = "http://gismoldova.maps.arcgis.com/apps/opsdashboard/index.html#/d274da857ed345efa66e1fbc959b021b"379 
scraper.javascript_required = True380 scraper.has_auto = True381 group_country_classes[scraper.get_index_name()] = scraper382 # Monaco383 scraper = NovelScraperAuto()384 scraper.country_name = "Monaco"385 scraper.province_name = "Monaco"386 scraper.iso_code = "MC"387 #scraper.source_website = "https://www.sozialministerium.at/Informationen-zum-Coronavirus/Neuartiges-Coronavirus-(2019-nCov).html"388 group_country_classes[scraper.get_index_name()] = scraper389 # Montenegro390 scraper = NovelScraperAuto()391 scraper.country_name = "Montenegro"392 scraper.province_name = "Montenegro"393 scraper.iso_code = "ME"394 #scraper.source_website = "https://www.sozialministerium.at/Informationen-zum-Coronavirus/Neuartiges-Coronavirus-(2019-nCov).html"395 group_country_classes[scraper.get_index_name()] = scraper396 # North Macedonia397 scraper = NovelScraperAuto()398 scraper.country_name = "North-Macedonia"399 scraper.province_name = "North-Macedonia"400 scraper.iso_code = "MK"401 #scraper.source_website = "https://www.sozialministerium.at/Informationen-zum-Coronavirus/Neuartiges-Coronavirus-(2019-nCov).html"402 group_country_classes[scraper.get_index_name()] = scraper403 # San Marino404 scraper = NovelScraperAuto()405 scraper.country_name = "San-Marino"406 scraper.province_name = "San-Marino"407 scraper.iso_code = "SM"408 #scraper.source_website = "https://www.sozialministerium.at/Informationen-zum-Coronavirus/Neuartiges-Coronavirus-(2019-nCov).html"409 group_country_classes[scraper.get_index_name()] = scraper410 # Slovenia411 scraper = NovelScraperAuto()412 scraper.country_name = "Slovenia"413 scraper.province_name = "Slovenia"414 scraper.iso_code = "SI"415 #scraper.source_website = "https://www.sozialministerium.at/Informationen-zum-Coronavirus/Neuartiges-Coronavirus-(2019-nCov).html"416 group_country_classes[scraper.get_index_name()] = scraper417 # Serbia418 scraper = NovelScraperAuto()419 scraper.country_name = "Serbia"420 scraper.province_name = "Serbia"421 scraper.iso_code 
= "RS"422 scraper.has_auto = True423 scraper.javascript_required = True424 scraper.source_website = "https://covid19.rs/"425 group_country_classes[scraper.get_index_name()] = scraper426 # Slovakia427 scraper = NovelScraperAuto()428 scraper.country_name = "Slovakia"429 scraper.province_name = "Slovakia"430 scraper.iso_code = "SK"431 #scraper.source_website = "https://www.sozialministerium.at/Informationen-zum-Coronavirus/Neuartiges-Coronavirus-(2019-nCov).html"432 group_country_classes[scraper.get_index_name()] = scraper 433 for country_name, scraper in group_country_classes.items(): #Values applied to all countries within this function434 scraper.group_name = "Europe"435 country_classes.update(group_country_classes)436 437def init_us_scrapers():438 group_country_classes = {}439 country_name = "United-States"440 # Alabama441 scraper = NovelScraperAuto()442 scraper.country_name = country_name443 scraper.province_name = "Alabama" 444 scraper.iso_code = "AL"445 scraper.source_website = "https://alpublichealth.maps.arcgis.com/apps/opsdashboard/index.html#/6d2771faa9da4a2786a509d82c8cf0f7"446 group_country_classes[scraper.get_index_name()] = scraper447 # Alaska448 scraper = NovelScraperAuto()449 scraper.country_name = country_name450 scraper.province_name = "Alaska" 451 scraper.iso_code = "AK"452 scraper.javascript_required = True453 scraper.report_website = "https://coronavirus-response-alaska-dhss.hub.arcgis.com/"454 scraper.source_website = "https://www.arcgis.com/apps/opsdashboard/index.html#/83c63cfec8b24397bdf359f49b11f218"455 scraper.scroll_height = 800456 scraper.website_height = 1200457 group_country_classes[scraper.get_index_name()] = scraper458 459 # Arizona460 scraper = NovelScraperAuto()461 scraper.country_name = country_name462 scraper.province_name = "Arizona" 463 scraper.iso_code = "AZ"464 scraper.javascript_required = True465 scraper.report_website = 
"https://www.azdhs.gov/preparedness/epidemiology-disease-control/infectious-disease-epidemiology/index.php#novel-coronavirus-home"466 scraper.source_website = "https://tableau.azdhs.gov/views/UpdatedCOVIDdashboardV3/Story1?:embed=y&:showVizHome=no&:host_url=https%3A%2F%2Ftableau.azdhs.gov%2F&:embed_code_version=3&:tabs=no&:toolbar=no&:showAppBanner=false&:display_spinner=no&iframeSizedToWindow=true&:loadOrderID=0"467 group_country_classes[scraper.get_index_name()] = scraper468 # Arkansas469 scraper = NovelScraperAuto()470 scraper.country_name = country_name471 scraper.province_name = "Arkansas" 472 scraper.iso_code = "AR"473 scraper.report_website = "https://www.healthy.arkansas.gov/programs-services/topics/novel-coronavirus"474 scraper.source_website = "https://adem.maps.arcgis.com/apps/opsdashboard/index.html#/f533ac8a8b6040e5896b05b47b17a647"475 scraper.javascript_required = True476 group_country_classes[scraper.get_index_name()] = scraper477 # California (not official source but updates faster)478 scraper = NovelScraperAuto()479 scraper.country_name = country_name480 scraper.province_name = "California" 481 scraper.iso_code = "CA"482 scraper.source_website = "https://www.latimes.com/projects/california-coronavirus-cases-tracking-outbreak/"483 group_country_classes[scraper.get_index_name()] = scraper484 # Colorado485 scraper = NovelScraperAuto()486 scraper.country_name = country_name487 scraper.province_name = "Colorado" 488 scraper.iso_code = "CO"489 scraper.report_website = "https://covid19.colorado.gov/case-data"490 scraper.source_website = "https://public.tableau.com/views/COVID19_CaseSummary_TP/COVID-19CaseSummary-TP?:embed=y&:showVizHome=no&:host_url=https%3A%2F%2Fpublic.tableau.com%2F&:embed_code_version=3&:tabs=no&:toolbar=yes&:animate_transition=yes&:display_static_image=no&:display_spinner=no&:display_overlay=yes&:display_count=yes&publish=yes&:loadOrderID=0"491 group_country_classes[scraper.get_index_name()] = scraper492 # Conneticut (primary source 
is in pdf, ugh)493 scraper = NovelScraperAuto()494 scraper.country_name = country_name495 scraper.province_name = "Connecticut" 496 scraper.iso_code = "CT"497 scraper.has_auto = True498 scraper.source_website = "https://www.nytimes.com/interactive/2020/us/connecticut-coronavirus-cases.html"499 #scraper.source_website = https://portal.ct.gov/coronavirus500 group_country_classes[scraper.get_index_name()] = scraper501 # Delaware502 scraper = NovelScraperAuto()503 scraper.country_name = country_name504 scraper.province_name = "Delaware" 505 scraper.iso_code = "DE"506 scraper.report_website = "https://coronavirus.delaware.gov/"507 scraper.source_website = "https://dshs.maps.arcgis.com/apps/opsdashboard/index.html#/f2b22615feeb442aa2975900f8f2d4a1"508 group_country_classes[scraper.get_index_name()] = scraper509 # Florida510 scraper = NovelScraperAuto()511 scraper.country_name = country_name512 scraper.province_name = "Florida" 513 scraper.iso_code = "FL"514 scraper.javascript_required = True515 scraper.has_auto = True516 scraper.wait_time = 14517 scraper.source_website = "https://fdoh.maps.arcgis.com/apps/opsdashboard/index.html#/8d0de33f260d444c852a615dc7837c86"518 group_country_classes[scraper.get_index_name()] = scraper519 # Georgia520 scraper = NovelScraperAuto()521 scraper.country_name = country_name522 scraper.province_name = "Georgia" 523 scraper.iso_code = "GA"524 scraper.report_website = "https://dph.georgia.gov/covid-19-daily-status-report"525 scraper.javascript_required = True526 scraper.has_auto = True527 scraper.scroll_height = 200528 scraper.source_website = "https://d20s4vd27d0hk0.cloudfront.net/?initialWidth=746&childId=covid19dashdph&parentTitle=COVID-19%20Daily%20Status%20Report%20%7C%20Georgia%20Department%20of%20Public%20Health&parentUrl=https%3A%2F%2Fdph.georgia.gov%2Fcovid-19-daily-status-report"529 group_country_classes[scraper.get_index_name()] = scraper530 # Hawaii531 scraper = NovelScraperAuto()532 scraper.country_name = country_name533 
scraper.province_name = "Hawaii" 534 scraper.iso_code = "HI"535 scraper.source_website = "https://health.hawaii.gov/coronavirusdisease2019/"536 group_country_classes[scraper.get_index_name()] = scraper537 # Idaho538 scraper = NovelScraperAuto()539 scraper.country_name = country_name540 scraper.province_name = "Idaho" 541 scraper.iso_code = "ID"542 scraper.javascript_required = True543 scraper.report_website = "https://coronavirus.idaho.gov/"544 scraper.source_website = "https://public.tableau.com/profile/idaho.division.of.public.health#!/vizhome/DPHIdahoCOVID-19Dashboard_V2/DPHCOVID19Dashboard2"545 group_country_classes[scraper.get_index_name()] = scraper546 # Illinois547 scraper = NovelScraperAuto()548 scraper.country_name = country_name549 scraper.province_name = "Illinois" 550 scraper.iso_code = "IL"551 scraper.source_website = "http://www.dph.illinois.gov/topics-services/diseases-and-conditions/diseases-a-z-list/coronavirus"552 scraper.scroll_height = 200553 scraper.javascript_required = True554 scraper.has_auto = True555 group_country_classes[scraper.get_index_name()] = scraper556 # Indiana557 scraper = NovelScraperAuto()558 scraper.country_name = country_name559 scraper.province_name = "Indiana" 560 scraper.iso_code = "IN"561 scraper.javascript_required = True562 scraper.source_website = "https://coronavirus.in.gov/"563 scraper.scroll_height = 2200564 scraper.website_width = 1500565 group_country_classes[scraper.get_index_name()] = scraper566 # Iowa (No deaths reported here?)567 scraper = NovelScraperAuto()568 scraper.country_name = country_name569 scraper.province_name = "Iowa" 570 scraper.iso_code = "IA"571 #scraper.report_website = https://idph.iowa.gov/Emerging-Health-Issues/Novel-Coronavirus572 scraper.source_website = "https://idph.iowa.gov/Emerging-Health-Issues/Novel-Coronavirus"573 scraper.javascript_required = True574 scraper.wait_time = 7575 scraper.website_height = 1200576 scraper.scroll_height = 200577 
group_country_classes[scraper.get_index_name()] = scraper578 # Kansas579 scraper = NovelScraperAuto()580 scraper.country_name = country_name581 scraper.province_name = "Kansas" 582 scraper.iso_code = "KS"583 scraper.source_website = "https://public.tableau.com/profile/kdhe.epidemiology#!/vizhome/COVID-19Data_15851817634470/KSCOVID-19CaseData"584 scraper.javascript_required = True585 group_country_classes[scraper.get_index_name()] = scraper586 # Kentucky587 scraper = NovelScraperAuto()588 scraper.country_name = country_name589 scraper.province_name = "Kentucky" 590 scraper.iso_code = "KY"591 scraper.source_website = "https://govstatus.egov.com/kycovid19"592 group_country_classes[scraper.get_index_name()] = scraper593 # Louisiana594 scraper = NovelScraperAuto()595 scraper.country_name = country_name596 scraper.province_name = "Louisiana" 597 scraper.iso_code = "LA"598 scraper.source_website = "https://www.arcgis.com/apps/opsdashboard/index.html#/69b726e2b82e408f89c3a54f96e8f776"599 scraper.report_website = "http://ldh.la.gov/Coronavirus/"600 scraper.javascript_required = True601 scraper.has_auto = True602 group_country_classes[scraper.get_index_name()] = scraper603 # Maine604 scraper = NovelScraperAuto()605 scraper.country_name = country_name606 scraper.province_name = "Maine" 607 scraper.iso_code = "ME"608 scraper.source_website = "https://www.maine.gov/dhhs/mecdc/infectious-disease/epi/airborne/coronavirus.shtml"609 scraper.scroll_height = 400610 scraper.website_height = 1200611 group_country_classes[scraper.get_index_name()] = scraper612 # Maryland613 scraper = NovelScraperAuto()614 scraper.country_name = country_name615 scraper.province_name = "Maryland" 616 scraper.iso_code = "MD"617 scraper.source_website = "https://maryland.maps.arcgis.com/apps/opsdashboard/index.html#/c34e541dd8b742d993159dbebb094d8b"618 scraper.report_website = "https://coronavirus.maryland.gov/"619 scraper.javascript_required = True620 group_country_classes[scraper.get_index_name()] = 
scraper621 # Massachusetts622 scraper = NovelScraperAuto()623 scraper.country_name = country_name624 scraper.province_name = "Massachusetts" 625 scraper.iso_code = "MA"626 scraper.source_website = "https://www.mass.gov/info-details/covid-19-cases-quarantine-and-monitoring#covid-19-cases-in-massachusetts-"627 scraper.scroll_height = 900628 scraper.website_height = 1200629 scraper.website_width=1600630 scraper.has_auto = True631 scraper.adjust_scraped_deaths_from_sheet = True632 group_country_classes[scraper.get_index_name()] = scraper633 # Michigan634 scraper = NovelScraperAuto()635 scraper.country_name = country_name636 scraper.province_name = "Michigan" 637 scraper.iso_code = "MI"638 scraper.source_website = "https://www.michigan.gov/coronavirus/0,9753,7-406-98163_98173---,00.html"639 scraper.website_height = 1200640 scraper.scroll_height = 2200641 scraper.has_auto = True642 group_country_classes[scraper.get_index_name()] = scraper643 # Minnesota644 scraper = NovelScraperAuto()645 scraper.country_name = country_name646 scraper.province_name = "Minnesota" 647 scraper.iso_code = "MN"648 scraper.report_website = "https://www.health.state.mn.us/diseases/coronavirus/situation.html"649 scraper.source_website = "https://mndps.maps.arcgis.com/apps/opsdashboard/index.html#/f28f84968c1148129932c3bebb1d3a1a"650 scraper.javascript_required = True651 group_country_classes[scraper.get_index_name()] = scraper652 # Mississippi653 scraper = NovelScraperAuto()654 scraper.country_name = country_name655 scraper.province_name = "Mississippi" 656 scraper.iso_code = "MS"657 scraper.source_website = "https://msdh.ms.gov/msdhsite/_static/14,0,420.html"658 scraper.scroll_height = 1300659 scraper.website_height = 1200660 scraper.javascript_required = True661 group_country_classes[scraper.get_index_name()] = scraper662 # Missouri663 scraper = NovelScraperAuto()664 scraper.country_name = country_name665 scraper.province_name = "Missouri" 666 scraper.iso_code = "MO"667 scraper.source_website = 
"https://health.mo.gov/living/healthcondiseases/communicable/novel-coronavirus/results.php"668 group_country_classes[scraper.get_index_name()] = scraper669 # Montana670 scraper = NovelScraperAuto()671 scraper.country_name = country_name672 scraper.province_name = "Montana" 673 scraper.iso_code = "MT"674 scraper.source_website = "https://montana.maps.arcgis.com/apps/MapSeries/index.html?appid=7c34f3412536439491adcc2103421d4b"675 scraper.javascript_required = True676 group_country_classes[scraper.get_index_name()] = scraper677 # Nebraska678 scraper = NovelScraperAuto()679 scraper.country_name = country_name680 scraper.province_name = "Nebraska" 681 scraper.iso_code = "NE"682 scraper.source_website = "https://nebraska.maps.arcgis.com/apps/opsdashboard/index.html#/4213f719a45647bc873ffb58783ffef3"683 scraper.javascript_required = True684 group_country_classes[scraper.get_index_name()] = scraper685 # Nevada686 scraper = NovelScraperAuto()687 scraper.country_name = country_name688 scraper.province_name = "Nevada" 689 scraper.iso_code = "NV"690 scraper.source_website = "https://app.powerbigov.us/view?r=eyJrIjoiMjA2ZThiOWUtM2FlNS00MGY5LWFmYjUtNmQwNTQ3Nzg5N2I2IiwidCI6ImU0YTM0MGU2LWI4OWUtNGU2OC04ZWFhLTE1NDRkMjcwMzk4MCJ9"691 scraper.javascript_required = True692 scraper.wait_time = 10693 group_country_classes[scraper.get_index_name()] = scraper694 # New Hampshire695 scraper = NovelScraperAuto()696 scraper.country_name = country_name697 scraper.province_name = "New-Hampshire" 698 scraper.iso_code = "NH"699 scraper.source_website = "https://www.nh.gov/covid19/"700 scraper.scroll_height = 200701 group_country_classes[scraper.get_index_name()] = scraper702 # New Jersey703 scraper = NovelScraperAuto()704 scraper.country_name = country_name705 scraper.province_name = "New-Jersey" 706 scraper.iso_code = "NJ"707 scraper.report_website = "https://covid19.nj.gov/#live-updates"708 scraper.source_website = 
"https://maps.arcgis.com/apps/opsdashboard/index.html#/ec4bffd48f7e495182226eee7962b422"709 scraper.javascript_required = True710 scraper.has_auto = True711 group_country_classes[scraper.get_index_name()] = scraper712 # New Mexico713 scraper = NovelScraperAuto()714 scraper.country_name = country_name715 scraper.province_name = "New-Mexico" 716 scraper.iso_code = "NM"717 scraper.source_website = "https://cv.nmhealth.org/"718 scraper.scroll_height = 100719 scraper.website_height = 1200720 group_country_classes[scraper.get_index_name()] = scraper721 # New York722 scraper = NovelScraperAuto()723 scraper.country_name = country_name724 scraper.province_name = "New-York"725 scraper.iso_code = "NY"726 scraper.source_website = "https://www.nbcnewyork.com/news/local/how-many-in-tri-state-have-tested-positive-for-coronavirus-here-are-latest-cases-by-the-numbers/2317721/"727 scraper.javascript_required = True728 scraper.has_auto = True729 scraper.scroll_height = 3400730 group_country_classes[scraper.get_index_name()] = scraper731 # North Carolina732 scraper = NovelScraperAuto()733 scraper.country_name = country_name734 scraper.province_name = "North-Carolina" 735 scraper.iso_code = "NC"736 scraper.source_website = "https://www.ncdhhs.gov/divisions/public-health/covid19/covid-19-nc-case-count"737 scraper.scroll_height = 100738 scraper.website_height = 1200739 group_country_classes[scraper.get_index_name()] = scraper740 # North Dakota741 scraper = NovelScraperAuto()742 scraper.country_name = country_name743 scraper.province_name = "North-Dakota" 744 scraper.iso_code = "ND"745 scraper.source_website = "https://www.health.nd.gov/diseases-conditions/coronavirus/north-dakota-coronavirus-cases"746 scraper.website_height = 1200747 scraper.scroll_height = 100748 group_country_classes[scraper.get_index_name()] = scraper749 # Ohio750 scraper = NovelScraperAuto()751 scraper.country_name = country_name752 scraper.province_name = "Ohio" 753 scraper.iso_code = "OH"754 scraper.source_website 
= "https://coronavirus.ohio.gov/wps/portal/gov/covid-19/home"755 scraper.scroll_height = 600756 group_country_classes[scraper.get_index_name()] = scraper757 # Oklahoma758 scraper = NovelScraperAuto()759 scraper.country_name = country_name760 scraper.province_name = "Oklahoma" 761 scraper.iso_code = "OK"762 scraper.source_website = "https://coronavirus.health.ok.gov/"763 scraper.scroll_height = 500764 scraper.website_width = 1600765 group_country_classes[scraper.get_index_name()] = scraper766 # Oregon767 scraper = NovelScraperAuto()768 scraper.country_name = country_name769 scraper.province_name = "Oregon" 770 scraper.iso_code = "OR"771 scraper.source_website = "https://govstatus.egov.com/OR-OHA-COVID-19"772 scraper.scroll_height = 1000773 scraper.website_width = 1600774 group_country_classes[scraper.get_index_name()] = scraper775 # Pennsylvania776 scraper = NovelScraperAuto()777 scraper.country_name = country_name778 scraper.province_name = "Pennsylvania" 779 scraper.iso_code = "PA"780 scraper.source_website = "https://www.health.pa.gov/topics/disease/coronavirus/Pages/Cases.aspx"781 scraper.scroll_height = 200782 scraper.has_auto = True783 group_country_classes[scraper.get_index_name()] = scraper784 # Rhode Island785 scraper = NovelScraperAuto()786 scraper.country_name = country_name787 scraper.province_name = "Rhode-Island" 788 scraper.iso_code = "RI"789 scraper.source_website = "https://health.ri.gov/data/covid-19/"790 scraper.scroll_height = 300791 scraper.javascript_required = True792 group_country_classes[scraper.get_index_name()] = scraper793 # South Carlonia794 scraper = NovelScraperAuto()795 scraper.country_name = country_name796 scraper.province_name = "South-Carolina" 797 scraper.iso_code = "SC"798 scraper.report_website = "https://scdhec.gov/infectious-diseases/viruses/coronavirus-disease-2019-covid-19/testing-sc-data-covid-19"799 scraper.source_website = "https://sc-dhec.maps.arcgis.com/apps/opsdashboard/index.html#/3732035614af4246877e20c3a496e397"800 
scraper.javascript_required = True801 group_country_classes[scraper.get_index_name()] = scraper802 # South Dakota803 scraper = NovelScraperAuto()804 scraper.country_name = country_name805 scraper.province_name = "South-Dakota" 806 scraper.iso_code = "SD"807 scraper.source_website = "https://doh.sd.gov/news/Coronavirus.aspx"808 scraper.scroll_height = 1200809 group_country_classes[scraper.get_index_name()] = scraper810 # Tennessee811 scraper = NovelScraperAuto()812 scraper.country_name = country_name813 scraper.province_name = "Tennessee" 814 scraper.iso_code = "TN"815 scraper.source_website = "https://www.tn.gov/health/cedep/ncov.html"816 scraper.scroll_height = 1150817 scraper.website_height = 1200818 scraper.javascript_required = True819 group_country_classes[scraper.get_index_name()] = scraper820 # Texas821 scraper = NovelScraperAuto()822 scraper.country_name = country_name823 scraper.province_name = "Texas" 824 scraper.iso_code = "TX"825 scraper.source_website = "https://txdshs.maps.arcgis.com/apps/opsdashboard/index.html#/ed483ecd702b4298ab01e8b9cafc8b83"826 scraper.javascript_required = True827 scraper.has_auto = True828 group_country_classes[scraper.get_index_name()] = scraper829 # Utah830 scraper = NovelScraperAuto()831 scraper.country_name = country_name832 scraper.province_name = "Utah" 833 scraper.iso_code = "UT"834 scraper.source_website = "https://coronavirus-dashboard.utah.gov/"835 scraper.javascript_required = True836 group_country_classes[scraper.get_index_name()] = scraper837 # Vermont838 scraper = NovelScraperAuto()839 scraper.country_name = country_name840 scraper.province_name = "Vermont" 841 scraper.iso_code = "VT"842 scraper.source_website = "https://www.nytimes.com/interactive/2020/us/vermont-coronavirus-cases.html"843 scraper.javascript_required = True844 group_country_classes[scraper.get_index_name()] = scraper845 # Virginia846 scraper = NovelScraperAuto()847 scraper.country_name = country_name848 scraper.province_name = "Virginia" 849 
scraper.iso_code = "VA"850 scraper.source_website = "https://public.tableau.com/views/VirginiaCOVID-19Dashboard/VirginiaCOVID-19Dashboard?:embed=yes&:display_count=yes&:showVizHome=no&:toolbar=no"851 scraper.javascript_required = True852 group_country_classes[scraper.get_index_name()] = scraper853 # Washington854 scraper = NovelScraperAuto()855 scraper.country_name = country_name856 scraper.province_name = "Washington" 857 scraper.iso_code = "WA"858 scraper.report_website = "https://www.doh.wa.gov/Emergencies/Coronavirus"859 scraper.source_website = "https://msit.powerbi.com/view?r=eyJrIjoiYzQ2YmYxZmEtYjlkNy00YjNkLWEyYTEtNzJmYzU3ZGI1MmZjIiwidCI6IjcyZjk4OGJmLTg2ZjEtNDFhZi05MWFiLTJkN2NkMDExZGI0NyIsImMiOjV9"860 scraper.javascript_required = True861 scraper.has_auto = True862 scraper.wait_time = 10863 group_country_classes[scraper.get_index_name()] = scraper864 # West Virginia865 scraper = NovelScraperAuto()866 scraper.country_name = country_name867 scraper.province_name = "West-Virginia" 868 scraper.iso_code = "WV"869 scraper.report_website = "https://dhhr.wv.gov/COVID-19/Pages/default.aspx"870 scraper.source_website = "https://app.powerbigov.us/view?r=eyJrIjoiMTg3YjRkZTgtNzhlZi00MGJlLTk1MTAtN2ZhOWExZWY4OWYyIiwidCI6IjhhMjZjZjAyLTQzNGEtNDMxZS04Y2FkLTdlYWVmOTdlZjQ4NCJ9"871 scraper.javascript_required = True872 scraper.wait_time = 10873 group_country_classes[scraper.get_index_name()] = scraper874 # Wisconsin875 scraper = NovelScraperAuto()876 scraper.country_name = country_name877 scraper.province_name = "Wisconsin" 878 scraper.iso_code = "WI"879 scraper.source_website = "https://www.dhs.wisconsin.gov/outbreaks/index.htm"880 scraper.javascript_required = True881 scraper.scroll_height = 800882 group_country_classes[scraper.get_index_name()] = scraper883 # Wyoming884 scraper = NovelScraperAuto()885 scraper.country_name = country_name886 scraper.province_name = "Wyoming" 887 scraper.iso_code = "WY"888 scraper.source_website = 
"https://health.wyo.gov/publichealth/infectious-disease-epidemiology-unit/disease/novel-coronavirus/covid-19-map-and-statistics/"889 scraper.javascript_required = True890 scraper.wait_time = 10891 group_country_classes[scraper.get_index_name()] = scraper892 # District of Colombia893 scraper = NovelScraperAuto()894 scraper.country_name = country_name895 scraper.province_name = "District-of-Columbia" 896 scraper.iso_code = "DC"897 scraper.source_website = "https://coronavirus.dc.gov/page/coronavirus-data"898 group_country_classes[scraper.get_index_name()] = scraper899 # American Samoa900 scraper = NovelScraperAuto()901 scraper.country_name = country_name902 scraper.province_name = "American-Samoa" 903 scraper.iso_code = "AS"904 #scraper.source_website = 905 group_country_classes[scraper.get_index_name()] = scraper906 # Guam907 scraper = NovelScraperAuto()908 scraper.country_name = country_name909 scraper.province_name = "Guam" 910 scraper.iso_code = "GU"911 #scraper.source_website = 912 group_country_classes[scraper.get_index_name()] = scraper913 # Northern Mariana Islands914 scraper = NovelScraperAuto()915 scraper.country_name = country_name916 scraper.province_name = "Northern-Mariana-Islands" 917 scraper.iso_code = "MP"918 scraper.source_website = "https://www.chcc.gov.mp/coronavirusinformation.php"919 scraper.javascript_required = True920 group_country_classes[scraper.get_index_name()] = scraper921 # Puerto Rico922 scraper = NovelScraperAuto()923 scraper.country_name = country_name924 scraper.province_name = "Puerto-Rico" 925 scraper.iso_code = "PR"926 scraper.source_website = "https://estadisticas.pr/en/covid-19"927 scraper.javascript_required = True928 group_country_classes[scraper.get_index_name()] = scraper929 # US Virgin Islands930 scraper = NovelScraperAuto()931 scraper.country_name = country_name932 scraper.province_name = "US-Virgin-Islands" 933 scraper.iso_code = "VI"934 scraper.source_website = "https://doh.vi.gov/covid19usvi"935 scraper.scroll_height = 
500936 group_country_classes[scraper.get_index_name()] = scraper937 for country_name, scraper in group_country_classes.items(): #Values applied to all countries within this function938 scraper.group_name = "USA"939 scraper.has_covidtracking = True940 country_classes.update(group_country_classes)941def init_canada_scrapers():942 group_country_classes = {}943 country_name = "Canada"944 # Alberta945 scraper = NovelScraperAuto()946 scraper.country_name = country_name 947 scraper.province_name = "Alberta" 948 scraper.iso_code = "N/A"949 scraper.source_website = "https://covid19stats.alberta.ca/"950 group_country_classes[scraper.get_index_name()] = scraper951 # British Columbia952 scraper = NovelScraperAuto()953 scraper.country_name = country_name 954 scraper.province_name = "British-Columbia" 955 scraper.iso_code = "N/A"956 scraper.source_website = "https://governmentofbc.maps.arcgis.com/apps/opsdashboard/index.html#/11bd9b0303c64373b5680df29e5b5914"957 scraper.has_auto = True958 scraper.javascript_required = True959 scraper.wait_time = 15960 group_country_classes[scraper.get_index_name()] = scraper961 # Manitoba962 scraper = NovelScraperAuto()963 scraper.country_name = country_name 964 scraper.province_name = "Manitoba" 965 scraper.iso_code = "N/A"966 scraper.source_website = "https://www.gov.mb.ca/covid19/updates/index.html"967 scraper.scroll_height = 400968 scraper.has_auto = True969 group_country_classes[scraper.get_index_name()] = scraper970 # New Brunswick971 scraper = NovelScraperAuto()972 scraper.country_name = country_name 973 scraper.province_name = "New-Brunswick" 974 scraper.iso_code = "N/A"975 scraper.source_website = "https://www2.gnb.ca/content/gnb/en/corporate/promo/covid-19/maps_graphs.html"976 scraper.scroll_height = 600977 scraper.has_auto = True978 group_country_classes[scraper.get_index_name()] = scraper979 # Newfoundland and Labrador980 scraper = NovelScraperAuto()981 scraper.country_name = country_name982 scraper.province_name = 
"Newfoundland-and-Labrador" 983 scraper.iso_code = "N/A"984 scraper.source_website = "https://covid-19-newfoundland-and-labrador-gnl.hub.arcgis.com/"985 scraper.javascript_required = True986 scraper.has_auto = True987 group_country_classes[scraper.get_index_name()] = scraper988 # Nova Scotia989 scraper = NovelScraperAuto()990 scraper.country_name = country_name991 scraper.province_name = "Nova-Scotia" 992 scraper.iso_code = "N/A"993 scraper.source_website = "https://novascotia.ca/coronavirus/data/"994 scraper.javascript_required = True995 scraper.scroll_height = 300996 scraper.has_auto = True997 group_country_classes[scraper.get_index_name()] = scraper998 # Ontario999 scraper = NovelScraperAuto()1000 scraper.country_name = country_name1001 scraper.province_name = "Ontario" 1002 scraper.iso_code = "N/A"1003 scraper.source_website = "https://www.ontario.ca/page/2019-novel-coronavirus#2"1004 scraper.website_height = 12001005 scraper.scroll_height = 18001006 scraper.javascript_required = True1007 scraper.has_auto = True1008 group_country_classes[scraper.get_index_name()] = scraper1009 # Quebec1010 scraper = NovelScraperAuto()1011 scraper.country_name = country_name1012 scraper.province_name = "Quebec" 1013 scraper.iso_code = "N/A"1014 scraper.source_website = "https://www.inspq.qc.ca/covid-19/donnees"1015 scraper.javascript_required = True1016 scraper.has_auto = True1017 group_country_classes[scraper.get_index_name()] = scraper1018 # Saskatchewan1019 scraper = NovelScraperAuto()1020 scraper.country_name = country_name1021 scraper.province_name = "Saskatchewan" 1022 scraper.iso_code = "N/A"1023 scraper.source_website = "https://www.saskatchewan.ca/government/health-care-administration-and-provider-resources/treatment-procedures-and-guidelines/emerging-public-health-issues/2019-novel-coronavirus/cases-and-risk-of-covid-19-in-saskatchewan"1024 scraper.has_auto = True1025 group_country_classes[scraper.get_index_name()] = scraper1026 # Yukon1027 scraper = 
NovelScraperAuto()1028 scraper.country_name = country_name1029 scraper.province_name = "Yukon" 1030 scraper.iso_code = "N/A"1031 scraper.source_website = "https://yukon.ca/covid-19"1032 scraper.has_auto = True1033 group_country_classes[scraper.get_index_name()] = scraper1034 # Northwest Territories1035 scraper = NovelScraperAuto()1036 scraper.country_name = country_name1037 scraper.province_name = "Northwest-Territories" 1038 scraper.iso_code = "N/A"1039 scraper.source_website = "https://www.hss.gov.nt.ca/en/services/coronavirus-disease-covid-19"1040 scraper.scroll_height = 3001041 scraper.has_auto = True1042 group_country_classes[scraper.get_index_name()] = scraper1043 # Prince Edward Island1044 scraper = NovelScraperAuto()1045 scraper.country_name = country_name1046 scraper.province_name = "Prince-Edward-Island" 1047 scraper.iso_code = "N/A"1048 scraper.has_auto = True1049 scraper.source_website = "https://www.princeedwardisland.ca/en/information/health-and-wellness/pei-covid-19-testing-data"1050 group_country_classes[scraper.get_index_name()] = scraper1051 # Nunavut (No JH data?)1052 scraper = NovelScraperAuto()1053 scraper.country_name = country_name1054 scraper.province_name = "Nunavut" 1055 scraper.iso_code = "N/A"1056 scraper.source_website = "https://gov.nu.ca/health/information/covid-19-novel-coronavirus"1057 scraper.scroll_height = 3001058 scraper.has_auto = True1059 group_country_classes[scraper.get_index_name()] = scraper1060 for country_name, scraper in group_country_classes.items(): #Values applied to all countries within this function1061 scraper.group_name = "Canada"1062 country_classes.update(group_country_classes)1063def create_country_aliases():1064 for country_name, scraper in country_classes.items():1065 country_aliases[scraper.province_name.lower()] = country_name1066 for country_name, scraper in country_classes.items():1067 country_aliases_reverse[country_name] = scraper.province_name.lower()1068 
country_aliases_extended_reverse[scraper.get_pretty_name()] = scraper.province_name.lower()1069 country_aliases_extended_reverse[country_name] = scraper.province_name.lower()...

Full Screen

Full Screen

RedisHelper.py

Source:RedisHelper.py Github

copy

Full Screen

...12 ip = ip or proxy.ip13 port = port or proxy.port14 protocal = protocal or proxy.protocol15 return "proxy::{}:{}:{}".format(ip, port, protocal)16 def get_index_name(self, index_name, value=None):17 if index_name == 'score':18 return 'index::score'19 return "index::{}:{}".format(index_name, value)20 def get_proxy_by_name(self, name):21 pd = self.redis.hgetall(name)22 if pd:23 return Proxy(**{k.decode('utf8'): v.decode('utf8') for k, v in pd.items()})24 def init_db(self, url=None):25 self.redis = Redis.from_url(url or self.redis_url)26 def drop_db(self):27 return self.redis.flushdb()28 def get_keys(self, conditions):29 select_keys = {self.get_index_name(key, conditions[key]) for key in conditions.keys() if30 key in self.index_names}31 if 'ip' in conditions and 'port' in conditions:32 return self.redis.keys(self.get_proxy_name(conditions['ip'], conditions['port'], '*'))33 if select_keys:34 return [name.decode('utf8') for name in self.redis.sinter(keys=select_keys)]35 return []36 def insert(self, value):37 proxy = Proxy(ip=value['ip'], port=value['port'], types=value['types'], protocol=value['protocol'],38 country=value['country'], area=value['area'],39 speed=value['speed'], score=value.get('score', config.DEFAULT_SCORE))40 mapping = proxy.__dict__41 for k in list(mapping.keys()):42 if k.startswith('_'):43 mapping.pop(k)44 object_name = self.get_proxy_name(proxy=proxy)45 # 存结构46 insert_num = self.redis.hmset(object_name, mapping)47 # 创建索引48 if insert_num > 0:49 for index_name in self.index_names:50 self.create_index(index_name, object_name, proxy)51 return insert_num52 def create_index(self, index_name, object_name, proxy):53 redis_key = self.get_index_name(index_name, getattr(proxy, index_name))54 if index_name == 'score':55 return self.redis.zadd(redis_key, object_name, int(proxy.score))56 return self.redis.sadd(redis_key, object_name)57 def delete(self, conditions):58 proxy_keys = self.get_keys(conditions)59 index_keys = self.redis.keys(u"index::*")60 if not 
proxy_keys:61 return 062 for iname in index_keys:63 if iname == b'index::score':64 self.redis.zrem(self.get_index_name('score'), *proxy_keys)65 else:66 self.redis.srem(iname, *proxy_keys)67 return self.redis.delete(*proxy_keys) if proxy_keys else 068 def update(self, conditions, values):69 objects = self.get_keys(conditions)70 count = 071 for name in objects:72 for k, v in values.items():73 if k == 'score':74 self.redis.zrem(self.get_index_name('score'), [name])75 self.redis.zadd(self.get_index_name('score'), name, int(v))76 self.redis.hset(name, key=k, value=v)77 count += 178 return count79 def select(self, count=None, conditions=None):80 count = (count and int(count)) or 1000 # 最多返回1000条数据81 count = 1000 if count > 1000 else count82 querys = {k: v for k, v in conditions.items() if k in self.index_names} if conditions else None83 if querys:84 objects = list(self.get_keys(querys))[:count]85 redis_name = self.get_index_name('score')86 objects.sort(key=lambda x: int(self.redis.zscore(redis_name, x)))87 else:88 objects = list(89 self.redis.zrevrangebyscore(self.get_index_name("score"), '+inf', '-inf', start=0, num=count))90 result = []91 for name in objects:92 p = self.get_proxy_by_name(name)93 result.append((p.ip, p.port, p.score))94 return result95if __name__ == '__main__':96 sqlhelper = RedisHelper()97 sqlhelper.init_db('redis://localhost:6379/9')98 proxy = {'ip': '192.168.1.1', 'port': 80, 'type': 0, 'protocol': 0, 'country': '中国', 'area': '广州', 'speed': 11.123,99 'types': 1}100 proxy2 = {'ip': 'localhost', 'port': 433, 'type': 1, 'protocol': 1, 'country': u'中国', 'area': u'广州', 'speed': 123,101 'types': 0, 'score': 100}102 assert sqlhelper.insert(proxy) == True103 assert sqlhelper.insert(proxy2) == True...

Full Screen

Full Screen

Automation Testing Tutorials

Learn to execute automation testing from scratch with the LambdaTest Learning Hub. Right from setting up the prerequisites to running your first automation test, through following best practices and diving deeper into advanced test scenarios, LambdaTest's learning hubs compile step-by-step guides to help you become proficient with different test automation frameworks, e.g., Selenium, Cypress, and TestNG.

LambdaTest Learning Hubs:

YouTube

You can also refer to the video tutorials on the LambdaTest YouTube channel to get step-by-step demonstrations from industry experts.

Run autotest automation tests on LambdaTest cloud grid

Perform automation testing on 3000+ real desktop and mobile devices online.

Try LambdaTest Now !!

Get 100 minutes of automation testing FREE!!

Next-Gen App & Browser Testing Cloud

Was this article helpful?

Helpful

Not Helpful