Scraped Python snippets from multiple sources: a fragment of flair's treebanks.py and an ArcPy protected-area (WDPA) coverage analysis script.
treebanks.py
Source:treebanks.py  
...83                    if not sentence:84                        break85                    self.sentences.append(sentence)86                self.total_sentence_count = len(self.sentences)87    def is_in_memory(self) -> bool:88        return self.in_memory89    def __len__(self):90        return self.total_sentence_count91    def __getitem__(self, index: int = 0) -> Sentence:92        # if in memory, retrieve parsed sentence93        if self.in_memory:94            sentence = self.sentences[index]95        # else skip to position in file where sentence begins96        else:97            with open(str(self.path_to_conll_file), encoding="utf-8") as file:98                file.seek(self.indices[index])99                sentence = self._read_next_sentence(file)100        return sentence101    def _read_next_sentence(self, file):...monthly_pa_analysis_script.py
Source:monthly_pa_analysis_script.py  
1# Name: Global coverage analysis script2# Purpose: A script based on an Esri model to calculate PA coverage globally, regionally and nationally as well as calculating national and PAME statistics.3# Author: Ed Lewis (edward.lewis@unep-wcmc.org)4# Created: 01/07/20195# Last updated: 02/04/20206# ArcGIS Version: Pro(2.1+)7# Python: 3.1+8#--------------------------------------------------------------------------------------------------------------------------9# Preamble: Define the script workspaces10# import arcpy modules11import arcpy12import os13import time14import urllib.request15import zipfile16import random17from arcpy import env18#start the stopwatch19start = time.clock()20# enable the overwriting of outputs21arcpy.env.overwriteOutput = True22# define root folder for all outputs23rootfolder = r"C:\Users\EdwardL\Downloads"24pactfolder = arcpy.CreateFolder_management(rootfolder,"PACT_script")25outputfolder = arcpy.CreateFolder_management(pactfolder,"PACT_outputs")26sbafolder = arcpy.CreateFileGDB_management(outputfolder,"SBA_outputs")27inputfolder = arcpy.CreateFolder_management(pactfolder,"PACT_inputs")28# create a new file geodatabase in the root folder for all the script outputs29workspace = arcpy.CreateFileGDB_management(outputfolder,"PACT_script_outputs")30# define the scratch workspace for outputs we dont want to keep, this will be deleted at the end of the script31scratchworkspace = arcpy.CreateFileGDB_management(outputfolder,"PACT_script_scratch_workspace")32#--------------------------------------------------------------------------------------------------------------------------33# Stage 0: Specify file paths and define access level34print ("Stage 0: Define the inputs and access level")35print ("Stage 0.1: Access level")36# do you have access to the restricted protected area data? 
# If you do not have access to the restricted data then put False
restricted = True
# idiomatic truthiness test instead of `== True`
if restricted:
    print("Access level = WCMC")
else:
    print("Access level = public")
# if you have access to the restricted data then copy the file paths here:
if restricted:
    # define location of restricted CHN points
    in_restrict_chn_pnt = r"I:\_Monthly_Coverage_Stats_\0_Tools\0_Test_Data\Restricted_subset_model_testing.gdb\CHN_restricted_testing_for_model_pnt"
    # define location of restricted CHN polygons
    in_restrict_chn_poly = r"I:\_Monthly_Coverage_Stats_\0_Tools\0_Test_Data\Restricted_subset_model_testing.gdb\CHN_restricted_testing_for_model"
    # define location of restricted SHN polygons
    in_restrict_shn_poly = r"I:\_Monthly_Coverage_Stats_\0_Tools\0_Test_Data\Restricted_subset_model_testing.gdb\SHN_restricted_testing_for_model"
    # define location of restricted EST polygons
    in_restrict_cdda_poly = r"I:\_Monthly_Coverage_Stats_\0_Tools\0_Test_Data\Restricted_subset_model_testing.gdb\EST_restricted_testing_for_model"
    # restricted CAN polygons
    #in_restrict_can_poly = r"E:\WDPA\0_Useful_material\monthly_pa_analysis_testing_dataset\WDPA_subset_model_testing.gdb\CAN_restricted_50_testing_for_model"
print("Stage 0.2: PAME sites")
# define the list of protected areas that have pame assessments
in_pame_sites = r"I:\_Monthly_Coverage_Stats_\0_Tools\1_Basemap\Restricted_Data.gdb\PAME_Sites"
print("Stage 0.3: OECM sites")
# define the input for the oecm data
in_oecmpoly = r"I:\_Monthly_Coverage_Stats_\0_Tools\4_OECMs\WDOECM_Apr2020_Public\WDOECM_Apr2020_Public.gdb\WDOECM_poly_Apr2020"
print("Stage 0.4 PA sites")
### THIS SECTION WORKS BUT IS MASKED OUT WHILST WE ARE RUNNING TESTS ####
# downloads the most recent version of the WDPA from Protected Planet and saves it in the root directory
#print ('Downloading the latest WDPA from Protected Planet....')
#url = r'http://wcmc.io/wdpa_current_release'
#filename = str(inputfolder) + r"\\WDPA_Latest.zip"
# (continuation of the masked-out WDPA download section)
#targetfile = urllib.request.urlretrieve(url, filename)
#print ('Unzipping the WDPA...')
# unzips the folder to enable the file geodatabase to be queried, also in the root directory
#handle = zipfile.ZipFile(filename)
#handle.extractall(str(inputfolder))
#handle.close()
#env.workspace = str(inputfolder)
# for the inputfolder, list the feature classes that are polygons and join the file path componentes to specify the exact polygon input for the script
# BUGFIX: the Walk loop header below was commented out but its indented body was
# left live, which raised an IndentationError at parse time; the body is now
# masked out together with the header (in_polygons is assigned manually below).
#for dirpath, dirnames, filenames in arcpy.da.Walk(inputfolder, datatype="FeatureClass", type="Polygon"):
#    for filename in filenames:
#        in_polygons = os.path.join(dirpath, filename)
# for the inputfolder, list the feature classes that are points and join the file path componentes to specify the exact point input for the script
for dirpath, dirnames, filenames in arcpy.da.Walk(inputfolder, datatype="FeatureClass", type="Multipoint"):
    for filename in filenames:
        in_points = os.path.join(dirpath, filename)
##########################################################################
# define the protected area point and polygon inputs [doing this manually or now]
in_points = r"I:\_Monthly_Coverage_Stats_\0_Tools\0_Test_Data\tiny_subset.gdb\CHL_Test_Pnt"
in_polygons = r"I:\_Monthly_Coverage_Stats_\0_Tools\0_Test_Data\tiny_subset.gdb\BLM_model_testing_subset"
print("Stage 0.5: Basemaps")
###### -  SCRIPTS TO AUTOMATE DOWNLOADING THE BASEMAPS - IGNORE FOR NOW####
#print('Downloading the basemaps from XXXX')
# download the basemaps from [INSERT PLACE]
#url = r'ENTER THE BASEMAP FILE PATH HERE'
#filename = str(inputfolder) + r"\\basemaps.zip"
#targetfile = urllib.request.urlretrieve(url, filename)
#print ('Unzipping the basemaps...')
# unzips the folder to enable the file geodatabase to be queried, also in the root directory
#handle = zipfile.ZipFile(filename)
#handle.extractall(str(inputfolder))
#handle.close()
###################################################################################
# define spatial basemap input - country boundaries etc
in_basemap_spat = r"I:\_Monthly_Coverage_Stats_\0_Tools\1_Basemap\Basemap.gdb\EEZv8_WVS_DIS_V3_ALL_final_v7dis_with_SDG_regions_for_models"
# define tabular basemap input - just the attribute table of in_basemap_spat
in_basemap_tab = r"I:\_Monthly_Coverage_Stats_\0_Tools\1_Basemap\Basemap.gdb\EEZv8_WVS_DIS_V3_ALL_final_v7dis_with_SDG_regions_for_models_tabular"
print("Stage 0.6: Supporting information from Github Repo")
# download the supporting files from the github repo
print('Downloading the supporting files from [Eds] GitHub repo....')
url = r'http://github.com/EdwardMLewis/wdpa-statistics/archive/master.zip'
filename = str(inputfolder) + r"\\Github_supporting_files.zip"
targetfile = urllib.request.urlretrieve(url, filename)
print('Unzipping the supporting files...')
# unzips the folder to enable the file geodatabase to be queried, also in the root directory
# BUGFIX: the original `handle.close` referenced the method without calling it, so
# the zip handle was never released; a with-statement closes it deterministically.
with zipfile.ZipFile(filename) as handle:
    handle.extractall(str(inputfolder))
# rename the unzipped folder
arcpy.Rename_management(r"C:\Users\EdwardL\Downloads\PACT_script\PACT_inputs\wdpa-statistics-master", r"C:\Users\EdwardL\Downloads\PACT_script\PACT_inputs\Github_supporting_files")
print("Stage 0.7: Projection files")
# define the projection files used to define outputs/workspaces
in_mollweideprj = str(inputfolder) + "\\Github_supporting_files\moll_projection.prj"
#--------------------------------------------------------------------------------------------------------------------------
# Stage 1: Global and Regional analysis
print("Stage 1 of 2: Global and Regional analysis")
arcpy.env.workspace = str(workspace)
# combine the point inputs together depending on whether restricted data is included or not
if restricted:
    all_points = arcpy.Merge_management([in_points, in_restrict_chn_pnt], 'all_points')
    all_polygons = arcpy.Merge_management([in_polygons, in_restrict_chn_poly, in_restrict_shn_poly, in_restrict_cdda_poly], 'all_polygons')
else:
    all_points = in_points
    all_polygons = in_polygons
# repair geometries for newly merged files
arcpy.RepairGeometry_management(all_points, "DELETE_NULL", "OGC")
arcpy.RepairGeometry_management(all_polygons, "DELETE_NULL", "OGC")
# remove the sites that have an uncertain status or have potentially very inaccurate areas
arcpy.Select_analysis(all_points, r"in_memory\all_wdpa_points_select", "STATUS in ('Adopted', 'Designated', 'Inscribed') AND NOT DESIG_ENG = 'UNESCO-MAB Biosphere Reserve'")
arcpy.Select_analysis(all_polygons, r"in_memory\all_wdpa_polygons_select", "STATUS in ('Adopted', 'Designated', 'Inscribed') AND NOT DESIG_ENG = 'UNESCO-MAB Biosphere Reserve'")
# convert the point selection into a polygon by buffering by the REP_AREA
arcpy.AddField_management(r"in_memory\all_wdpa_points_select", "radius", "DOUBLE")
arcpy.CalculateField_management(r"in_memory\all_wdpa_points_select", "radius", "math.sqrt(!REP_AREA!/math.pi )*1000", "PYTHON_9.3")
arcpy.PairwiseBuffer_analysis(r"in_memory\all_wdpa_points_select", r"in_memory\all_wdpa_points_select_buff", "radius", "", "", "GEODESIC", "")
# combine the poly selection with the buffered point selection
# the output (hereafter 'polybuffpnt') represents the starting point for the monthly release - it is all the sites we include in the analysis in one file
## IF you want to do count analyses then do it on *this* file
arcpy.Merge_management([r"in_memory\all_wdpa_points_select_buff", r"in_memory\all_wdpa_polygons_select"], "all_wdpa_polybuffpnt")
# repair the polybuffpnt
arcpy.RepairGeometry_management("all_wdpa_polybuffpnt", "DELETE_NULL", "OGC")
# randomly reassign a STATUS_YR value to those sites that dont have one
field = ['STATUS_YR']
with arcpy.da.UpdateCursor('all_wdpa_polybuffpnt', field) as cursor:
    for row in cursor:
        if row[0] == 0:
            row[0] = random.randint(1819, 2020)
            cursor.updateRow(row)
# rename the ISO3 field in the WDPA to clarify it is the WDPA ISO3 and not a basemap ISO3
arcpy.AlterField_management("all_wdpa_polybuffpnt", "ISO3", "WDPA_ISO3")
# split up the polybuffpnt using the Union tool - this splits up the WDPA like a Venn diagram
arcpy.Union_analysis("all_wdpa_polybuffpnt", "all_wdpa_polybuffpnt_union")
# repair the output of the union
arcpy.RepairGeometry_management("all_wdpa_polybuffpnt_union", "DELETE_NULL", "OGC")
# add xy coordinates for each of the ~1 million segments
arcpy.AddGeometryAttributes_management("all_wdpa_polybuffpnt_union", "CENTROID")
# add a new field to concatenate the new x and y coordinate fields
arcpy.AddField_management("all_wdpa_polybuffpnt_union", "XYco", "TEXT")
# populate this new XYco field
arcpy.CalculateField_management("all_wdpa_polybuffpnt_union", "XYco", "str(!CENTROID_X!) + str(!CENTROID_Y!)", "PYTHON_9.3")
# count how many segments share each XYco, i.e. have exactly the same geometry
arcpy.Statistics_analysis("all_wdpa_polybuffpnt_union", "xyco_count", [["XYco", "COUNT"]], "XYco")
# join (add) the count field from the summary table to the output of the union
arcpy.JoinField_management("all_wdpa_polybuffpnt_union", "XYco", "xyco_count", "XYco", "COUNT_XYco")
# segments with exactly one XYco are novel geometries with no overlaps within the WDPA
arcpy.Select_analysis("all_wdpa_polybuffpnt_union", r"in_memory\all_wdpa_polybuffpnt_union_unique", "COUNT_XYco = 1")
# select out all of the segments which have >1 XYco, i.e. geometries which overlap within the WDPA
# pull out the overlapping segments (more than one identical XYco within the WDPA)
arcpy.Select_analysis("all_wdpa_polybuffpnt_union", "all_wdpa_polybuffpnt_union_duplicates", "COUNT_XYco > 1")
# summary report: the earliest STATUS_YR for each duplicated geometry (keyed by XYco)
arcpy.Statistics_analysis("all_wdpa_polybuffpnt_union_duplicates", r"in_memory\all_wdpa_polybuffpnt_union_duplicates_earliest_sum", [["STATUS_YR", "MIN"]], "XYco")
# copy the duplicates, then collapse the copy to one row per XYco - only the
# geometry matters here, the attribute values are refreshed from the summary below
arcpy.Copy_management("all_wdpa_polybuffpnt_union_duplicates", "all_wdpa_polybuffpnt_union_duplicates_flat")
arcpy.DeleteIdentical_management("all_wdpa_polybuffpnt_union_duplicates_flat", "XYco")
# attach the earliest designation year to each flattened duplicate...
arcpy.JoinField_management("all_wdpa_polybuffpnt_union_duplicates_flat", "XYco", r"in_memory\all_wdpa_polybuffpnt_union_duplicates_earliest_sum", "XYco", "MIN_status_yr")
# ...make it the STATUS_YR, then drop the helper field
arcpy.CalculateField_management("all_wdpa_polybuffpnt_union_duplicates_flat", "STATUS_YR", "!MIN_status_yr!")
arcpy.DeleteField_management("all_wdpa_polybuffpnt_union_duplicates_flat", "MIN_status_yr")
# recombine the flattened duplicates with the unique geometries: a flat layer that
# still carries the full WDPA schema. Count analyses -> polybuffpnt; spatial analyses -> this file.
arcpy.Merge_management(["all_wdpa_polybuffpnt_union_duplicates_flat", r"in_memory\all_wdpa_polybuffpnt_union_unique"], r"in_memory\all_wdpa_polybuffpnt_union_flat")
arcpy.RepairGeometry_management(r"in_memory\all_wdpa_polybuffpnt_union_flat", "DELETE_NULL", "OGC")
# cut the flat layer by the basemap, repair, then project to Mollweide (equal-area)
arcpy.PairwiseIntersect_analysis([r"in_memory\all_wdpa_polybuffpnt_union_flat", in_basemap_spat], r"in_memory\all_wdpa_polybuffpnt_union_flat_intersect")
arcpy.RepairGeometry_management(r"in_memory\all_wdpa_polybuffpnt_union_flat_intersect", "DELETE_NULL", "OGC")
arcpy.Project_management(r"in_memory\all_wdpa_polybuffpnt_union_flat_intersect", "all_wdpa_polybuffpnt_union_flat_intersect_project", in_mollweideprj)
arcpy.RepairGeometry_management("all_wdpa_polybuffpnt_union_flat_intersect_project", "DELETE_NULL", "OGC")
# add and calculate a geodesic area field (km2)
arcpy.AddGeometryAttributes_management("all_wdpa_polybuffpnt_union_flat_intersect_project", "AREA_GEODESIC", "", "SQUARE_KILOMETERS", in_mollweideprj)
# From here on we build the summary statistic tables - see the accompanying metadata
# for the rationale behind these decisions.
# GLOBAL SUMMARY REPORTS
# sites outside the ABNJ are handled first (ABNJ sites get treated separately)
arcpy.Select_analysis("all_wdpa_polybuffpnt_union_flat_intersect_project", r"in_memory\all_wdpa_polybuffpnt_union_flat_intersect_project_nonabnj", "WDPA_ISO3 NOT IN ('ABNJ')")
# nationally designated sites spilling into geographic ABNJ are counted as EEZ
arcpy.CalculateField_management(r"in_memory\all_wdpa_polybuffpnt_union_flat_intersect_project_nonabnj", "type", "!type!.replace('ABNJ','EEZ')", 'PYTHON3')
# summary stats on the Land + EEZ selection, current snapshot and per-year breakdown
(current)208arcpy.Statistics_analysis(r"in_memory\all_wdpa_polybuffpnt_union_flat_intersect_project_nonabnj","global_summary_statistics_current",[["AREA_GEO","SUM"]],"type")209arcpy.Statistics_analysis(r"in_memory\all_wdpa_polybuffpnt_union_flat_intersect_project_nonabnj","global_summary_statistics_temporal",[["AREA_GEO","SUM"]],["type", "STATUS_YR"])210# select out just the rows with an ISO3 of 'ABNJ'211arcpy.Select_analysis("all_wdpa_polybuffpnt_union_flat_intersect_project",r"in_memory\ABNJ_sites","WDPA_ISO3 = 'ABNJ'")212# run some global summary stats on the ABNJ selection for the current year (current) and broken down per year (temporal)213arcpy.Statistics_analysis(r"in_memory\ABNJ_sites",r"in_memory\abnj_global_summary_statistics_current",[["AREA_GEO","SUM"]],"type")214arcpy.Statistics_analysis(r"in_memory\ABNJ_sites",r"in_memory\abnj_global_summary_statistics_temporal",[["AREA_GEO","SUM"]],["type", "STATUS_YR"])215# pivot the global current, global temporal summary table and the abnj temporal summary tables216arcpy.PivotTable_management("global_summary_statistics_temporal",["STATUS_YR"],"type","SUM_AREA_GEO","global_summary_statistics_temporal_pivot")217arcpy.PivotTable_management(r"in_memory\abnj_global_summary_statistics_temporal",["STATUS_YR"],"type","SUM_AREA_GEO","abnj_summary_statistics_temporal_pivot")218# add the abnj tables into the global summary tables219arcpy.Append_management(r"in_memory\abnj_global_summary_statistics_current","global_summary_statistics_current","NO_TEST")220arcpy.JoinField_management("global_summary_statistics_temporal_pivot","STATUS_YR","abnj_summary_statistics_temporal_pivot","STATUS_YR", 'ABNJ')221# update the fields so that they show '0' as opposed to blank cells222# define the codeblock1223in_codeblock1 = """224def updateValue(value):225  if value == None:226   return '0'227  else: return value"""228arcpy.CalculateField_management("global_summary_statistics_temporal_pivot","EEZ","updateValue(!EEZ!)","PYTHON_9.3", 
in_codeblock1)229arcpy.CalculateField_management("global_summary_statistics_temporal_pivot","Land","updateValue(!Land!)","PYTHON_9.3", in_codeblock1)230arcpy.CalculateField_management("global_summary_statistics_temporal_pivot","ABNJ","updateValue(!ABNJ!)","PYTHON_9.3", in_codeblock1)231# Add in three new fields, to track the cumulative area232arcpy.AddField_management("global_summary_statistics_temporal_pivot","EEZ_net","LONG")233arcpy.AddField_management("global_summary_statistics_temporal_pivot","Land_net","LONG")234arcpy.AddField_management("global_summary_statistics_temporal_pivot","ABNJ_net","LONG")235# Calculate the three net fields236# define codeblock2237in_codeblock2 = """238total = 0239def accumulate(increment):240 global total241 if total:242  total += increment243 else:244  total = increment245 return total"""246arcpy.CalculateField_management("global_summary_statistics_temporal_pivot","EEZ_net","accumulate(!EEZ!)","PYTHON_9.3", in_codeblock2)247arcpy.CalculateField_management("global_summary_statistics_temporal_pivot","Land_net","accumulate(!Land!)","PYTHON_9.3", in_codeblock2)248arcpy.CalculateField_management("global_summary_statistics_temporal_pivot","ABNJ_net","accumulate(!ABNJ!)","PYTHON_9.3", in_codeblock2)249# REGIONAL SUMMARY REPORTS250# run some summary stats on the regional for the current year (current) and broken down per year (temporal)251arcpy.Statistics_analysis(r"in_memory\all_wdpa_polybuffpnt_union_flat_intersect_project_nonabnj",r"in_memory\regional_summary_statistics_current",[["AREA_GEO","SUM"]],["sdg_region","type"])252arcpy.Statistics_analysis(r"in_memory\all_wdpa_polybuffpnt_union_flat_intersect_project_nonabnj", r"in_memory\regional_summary_statistics_temporal",[["AREA_GEO","SUM"]],["type", "STATUS_YR","sdg_region"])253# run some global summary stats on the ABNJ selection for the current year (current) and broken down per year 
(temporal)254arcpy.Statistics_analysis(r"in_memory\ABNJ_sites",r"in_memory\abnj_regional_summary_statistics_current",[["AREA_GEO","SUM"]],["type","sdg_region"])255arcpy.Statistics_analysis(r"in_memory\ABNJ_sites",r"in_memory\abnj_regional_summary_statistics_temporal",[["AREA_GEO","SUM"]],["type", "sdg_region", "STATUS_YR"])256# add in the abnj area to the regional summary tables257arcpy.Append_management(r"in_memory\abnj_regional_summary_statistics_current",r"in_memory\regional_summary_statistics_current","NO_TEST")258arcpy.Append_management(r"in_memory\abnj_regional_summary_statistics_temporal",r"in_memory\regional_summary_statistics_temporal","NO_TEST")259# pivot the regional temporal summary table and the ABNJ table260arcpy.PivotTable_management(r"in_memory\regional_summary_statistics_current",["sdg_region"],"type","SUM_AREA_GEO","regional_summary_statistics_current_pivot")261arcpy.PivotTable_management(r"in_memory\regional_summary_statistics_temporal",["STATUS_YR","sdg_region"],"type","SUM_AREA_GEO","regional_summary_statistics_temporal_pivot")262# update the fields so that they show '0' as opposed to blank cells263arcpy.CalculateField_management("regional_summary_statistics_current_pivot","EEZ","updateValue(!EEZ!)","PYTHON_9.3", in_codeblock1)264arcpy.CalculateField_management("regional_summary_statistics_current_pivot","Land","updateValue(!Land!)","PYTHON_9.3", in_codeblock1)265arcpy.CalculateField_management("regional_summary_statistics_current_pivot","ABNJ","updateValue(!ABNJ!)","PYTHON_9.3", in_codeblock1)266arcpy.CalculateField_management("regional_summary_statistics_temporal_pivot","EEZ","updateValue(!EEZ!)","PYTHON_9.3", in_codeblock1)267arcpy.CalculateField_management("regional_summary_statistics_temporal_pivot","Land","updateValue(!Land!)","PYTHON_9.3", in_codeblock1)268arcpy.CalculateField_management("regional_summary_statistics_temporal_pivot","ABNJ","updateValue(!ABNJ!)","PYTHON_9.3", in_codeblock1)269print ("The global and regional summary tables 
can now be used by themselves or copied into the monthly summary statistics templates for QC")270# run some count statistics271arcpy.Statistics_analysis("all_wdpa_polybuffpnt","count_MARINE",[["WDPAID","COUNT"]],"MARINE")272arcpy.Statistics_analysis("all_wdpa_polybuffpnt","count_IUCNCAT",[["WDPAID","COUNT"]],"IUCN_CAT")273arcpy.Statistics_analysis("all_wdpa_polybuffpnt","count_GOVTYPE",[["WDPAID","COUNT"]],"GOV_TYPE")274arcpy.Statistics_analysis("all_wdpa_polybuffpnt","count_OWNTYPE",[["WDPAID","COUNT"]],"OWN_TYPE")275arcpy.Statistics_analysis("all_wdpa_polybuffpnt","count_DESIGENG",[["WDPAID","COUNT"]],"DESIG_ENG")276arcpy.Statistics_analysis("all_wdpa_polybuffpnt","count_DESIGTYPE",[["WDPAID","COUNT"]],"DESIG_TYPE")277arcpy.Statistics_analysis("all_wdpa_polybuffpnt","count_ISO3",[["WDPAID","COUNT"]],"WDPA_ISO3")278arcpy.Statistics_analysis("all_wdpa_polybuffpnt","count_STATUSYR",[["WDPAID","COUNT"]],"STATUS_YR")279arcpy.Statistics_analysis("all_wdpa_polybuffpnt","count_OWNTYPE",[["WDPAID","COUNT"]],"OWN_TYPE")280# calculate the no-take stats281arcpy.Select_analysis("all_wdpa_polybuffpnt", r"in_memory\notake_all","NO_TAKE = 'All'")282arcpy.Dissolve_management(r"in_memory\notake_all",r"in_memory\notake_all_diss")283arcpy.Project_management(r"in_memory\notake_all_diss","notakeall_diss_project",in_mollweideprj)284arcpy.AddGeometryAttributes_management("notakeall_diss_project","AREA_GEODESIC","","SQUARE_KILOMETERS",in_mollweideprj)285arcpy.Statistics_analysis("notakeall_diss_project","sum_NOTAKEall",[["AREA_GEO","SUM"]])286arcpy.Select_analysis("all_wdpa_polybuffpnt", r"in_memory\notake_part","NO_TAKE = 'Part'")287arcpy.Statistics_analysis(r"in_memory\notake_part","sum_NOTAKEpart",[["NO_TK_AREA","SUM"]])288elapsed_hours = (time.clock() - start)/3600289print(("Stage 1 took " + str(elapsed_hours) + " hours"))290##-------------------------------------------------------------------------------------------------------------------------291#Stage 2: National and National 
PAME analysis292print ("Stage 2 of 2: National & National PAME Analyses")293# create the summary tables for appending in individual natioanl summary statistics294out_national_current_schema = arcpy.CreateTable_management(workspace,"out_national_current_schema")295arcpy.AddFields_management(out_national_current_schema,[['WDPA_ISO3','TEXT'],['type','TEXT'],['FREQUENCY','LONG'],['SUM_AREA_GEO','DOUBLE']])296out_national_temporal_schema = arcpy.CreateTable_management(workspace,"out_national_temporal_schema")297arcpy.AddFields_management(out_national_temporal_schema,[['WDPA_ISO3','TEXT'],['MIN_STATUS_YR','DOUBLE'],['type','TEXT'],['FREQUENCY','LONG'],['SUM_AREA_GEO','DOUBLE']])298out_national_current_schema_pame = arcpy.CreateTable_management(workspace,"out_national_current_schema_pame")299arcpy.AddFields_management(out_national_current_schema_pame,[['WDPA_ISO3','TEXT'],['type','TEXT'],['FREQUENCY','LONG'],['SUM_AREA_GEO','DOUBLE']])300out_national_temporal_schema_pame = arcpy.CreateTable_management(workspace,"out_national_temporal_schema_pame")301arcpy.AddFields_management(out_national_temporal_schema_pame,[['WDPA_ISO3','TEXT'],['MIN_STATUS_YR','DOUBLE'],['type','TEXT'],['FREQUENCY','LONG'],['SUM_AREA_GEO','DOUBLE']])302# join pame list to polybuffpnt303arcpy.JoinField_management("all_wdpa_polybuffpnt","WDPAID",in_pame_sites,"wdpa_id","evaluation_id")304# update field (0) for those that don't have id305arcpy.CalculateField_management("all_wdpa_polybuffpnt","evaluation_id","updateValue(!evaluation_id!)","PYTHON_9.3", in_codeblock1)306# select transboundary sites and non transboundary sites307arcpy.Select_analysis("all_wdpa_polybuffpnt",r"in_memory\all_wdpa_polybuffpnt_nontransboundary","WDPA_ISO3 NOT LIKE '%;%'")308arcpy.Select_analysis("all_wdpa_polybuffpnt",r"in_memory\all_wdpa_polybuffpnt_transboundary","WDPA_ISO3 LIKE '%;%'")309# repair 
them310arcpy.RepairGeometry_management(r"in_memory\all_wdpa_polybuffpnt_nontransboundary","DELETE_NULL","OGC")311arcpy.RepairGeometry_management(r"in_memory\all_wdpa_polybuffpnt_transboundary","DELETE_NULL","OGC")312# erase the transboundary sites from the nontransboundary sites313arcpy.Erase_analysis(r"in_memory\all_wdpa_polybuffpnt_transboundary",r"in_memory\all_wdpa_polybuffpnt_nontransboundary","all_wdpa_polybuffpnt_transboundary_novelarea")314# repair the output of the erase315arcpy.RepairGeometry_management("all_wdpa_polybuffpnt_transboundary_novelarea","DELETE_NULL","OGC")316# intersect the erased output with the basemap317arcpy.PairwiseIntersect_analysis(["all_wdpa_polybuffpnt_transboundary_novelarea",in_basemap_spat],"all_wdpa_polybuffpnt_transboundary_novelarea_intersect")318# repair it319arcpy.RepairGeometry_management("all_wdpa_polybuffpnt_transboundary_novelarea_intersect","DELETE_NULL","OGC")320#  recalculate ISO3 based on the geo iso3321arcpy.CalculateField_management("all_wdpa_polybuffpnt_transboundary_novelarea_intersect","WDPA_ISO3","!GEO_ISO3!","PYTHON_9.3")322# rename the nontransboundary sites323#arcpy.Rename_management(r"in_memory\all_wdpa_polybuffpnt_nontransboundary",r"in_memory\all_wdpa_polybuffpnt_national")324# append back the erased and intersected transboundary sites back into the nontransboundary sites325arcpy.Append_management(r"in_memory\all_wdpa_polybuffpnt_nontransboundary","all_wdpa_polybuffpnt_transboundary_novelarea_intersect","NO_TEST")326# repair it327arcpy.RepairGeometry_management(r"in_memory\all_wdpa_polybuffpnt_nontransboundary","DELETE_NULL","OGC")328# split by attribute (wdpa_iso3) to create an individual fc for each iso3329arcpy.SplitByAttributes_analysis(r"in_memory\all_wdpa_polybuffpnt_nontransboundary",sbafolder, "WDPA_ISO3")330# change the location of the workspace to represent the location of the sba output331arcpy.env.workspace = str(sbafolder)332arcpy.env.overwriteOutput = True333out_sba = 
arcpy.ListFeatureClasses()334#  split the input into country specific subsets and do the analysis iso3 by iso3335for fc in out_sba:336    desc = arcpy.Describe(fc)337    # run a union, add in an xyco for each segment338    arcpy.Union_analysis(fc,r"in_memory\Union")339    arcpy.RepairGeometry_management(r"in_memory\union","DELETE_NULL","OGC")340    # assign a unique id to each parcel (XYco)341    arcpy.AddGeometryAttributes_management(r"in_memory\union","CENTROID")342    arcpy.AddField_management(r"in_memory\union","XYco","TEXT")343    arcpy.CalculateField_management(r"in_memory\union","XYco","str(!CENTROID_X!) + str(!CENTROID_Y!)","PYTHON_9.3")344    # run two summary reports per parcel, working out the minimum STATUS_YR and the maximum evaluation_id (i.e. whether assessed or not)345    arcpy.Statistics_analysis(r"in_memory\union",r"in_memory\out_styr_stats",[["STATUS_YR","MIN"]],"XYco")346    arcpy.Statistics_analysis(r"in_memory\union",r"in_memory\out_assid_stats",[["evaluation_id","MAX"]],"XYco")347    # delete identical (XYco) - (i.e. 
make it flat, removing intra-national overlaps), and repair it348    arcpy.DeleteIdentical_management(r"in_memory\union","XYco")349    arcpy.RepairGeometry_management(r"in_memory\union","DELETE_NULL","OGC")350    # split it up further by intersecting each country with the basemap351    arcpy.PairwiseIntersect_analysis([r"in_memory\union",in_basemap_spat],r"in_memory\intersect")352    arcpy.RepairGeometry_management(r"in_memory\intersect","DELETE_NULL","OGC")353    # add in the earliest designation date and whether it was assessed to each segment354    arcpy.JoinField_management(r"in_memory\intersect","XYco",r"in_memory\out_styr_stats","XYco", 'MIN_STATUS_YR')355    arcpy.JoinField_management(r"in_memory\intersect","XYco",r"in_memory\out_assid_stats","XYco", 'MAX_evaluation_id')356    # project the output into mollweide357    out_proj = desc.basename+"_union_intersect_project"358    arcpy.Project_management(r"in_memory\intersect",out_proj,in_mollweideprj)359    arcpy.RepairGeometry_management(out_proj,"DELETE_NULL","OGC")360    arcpy.AddGeometryAttributes_management(out_proj,"AREA","","SQUARE_KILOMETERS",in_mollweideprj)361    # for national reporting they can't report by ABNJ, so we treat areas in geographical ABNJ as actually being part of the ISO3's EEZ362    arcpy.CalculateField_management(out_proj,"type","!type!.replace('ABNJ','EEZ')", 'PYTHON3')363    # create national pa summary statistics364    arcpy.Statistics_analysis(out_proj,r"in_memory\out_styr_sum_current",[["POLY_AREA","SUM"]],["WDPA_ISO3","type"])365    out_styr_sum_temporal = desc.basename+"_summary"366    arcpy.Statistics_analysis(out_proj,r"in_memory\out_styr_sum_temporal",[["POLY_AREA","SUM"]],["WDPA_ISO3","MIN_STATUS_YR","type"])367    # pivot the national temporal pa summary368    arcpy.PivotTable_management(r"in_memory\out_styr_sum_temporal",["WDPA_ISO3","MIN_STATUS_YR"],"type","SUM_POLY_AREA",r"in_memory\out_styr_sum_temporal_pivot")369    # update current national field names (if they 
exist), replace <Null> with 0, add ABNJ area to EEZ field370    if len(arcpy.ListFields(r"in_memory\out_styr_sum_temporal_pivot","WDPA_ISO3"))!=0:371        arcpy.AlterField_management(r"in_memory\out_styr_sum_temporal_pivot","WDPA_ISO3","iso3")372    if len(arcpy.ListFields(r"in_memory\out_styr_sum_temporal_pivot","MIN_STATUS_YR"))!=0:373        arcpy.AlterField_management(r"in_memory\out_styr_sum_temporal_pivot","MIN_STATUS_YR","year")374    if len(arcpy.ListFields(r"in_memory\out_styr_sum_temporal_pivot","Land"))!=0:375        arcpy.AlterField_management(r"in_memory\out_styr_sum_temporal_pivot","Land","pa_land_area")376        arcpy.CalculateField_management(r"in_memory\out_styr_sum_temporal_pivot","pa_land_area","updateValue(!pa_land_area!)","PYTHON_9.3", in_codeblock1)377    if len(arcpy.ListFields(r"in_memory\out_styr_sum_temporal_pivot","EEZ"))!=0:378        arcpy.AlterField_management(r"in_memory\out_styr_sum_temporal_pivot","EEZ","pa_marine_area")379        arcpy.CalculateField_management(r"in_memory\out_styr_sum_temporal_pivot","pa_marine_area","updateValue(!pa_marine_area!)","PYTHON_9.3", in_codeblock1)380    if len(arcpy.ListFields(r"in_memory\out_styr_sum_temporal_pivot","NET_POLY_AREA"))!=0:381        arcpy.AddField_management(r"in_memory\out_styr_sum_temporal_pivot","pa_land_area_net_km2","LONG")382        arcpy.CalculateField_management(r"in_memory\out_styr_sum_temporal_pivot","pa_land_area_net_km2","accumulate(!pa_land_area!)","PYTHON_9.3", in_codeblock2)383        arcpy.AddField_management(r"in_memory\out_styr_sum_temporal_pivot","pa_marine_area_net_km2","LONG")384        arcpy.CalculateField_management(r"in_memory\out_styr_sum_temporal_pivot","pa_marine_area_net_km2","accumulate(!pa_marine_area!)","PYTHON_9.3", in_codeblock2)385    # append each of the national pa coverage tables into a clean precooked schema and all of the temporal pivot tables into another clean precooked schema386    
arcpy.Append_management(r"in_memory\out_styr_sum_current",out_national_current_schema,"NO_TEST")387    arcpy.Append_management(r"in_memory\out_styr_sum_temporal",out_national_temporal_schema,"NO_TEST")388    # select the areas where there has been a pame assessment to only run statistics on those areas389    arcpy.Select_analysis(out_proj,r"in_memory\out_ass_sites","MAX_evaluation_id >= 1")390    # create national pa PAME summary statistics391    arcpy.Statistics_analysis(r"in_memory\out_ass_sites",r"in_memory\out_ass_sum_current",[["POLY_AREA","SUM"]],["WDPA_ISO3","type"])392    arcpy.Statistics_analysis(r"in_memory\out_ass_sites",r"in_memory\out_ass_sum_temporal",[["POLY_AREA","SUM"]],["WDPA_ISO3","MIN_STATUS_YR","type"])393    # append each of the national pa PAME coverage tables into a clean precooked schema and all of the temporal PAME data into another clean precooked schema394    arcpy.Append_management(r"in_memory\out_ass_sum_current",out_national_current_schema_pame,"NO_TEST")395    arcpy.Append_management(r"in_memory\out_ass_sum_temporal",out_national_temporal_schema_pame,"NO_TEST")396    # delete the in_memory workspace before starting the next country397    arcpy.Delete_management(r"in_memory")398# we now return back to the original workspace399arcpy.env.workspace = str(workspace)400# NATIONAL CURRENT REPORTS401# create summary tables for national status402# pivot the current national summary tables403arcpy.PivotTable_management(out_national_current_schema,"WDPA_ISO3","type","SUM_AREA_GEO","national_summary_statistics_current_pivot")404# rename fields405arcpy.AlterField_management("national_summary_statistics_current_pivot","WDPA_ISO3","iso3")406arcpy.AlterField_management("national_summary_statistics_current_pivot","Land","pa_land_area")407arcpy.AlterField_management("national_summary_statistics_current_pivot","EEZ","pa_marine_area")408# add the current national fields to calculate percentage 
coverage409arcpy.AddField_management("national_summary_statistics_current_pivot","percentage_pa_land_cover","FLOAT")410arcpy.AddField_management("national_summary_statistics_current_pivot","percentage_pa_marine_cover","FLOAT")411# join current national to the basemap412arcpy.JoinField_management("national_summary_statistics_current_pivot","iso3",in_basemap_tab,"GEO_ISO3",["land_area", "marine_area"])413# calculate current national fields and replace <Null> values with 0414arcpy.CalculateField_management("national_summary_statistics_current_pivot","percentage_pa_land_cover","(!pa_land_area! / !land_area!)*100","PYTHON_9.3")415arcpy.CalculateField_management("national_summary_statistics_current_pivot","percentage_pa_marine_cover","(!pa_marine_area! / !marine_area!)*100","PYTHON_9.3")416arcpy.CalculateField_management("national_summary_statistics_current_pivot","pa_land_area","updateValue(!percentage_pa_land_cover!)","PYTHON_9.3", in_codeblock1)417arcpy.CalculateField_management("national_summary_statistics_current_pivot","pa_marine_area","updateValue(!percentage_pa_marine_cover!)","PYTHON_9.3", in_codeblock1)418arcpy.CalculateField_management("national_summary_statistics_current_pivot","percentage_pa_land_cover","updateValue(!percentage_pa_land_cover!)","PYTHON_9.3", in_codeblock1)419arcpy.CalculateField_management("national_summary_statistics_current_pivot","percentage_pa_marine_cover","updateValue(!percentage_pa_marine_cover!)","PYTHON_9.3", in_codeblock1)420# pivot the current national pame summary tables421arcpy.PivotTable_management(out_national_current_schema_pame,"WDPA_ISO3","type","SUM_AREA_GEO","national_summary_statistics_current_pivot_pame")422# rename 
fields423arcpy.AlterField_management("national_summary_statistics_current_pivot_pame","WDPA_ISO3","iso3")424arcpy.AlterField_management("national_summary_statistics_current_pivot_pame","Land","pame_pa_land_area")425arcpy.AlterField_management("national_summary_statistics_current_pivot_pame","EEZ","pame_pa_marine_area")426# Join the current national pame table to the current natioanl table427arcpy.JoinField_management("national_summary_statistics_current_pivot","iso3","national_summary_statistics_current_pivot_pame","iso3",["pame_pa_land_area", "pame_pa_marine_area"])428# calculate pame percentage fields429arcpy.AddField_management("national_summary_statistics_current_pivot","pame_percentage_pa_land_cover","FLOAT")430arcpy.AddField_management("national_summary_statistics_current_pivot","pame_percentage_pa_marine_cover","FLOAT")431arcpy.CalculateField_management("national_summary_statistics_current_pivot","pame_percentage_pa_land_cover","(!pame_pa_land_area! / !land_area!)*100","PYTHON_9.3")432arcpy.CalculateField_management("national_summary_statistics_current_pivot","pame_percentage_pa_marine_cover","(!pame_pa_marine_area! 
/ !marine_area!)*100","PYTHON_9.3")433# update all pame fields so that <Null> is replaced by 0434arcpy.CalculateField_management("national_summary_statistics_current_pivot","pame_pa_marine_area","updateValue(!pame_pa_marine_area!)","PYTHON_9.3", in_codeblock1)435arcpy.CalculateField_management("national_summary_statistics_current_pivot","pame_pa_land_area","updateValue(!pame_pa_land_area!)","PYTHON_9.3", in_codeblock1)436arcpy.CalculateField_management("national_summary_statistics_current_pivot","pame_percentage_pa_land_cover","updateValue(!pame_percentage_pa_land_cover!)","PYTHON_9.3", in_codeblock1)437arcpy.CalculateField_management("national_summary_statistics_current_pivot","pame_percentage_pa_marine_cover","updateValue(!pame_percentage_pa_marine_cover!)","PYTHON_9.3", in_codeblock1)438elapsed_minutes = (time.clock() - start)/60439elapsed_hours = (time.clock() - start)/3600440print ("scripts finished - all good")441print ("Outputs are here: " + str(workspace))442print ("Total running time: " + str(elapsed_minutes) + " minutes (" + str(elapsed_hours) + " hours)")443##### BELOW HERE IS A WORK IN PROGRESS AND HASHTAGGED OUT FOR NOW444# NATIONAL TEMPORAL REPORTS445# pivot the temporal national summary tables446#arcpy.PivotTable_management(out_national_temporal_schema,["WDPA_ISO3","MIN_STATUS_YR"],"type","SUM_AREA_GEO","national_summary_statistics_temporal_pivot")447# join temporal national to the basemap448#arcpy.JoinField_management("national_summary_statistics_temporal_pivot","WDPA_ISO3",in_basemap_tab,"GEO_ISO3",["land_area", "marine_area"])449# update the temporal national field 
names450#arcpy.AlterField_management("national_summary_statistics_temporal_pivot","WDPA_ISO3","iso3")451#arcpy.AlterField_management("national_summary_statistics_temporal_pivot","Land","pa_land_area")452#arcpy.AlterField_management("national_summary_statistics_temporal_pivot","EEZ","pa_marine_area")453#arcpy.AlterField_management("national_summary_statistics_temporal_pivot","MIN_STATUS_YR","year")454# update all the temporal natioanl fields so that they are 0 instead of Null455#arcpy.CalculateField_management("national_summary_statistics_temporal_pivot","pa_land_area","updateValue(!pa_land_area!)","PYTHON_9.3", in_codeblock1)456#arcpy.CalculateField_management("national_summary_statistics_temporal_pivot","pa_marine_area","updateValue(!pa_marine_area!)","PYTHON_9.3", in_codeblock1)457#arcpy.CalculateField_management("national_summary_statistics_temporal_pivot","percentage_pa_land_cover","updateValue(!percentage_pa_land_cover!)","PYTHON_9.3", in_codeblock1)458#arcpy.CalculateField_management("national_summary_statistics_temporal_pivot","percentage_pa_marine_cover","updateValue(!percentage_pa_marine_cover!)","PYTHON_9.3", in_codeblock1)459## UPDATE THE NET FIELD IN HERE TO REMOVE '0' VALUES?460# add the fields to calculate percentage coverage and calculate them461#arcpy.AddField_management("national_summary_statistics_temporal_pivot","percentage_pa_land_cover","FLOAT")462#arcpy.AddField_management("national_summary_statistics_temporal_pivot","percentage_pa_marine_cover","FLOAT")463#arcpy.CalculateField_management("national_summary_statistics_temporal_pivot","percentage_pa_land_cover","(!pa_land_area! / !land_area!)*100","PYTHON_9.3")464#arcpy.CalculateField_management("national_summary_statistics_temporal_pivot","percentage_pa_marine_cover","(!pa_marine_area! 
/ !marine_area!)*100","PYTHON_9.3")465# add in net fields and calculate them466#arcpy.AddField_management("national_summary_statistics_temporal_pivot","pa_marine_cover_net_km2","LONG")467#arcpy.CalculateField_management("national_summary_statistics_temporal_pivot","pa_marine_area_net_km2","updateValue(!pa_marine_area_net_km2!)","PYTHON_9.3", in_codeblock1)468#arcpy.AddField_management("national_summary_statistics_temporal_pivot","pa_land_cover_net_km2","LONG")469#arcpy.CalculateField_management("national_summary_statistics_temporal_pivot","pa_land_area_net_km2","updateValue(!pa_land_area_net_km2!)","PYTHON_9.3", in_codeblock1)470#arcpy.AddField_management("national_summary_statistics_temporal_pivot","pa_marine_cover_net_perc","LONG")471#arcpy.CalculateField_management("national_summary_statistics_temporal_pivot","pa_marine_cover_net_perc","(!pa_marine_area_net_km2! / !marine_area!)*100","PYTHON_9.3")472#arcpy.AddField_management("national_summary_statistics_temporal_pivot","pa_land_cover_net_perc","LONG")473#arcpy.CalculateField_management("national_summary_statistics_temporal_pivot","pa_land_cover_net_perc","(!pa_land_area_net_km2! / !land_area!)*100","PYTHON_9.3")474# Finish running scripts...SHPO_General_Tools.pyt
Source:SHPO_General_Tools.pyt  
1"""2Name:             SHPO_General_Tools.pyt3Author:           Michael D. Troyer4                    with contributions by Benjamin Zank5Date:             August 11, 20166Version:          2.0 (Beta)7ArcGIS Version:   10.3 (requires Advanced License)89As-is, No Warranty, etc.1011Direct comments to:1213	Michael Troyer14	mtroyer@blm.gov / 719-269-85871516Upon failure:1718    Screenshot the error message19    and the tool input UI [relaunch from results window]2021    and send to Michael Troyer at mtroyer@blm.gov2223-------------------------------------------------------------------------------24PURPOSE AND USAGE:2526The SHPO General Tools script creates output data regarding an input polygon27in order to streamline the collection and input of project data into the SHPO28data system.2930Specifically, the SHPO General Tools python toolbox accepts an input polygon and31generates three output data files.3233INPUTS:3435- A polygon representing a site or survey boundary. The polygon can be multipart36or single part, but must represent a single project. If the input polygon37represents more than one unique project (the project id field returns more38than one unique ID, the tool will exit and direct the user to use a subselection39of the data. The subselection can be performed in ArcMap, using the various40select tools, or within the tool itself. Within the tool dialog, the41user has the ability to select specific features from within the input polygon42according to a FIELD and VALUE query. In this case, the user clicks Selection43Based on Case Value and then selects the appropriate field from the Select44Feature Case Field drop-down. The tool will identify a subset of the feature45layer based on the values contained within the selected field.4647OUTPUTS:48491.) A .csv file of the input polygon vertex coordinates in X/Y format. 
50The tool will recognize mulitpart features and assign a generic, 51sequential ID to the individual parts so that they can be individually 52recognized and managed within the output .csv file.53542.) A .csv of the PLSS legal location of the input polygon. The .csv contains55the following fields: feature ID, PM, TWN, RNG, SEC, QQ1, and QQ2.56573.) A .txt file with the feature ID, polygon acreage, counties, quads, elevation58at the project centroid, the polygon centroid coordinates in X/Y format (if a59single polygon, the tool will not generate a centroid location for a multipart60or multiple feature since that centroid will likely fall outside the polygon61boundary), and the PLSS location.6263The tool will also update the geometry data of the input polygon64(area, perimeter, acres, and X and Y coordinates).65-------------------------------------------------------------------------------66"""6768import arcpy, os, sys, traceback, csv69from arcpy import env70env.addOutputsToMap = False71env.overwriteOutput = True727374class Toolbox(object):75    def __init__(self):76        self.label = "SHPO_General_Tools"77        self.alias = "SHPO_General_Tools"7879        # List of tool classes associated with this toolbox80        self.tools = [SHPO_General_Tools]818283class SHPO_General_Tools(object):84    def __init__(self):85        self.label = "SHPO_General_Tools"86        self.description = ""87        self.canRunInBackground = False88        89    def getParameterInfo(self):90        # Input Target Shapefile91        param0=arcpy.Parameter(92            displayName="Input Shapefile or Feature Class",93            name="Input_Shape",94            datatype="Feature Layer",95            parameterType="Required",96            direction="Input")97        param0.filter.list = ["Polygons"]9899        # Allow sub-selections100        param1=arcpy.Parameter(101            displayName="Selection Based on Case Value",102            name="Select_Boolean",103            
datatype="Boolean",104            parameterType="Optional",105            direction="Input",106            enabled = "False")107        108        param2=arcpy.Parameter(109            displayName="Select Feature Case Field",110            name="Select_Field",111            datatype="String",112            parameterType="Optional",113            direction="Input",114            enabled = "False")115        116        param3=arcpy.Parameter(117            displayName="Select Feature Case Value",118            name="Select_Value",119            datatype="String",120            parameterType="Optional",121            direction="Input",122            enabled = "False")123        124        # Output Location and Name125        param4=arcpy.Parameter(126            displayName="Output Workspace and File Naming Convention",127            name="Out_Name",128            datatype="File",129            parameterType="Required",130            direction="Output")131132        # Quad Index133        param5=arcpy.Parameter(134            displayName="Input Quadrangle Index Layer",135            name="Input_Quad",136            datatype="Feature Layer",137            parameterType="Required",138            direction="Input")139        140        # County Layer141        param6=arcpy.Parameter(142            displayName="Input County Index Layer",143            name="Input_County",144            datatype="Feature Layer",145            parameterType="Required",146            direction="Input")147        148        # DEM149        param7=arcpy.Parameter(150            displayName="Input DEM Raster",151            name="Input_DEM",152            datatype="Raster Layer",153            parameterType="Required",154            direction="Input")155        156        # PLSS157        param8=arcpy.Parameter(158            displayName="Input PLSS Survey Grid Layer",159            name="Input_PLSS",160            datatype="Feature Layer",161            parameterType="Required",162            
direction="Input")163        164        params = [param0, param1, param2, param3, param4, param5, param6, param7, param8]165        return params166167    def updateParameters(self, params):168        """Modify the values and properties of parameters before internal169        validation is performed.  This method is called whenever a parameter170        has been changed."""171        172        # Params 0-3 - Input shape and handle sub-selections173        if params[0].value:174            params[1].enabled = "True"175        else:176            params[1].enabled = "False"177            178        if params[1].value == 1:179            fieldtypeList = ["String", "Integer"]180            desc = arcpy.Describe(params[0].value)181            fields = desc.fields182            featurefieldList = [field.name for field in fields if field.type in fieldtypeList]183            params[2].enabled = "True"184            params[2].filter.type = "ValueList"185            params[2].filter.list = featurefieldList186        else:187            params[2].value = ""188            params[2].enabled = "False"189        190        if params[2].value:191            field_select = params[2].value192            arcpy.Frequency_analysis(params[0].value, "in_memory\\field_freq", field_select)193            featurevalueList = []194            for field in fields:195                if field.name == field_select:196                    type = field.type197                    if type == "Integer":198                        where = '"'+field_select+'" IS NOT NULL'199                    elif type == "String":200                        where = '"'+field_select+'" IS NOT NULL AND NOT "'+field_select+'" = '+"'' AND NOT "+'"'+field_select+'" = '+"' '"201            with arcpy.da.SearchCursor("in_memory\\field_freq", [field_select], where)as cursor:202                for row in cursor:203                    featurevalueList.append(row[0])204            featurevalueList.sort()205            
params[3].enabled = "True"206            params[3].filter.type = "ValueList"207            params[3].filter.list = featurevalueList208        else:209            params[3].value = ""210            params[3].enabled = "False"211212        # Param 5 - Quad default value213        if not params[5].altered:214            params[5].value = " H:\Zone 13 Basemaps - 83\qdindex.shp" 215216        # Param 6 - County default value217        if not params[6].altered:218            params[6].value = "H:\Zone 13 Basemaps - 83\Counties.shp" 219220        # Param 7 - DEM default value221        # SHPO inDEM = r'H:\Zone 13 Basemaps - 83\222        if not params[7].altered:223            params[7].value = "" 224225        # Param 8 - PLSS default value226        if not params[8].altered:227            params[8].value = "H:\ToolboxesArc10\PLSSIntersected.gdb\CO_PLSSIntersected" 228        return229230    def updateMessages(self, params):231        """Modify the messages created by internal validation for each tool232        parameter.  This method is called after internal validation."""233        return234    235    def isLicensed(self):236        """Set whether tool is licensed to execute."""237        if arcpy.CheckProduct("ArcInfo") == "Available":238            return True239        else:240            msg = "ArcGIS for Desktop Advanced License is not available. 
Install Advanced Licnese and try again."241            sys.exit(msg)242243    def execute(self, params, messages):244        # Define workspace locations - can also use os.path.join()245        arcpy.env.workspace = os.path.dirname(params[4].valueAsText)+"\\"+os.path.basename(params[4].valueAsText)246        dirName = os.path.dirname(params[4].valueAsText)247        baseName = os.path.dirname(params[4].valueAsText)+"\\"+os.path.basename(params[4].valueAsText)248249        # Define base data sources250        inDEM = params[7].valueAsText251        inQuad = params[5].valueAsText252        inCounty = params[6].valueAsText253        inPLSS = params[8].valueAsText254255        # Define Functions:256        def deleteInMemory():257            #Set the workspace to in_memory258            env.workspace = "in_memory"259            #Delete all in memory feature classes260            fcs = arcpy.ListFeatureClasses()261            if len(fcs) > 0:262                for fc in fcs:263                    arcpy.Delete_management(fc)264            #Delete all in memory tables 265            tbls = arcpy.ListTables()266            if len(tbls) > 0:267                for tbl in tbls:268                    arcpy.Delete_management(tbl)269270        def getErrors():271            # Get the traceback object272            tb = sys.exc_info()[2]273            tbinfo = traceback.format_tb(tb)[0]274            # Concatenate information together concerning the error into a message string275            pymsg = "PYTHON ERRORS:\nTraceback info:\n" + tbinfo + "\nError Info:\n" + str(sys.exc_info()[1])276            msgs = "ArcPy ERRORS:\n" + arcpy.GetMessages(2) + "\n"277            # Return python error messages for use in script tool or Python Window278            arcpy.AddError(pymsg)279            arcpy.AddError(msgs)280            return pymsg, msgs281282        def selectAndPart(inPoly):283            """Check for subselection and manage singlepart/multipart/multiple parts"""284           
 # Check for subselection285            if params[1].value == 1:286                desc = arcpy.Describe(inPoly)287                fields = desc.fields288                for field in fields:289                    if field.name == params[2].valueAsText:290                        type = field.type291                        selField = '"'+params[2].valueAsText+'"'292                        selValue = params[3].valueAsText293                        if type == "Integer":294                            where = selField+' = '+selValue295                        elif type == "String":296                            where = selField+' = '+"'"+selValue+"'"      297                arcpy.MakeFeatureLayer_management(inPoly, "in_memory\\selected", where)298            else:299                arcpy.MakeFeatureLayer_management(inPoly, "in_memory\\selected")300            301            # Identify project ID field and get a list of individual project IDs302            desc = arcpy.Describe("in_memory\\selected")303            fieldnames = []304            for field in desc.fields:305                fieldnames.append(field.name)306                307            # for site308            if "SITE_" in fieldnames:309                projID = "SITE_"310            # for survey311            elif "DOC_" in fieldnames:312                projID = "DOC_"313            # if neither parse for shape or fc    314            elif "FID" in fieldnames:315                projID = "FID"316            elif "OBJECTID" in fieldnames:317                projID = "OBJECTID"318           319            # If more than one unique project ID, return with message to use subselection320            projectIDS = []321            with arcpy.da.SearchCursor("in_memory\\selected", projID) as cur:322                for row in cur:323                    projectIDS.append(str(row[0]))       324            uniqueIDS = set(projectIDS)325                326            if not len(uniqueIDS) == 1:327                
arcpy.AddMessage("------------------------------------------------------------")328                arcpy.AddMessage("\n The input feature represents more than one unique project.")329                arcpy.AddMessage("\n Please use the field and value subselection function to identify a single project.")330                arcpy.AddMessage("\n The SHPO General Tools script will now exit.\n")331                arcpy.AddMessage("------------------------------------------------------------")332                sys.exit()333                                334            # Check for multiple features of the same project - dissolve on projID335            else:336                if len(projectIDS) > 1:337                    arcpy.Dissolve_management("in_memory\\selected", "in_memory\\singleFeature", [projID])338                else:339                    arcpy.MakeFeatureLayer_management("in_memory\\selected", "in_memory\\singleFeature")340            341            # Check for multipart - create single part and multipart versions342            desc = arcpy.Describe("in_memory\\singleFeature")343            shape_field = desc.ShapeFieldName344            rows = arcpy.SearchCursor("in_memory\\singleFeature")345            for row in rows:346                poly = row.getValue(shape_field)347                if poly.isMultipart:348                    arcpy.MultipartToSinglepart_management("in_memory\\singleFeature", "in_memory\\multiFeatures")349                else:350                    #if singlepart, just create a copy named multipart351                    arcpy.MakeFeatureLayer_management("in_memory\\singleFeature", "in_memory\\multiFeatures") 352353            singleFeature = "in_memory\\singleFeature"354            multiFeature = "in_memory\\multiFeatures"355            return singleFeature, multiFeature356357        def outTable(inPoly):358            """Creates a table of input ID, quads, counties, and PLSS data"""359            360            # Execute CreateTable361 
           arcpy.CreateTable_management("in_memory", "output")362363            # Add fields to table364            desc = arcpy.Describe(inPoly)365            fieldnames = []366            for field in desc.fields:367                fieldnames.append(field.name)368            # for site369            if "SITE_" in fieldnames:370                projID = "SITE_"371            # for survey372            elif "DOC_" in fieldnames:373                projID = "DOC_"374            # if neither parse for shape or fc    375            elif "FID" in fieldnames:376                projID = "FID"377            elif "OBJECTID" in fieldnames:378                projID = "OBJECTID"379380            # Add the fields to the table381            arcpy.AddField_management("in_memory\\output", projID, "TEXT", "", "", 50)382            arcpy.AddField_management("in_memory\\output", "PM", "TEXT","","",6)383            arcpy.AddField_management("in_memory\\output", "TWN", "TEXT","","",6)384            arcpy.AddField_management("in_memory\\output", "RNG", "TEXT","","",6)385            arcpy.AddField_management("in_memory\\output", "SEC", "TEXT","","",4)386            arcpy.AddField_management("in_memory\\output", "QQ1", "TEXT","","",4)387            arcpy.AddField_management("in_memory\\output", "QQ2", "TEXT","","",4)388            389            # Create insert cursor390            inCur = arcpy.da.InsertCursor("in_memory\\output", [projID, "PM", "TWN", "RNG", "SEC", "QQ1", "QQ2"])391392            # Peel back input polygon 10 meters to prevent extranneous boundary overlap - particulurly PLSS393            # If poly(s) is too small and gets erased, keep original(s)394            arcpy.MakeFeatureLayer_management(inPoly, "in_memory\\polycopy")395            arcpy.PolygonToLine_management("in_memory\\polycopy", "in_memory\\polylines")396            arcpy.Buffer_analysis("in_memory\\polylines", "in_memory\\polybuffer", 10)397            arcpy.Erase_analysis("in_memory\\polycopy", 
"in_memory\\polybuffer", "in_memory\\PLSSpoly")398            inResult=int(arcpy.GetCount_management("in_memory\\polycopy").getOutput(0))399            outResult=int(arcpy.GetCount_management("in_memory\\PLSSpoly").getOutput(0))400            if not inResult == outResult:401                arcpy.Delete_management("in_memory\\PLSSpoly")402                arcpy.MakeFeatureLayer_management(inPoly, "in_memory\\PLSSpoly")403            arcpy.Delete_management("in_memory\\polycopy")404            arcpy.Delete_management("in_memory\\polylines")405            arcpy.Delete_management("in_memory\\polybuffer")406407            # Intersect locations408            arcpy.Intersect_analysis(["in_memory\\PLSSpoly", inPLSS], "in_memory\\locations", "NO_FID")409410            # Secure site/survey id411            inRow0 = str([row[0] for row in arcpy.da.SearchCursor(inPoly, projID)])412413            # Sort PLSS            414            freqFields = ["PLSSID","FRSTDIVNO","QQSEC"]415            arcpy.Frequency_analysis("in_memory\\locations", "in_memory\\PLSS", freqFields)416            with arcpy.da.SearchCursor("in_memory\\PLSS", freqFields) as cursor:417                for row in cursor:418                    #inRow[1] = PM419                    inRow1 = str(row[0])[2:4]420                    #inRow[2] = Twn 421                    inRow2 = str(row[0])[5:7]+str(row[0])[8]422                    #inRow[3] = Rng423                    inRow3 = str(row[0])[10:12]+str(row[0])[13]424                    #inRow[4] = Sec425                    inRow4 = str(row[1])426                    #inRow[5] = Quar1427                    inRow5 = str(row[2])[0:2]428                    #inRow[6] = Quar2429                    inRow6 = str(row[2])[2:4]430                    inCur.insertRow([inRow0, inRow1, inRow2, inRow3, inRow4, inRow5, inRow6])431432            # Write to .csv - use an intermediate to clean up extraneous fields - OID           433            tempTable = baseName+"_temp.csv"434            
outTable = baseName+"_PLSS_Data.csv"435            arcpy.CopyRows_management("in_memory\\output", tempTable)436            arcpy.Delete_management("in_memory\\output")437438            # Clean up csv - remove OID field439            with open(tempTable,"rb") as source:440                rdr = csv.reader(source)441                with open(outTable,"wb") as result:442                    wtr = csv.writer(result)443                    for r in rdr:444                        wtr.writerow((r[1], r[2], r[3], r[4], r[5], r[6], r[7]))445            # Clean up446            os.remove(tempTable)447            os.remove(baseName+"_temp.txt.xml")448            os.remove(dirName+"\\"+"schema.ini")449            return        450451        def outText(inPoly):452            """Creates a text file of input ID, acreage, county(s),453            quad(s), centroid (if single polygon), and PLSS data"""454            455            # Get project ID 456            desc = arcpy.Describe(inPoly)457            fieldnames = []458            for field in desc.fields:459                fieldnames.append(field.name)460            # for site461            if "SITE_" in fieldnames:462                projID = "SITE_"463            # for survey464            elif "DOC_" in fieldnames:465                projID = "DOC_"466            # if neither parse for shape or fc    467            elif "FID" in fieldnames:468                projID = "FID"469            elif "OBJECTID" in fieldnames:470                projID = "OBJECTID"471                472            # Peel back input polygon boundary 10 meters to prevent473            # extranneous PLSS boundary overlap for PLSS caclculation474            arcpy.MakeFeatureLayer_management(inPoly, "in_memory\\polycopy")475            arcpy.PolygonToLine_management("in_memory\\polycopy", "in_memory\\polylines")476            arcpy.Buffer_analysis("in_memory\\polylines", "in_memory\\polybuffer", 10)477            arcpy.Erase_analysis("in_memory\\polycopy", 
"in_memory\\polybuffer", "in_memory\\PLSSpoly")478            arcpy.Delete_management("in_memory\\polycopy")479            arcpy.Delete_management("in_memory\\polylines")480            arcpy.Delete_management("in_memory\\polybuffer")481482            # Intersect locations483            arcpy.Intersect_analysis(["in_memory\\PLSSpoly", inPLSS, inCounty, inQuad], "in_memory\\locations", "NO_FID")484                485            # Secure site/survey id486            projectValue = ([row[0] for row in arcpy.da.SearchCursor(inPoly, projID)])487            projectID = "Feature ID: "+str(projectValue[0])488            489            # Sort counties490            arcpy.Frequency_analysis("in_memory\\locations", "in_memory\\County", "NAME")491            ###arcpy.Frequency_analysis("in_memory\\locations", "in_memory\\County", "COUNTY") # this is for BLM testing492            countyList = []493            ###with arcpy.da.SearchCursor("in_memory\\County", ["COUNTY"]) as cursor:  # this is for BLM testing494            with arcpy.da.SearchCursor("in_memory\\County", ["NAME"]) as cursor:495                for row in cursor:496                        countyList.append(str(row[0]).title()+" County")497            arcpy.Delete_management("in_memory\\County")498            countyText = "Counties: "+", ".join(countyList)499                        500            # Sort Quads501            arcpy.Frequency_analysis("in_memory\\locations", "in_memory\\Quad", "QUAD_NAME")502            quadList = []503            with arcpy.da.SearchCursor("in_memory\\Quad", ["QUAD_NAME"]) as cursor:504                for row in cursor:505                        quadList.append(str(row[0]).title()+" 7.5'")506            arcpy.Delete_management("in_memory\\Quad")507            quadText = "Quads: "+", ".join(quadList)508509            # Extract Elevation at centroid and get centroid location510            # If single polygon, create variable to signal print centroid location - default False511            
arcpy.FeatureToPoint_management(inPoly, "in_memory\\centroid", "CENTROID")512            printCentroid = 0513            desc = arcpy.Describe(inPoly)514            shape_field = desc.ShapeFieldName515            rows = arcpy.SearchCursor(inPoly)516            for row in rows:517                poly = row.getValue(shape_field)518                if not poly.isMultipart:519                    printCentroid = 1520                    with arcpy.da.SearchCursor("in_memory\\centroid",["SHAPE@"]) as cursor:521                        for row in cursor:522                            centroidX = row[0].centroid.X523                            centroidY = row[0].centroid.Y524                            centroidPrint = "Polygon centroid: "+str(int(round(centroidX)))+" mE   "+str(int(round(centroidY)))+" mN"525            arcpy.sa.ExtractValuesToPoints("in_memory\\centroid", inDEM, "in_memory\\centValue", "NONE", "VALUE_ONLY")526            elePrint = int(round([row[0] for row in arcpy.da.SearchCursor("in_memory\\centValue", "RASTERVALU")][0]))       527            elevText = "Elevation at project centroid: "+str(elePrint)528            529            # Sort PLSS            530            freqFields = ["PLSSID","FRSTDIVNO","QQSEC"]531            arcpy.Frequency_analysis("in_memory\\locations", "in_memory\\PLSS", freqFields)532            PLSSlist = []533            with arcpy.da.SearchCursor("in_memory\\PLSS", freqFields) as cursor:534                for row in cursor:535                    PMtext = str(row[0])[2:4]536                    TWNtext = str(row[0])[5:7]+str(row[0])[8]537                    RNGtext = str(row[0])[10:12]+str(row[0])[13]538                    SECtext = str(row[1])539                    QQ1text = str(row[2])[0:2]540                    QQ2text = str(row[2])[2:4]541                    PLSSinput = PMtext+" "+TWNtext+" "+RNGtext+" "+SECtext+" "+QQ1text+" "+QQ2text542                    PLSSlist.append(PLSSinput)543                PLSStext = ", 
".join(PLSSlist)544545            # Calculate acreage and format for print w/ 2 decimal places546            with arcpy.da.UpdateCursor(inPoly,["SHAPE@"]) as cur:547                for row in cur:548                    acreage = row[0].area*0.000247105549                    acreagePrint = "Polygon acreage: %.2f" % acreage550                    551            # Write to text file                    552            outText = baseName+"_Location_Data.txt"553            textFile = open(outText, "w")554            textFile.write(projectID)555            textFile.write("\n")556            textFile.write(acreagePrint)557            textFile.write("\n")558            textFile.write(countyText)559            textFile.write("\n")560            textFile.write(quadText)561            textFile.write("\n")562            textFile.write(elevText)563            textFile.write("\n")564            if printCentroid:565                textFile.write(centroidPrint)566            textFile.write("\n")567            textFile.write("\n")568            textFile.write("PLSS Location")569            textFile.write("\n")570            for plss in PLSSlist:571                textFile.write(plss)572                textFile.write("\n")573            textFile.close()574            return575576        def updateInput(inPoly):          577            """Update the input polygon attributes"""578            579            # Check if fields exist - if not, add them580            fields = ["SHAPE@", "AREA", "PERIMETER","ACRES", "X", "Y"]581            fieldList = arcpy.ListFields(inPoly)582            if not "AREA" in fieldList:583                arcpy.AddField_management(inPoly, "AREA", "DOUBLE",15,3)584            if not "PERIMETER" in fieldList:585                arcpy.AddField_management(inPoly, "PERIMETER", "DOUBLE",15,3)586            if not "ACRES" in fieldList:587                arcpy.AddField_management(inPoly, "ACRES", "DOUBLE",15,3)588            if not "X" in fieldList:589                
arcpy.AddField_management(inPoly, "X", "LONG",6)590            if not "Y" in fieldList:591                arcpy.AddField_management(inPoly, "Y", "LONG",7)592593            # Update the fields        594            with arcpy.da.UpdateCursor(inPoly,fields) as cur:595                for row in cur:596                    row[1] = row[0].area597                    row[2] = row[0].length598                    row[3] = row[0].area*0.000247105599                    row[4] = row[0].centroid.X600                    row[5] = row[0].centroid.Y 601                    cur.updateRow(row)602            return603604        def getCoords(inPoly):          605            """Get vertice coordiantes and write to csv, assign arbitrary ID to multiple parts"""606            607            # Create the tables - use an intermediary to clean up fieds608            tempTable = baseName+"_temp.csv"609            outTable = baseName+"_Coordinates.csv"610611            # Execute Feature Vertices to Points - THIS REQUIRES AN ADVANCED LICENSE612            arcpy.FeatureVerticesToPoints_management(inPoly, "in_memory\\vertPoints", "ALL")613614            # Add Fields XCOORD, YCOORD615            arcpy.AddField_management("in_memory\\vertPoints", "XCOORD", "LONG",6)616            arcpy.AddField_management("in_memory\\vertPoints", "YCOORD", "LONG",7)617            with arcpy.da.UpdateCursor("in_memory\\vertPoints",["SHAPE@", "XCOORD", "YCOORD"]) as cursor:618                for row in cursor:619                    row[1] = row[0].centroid.X620                    row[2] = row[0].centroid.Y621                    cursor.updateRow(row)622            arcpy.DeleteIdentical_management("in_memory\\vertPoints", ["XCOORD", "YCOORD"])623            arcpy.ExportXYv_stats("in_memory\\vertPoints", ["ORIG_FID","XCOORD","YCOORD"], "COMMA", tempTable, "ADD_FIELD_NAMES")624625            # Clean up csv - remove OID field626            with open(tempTable,"rb") as source:627                rdr = csv.reader(source)628   
             with open(outTable,"wb") as result:629                    wtr = csv.writer(result)630                    for r in rdr:631                        if r[2] == "ORIG_FID":632                            r[2] = "ID"633                        wtr.writerow((r[2], r[3], r[4]))634635            # Clean up636            os.remove(tempTable)637            os.remove(baseName+"_temp.txt.xml")638            return639        try:640            """With all functions defined, unpack selectAndPart return tuple for use641            in other functions. Execute outTable, outText, updateInput, and getCoords.642            Return exceptions."""643            single, multi = selectAndPart(params[0].value)644            outTable(single)645            outText(single)646            updateInput(params[0].value)647            getCoords(multi)648649            650        except SystemExit:651            arcpy.AddMessage("System Exit")652        except arcpy.ExecuteError: 653            # Get the tool error messages 654            msgs = arcpy.GetMessages(2) 655            # Return tool error messages for use with a script tool 656            arcpy.AddError(msgs)657        except:658            getErrors()659            660        finally:661            #Clean everything out
Learn to execute automation testing from scratch with the LambdaTest Learning Hub. Right from setting up the prerequisites to running your first automation test, to following best practices and diving deeper into advanced test scenarios, the LambdaTest Learning Hub compiles step-by-step guides to help you become proficient with different test automation frameworks, e.g. Selenium, Cypress, and TestNG.
You can also refer to the video tutorials on the LambdaTest YouTube channel to get step-by-step demonstrations from industry experts.
Get 100 minutes of automation testing FREE!
