How to use the AddWarning method in tavern

Best Python code snippets using tavern
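All three snippets on this page call AddWarning through ArcPy rather than through tavern itself: arcpy.AddWarning(message) queues a warning in the geoprocessing messages, so a script tool can flag a problem and keep running (arcpy.AddError, by contrast, marks the run as failed). As a minimal sketch of the call, assuming only an ArcGIS Python environment and a hypothetical script-tool parameter:

import arcpy

# Hypothetical tool parameter: a path supplied by the user.
fc = arcpy.GetParameterAsText(0)

if not arcpy.Exists(fc):
    # The tool keeps running; the message appears as a yellow
    # warning in the tool's geoprocessing messages.
    arcpy.AddWarning("Input " + fc + " was not found; continuing with defaults.")
else:
    arcpy.AddMessage("Processing " + fc + "...")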

construct_deathdata_test_for_unsmooth_percentile.py

Source: construct_deathdata_test_for_unsmooth_percentile.py (GitHub)


import os, arcpy, numpy, numbers, ast, bisect
import datetime as dt
from operator import itemgetter
import data_filter as df  # This module filters the result based on input
df = reload(df)  # Make sure the newest module is loaded
import update_schema as us  # This module cleans the schema file so each file name appears only once
us = reload(us)  # Make sure the newest module is loaded

### Basic function build-up
# Check if a key exists in a dictionary
def if_key_exist(key, dictionary):
    try:
        dictionary[key]
        return True
    except KeyError:
        return False

# Pop the next word from a comma-separated string
def push_word(string):
    i = 0
    quote = False
    while i < len(string):
        if quote:
            if string[i] == "\"" or string[i] == "\'":
                return [string[1:i], string[i+1:]]
            else:
                i += 1
        else:
            if string[i] == "\"" or string[i] == "\'":
                quote = True
            elif string[i] == "," or string[i] == "]" or string[i] == '\n':
                return [string[0:i], string[i:]]
            i += 1
    if i < len(string):
        return [string[0+i], string[i+1:]]
    else:
        return [string, '']

def index_field(string, field_name):
    i = 0
    [current, string] = push_word(string)
    while current != "":
        if current == field_name:
            return i
        [current, string] = push_word(string[1:])
        i += 1
    raise NameError('No Field Found')

# Generate a sequence of numbers
def sequence(start, length, step=1):
    result = []
    i = 0
    while i < length:
        result.append(start + i * step)
        i += 1
    return result

# Erase certain columns from a data frame
def col_erase(datalist, nvector):
    i = 0
    while i < len(nvector):
        if nvector[i] < 0:
            nvector[i] += len(datalist[0])
        i += 1
    seq = sequence(0, len(datalist[0]), 1)
    select = [n for n in seq if not n in nvector]
    result = []
    for row in datalist:
        get_num = itemgetter(*select)(row)
        if type(get_num) is tuple:
            result.append(list(get_num))
        elif type(get_num) is list:
            result.append(get_num)
        elif type(get_num) is int or type(get_num) is float:
            result.append([get_num])
    return result

# Column-merge two data sets
def c_merge(df1, df2):
    if len(df1) != len(df2):
        raise ValueError("Two data frames don't have the same number of rows")
    i = 0
    result = []
    while i < len(df1):
        result.append(df1[i] + df2[i])
        i += 1
    return result

# Create a zero matrix
def create_zero_mat(nrow, ncol):
    result = []
    i = 0
    while i < nrow:
        result.append(sequence(0, ncol, 0))
        i += 1
    return result

# Build a dictionary from a data frame
def df_to_dict(datalist, key_index):
    result = dict()
    for row in datalist:
        result.update({row[key_index]: row})
    return result

# Index the right slot for an age
def index_age(age, age_struct):
    i = 0
    while i < len(age_struct):
        if i == 0 and age < age_struct[0]:
            return -1
        elif age_struct[i] < 0:
            if age < abs(age_struct[i]):
                return i - 1
            else:
                return -1
        elif i + 1 == len(age_struct):
            return i
        elif age >= age_struct[i] and age < age_struct[i + 1]:
            return i
        i += 1

# Vector division x1i/y1i
def vector_divide(v1, v2):
    result = []
    if isinstance(v2, numbers.Number):
        i = 0
        while i < len(v1):
            result.append(float(v1[i])/v2)
            i += 1
        return result
    elif len(v1) != len(v2):
        raise ValueError("Length of the two vectors is not the same")
    else:
        i = 0
        while i < len(v1):
            if v2[i] == 0:
                result.append(0)
            else:
                result.append(float(v1[i])/float(v2[i]))
            i += 1
        return result

# Vector multiplication x1i*y1i
def vector_multi(v1, v2):
    result = []
    if isinstance(v2, numbers.Number):
        i = 0
        while i < len(v1):
            result.append(float(v1[i])*float(v2))
            i += 1
        return result
    elif len(v1) != len(v2):
        raise ValueError("Length of the two vectors is not the same")
    else:
        i = 0
        while i < len(v1):
            result.append(float(v1[i])*float(v2[i]))
            i += 1
        return result

# Vector addition x1i + y1i
def vector_plus(v1, v2):
    result = []
    if isinstance(v2, numbers.Number):
        i = 0
        while i < len(v1):
            result.append(float(v1[i]) + float(v2))
            i += 1
        return result
    elif len(v1) != len(v2):
        raise ValueError("Length of the two vectors is not the same")
    else:
        i = 0
        while i < len(v1):
            result.append(float(v1[i]) + float(v2[i]))
            i += 1
        return result

# Data frame row sum
def row_sum(df):
    result = []
    for row in df:
        result.append([sum(row)])
    return result

def col_sum(df):
    result = []
    i = 0
    while i < len(df[0]):
        temp = 0
        for row in df:
            temp += row[i]
        result.append(temp)
        i += 1
    return result

# Construct a string from a vector
def vect_to_str(vector):
    result = ""
    for elem in vector:
        if isinstance(elem, str):
            result += "\'" + elem + "\',"
        else:
            result += str(elem) + ","
    result = result[0:len(result)-1]  # .replace(" ", "")
    return result

# Sample from a Gamma distribution
def gamma_sample(shape, scale, nSample, geoid):
    numpy.random.seed(20151201)
    if shape == 0:
        g_sample1000 = sequence(0, nSample, 0)
        arcpy.AddWarning("Some age group in " + geoid + " doesn't have any incident!!!")
    else:
        g_sample1000 = numpy.random.gamma(shape, scale, nSample)
    return g_sample1000

# Sum a numeric matrix
def df_sum(df):
    result = 0
    for row in df:
        result += sum(row)
    return result

# Get prior events and prior population for each age category in each geographic area
def get_a0_n0(result, ncol, death_count, percentile, a00=[], n00=[], minimum_n0=5, debug=False):  # Leave a00/n00 empty for the global a0 and n0 calculation
    Y_prior = 6.0
    pop_mat = col_erase(result, sequence(-1, ncol, -1))
    case_mat = col_erase(death_count, sequence(-1, ncol, -1))
    #n_tot = df_sum(pop_mat)
    #c_tot = df_sum(case_mat)
    n_tot = col_sum(pop_mat)
    if sum(n_tot) == 0:
        return [a00, n00]

    c_tot = col_sum(case_mat)
    lam = vector_divide(c_tot, n_tot)

    if debug:
        arcpy.AddWarning(c_tot)
        arcpy.AddWarning(n_tot)
        arcpy.AddWarning("!--------!")

    if n00 == []:  # if n00 is empty we are calculating n00
        pi_lam = vector_multi(percentile, lam)
        lam0d = sum(pi_lam)
        pct_age_spec = vector_divide(pi_lam, lam0d)
        a0adj = vector_multi(pct_age_spec, Y_prior)
        n0 = vector_divide(a0adj, lam)
        if debug:
            arcpy.AddWarning(lam)
        return [a0adj, n0]
    else:
        lamadj = []
        a0adj_00 = []
        i = 0
        while i < len(n_tot):
            each_n = n_tot[i]
            if each_n == 0:
                #arcpy.AddMessage('!!!')
                #arcpy.AddMessage(float(a00[i])/n00[i])
                lamadj.append(float(a00[i])/n00[i])
            else:
                omega = min(float(each_n)/n00[i], 0.99)
                if debug:
                    arcpy.AddWarning("000000")
                    arcpy.AddWarning(omega)
                    arcpy.AddWarning("000000")
                lamadj.append(omega*c_tot[i]/each_n + (1-omega)*a00[i]/n00[i])
            i += 1

        pi_lamadj = vector_multi(percentile, lamadj)
        lamadj0d = sum(pi_lamadj)
        pct_age_spec_adj = vector_divide(pi_lamadj, lamadj0d)
        a0adj_00 = vector_multi(pct_age_spec_adj, Y_prior)
        n0 = vector_divide(a0adj_00, lamadj)

        if debug:
            arcpy.AddWarning("#####")
            arcpy.AddWarning(lamadj)
            arcpy.AddWarning(n0)
            arcpy.AddWarning("#####")

        return [a0adj_00, n0]

# Sample the vector based on percentiles; unit is cases per 100,000 people
def sample_percentile(vector, percentile_vector):
    temp = sorted(vector)
    result = []
    for percent in percentile_vector:
        result.append(temp[int(percent * len(vector))]*100000)
    return result

def col_divide(df, ncol, num, header=False):
    if header:
        i = 1
    else:
        i = 0
    while i < len(df):
        df[i][ncol] /= num
        i += 1
    return df

def check_a0_okay(a0):
    for a0k in a0:
        if a0k < 0.000001:  # Can't compare floating-point values to 0 with equality
            return False
    return True

def check_age_group_case_count(death_count, dataCol_cnt):
    result = death_count[0][0:dataCol_cnt]
    i = 1
    while i < len(death_count):
        result = vector_plus(result, death_count[i][0:dataCol_cnt])
        #print result
        if not 0.0 in result:
            return True
        i += 1
    return False

### Function to be called by the main core. It is the wrapper function for this module
def construct_deathdata(r_note_col, result, percent, inputdata, outputfolder, id_field, age_field, nyear, state_shp="", GeoID="", ngbh_dict_loc=""):
    nyear = float(nyear)
    input_ext = os.path.splitext(os.path.split(inputdata)[1])[1]
    if input_ext == '.csv':
        temp_f = open(inputdata, 'r')
        header_string = temp_f.readline().replace('\n', '')
        temp_f.close()

        id_id = index_field(header_string, id_field)

        f = open(os.path.split(inputdata)[0] + '\\schema.ini', 'a')
        f.write('[' + os.path.split(inputdata)[1] + ']\n')
        f.write('Col{0}={1} Text Width 30\n'.format(id_id+1, id_field))
        f.close()

    arcpy.AddMessage("Constructing disease/death rate from individual records...")
    ## Construct the basic matrix for each geographic boundary
    num_count = len(percent[0])
    header_zero = result[0][0:num_count]
    if header_zero[len(header_zero)-1] < 0:
        num_count -= 1
    zero_mat = create_zero_mat(len(result)-1, num_count)
    death_count = c_merge(zero_mat, r_note_col[1:])
    death_count_dict = df_to_dict(death_count, len(death_count[0])-1)
    ## Go through each record to generate the disease/death count in each age category for each geographic boundary
    cursor = arcpy.SearchCursor(inputdata)
    errorID = []
    total_death = 0
    for row in cursor:
        temp_ID = str(row.getValue(id_field))
        temp_age_check = row.getValue(age_field)

        try:
            temp_age = float(temp_age_check)
        except ValueError:
            arcpy.AddWarning('Age input for ID ' + temp_ID + ' is \'' + str(temp_age_check) + '\'!! Clean the data or the program will treat this age as 0!!!')
            temp_age = 0

        if not if_key_exist(temp_ID, death_count_dict):
            errorID.append(temp_ID)
        else:
            idx = index_age(temp_age, header_zero)
            if idx != -1:
                death_count_dict[temp_ID][idx] += 1

    if not check_age_group_case_count(death_count, len(death_count[0])-len(r_note_col[0])):
        arcpy.AddWarning(death_count)
        arcpy.AddError("Some age group doesn't have any case in it!!! Please summarize your data based on age and then redesign your age groups.")

    arcpy.AddMessage("Calculating age adjusted rate...")
    # Calculate the age adjusted rate for each county
    i = 1
    num_rate = []
    while i < len(result):
        key_id = r_note_col[i][len(r_note_col[0])-1]
        num_rate.append(vector_multi(vector_divide(death_count_dict[key_id][0:num_count], result[i][0:num_count]), 100000))
        i += 1
    rate = []
    for row in num_rate:
        rate.append(vector_multi(percent[0], row))
    age_adj_rate = [["Age_adjust_rate"]]
    age_adj_rate.extend(col_divide(row_sum(rate), 0, nyear))
    ratesum = row_sum(rate)

    unsmooth_pctl_ns = [['ns_percentile']]
    ###
    ### For non-spatial Bayesian
    ###
    ncol = len(r_note_col[0])
    [a0, n0] = get_a0_n0(result[1:], ncol, death_count, percent[0])
    i = 0
    aar_bayesian = []
    field_name = ["Baye_AAR", "Baye_2p5", "Baye_97p5"]
    aar_bayesian.append(field_name)
    while i < len(death_count):
        Y = death_count[i][0:num_count]
        n = result[i+1][0:num_count]
        Geokey = r_note_col[i][-1]
        # Make sure n is always equal to or larger than Y
        k = 0
        while k < len(n):
            n[k] = max(Y[k], n[k])
            k += 1
        j = 0
        age_group = []
        while j < num_count:
            g_samps_per = vector_multi(gamma_sample(Y[j] + a0[j], 1.0/(n[j] + n0[j]), 5000, Geokey), percent[0][j])
            age_group.append(g_samps_per)
            j += 1
        aar_bayesian.append(sample_percentile(col_sum(age_group), [0.5, 0.025, 0.975]))
        unsmooth_pctl_ns.append([bisect.bisect(col_sum(age_group), ratesum[i][0]/100000)/50.0])
        i += 1
    aar_bayesian = col_divide(aar_bayesian, 0, nyear, True)
    aar_bayesian = col_divide(aar_bayesian, 1, nyear, True)
    aar_bayesian = col_divide(aar_bayesian, 2, nyear, True)
    ### Bayesian ends here
    if state_shp != "" or ngbh_dict_loc != "":
        arcpy.AddMessage("Spatial smoothing the results...")

        unsmooth_pctl_sp = [['sp_percentile']]
        ### Spatial Bayesian starts here
        if ngbh_dict_loc != "":
            fngbh = open(ngbh_dict_loc, 'r')
            ngbh_dict = ast.literal_eval(fngbh.read())
            fngbh.close()
            del fngbh
        else:
            ngbh_dict = df.build_neighborhood_dict(state_shp, GeoID, selection_type="First_Order")

        i = 0
        sp_aar_bayesian = []
        field_name = ["SpBay_AAR", "SpBay_2p5", "SpBay_97p5"]
        sp_aar_bayesian.append(field_name)
        while i < len(death_count):
            Geokey = result[i+1][-1]

            data_list_dict = ngbh_dict[Geokey]
            [temp_result, temp_col] = df.filter_with_dict(result, r_note_col, "GEOID", data_list_dict, cnty_filter=False)
            death_with_header = [result[0]]
            death_with_header.extend(death_count)
            [temp_death, temp_dcol] = df.filter_with_dict(death_with_header, r_note_col, "GEOID", data_list_dict, cnty_filter=False)
            #arcpy.AddMessage(Geokey)  ### For debug
            # if Geokey == '37019020304':
            #     [a0i, n0i] = get_a0_n0(temp_result[1:], ncol, temp_death[1:], percent[0], a0, n0, 5, True)
            #     arcpy.AddWarning(a0)
            #     arcpy.AddWarning(n0)
            #     arcpy.AddWarning("========")
            # else:
            #     [a0i, n0i] = get_a0_n0(temp_result[1:], ncol, temp_death[1:], percent[0], a0, n0)
            [a0i, n0i] = get_a0_n0(temp_result[1:], ncol, temp_death[1:], percent[0], a0, n0)

            Y = death_count[i][0:num_count]
            n = result[i+1][0:num_count]

            # Make sure n is always equal to or larger than Y
            k = 0
            while k < len(n):
                n[k] = max(Y[k], n[k])
                k += 1
            j = 0
            sp_age_group = []
            while j < num_count:
                if n[j] + n0i[j] == 0:
                    arcpy.AddError(n)
                    arcpy.AddError(n0i)
                sp_g_samps_per = vector_multi(gamma_sample(Y[j] + a0i[j], 1.0/(n[j] + n0i[j]), 5000, Geokey), percent[0][j])
                # if Geokey == '37019020304':  ### For debug
                #     arcpy.AddWarning(Y[j] + a0i[j])
                #     arcpy.AddWarning(n[j] + n0i[j])
                #     arcpy.AddWarning(sample_percentile(gamma_sample(Y[j] + a0i[j], 1.0/(n[j] + n0i[j]), 5000, Geokey), [0.5, 0.025, 0.975]))
                #     arcpy.AddWarning(percent[0][j])
                #     arcpy.AddWarning("-------")

                sp_age_group.append(sp_g_samps_per)
                j += 1
            sp_aar_bayesian.append(sample_percentile(col_sum(sp_age_group), [0.5, 0.025, 0.975]))
            unsmooth_pctl_sp.append([bisect.bisect(col_sum(sp_age_group), ratesum[i][0]/100000)/50.0])
            i += 1

            # if Geokey == '37019020304':  ### For debug
            #     arcpy.AddWarning(Y)
            #     arcpy.AddWarning(n)
            #     arcpy.AddWarning(a0i)
            #     arcpy.AddWarning(n0i)
            #     arcpy.AddWarning(percent)
            #     arcpy.AddWarning(sp_aar_bayesian[-1])
            #     arcpy.AddWarning(sample_percentile(col_sum(sp_age_group), [0.5, 0.025, 0.975]))

        sp_aar_bayesian = col_divide(sp_aar_bayesian, 0, nyear, True)
        sp_aar_bayesian = col_divide(sp_aar_bayesian, 1, nyear, True)
        sp_aar_bayesian = col_divide(sp_aar_bayesian, 2, nyear, True)
        age_adj_rate = c_merge(age_adj_rate, sp_aar_bayesian)

    ###
    ### For non-spatial Bayesian
    ###
    age_adj_rate = c_merge(age_adj_rate, aar_bayesian)
    avg_rate = sum(vector_multi(vector_divide(a0, n0), percent[0]))/nyear * 100000
    pop_seq = col_erase(result[1:], sequence(-1, ncol, -1))
    pop_sum = row_sum(pop_seq)
    #arcpy.AddMessage(len(pop_sum))
    #arcpy.AddMessage(len(aar_bayesian))
    i = 1
    while i < len(aar_bayesian):
        row = pop_sum[i-1]
        if float(aar_bayesian[i][0]) < float(aar_bayesian[i][2]) - float(aar_bayesian[i][1]):
            if state_shp != "" or ngbh_dict_loc != "":
                if float(sp_aar_bayesian[i][0]) < float(sp_aar_bayesian[i][2]) - float(sp_aar_bayesian[i][1]):
                    row.append("Alert:Unreliable Estimate!!!!")
                    row.append(1)
                    row.append(1)
                else:
                    row.append("Alert:Unreliable non-Spatial Bayesian Estimate!!!!")
                    row.append(1)
                    row.append(0)
            else:
                row.append("Alert:Unreliable non-Spatial Bayesian Estimate!!!!")
                row.append(1)
        elif state_shp != "" or ngbh_dict_loc != "":
            if float(sp_aar_bayesian[i][0]) < float(sp_aar_bayesian[i][2]) - float(sp_aar_bayesian[i][1]):
                row.append("Alert:Unreliable Spatial Bayesian Estimate!!!!")
                row.append(0)
                row.append(1)
            else:
                row.append("-")
                row.append(0)
                row.append(0)
        else:
            row.append("-")
            row.append(0)

        i += 1
    pop_name = [["Population", "Alert", "NSpUnreli"]]
    if state_shp != "" or ngbh_dict_loc != "":
        pop_name[0].append("SpUnreli")
    pop_name.extend(pop_sum)
    ### Bayesian ends here
    output = c_merge(age_adj_rate, r_note_col)
    output = c_merge(output, unsmooth_pctl_ns)
    if ngbh_dict_loc != "":
        output = c_merge(output, unsmooth_pctl_sp)
    output_pop = c_merge(output, pop_name)
    # Write output to a csv file
    filename = os.path.splitext(os.path.split(inputdata)[1])[0]
    f = open(outputfolder + "\\" + "age_adjust_" + filename + ".csv", "w")
    head = True
    for row in output_pop:
        if head:
            head = False
            headerline = row
        temp_text = vect_to_str(row)
        f.writelines(temp_text + "\n")
    f.close()
    # Clean schema.ini to remove any entry with the same table name
    cleaned_content = us.clean_exist_schema(outputfolder + "\\" + "schema.ini", ["age_adjust_" + filename + ".csv"])

    # Write the schema.ini file
    f = open(outputfolder + "\\" + "schema.ini", "w")
    f.write(cleaned_content)
    f.writelines("[age_adjust_" + filename + ".csv]\n")
    f.writelines("Format=CSVDelimited\n")
    f.writelines("ColNameHeader=True\n")
    i = 1
    for col in headerline:
        #arcpy.AddMessage(col)
        if col in ["state", "county", "tract", "GEOID"]:
            f.writelines("Col" + str(i) + "=" + str(col) + " Text Width 80\n")
        elif col in ["Alert", "NAME"]:
            f.writelines("Col" + str(i) + "=" + str(col) + " Text Width 200\n")
        elif col in ["Population", "NSpUnreli", "SpUnreli"]:
            f.writelines("Col" + str(i) + "=" + col + " Long\n")
        else:
            f.writelines("Col" + str(i) + "=" + col + " Double\n")
        i += 1
    f.writelines("\n")
    f.close()

    if errorID != []:
        arcpy.AddWarning("Warning: The following IDs were not identified in the census data: " + str(errorID) + "!!!")
    else:
        arcpy.AddMessage("Age adjusted rate successfully calculated with no error!!!")

    arcpy.AddMessage("The average rate for the area is " + str(avg_rate) + ' cases per 100,000')


RasterCalculateByAverage_eaStaging_s3_crf.py

Source: RasterCalculateByAverage_eaStaging_s3_crf.py (GitHub)


import arcpy, os, sys, time
import arcgisscripting
import zipfile
import csv
#from dbfpy import dbf
from arcpy import env
from arcpy import Raster
from arcpy.sa import *
from arcpy.analysis import *
from datetime import datetime
#from simpledbf import Dbf5

class LicenseError(Exception):
    # Raised below when the Spatial Analyst extension is unavailable
    pass

def dbf2csv(dbfpath, csvpath):
    ''' Convert a .dbf file or any shapefile/featureclass to a csv file
    Inputs:
        dbfpath: full path to .dbf file [input] or featureclass
        csvpath: full path to .csv file [output]
    '''
    rows = arcpy.SearchCursor(dbfpath)
    #csvFile = csv.writer(open(csvpath, 'wb'))  # output csv; 'wb' works for python 2.*
    csvFile = csv.writer(open(csvpath, 'w'))  # output csv; 'w' works for python 3.*
    fieldnames = [f.name for f in arcpy.ListFields(dbfpath)]
    allRows = []
    for row in rows:
        rowlist = []
        for field in fieldnames:
            rowlist.append(row.getValue(field))
        allRows.append(rowlist)
    csvFile.writerow(fieldnames)
    for row in allRows:
        csvFile.writerow(row)
    row = None
    rows = None

def zipws(path, zip, keep):
    arcpy.AddWarning("inside zipws:")
    path = os.path.normpath(path)
    for (dirpath, dirnames, filenames) in os.walk(path):
        for file in filenames:
            if not file.endswith('.lock'):
                arcpy.AddMessage("Adding %s..." % os.path.join(path, dirpath, file))
                try:
                    if keep:
                        zip.write(os.path.join(dirpath, file), os.path.join(os.path.basename(path), os.path.join(dirpath, file)[len(path)+len(os.sep):]))
                    else:
                        zip.write(os.path.join(dirpath, file), os.path.join(dirpath[len(path):], file))
                except Exception as e:
                    arcpy.AddWarning("    Error adding %s: %s" % (file, e))
    return None

def computeRasterByYearRange(rasterList, startingYear, endingYear):
    previousAverageStart = 0
    for yearIndex in range(int(startingYear), int(endingYear) + 1, 1):
        if (yearIndex % averageRange) != 0:
            currentRangeStart = int(yearIndex) - (int(yearIndex) % averageRange)
            if currentRangeStart != previousAverageStart:
                pathCurrent = chooseDataPath(yearIndex)
                if seasonSelection != "Annual":
                    rasterList.append(os.path.join(pathCurrent, "T" + str(yearIndex) + "_" + seasonSelection + climateVariableSelection + rasterFormat))
                else:
                    rasterList.append(os.path.join(pathCurrent, "T" + str(yearIndex) + "_" + climateVariableSelection + rasterFormat))
        else:
            if (yearIndex + averageRange) <= int(endingYear):
                pathCurrent = chooseAverageDataPath(yearIndex)
                rasterList.append(os.path.join(pathCurrent, "T" + str(yearIndex) + "_" + str(yearIndex + averageRange - 1) + rasterFormat))
                previousAverageStart = yearIndex
            else:
                pathCurrent = chooseDataPath(yearIndex)
                if seasonSelection != "Annual":
                    rasterList.append(os.path.join(pathCurrent, "T" + str(yearIndex) + "_" + seasonSelection + climateVariableSelection + rasterFormat))
                else:
                    rasterList.append(os.path.join(pathCurrent, "T" + str(yearIndex) + "_" + climateVariableSelection + rasterFormat))

def chooseDataPath(year):
    if float(year) <= float(endOfHistoricYear):
        return os.path.join(s3Path, climateDataFolder + "\\" + "Hist\\" + climateVariableSelection + "\\" + seasonSelection + climateVariableSelection)
    else:
        return os.path.join(s3Path, climateDataFolder + "\\" + futureClimateScenario + "\\" + climateVariableSelection + "\\" + seasonSelection + climateVariableSelection)

def chooseAverageDataPath(year):
    if float(year) <= float(endOfHistoricYear):
        return os.path.join(s3Path, climateAverageDataFolder + "\\" + "Hist\\" + climateVariableSelection + "\\" + seasonSelection + climateVariableSelection)
    else:
        return os.path.join(s3Path, climateAverageDataFolder + "\\" + futureClimateScenario + "\\" + climateVariableSelection + "\\" + seasonSelection + climateVariableSelection)

# Define global variables
gp = arcgisscripting.create(10.2)
s3Path = "D:/Public/Data/projects/s3bucket/enviroatlas-vdc-stage-s3bucket.acs"
climateDataFolder = "ClimateData_2015_crf"
#climateData = r"D:\Public\Data\projects\s3bucket\temp\EnviroAtlas\ClimateData_2015_crf"
climateAverageDataFolder = "ClimateData_2015_crf/Average_ECAT"
#climateAverageData = r"D:\Public\Data\projects\s3bucket\temp\EnviroAtlas\ClimateData_2015_crf\Average_ECAT"
rasterFormat = ".crf"
#scratchWS = arcpy.env.scratchWorkspace
scratchWS = r"D:\Public\Data\projects\EnviroAtlas_Public\scripts\ECAT_script\Scratch\scratch.gdb"
arcpy.env.scratchWorkspace = scratchWS
desc = arcpy.Describe(scratchWS)
scratchPath = desc.path
arcpy.AddWarning("scratchWS: " + scratchWS)
precision = 0.1
#tempFileInt = os.path.join(scratchWS, "resultInt")
tempFile = scratchWS + "\\resultFloat"
tempFutureAverageRas = scratchWS + "\\FutureAverage"
tempHistAverageRas = scratchWS + "\\HistAverage"
tempFileInt = scratchWS + "\\resultInt"  # the integer raster result obtained from tempFile / precision
arcpy.AddWarning("scratchWS:" + scratchWS)
arcpy.AddWarning("scratchPath:" + scratchPath)
arcpy.AddWarning("tempFile:" + tempFile)
arcpy.AddWarning("tempFileInt:" + tempFileInt)
ZipFolderName = "ZipShapefileFolder"
shpFileName = "comparison.shp"
ZipFileName = os.path.join(scratchWS, "resultShp.zip")
endOfHistoricYear = 2005
histRasterList = []
futureRasterList = []
gp.LogHistory = True
futureClimateScenario = arcpy.GetParameterAsText(0)  # e.g. RCP60
startingYearBaseline = arcpy.GetParameterAsText(1)
endingYearBaseline = arcpy.GetParameterAsText(2)
startingYearFuture = arcpy.GetParameterAsText(3)
endingYearFuture = arcpy.GetParameterAsText(4)
climateVariableSelection = arcpy.GetParameterAsText(5)  # e.g. TempMax, PET, Precip
seasonSelection = arcpy.GetParameterAsText(6)  # e.g. Fall, Annual
averageRange = 5
arcpy.env.overwriteOutput = True

def main():
    try:
        if arcpy.CheckExtension("Spatial") == "Available":
            arcpy.CheckOutExtension("Spatial")
        else:
            raise LicenseError

        startTimeForECAT = round(time.time())

        if (int(startingYearBaseline) <= endOfHistoricYear) and (endOfHistoricYear < int(endingYearBaseline)):
            computeRasterByYearRange(histRasterList, startingYearBaseline, endOfHistoricYear)
            computeRasterByYearRange(histRasterList, endOfHistoricYear + 1, endingYearBaseline)
        else:
            computeRasterByYearRange(histRasterList, startingYearBaseline, endingYearBaseline)
        #histAverage = histAverage/(int(endingYearBaseline) + 1 - int(startingYearBaseline))

        if (int(startingYearFuture) <= endOfHistoricYear) and (endOfHistoricYear < int(endingYearFuture)):
            computeRasterByYearRange(futureRasterList, startingYearFuture, endOfHistoricYear)
            computeRasterByYearRange(futureRasterList, endOfHistoricYear + 1, endingYearFuture)
        else:
            computeRasterByYearRange(futureRasterList, startingYearFuture, endingYearFuture)
        #futureAverage = futureAverage/(int(endingYearFuture) + 1 - int(startingYearFuture))

        arcpy.AddWarning("histRasterList : ")
        for indexYear in range(0, len(histRasterList), 1):
            arcpy.AddWarning(histRasterList[indexYear])
        arcpy.AddWarning("futureRasterList : ")
        for indexYear in range(0, len(futureRasterList), 1):
            arcpy.AddWarning(futureRasterList[indexYear])
        histAverage = CellStatistics(histRasterList, "SUM", "DATA")
        arcpy.AddWarning("After CellStatistics for hist")
        futureAverage = CellStatistics(futureRasterList, "SUM", "DATA")

        #outRas = (futureAverage - histAverage) / 100.0
        outRas = ((futureAverage/(int(endingYearFuture) + 1 - int(startingYearFuture))) - (histAverage/(int(endingYearBaseline) + 1 - int(startingYearBaseline)))) / 100.0
        FutureAverageRas = futureAverage/(int(endingYearFuture) + 1 - int(startingYearFuture))
        HistAverageRas = histAverage/(int(endingYearBaseline) + 1 - int(startingYearBaseline))
        ##FutureAverageRas.save(tempFutureAverageRas)
        ##HistAverageRas.save(tempHistAverageRas)
        arcpy.AddWarning("after making subtraction")
        if outRas is not None:
            arcpy.AddWarning("tempFile will be saved as:" + tempFile)
            ##outRas_1digit.save(tempFileInt)
            outRas.save(tempFile)
            arcpy.AddWarning("tempFile is already saved")
            arcpy.SetParameterAsText(7, tempFile)
        else:
            arcpy.AddWarning("Failed to make raster tempFile")

        endTimeForECAT = round(time.time())
        timeSpentForCOG = round((endTimeForECAT - startTimeForECAT)/60, 1)
        arcpy.AddWarning('Time spent for ECAT startingYearBaseline: ' + startingYearBaseline + "; endingYearBaseline: " + endingYearBaseline + "; startingYearFuture: " + startingYearFuture + "; endingYearFuture: " + endingYearFuture + ' is: ' + str(timeSpentForCOG) + ' minutes')
        print('\n' + 'Time spent for ECAT startingYearBaseline: ' + startingYearBaseline + "; endingYearBaseline: " + endingYearBaseline + "; startingYearFuture: " + startingYearFuture + "; endingYearFuture: " + endingYearFuture + ' is: ' + str(timeSpentForCOG) + ' minutes')
    except IOError as e:
        arcpy.AddWarning("IOError, errno: " + str(e.errno) + "; strerror: " + str(e.strerror))
    except ValueError as e:
        arcpy.AddWarning("ValueError: " + str(e))
    except:
        arcpy.AddWarning("other error")
        arcpy.AddWarning("Error: {0}".format(sys.exc_info()[0]))

    # Perform zonal statistics
    outTable = scratchWS + "\\ZonalSta_by5YearAverage"
    out_feature_class = r"D:\Public\Data\projects\EnviroAtlas_Public\Other\HUCQuery\HUC12Raster\HUC12Raster"

    try:
        arcpy.AddWarning("out_feature_class: " + out_feature_class)
        arcpy.AddWarning("tempFile: " + tempFile)
        arcpy.AddWarning("outTable: " + outTable)

        outZSaT = ZonalStatisticsAsTable(out_feature_class, "HUC_12", tempFile, outTable, "DATA", "MEAN")
        arcpy.AddWarning("Implemented ZonalStatisticsAsTable")
    except Exception as e:
        arcpy.AddWarning("error ZonalStatisticsAsTable " + str(e))
    finally:
        arcpy.CheckInExtension("Spatial")
    #csv_fn = scratchWS + "\\HUC12Statistic.csv"
    csv_fn = scratchPath + "\\HUC12Statistic.csv"
    dbf2csv(outTable, csv_fn)
    try:
        arcpy.SetParameter(8, csv_fn)
    except Exception as e:
        arcpy.AddWarning("error setting output parameter for csv file: " + str(e))

if __name__ == '__main__':
    main()


createDB_HuntingAreas.py

Source: createDB_HuntingAreas.py (GitHub)


# -*- coding: utf-8 -*-
import arcpy as a
input_folder = a.GetParameterAsText(0)
ref_system = a.GetParameterAsText(1)

def main(input_folder=input_folder, ref_system=ref_system):
    a.AddMessage("\n" + "Step-by-step execution. Total steps: 4")
    a.AddMessage("\n" + "Step 1. Creating the 'HuntingAreas' database")
    if a.Exists(input_folder + "\\HuntingAreas.mdb"):
        a.AddWarning("The database already exists")
    else:
        try:
            a.CreatePersonalGDB_management(input_folder, "HuntingAreas.mdb", "10.0")
            a.AddMessage("Database created")
        except:
            a.AddWarning("Failed to create the database")
    a.AddMessage("\n" + "Step 2. Creating feature datasets")
    def crFSet(name):
        if a.Exists(input_folder + "\\" + "HuntingAreas.mdb" + "\\" + name):
            a.AddWarning("Dataset " + name + " already exists")
        else:
            a.CreateFeatureDataset_management(input_folder + "\\" + "HuntingAreas.mdb", name, ref_system)
            a.AddMessage("Created dataset " + name)
    try:
        crFSet("BORDERS")
        crFSet("OBJECTS")
        a.AddMessage("Step 2 result: all datasets created")
    except:
        a.AddWarning("Step 2 result: failed to create the datasets")
    a.AddMessage("\n" + "Step 3. Creating feature classes")
    def crClass(ds, name, geom, alias):
        if a.Exists(input_folder + "\\" + "HuntingAreas.mdb" + "\\" + ds + "\\" + name):
            a.AddWarning("Class " + name + " already exists")
        else:
            a.CreateFeatureclass_management(input_folder + "\\" + "HuntingAreas.mdb" + "\\" + ds, name, geom)
            a.AddMessage("Created class " + ds + "\\" + name)
            a.AlterAliasName(input_folder + "\\" + "HuntingAreas.mdb" + "\\" + ds + "\\" + name, alias)
    try:
        crClass("BORDERS", "Hunt_Farm", "POLYGON", "Hunting farms")
        crClass("BORDERS", "Hunt_Zone", "POLYGON", "Hunting management zones")
        crClass("BORDERS", "Hunt_Camp", "POLYGON", "Gamekeeper beats (hunting grounds)")
        crClass("BORDERS", "Hunt_Borders", "POLYLINE", "Boundaries of hunting farms, gamekeeper beats and hunting management zones")
        crClass("BORDERS", "SpecRegion", "POLYGON", "Special territories")
        crClass("BORDERS", "RegionWork", "POLYGON", "Changes to polygon features")
        crClass("OBJECTS", "HuntObj", "POINT", "Off-scale objects and symbols")
        a.AddMessage("Feature classes created")
    except:
        a.AddWarning("Failed to create the feature classes")
    a.AddMessage("\n" + "Step 4. Creating fields in the feature classes")
    fc = input_folder + "\\" + "HuntingAreas.mdb" + "\\" + "BORDERS" + "\\" + "Hunt_Farm"
    try:
        a.AddField_management(fc, "HUNTNAME", "TEXT", "", "", 100, "Name of the hunting organization", "NULLABLE")
    except:
        a.AddWarning("Problems occurred while adding fields (Hunt_Farm)")
    fc = input_folder + "\\" + "HuntingAreas.mdb" + "\\" + "BORDERS" + "\\" + "Hunt_Zone"
    try:
        a.AddField_management(fc, "HUNTNAME", "TEXT", "", "", 100, "Name of the hunting organization", "NULLABLE")
        a.AddField_management(fc, "ZONEKOD", "SHORT", "", "", "", "Hunting management zone", "NULLABLE")
        a.AddField_management(fc, "ZONENUM", "TEXT", "", "", 1, "Zone designation", "NULLABLE")
    except:
        a.AddWarning("Problems occurred while adding fields (Hunt_Zone)")
    fc = input_folder + "\\" + "HuntingAreas.mdb" + "\\" + "BORDERS" + "\\" + "Hunt_Camp"
    try:
        a.AddField_management(fc, "HUNTNAME", "TEXT", "", "", 100, "Name of the hunting organization", "NULLABLE")
        a.AddField_management(fc, "OBHODNUM", "SHORT", "", "", "", "Number of the hunting ground / gamekeeper beat", "NULLABLE")
        a.AddField_management(fc, "OBHODNAME", "TEXT", "", "", 20, "Name of the beat / hunting ground", "NULLABLE")
        a.AddField_management(fc, "OBHODTYPE", "SHORT", "", "", "", "Type of hunting farm subdivision", "NULLABLE")
    except:
        a.AddWarning("Problems occurred while adding fields (Hunt_Camp)")
    fc = input_folder + "\\" + "HuntingAreas.mdb" + "\\" + "BORDERS" + "\\" + "Hunt_Borders"
    try:
        a.AddField_management(fc, "BorderType", "SHORT", "", "", "", "Boundary type", "NULLABLE")
    except:
        a.AddWarning("Problems occurred while adding fields (Hunt_Borders)")
    fc = input_folder + "\\" + "HuntingAreas.mdb" + "\\" + "BORDERS" + "\\" + "SpecRegion"
    try:
        a.AddField_management(fc, "SPECKOD", "SHORT", "", "", "", "Site code", "NULLABLE")
        a.AddField_management(fc, "SPECNAME", "TEXT", "", "", 3, "Site designation", "NULLABLE")
    except:
        a.AddWarning("Problems occurred while adding fields (SpecRegion)")
    fc = input_folder + "\\" + "HuntingAreas.mdb" + "\\" + "BORDERS" + "\\" + "RegionWork"
    try:
        a.AddField_management(fc, "SPECKOD", "SHORT", "", "", "", "Site code", "NULLABLE")
        a.AddField_management(fc, "SPECNAME", "TEXT", "", "", 3, "Site designation", "NULLABLE")
    except:
        a.AddWarning("Problems occurred while adding fields (RegionWork)")
    fc = input_folder + "\\" + "HuntingAreas.mdb" + "\\" + "OBJECTS" + "\\" + "HuntObj"
    try:
        a.AddField_management(fc, "HUNTNAME", "TEXT", "", "", 100, "Name of the hunting organization", "NULLABLE")
        a.AddField_management(fc, "CLASSCODE", "LONG", "", "", "", "Classification code", "NULLABLE")
    except:
        a.AddWarning("Problems occurred while adding fields (HuntObj)")
    a.AddMessage("Step 4. Finished adding fields to the feature classes")

if __name__ == "__main__":
    main()
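Here AddWarning makes the tool safely re-runnable: every create step first checks arcpy.Exists and warns about objects left over from a previous run instead of recreating them or failing. The same pattern in isolation (create_gdb_if_missing and the file geodatabase name are hypothetical; the script above uses a personal geodatabase instead):

import os
import arcpy

def create_gdb_if_missing(folder, name="HuntingAreas.gdb"):
    gdb = os.path.join(folder, name)
    if arcpy.Exists(gdb):
        # Re-running is safe: existing output is reported, not recreated.
        arcpy.AddWarning(name + " already exists")
    else:
        arcpy.CreateFileGDB_management(folder, name)
        arcpy.AddMessage("Created " + name)
    return gdb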

