How to use the triage method in avocado

Best Python code snippet using avocado_python

helpers.py

Source: helpers.py (GitHub)



1import pandas as pd2import numpy as np3import re4import os5import math6import matplotlib.pyplot as plt7from datetime import timedelta8from sklearn import metrics9from sklearn.metrics import precision_recall_curve, average_precision_score10from sklearn.preprocessing import LabelEncoder11from tensorflow.keras.utils import Sequence12from tensorflow.keras.preprocessing.sequence import pad_sequences13import collections14def convert_str_to_float(x):15 if isinstance(x, str):16 x_split = re.compile('[^a-zA-Z0-9-]').split(x.strip())17 if '-' in x_split[0]:18 x_split_dash = x_split[0].split('-')19 if len(x_split_dash) == 2 and x_split_dash[0].isnumeric() and x_split_dash[1].isnumeric():20 return (float(x_split_dash[0]) + float(x_split_dash[1])) / 221 else:22 return np.nan23 else:24 if x_split[0].isnumeric():25 return float(x_split[0])26 else:27 return np.nan28 else:29 return x30 31def read_edstays_table(edstays_table_path):32 df_edstays = pd.read_csv(edstays_table_path)33 df_edstays['intime'] = pd.to_datetime(df_edstays['intime'])34 df_edstays['outtime'] = pd.to_datetime(df_edstays['outtime'])35 return df_edstays36def read_patients_table(patients_table_path):37 df_patients = pd.read_csv(patients_table_path)38 df_patients['dod'] = pd.to_datetime(df_patients['dod'])39 return df_patients40def read_admissions_table(admissions_table_path):41 df_admissions = pd.read_csv(admissions_table_path)42 df_admissions = df_admissions.rename(columns={"race": "ethnicity"})43 df_admissions = df_admissions[['subject_id', 'hadm_id', 'admittime', 'dischtime', 'deathtime','ethnicity', 'edregtime','edouttime', 'insurance']]44 df_admissions['admittime'] = pd.to_datetime(df_admissions['admittime'])45 df_admissions['dischtime'] = pd.to_datetime(df_admissions['dischtime'])46 df_admissions['deathtime'] = pd.to_datetime(df_admissions['deathtime'])47 return df_admissions48def read_icustays_table(icustays_table_path):49 df_icu = pd.read_csv(icustays_table_path)50 df_icu['intime'] = pd.to_datetime(df_icu['intime'])51 df_icu['outtime'] = pd.to_datetime(df_icu['outtime'])52 return df_icu53def read_triage_table(triage_table_path):54 df_triage = pd.read_csv(triage_table_path)55 vital_rename_dict = {vital: '_'.join(['triage', vital]) for vital in ['temperature', 'heartrate', 'resprate', 'o2sat', 'sbp', 'dbp', 'pain', 'acuity']}56 df_triage.rename(vital_rename_dict, axis=1, inplace=True)57 df_triage['triage_pain'] = df_triage['triage_pain'].apply(convert_str_to_float).astype(float)58 return df_triage59def read_diagnoses_table(diagnoses_table_path):60 df_diagnoses = pd.read_csv(diagnoses_table_path)61 return df_diagnoses62def read_vitalsign_table(vitalsign_table_path):63 df_vitalsign = pd.read_csv(vitalsign_table_path)64 vital_rename_dict = {vital: '_'.join(['ed', vital]) for vital in65 ['temperature', 'heartrate', 'resprate', 'o2sat', 'sbp', 'dbp', 'rhythm', 'pain']}66 df_vitalsign.rename(vital_rename_dict, axis=1, inplace=True)67 df_vitalsign['ed_pain'] = df_vitalsign['ed_pain'].apply(convert_str_to_float).astype(float)68 return df_vitalsign69def read_pyxis_table(pyxis_table_path):70 df_pyxis = pd.read_csv(pyxis_table_path)71 return df_pyxis72def merge_edstays_patients_on_subject(df_edstays,df_patients):73 if 'gender' in df_edstays.columns:74 df_edstays = pd.merge(df_edstays, df_patients[['subject_id', 'anchor_age', 'anchor_year','dod']], on = ['subject_id'], how='left')75 else:76 df_edstays = pd.merge(df_edstays, df_patients[['subject_id', 'anchor_age', 'gender', 'anchor_year','dod']], on = ['subject_id'], how='left')77 return 
df_edstays78def merge_edstays_admissions_on_subject(df_edstays ,df_admissions):79 df_edstays = pd.merge(df_edstays,df_admissions, on = ['subject_id', 'hadm_id'], how='left')80 return df_edstays81def merge_edstays_triage_on_subject(df_master ,df_triage):82 df_master = pd.merge(df_master,df_triage, on = ['subject_id', 'stay_id'], how='left')83 return df_master84def add_age(df_master):85 df_master['in_year'] = df_master['intime'].dt.year86 df_master['age'] = df_master['in_year'] - df_master['anchor_year'] + df_master['anchor_age']87 #df_master.drop(['anchor_age', 'anchor_year', 'in_year'],axis=1, inplace=True)88 return df_master89def add_inhospital_mortality(df_master):90 inhospital_mortality = df_master['dod'].notnull() & (df_master['dischtime'] >= df_master['dod'])91 df_master['outcome_inhospital_mortality'] = inhospital_mortality92 return df_master93def add_ed_los(df_master):94 ed_los = df_master['outtime'] - df_master['intime']95 df_master['ed_los'] = ed_los96 return df_master97def add_outcome_icu_transfer(df_master, df_icustays, timerange):98 timerange_delta = timedelta(hours = timerange)99 df_icustays_sorted = df_icustays[['subject_id', 'hadm_id', 'intime']].sort_values('intime')100 df_icustays_keep_first = df_icustays_sorted.groupby('hadm_id').first().reset_index()101 df_master_icu = pd.merge(df_master, df_icustays_keep_first, on = ['subject_id', 'hadm_id'], how='left', suffixes=('','_icu'))102 time_diff = (df_master_icu['intime_icu']- df_master_icu['outtime'])103 df_master_icu['time_to_icu_transfer'] = time_diff104 df_master_icu[''.join(['outcome_icu_transfer_', str(timerange), 'h'])] = time_diff <= timerange_delta105 # df_master_icu.drop(['intime_icu', 'time_to_icu_transfer'],axis=1, inplace=True)106 return df_master_icu107def fill_na_ethnicity(df_master): # requires df_master to be sorted 108 N = len(df_master)109 ethnicity_list= [float("NaN") for _ in range(N)]110 ethnicity_dict = {} # dict to store subejct ethnicity111 def get_filled_ethnicity(row):112 i = row.name113 if i % 10000 == 0:114 print('Process: %d/%d' % (i, N), end='\r')115 curr_eth = row['ethnicity']116 curr_subject = row['subject_id']117 prev_subject = df_master['subject_id'][i+1] if i< (N-1) else None118 if curr_subject not in ethnicity_dict.keys(): ## if subject ethnicity not stored yet, look ahead and behind 119 subject_ethnicity_list = []120 next_subject_idx = i+1121 prev_subject_idx = i-1122 next_subject= df_master['subject_id'][next_subject_idx] if next_subject_idx <= (N-1) else None123 prev_subject= df_master['subject_id'][prev_subject_idx] if prev_subject_idx >= 0 else None124 subject_ethnicity_list.append(df_master['ethnicity'][i]) ## add current ethnicity to list125 while prev_subject == curr_subject:126 subject_ethnicity_list.append(df_master['ethnicity'][prev_subject_idx])127 prev_subject_idx -= 1128 prev_subject= df_master['subject_id'][prev_subject_idx] if prev_subject_idx >= 0 else None129 while next_subject == curr_subject:130 subject_ethnicity_list.append(df_master['ethnicity'][next_subject_idx])131 next_subject_idx += 1132 next_subject= df_master['subject_id'][next_subject_idx] if next_subject_idx <= (N-1) else None133 134 eth_counter_list = collections.Counter(subject_ethnicity_list).most_common() #sorts counter and outputs list135 136 if len(eth_counter_list) == 0: ## no previous or next entries 137 subject_eth = curr_eth138 elif len(eth_counter_list) == 1: ## exactly one other ethnicity139 subject_eth = eth_counter_list.pop(0)[0] ## extract ethnicity from count tuple140 else:141 eth_counter_list = 
[x for x in eth_counter_list if pd.notna(x[0])] # remove any NA142 subject_eth = eth_counter_list.pop(0)[0]143 144 ethnicity_dict[curr_subject] = subject_eth ## store in dict145 146 if pd.isna(curr_eth): ## if curr_eth is na, fill with subject_eth from dict147 ethnicity_list[i]= ethnicity_dict[curr_subject]148 else:149 ethnicity_list[i]= curr_eth150 151 df_master.apply(get_filled_ethnicity, axis=1)152 print('Process: %d/%d' % (N, N), end='\r')153 df_master.loc[:,'ethnicity'] = ethnicity_list154 return df_master155def generate_past_ed_visits(df_master, timerange):156 #df_master = df_master.sort_values(['subject_id', 'intime']).reset_index()157 158 timerange_delta = timedelta(days=timerange)159 N = len(df_master)160 n_ed = [0 for _ in range(N)]161 def get_num_past_ed_visits(df):162 start = df.index[0]163 for i in df.index:164 if i % 10000 == 0:165 print('Process: %d/%d' % (i, N), end='\r')166 while df.loc[i, 'intime'] - df.loc[start, 'intime'] > timerange_delta:167 start += 1168 n_ed[i] = i - start169 grouped = df_master.groupby('subject_id')170 grouped.apply(get_num_past_ed_visits)171 print('Process: %d/%d' % (N, N), end='\r')172 df_master.loc[:, ''.join(['n_ed_', str(timerange), "d"])] = n_ed173 return df_master174def generate_past_admissions(df_master, df_admissions, timerange):175 df_admissions_sorted = df_admissions[df_admissions['subject_id'].isin(df_master['subject_id'].unique().tolist())][['subject_id', 'admittime']].copy()176 177 df_admissions_sorted.loc[:,'admittime'] = pd.to_datetime(df_admissions_sorted['admittime'])178 df_admissions_sorted.sort_values(['subject_id', 'admittime'], inplace=True)179 df_admissions_sorted.reset_index(drop=True, inplace=True)180 timerange_delta = timedelta(days=timerange)181 N = len(df_master)182 n_adm = [0 for _ in range(N)]183 def get_num_past_admissions(df):184 subject_id = df.iloc[0]['subject_id']185 if subject_id in grouped_adm.groups.keys():186 df_adm = grouped_adm.get_group(subject_id)187 start = end = df_adm.index[0]188 for i in df.index:189 if i % 10000 == 0:190 print('Process: %d/%d' % (i, N), end='\r')191 while start < df_adm.index[-1] and df.loc[i, 'intime'] - df_adm.loc[start, 'admittime'] > timerange_delta:192 start += 1193 end = start194 while end <= df_adm.index[-1] and \195 (timerange_delta >= (df.loc[i, 'intime'] - df_adm.loc[end, 'admittime']) > timedelta(days=0)):196 end += 1197 n_adm[i] = end - start198 grouped = df_master.groupby('subject_id')199 grouped_adm = df_admissions_sorted.groupby('subject_id')200 grouped.apply(get_num_past_admissions)201 print('Process: %d/%d' % (N, N), end='\r')202 df_master.loc[:,''.join(['n_hosp_', str(timerange), "d"])] = n_adm203 return df_master204def generate_past_icu_visits(df_master, df_icustays, timerange):205 df_icustays_sorted = df_icustays[df_icustays['subject_id'].isin(df_master['subject_id'].unique().tolist())][['subject_id', 'intime']].copy()206 df_icustays_sorted.sort_values(['subject_id', 'intime'], inplace=True)207 df_icustays_sorted.reset_index(drop=True, inplace=True)208 timerange_delta = timedelta(days=timerange)209 N = len(df_master)210 n_icu = [0 for _ in range(N)]211 def get_num_past_icu_visits(df):212 subject_id = df.iloc[0]['subject_id']213 if subject_id in grouped_icu.groups.keys():214 df_icu = grouped_icu.get_group(subject_id)215 start = end = df_icu.index[0]216 for i in df.index:217 if i % 10000 == 0:218 print('Process: %d/%d' % (i, N), end='\r')219 while start < df_icu.index[-1] and df.loc[i, 'intime'] - df_icu.loc[start, 'intime'] > timerange_delta:220 start += 1221 end = 
start222 while end <= df_icu.index[-1] and \223 (timerange_delta >= (df.loc[i, 'intime'] - df_icu.loc[end, 'intime']) > timedelta(days=0)):224 end += 1225 n_icu[i] = end - start226 grouped = df_master.groupby('subject_id')227 grouped_icu = df_icustays_sorted.groupby('subject_id')228 grouped.apply(get_num_past_icu_visits)229 print('Process: %d/%d' % (N, N), end='\r')230 df_master.loc[:,''.join(['n_icu_', str(timerange), "d"])] = n_icu231 return df_master232def generate_future_ed_visits(df_master, next_ed_visit_timerange):233 N = len(df_master)234 time_of_next_ed_visit = [float("NaN") for _ in range(N)]235 time_to_next_ed_visit = [float("NaN") for _ in range(N)]236 outcome_ed_revisit = [False for _ in range(N)]237 timerange_delta = timedelta(days = next_ed_visit_timerange)238 curr_subject=None239 next_subject=None240 def get_future_ed_visits(row):241 i = row.name242 if i % 10000 == 0:243 print('Process: %d/%d' % (i, N), end='\r')244 curr_subject = row['subject_id']245 next_subject= df_master['subject_id'][i+1] if i< (N-1) else None246 if curr_subject == next_subject:247 curr_outtime = row['outtime']248 next_intime = df_master['intime'][i+1]249 next_intime_diff = next_intime - curr_outtime250 time_of_next_ed_visit[i] = next_intime251 time_to_next_ed_visit[i] = next_intime_diff252 outcome_ed_revisit[i] = next_intime_diff < timerange_delta253 df_master.apply(get_future_ed_visits, axis=1)254 print('Process: %d/%d' % (N, N), end='\r')255 df_master.loc[:,'next_ed_visit_time'] = time_of_next_ed_visit256 df_master.loc[:,'next_ed_visit_time_diff'] = time_to_next_ed_visit257 df_master.loc[:,''.join(['outcome_ed_revisit_', str(next_ed_visit_timerange), "d"])] = outcome_ed_revisit258 return df_master259def generate_numeric_timedelta(df_master):260 N = len(df_master)261 ed_los_hours = [float("NaN") for _ in range(N)]262 time_to_icu_transfer_hours = [float("NaN") for _ in range(N)]263 next_ed_visit_time_diff_days = [float("NaN") for _ in range(N)]264 265 def get_numeric_timedelta(row):266 i = row.name267 if i % 10000 == 0:268 print('Process: %d/%d' % (i, N), end='\r')269 curr_subject = row['subject_id']270 curr_ed_los = row['ed_los']271 curr_time_to_icu_transfer = row['time_to_icu_transfer']272 curr_next_ed_visit_time_diff = row['next_ed_visit_time_diff']273 274 ed_los_hours[i] = round(curr_ed_los.total_seconds() / (60*60),2) if not pd.isna(curr_ed_los) else curr_ed_los275 time_to_icu_transfer_hours[i] = round(curr_time_to_icu_transfer.total_seconds() / (60*60),2) if not pd.isna(curr_time_to_icu_transfer) else curr_time_to_icu_transfer276 next_ed_visit_time_diff_days[i] = round(curr_next_ed_visit_time_diff.total_seconds() / (24*60*60), 2) if not pd.isna(curr_next_ed_visit_time_diff) else curr_next_ed_visit_time_diff277 278 df_master.apply(get_numeric_timedelta, axis=1)279 print('Process: %d/%d' % (N, N), end='\r')280 281 df_master.loc[:,'ed_los_hours'] = ed_los_hours282 df_master.loc[:,'time_to_icu_transfer_hours'] = time_to_icu_transfer_hours283 df_master.loc[:,'next_ed_visit_time_diff_days'] = next_ed_visit_time_diff_days284 return df_master285def encode_chief_complaints(df_master, complaint_dict):286 holder_list = []287 complaint_colnames_list = list(complaint_dict.keys())288 complaint_regex_list = list(complaint_dict.values())289 for i, row in df_master.iterrows():290 curr_patient_complaint = str(row['chiefcomplaint'])291 curr_patient_complaint_list = [False for _ in range(len(complaint_regex_list))]292 complaint_idx = 0293 for complaint in complaint_regex_list:294 if re.search(complaint, 
curr_patient_complaint, re.IGNORECASE):295 curr_patient_complaint_list[complaint_idx] = True296 complaint_idx += 1297 298 holder_list.append(curr_patient_complaint_list)299 300 df_encoded_complaint = pd.DataFrame(holder_list, columns = complaint_colnames_list)301 df_master = pd.concat([df_master,df_encoded_complaint], axis=1)302 return df_master303def merge_vitalsign_info_on_edstay(df_master, df_vitalsign, options=[]):304 df_vitalsign.sort_values('charttime', inplace=True)305 grouped = df_vitalsign.groupby(['stay_id'])306 for option in options:307 method = getattr(grouped, option, None)308 assert method is not None, "Invalid option. " \309 "Should be a list of values from 'max', 'min', 'median', 'mean', 'first', 'last'. " \310 "e.g. ['median', 'last']"311 df_vitalsign_option = method(numeric_only=True)312 df_vitalsign_option.rename({name: '_'.join([name, option]) for name in313 ['ed_temperature', 'ed_heartrate', 'ed_resprate', 'ed_o2sat', 'ed_sbp', 'ed_dbp', 'ed_pain']},314 axis=1,315 inplace=True)316 df_master = pd.merge(df_master, df_vitalsign_option, on=['subject_id', 'stay_id'], how='left')317 return df_master318def merge_med_count_on_edstay(df_master, df_pyxis):319 df_pyxis_fillna = df_pyxis.copy()320 df_pyxis_fillna['gsn'].fillna(df_pyxis['name'], inplace=True)321 grouped = df_pyxis_fillna.groupby(['stay_id'])322 df_medcount = grouped['gsn'].nunique().reset_index().rename({'gsn': 'n_med'}, axis=1)323 df_master = pd.merge(df_master, df_medcount, on='stay_id', how='left')324 df_master.fillna({'n_med': 0}, inplace=True)325 return df_master326def merge_medrecon_count_on_edstay(df_master, df_medrecon):327 df_medrecon_fillna = df_medrecon.copy()328 df_medrecon_fillna['gsn'].fillna(df_medrecon['name'])329 grouped = df_medrecon_fillna.groupby(['stay_id'])330 df_medcount = grouped['gsn'].nunique().reset_index().rename({'gsn': 'n_medrecon'}, axis=1)331 df_master = pd.merge(df_master, df_medcount, on='stay_id', how='left')332 df_master.fillna({'n_medrecon': 0}, inplace=True)333 return df_master334def outlier_removal_imputation(column_type, vitals_valid_range):335 column_range = vitals_valid_range[column_type]336 def outlier_removal_imputation_single_value(x):337 if x < column_range['outlier_low'] or x > column_range['outlier_high']:338 # set as missing339 return np.nan340 elif x < column_range['valid_low']:341 # impute with nearest valid value342 return column_range['valid_low']343 elif x > column_range['valid_high']:344 # impute with nearest valid value345 return column_range['valid_high']346 else:347 return x348 return outlier_removal_imputation_single_value349def convert_temp_to_celcius(df_master):350 for column in df_master.columns:351 column_type = column.split('_')[1] if len(column.split('_')) > 1 else None352 if column_type == 'temperature':353 # convert to celcius354 df_master[column] -= 32355 df_master[column] *= 5/9356 return df_master357def remove_outliers(df_master, vitals_valid_range):358 for column in df_master.columns:359 column_type = column.split('_')[1] if len(column.split('_')) > 1 else None360 if column_type in vitals_valid_range:361 df_master[column] = df_master[column].apply(outlier_removal_imputation(column_type, vitals_valid_range))362 return df_master363def display_outliers_count(df_master, vitals_valid_range):364 display_df = pd.DataFrame(columns=['variable', '< outlier_low', '[outlier_low, valid_low)',365 '[valid_low, valid_high]', '(valid_high, outlier_high]', '> outlier_high'])366 for column in df_master.columns:367 column_type = column.split('_')[1] if 
len(column.split('_')) > 1 else None368 if column_type in vitals_valid_range:369 column_range = vitals_valid_range[column_type]370 display_df = display_df.append({'variable': column,371 '< outlier_low': len(df_master[df_master[column] < column_range['outlier_low']]),372 '[outlier_low, valid_low)': len(df_master[(column_range['outlier_low'] <= df_master[column])373 & (df_master[column] < column_range['valid_low'])]),374 '[valid_low, valid_high]': len(df_master[(column_range['valid_low'] <= df_master[column])375 & (df_master[column] <= column_range['valid_high'])]),376 '(valid_high, outlier_high]': len(df_master[(column_range['valid_high'] < df_master[column])377 & (df_master[column] <= column_range['outlier_high'])]),378 '> outlier_high': len(df_master[df_master[column] > column_range['outlier_high']])379 }, ignore_index=True)380 return display_df381def add_score_CCI(df):382 conditions = [383 (df['age'] < 50),384 (df['age'] >= 50) & (df['age'] <= 59),385 (df['age'] >= 60) & (df['age'] <= 69),386 (df['age'] >= 70) & (df['age'] <= 79),387 (df['age'] >= 80)388 ]389 values = [0, 1, 2, 3, 4]390 df['score_CCI'] = np.select(conditions, values) 391 df['score_CCI'] = df['score_CCI'] + df['cci_MI'] + df['cci_CHF'] + df['cci_PVD'] + df['cci_Stroke'] + df['cci_Dementia'] + df['cci_Pulmonary'] + df['cci_PUD'] + df['cci_Rheumatic'] +df['cci_Liver1']*1 + df['cci_Liver2']*3 + df['cci_DM1'] + df['cci_DM2']*2 +df['cci_Paralysis']*2 + df['cci_Renal']*2 + df['cci_Cancer1']*2 + df['cci_Cancer2']*6 + df['cci_HIV']*6392 print("Variable 'add_score_CCI' successfully added")393def add_triage_MAP(df):394 df['triage_MAP'] = df['triage_sbp']*1/3 + df['triage_dbp']*2/3395 print("Variable 'add_triage_MAP' successfully added")396def add_score_REMS(df):397 conditions1 = [398 (df['age'] < 45),399 (df['age'] >= 45) & (df['age'] <= 54),400 (df['age'] >= 55) & (df['age'] <= 64),401 (df['age'] >= 65) & (df['age'] <= 74),402 (df['age'] > 74)403 ]404 values1 = [0, 2, 3, 5, 6]405 conditions2 = [406 (df['triage_MAP'] > 159),407 (df['triage_MAP'] >= 130) & (df['triage_MAP'] <= 159),408 (df['triage_MAP'] >= 110) & (df['triage_MAP'] <= 129),409 (df['triage_MAP'] >= 70) & (df['triage_MAP'] <= 109),410 (df['triage_MAP'] >= 50) & (df['triage_MAP'] <= 69),411 (df['triage_MAP'] < 49)412 ]413 values2 = [4, 3, 2, 0, 2, 4]414 conditions3 = [415 (df['triage_heartrate'] >179),416 (df['triage_heartrate'] >= 140) & (df['triage_heartrate'] <= 179),417 (df['triage_heartrate'] >= 110) & (df['triage_heartrate'] <= 139),418 (df['triage_heartrate'] >= 70) & (df['triage_heartrate'] <= 109),419 (df['triage_heartrate'] >= 55) & (df['triage_heartrate'] <= 69),420 (df['triage_heartrate'] >= 40) & (df['triage_heartrate'] <= 54),421 (df['triage_heartrate'] < 40)422 ]423 values3 = [4, 3, 2, 0, 2, 3, 4]424 conditions4 = [425 (df['triage_resprate'] > 49),426 (df['triage_resprate'] >= 35) & (df['triage_resprate'] <= 49),427 (df['triage_resprate'] >= 25) & (df['triage_resprate'] <= 34),428 (df['triage_resprate'] >= 12) & (df['triage_resprate'] <= 24),429 (df['triage_resprate'] >= 10) & (df['triage_resprate'] <= 11),430 (df['triage_resprate'] >= 6) & (df['triage_resprate'] <= 9),431 (df['triage_resprate'] < 6)432 ]433 values4 = [4, 3, 1, 0, 1, 2, 4]434 conditions5 = [435 (df['triage_o2sat'] < 75),436 (df['triage_o2sat'] >= 75) & (df['triage_o2sat'] <= 85),437 (df['triage_o2sat'] >= 86) & (df['triage_o2sat'] <= 89),438 (df['triage_o2sat'] > 89)439 ]440 values5 = [4, 3, 1, 0]441 df['score_REMS'] = np.select(conditions1, values1) + np.select(conditions2, values2) + 
np.select(conditions3, values3) + np.select(conditions4, values4) + np.select(conditions5, values5)442 print("Variable 'Score_REMS' successfully added")443 444def add_score_CART(df):445 conditions1 = [446 (df['age'] < 55),447 (df['age'] >= 55) & (df['age'] <= 69),448 (df['age'] >= 70) 449 ]450 values1 = [0, 4, 9]451 conditions2 = [452 (df['triage_resprate'] < 21),453 (df['triage_resprate'] >= 21) & (df['triage_resprate'] <= 23),454 (df['triage_resprate'] >= 24) & (df['triage_resprate'] <= 25),455 (df['triage_resprate'] >= 26) & (df['triage_resprate'] <= 29),456 (df['triage_resprate'] >= 30) 457 ]458 values2 = [0, 8, 12, 15, 22]459 conditions3 = [460 (df['triage_heartrate'] < 110),461 (df['triage_heartrate'] >= 110) & (df['triage_heartrate'] <= 139),462 (df['triage_heartrate'] >= 140) 463 ]464 values3 = [0, 4, 13]465 conditions4 = [466 (df['triage_dbp'] > 49),467 (df['triage_dbp'] >= 40) & (df['triage_dbp'] <= 49),468 (df['triage_dbp'] >= 35) & (df['triage_dbp'] <= 39),469 (df['triage_dbp'] < 35) 470 ]471 values4 = [0, 4, 6, 13]472 df['score_CART'] = np.select(conditions1, values1) + np.select(conditions2, values2) + np.select(conditions3, values3) + np.select(conditions4, values4)473 print("Variable 'Score_CART' successfully added")474 475def add_score_NEWS(df):476 conditions1 = [477 (df['triage_resprate'] <= 8),478 (df['triage_resprate'] >= 9) & (df['triage_resprate'] <= 11),479 (df['triage_resprate'] >= 12) & (df['triage_resprate'] <= 20),480 (df['triage_resprate'] >= 21) & (df['triage_resprate'] <= 24),481 (df['triage_resprate'] >= 25) 482 ]483 values1 = [3, 1, 0, 2, 3]484 conditions2 = [485 (df['triage_o2sat'] <= 91),486 (df['triage_o2sat'] >= 92) & (df['triage_o2sat'] <= 93),487 (df['triage_o2sat'] >= 94) & (df['triage_o2sat'] <= 95),488 (df['triage_o2sat'] >= 96) 489 ]490 values2 = [3, 2, 1, 0]491 conditions3 = [492 (df['triage_temperature'] <= 35),493 (df['triage_temperature'] > 35) & (df['triage_temperature'] <= 36),494 (df['triage_temperature'] > 36) & (df['triage_temperature'] <= 38),495 (df['triage_temperature'] > 38) & (df['triage_temperature'] <= 39),496 (df['triage_temperature'] > 39) 497 ]498 values3 = [3, 1, 0, 1, 2]499 conditions4 = [500 (df['triage_sbp'] <= 90),501 (df['triage_sbp'] >= 91) & (df['triage_sbp'] <= 100),502 (df['triage_sbp'] >= 101) & (df['triage_sbp'] <= 110),503 (df['triage_sbp'] >= 111) & (df['triage_sbp'] <= 219),504 (df['triage_sbp'] > 219) 505 ]506 values4 = [3, 2, 1, 0, 3]507 conditions5 = [508 (df['triage_heartrate'] <= 40),509 (df['triage_heartrate'] >= 41) & (df['triage_heartrate'] <= 50),510 (df['triage_heartrate'] >= 51) & (df['triage_heartrate'] <= 90),511 (df['triage_heartrate'] >= 91) & (df['triage_heartrate'] <= 110),512 (df['triage_heartrate'] >= 111) & (df['triage_heartrate'] <= 130),513 (df['triage_heartrate'] > 130) 514 ]515 values5 = [3, 1, 0, 1, 2, 3] 516 df['score_NEWS'] = np.select(conditions1, values1) + np.select(conditions2, values2) + np.select(conditions3, values3) + np.select(conditions4, values4) + np.select(conditions5, values5)517 print("Variable 'Score_NEWS' successfully added")518 519def add_score_NEWS2(df): 520 conditions1 = [521 (df['triage_resprate'] <= 8),522 (df['triage_resprate'] >= 9) & (df['triage_resprate'] <= 11),523 (df['triage_resprate'] >= 12) & (df['triage_resprate'] <= 20),524 (df['triage_resprate'] >= 21) & (df['triage_resprate'] <= 24),525 (df['triage_resprate'] >= 25) 526 ]527 values1 = [3, 1, 0, 2, 3]528 conditions2 = [529 (df['triage_temperature'] <= 35),530 (df['triage_temperature'] > 35) & 
(df['triage_temperature'] <= 36),531 (df['triage_temperature'] > 36) & (df['triage_temperature'] <= 38),532 (df['triage_temperature'] > 38) & (df['triage_temperature'] <= 39),533 (df['triage_temperature'] > 39) 534 ]535 values2 = [3, 1, 0, 1, 2]536 conditions3 = [537 (df['triage_sbp'] <= 90),538 (df['triage_sbp'] >= 91) & (df['triage_sbp'] <= 100),539 (df['triage_sbp'] >= 101) & (df['triage_sbp'] <= 110),540 (df['triage_sbp'] >= 111) & (df['triage_sbp'] <= 219),541 (df['triage_sbp'] > 219) 542 ]543 values3 = [3, 2, 1, 0, 3]544 conditions4 = [545 (df['triage_heartrate'] <= 40),546 (df['triage_heartrate'] >= 41) & (df['triage_heartrate'] <= 50),547 (df['triage_heartrate'] >= 51) & (df['triage_heartrate'] <= 90),548 (df['triage_heartrate'] >= 91) & (df['triage_heartrate'] <= 110),549 (df['triage_heartrate'] >= 111) & (df['triage_heartrate'] <= 130),550 (df['triage_heartrate'] > 130) 551 ]552 values4 = [3, 1, 0, 1, 2, 3] 553 df['score_NEWS2'] = np.select(conditions1, values1) + np.select(conditions2, values2) + np.select(conditions3, values3) + np.select(conditions4, values4)554 print("Variable 'Score_NEWS2' successfully added")555 556def add_score_MEWS(df): 557 conditions1 = [558 (df['triage_sbp'] <= 70),559 (df['triage_sbp'] >= 71) & (df['triage_sbp'] <= 80),560 (df['triage_sbp'] >= 81) & (df['triage_sbp'] <= 100),561 (df['triage_sbp'] >= 101) & (df['triage_sbp'] <= 199),562 (df['triage_sbp'] > 199) 563 ]564 values1 = [3, 2, 1, 0, 2]565 conditions2 = [566 (df['triage_heartrate'] <= 40),567 (df['triage_heartrate'] >= 41) & (df['triage_heartrate'] <= 50),568 (df['triage_heartrate'] >= 51) & (df['triage_heartrate'] <= 100),569 (df['triage_heartrate'] >= 101) & (df['triage_heartrate'] <= 110),570 (df['triage_heartrate'] >= 111) & (df['triage_heartrate'] <= 129),571 (df['triage_heartrate'] >= 130) 572 ]573 values2 = [2, 1, 0, 1, 2, 3]574 conditions3 = [575 (df['triage_resprate'] < 9),576 (df['triage_resprate'] >= 9) & (df['triage_resprate'] <= 14),577 (df['triage_resprate'] >= 15) & (df['triage_resprate'] <= 20),578 (df['triage_resprate'] >= 21) & (df['triage_resprate'] <= 29),579 (df['triage_resprate'] >= 30) 580 ]581 values3 = [2, 0, 1, 2, 3]582 conditions4 = [583 (df['triage_temperature'] < 35),584 (df['triage_temperature'] >= 35) & (df['triage_temperature'] < 38.5),585 (df['triage_temperature'] >= 38.5) 586 ]587 values4 = [2, 0, 2] 588 df['score_MEWS'] = np.select(conditions1, values1) + np.select(conditions2, values2) + np.select(conditions3, values3) + np.select(conditions4, values4) 589 print("Variable 'Score_MEWS' successfully added")590 591def add_score_SERP2d(df): 592 conditions1 = [593 (df['age'] < 30),594 (df['age'] >= 30) & (df['age'] <= 49),595 (df['age'] >= 50) & (df['age'] <= 79),596 (df['age'] >= 80)597 ]598 values1 = [0, 9, 13, 17]599 conditions2 = [600 (df['triage_heartrate'] < 60),601 (df['triage_heartrate'] >= 60) & (df['triage_heartrate'] <= 69),602 (df['triage_heartrate'] >= 70) & (df['triage_heartrate'] <= 94),603 (df['triage_heartrate'] >= 95) & (df['triage_heartrate'] <= 109),604 (df['triage_heartrate'] >= 110) 605 ]606 values2 = [3, 0, 3, 6, 10]607 conditions3 = [608 (df['triage_resprate'] < 16),609 (df['triage_resprate'] >= 16) & (df['triage_resprate'] <= 19),610 (df['triage_resprate'] >= 20) 611 ]612 values3 = [11, 0, 7]613 conditions4 = [614 (df['triage_sbp'] < 100),615 (df['triage_sbp'] >= 100) & (df['triage_sbp'] <= 114),616 (df['triage_sbp'] >= 115) & (df['triage_sbp'] <= 149),617 (df['triage_sbp'] >= 150) 618 ]619 values4 = [10, 4, 1, 0]620 conditions5 = [621 
(df['triage_dbp'] < 50),622 (df['triage_dbp'] >= 50) & (df['triage_dbp'] <= 94),623 (df['triage_dbp'] >= 95) 624 ]625 values5 = [5, 0, 1]626 conditions6 = [627 (df['triage_o2sat'] < 90),628 (df['triage_o2sat'] >= 90) & (df['triage_o2sat'] <= 94),629 (df['triage_o2sat'] >= 95) 630 ]631 values6 = [7, 5, 0]632 df['score_SERP2d'] = np.select(conditions1, values1) + np.select(conditions2, values2) + np.select(conditions3, values3) + np.select(conditions4, values4) + np.select(conditions5, values5) + np.select(conditions6, values6)633 print("Variable 'Score_SERP2d' successfully added")634def add_score_SERP7d(df): 635 conditions1 = [636 (df['age'] < 30),637 (df['age'] >= 30) & (df['age'] <= 49),638 (df['age'] >= 50) & (df['age'] <= 79),639 (df['age'] >= 80)640 ]641 values1 = [0, 10, 17, 21]642 conditions2 = [643 (df['triage_heartrate'] < 60),644 (df['triage_heartrate'] >= 60) & (df['triage_heartrate'] <= 69),645 (df['triage_heartrate'] >= 70) & (df['triage_heartrate'] <= 94),646 (df['triage_heartrate'] >= 95) & (df['triage_heartrate'] <= 109),647 (df['triage_heartrate'] >= 110) 648 ]649 values2 = [2, 0, 4, 8, 12]650 conditions3 = [651 (df['triage_resprate'] < 16),652 (df['triage_resprate'] >= 16) & (df['triage_resprate'] <= 19),653 (df['triage_resprate'] >= 20) 654 ]655 values3 = [10, 0, 6]656 conditions4 = [657 (df['triage_sbp'] < 100),658 (df['triage_sbp'] >= 100) & (df['triage_sbp'] <= 114),659 (df['triage_sbp'] >= 115) & (df['triage_sbp'] <= 149),660 (df['triage_sbp'] >= 150) 661 ]662 values4 = [12, 6, 1, 0]663 conditions5 = [664 (df['triage_dbp'] < 50),665 (df['triage_dbp'] >= 50) & (df['triage_dbp'] <= 94),666 (df['triage_dbp'] >= 95) 667 ]668 values5 = [4, 0, 2]669 df['score_SERP7d'] = np.select(conditions1, values1) + np.select(conditions2, values2) + np.select(conditions3, values3) + np.select(conditions4, values4) + np.select(conditions5, values5)670 print("Variable 'Score_SERP7d' successfully added")671 672def add_score_SERP30d(df): 673 conditions1 = [674 (df['age'] < 30),675 (df['age'] >= 30) & (df['age'] <= 49),676 (df['age'] >= 50) & (df['age'] <= 79),677 (df['age'] >= 80)678 ]679 values1 = [0, 8, 14, 19]680 conditions2 = [681 (df['triage_heartrate'] < 60),682 (df['triage_heartrate'] >= 60) & (df['triage_heartrate'] <= 69),683 (df['triage_heartrate'] >= 70) & (df['triage_heartrate'] <= 94),684 (df['triage_heartrate'] >= 95) & (df['triage_heartrate'] <= 109),685 (df['triage_heartrate'] >= 110) 686 ]687 values2 = [1, 0, 2, 6, 9]688 conditions3 = [689 (df['triage_resprate'] < 16),690 (df['triage_resprate'] >= 16) & (df['triage_resprate'] <= 19),691 (df['triage_resprate'] >= 20) 692 ]693 values3 = [8, 0, 6]694 conditions4 = [695 (df['triage_sbp'] < 100),696 (df['triage_sbp'] >= 100) & (df['triage_sbp'] <= 114),697 (df['triage_sbp'] >= 115) & (df['triage_sbp'] <= 149),698 (df['triage_sbp'] >= 150) 699 ]700 values4 = [8, 5, 2, 0]701 conditions5 = [702 (df['triage_dbp'] < 50),703 (df['triage_dbp'] >= 50) & (df['triage_dbp'] <= 94),704 (df['triage_dbp'] >= 95) 705 ]706 values5 = [3, 0, 2]707 df['score_SERP30d'] = np.select(conditions1, values1) + np.select(conditions2, values2) + np.select(conditions3, values3) + np.select(conditions4, values4) + np.select(conditions5, values5) + df['cci_Cancer1']*6 + df['cci_Cancer2']*12708 print("Variable 'Score_SERP30d' successfully added")709 710 711def PlotROCCurve(probs,y_test_roc, ci= 95, random_seed=0):712 713 fpr, tpr, threshold = metrics.roc_curve(y_test_roc,probs)714 roc_auc = metrics.auc(fpr, tpr)715 average_precision = 
average_precision_score(y_test_roc, probs)716 a=np.sqrt(np.square(fpr-0)+np.square(tpr-1)).argmin()717 sensitivity = tpr[a]718 specificity = 1-fpr[a]719 threshold = threshold[a]720 print("AUC:",roc_auc)721 print("AUPRC:", average_precision)722 print("Sensitivity:",sensitivity)723 print("Specificity:",specificity)724 print("Score thresold:",threshold)725 lower_auroc, upper_auroc, std_auroc, lower_ap, upper_ap, std_ap, lower_sensitivity, upper_sensitivity, std_sensitivity, lower_specificity, upper_specificity, std_specificity = auc_with_ci(probs,y_test_roc, lower = (100-ci)/2, upper = 100-(100-ci)/2, n_bootstraps=20, rng_seed=random_seed)726 plt.title('Receiver Operating Characteristic: AUC={0:0.4f}'.format(727 roc_auc))728 plt.plot(fpr, tpr, 'b')729 plt.plot([0, 1], [0, 1],'r--')730 plt.xlim([0, 1])731 plt.ylim([0, 1])732 plt.ylabel('True Positive Rate')733 plt.xlabel('False Positive Rate')734 plt.show()735 precision, recall, threshold2 = precision_recall_curve(y_test_roc, probs)736 plt.step(recall, precision, color='b', alpha=0.2,737 where='post')738 plt.fill_between(recall, precision, step='post', alpha=0.2,739 color='b')740 plt.xlabel('Recall')741 plt.ylabel('Precision')742 plt.ylim([0.0, 1.05])743 plt.xlim([0.0, 1.0])744 plt.title('Precision-Recall Curve: AUPRC={0:0.4f}'.format(745 average_precision))746 plt.show()747 return [roc_auc, average_precision, sensitivity, specificity, threshold, lower_auroc, upper_auroc, std_auroc, lower_ap, upper_ap, std_ap, lower_sensitivity, upper_sensitivity, std_sensitivity, lower_specificity, upper_specificity, std_specificity]748def auc_with_ci(probs,y_test_roc, lower = 2.5, upper = 97.5, n_bootstraps=200, rng_seed=10):749 print(lower, upper)750 y_test_roc = np.asarray(y_test_roc)751 bootstrapped_auroc = []752 bootstrapped_ap = []753 bootstrapped_sensitivity = []754 bootstrapped_specificity = []755 rng = np.random.default_rng(rng_seed)756 for i in range(n_bootstraps):757 # bootstrap by sampling with replacement on the prediction indices758 indices = rng.integers(0, len(y_test_roc)-1, len(y_test_roc))759 if len(np.unique(y_test_roc[indices])) < 2:760 # We need at least one positive and one negative sample for ROC AUC761 # to be defined: reject the sample762 continue763 fpr, tpr, threshold = metrics.roc_curve(y_test_roc[indices],probs[indices])764 auroc = metrics.auc(fpr, tpr)765 ap = metrics.average_precision_score(y_test_roc[indices], probs[indices])766 a=np.sqrt(np.square(fpr-0)+np.square(tpr-1)).argmin()767 sensitivity = tpr[a]768 specificity = 1-fpr[a]769 bootstrapped_auroc.append(auroc)770 bootstrapped_ap.append(ap)771 bootstrapped_sensitivity.append(sensitivity)772 bootstrapped_specificity.append(specificity)773 lower_auroc,upper_auroc = np.percentile(bootstrapped_auroc, [lower, upper])774 lower_ap,upper_ap = np.percentile(bootstrapped_ap, [lower, upper])775 lower_sensitivity,upper_sensitivity = np.percentile(bootstrapped_sensitivity, [lower, upper])776 lower_specificity,upper_specificity = np.percentile(bootstrapped_specificity, [lower, upper])777 std_auroc = np.std(bootstrapped_auroc)778 std_ap = np.std(bootstrapped_ap)779 std_sensitivity = np.std(bootstrapped_sensitivity)780 std_specificity = np.std(bootstrapped_specificity)781 return lower_auroc, upper_auroc, std_auroc, lower_ap, upper_ap, std_ap, lower_sensitivity, upper_sensitivity, std_sensitivity, lower_specificity, upper_specificity, std_specificity782def plot_confidence_interval(dataset, metric= 'auroc', ci=95, name = 'AUROC', my_file = 'AUROC_hosp.eps', my_path = 'my_path', dpi=300):783 
ci_list = [dataset['lower_'+metric].values.tolist(),dataset['upper_'+metric].values.tolist()]784 std = [(dataset[metric]-dataset['std_'+metric]).values.tolist(), (dataset[metric]+dataset['std_'+metric]).values.tolist()]785 auc = dataset[metric].values.tolist()786 y = [range(len(dataset)), range(len(dataset))]787 plt.plot(ci_list,y, '-', color='gray',linewidth=1.5)788 plt.plot(std,y,'-', color='black', linewidth=2)789 plt.plot(auc,y[0],'|k', markersize=4)790 plt.xlabel(name)791 plt.yticks(range(len(dataset)),list(dataset['Model']))792 plt.savefig(os.path.join(my_path, my_file), format='eps', dpi=dpi)793 794 plt.show()795class LSTMDataGenerator(Sequence):796 def __init__(self, main_df, vitalsign_df, y, batch_size, x1_cols, x2_cols):797 self.main_df = main_df798 self.vitalsign_df = vitalsign_df799 self.batch_size = batch_size800 self.x1_cols = x1_cols801 self.x2_cols = x2_cols802 self.y_df = y803 def __len__(self):804 return math.ceil(len(self.main_df) / self.batch_size)805 def __getitem__(self, index):806 df_batch = self.main_df.iloc[index * self.batch_size:(index + 1) * self.batch_size]807 x1 = df_batch[self.x1_cols].to_numpy().astype(np.float64)808 y = self.y_df.iloc[index * self.batch_size:(index + 1) * self.batch_size].to_numpy()809 stay_ids = df_batch['stay_id'].to_numpy().astype(np.int64)810 batch_size = len(df_batch)811 df_batch = df_batch.merge(self.vitalsign_df, on='stay_id', how='left')812 x2 = []813 for i in range(batch_size):814 x2.append(df_batch[df_batch['stay_id'] == stay_ids[i]][self.x2_cols].to_numpy())815 padded_x2 = pad_sequences(x2, padding='post')816 return [x1, padded_x2.astype(np.float64)], y817def get_lstm_data_gen(df_train, df_test, df_vitalsign, variable, outcome, batch_size=200):818 variable_with_id = ["stay_id"]819 variable_with_id.extend(variable)820 X_train = df_train[variable_with_id].copy()821 y_train = df_train[outcome].copy()822 X_test = df_test[variable_with_id].copy()823 y_test = df_test[outcome].copy()824 if 'gender' in variable:825 encoder = LabelEncoder()826 X_train['gender'] = encoder.fit_transform(X_train['gender'])827 X_test['gender'] = encoder.transform(X_test['gender'])828 if 'ed_los' in variable:829 X_train['ed_los'] = pd.to_timedelta(X_train['ed_los']).dt.seconds / 60830 X_test['ed_los'] = pd.to_timedelta(X_test['ed_los']).dt.seconds / 60831 x1_cols = [x for x in variable_with_id[1:] if not ('ed' in x and 'last' in x)]832 x2_cols = [x for x in df_vitalsign.columns if 'ed' in x]833 train_data_gen = LSTMDataGenerator(X_train, df_vitalsign, y_train, batch_size, x1_cols, x2_cols)834 test_data_gen = LSTMDataGenerator(X_test, df_vitalsign, y_test, batch_size, x1_cols, x2_cols)...
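Taken together, the helpers above form a small preprocessing pipeline for MIMIC-IV-ED triage modelling: readers for the raw tables, merge functions that build a per-stay master dataframe, outcome flags, and bedside early-warning scores (NEWS, MEWS, REMS, CART, SERP). A minimal usage sketch follows; the CSV paths are placeholders and the column names simply assume the standard MIMIC-IV-ED schema, so treat it as an illustration of how the functions chain rather than a recipe taken from this page.

import os

# Hypothetical data directory; point this at wherever the MIMIC-IV-ED CSVs live.
DATA_DIR = "data/mimic-iv-ed"

df_edstays = read_edstays_table(os.path.join(DATA_DIR, "edstays.csv"))
df_patients = read_patients_table(os.path.join(DATA_DIR, "patients.csv"))
df_triage = read_triage_table(os.path.join(DATA_DIR, "triage.csv"))

# One row per ED stay, with demographics and triage vitals attached.
df_master = merge_edstays_patients_on_subject(df_edstays, df_patients)
df_master = merge_edstays_triage_on_subject(df_master, df_triage)
df_master = add_age(df_master)

# The score helpers use Celsius thresholds, hence the conversion step.
df_master = convert_temp_to_celcius(df_master)
add_triage_MAP(df_master)   # mean arterial pressure from triage SBP/DBP (used by score_REMS)
add_score_NEWS(df_master)   # adds the 'score_NEWS' column in place
print(df_master[["stay_id", "triage_acuity", "score_NEWS"]].head())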


forms.py

Source: forms.py (GitHub)



from django import forms
from django.conf import settings
from directory_validators.common import not_contains_url_or_email
from directory_validators.company import no_html
from .generics.forms import DITHelpForm, DITHelpModelForm
from .meta import choices, label, help_text, placeholder, validation
from .models import FeedbackModel, TriageModel
from . import fields


class FeedbackForm(DITHelpModelForm):
    title = "Help us improve great.gov.uk"
    subtitle = (
        "Give your feedback on the guidance and services on great.gov.uk. "
        "If something is wrong, give as much detail as you can."
    )

    class Meta:
        model = FeedbackModel
        exclude = []

    content = fields.CharField(
        label="Feedback",
        required=True,
        widget=forms.Textarea,
        attrs={
            'data-message': validation.FEEDBACK,
            'data-validate': 'feedback'
        },
        validators=[not_contains_url_or_email, no_html]
    )


class TriageForm(DITHelpModelForm):
    subtitle = "Application via Department for International Trade"
    submit_text = "Apply to join"

    def get_title(self):
        return self.request.GET.get('market', None)

    class Meta:
        model = TriageModel
        exclude = []

    def save(self, *args, **kwargs):
        # Saving the form submission to the database cannot be justified from
        # a GDPR point of view. It is sent to zendesk.
        return

    company_name = fields.CompanyField(
        required=True,
        label=label.COMPANY_NAME,
        help_text=help_text.COMPANY_NAME,
        error_messages=validation.TRIAGE_COMPANY_NAME,
        attrs={
            'data-validate': 'company',
            'data-action': 'get-companies',
            'data-message': validation.TRIAGE_COMPANY_NAME['required'],
            'autocomplete': 'off',
            'class': 'form-dropdown-input'
        },
        button_label="Search Companies House",
        button_attrs={
            'class': 'button button-border button-border--blue button-medium\
                push--ends search-companies',
            'data-action': 'get-companies'
        })
    soletrader = fields.BooleanField(
        required=False,
        label=label.TRIAGE_UNREGISTERED_COMPANY,
        attrs={'data-validate': 'soletrader'})
    company_number = fields.CharField(
        required=False,
        label=label.COMPANY_NUMBER,
        attrs={
            'class': 'form-control--medium',
            'data-validate': 'company-number',
            'data-message': validation.TRIAGE_COMPANY_NUMBER['required']
        })
    company_postcode = fields.CharField(
        required=True,
        label=label.TRIAGE_POSTCODE,
        attrs={
            'class': 'form-control--medium',
            'data-validate': 'postcode',
            'placeholder': placeholder.POSTCODE,
            'data-message': validation.TRIAGE_COMPANY_POSTCODE['required']
        })
    email_pref = fields.BooleanField(required=False, label=label.EMAIL_PREFERENCE)
    contact_name = fields.CharField(
        required=True, label=label.CONTACT_NAME,
        error_messages=validation.TRIAGE_CONTACT_NAME,
        attrs={
            'data-validate': 'name',
            'data-message': validation.TRIAGE_CONTACT_NAME['required']
        })
    contact_email = fields.EmailField(
        required=True, label=label.CONTACT_EMAIL,
        error_messages=validation.TRIAGE_CONTACT_EMAIL,
        attrs={
            'data-validate': 'email',
            'data-message': validation.TRIAGE_CONTACT_EMAIL['required']
        })
    website_address = fields.URLField(
        required=True, label=label.COMPANY_WEBSITE, help_text=help_text.COMPANY_WEBSITE,
        error_messages=validation.TRIAGE_COMPANY_WEBSITE,
        attrs={
            'placeholder': placeholder.URL,
            'data-validate': 'url',
            'data-message': validation.TRIAGE_COMPANY_WEBSITE['required']
        })
    turnover = fields.ChoiceField(
        required=True, label=label.TRIAGE_SALES, choices=choices.TRIAGE_SALES_THRESHOLDS,
        widget=forms.RadioSelect(),
        error_messages=validation.TRIAGE_BUSINESS_TURNOVER,
        attrs={
            'data-validate': 'turnover',
            'data-message': validation.TRIAGE_BUSINESS_TURNOVER['required']
        },
        help_text=help_text.TRIAGE_TURNOVER)
    trademarked = fields.ChoiceField(
        required=True, label=label.TRIAGE_TRADEMARKED, choices=choices.BOOLEAN_YES_NO,
        widget=forms.RadioSelect(),
        error_messages=validation.TRIAGE_BUSINESS_TRADEMARK,
        attrs={
            'data-validate': 'trademark',
            'data-message': validation.TRIAGE_BUSINESS_TRADEMARK['required']
        },
        help_text=help_text.TRIAGE_TRADEMARKED)
    experience = fields.ChoiceField(
        required=True, label=label.TRIAGE_EXPERIENCE, choices=choices.TRIAGE_EXPERIENCE,
        widget=forms.RadioSelect(),
        error_messages=validation.TRIAGE_EXPERIENCE_EXPORT,
        attrs={
            'data-validate': 'export',
            'data-message': validation.TRIAGE_EXPERIENCE_EXPORT['required']
        })
    description = fields.CharField(
        required=True, widget=forms.Textarea, help_text=help_text.TRIAGE_DESCRIPTION,
        label=label.TRIAGE_DESCRIPTION,
        error_messages=validation.TRIAGE_EXPERIENCE_INTRODUCTION,
        attrs={
            'data-validate': 'description',
            'class': 'form-textarea--wide',
            'data-message': validation.TRIAGE_EXPERIENCE_INTRODUCTION['required']
        })
    contact_phone = fields.IntegerField(
        required=True, label=label.CONTACT_PHONE, prefix='+44',
        error_messages=validation.TRIAGE_CONTACT_PHONE,
        attrs={
            'data-validate': 'contact-number',
            'data-message': validation.TRIAGE_CONTACT_PHONE['required']
        })
    sku_count = fields.IntegerField(
        required=True, label=label.TRIAGE_SKU_NUMBER,
        help_text=help_text.TRIAGE_SKU_NUMBER,
        error_messages=validation.TRIAGE_BUSINESS_SKU,
        attrs={
            'class': 'form-control--medium',
            'placeholder': placeholder.SKU,
            'data-validate': 'sku',
            'data-message': validation.TRIAGE_BUSINESS_SKU['required']
        })

    @property
    def fieldsets(self):
        contact_details_fields = [
            'contact_name',
            'contact_email',
            'contact_phone',
            'email_pref',
        ]
        if settings.USE_CAPTCHA:
            contact_details_fields.append('captcha')
        return (
            (
                'Your business',
                {
                    'fields': (
                        'company_name',
                        'soletrader',
                        'company_number',
                        'company_postcode',
                        'website_address',
                    )
                }
            ),
            (
                'Business details',
                {
                    'fields': (
                        'turnover',
                        'sku_count',
                        'trademarked',
                    )
                }
            ),
            (
                'Your experience',
                {
                    'fields': (
                        'experience',
                        'description',
                    )
                }
            ),
            (
                'Contact details',
                {
                    'fields': contact_details_fields,
                }
            ),
...
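For orientation, a form like TriageForm above is normally wired into a view in the standard Django way. The sketch below is illustrative only: the template name is invented, and the real DITHelpModelForm base class may expect extra constructor arguments (get_title, for instance, relies on self.request being set by the framework).

# Illustrative wiring only; template name and constructor kwargs are assumptions.
from django.shortcuts import render

from .forms import TriageForm


def triage_view(request):
    form = TriageForm(data=request.POST or None)
    if request.method == "POST" and form.is_valid():
        # save() deliberately does not persist anything (see the GDPR comment
        # in the form); the submission is forwarded to Zendesk elsewhere.
        form.save()
    return render(request, "triage.html", {"form": form})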


main_inference.py

Source: main_inference.py (GitHub)



...
# ##################
# # build the model from a config file and a checkpoint file
model_person = init_detector(config_file_person, checkpoint_file_person, device="cuda:0")
model_triage = init_detector(config_file_triage, checkpoint_file_triage, device="cuda:0")
# set(number)_drone(number)_triage(number).jpg
# set(1~5)_drone(1~3)_triage(1~3).jpg
# im_folder = "dataset_path/set0"
im_folder = path
# Submission label format: set_num(1~5), drone_num(1~3), frame_name[deceased, immediate, urgent, non-urgent]
# import pdb; pdb.set_trace()
person_results = []
set_keys = ["set_1", "set_2", "set_3", "set_4", "set_5"]
task4_answer = dict.fromkeys(set_keys)
# #### Extract Region of Person ####
for set_n in range(1, 6):
    set_dict = dict()
    set_name = "set_" + str(set_n)
    set_dir = im_folder + "set_0" + str(set_n) + "/"
    # print("set_dir : ", set_dir)
    for filename in glob.glob(set_dir):  # filename : dataset_path/set_01/...
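init_detector here looks like MMDetection's model loader, so each frame would normally be scored with inference_detector. Note also that glob.glob(set_dir) with a bare directory path only matches the directory itself, not the images inside it, so listing the frames needs a wildcard. The sketch below reuses the names from the snippet (model_person, model_triage, set_dir, task4_answer) and assumes the MMDetection API; the "*.jpg" pattern is a guess at the dataset layout.

# Assumed continuation: apply both detectors per frame with MMDetection's
# inference_detector; the "*.jpg" wildcard is an assumption about the dataset.
import glob

from mmdet.apis import inference_detector

for filename in sorted(glob.glob(set_dir + "*.jpg")):
    person_result = inference_detector(model_person, filename)   # person boxes
    triage_result = inference_detector(model_triage, filename)   # triage-class boxes
    # ...aggregate person_result / triage_result into set_dict / task4_answer here...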


Automation Testing Tutorials

Learn to execute automation testing from scratch with the LambdaTest Learning Hub: from setting up the prerequisites and running your first automation test to following best practices and diving into advanced test scenarios. The Learning Hub compiles step-by-step guides to help you become proficient with different test automation frameworks such as Selenium, Cypress, and TestNG.

LambdaTest Learning Hubs:

YouTube

You can also refer to the video tutorials on the LambdaTest YouTube channel for step-by-step demonstrations from industry experts.

Run avocado automation tests on the LambdaTest cloud grid

Perform automation testing on 3000+ real desktop and mobile devices online.

