How to use the get_instance method in robotframework-pageobjects

Below are Python code snippets, collected from open-source projects on GitHub, that show how a get_instance method is used in practice.
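All three projects shown below rely on the same idiom: a class (or module) keeps one shared instance and hands it out through get_instance(), so GUI callbacks, worker threads and helper functions can reach shared state without passing the object around. The following is a minimal, self-contained sketch of that idiom; the class name and attributes are illustrative only and are not taken from robotframework-pageobjects or from the projects below.

class SharedState:
    """Process-wide singleton exposed through a get_instance() accessor."""

    _instance = None

    def __init__(self):
        self.frames = []      # example shared state
        self.history = []

    @classmethod
    def get_instance(cls):
        # Create the shared instance lazily on first use, then keep returning it.
        if cls._instance is None:
            cls._instance = cls()
        return cls._instance


# Every caller sees the same object, so state written here ...
SharedState.get_instance().frames.append("frame-0")
# ... is visible to any other caller of get_instance().
assert SharedState.get_instance().frames == ["frame-0"]

Note that this lazy variant is not thread-safe; the web_transfer.py example below calls get_instance() from a background database thread, so a real implementation would typically create the instance eagerly or guard creation with a lock.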

window.py

Source: window.py (GitHub)


...32 global log_message33 global treap34 global log_list35 # Clear the old animations and reset index to zero36 animation_handler.get_instance().skip_list_graph_list.clear()37 animation_handler.get_instance().treap_graph_list.clear()38 animation_handler.get_instance().pseudocode_list.clear()39 animation_handler.get_instance().treap_pseudocode_list.clear()40 graph_list_index = 041 plot.clear()42 try:43 value = int(value_entry.get())44 except ValueError:45 if len(value_entry.get()) == 0:46 log_widget.push("ValueError: Enter a key to perfom an operation")47 log_message.config(text=log_widget.update())48 return49 else:50 log_widget.push(f"Invalid Key : {value_entry.get()}")51 log_message.config(text=log_widget.update())52 return53 # Handling the log widget:54 log_widget.push(f"search:{value}", log_list)55 log_message.config(text=log_widget.update())56 skip_list.find(value)57 treap.find(value, treap)58 skip_list_start = len(animation_handler.get_instance().skip_list_history[0])59 treap_start = len(animation_handler.get_instance().treap_history[0])60 for element in animation_handler.get_instance().skip_list_graph_list:61 animation_handler.get_instance().skip_list_history[0].append(element)62 for element in animation_handler.get_instance().pseudocode_list:63 animation_handler.get_instance().skip_list_history[1].append(element)64 for element in animation_handler.get_instance().treap_graph_list:65 animation_handler.get_instance().treap_history[0].append(element)66 for element in animation_handler.get_instance().treap_pseudocode_list:67 animation_handler.get_instance().treap_history[1].append(element)68 skip_list_end = len(animation_handler.get_instance().skip_list_history[0]) - 169 treap_end = len(animation_handler.get_instance().treap_history[0]) - 170 animation_handler.get_instance().skip_list_time_stamps.append((skip_list_start, skip_list_end))71 animation_handler.get_instance().treap_time_stamps.append((treap_start, treap_end))72 if algorithm.get() == "Skip List":73 update_canvas(animation_handler.get_instance().skip_list_graph_list[graph_list_index])74 pseudocode_obj.update(animation_handler.get_instance().pseudocode_list[graph_list_index][0],75 animation_handler.get_instance().pseudocode_list[graph_list_index][1])76 elif algorithm.get() == "Treap":77 update_canvas(animation_handler.get_instance().treap_graph_list[graph_list_index])78 pseudocode_obj.update(animation_handler.get_instance().treap_pseudocode_list[graph_list_index][0], # text79 animation_handler.get_instance().treap_pseudocode_list[graph_list_index][1]) # color80 value_entry.delete(0, tk.END)81def insert_command(event=None):82 global algorithm83 global animation_handler84 global graph_list_index85 global log_widget86 global log_message87 global log_list88 global treap89 # Clear the old animations and reset index to zero90 animation_handler.get_instance().skip_list_graph_list.clear()91 animation_handler.get_instance().treap_graph_list.clear()92 animation_handler.get_instance().pseudocode_list.clear()93 animation_handler.get_instance().treap_pseudocode_list.clear()94 graph_list_index = 095 plot.clear()96 try:97 value = int(value_entry.get())98 except ValueError:99 if len(value_entry.get()) == 0:100 log_widget.push("ValueError: Enter a key to perfom an operation")101 log_message.config(text=log_widget.update())102 return103 elif value_entry.get() == "jackson":104 webbrowser.open("https://youtu.be/PfrV_6yWbEg?autoplay=1&t=210", new=1)105 return106 else:107 log_widget.push(f"Invalid Key : {value_entry.get()}")108 
log_message.config(text=log_widget.update())109 return110 skip_list.insert(value)111 treap.insert(value, treap)112 skip_list_start = len(animation_handler.get_instance().skip_list_history[0])113 treap_start = len(animation_handler.get_instance().treap_history[0])114 for element in animation_handler.get_instance().skip_list_graph_list:115 animation_handler.get_instance().skip_list_history[0].append(element)116 for element in animation_handler.get_instance().pseudocode_list:117 animation_handler.get_instance().skip_list_history[1].append(element)118 for element in animation_handler.get_instance().treap_graph_list:119 animation_handler.get_instance().treap_history[0].append(element)120 for element in animation_handler.get_instance().treap_pseudocode_list:121 animation_handler.get_instance().treap_history[1].append(element)122 skip_list_end = len(animation_handler.get_instance().skip_list_history[0]) - 1123 treap_end = len(animation_handler.get_instance().treap_history[0]) - 1124 animation_handler.get_instance().skip_list_time_stamps.append((skip_list_start, skip_list_end))125 animation_handler.get_instance().treap_time_stamps.append((treap_start, treap_end))126 # Handling the log widget:127 log_widget.push(f"insert:{value}", log_list)128 log_message.config(text=log_widget.update())129 if algorithm.get() == "Skip List":130 update_canvas(animation_handler.get_instance().skip_list_graph_list[graph_list_index])131 pseudocode_obj.update(animation_handler.get_instance().pseudocode_list[graph_list_index][0],132 animation_handler.get_instance().pseudocode_list[graph_list_index][1])133 elif algorithm.get() == "Treap":134 update_canvas(animation_handler.get_instance().treap_graph_list[graph_list_index])135 pseudocode_obj.update(animation_handler.get_instance().treap_pseudocode_list[graph_list_index][0], # text136 animation_handler.get_instance().treap_pseudocode_list[graph_list_index][1]) # color137 value_entry.delete(0, tk.END)138def delete_command(event=None):139 global algorithm140 global graph_list_index141 global animation_handler142 global treap143 global log_widget144 global log_message145 global log_list146 animation_handler.get_instance().skip_list_graph_list.clear()147 animation_handler.get_instance().treap_graph_list.clear()148 animation_handler.get_instance().pseudocode_list.clear()149 animation_handler.get_instance().treap_pseudocode_list.clear()150 graph_list_index = 0151 plot.clear()152 try:153 value = int(value_entry.get())154 except ValueError:155 if len(value_entry.get()) == 0:156 log_widget.push("ValueError: Enter a key to perfom an operation")157 log_message.config(text=log_widget.update())158 return159 else:160 log_widget.push(f"Invalid Key : {value_entry.get()}")161 log_message.config(text=log_widget.update())162 return163 skip_list.delete(value)164 treap.delete(value, treap)165 skip_list_start = len(animation_handler.get_instance().skip_list_history[0])166 treap_start = len(animation_handler.get_instance().treap_history[0])167 for element in animation_handler.get_instance().skip_list_graph_list:168 animation_handler.get_instance().skip_list_history[0].append(element)169 for element in animation_handler.get_instance().pseudocode_list:170 animation_handler.get_instance().skip_list_history[1].append(element)171 for element in animation_handler.get_instance().treap_graph_list:172 animation_handler.get_instance().treap_history[0].append(element)173 for element in animation_handler.get_instance().treap_pseudocode_list:174 
animation_handler.get_instance().treap_history[1].append(element)175 skip_list_end = len(animation_handler.get_instance().skip_list_history[0]) - 1176 treap_end = len(animation_handler.get_instance().treap_history[0]) - 1177 animation_handler.get_instance().skip_list_time_stamps.append((skip_list_start, skip_list_end))178 animation_handler.get_instance().treap_time_stamps.append((treap_start, treap_end))179 # Handling the log widget:180 log_widget.push(f"delete:{value}", log_list)181 log_message.config(text=log_widget.update())182 if algorithm.get() == "Skip List":183 update_canvas(animation_handler.get_instance().skip_list_graph_list[graph_list_index])184 pseudocode_obj.update(animation_handler.get_instance().pseudocode_list[graph_list_index][0],185 animation_handler.get_instance().pseudocode_list[graph_list_index][1])186 elif algorithm.get() == "Treap":187 update_canvas(animation_handler.get_instance().treap_graph_list[graph_list_index])188 pseudocode_obj.update(animation_handler.get_instance().treap_pseudocode_list[graph_list_index][0],189 animation_handler.get_instance().treap_pseudocode_list[graph_list_index][1])190 value_entry.delete(0, tk.END)191def play_pause_command(event=None):192 global active193 global play_pause_button194 if play_pause_button["text"] == "Play":195 play_pause_button.config(text="Pause")196 active = True197 play_command()198 else:199 play_pause_button.config(text="Play")200 active = False201def play_command(event=None):202 global active203 global canvas204 global graph_list_index205 global animation_handler206 global algorithm207 global pseudocode_obj208 global mode209 global speed210 if active is False:211 return212 elif mode == "single_command":213 if algorithm.get() == "Skip List":214 if graph_list_index < len(animation_handler.get_instance().skip_list_graph_list) - 1:215 graph_list_index += 1216 update_canvas(animation_handler.get_instance().skip_list_graph_list[graph_list_index])217 pseudocode_obj.update(animation_handler.get_instance().pseudocode_list[graph_list_index][0],218 animation_handler.get_instance().pseudocode_list[graph_list_index][1])219 root.after(1000 - speed, play_command)220 if graph_list_index == len(animation_handler.get_instance().skip_list_graph_list) - 1:221 graph_list_index = 0222 play_pause_command()223 elif algorithm.get() == "Treap":224 if graph_list_index < len(animation_handler.get_instance().treap_graph_list) - 1:225 graph_list_index += 1226 update_canvas(animation_handler.get_instance().treap_graph_list[graph_list_index])227 pseudocode_obj.update(animation_handler.get_instance().treap_pseudocode_list[graph_list_index][0],228 animation_handler.get_instance().treap_pseudocode_list[graph_list_index][1])229 root.after(1000 - speed, play_command)230 if graph_list_index == len(animation_handler.get_instance().skip_list_graph_list) - 1:231 graph_list_index = 0232 play_pause_command()233 elif mode == "all_commands":234 if algorithm.get() == "Skip List":235 if graph_list_index < len(animation_handler.get_instance().skip_list_history[0]) - 1:236 graph_list_index += 1237 update_canvas(animation_handler.get_instance().skip_list_history[0][graph_list_index])238 pseudocode_obj.update(animation_handler.get_instance().skip_list_history[1][graph_list_index][0],239 animation_handler.get_instance().skip_list_history[1][graph_list_index][1])240 root.after(1000 - speed, play_command)241 if graph_list_index == len(animation_handler.get_instance().skip_list_history[0]) - 1:242 graph_list_index = 0243 play_pause_command()244 elif algorithm.get() == 
"Treap":245 if graph_list_index < len(animation_handler.get_instance().treap_history[0]) - 1:246 graph_list_index += 1247 update_canvas(animation_handler.get_instance().treap_history[0][graph_list_index])248 pseudocode_obj.update(animation_handler.get_instance().treap_history[1][graph_list_index][0],249 animation_handler.get_instance().treap_history[1][graph_list_index][1])250 root.after(1000 - speed, play_command)251 if graph_list_index == len(animation_handler.get_instance().treap_history[0]) - 1:252 graph_list_index = 0253 play_pause_command()254def previous_command(event=None):255 global graph_list_index256 global animation_handler257 global algorithm258 global active259 global play_pause_button260 global command_list_index261 global mode262 play_pause_button.config(text="Play")263 active = False264 if mode == "single_command":265 if algorithm.get() == "Skip List":266 if graph_list_index < len(animation_handler.get_instance().skip_list_graph_list) and graph_list_index > 0:267 graph_list_index -= 1268 update_canvas(animation_handler.get_instance().skip_list_graph_list[graph_list_index])269 pseudocode_obj.update(animation_handler.get_instance().pseudocode_list[graph_list_index][0],270 animation_handler.get_instance().pseudocode_list[graph_list_index][1])271 elif algorithm.get() == "Treap":272 if graph_list_index < len(animation_handler.get_instance().treap_graph_list) and graph_list_index > 0:273 graph_list_index -= 1274 update_canvas(animation_handler.get_instance().treap_graph_list[graph_list_index])275 pseudocode_obj.update(animation_handler.get_instance().treap_pseudocode_list[graph_list_index][0],276 animation_handler.get_instance().treap_pseudocode_list[graph_list_index][1])277 elif mode == "all_commands":278 if command_list_index > 0:279 command_list_index -= 1280 animation_handler.load_command(command_list_index)281 if algorithm.get() == "Skip List":282 update_canvas(animation_handler.get_instance().skip_list_graph_list[0])283 pseudocode_obj.update(animation_handler.get_instance().pseudocode_list[0][0],284 animation_handler.get_instance().pseudocode_list[0][1])285 elif algorithm.get() == "Treap":286 update_canvas(animation_handler.get_instance().treap_graph_list[0])287 pseudocode_obj.update(animation_handler.get_instance().treap_pseudocode_list[0][0],288 animation_handler.get_instance().treap_pseudocode_list[0][1])289 else:290 return291def next_command(event=None):292 global graph_list_index293 global animation_handler294 global algorithm295 global active296 global play_pause_button297 global command_list_index298 global mode299 play_pause_button.config(text="Play")300 active = False301 if mode == "single_command":302 if algorithm.get() == "Skip List":303 if graph_list_index < len(animation_handler.get_instance().skip_list_graph_list) - 1:304 graph_list_index += 1305 update_canvas(animation_handler.get_instance().skip_list_graph_list[graph_list_index])306 pseudocode_obj.update(animation_handler.get_instance().pseudocode_list[graph_list_index][0],307 animation_handler.get_instance().pseudocode_list[graph_list_index][1])308 elif algorithm.get() == "Treap":309 if graph_list_index < len(animation_handler.get_instance().treap_graph_list) - 1:310 graph_list_index += 1311 update_canvas(animation_handler.get_instance().treap_graph_list[graph_list_index])312 pseudocode_obj.update(animation_handler.get_instance().treap_pseudocode_list[graph_list_index][0],313 animation_handler.get_instance().treap_pseudocode_list[graph_list_index][1])314 elif mode == "all_commands":315 if 
command_list_index < len(animation_handler.get_instance().treap_time_stamps) - 1:316 command_list_index += 1317 animation_handler.load_command(command_list_index)318 if algorithm.get() == "Skip List":319 update_canvas(animation_handler.get_instance().skip_list_graph_list[0])320 pseudocode_obj.update(animation_handler.get_instance().pseudocode_list[0][0],321 animation_handler.get_instance().pseudocode_list[0][1])322 elif algorithm.get() == "Treap":323 update_canvas(animation_handler.get_instance().treap_graph_list[0])324 pseudocode_obj.update(animation_handler.get_instance().treap_pseudocode_list[0][0],325 animation_handler.get_instance().treap_pseudocode_list[0][1])326 else:327 return328def stop_command(event=None):329 global treap330 global algorithm331 global graph_list_index332 global active333 global play_pause_button334 global mode335 play_pause_button.config(text="Play")336 active = False337 graph_list_index = 0338 if mode == "single_command":339 if algorithm.get() == "Skip List":340 update_canvas(animation_handler.get_instance().skip_list_graph_list[graph_list_index])341 pseudocode_obj.update(animation_handler.get_instance().pseudocode_list[graph_list_index][0],342 animation_handler.get_instance().pseudocode_list[graph_list_index][1])343 elif algorithm.get() == "Treap":344 update_canvas(animation_handler.get_instance().treap_graph_list[graph_list_index])345 pseudocode_obj.update(animation_handler.get_instance().treap_pseudocode_list[graph_list_index][0],346 animation_handler.get_instance().treap_pseudocode_list[graph_list_index][1])347 elif mode == "all_commands":348 if algorithm.get() == "Skip List":349 update_canvas(animation_handler.get_instance().skip_list_history[0][graph_list_index])350 pseudocode_obj.update(animation_handler.get_instance().skip_list_history[1][graph_list_index][0],351 animation_handler.get_instance().skip_list_history[1][graph_list_index][1])352 elif algorithm.get() == "Treap":353 update_canvas(animation_handler.get_instance().treap_history[0][graph_list_index])354 pseudocode_obj.update(animation_handler.get_instance().treap_history[1][graph_list_index][0],355 animation_handler.get_instance().treap_history[1][graph_list_index][1])356def clear_command(event=None):357 global graph_list_index358 global animation_handler359 global skip_list_graph360 global treap_graph361 global algorithm362 global treap363 global skip_list364 global active365 global play_pause_button366 global log_list367 global log_widget368 global log_message369 global plot370 global canvas371 play_pause_button.config(text="Play")372 active = False373 graph_list_index = 0374 treap = tr.Treap()375 skip_list = sl.SkipList()376 animation_handler.get_instance().skip_list_graph_list.clear()377 animation_handler.get_instance().treap_graph_list.clear()378 animation_handler.get_instance().pseudocode_list.clear()379 animation_handler.get_instance().treap_pseudocode_list.clear()380 log_widget.clear()381 log_message.config(text=log_widget.update())382 log_list.clear()383 treap_graph = TreapGraph(treap)384 skip_list_graph = SkipListGraph(skip_list)385 animation_handler.get_instance().treap_graph_list.append(treap_graph.create_graph())386 animation_handler.get_instance().skip_list_graph_list.append(skip_list_graph.create_graph(skip_list))387 if algorithm.get() == "Treap":388 # update_canvas(animation_handler.get_instance().treap_graph_list[graph_list_index])389 treap_graph.draw(treap_graph.treap, plot, canvas)390 else:391 
update_canvas(animation_handler.get_instance().skip_list_graph_list[graph_list_index])392def switch_algorithm(string, event=None):393 global graph_list_index394 global active395 global play_pause_button396 global animation_handler397 global command_list_index398 global mode399 global canvas400 global plot401 play_pause_button.config(text="Play")402 active = False403 graph_list_index = 0404 if mode == "single_command":405 if string == "Treap":406 try:407 update_canvas(animation_handler.get_instance().treap_graph_list[0])408 except IndexError:409 treap_graph.draw(treap_graph.treap, plot, canvas)410 elif string == "Skip List":411 try:412 update_canvas(animation_handler.get_instance().skip_list_graph_list[0])413 except IndexError:414 skip_list_graph.draw(skip_list_graph.skip_list, plot, canvas)415 elif mode == "all_commands":416 if string == "Treap":417 try:418 update_canvas(animation_handler.get_instance().treap_graph_list[0])419 except IndexError:420 treap_graph.draw(treap_graph.treap, plot, canvas)421 elif string == "Skip List":422 try:423 update_canvas(animation_handler.get_instance().skip_list_graph_list[0])424 except IndexError:425 skip_list_graph.draw(skip_list_graph.skip_list, plot, canvas)426def read_data_command():427 global algorithm428 global animation_handler429 global treap430 global graph_list_index431 global data432 global log_message433 global log_widget434 graph_list_index = 0435 for command, key in data:436 animation_handler.get_instance().skip_list_graph_list.clear()437 animation_handler.get_instance().treap_graph_list.clear()438 animation_handler.get_instance().pseudocode_list.clear()439 animation_handler.get_instance().treap_pseudocode_list.clear()440 if command == "insert":441 if treap.root.key is not None:442 if treap.find_ohne(int(key)):443 continue444 if skip_list.search(int(key)):445 continue446 skip_list.insert(int(key))447 treap.insert(int(key), treap)448 skip_list_start = len(animation_handler.get_instance().skip_list_history[0])449 treap_start = len(animation_handler.get_instance().treap_history[0])450 for element in animation_handler.get_instance().skip_list_graph_list:451 animation_handler.get_instance().skip_list_history[0].append(element)452 for element in animation_handler.get_instance().pseudocode_list:453 animation_handler.get_instance().skip_list_history[1].append(element)454 for element in animation_handler.get_instance().treap_graph_list:455 animation_handler.get_instance().treap_history[0].append(element)456 for element in animation_handler.get_instance().treap_pseudocode_list:457 animation_handler.get_instance().treap_history[1].append(element)458 skip_list_end = len(animation_handler.get_instance().skip_list_history[0]) - 1459 treap_end = len(animation_handler.get_instance().treap_history[0]) - 1460 animation_handler.get_instance().skip_list_time_stamps.append((skip_list_start, skip_list_end))461 animation_handler.get_instance().treap_time_stamps.append((treap_start, treap_end))462 # Handling the log widget:463 log_widget.push(f"insert:{int(key)}", log_list)464 log_message.config(text=log_widget.update())465 if algorithm.get() == "Skip List":466 update_canvas(animation_handler.get_instance().skip_list_graph_list[graph_list_index])467 pseudocode_obj.update(animation_handler.get_instance().pseudocode_list[graph_list_index][0],468 animation_handler.get_instance().pseudocode_list[graph_list_index][1])469 elif algorithm.get() == "Treap":470 update_canvas(animation_handler.get_instance().treap_graph_list[graph_list_index])471 
pseudocode_obj.update(animation_handler.get_instance().treap_pseudocode_list[graph_list_index][0],472 animation_handler.get_instance().treap_pseudocode_list[graph_list_index][1])473 elif command == "search":474 skip_list.find(int(key))475 treap.find(int(key), treap)476 for element in animation_handler.get_instance().skip_list_graph_list:477 animation_handler.get_instance().skip_list_history[0].append(element)478 for element in animation_handler.get_instance().pseudocode_list:479 animation_handler.get_instance().skip_list_history[1].append(element)480 for element in animation_handler.get_instance().treap_graph_list:481 animation_handler.get_instance().treap_history[0].append(element)482 for element in animation_handler.get_instance().treap_pseudocode_list:483 animation_handler.get_instance().treap_history[1].append(element)484 log_widget.push(f"search:{int(key)}", log_list)485 log_message.config(text=log_widget.update())486 if algorithm.get() == "Skip List":487 update_canvas(animation_handler.get_instance().skip_list_graph_list[graph_list_index])488 pseudocode_obj.update(animation_handler.get_instance().pseudocode_list[graph_list_index][0],489 animation_handler.get_instance().pseudocode_list[graph_list_index][1])490 elif algorithm.get() == "Treap":491 update_canvas(animation_handler.get_instance().treap_graph_list[graph_list_index])492 pseudocode_obj.update(animation_handler.get_instance().treap_pseudocode_list[graph_list_index][0],493 animation_handler.get_instance().treap_pseudocode_list[graph_list_index][1])494 elif command == "delete":495 skip_list.delete(int(key))496 treap.delete(int(key), treap)497 skip_list_start = len(animation_handler.get_instance().skip_list_history[0])498 treap_start = len(animation_handler.get_instance().treap_history[0])499 for element in animation_handler.get_instance().skip_list_graph_list:500 animation_handler.get_instance().skip_list_history[0].append(element)501 for element in animation_handler.get_instance().pseudocode_list:502 animation_handler.get_instance().skip_list_history[1].append(element)503 for element in animation_handler.get_instance().treap_graph_list:504 animation_handler.get_instance().treap_history[0].append(element)505 for element in animation_handler.get_instance().treap_pseudocode_list:506 animation_handler.get_instance().treap_history[1].append(element)507 skip_list_end = len(animation_handler.get_instance().skip_list_history[0]) - 1508 treap_end = len(animation_handler.get_instance().treap_history[0]) - 1509 animation_handler.get_instance().skip_list_time_stamps.append((skip_list_start, skip_list_end))510 animation_handler.get_instance().treap_time_stamps.append((treap_start, treap_end))511 log_widget.push(f"delete:{int(key)}", log_list)512 log_message.config(text=log_widget.update())513 if algorithm.get() == "Skip List":514 update_canvas(animation_handler.get_instance().skip_list_graph_list[graph_list_index])515 pseudocode_obj.update(animation_handler.get_instance().pseudocode_list[graph_list_index][0],516 animation_handler.get_instance().pseudocode_list[graph_list_index][1])517 elif algorithm.get() == "Treap":518 update_canvas(animation_handler.get_instance().treap_graph_list[graph_list_index])519 pseudocode_obj.update(animation_handler.get_instance().treap_pseudocode_list[graph_list_index][0],520 animation_handler.get_instance().treap_pseudocode_list[graph_list_index][1])521def open_file():522 global active523 global play_pause_button524 play_pause_button.config(text="Play")525 active = False526 global data527 token = False528 file 
= tk.filedialog.askopenfile(mode='r', title="Open file", filetypes=[('Text Files', '*.txt')])529 # check if file was opend successfully530 if file:531 # set Label with filename only if open was successful532 filename_label.config(text=file.name.split("/")[-1])533 data = []534 # append each line to DATA list, where they are stored535 for line in file:536 command = line.rstrip().split(":")537 tmp = (command[0], command[1]) # command[0] ="insert", command[1]="3"538 if tmp in data:539 continue540 else:541 data.append(tmp)542 read_data_command()543def save_graph():544 global active545 global play_pause_button546 global filename547 global fig548 play_pause_button.config(text="Play")549 active = False550 filename = ""551 filename = tk.filedialog.asksaveasfilename(title="Save File",552 filetypes=[("png files", "*.png"), ("jpeg files", "*.jpeg")])553 if filename:554 fig.savefig(filename)555def save_graph():556 global active557 global play_pause_button558 global filename559 global fig560 play_pause_button.config(text="Play")561 active = False562 filename = ""563 filename = tk.filedialog.asksaveasfilename(title="Save File",564 filetypes=[("png files", "*.png"), ("jpeg files", "*.jpeg")])565 if filename:566 fig.savefig(filename)567def callor(event):568 insert_command()569def placeholder(event):570 value_entry.delete(0, tk.END)571def github():572 webbrowser.open("https://github.com/timokilb/projektgruppe", new=1)573def donate():574 global algorithm575 if algorithm.get() == "Skip List":576 webbrowser.open("https://www.paypal.me/timokilb", new=1)577 else:578 webbrowser.open("https://www.paypal.me/denizdogan94", new=1)579def instagram():580 webbrowser.open("https://www.instagram.com/resbalar.sb/?hl=de", new=1)581def how_it_works(event=None):582 how_it_works_window = tk.Toplevel()583 how_it_works_window.title("How it works")584 how_it_works_window.minsize(300, 300)585 how_it_works_window.config(padx=20, pady=20, bg=background_color)586 how_it_works_window.resizable(height=False, width=False)587 how_it_works_frame = tk.Frame(how_it_works_window, **style_sheet["how_it_works_frame"])588 how_it_works_frame.pack(side="top", anchor="nw", fill="both", expand=1)589 explanation_text = open("./res/howto.txt", "r").read()590 explanation_message = tk.Message(master=how_it_works_frame, text=explanation_text, **style_sheet["how_it_works"])591 explanation_message.pack(anchor="nw")592def check_decision():593 global filename594 global save_decision_list595 if "save_all" and True in save_decision_list[2]:596 save_graph()597 save_log()598 filename = ""599 return600 elif "save_graph" and True in save_decision_list[0]:601 save_graph()602 filename = ""603 elif "save_log" and True in save_decision_list[1]:604 save_log()605 filename = ""606def get_frame():607 global pseudocode_frame608 return pseudocode_frame609def save_log():610 global log_list611 global filename612 if filename == "":613 filename = tk.filedialog.asksaveasfilename(title="Save File",614 filetypes=[("txt files", "*.txt")])615 tmp = open(filename + ".txt", mode="w")616 if tmp:617 for line in log_list:618 tmp.write(line + "\n")619 tmp.close()620def open_save():621 def save_decision():622 global save_decision_list623 save_decision_list.clear()624 save_decision_list.append(("save_graph", save_graph.get()))625 save_decision_list.append(("save_log", save_log.get()))626 save_decision_list.append(("save_all", (save_graph.get() and save_log.get() or (save_all.get()))))627 check_decision()628 choose_save_window.destroy()629 choose_save_window = tk.Toplevel()630 
choose_save_window.title("Save")631 choose_save_window.minsize(300, 136)632 choose_save_window.resizable(width=False, height=False)633 choose_save_window.config(padx=10, pady=30, bg=background_color)634 save_graph = tk.BooleanVar()635 tk.Checkbutton(choose_save_window, text="Save Graph", variable=save_graph, **style_sheet["save_window_check"]).pack(636 side="top", anchor="w")637 save_log = tk.BooleanVar()638 tk.Checkbutton(choose_save_window, text="Save Logs", variable=save_log, **style_sheet["save_window_check"]).pack(639 side="top", anchor="w")640 save_all = tk.BooleanVar()641 tk.Checkbutton(choose_save_window, text="Save Graph and Logs", variable=save_all,642 **style_sheet["save_window_check"]).pack(side="top", anchor="w")643 tk.Button(choose_save_window, text='Save', command=save_decision, **style_sheet["save_window_button"], ).pack(644 side="left", anchor="nw", fill="x", expand=1,645 padx=2, pady=6)646 tk.Button(choose_save_window, text='Cancel', command=choose_save_window.destroy,647 **style_sheet["save_window_button"], ).pack(side="left", anchor="nw",648 fill="x", expand=1, padx=2,649 pady=6)650def set_mode(param):651 global graph_list_index652 global algorithm653 global mode654 global log_widget655 global log_message656 if algorithm.get() == "Skip List":657 if graph_list_index > len(animation_handler.get_instance().skip_list_graph_list) - 1:658 graph_list_index = 0659 elif algorithm.get() == "Treap":660 if graph_list_index > len(animation_handler.get_instance().treap_graph_list) - 1:661 graph_list_index = 0662 mode = param663 if mode == "single_command":664 log_widget.push(f"Mode: Single Command")665 log_message.config(text=log_widget.update())666 else:667 log_widget.push(f"Mode:All Commands")668 log_message.config(text=log_widget.update())669def faster_command(event=None):670 global speed671 if speed < 800:672 speed += 150673def slower_command(event=None):674 global speed...
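In window.py every Tkinter callback re-fetches the handler with animation_handler.get_instance(), often dozens of times per function. Since each call returns the same object, the lookups can be collapsed into one local reference per callback. A sketch of that refactor, assuming the animation_handler name and the list attributes used in the snippet above are in scope:

def reset_animation_state():
    """Clear the cached animation frames before running a new command."""
    # get_instance() returns one shared handler no matter how often it is called,
    # so binding it once per callback is equivalent to the repeated lookups above.
    handler = animation_handler.get_instance()
    handler.skip_list_graph_list.clear()
    handler.treap_graph_list.clear()
    handler.pseudocode_list.clear()
    handler.treap_pseudocode_list.clear()

Binding the instance once also makes it easier to see which pieces of shared animation state a given callback actually touches.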


web_transfer.py

Source: web_transfer.py (GitHub)


...53 "nodes/%d/info" % (get_config().NODE_ID),54 {"node_id": get_config().NODE_ID},55 {"uptime": str(self.uptime()), "load": str(self.load())},56 )57 online_iplist = ServerPool.get_instance().get_servers_iplist()58 data = []59 for port in online_iplist.keys():60 for ip in online_iplist[port]:61 data.append({"ip": ip, "user_id": self.port_uid_table[port]})62 webapi.postApi(63 "users/aliveip", {"node_id": get_config().NODE_ID}, {"data": data}64 )65 detect_log_list = ServerPool.get_instance().get_servers_detect_log()66 data = []67 for port in detect_log_list.keys():68 for rule_id in detect_log_list[port]:69 data.append(70 {"list_id": rule_id, "user_id": self.port_uid_table[port]}71 )72 webapi.postApi(73 "users/detectlog",74 {"node_id": get_config().NODE_ID},75 {"data": data},76 )77 deny_str = ""78 data = []79 if platform.system() == "Linux" and get_config().ANTISSATTACK == 1:80 wrong_iplist = ServerPool.get_instance().get_servers_wrong()81 server_ip = socket.gethostbyname(get_config().MYSQL_HOST)82 for id in wrong_iplist.keys():83 for ip in wrong_iplist[id]:84 realip = ""85 is_ipv6 = False86 if common.is_ip(ip):87 if common.is_ip(ip) == socket.AF_INET:88 realip = ip89 else:90 if common.match_ipv4_address(ip) is not None:91 realip = common.match_ipv4_address(ip)92 else:93 is_ipv6 = True94 realip = ip95 else:96 continue97 if str(realip).find(str(server_ip)) != -1:98 continue99 has_match_node = False100 for node_ip in self.node_ip_list:101 if str(realip).find(node_ip) != -1:102 has_match_node = True103 continue104 if has_match_node:105 continue106 if get_config().CLOUDSAFE == 1:107 data.append({"ip": realip})108 else:109 if not is_ipv6:110 os.system(111 "route add -host %s gw 127.0.0.1" % str(realip)112 )113 deny_str = deny_str + "\nALL: " + str(realip)114 else:115 os.system(116 "ip -6 route add ::1/128 via %s/128"117 % str(realip)118 )119 deny_str = (120 deny_str + "\nALL: [" + str(realip) + "]/128"121 )122 logging.info("Local Block ip:" + str(realip))123 if get_config().CLOUDSAFE == 0:124 deny_file = open("/etc/hosts.deny", "a")125 fcntl.flock(deny_file.fileno(), fcntl.LOCK_EX)126 deny_file.write(deny_str)127 deny_file.close()128 webapi.postApi(129 "func/block_ip",130 {"node_id": get_config().NODE_ID},131 {"data": data},132 )133 return update_transfer134 def uptime(self):135 with open("/proc/uptime", "r") as f:136 return float(f.readline().split()[0])137 def load(self):138 import os139 return os.popen(140 'cat /proc/loadavg | awk \'{ print $1" "$2" "$3 }\''141 ).readlines()[0]142 def trafficShow(self, Traffic):143 if Traffic < 1024:144 return str(round((Traffic), 2)) + "B"145 if Traffic < 1024 * 1024:146 return str(round((Traffic / 1024), 2)) + "KB"147 if Traffic < 1024 * 1024 * 1024:148 return str(round((Traffic / 1024 / 1024), 2)) + "MB"149 return str(round((Traffic / 1024 / 1024 / 1024), 2)) + "GB"150 def push_db_all_user(self):151 # 更新用户流量到数据库152 last_transfer = self.last_update_transfer153 curr_transfer = ServerPool.get_instance().get_servers_transfer()154 # 上次和本次的增量155 dt_transfer = {}156 for id in curr_transfer.keys():157 if id in last_transfer:158 if (159 curr_transfer[id][0]160 + curr_transfer[id][1]161 - last_transfer[id][0]162 - last_transfer[id][1]163 <= 0164 ):165 continue166 if (167 last_transfer[id][0] <= curr_transfer[id][0]168 and last_transfer[id][1] <= curr_transfer[id][1]169 ):170 dt_transfer[id] = [171 curr_transfer[id][0] - last_transfer[id][0],172 curr_transfer[id][1] - last_transfer[id][1],173 ]174 else:175 dt_transfer[id] = [176 curr_transfer[id][0],177 
curr_transfer[id][1],178 ]179 else:180 if curr_transfer[id][0] + curr_transfer[id][1] <= 0:181 continue182 dt_transfer[id] = [curr_transfer[id][0], curr_transfer[id][1]]183 for id in dt_transfer.keys():184 last = last_transfer.get(id, [0, 0])185 last_transfer[id] = [186 last[0] + dt_transfer[id][0],187 last[1] + dt_transfer[id][1],188 ]189 self.last_update_transfer = last_transfer.copy()190 self.update_all_user(dt_transfer)191 def pull_db_all_user(self):192 global webapi193 nodeinfo = webapi.getApi("nodes/%d/info" % (get_config().NODE_ID))194 if not nodeinfo:195 rows = []196 return rows197 self.node_speedlimit = nodeinfo["node_speedlimit"]198 self.traffic_rate = nodeinfo["traffic_rate"]199 self.mu_only = nodeinfo["mu_only"]200 if nodeinfo["sort"] == 10:201 self.is_relay = True202 else:203 self.is_relay = False204 data = webapi.getApi("users", {"node_id": get_config().NODE_ID})205 if not data:206 rows = []207 return rows208 rows = data209 # 读取节点IP210 # SELECT * FROM `ss_node` where `node_ip` != ''211 self.node_ip_list = []212 data = webapi.getApi("nodes")213 for node in data:214 temp_list = str(node["node_ip"]).split(",")215 self.node_ip_list.append(temp_list[0])216 # 读取审计规则,数据包匹配部分217 self.detect_text_list = {}218 self.detect_hex_list = {}219 data = webapi.getApi("func/detect_rules")220 for rule in data:221 d = {}222 d["id"] = int(rule["id"])223 d["regex"] = str(rule["regex"])224 if int(rule["type"]) == 1:225 self.detect_text_list[d["id"]] = d.copy()226 else:227 self.detect_hex_list[d["id"]] = d.copy()228 # 读取中转规则,如果是中转节点的话229 if self.is_relay:230 self.relay_rule_list = {}231 data = webapi.getApi(232 "func/relay_rules", {"node_id": get_config().NODE_ID}233 )234 for rule in data:235 d = {}236 d["id"] = int(rule["id"])237 d["user_id"] = int(rule["user_id"])238 d["dist_ip"] = str(rule["dist_ip"])239 d["port"] = int(rule["port"])240 d["priority"] = int(rule["priority"])241 self.relay_rule_list[d["id"]] = d.copy()242 return rows243 def cmp(self, val1, val2):244 if isinstance(val1, bytes):245 val1 = common.to_str(val1)246 if isinstance(val2, bytes):247 val2 = common.to_str(val2)248 return val1 == val2249 def del_server_out_of_bound_safe(self, last_rows, rows):250 # 停止超流量的服务251 # 启动没超流量的服务252 # 需要动态载入switchrule,以便实时修改规则253 try:254 switchrule = importloader.load("switchrule")255 except Exception as e:256 logging.error("load switchrule.py fail")257 cur_servers = {}258 new_servers = {}259 md5_users = {}260 self.mu_port_list = []261 for row in rows:262 if row["is_multi_user"] != 0:263 self.mu_port_list.append(int(row["port"]))264 continue265 md5_users[row["id"]] = row.copy()266 del md5_users[row["id"]]["u"]267 del md5_users[row["id"]]["d"]268 if md5_users[row["id"]]["disconnect_ip"] is None:269 md5_users[row["id"]]["disconnect_ip"] = ""270 if md5_users[row["id"]]["forbidden_ip"] is None:271 md5_users[row["id"]]["forbidden_ip"] = ""272 if md5_users[row["id"]]["forbidden_port"] is None:273 md5_users[row["id"]]["forbidden_port"] = ""274 md5_users[row["id"]]["md5"] = common.get_md5(275 str(row["id"])276 + row["passwd"]277 + row["method"]278 + row["obfs"]279 + row["protocol"]280 )281 for row in rows:282 self.port_uid_table[row["port"]] = row["id"]283 self.uid_port_table[row["id"]] = row["port"]284 if self.mu_only == 1:285 i = 0286 while i < len(rows):287 if rows[i]["is_multi_user"] == 0:288 rows.pop(i)289 i -= 1290 else:291 pass292 i += 1293 if self.mu_only == -1:294 i = 0295 while i < len(rows):296 if rows[i]["is_multi_user"] != 0:297 rows.pop(i)298 i -= 1299 else:300 pass301 i += 1302 for row in 
rows:303 port = row["port"]304 user_id = row["id"]305 passwd = common.to_bytes(row["passwd"])306 cfg = {"password": passwd}307 read_config_keys = [308 "method",309 "obfs",310 "obfs_param",311 "protocol",312 "protocol_param",313 "forbidden_ip",314 "forbidden_port",315 "node_speedlimit",316 "disconnect_ip",317 "is_multi_user",318 ]319 for name in read_config_keys:320 if name in row and row[name]:321 cfg[name] = row[name]322 merge_config_keys = ["password"] + read_config_keys323 for name in cfg.keys():324 if hasattr(cfg[name], "encode"):325 try:326 cfg[name] = cfg[name].encode("utf-8")327 except Exception as e:328 logging.warning(329 'encode cfg key "%s" fail, val "%s"'330 % (name, cfg[name])331 )332 if "node_speedlimit" in cfg:333 if (334 float(self.node_speedlimit) > 0.0335 or float(cfg["node_speedlimit"]) > 0.0336 ):337 cfg["node_speedlimit"] = max(338 float(self.node_speedlimit),339 float(cfg["node_speedlimit"]),340 )341 else:342 cfg["node_speedlimit"] = max(343 float(self.node_speedlimit), float(0.00)344 )345 if "disconnect_ip" not in cfg:346 cfg["disconnect_ip"] = ""347 if "forbidden_ip" not in cfg:348 cfg["forbidden_ip"] = ""349 if "forbidden_port" not in cfg:350 cfg["forbidden_port"] = ""351 if "protocol_param" not in cfg:352 cfg["protocol_param"] = ""353 if "obfs_param" not in cfg:354 cfg["obfs_param"] = ""355 if "is_multi_user" not in cfg:356 cfg["is_multi_user"] = 0357 if port not in cur_servers:358 cur_servers[port] = passwd359 else:360 logging.error(361 "more than one user use the same port [%s]" % (port,)362 )363 continue364 if cfg["is_multi_user"] != 0:365 cfg["users_table"] = md5_users.copy()366 cfg["detect_hex_list"] = self.detect_hex_list.copy()367 cfg["detect_text_list"] = self.detect_text_list.copy()368 if self.is_relay and row["is_multi_user"] != 2:369 temp_relay_rules = {}370 for id in self.relay_rule_list:371 if (372 (373 self.relay_rule_list[id]["user_id"] == user_id374 or self.relay_rule_list[id]["user_id"] == 0375 )376 or row["is_multi_user"] != 0377 ) and (378 self.relay_rule_list[id]["port"] == 0379 or self.relay_rule_list[id]["port"] == port380 ):381 has_higher_priority = False382 for priority_id in self.relay_rule_list:383 if (384 (385 (386 self.relay_rule_list[priority_id][387 "priority"388 ]389 > self.relay_rule_list[id]["priority"]390 and self.relay_rule_list[id]["id"]391 != self.relay_rule_list[priority_id][392 "id"393 ]394 )395 or (396 self.relay_rule_list[priority_id][397 "priority"398 ]399 == self.relay_rule_list[id]["priority"]400 and self.relay_rule_list[id]["id"]401 > self.relay_rule_list[priority_id][402 "id"403 ]404 )405 )406 and (407 self.relay_rule_list[priority_id][408 "user_id"409 ]410 == user_id411 or self.relay_rule_list[priority_id][412 "user_id"413 ]414 == 0415 )416 and (417 self.relay_rule_list[priority_id]["port"]418 == port419 or self.relay_rule_list[priority_id][420 "port"421 ]422 == 0423 )424 ):425 has_higher_priority = True426 continue427 if has_higher_priority:428 continue429 if (430 self.relay_rule_list[id]["dist_ip"] == "0.0.0.0"431 and row["is_multi_user"] == 0432 ):433 continue434 temp_relay_rules[id] = self.relay_rule_list[id]435 cfg["relay_rules"] = temp_relay_rules.copy()436 else:437 temp_relay_rules = {}438 cfg["relay_rules"] = temp_relay_rules.copy()439 if ServerPool.get_instance().server_is_run(port) > 0:440 cfgchange = False441 if port in ServerPool.get_instance().tcp_servers_pool:442 ServerPool.get_instance().tcp_servers_pool[443 port444 ].modify_detect_text_list(self.detect_text_list)445 
ServerPool.get_instance().tcp_servers_pool[446 port447 ].modify_detect_hex_list(self.detect_hex_list)448 if port in ServerPool.get_instance().tcp_ipv6_servers_pool:449 ServerPool.get_instance().tcp_ipv6_servers_pool[450 port451 ].modify_detect_text_list(self.detect_text_list)452 ServerPool.get_instance().tcp_ipv6_servers_pool[453 port454 ].modify_detect_hex_list(self.detect_hex_list)455 if port in ServerPool.get_instance().udp_servers_pool:456 ServerPool.get_instance().udp_servers_pool[457 port458 ].modify_detect_text_list(self.detect_text_list)459 ServerPool.get_instance().udp_servers_pool[460 port461 ].modify_detect_hex_list(self.detect_hex_list)462 if port in ServerPool.get_instance().udp_ipv6_servers_pool:463 ServerPool.get_instance().udp_ipv6_servers_pool[464 port465 ].modify_detect_text_list(self.detect_text_list)466 ServerPool.get_instance().udp_ipv6_servers_pool[467 port468 ].modify_detect_hex_list(self.detect_hex_list)469 if row["is_multi_user"] != 0:470 if port in ServerPool.get_instance().tcp_servers_pool:471 ServerPool.get_instance().tcp_servers_pool[472 port473 ].modify_multi_user_table(md5_users)474 if port in ServerPool.get_instance().tcp_ipv6_servers_pool:475 ServerPool.get_instance().tcp_ipv6_servers_pool[476 port477 ].modify_multi_user_table(md5_users)478 if port in ServerPool.get_instance().udp_servers_pool:479 ServerPool.get_instance().udp_servers_pool[480 port481 ].modify_multi_user_table(md5_users)482 if port in ServerPool.get_instance().udp_ipv6_servers_pool:483 ServerPool.get_instance().udp_ipv6_servers_pool[484 port485 ].modify_multi_user_table(md5_users)486 if self.is_relay and row["is_multi_user"] != 2:487 temp_relay_rules = {}488 for id in self.relay_rule_list:489 if (490 (491 self.relay_rule_list[id]["user_id"] == user_id492 or self.relay_rule_list[id]["user_id"] == 0493 )494 or row["is_multi_user"] != 0495 ) and (496 self.relay_rule_list[id]["port"] == 0497 or self.relay_rule_list[id]["port"] == port498 ):499 has_higher_priority = False500 for priority_id in self.relay_rule_list:501 if (502 (503 (504 self.relay_rule_list[priority_id][505 "priority"506 ]507 > self.relay_rule_list[id][508 "priority"509 ]510 and self.relay_rule_list[id]["id"]511 != self.relay_rule_list[512 priority_id513 ]["id"]514 )515 or (516 self.relay_rule_list[priority_id][517 "priority"518 ]519 == self.relay_rule_list[id][520 "priority"521 ]522 and self.relay_rule_list[id]["id"]523 > self.relay_rule_list[524 priority_id525 ]["id"]526 )527 )528 and (529 self.relay_rule_list[priority_id][530 "user_id"531 ]532 == user_id533 or self.relay_rule_list[priority_id][534 "user_id"535 ]536 == 0537 )538 and (539 self.relay_rule_list[priority_id][540 "port"541 ]542 == port543 or self.relay_rule_list[priority_id][544 "port"545 ]546 == 0547 )548 ):549 has_higher_priority = True550 continue551 if has_higher_priority:552 continue553 if (554 self.relay_rule_list[id]["dist_ip"]555 == "0.0.0.0"556 and row["is_multi_user"] == 0557 ):558 continue559 temp_relay_rules[id] = self.relay_rule_list[id]560 if port in ServerPool.get_instance().tcp_servers_pool:561 ServerPool.get_instance().tcp_servers_pool[562 port563 ].push_relay_rules(temp_relay_rules)564 if port in ServerPool.get_instance().tcp_ipv6_servers_pool:565 ServerPool.get_instance().tcp_ipv6_servers_pool[566 port567 ].push_relay_rules(temp_relay_rules)568 if port in ServerPool.get_instance().udp_servers_pool:569 ServerPool.get_instance().udp_servers_pool[570 port571 ].push_relay_rules(temp_relay_rules)572 if port in 
ServerPool.get_instance().udp_ipv6_servers_pool:573 ServerPool.get_instance().udp_ipv6_servers_pool[574 port575 ].push_relay_rules(temp_relay_rules)576 else:577 temp_relay_rules = {}578 if port in ServerPool.get_instance().tcp_servers_pool:579 ServerPool.get_instance().tcp_servers_pool[580 port581 ].push_relay_rules(temp_relay_rules)582 if port in ServerPool.get_instance().tcp_ipv6_servers_pool:583 ServerPool.get_instance().tcp_ipv6_servers_pool[584 port585 ].push_relay_rules(temp_relay_rules)586 if port in ServerPool.get_instance().udp_servers_pool:587 ServerPool.get_instance().udp_servers_pool[588 port589 ].push_relay_rules(temp_relay_rules)590 if port in ServerPool.get_instance().udp_ipv6_servers_pool:591 ServerPool.get_instance().udp_ipv6_servers_pool[592 port593 ].push_relay_rules(temp_relay_rules)594 if port in ServerPool.get_instance().tcp_servers_pool:595 relay = ServerPool.get_instance().tcp_servers_pool[port]596 for name in merge_config_keys:597 if name in cfg and not self.cmp(598 cfg[name], relay._config[name]599 ):600 cfgchange = True601 break602 if (603 not cfgchange604 and port in ServerPool.get_instance().tcp_ipv6_servers_pool605 ):606 relay = ServerPool.get_instance().tcp_ipv6_servers_pool[607 port608 ]609 for name in merge_config_keys:610 if name in cfg and not self.cmp(611 cfg[name], relay._config[name]612 ):613 cfgchange = True614 break615 # config changed616 if cfgchange:617 self.del_server(port, "config changed")618 new_servers[port] = (passwd, cfg)619 elif ServerPool.get_instance().server_run_status(port) is False:620 # new_servers[port] = passwd621 self.new_server(port, passwd, cfg)622 for row in last_rows:623 if row["port"] in cur_servers:624 pass625 else:626 self.del_server(row["port"], "port not exist")627 if len(new_servers) > 0:628 from shadowsocks import eventloop629 self.event.wait(630 eventloop.TIMEOUT_PRECISION + eventloop.TIMEOUT_PRECISION / 2631 )632 for port in new_servers.keys():633 passwd, cfg = new_servers[port]634 self.new_server(port, passwd, cfg)635 ServerPool.get_instance().push_uid_port_table(self.uid_port_table)636 def del_server(self, port, reason):637 logging.info(638 "db stop server at port [%s] reason: %s!" 
% (port, reason)639 )640 ServerPool.get_instance().cb_del_server(port)641 if port in self.last_update_transfer:642 del self.last_update_transfer[port]643 for mu_user_port in self.mu_port_list:644 if mu_user_port in ServerPool.get_instance().tcp_servers_pool:645 ServerPool.get_instance().tcp_servers_pool[646 mu_user_port647 ].reset_single_multi_user_traffic(self.port_uid_table[port])648 if mu_user_port in ServerPool.get_instance().tcp_ipv6_servers_pool:649 ServerPool.get_instance().tcp_ipv6_servers_pool[650 mu_user_port651 ].reset_single_multi_user_traffic(self.port_uid_table[port])652 if mu_user_port in ServerPool.get_instance().udp_servers_pool:653 ServerPool.get_instance().udp_servers_pool[654 mu_user_port655 ].reset_single_multi_user_traffic(self.port_uid_table[port])656 if mu_user_port in ServerPool.get_instance().udp_ipv6_servers_pool:657 ServerPool.get_instance().udp_ipv6_servers_pool[658 mu_user_port659 ].reset_single_multi_user_traffic(self.port_uid_table[port])660 def new_server(self, port, passwd, cfg):661 protocol = cfg.get(662 "protocol",663 ServerPool.get_instance().config.get("protocol", "origin"),664 )665 method = cfg.get(666 "method", ServerPool.get_instance().config.get("method", "None")667 )668 obfs = cfg.get(669 "obfs", ServerPool.get_instance().config.get("obfs", "plain")670 )671 logging.info(672 "db start server at port [%s] pass [%s] protocol [%s] method [%s] obfs [%s]"673 % (port, passwd, protocol, method, obfs)674 )675 ServerPool.get_instance().new_server(port, cfg)676 @staticmethod677 def del_servers():678 global db_instance679 for port in [680 v for v in ServerPool.get_instance().tcp_servers_pool.keys()681 ]:682 if ServerPool.get_instance().server_is_run(port) > 0:683 ServerPool.get_instance().cb_del_server(port)684 if port in db_instance.last_update_transfer:685 del db_instance.last_update_transfer[port]686 for port in [687 v for v in ServerPool.get_instance().tcp_ipv6_servers_pool.keys()688 ]:689 if ServerPool.get_instance().server_is_run(port) > 0:690 ServerPool.get_instance().cb_del_server(port)691 if port in db_instance.last_update_transfer:692 del db_instance.last_update_transfer[port]693 @staticmethod694 def thread_db(obj):695 import socket696 import webapi_utils697 global db_instance698 global webapi699 timeout = 60700 socket.setdefaulttimeout(timeout)701 last_rows = []702 db_instance = obj()703 webapi = webapi_utils.WebApi()704 shell.log_shadowsocks_version()705 try:706 import resource707 logging.info(708 "current process RLIMIT_NOFILE resource: soft %d hard %d"709 % resource.getrlimit(resource.RLIMIT_NOFILE)710 )711 except:712 pass713 try:714 while True:715 load_config()716 try:717 ping = webapi.getApi("func/ping")718 if ping is None:719 logging.error(720 "something wrong with your http api, please check your config and website status and try again later."721 )722 else:723 db_instance.push_db_all_user()724 rows = db_instance.pull_db_all_user()725 db_instance.del_server_out_of_bound_safe(726 last_rows, rows727 )728 last_rows = rows729 except Exception as e:730 trace = traceback.format_exc()731 logging.error(trace)732 # logging.warn('db thread except:%s' % e)733 if (734 db_instance.event.wait(60)735 or not db_instance.is_all_thread_alive()736 ):737 break738 if db_instance.has_stopped:739 break740 except KeyboardInterrupt as e:741 pass742 db_instance.del_servers()743 ServerPool.get_instance().stop()744 db_instance = None745 @staticmethod746 def thread_db_stop():747 global db_instance748 db_instance.has_stopped = True749 db_instance.event.set()750 def 
is_all_thread_alive(self):751 if not ServerPool.get_instance().thread.is_alive():752 return False...
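web_transfer.py uses the same accessor to reach one process-wide ServerPool whose tcp_servers_pool, tcp_ipv6_servers_pool, udp_servers_pool and udp_ipv6_servers_pool dictionaries are keyed by port. The four nearly identical if-blocks that push new audit rules to a port can be folded into a loop over those pools; a sketch under that assumption, using only names that appear in the snippet (ServerPool is assumed to be imported as in the original module):

def push_detect_rules(port, detect_text_list, detect_hex_list):
    """Update audit rules on every running server bound to this port."""
    pool = ServerPool.get_instance()          # one shared pool per process
    for servers in (pool.tcp_servers_pool,
                    pool.tcp_ipv6_servers_pool,
                    pool.udp_servers_pool,
                    pool.udp_ipv6_servers_pool):
        if port in servers:
            servers[port].modify_detect_text_list(detect_text_list)
            servers[port].modify_detect_hex_list(detect_hex_list)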


MakeDatasetRbp.py

Source: MakeDatasetRbp.py (GitHub)


...
    def comparison_dataset(self):

        # Create Logger instance to see the start of comparison between two datasets
        Logger.get_instance().info( " Start of comparison datasets:..." )

        # Definition of InfoDataset arguments

        self.dataset_input_path = PropertyManager.get_instance().get_property( DataConstants.DATASET_INPUT_PATH_PROPERTY, True)
        self.dataset_1_file = PropertyManager.get_instance().get_property( DataConstants.DATASET_1_FILE_PROPERTY, True)
        self.dataset_2_file = PropertyManager.get_instance().get_property( DataConstants.DATASET_2_FILE_PROPERTY, True)
        self.dataset_1_index_col = PropertyManager.get_instance().get_property( DataConstants.DATASET_1_INDEX_COL_PROPERTY, True)
        self.dataset_2_index_col = PropertyManager.get_instance().get_property( DataConstants.DATASET_2_INDEX_COL_PROPERTY, True)
        self.dataset_output = PropertyManager.get_instance().get_property( DataConstants.DATASET_OUTPUT_PROPERTY, True)
        self.dataset_1_length = PropertyManager.get_instance().get_property( DataConstants.DATASET_1_LENGTH_PROPERTY, True)
        self.dataset_2_length = PropertyManager.get_instance().get_property( DataConstants.DATASET_2_LENGTH_PROPERTY, True)

        self.index_col = (int(self.dataset_1_index_col), int(self.dataset_2_index_col))
        self.dataset_length = (int(self.dataset_1_length), int(self.dataset_2_length))

        self.path_home = Constants.PATH_HOME
        InfoDataset.global_analysis_dataset(self.path_home + self.dataset_input_path,
                                            (self.dataset_1_file,
                                             self.dataset_2_file),
                                            self.index_col,
                                            self.path_home + self.dataset_output,
                                            length=self.dataset_length)

        Logger.get_instance().info( " The comparison of datasets is completed : \
two file with the common and difference \
items has been generated in \n\n " + self.dataset_output )

    # creation_list
    # ----------------
    #
    # Creation of the gene and protein list of dataset 1 in order to pass it to Ensembl method
    #
    def creation_list(self):

        Logger.get_instance().info( " Creation of gene and protein list \n " )

        # Creation of two file containing respectively the genes and protein of dataset 1

        self.dataset_input_path = PropertyManager.get_instance().get_property( DataConstants.DATASET_INPUT_PATH_PROPERTY, True)
        self.file_dataset_1 = PropertyManager.get_instance().get_property( DataConstants.DATASET_1_FILE_PROPERTY, True)
        self.gene_index_col = PropertyManager.get_instance().get_property( DataConstants.LIST_GENE_INDEX_COL_PROPERTY, True)
        self.protein_index_col = PropertyManager.get_instance().get_property( DataConstants.LIST_PROTEIN_INDEX_COL_PROPERTY, True)

        self.list_gene_dataset_1 = PropertyManager.get_instance().get_property( DataConstants.LIST_FILE_GENE_DATASET_1_PROPERTY, True)
        self.list_protein_dataset_1 = PropertyManager.get_instance().get_property( DataConstants.LIST_FILE_PROTEIN_DATASET_1_PROPERTY, True)

        self.path_gene_dataset_1 = Constants.PATH_HOME + self.dataset_input_path + self.list_gene_dataset_1
        self.path_protein_dataset_1 = Constants.PATH_HOME + self.dataset_input_path + self.list_protein_dataset_1

        self.path_home = Constants.PATH_HOME
        self.path_dataset_1 = self.path_home + self.dataset_input_path + self.file_dataset_1

        dataset_1 = FileParser.make_table(self.path_dataset_1)

        gene_dataset_1 = TableWrapper.get_column(dataset_1, int(self.gene_index_col), start=1)
        protein_dataset_1 = TableWrapper.get_column(dataset_1, int(self.protein_index_col), start=1)

        FileWriter.write_table(self.path_gene_dataset_1, gene_dataset_1)
        FileWriter.write_table(self.path_protein_dataset_1, protein_dataset_1)

        Logger.get_instance().info( " The genes and proteins file of dataset 1 have been created \
in the following path \n\n " + self.dataset_input_path )

    # connection_to_ensembl
    # ----------------------
    #
    # Section reserved to extraction of sequences from Ensembl database
    # DATASET 1:
    # This dataset contains the gene and protein ids then you can download directly the protein sequence
    #
    # DATASET 2:
    # For this dataset are download only the sequences for the gene that not are also in Dataset 1
    # That is the Differ items obtained from comparison_dataset method
    #
    def connection_to_ensembl(self):

        Logger.get_instance().info( " Connection to Ensembl: Starting...\n" )

        # DATASET 1
        # =============================================

        # Collection of sequences for dataset 1
        Logger.get_instance().info( " Dataset 1 : Extraction of sequences...\n" )

        # Timer step
        Timer.get_instance().step(" Start of sequences extraction \n")

        # Definition of Ensembl list_get_seq arguments

        self.path_home = Constants.PATH_HOME
        self.list_path = Constants.PATH_HOME + PropertyManager.get_instance().get_property( DataConstants.DATASET_INPUT_PATH_PROPERTY, True)

        self.gene_list_1 = PropertyManager.get_instance().get_property( DataConstants.LIST_FILE_GENE_DATASET_1_PROPERTY, True)
        self.protein_list = PropertyManager.get_instance().get_property( DataConstants.LIST_FILE_PROTEIN_DATASET_1_PROPERTY, True)

        self.ensembl_gene_list_1_path = self.list_path + self.gene_list_1
        self.ensembl_protein_list_1_path = self.list_path + self.protein_list
        self.type_query1_ensembl = PropertyManager.get_instance().get_property( DataConstants.ENSEMBL_TYPE_QUERY_DATASET_1_PROPERTY, True)

        self.ensembl_path_output = Constants.PATH_HOME + PropertyManager.get_instance().get_property( DataConstants.ENSEMBL_OUTPUT_PATH_SEQUENCE_PROPERTY, True)
        self.ensembl_output_dataset1 = self.ensembl_path_output + PropertyManager.get_instance().get_property( DataConstants.ENSEMBL_FILE_SEQUENCES_1_PROPERTY)

        # Calling Ensembl.list_get_seq
        Ensembl.list_get_seq(self.ensembl_gene_list_1_path, int(self.type_query1_ensembl), path_protein_list=self.ensembl_protein_list_1_path, path_output=self.ensembl_output_dataset1)

        # Timer step
        Timer.get_instance().step(" End of Dataset 1 Sequences Extraction\n")

        Logger.get_instance().info( " Extraction of sequences for the dataset 1 has been completed \n\n" )

        # END DATASET 1
        # =====================================================

        # DATASET 2
        # =====================================================

        # Collection of sequences for dataset 2
        Logger.get_instance().info( " Dataset 2 : Extraction of sequences ....\n" )

        # Definition of Ensembl list_get_seq arguments

        self.ensembl_input_list_2 = Constants.PATH_HOME + PropertyManager.get_instance().get_property( DataConstants.DATASET_OUTPUT_PROPERTY, True)
        self.gene_list_2 = Constants.FILE_DIFF
        self.ensembl_gene_list_2_path = self.ensembl_input_list_2 + self.gene_list_2
        self.type_query2_ensembl = PropertyManager.get_instance().get_property( DataConstants.ENSEMBL_TYPE_QUERY_DATASET_2_PROPERTY, True)

        self.ensembl_path_output = Constants.PATH_HOME + PropertyManager.get_instance().get_property( DataConstants.ENSEMBL_OUTPUT_PATH_SEQUENCE_PROPERTY, True)
        self.ensembl_output_dataset2 = self.ensembl_path_output + PropertyManager.get_instance().get_property( DataConstants.ENSEMBL_FILE_SEQUENCES_2_PROPERTY, True)

        # Calling Ensembl.list_get_seq
        Ensembl.list_get_seq(self.ensembl_gene_list_2_path, int(self.type_query2_ensembl), path_protein_list=None, path_output=self.ensembl_output_dataset2)

        # Timer step
        Timer.get_instance().step(" End of Dataset 2 Sequences Extraction\n")

        Logger.get_instance().info( " Extraction of sequences for the dataset 2 has been completed \n\n" )

        # END DATASET 2
        # =====================================================

        Logger.get_instance().info( " The sequences file of dataset 1 and the novel gene in dataset 2 \
have been created in the following path \n" + self.ensembl_path_output)

    # dictionary_identifier
    # ----------------------
    #
    # Creation of a dictionary: gene = [ isoform1, isoform2,...isoformN]
    # This method is necessary to select the longest isoform for each gene
    #
    def dictionary_identifier(self):

        Logger.get_instance().info( " Creation of a dictionary for novel gene of dataset 2\
The dictionary structure is : \n \
{gene = [ isoform1, isoform2,...isoformN]}" )

        self.ensembl_path_output = Constants.PATH_HOME + PropertyManager.get_instance().get_property( DataConstants.ENSEMBL_OUTPUT_PATH_SEQUENCE_PROPERTY, True)
        self.ensembl_output_dataset2 = self.ensembl_path_output + PropertyManager.get_instance().get_property( DataConstants.ENSEMBL_FILE_SEQUENCES_2_PROPERTY, True)

        self.dictionary_output = Constants.PATH_HOME + PropertyManager.get_instance().get_property( DataConstants.DICTIONARY_PATH_OUTPUT_PROPERTY, True)
        self.dictionary_namefile = self.dictionary_output + PropertyManager.get_instance().get_property( DataConstants.DICTIONARY_NAME_FILE_PROPERTY, True)

        dict_identifier = InfoFasta.make_dictionary(self.ensembl_output_dataset2)

        file_dict = FileUtils.open_text_w(self.dictionary_namefile)

        pickle.dump(dict_identifier, file_dict)

        Logger.get_instance().info( " The creation of a dictionary for novel gene in dataset 2 is completed \n\n")

    # longest_sequence
    # ----------------
    #
    # this method find the longest isoform for each gene (see the principals of LengthSeq method)
    #
    def longest_sequence(self):

        Logger.get_instance().info( " Start of the selection of longest sequences of novel dataset \n")

        # Definition of arguments
        self.path_sequences = Constants.PATH_HOME + PropertyManager.get_instance().get_property( DataConstants.LONGEST_PATH_SEQUENCE_PROPERTY, True)
        self.file_sequences = self.path_sequences + PropertyManager.get_instance().get_property( DataConstants.LONGEST_PROT_FILE_SEQUENCES_2_PROPERTY, True)
        self.path_dictionary_identifier = Constants.PATH_HOME + PropertyManager.get_instance().get_property( DataConstants.LONGEST_PATH_DICTIONARY_PROPERTY, True)
        self.file_dictionary = self.path_dictionary_identifier + PropertyManager.get_instance().get_property( DataConstants.LONGEST_DICTIONARY_NAME_FILE_PROPERTY, True)

        self.path_output_longest = Constants.PATH_HOME + PropertyManager.get_instance().get_property( DataConstants.LONGEST_PATH_OUTPUT_PROPERTY, True)

        self.path_file_longest = self.path_output_longest + PropertyManager.get_instance().get_property( DataConstants.LONGEST_FILE_PROPERTY, True)
        self.path_file_isoform = self.path_output_longest + PropertyManager.get_instance().get_property( DataConstants.ISOFORM_FILE_PROPERTY, True)

        # Extraction the longest sequences from dataset 2 sequences (isoforms)
        LengthSeq.longest_seq(self.file_sequences, self.file_dictionary, self.path_file_longest, self.path_file_isoform)

        # Timer step
        Timer.get_instance().step(" End of selection of the longest sequences in dataset 2 \n")

        Logger.get_instance().info( " End of selection of the longest sequences: \n \
two file have been generated one with longest sequences and the other one containing the isoform with same length ")

    # isoform_sequences
    # --------------------------
    #
    # like explained in LengthSeq method a file containing isoform with same length is created
    #
    # In this section occurs the Random Selection of isoforms sequences
    #
    def isoform_sequences(self):

        Logger.get_instance().info( " Starting the random selection of isoforms with same length \n")
        Logger.get_instance().info( " The following headers are the proteins randomly selected \n")

        self.path_output_longest = Constants.PATH_HOME + PropertyManager.get_instance().get_property( DataConstants.LONGEST_PATH_OUTPUT_PROPERTY, True)

        self.path_file_isoform = self.path_output_longest + PropertyManager.get_instance().get_property( DataConstants.ISOFORM_FILE_PROPERTY, True)
        self.path_file_selected_isoform = self.path_output_longest + PropertyManager.get_instance().get_property( DataConstants.RANDOM_ISOFORM_SEQ_PROPERTY, True)

        # The headers of a Isoform fasta file are taken by InfoFasta class
        # You make sure that the arg text is equal to False because the input object is a file and not a list

        self.headers = InfoFasta.get_header(self.path_file_isoform, text=False)

        # Extraction of genes form headers line
        # This vector contains double gene because the file contains some isoform of the same gene

        gene_isoform = []
        for header in self.headers:
            gene = header[1:16]
            gene_isoform.append(gene)

        # gene set creation
        unique_gene = set(gene_isoform)

        # This for loop flows on the unique gene
        #
        random_header = []
        old_num_isoform = 0
        for gene in unique_gene:
            # For each gene counts how many isoform has
            num_isoform = gene_isoform.count(gene)
            item = range(0, num_isoform)
            # Select one isoform randomly
            sel = random.choice(item)
            # The header selected randomly are stored in array
            random_header.append(self.headers[old_num_isoform : old_num_isoform + num_isoform][sel])
            old_num_isoform = old_num_isoform + num_isoform

        self.file_random_seq = FileUtils.open_text_a(self.path_file_selected_isoform)

        # The sequences corresponding to header selected are extracted from isoform file

        for header in random_header:
            Logger.get_instance().info('Header selected : ' + header)
            identifier = header[33:48]
            sequence = InfoFasta.get_seq(self.path_file_isoform, identifier)
            fasta_seq = SeqToFasta.give_fasta(header, sequence)
            self.file_random_seq.write(fasta_seq)

        Logger.get_instance().info( " End of selection random sequences \n ")

    # merger_sequences
    # --------------------
    #
    # This method merges:
    # - the Longest sequences and the random isoform (dataset 2)
    # - FinalSeqDataset2 + SequencesDataset1
    #
    # The output is the final fasta file of dataset that can be gived as input to DisorderAnalysis
    #
    def merger_sequences(self):

        Logger.get_instance().info( " Union of the longest sequences and the random selected isoform ")

        # Input variables to merge the longest Novel sequences and random selected isoform of dataset 2

        self.path_input_longest = Constants.PATH_HOME + PropertyManager.get_instance().get_property( DataConstants.FUSION_PATH_INPUT_PROPERTY, True)
        self.path_file_longest = self.path_input_longest + PropertyManager.get_instance().get_property( DataConstants.LONGEST_FILE_PROPERTY, True)
        self.path_file_isoform = self.path_input_longest + PropertyManager.get_instance().get_property( DataConstants.SELECTED_ISOFORM_FILE_PROPERTY, True)

        self.path_output_seq = Constants.PATH_HOME + PropertyManager.get_instance().get_property( DataConstants.FUSION_PATH_OUTPUT_PROPERTY, True)
        self.path_file_seq_dataset_2 = self.path_output_seq + PropertyManager.get_instance().get_property( DataConstants.FUSION_FILE_SEQ_DATASET_2_PROPERTY, True)

        FileParser.merge_file(self.path_file_longest, self.path_file_isoform, self.path_file_seq_dataset_2)

        # Input variables to merge the sequences datasets (Novel_JProteomics and NatRevGenetics)

        self.path_input_seq_dataset1 = Constants.PATH_HOME + PropertyManager.get_instance().get_property( DataConstants.FUSION_PATH_INPUT_DATASET_1_PROPERTY, True)
        self.path_file_dataset1 = self.path_input_seq_dataset1 + PropertyManager.get_instance().get_property( DataConstants.FUSION_FILE_DATASET_1_PROPERTY, True)

        self.path_file_dataset12 = self.path_output_seq + PropertyManager.get_instance().get_property( DataConstants.FUSION_DATASET_12_PROPERTY, True)

        Logger.get_instance().info( " Union of sequences respectively of dataset 1 and the novel dataset 2 proteins \n ")

        FileParser.merge_file(self.path_file_dataset1, self.path_file_seq_dataset_2, self.path_file_dataset12)

        Logger.get_instance().info( " The New RBP Dataset has been created\n ")

        # This part checks if there are pseudo genes inside dataset 2
        # Make the comparison between the original genes gived as an input and the genes obtained after
        # connection to Ensembl
        # This check allows to find genes that are not anymore available or that are pseudogenes

        Logger.get_instance().info( " Comparison between original genes and Ensembl output ")
        self.path_home = Constants.PATH_HOME
        self.path_input_original_file = self.dataset_output = PropertyManager.get_instance().get_property( DataConstants.DATASET_OUTPUT_PROPERTY, True)
        self.original_file = self.path_home + self.path_input_original_file + Constants.FILE_DIFF

        original_genes = FileParser.read_file(self.original_file)

        self.path_output_seq = Constants.PATH_HOME + PropertyManager.get_instance().get_property( DataConstants.FUSION_PATH_OUTPUT_PROPERTY, True)
        self.path_file_seq_dataset_2 = self.path_output_seq + PropertyManager.get_instance().get_property( DataConstants.FUSION_FILE_SEQ_DATASET_2_PROPERTY, True)

        final_headers = InfoFasta.get_header(self.path_file_seq_dataset_2)
        final_genes = [item[1:16] for item in final_headers]

        out_comparison = InfoDataset.comparison_dataset(original_genes, final_genes, header=False)

        genes = '\n'.join(out_comparison[1])

        Logger.get_instance().info(" The genes lost during the request to Ensembl are : \n" + genes)

    # method that delete the append file in order to run again the whole workflow
    def delet_append_file(self):

        self.del_ensembl_input = Constants.PATH_HOME + PropertyManager.get_instance().get_property( DataConstants.DEL_ENSEMBL_PATH_PROPERTY, True)
        self.del_ensembl_file1 = self.del_ensembl_input + PropertyManager.get_instance().get_property( DataConstants.DEL_ENSEMBL_FILE1_PROPERTY, True)
        self.del_ensembl_file2 = self.del_ensembl_input + PropertyManager.get_instance().get_property( DataConstants.DEL_ENSEMBL_FILE2_PROPERTY, True)

        self.del_longest_input = Constants.PATH_HOME + PropertyManager.get_instance().get_property( DataConstants.DEL_LONGEST_PATH_PROPERTY, True)
        self.del_longest_file = self.del_longest_input + PropertyManager.get_instance().get_property( DataConstants.DEL_LONGEST_FILE_PROPERTY, True)
        self.del_isoform_file = self.del_longest_input + PropertyManager.get_instance().get_property( DataConstants.DEL_ISOFORM_FILE_PROPERTY, True)
        self.del_random_isoform_file = self.del_longest_input + PropertyManager.get_instance().get_property( DataConstants.DEL_RANDOM_ISOFORM_FILE_PROPERTY, True)

        self.del_fusion_path = Constants.PATH_HOME + PropertyManager.get_instance().get_property( DataConstants.DEL_FUSION_PATH_PROPERTY, True)
        self.del_fusion_file_longest = self.del_fusion_path + PropertyManager.get_instance().get_property( DataConstants.DEL_FUSION_DATASET_LONGEST_PROPERTY, True)
        self.del_fusion_file_dataset12 = self.del_fusion_path + PropertyManager.get_instance().get_property( DataConstants.DEL_FUSION_DATASET12_PROPERTY, True)

        files = [self.del_ensembl_file1,
                 self.del_ensembl_file2,
                 self.del_longest_file,
                 self.del_isoform_file,
                 self.del_random_isoform_file,
                 self.del_fusion_file_longest,
                 self.del_fusion_file_dataset12]

        for namefile in files:
            RemoveFile.delete_file(namefile)

    # whole_procedure
    # ----------------
    #
    # This method allows to select just some of previously methods
    #
    @staticmethod
    def whole_procedure():

        # start chrono
        Timer.get_instance().start_chrono()
        Logger.get_instance().info("Start of the creation of RBP dataset.....\n ")

        M = MakeDatasetRbp()

        #M.delet_append_file()

        #M.comparison_dataset()
        #M.creation_list()
        #M.connection_to_ensembl()
        #M.dictionary_identifier()
        #M.longest_sequence()
        #M.isoform_sequences()
        #M.merger_sequences()
        M.split_dataset()

        Timer.get_instance().stop_chrono(' End of the creation of RBP dataset')


if __name__ == '__main__':

    OptionManager.get_instance().initialize()

    # Retrieve the MakeDatasetRbp properties
    PropertyManager.get_instance().read_properties( OptionManager.get_instance().get_option( OptionConstants.OPTION_MAKEDATASETRBP_PROPERTIES_PATH, True))

    MakeDatasetRbp.whole_procedure()
...
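Every service in this excerpt (Logger, Timer, PropertyManager, OptionManager) is reached through a class-level get_instance() accessor rather than a module-level variable. The concrete implementations are not shown on this page, so the following is only a minimal sketch of such a lazily created singleton, using a hypothetical PropertyManager as the example; the names read_properties and get_property mirror the calls above, but their bodies are assumptions, not the real library code:

class PropertyManager(object):
    # Hypothetical sketch of the get_instance() singleton accessor pattern used above.
    _instance = None

    def __init__(self):
        self._properties = {}

    @classmethod
    def get_instance(cls):
        # Lazily create the single shared instance on first call, then always return it.
        if cls._instance is None:
            cls._instance = cls()
        return cls._instance

    def read_properties(self, path):
        # Load simple "key = value" lines from a text file (assumed format).
        with open(path) as handle:
            for line in handle:
                line = line.strip()
                if line and not line.startswith("#"):
                    key, _, value = line.partition("=")
                    self._properties[key.strip()] = value.strip()

    def get_property(self, key, mandatory=False):
        # Return the stored value; fail loudly when a required key is missing.
        if mandatory and key not in self._properties:
            raise KeyError("Missing mandatory property: " + key)
        return self._properties.get(key)

Because get_instance() always hands back the same object, properties read once in the __main__ block remain visible from every method of MakeDatasetRbp without passing the manager around.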


MotifsAnalysis.py

Source:MotifsAnalysis.py Github


...
    def __init__(self):

        self.path_home = Constants.PATH_HOME
        self.protein_list_file = self.path_home + PropertyManager.get_instance().get_property( DataConstants.MOTIFS_PROTEIN_FILE_PROPERTY, True)
        self.protein_list = FileParser.read_file(self.protein_list_file)
        self.motif_folder = self.path_home + PropertyManager.get_instance().get_property( DataConstants.MOTIFS_FOLDER_PROPERTY, True)
        self.domain_region_file = self.path_home + PropertyManager.get_instance().get_property( DataConstants.MOTIFS_DOMAIN_REGION_FILE_PROPERTY, True)

    # iupred_motifs
    # ----------------------------
    #
    # This method calls the GlobalOverlapRegionAnalysis for iupred tool
    #
    # Output:
    # - The result of motifs overlap analysis will be saved in some files in output folder
    #
    def iupred_motifs(self):

        Logger.get_instance().info( " .....Start of IUPred motifs analysis.....\n")

        self.iupred_folder = self.path_home + PropertyManager.get_instance().get_property( DataConstants.MOTIFS_IUPRED_FOLDER_PROPERTY, True)

        # Iupred Analysis at threshold value of 0.4

        Timer.get_instance().step(" Start of IUPred motifs analysis - threshold = 0.4 \n")

        self.threshold_1 = Constants.MOTIFS_THRESHOLD_1
        self.output_folder_1 = self.path_home + PropertyManager.get_instance().get_property( DataConstants.MOTIFS_IUP_OUTPUT_FOLDER_1_PROPERTY, True)

        GlobalOverlapRegionAnalysis.iupred_overlap_analysis(self.protein_list, self.iupred_folder, self.output_folder_1, self.threshold_1,
                                                            self.motif_folder, self.domain_region_file)

        Timer.get_instance().step(" End of IUPred motifs analysis - threshold = 0.4 \n")

        # Iupred Analysis at threshold value of 0.5

        Timer.get_instance().step(" Start of IUPred motifs analysis - threshold = 0.5 \n")
        self.threshold_2 = Constants.MOTIFS_THRESHOLD_2
        self.output_folder_2 = self.path_home + PropertyManager.get_instance().get_property( DataConstants.MOTIFS_IUP_OUTPUT_FOLDER_2_PROPERTY, True)

        GlobalOverlapRegionAnalysis.iupred_overlap_analysis(self.protein_list, self.iupred_folder, self.output_folder_2, self.threshold_2,
                                                            self.motif_folder, self.domain_region_file)

        Timer.get_instance().step(" End of IUPred motifs analysis - threshold = 0.5 \n")

        Logger.get_instance().info( " .....End of IUPred motifs analysis.....\n")

    # anchor_motifs
    # ----------------------------
    #
    # This method calls the GlobalOverlapRegionAnalysis for anchor tool
    #
    # Output:
    # - The result of motifs overlap analysis will be saved in some files in output folder
    #
    def anchor_motifs(self):

        Logger.get_instance().info( " .....Start of IUPred motifs analysis.....\n")

        self.anchor_folder = self.path_home + PropertyManager.get_instance().get_property( DataConstants.MOTIFS_ANCHOR_FOLDER_PROPERTY, True)
        self.anchor_output_folder = self.path_home + PropertyManager.get_instance().get_property( DataConstants.MOTIFS_ANCHOR_OUTPUT_FOLDER_PROPERTY, True)

        Timer.get_instance().step(" Start of ANCHOR motifs analysis \n")

        GlobalOverlapRegionAnalysis.anchor_overlap_analysis(self.protein_list, self.anchor_folder, self.anchor_output_folder,
                                                            self.motif_folder, self.domain_region_file)

        Timer.get_instance().step(" End of IUPred motifs analysis \n")

        Logger.get_instance().info( " .....End of ANCHOR motifs analysis.....\n")

    # disordp_motifs
    # ----------------------------
    #
    # This method calls the GlobalOverlapRegionAnalysis for disordpbind tool
    #
    # Output:
    # - The result of motifs overlap analysis will be saved in some files in output folder

    def disordp_motifs(self):

        Logger.get_instance().info( " .....Start of DisoRDPbind motifs analysis.....\n")

        self.disordp_folder = self.path_home + PropertyManager.get_instance().get_property( DataConstants.MOTIFS_DISORDP_FOLDER_PROPERTY, True)
        self.disordp_output_folder = self.path_home + PropertyManager.get_instance().get_property( DataConstants.MOTIFS_DISORDP_OUTPUT_FOLDER_PROPERTY, True)
        self.filename = PropertyManager.get_instance().get_property( DataConstants.MOTIFS_DISORDP_FILE_PROPERTY, True)

        Timer.get_instance().step(" Start of DisoRDPbind motifs analysis \n")

        GlobalOverlapRegionAnalysis.disordp_overlap_analysis(self.protein_list, self.disordp_folder, self.filename, self.motif_folder,
                                                             self.domain_region_file, self.disordp_output_folder)

        Timer.get_instance().step(" End of DisoRDPbind motifs analysis \n")
        Logger.get_instance().info( " .....End of DisoRDPbind motifs analysis.....\n")

    # whole_procedure
    # ----------------
    #
    # This method allows to select just some of previously methods
    #
    @staticmethod
    def whole_procedure():

        dataset_type = PropertyManager.get_instance().get_property( DataConstants.MOTIFS_DATASET_TYPE_PROPERTY, True)

        # start chrono
        Timer.get_instance().start_chrono()
        Logger.get_instance().info(" ........Start of " + dataset_type + " Motifs Analysis.....\n ")

        motifs = MotifsAnalysis()
        motifs.iupred_motifs()
        motifs.anchor_motifs()
        motifs.disordp_motifs()

        Timer.get_instance().stop_chrono(" End of " + dataset_type + " Motifs Analysis")


if __name__ == '__main__':

    OptionManager.get_instance().initialize()

    # Set the level of verbosity
    Logger.get_instance().set_level( OptionManager.get_instance().get_option( OptionConstants.OPTION_VERBOSITY))
    PropertyManager.get_instance().read_properties( OptionManager.get_instance().get_option( OptionConstants.OPTION_MOTIFS_ANALYSIS_PROPERTY_PATH, True))

    MotifsAnalysis.whole_procedure()
...
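The payoff of the pattern in both snippets is that every get_instance() call returns the same shared object, so the properties loaded in the __main__ block are the ones seen inside iupred_motifs, anchor_motifs and disordp_motifs. A quick check against the hypothetical PropertyManager sketch above (not the real class from these projects) makes that explicit; "demo.key" is a made-up key used only for illustration:

import tempfile

# Write a tiny properties file, load it through one reference, and read it
# back through another; both names resolve to the same shared instance.
with tempfile.NamedTemporaryFile("w", suffix=".properties", delete=False) as tmp:
    tmp.write("demo.key = demo value\n")

first = PropertyManager.get_instance()
second = PropertyManager.get_instance()
assert first is second                          # one object, not two copies

first.read_properties(tmp.name)
print(second.get_property("demo.key", True))    # prints: demo value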


