How to use sliced method in Selene

Best Python code snippet using selene_python

slicing.py

Source:slicing.py Github

copy

Full Screen

# OBSS SAHI Tool
# Code written by Fatih C Akyon, 2020.
import concurrent.futures
import logging
import os
import time
from pathlib import Path
from typing import Dict, List, Optional, Union

import numpy as np
from PIL import Image
from shapely.errors import TopologicalError
from tqdm import tqdm

from sahi.utils.coco import Coco, CocoAnnotation, CocoImage, create_coco_dict
from sahi.utils.cv import read_image_as_pil
from sahi.utils.file import load_json, save_json

logger = logging.getLogger(__name__)
logging.basicConfig(
    format="%(asctime)s - %(levelname)s - %(name)s - %(message)s",
    datefmt="%m/%d/%Y %H:%M:%S",
    level=os.environ.get("LOGLEVEL", "INFO").upper(),
)

# Upper bound on threads used when exporting slice images to disk.
MAX_WORKERS = 20


def get_slice_bboxes(
    image_height: int,
    image_width: int,
    slice_height: int = 512,
    slice_width: int = 512,
    overlap_height_ratio: float = 0.2,  # fix: was annotated `int`, value/usage are float
    overlap_width_ratio: float = 0.2,  # fix: was annotated `int`, value/usage are float
) -> List[List[int]]:
    """Slices `image_pil` in crops.

    Corner values of each slice will be generated using the `slice_height`,
    `slice_width`, `overlap_height_ratio` and `overlap_width_ratio` arguments.

    Args:
        image_height (int): Height of the original image.
        image_width (int): Width of the original image.
        slice_height (int): Height of each slice. Default 512.
        slice_width (int): Width of each slice. Default 512.
        overlap_height_ratio (float): Fractional overlap in height of each
            slice (e.g. an overlap of 0.2 for a slice of size 100 yields an
            overlap of 20 pixels). Default 0.2.
        overlap_width_ratio (float): Fractional overlap in width of each
            slice (e.g. an overlap of 0.2 for a slice of size 100 yields an
            overlap of 20 pixels). Default 0.2.

    Returns:
        List[List[int]]: List of 4 corner coordinates for each N slices.
            [
                [slice_0_left, slice_0_top, slice_0_right, slice_0_bottom],
                ...
                [slice_N_left, slice_N_top, slice_N_right, slice_N_bottom]
            ]
    """
    slice_bboxes = []
    y_max = y_min = 0
    y_overlap = int(overlap_height_ratio * slice_height)
    x_overlap = int(overlap_width_ratio * slice_width)
    while y_max < image_height:
        x_min = x_max = 0
        y_max = y_min + slice_height
        while x_max < image_width:
            x_max = x_min + slice_width
            if y_max > image_height or x_max > image_width:
                # Slice would overhang the image border: clamp to the border
                # and shift the slice back inside so it keeps the requested
                # size whenever the image is large enough.
                xmax = min(image_width, x_max)
                ymax = min(image_height, y_max)
                xmin = max(0, xmax - slice_width)
                ymin = max(0, ymax - slice_height)
                slice_bboxes.append([xmin, ymin, xmax, ymax])
            else:
                slice_bboxes.append([x_min, y_min, x_max, y_max])
            x_min = x_max - x_overlap
        y_min = y_max - y_overlap
    return slice_bboxes


def annotation_inside_slice(annotation: Dict, slice_bbox: List[int]) -> bool:
    """Check whether annotation coordinates lie inside slice coordinates.

    Args:
        annotation (dict): Single annotation entry in COCO format
            (bbox given as [left, top, width, height]).
        slice_bbox (List[int]): Generated from `get_slice_bboxes`.
            Format for each slice bbox: [x_min, y_min, x_max, y_max].

    Returns:
        (bool): True if any annotation coordinate lies inside slice.
    """
    left, top, width, height = annotation["bbox"]
    right = left + width
    bottom = top + height
    # Reject when the annotation lies entirely on one side of the slice.
    if left >= slice_bbox[2]:
        return False
    if top >= slice_bbox[3]:
        return False
    if right <= slice_bbox[0]:
        return False
    if bottom <= slice_bbox[1]:
        return False
    return True


def process_coco_annotations(
    coco_annotation_list: List[CocoAnnotation], slice_bbox: List[int], min_area_ratio: float
) -> List[CocoAnnotation]:  # fix: was annotated `-> bool` but returns a list
    """Slices and filters given list of CocoAnnotation objects with given
    'slice_bbox' and 'min_area_ratio'.

    Args:
        coco_annotation_list (List[CocoAnnotation])
        slice_bbox (List[int]): Generated from `get_slice_bboxes`.
            Format for each slice bbox: [x_min, y_min, x_max, y_max].
        min_area_ratio (float): If the cropped annotation area to original
            annotation ratio is smaller than this value, the annotation is
            filtered out. Default 0.1.

    Returns:
        (List[CocoAnnotation]): Sliced annotations.
    """
    sliced_coco_annotation_list: List[CocoAnnotation] = []
    for coco_annotation in coco_annotation_list:
        if annotation_inside_slice(coco_annotation.json, slice_bbox):
            sliced_coco_annotation = coco_annotation.get_sliced_coco_annotation(slice_bbox)
            # Drop annotations that were mostly cropped away by the slice.
            if sliced_coco_annotation.area / coco_annotation.area >= min_area_ratio:
                sliced_coco_annotation_list.append(sliced_coco_annotation)
    return sliced_coco_annotation_list


class SlicedImage:
    def __init__(self, image, coco_image, starting_pixel):
        """
        Args:
            image: np.array
                Sliced image.
            coco_image: CocoImage
                Coco styled image object that belongs to sliced image.
            starting_pixel: list of int
                Starting pixel coordinates [x, y] of the sliced image
                within the original image.
        """
        self.image = image
        self.coco_image = coco_image
        self.starting_pixel = starting_pixel


class SliceImageResult:
    def __init__(self, original_image_size=None, image_dir: str = None):
        """
        Args:
            original_image_size: list of int
                Size of the unsliced original image in [height, width].
                NOTE(review): despite the None default this is effectively
                required — it is indexed immediately below.
            image_dir: str
                Directory of the sliced image exports.
        """
        self._sliced_image_list: List[SlicedImage] = []
        self.original_image_height = original_image_size[0]
        self.original_image_width = original_image_size[1]
        self.image_dir = image_dir

    def add_sliced_image(self, sliced_image: SlicedImage):
        """Append a SlicedImage; rejects any other type."""
        if not isinstance(sliced_image, SlicedImage):
            raise TypeError("sliced_image must be a SlicedImage instance")
        self._sliced_image_list.append(sliced_image)

    @property
    def sliced_image_list(self):
        return self._sliced_image_list

    @property
    def images(self):
        """Returns sliced images.

        Returns:
            images: a list of np.array
        """
        images = []
        for sliced_image in self._sliced_image_list:
            images.append(sliced_image.image)
        return images

    @property
    def coco_images(self) -> List[CocoImage]:
        """Returns CocoImage representation of SliceImageResult.

        Returns:
            coco_images: a list of CocoImage
        """
        coco_images: List = []
        for sliced_image in self._sliced_image_list:
            coco_images.append(sliced_image.coco_image)
        return coco_images

    @property
    def starting_pixels(self) -> List[int]:
        """Returns a list of starting pixels for each slice.

        Returns:
            starting_pixels: a list of starting pixel coords [x,y]
        """
        starting_pixels = []
        for sliced_image in self._sliced_image_list:
            starting_pixels.append(sliced_image.starting_pixel)
        return starting_pixels

    @property
    def filenames(self) -> List[int]:
        """Returns a list of filenames for each slice.

        Returns:
            filenames: a list of filenames as str
        """
        filenames = []
        for sliced_image in self._sliced_image_list:
            filenames.append(sliced_image.coco_image.file_name)
        return filenames

    def __len__(self):
        return len(self._sliced_image_list)


def slice_image(
    image: Union[str, Image.Image],
    coco_annotation_list: Optional[List[CocoAnnotation]] = None,  # fix: was Optional[CocoAnnotation]
    output_file_name: Optional[str] = None,
    output_dir: Optional[str] = None,
    slice_height: int = 512,
    slice_width: int = 512,
    overlap_height_ratio: float = 0.2,
    overlap_width_ratio: float = 0.2,
    min_area_ratio: float = 0.1,
    out_ext: Optional[str] = None,
    verbose: bool = False,
) -> SliceImageResult:
    """Slice a large image into smaller windows. If output_file_name is given export
    sliced images.

    Args:
        image (str or PIL.Image): File path of image or Pillow Image to be sliced.
        coco_annotation_list (List[CocoAnnotation], optional): List of CocoAnnotation objects.
        output_file_name (str, optional): Root name of output files (coordinates will
            be appended to this)
        output_dir (str, optional): Output directory
        slice_height (int): Height of each slice. Default 512.
        slice_width (int): Width of each slice. Default 512.
        overlap_height_ratio (float): Fractional overlap in height of each
            slice (e.g. an overlap of 0.2 for a slice of size 100 yields an
            overlap of 20 pixels). Default 0.2.
        overlap_width_ratio (float): Fractional overlap in width of each
            slice (e.g. an overlap of 0.2 for a slice of size 100 yields an
            overlap of 20 pixels). Default 0.2.
        min_area_ratio (float): If the cropped annotation area to original annotation
            ratio is smaller than this value, the annotation is filtered out. Default 0.1.
        out_ext (str, optional): Extension of saved images. Default is the
            original suffix.
        verbose (bool, optional): Switch to print relevant values to screen.
            Default 'False'.

    Returns:
        sliced_image_result: SliceImageResult
    """
    # define verboseprint
    verboselog = logger.info if verbose else lambda *a, **k: None

    def _export_single_slice(image: np.ndarray, output_dir: str, slice_file_name: str):
        """Save one sliced image array as `output_dir`/`slice_file_name`."""
        image_pil = read_image_as_pil(image)
        slice_file_path = str(Path(output_dir) / slice_file_name)
        # export sliced image
        image_pil.save(slice_file_path)
        verboselog("sliced image path: " + slice_file_path)

    # create outdir if not present
    if output_dir is not None:
        Path(output_dir).mkdir(parents=True, exist_ok=True)

    # read image
    image_pil = read_image_as_pil(image)
    verboselog("image.shape: " + str(image_pil.size))
    image_width, image_height = image_pil.size
    if image_width == 0 or image_height == 0:
        raise RuntimeError(f"invalid image size: {image_pil.size} for 'slice_image'.")
    slice_bboxes = get_slice_bboxes(
        image_height=image_height,
        image_width=image_width,
        slice_height=slice_height,
        slice_width=slice_width,
        overlap_height_ratio=overlap_height_ratio,
        overlap_width_ratio=overlap_width_ratio,
    )
    n_ims = 0
    # init images and annotations lists
    sliced_image_result = SliceImageResult(original_image_size=[image_height, image_width], image_dir=output_dir)
    image_pil_arr = np.asarray(image_pil)
    # iterate over slices
    for slice_bbox in slice_bboxes:
        n_ims += 1
        # extract image (bbox is [left, top, right, bottom])
        tlx = slice_bbox[0]
        tly = slice_bbox[1]
        brx = slice_bbox[2]
        bry = slice_bbox[3]
        image_pil_slice = image_pil_arr[tly:bry, tlx:brx]
        # process annotations if coco_annotations is given
        if coco_annotation_list is not None:
            sliced_coco_annotation_list = process_coco_annotations(coco_annotation_list, slice_bbox, min_area_ratio)
        # set image file suffixes
        slice_suffixes = "_".join(map(str, slice_bbox))
        if out_ext:
            suffix = out_ext
        else:
            # PIL images opened from a path carry `.filename`; in-memory
            # images do not, hence the AttributeError fallback.
            try:
                suffix = Path(image_pil.filename).suffix
            except AttributeError:
                suffix = ".jpg"
        # set image file name and path
        slice_file_name = f"{output_file_name}_{slice_suffixes}{suffix}"
        # create coco image
        # BUG FIX: the original rebound the `slice_width`/`slice_height`
        # parameters here, so the summary log below reported the size of the
        # last slice instead of the configured slice size. Use fresh locals.
        slice_w = slice_bbox[2] - slice_bbox[0]
        slice_h = slice_bbox[3] - slice_bbox[1]
        coco_image = CocoImage(file_name=slice_file_name, height=slice_h, width=slice_w)
        # append coco annotations (if present) to coco image
        if coco_annotation_list:
            for coco_annotation in sliced_coco_annotation_list:
                coco_image.add_annotation(coco_annotation)
        # create sliced image and append to sliced_image_result
        sliced_image = SlicedImage(
            image=image_pil_slice,
            coco_image=coco_image,
            starting_pixel=[slice_bbox[0], slice_bbox[1]],
        )
        sliced_image_result.add_sliced_image(sliced_image)
    # export slices if output directory is provided
    if output_file_name and output_dir:
        # BUG FIX: the original never shut the executor down and never
        # consumed the map() iterator, so worker exceptions were silently
        # dropped. The context manager waits for completion and draining
        # the iterator re-raises any export error.
        with concurrent.futures.ThreadPoolExecutor(max_workers=MAX_WORKERS) as conc_exec:
            list(
                conc_exec.map(
                    _export_single_slice,
                    sliced_image_result.images,
                    [output_dir] * len(sliced_image_result),
                    sliced_image_result.filenames,
                )
            )
    verboselog(
        "Num slices: " + str(n_ims) + " slice_height: " + str(slice_height) + " slice_width: " + str(slice_width),
    )
    return sliced_image_result


def slice_coco(
    coco_annotation_file_path: str,
    image_dir: str,
    output_coco_annotation_file_name: str,
    output_dir: Optional[str] = None,
    ignore_negative_samples: bool = False,
    slice_height: int = 512,
    slice_width: int = 512,
    overlap_height_ratio: float = 0.2,
    overlap_width_ratio: float = 0.2,
    min_area_ratio: float = 0.1,
    out_ext: Optional[str] = None,
    verbose: bool = False,
) -> List[Union[Dict, str]]:
    """
    Slice large images given in a directory, into smaller windows. If out_name is given export sliced images and coco file.

    Args:
        coco_annotation_file_path (str): Location of the coco annotation file
        image_dir (str): Base directory for the images
        output_coco_annotation_file_name (str): File name of the exported coco
            dataset json.
        output_dir (str, optional): Output directory
        ignore_negative_samples (bool): If True, images without annotations
            are ignored. Defaults to False.
        slice_height (int): Height of each slice. Default 512.
        slice_width (int): Width of each slice. Default 512.
        overlap_height_ratio (float): Fractional overlap in height of each
            slice (e.g. an overlap of 0.2 for a slice of size 100 yields an
            overlap of 20 pixels). Default 0.2.
        overlap_width_ratio (float): Fractional overlap in width of each
            slice (e.g. an overlap of 0.2 for a slice of size 100 yields an
            overlap of 20 pixels). Default 0.2.
        min_area_ratio (float): If the cropped annotation area to original annotation
            ratio is smaller than this value, the annotation is filtered out. Default 0.1.
        out_ext (str, optional): Extension of saved images. Default is the
            original suffix.
        verbose (bool, optional): Switch to print relevant values to screen.
            Default 'False'.

    Returns:
        coco_dict: dict
            COCO dict for sliced images and annotations
        save_path: str
            Path to the saved coco file
    """
    # read coco file
    coco_dict: Dict = load_json(coco_annotation_file_path)
    # create image_id_to_annotation_list mapping
    coco = Coco.from_coco_dict_or_path(coco_dict)
    # init sliced coco_utils.CocoImage list
    sliced_coco_images: List = []
    # iterate over images and slice
    for coco_image in tqdm(coco.images):
        # get image path
        image_path: str = os.path.join(image_dir, coco_image.file_name)
        # slice image
        try:
            slice_image_result = slice_image(
                image=image_path,
                coco_annotation_list=coco_image.annotations,
                output_file_name=Path(coco_image.file_name).stem,
                output_dir=output_dir,
                slice_height=slice_height,
                slice_width=slice_width,
                overlap_height_ratio=overlap_height_ratio,
                overlap_width_ratio=overlap_width_ratio,
                min_area_ratio=min_area_ratio,
                out_ext=out_ext,
                verbose=verbose,
            )
            # append slice outputs
            sliced_coco_images.extend(slice_image_result.coco_images)
        except TopologicalError:
            # Invalid polygon geometry in an annotation: skip the image
            # rather than abort the whole dataset slicing run.
            logger.warning(f"Invalid annotation found, skipping this image: {image_path}")
    # create and save coco dict
    coco_dict = create_coco_dict(
        sliced_coco_images,
        coco_dict["categories"],
        ignore_negative_samples=ignore_negative_samples,
    )
    save_path = ""
    if output_coco_annotation_file_name and output_dir:
        save_path = str(Path(output_dir) / (output_coco_annotation_file_name + "_coco.json"))
        save_json(coco_dict, save_path)
    # BUG FIX: the source was truncated here; the docstring documents both
    # return values, so restore the missing return statement.
    return coco_dict, save_path

Full Screen

Full Screen

test_charon_util.py

Source:test_charon_util.py Github

copy

Full Screen

#
# Copyright (c) 2016 Nutanix Inc. All rights reserved.
#
#
import unittest

from curie.util import CurieUtil, chunk_iter


class TestCurieUtil(unittest.TestCase):
    """Tests for CurieUtil.slice_with_string and chunk_iter.

    NOTE(review): assertions compare ``.keys()``/``.values()`` directly
    against lists and ``range(...)``, which only holds where those methods
    return lists (Python 2 style) — confirm before porting to Python 3.
    """

    def setUp(self):
        # Sequence of six items: "item_0" .. "item_5".
        self.sequence = ["item_%d" % x for x in range(6)]

    def test_slice_with_string_all(self):
        # Both "all" and ":" select the entire sequence.
        for text in ["all", ":"]:
            sliced = CurieUtil.slice_with_string(self.sequence, text)
            self.assertEqual(sliced.values(), self.sequence)
            self.assertEqual(sliced.keys(), range(6))

    def test_slice_with_string_single(self):
        sliced = CurieUtil.slice_with_string(self.sequence, "2")
        self.assertEqual(sliced.values(), ["item_2"])
        self.assertEqual(sliced.keys(), [2])

    def test_slice_with_string_two_indices(self):
        sliced = CurieUtil.slice_with_string(self.sequence, "2, 4")
        self.assertEqual(sliced.values(), ["item_2", "item_4"])
        self.assertEqual(sliced.keys(), [2, 4])

    def test_slice_with_string_two_indices_reverse(self):
        # Selection order follows the slice string, not sequence order.
        sliced = CurieUtil.slice_with_string(self.sequence, "4, 2")
        self.assertEqual(sliced.values(), ["item_4", "item_2"])
        self.assertEqual(sliced.keys(), [4, 2])

    def test_slice_with_string_range(self):
        # Ranges are half-open, like Python slices.
        sliced = CurieUtil.slice_with_string(self.sequence, "1:3")
        self.assertEqual(sliced.values(), ["item_1", "item_2"])
        self.assertEqual(sliced.keys(), [1, 2])

    def test_slice_with_string_two_ranges(self):
        sliced = CurieUtil.slice_with_string(self.sequence, "1:3, 4:")
        self.assertEqual(sliced.values(), ["item_1", "item_2", "item_4", "item_5"])
        self.assertEqual(sliced.keys(), [1, 2, 4, 5])

    def test_slice_with_string_mixed_range(self):
        sliced = CurieUtil.slice_with_string(self.sequence, "1:3, 4")
        self.assertEqual(sliced.values(), ["item_1", "item_2", "item_4"])
        self.assertEqual(sliced.keys(), [1, 2, 4])

    def test_slice_with_string_mixed_range_reverse(self):
        sliced = CurieUtil.slice_with_string(self.sequence, "4, 1:3")
        self.assertEqual(sliced.values(), ["item_4", "item_1", "item_2"])
        self.assertEqual(sliced.keys(), [4, 1, 2])

    def test_slice_with_string_three_indices_mixed_order(self):
        sliced = CurieUtil.slice_with_string(self.sequence, "4, 1, 3")
        self.assertEqual(sliced.values(), ["item_4", "item_1", "item_3"])
        self.assertEqual(sliced.keys(), [4, 1, 3])

    def test_slice_with_string_negative_indices_converted_to_positive(self):
        sliced = CurieUtil.slice_with_string(self.sequence, "-1, -2")
        self.assertEqual(sliced.values(), ["item_5", "item_4"])
        self.assertEqual(sliced.keys(), [5, 4])

    def test_slice_with_string_negative_range_converted_to_positive(self):
        sliced = CurieUtil.slice_with_string(self.sequence, ":-3")
        self.assertEqual(sliced.values(), ["item_0", "item_1", "item_2"])
        self.assertEqual(sliced.keys(), [0, 1, 2])

    def test_slice_with_string_single_index_min(self):
        # -6 is the most-negative in-bounds index for a 6-item sequence.
        sliced = CurieUtil.slice_with_string(self.sequence, "-6")
        self.assertEqual(sliced.values(), ["item_0"])
        self.assertEqual(sliced.keys(), [0])

    def test_slice_with_string_single_index_max(self):
        sliced = CurieUtil.slice_with_string(self.sequence, "5")
        self.assertEqual(sliced.values(), ["item_5"])
        self.assertEqual(sliced.keys(), [5])

    def test_slice_with_string_single_range_min(self):
        sliced = CurieUtil.slice_with_string(self.sequence, "-6:")
        self.assertEqual(sliced.values(), self.sequence)
        self.assertEqual(sliced.keys(), range(6))

    def test_slice_with_string_single_range_max(self):
        sliced = CurieUtil.slice_with_string(self.sequence, ":6")
        self.assertEqual(sliced.values(), self.sequence)
        self.assertEqual(sliced.keys(), range(6))

    def test_slice_with_string_single_index_out_of_bounds_min(self):
        with self.assertRaises(IndexError):
            CurieUtil.slice_with_string(self.sequence, "0, -7")

    def test_slice_with_string_single_index_out_of_bounds_max(self):
        with self.assertRaises(IndexError):
            CurieUtil.slice_with_string(self.sequence, "0, 6")

    def test_slice_with_n_end(self):
        # 'n' in the slice expression evaluates to len(sequence).
        sliced = CurieUtil.slice_with_string(self.sequence, "1:(n/2)+1")
        self.assertEqual(sliced.values(), ["item_1", "item_2", "item_3"])
        self.assertEqual(sliced.keys(), [1, 2, 3])

    def test_slice_with_n_start(self):
        sliced = CurieUtil.slice_with_string(self.sequence, "(n/2)-1:")
        self.assertEqual(sliced.values(), ["item_2", "item_3", "item_4", "item_5"])
        self.assertEqual(sliced.keys(), [2, 3, 4, 5])

    def test_slice_with_string_empty_slice(self):
        for ii in range(len(self.sequence)):
            sliced = CurieUtil.slice_with_string(self.sequence, "%d:%d" % (ii, ii))
            self.assertEqual(sliced.values(), [])
            self.assertEqual(sliced.keys(), [])
        # Consistency with python built-in slice behavior allowing empty
        # out-of-bounds slices.
        out_of_bounds = len(self.sequence) + 10
        sliced = CurieUtil.slice_with_string(self.sequence, "%d:%d" %
                                             (out_of_bounds, out_of_bounds))
        self.assertEqual(sliced.values(), [])
        self.assertEqual(sliced.keys(), [])

    def test_slice_with_invalid_input(self):
        # Unknown identifiers and arbitrary code must be rejected, not eval'd.
        with self.assertRaises(ValueError):
            CurieUtil.slice_with_string(self.sequence, "(m/2)-1:")
        with self.assertRaises(ValueError):
            CurieUtil.slice_with_string(
                self.sequence, "import os; os.execv(\"/bin/ls\", [\".\"])")

    def test_chunk_iter(self):
        iterator = chunk_iter(range(10), 3)
        self.assertEqual(next(iterator), [0, 1, 2])
        self.assertEqual(next(iterator), [3, 4, 5])
        self.assertEqual(next(iterator), [6, 7, 8])
        self.assertEqual(next(iterator), [9])
        # FIX: source was truncated after the `with` line; the iterator must
        # be exhausted after the final partial chunk.
        with self.assertRaises(StopIteration):
            next(iterator)

Full Screen

Full Screen

turtleSlicer.py

Source:turtleSlicer.py Github

copy

Full Screen

#!/bin/python3
# Turtle Slicer: splits a large RDF/Turtle file into batches of `batchInit`
# lines, then re-aligns batch boundaries so that each output file starts at a
# subject line (one beginning with '<http://ninds.nih.gov'); leading lines of
# each batch are spilled back into the previous file.
from itertools import islice
import argparse
import configparser
import os
import CommonUtil
################################################
# main
################################################
if __name__ == '__main__':
    print("Welcome to Turtle Slicer !")
    # Optional --props argument; falls back to the bundled local.ini.
    argparser = argparse.ArgumentParser()
    argparser.add_argument('--props', help='Properties file path', required=False)
    args = argparser.parse_args()
    propFile = args.props
    if propFile is None:
        propFile = './/conf//local.ini'
    config = configparser.ConfigParser()
    config.read(propFile)
    defConf = config['DEFAULT']
    filePath = defConf['filePath']  # source file to slice
    outDir = defConf['outDir']  # directory for sliced output files
    batchInitStr = defConf['batchInit']
    # NOTE(review): defConf['batchInit'] raises KeyError when the key is
    # absent, so the None check never triggers the 5000 default — confirm
    # whether defConf.get('batchInit') was intended.
    if batchInitStr is not None:
        batchInit = int(batchInitStr)
    else:
        batchInit = 5000
    lineIndx = 0  # first line (0-based) of the current batch
    fileIndx = 0  # 1-based index of the current output file
    hasContent = True
    prevExportShort = None  # previous export path without extension
    while(hasContent):
        fileIndx += 1
        print("lineIndx = %d" % lineIndx)
        line_slice = (lineIndx, lineIndx + batchInit)
        # NOTE(review): the source file is reopened every iteration and the
        # handle is never closed (islice then skips lineIndx lines from the
        # start each time) — resource leak and O(n^2) reads; consider one
        # handle with consecutive islice(handle, batchInit) calls.
        srcFileRhndl = open(filePath, 'r')
        content = islice(srcFileRhndl, *line_slice)
        #print(content)
        #hasContent = content is not None
        slicedFileShortName = 'rdf_' + str(fileIndx)
        # First batch is written directly as '.prun'; later batches start as
        # '.init' drafts that still need their leading lines spilled back.
        if fileIndx == 1:
            slicedDraftExt = 'prun'
        else:
            slicedDraftExt = 'init'
        slicedDraftPathShort = outDir + '/' + slicedFileShortName
        slicedDraftPath = slicedDraftPathShort + '.' + slicedDraftExt
        slicedDraftWhndl = open(slicedDraftPath, 'w')
        slicedDraftWhndl.writelines(content)
        slicedDraftWhndl.close()
        if fileIndx == 1:
            prevExportShort = slicedDraftPathShort
            slicedPrunedPath = slicedDraftPath
        else:
            #spillLines = []
            slicedDraftRhndl = open(slicedDraftPath, 'r')
            #with open(slicedDraftPath, 'r') as ofr:
            initOutLines = slicedDraftRhndl.readlines()
            outFileLine = 0
            # Until the first subject line is seen, lines are "spilled" back
            # into the previous export so statements are not split in half.
            spill = True
            #with open(prevExport, 'a') as pef:
            prevExport = prevExportShort + '.prun'
            prevExportAhandle = open(prevExport, 'a')
            slicedPrunedShortPath = outDir + '/' + slicedFileShortName
            slicedPrunedPath = slicedPrunedShortPath + '.prun'
            slicedPrunedWhandle = open(slicedPrunedPath, 'w')
            for initOutLine in initOutLines:
                outFileLine += 1
                if initOutLine.startswith('<http://ninds.nih.gov'):
                    spill = False
                cleanLine = initOutLine.rstrip()
                if spill:
                    print(cleanLine, file=prevExportAhandle)
                else:
                    print(cleanLine, file=slicedPrunedWhandle)
            slicedPrunedWhandle.close()
            prevExportAhandle.close()
            slicedDraftRhndl.close()
            # Previous file is now complete: promote it from .prun to .ttl.
            finalExportPath = prevExportShort + '.ttl'
            os.rename(prevExport, finalExportPath)
            prevExportShort = slicedPrunedShortPath
            os.remove(slicedDraftPath)
        # An empty pruned file means the source is exhausted: stop looping.
        if os.stat(slicedPrunedPath).st_size == 0:
            #os.remove(slicedDraftPath)
            os.remove(slicedPrunedPath)
            #finalExportPath = slicedPrunedShortPath + '.ttl'
            #os.rename(slicedPrunedPath, finalExportPath)
            hasContent = False
        lineIndx += batchInit
    # Post-process all produced .ttl files.
    ttlFileDocList = CommonUtil.getDirectoryFileList(outDir, 'ttl')
    # NOTE(review): source is truncated here — the body of this loop is
    # missing from the scraped page.
    for ttlFileDoc in ttlFileDocList:

Full Screen

Full Screen

Automation Testing Tutorials

Learn to execute automation testing from scratch with the LambdaTest Learning Hub. Right from setting up the prerequisites to running your first automation test, to following best practices and diving deeper into advanced test scenarios, the LambdaTest Learning Hub compiles a list of step-by-step guides to help you become proficient with different test automation frameworks, e.g. Selenium, Cypress, TestNG, etc.

LambdaTest Learning Hubs:

YouTube

You could also refer to video tutorials over LambdaTest YouTube channel to get step by step demonstration from industry experts.

Run Selene automation tests on LambdaTest cloud grid

Perform automation testing on 3000+ real desktop and mobile devices online.

Try LambdaTest Now !!

Get 100 minutes of automation test minutes FREE!!

Next-Gen App & Browser Testing Cloud

Was this article helpful?

Helpful

Not Helpful