Best Python code snippet using slash
policy_operations.py
Source:policy_operations.py  
# --- truncated fragment: the start of this Operation method lies outside the
# --- scrape; its visible tail read:
#                 value, input_dtype, requires_uint8_scaling=self.requires_uint8_scaling
#             )
#         else:
#             transform_param = None
#         return self.as_transform(transform_param, p)


def value_to_transform_param(value, input_dtype="float32", requires_uint8_scaling=False):
    """Turn a 0-d tensor magnitude into a plain Python transform parameter.

    When the pipeline works on ``uint8`` images and the operation asked for
    uint8 scaling, the [0, 1] magnitude is rescaled to an integer in [0, 255].
    """
    value = value.item()
    if input_dtype == "uint8" and requires_uint8_scaling:
        value = int(value * 255.0)
    return value


def convert_value_range(value, new_range):
    """Linearly map ``value`` from [0, 1] onto ``new_range = (lower, upper)``."""
    lower, upper = new_range
    span = upper - lower
    return value * span + lower


class ShiftRGB(Operation):
    """Shift the selected RGB channels by the sampled magnitude."""

    def __init__(self, temperature, shift_r=False, shift_g=False, shift_b=False):
        super().__init__(temperature, value_range=(-1.0, 1.0), requires_uint8_scaling=True)
        self.shift_r = shift_r
        self.shift_g = shift_g
        self.shift_b = shift_b

    def apply_operation(self, input, value):
        # Channels that were not enabled at construction time get a 0.0 shift.
        return F.shift_rgb(
            input,
            r_shift=value if self.shift_r else 0.0,
            g_shift=value if self.shift_g else 0.0,
            b_shift=value if self.shift_b else 0.0,
        )

    def as_transform(self, value, p):
        # A degenerate (value, value) limit pins Albumentations to this exact shift.
        limit = (value, value)
        return A.RGBShift(
            r_shift_limit=limit if self.shift_r else 0.0,
            g_shift_limit=limit if self.shift_g else 0.0,
            b_shift_limit=limit if self.shift_b else 0.0,
            p=p,
        )


class RandomBrightness(Operation):
    """Additive brightness adjustment with magnitude in [-1, 1]."""

    def __init__(self, temperature):
        super().__init__(temperature, value_range=(-1.0, 1.0))

    def apply_operation(self, input, value):
        return F.brightness_adjust(input, beta=value)

    def as_transform(self, value, p):
        return A.RandomBrightnessContrast(brightness_limit=(value, value), contrast_limit=0, p=p)


class RandomContrast(Operation):
    """Multiplicative contrast adjustment with magnitude in [0, 10]."""

    def __init__(self, temperature):
        super().__init__(temperature, value_range=(0.0, 10.0))

    def apply_operation(self, input, value):
        return F.contrast_adjust(input, alpha=value)

    def as_transform(self, value, p):
        return A.RandomBrightnessContrast(brightness_limit=0, contrast_limit=(value, value), p=p)


class Solarize(Operation):
    """Invert pixels above the sampled threshold."""

    def __init__(self, temperature):
        super().__init__(temperature, requires_uint8_scaling=True)

    def apply_operation(self, input, value):
        return F.solarize(input, threshold=value)

    def as_transform(self, value, p):
        return A.Solarize(threshold=value, p=p)


class HorizontalFlip(Operation):
    """Magnitude-free spatial flip around the vertical axis."""

    def __init__(self, temperature):
        super().__init__(temperature, has_magnitude=False, is_spatial_level=True)

    def apply_operation(self, input, value):
        return F.hflip(input)

    def as_transform(self, value, p):
        return A.HorizontalFlip(p=p)


class VerticalFlip(Operation):
    """Magnitude-free spatial flip around the horizontal axis."""

    def __init__(self, temperature):
        super().__init__(temperature, has_magnitude=False, is_spatial_level=True)

    def apply_operation(self, input, value):
        return F.vflip(input)

    def as_transform(self, value, p):
        return A.VerticalFlip(p=p)


class ShiftX(Operation):
    """Horizontal translation by a fraction of the image width in [-1, 1]."""

    def __init__(self, temperature):
        super().__init__(temperature, value_range=(-1.0, 1.0), is_spatial_level=True)

    def apply_operation(self, input, value):
        return F.shift_x(input, dx=value)

    def as_transform(self, value, p):
        # Only the x-shift is active; every other limit is pinned to zero.
        return A.ShiftScaleRotate(
            shift_limit_x=(value, value),
            shift_limit_y=(0, 0),
            rotate_limit=(0, 0),
            scale_limit=(0, 0),
            p=p,
        )


class ShiftY(Operation):
    """Vertical translation by a fraction of the image height in [-1, 1]."""

    def __init__(self, temperature):
        super().__init__(temperature, value_range=(-1.0, 1.0), is_spatial_level=True)

    def apply_operation(self, input, value):
        return F.shift_y(input, dy=value)

    def as_transform(self, value, p):
        return A.ShiftScaleRotate(
            shift_limit_x=(0, 0),
            shift_limit_y=(value, value),
            rotate_limit=(0, 0),
            scale_limit=(0, 0),
            p=p,
        )


class Scale(Operation):
    """Isotropic scaling; the lower bound is nudged above zero to stay valid."""

    def __init__(self, temperature):
        super().__init__(temperature, value_range=(0 + 1e-8, 10.0), is_spatial_level=True)

    def apply_operation(self, input, value):
        return F.scale(input, scale=value)

    def as_transform(self, value, p):
        return A.ShiftScaleRotate(
            shift_limit_x=(0, 0),
            shift_limit_y=(0, 0),
            rotate_limit=(0, 0),
            scale_limit=(value, value),
            p=p,
        )


class Rotate(Operation):
    """Rotation by an angle (degrees) in [-180, 180]."""

    def __init__(self, temperature):
        super().__init__(temperature, value_range=(-180, 180), is_spatial_level=True)

    def apply_operation(self, input, value):
        return F.rotate(input, angle=value)

    def as_transform(self, value, p):
        return A.ShiftScaleRotate(
            shift_limit_x=(0, 0),
            shift_limit_y=(0, 0),
            rotate_limit=(value, value),
            scale_limit=(0, 0),
            p=p,
        )


class Cutout(Operation):
    """Base class for cutout-style operations.

    Remembers the last batch's image shape in a buffer so that
    ``as_transform`` can build an Albumentations transform with matching
    hole geometry after training.
    """

    def __init__(self, temperature, value_range=(0.0, 1.0)):
        super().__init__(temperature, value_range=value_range, ste=True)
        # Persisted via register_buffer so it survives state_dict round-trips.
        self.register_buffer("saved_image_shape", torch.Tensor([0, 0]).type(torch.int64))
        self.is_image_shape_saved = False

    def _save_image_shape(self, image_shape):
        # Warn only when a *previously saved* shape changes, not on first save.
        if not torch.equal(self.saved_image_shape, image_shape):
            if self.is_image_shape_saved:
                warnings.warn(
                    f"Shape of images in a batch changed between iterations "
                    f"from {self.saved_image_shape} to {image_shape}. "
                    f"This will affect the created Albumentations transform. "
                    f"The transform will use the shape {image_shape} to initialize its parameters",
                    RuntimeWarning,
                )
            self.is_image_shape_saved = True
            self.saved_image_shape = image_shape

    def apply_operation(self, input, value):
        image_shape = input.shape[-2:]
        self._save_image_shape(torch.tensor(image_shape).to(input.device))
        return self._apply_cutout(input, value, image_shape)

    def as_transform(self, value, p):
        image_shape = self.saved_image_shape
        return self._as_cutout_transform(value, p, image_shape)

    def _apply_cutout(self, input, value, image_shape):
        raise NotImplementedError

    def _as_cutout_transform(self, value, p, image_shape):
        raise NotImplementedError


class CutoutFixedNumberOfHoles(Cutout):
    """Cutout with a fixed hole count; the magnitude controls the hole size."""

    def __init__(self, temperature, num_holes=16):
        super().__init__(temperature)
        self.num_holes = num_holes

    def _calculate_hole_size(self, value, image_shape):
        height, width = image_shape
        min_size = min(height, width)
        # At least 1 px so a tiny magnitude still produces a visible hole.
        return max(int(min_size * value), 1)
    # --- (this class continues past the end of the scrape; next snippet: read.py)
Source:read.py  
"""Reading functions."""
import numpy as np
import pandas as pd
import os
import os.path as op


def read_trm(path, as_transform=True, inverse=False):
    """Read a transformation file.

    Parameters
    ----------
    path : string
        Path to the trm file
    as_transform : bool
        Get either the array as a usable (4, 4) array (True) or just the
        array contained in the file (False)
    inverse : bool | False
        Whether to inverse the transformation or not

    Returns
    -------
    tr : array_like
        Transformation array
    """
    tr = np.genfromtxt(path)
    if as_transform:
        # The first row of the file holds the translation and the next three
        # rows the (3, 3) rotation; rebuild the usual (4, 4) homogeneous
        # transformation [R | t; 0 0 0 1] from them.
        tr = np.vstack((np.c_[tr[1::, :], tr[0, :]], np.array([0, 0, 0, 1])))
    if inverse:
        tr = np.linalg.inv(tr)
    return tr


def read_contacts_trc(path):
    """Read the channels that are contained inside a TRC file.

    This function uses the neo python package.

    Parameters
    ----------
    path : string
        Path to the trc file

    Returns
    -------
    all_chan : list
        List of contacts
    units : list
        List of units per channels
    """
    import neo  # lazy import: neo is only needed for TRC reading
    # -------------------------------------------------------------------------
    # read the channels (lazy=True: headers only, no signal data loaded)
    micro = neo.MicromedIO(filename=path)
    seg = micro.read_segment(signal_group_mode='split-all', lazy=True)
    all_chan = [sig.name.replace(' ', '').strip().upper()
                for sig in seg.analogsignals]
    units = [str(sig.units) for sig in seg.analogsignals]
    return all_chan, units


def read_3dslicer_fiducial(path):
    """Read coordinates in a fiducial fcsv file.

    Parameters
    ----------
    path : string
        Path to the fcsv file

    Returns
    -------
    df : DataFrame
        DataFrame with the columns label, x, y and z
    """
    # The first two lines of an fcsv file are comment headers.
    return pd.read_csv(path, skiprows=[0, 1])[['label', 'x', 'y', 'z']]


def read_trc(bloc):
    """Read a TRC file.

    Parameters
    ----------
    bloc : str
        Path to the bloc to read

    Returns
    -------
    sf : float
        The sampling frequency
    raw : array_like
        Array of raw data of shape (n_seeg_chan, n_times)
    seeg_chan : array_like
        Array of sEEG channels names
    trig_event : array_like
        Events in the trigger channel of length (n_events,)
    trig_time : array_like
        Time associated to each event of length (n_events,)
    """
    assert op.isfile(bloc)
    # lazy imports, consistent with neo / h5py elsewhere in this module; this
    # also keeps the module importable when seegpy is not installed
    import neo
    from seegpy.contacts.utils import detect_seeg_contacts
    # -------------------------------------------------------------------------
    # SAMPLING FREQUENCY
    # -------------------------------------------------------------------------
    micro = neo.MicromedIO(filename=bloc)
    seg = micro.read_segment(signal_group_mode='split-all', lazy=True)
    sf = float(seg.analogsignals[0].sampling_rate)
    # -------------------------------------------------------------------------
    # CHANNELS
    # -------------------------------------------------------------------------
    # read again the channels in first bloc
    ch_names, ch_units = read_contacts_trc(bloc)
    # detect seeg / non-seeg channels
    is_seeg = detect_seeg_contacts(ch_names, ch_units=ch_units, seeg_unit='uV')
    seeg_chan = np.array(ch_names)[is_seeg]
    seeg_nb = np.arange(len(ch_names))[is_seeg]
    # -------------------------------------------------------------------------
    # TRIGGERS AND RAW
    # -------------------------------------------------------------------------
    # load the bloc (lazy=False this time: actually read the signal data)
    micro = neo.MicromedIO(filename=bloc)
    seg = micro.read_segment(signal_group_mode='split-all', lazy=False)
    # read the trigger
    _event = seg.events[0]
    trig_event = np.array(_event.labels).astype(int)
    trig_time = np.array(_event.times)
    # read the raw data, keeping only sEEG channels
    raw = []
    for c in seeg_nb:
        raw += [seg.analogsignals[c].squeeze()]
    raw = np.stack(raw)
    return sf, raw, seeg_chan.tolist(), trig_event, trig_time


def read_pramat(mat_root):
    """Read a Pragues file.

    Parameters
    ----------
    mat_root : str
        Path to the root matlab folder

    Returns
    -------
    sf : float
        The sampling frequency
    raw : array_like
        Array of raw data of shape (n_seeg_chan, n_times)
    seeg_chan : array_like
        Array of sEEG channels names
    trig_event : array_like
        Events in the trigger channel of length (n_events,)
    trig_time : array_like
        Time associated to each event of length (n_events,)
    """
    import h5py  # lazy import: h5py is only needed for this format
    assert op.isdir(mat_root)
    # -------------------------------------------------------------------------
    # BUILD PATH
    # -------------------------------------------------------------------------
    # header file (exactly one file expected in each sub-folder)
    path_head = op.join(mat_root, 'alignedData')
    files_head = os.listdir(path_head)
    assert len(files_head) == 1
    path_head = op.join(path_head, files_head[0])
    # raw file
    path_raw = op.join(mat_root, 'rawData', 'amplifierData')
    files_raw = os.listdir(path_raw)
    assert len(files_raw) == 1
    path_raw = op.join(path_raw, files_raw[0])
    # -------------------------------------------------------------------------
    # CHANNELS
    # -------------------------------------------------------------------------
    f = h5py.File(path_head, 'r')['H']
    fc = f['channels']
    # read channel names and types (stored as arrays of character codes)
    cn = [''.join(chr(i) for i in f[k[0]][:]) for k in list(fc['name'])]
    ct = [''.join(chr(i) for i in f[k[0]][:]) for k in list(fc['signalType'])]
    ch_names = np.array([k.upper() for k in cn])
    # get only sEEG channels
    is_seeg = np.array(ct) == 'SEEG'
    seeg_chan = np.array(ch_names)[is_seeg]
    # -------------------------------------------------------------------------
    # TRIGGER
    # -------------------------------------------------------------------------
    # sampling frequency
    f = h5py.File(path_raw, 'r')
    sf = float(np.array(f['srate'])[0][0])
    # load the time vector
    times = np.array(f['time']).squeeze()
    # load trigger data (last row of the raw matrix)
    trig_event = np.round(f['raw'][-1, :]).astype(int).squeeze()
    # keep only the first trigger changes (rm duplicates)
    first_only = np.where(trig_event[1::] - trig_event[:-1])[0] + 1
    trig_event = trig_event[first_only]
    times = times[first_only]
    # remove inter zeros
    nnz = trig_event != 0
    trig_event = trig_event[nnz]
    trig_time = times[nnz]
    # now extract the raw data
    raw = np.array(f['raw'][is_seeg, :])
    return sf, raw, seeg_chan.tolist(), trig_event, trig_time


if __name__ == '__main__':
    path_mat = '/run/media/etienne/Samsung_T5/BACKUPS/RAW/CausaL/PRAGUES_2019_PR7_day1/'
    # (the remainder of this demo block is truncated in the scrape)
Source:kinect_transform.py  
...36    i = 037    transforms = []38    while not rospy.is_shutdown() and i < 500:39        if head_pose and hand_pose:40            head_transform = tfx.transform(head_pose, parent='camera_rgb_optical_frame', child='ar_frame')#.as_transform()41            hand_transform = tfx.transform(hand_pose, parent='hand_kinect_optical_frame', child='ar_frame')#.as_transform()42            #head_to_ar = tfx.transform(transformer.lookupTransform('ar_frame', 'camera_rgb_optical_frame', rospy.Time()), parent='camera_rgb_optical_frame', child='ar_frame')43            #ar_to_hand = tfx.transform(transformer.lookupTransform('hand_kinect_optical_frame', 'ar_frame', rospy.Time()), parent='ar_frame', child='hand_kinect_optical_frame')44            head_to_hand = tfx.transform(head_transform.matrix * hand_transform.inverse().matrix, parent='camera_rgb_optical_frame', child='hand_kinect_optical_frame')45            #head_to_hand = tfx.transform(head_to_ar.inverse().matrix * ar_to_hand.matrix, parent='camera_rgb_optical_frame', child='hand_kinect_optical_frame')46            #rospy.loginfo(head_transform)47            #rospy.loginfo(hand_transform.inverse())48            #rospy.loginfo(head_to_ar)49            #rospy.loginfo(ar_to_hand)50            #print head_to_hand51            wrist_to_head = tfx.lookupTransform('r_gripper_tool_frame', 'camera_rgb_optical_frame')52            #wrist_to_head = tfx.transform(transformer.lookupTransform('r_gripper_tool_frame', 'camera_rgb_optical_frame', rospy.Time()), child='camera_rgb_optical_frame', parent='r_gripper_tool_frame')53            wrist_to_hand = tfx.transform(wrist_to_head.matrix * head_to_hand.matrix, parent='r_gripper_tool_frame', child='hand_kinect_optical_frame')54            #print wrist_to_head55            print wrist_to_hand...Learn to execute automation testing from scratch with LambdaTest Learning Hub. 
Right from setting up the prerequisites to run your first automation test, to following best practices and diving deeper into advanced test scenarios, the LambdaTest Learning Hub compiles a list of step-by-step guides to help you become proficient with different test automation frameworks, e.g., Selenium, Cypress, and TestNG.
You could also refer to video tutorials over LambdaTest YouTube channel to get step by step demonstration from industry experts.
Get 100 automation test minutes FREE!!
