How to use the __deepcopy__ method in autotest

Best Python code snippets using autotest_python. The project files below show __deepcopy__ in real-world use.
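
Before looking at the project files, it helps to recall the protocol itself: copy.deepcopy(obj) first checks whether obj defines a __deepcopy__(self, memo) method and, if it does, delegates the whole copy to it. The memo dict maps the ids of already-copied objects to their copies, which is how deepcopy preserves shared references and avoids infinite recursion on cycles. Here is a minimal, self-contained sketch (the Node class and its attributes are illustrative, not taken from any of the sources below):

import copy

class Node:
    """Toy container whose 'cache' attribute should be shared, not copied."""
    def __init__(self, payload):
        self.payload = payload
        self.cache = {}  # expensive scratch space, safe to alias between copies

    def __deepcopy__(self, memo):
        cls = self.__class__
        clone = cls.__new__(cls)   # bare instance; __init__ is not re-run
        memo[id(self)] = clone     # register early to break reference cycles
        clone.payload = copy.deepcopy(self.payload, memo)
        clone.cache = self.cache   # alias instead of copying
        return clone

original = Node(payload=[1, 2, 3])
duplicate = copy.deepcopy(original)
assert duplicate.payload == original.payload
assert duplicate.payload is not original.payload  # deep-copied
assert duplicate.cache is original.cache          # shared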

WaveNetWrapper.py

Source: WaveNetWrapper.py (GitHub)

...
    def init_hidden(self, batch_size=1):
        return None

    def parameters(self):
        return self.model.parameters()

# def __deepcopy__(self, memo):
#     """
#     Fix the deepcopy operation with WeightNorm layers by removing all
#     during copying. The code was posted as a solution at
#     https://github.com/pytorch/pytorch/issues/28594
#     """
#     # save and delete all weightnorm weights on self
#     weights = {}
#     for hook in self._forward_pre_hooks.values():
#         if isinstance(hook, WeightNorm):
#             weights[hook.name] = getattr(self, hook.name)
#             delattr(self, hook.name)
#     # remove this deepcopy method, restoring the object's original one if necessary
#     __deepcopy__ = self.__deepcopy__
#     if orig_deepcopy:
#         self.__deepcopy__ = orig_deepcopy
#     else:
#         del self.__deepcopy__
#     # actually do the copy
#     result = copy.deepcopy(self)
#     # restore weights and method on self
#     for name, value in weights.items():
#         setattr(self, name, value)
#     self.__deepcopy__ = __deepcopy__
#     return result

for layer in [Conv1d, ConvTranspose2d]:
    orig_deepcopy = getattr(layer, '__deepcopy__', None)

    def __deepcopy__(self, memo):
        """
        Fix the deepcopy operation with WeightNorm layers by removing all
        during copying. The code was posted as a solution at
        https://github.com/pytorch/pytorch/issues/28594
        """
        # save and delete all weightnorm weights on self
        weights = {}
        for hook in self._forward_pre_hooks.values():
            if isinstance(hook, WeightNorm):
                weights[hook.name] = getattr(self, hook.name)
                delattr(self, hook.name)
        # remove this deepcopy method, restoring the object's original one if necessary
        __deepcopy__ = self.__deepcopy__
        if orig_deepcopy:
...
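
A hedged usage sketch for the patch above, assuming Conv1d was imported from torch.nn and the for-loop has installed the replacement method: on PyTorch versions affected by pytorch/pytorch#28594, deep-copying a weight-normalized module fails with a RuntimeError because the weight attribute recomputed by the WeightNorm pre-hook is not a graph leaf; with the patch in place the copy goes through.

import copy
import torch.nn as nn
from torch.nn.utils import weight_norm

# weight_norm replaces 'weight' with a tensor recomputed by a forward
# pre-hook, which is what breaks the default deepcopy on affected versions.
layer = weight_norm(nn.Conv1d(8, 8, kernel_size=3))
clone = copy.deepcopy(layer)  # succeeds once the patched __deepcopy__ is installed
assert clone is not layer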

tcnlib.py

Source: tcnlib.py (GitHub)

...
from torch.nn.utils import weight_norm
from torch.nn.utils.weight_norm import WeightNorm
import copy

orig_deepcopy = getattr(nn.Conv1d, '__deepcopy__', None)

def __deepcopy__(self, memo):
    # save and delete all weightnorm weights on self
    weights = {}
    for hook in self._forward_pre_hooks.values():
        if isinstance(hook, WeightNorm):
            weights[hook.name] = getattr(self, hook.name)
            delattr(self, hook.name)
    # remove this deepcopy method, restoring the object's original one if necessary
    __deepcopy__ = self.__deepcopy__
    if orig_deepcopy:
        self.__deepcopy__ = orig_deepcopy
    else:
        del self.__deepcopy__
    # actually do the copy
    result = copy.deepcopy(self)
...
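
The excerpt is cut off right after result = copy.deepcopy(self); presumably (an assumption, since the remainder is not visible) it restores the saved weights and ends by assigning the function to nn.Conv1d.__deepcopy__, just like the commented-out version in WaveNetWrapper.py above. What makes that assignment work is that copy.deepcopy looks __deepcopy__ up on the instance, so installing a function on the class redirects copying for every existing and future instance. A self-contained sketch of that mechanism (the Greeter class is invented for illustration):

import copy

class Greeter:
    def __init__(self, name):
        self.name = name

def _custom_deepcopy(self, memo):
    # class-level hook: copy.deepcopy calls this as a bound method
    clone = Greeter(self.name + " (copy)")
    memo[id(self)] = clone
    return clone

# installing the hook on the class affects all instances at once
Greeter.__deepcopy__ = _custom_deepcopy

g = Greeter("alice")
assert copy.deepcopy(g).name == "alice (copy)"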

utils.py

Source: utils.py (GitHub)

#!/usr/bin/env python3
# Copyright (c) Meta Platforms, Inc. and affiliates.
# All rights reserved.
#
# This source code is licensed under the BSD-style license found in the
# LICENSE file in the root directory of this source tree.
from collections import OrderedDict
from typing import Optional, List, Set, Union

import torch

from torchrec.distributed.types import ShardedModule


def append_prefix(prefix: str, name: str) -> str:
    """
    Appends provided prefix to provided name.
    """
    if prefix != "" and name != "":
        return prefix + "." + name
    else:
        return prefix + name


def filter_state_dict(
    state_dict: "OrderedDict[str, torch.Tensor]", name: str
) -> "OrderedDict[str, torch.Tensor]":
    """
    Filters state dict for keys that start with provided name.
    Strips provided name from beginning of key in the resulting state dict.

    Args:
        state_dict (OrderedDict[str, torch.Tensor]): input state dict to filter.
        name (str): name to filter from state dict keys.

    Returns:
        OrderedDict[str, torch.Tensor]: filtered state dict.
    """
    filtered_state_dict = OrderedDict()
    for key, value in state_dict.items():
        if key.startswith(name):
            # + 1 to length is to remove the '.' after the key
            filtered_state_dict[key[len(name) + 1 :]] = value
    return filtered_state_dict


def _get_unsharded_module_names_helper(
    model: torch.nn.Module,
    path: str,
    unsharded_module_names: Set[str],
) -> bool:
    sharded_children = set()
    for name, child in model.named_children():
        curr_path = path + name
        if isinstance(child, ShardedModule):
            sharded_children.add(name)
        else:
            child_sharded = _get_unsharded_module_names_helper(
                child,
                curr_path + ".",
                unsharded_module_names,
            )
            if child_sharded:
                sharded_children.add(name)
    if len(sharded_children) > 0:
        for name, _ in model.named_children():
            if name not in sharded_children:
                unsharded_module_names.add(path + name)
    return len(sharded_children) > 0


def get_unsharded_module_names(model: torch.nn.Module) -> List[str]:
    """
    Retrieves names of top level modules that do not contain any sharded sub-modules.

    Args:
        model (torch.nn.Module): model to retrieve unsharded module names from.

    Returns:
        List[str]: list of names of modules that don't have sharded sub-modules.
    """
    unsharded_module_names: Set[str] = set()
    _get_unsharded_module_names_helper(
        model,
        "",
        unsharded_module_names,
    )
    return list(unsharded_module_names)


class sharded_model_copy:
    """
    Allows copying of DistributedModelParallel module to a target device.

    Example::

        # Copying model to CPU.
        m = DistributedModelParallel(m)
        with sharded_model_copy("cpu"):
            m_cpu = copy.deepcopy(m)
    """

    def __init__(self, device: Optional[Union[str, int, torch.device]]) -> None:
        self.device = device

    def __enter__(self) -> None:
        # pyre-ignore [16]
        self.t_copy_save_ = torch.Tensor.__deepcopy__
        # pyre-ignore [16]
        self.p_copy_save_ = torch.nn.Parameter.__deepcopy__
        device = self.device

        # pyre-ignore [2, 3, 53]
        def _tensor_copy(tensor, memo):
            if tensor.device != device:
                return tensor.detach().to(device)
            else:
                return tensor.detach().clone()

        # pyre-ignore [2, 3]
        def _no_copy(obj, memo):
            return obj

        _copy_or_not = _tensor_copy if self.device is not None else _no_copy

        # pyre-ignore [2, 3, 53]
        def _param_copy(param, memo):
            return torch.nn.Parameter(
                _copy_or_not(param, memo), requires_grad=param.requires_grad
            )

        # pyre-ignore [16]
        torch.Tensor.__deepcopy__ = _copy_or_not
        torch.nn.Parameter.__deepcopy__ = _param_copy
        torch._C._distributed_c10d.ProcessGroupNCCL.__deepcopy__ = _no_copy
        torch._C._distributed_c10d.ProcessGroupGloo.__deepcopy__ = _no_copy
        torch._C._distributed_c10d.Work.__deepcopy__ = _no_copy
        # pyre-ignore [16]
        torch.cuda.streams.Stream.__deepcopy__ = _no_copy

    # pyre-ignore [2]
    def __exit__(self, exc_type, exc_val, exc_tb) -> None:
        # pyre-ignore [16]
        torch.Tensor.__deepcopy__ = self.t_copy_save_
        # pyre-ignore [16]
        torch.nn.Parameter.__deepcopy__ = self.p_copy_save_
        torch._C._distributed_c10d.ProcessGroupNCCL.__deepcopy__ = None
        torch._C._distributed_c10d.ProcessGroupGloo.__deepcopy__ = None
        torch._C._distributed_c10d.Work.__deepcopy__ = None
        # pyre-ignore [16]
...
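
sharded_model_copy works by swapping in class-level __deepcopy__ hooks for the duration of the with-block and restoring the originals in __exit__. A stripped-down sketch of the same mechanism for plain tensors, independent of torchrec (the context-manager name here is made up for illustration):

import copy
import torch

class tensor_deepcopy_to:
    """Route every Tensor deepcopy to a target device while active."""
    def __init__(self, device):
        self.device = torch.device(device)

    def __enter__(self):
        self._saved = torch.Tensor.__deepcopy__
        device = self.device

        def _copy(tensor, memo):
            # mirror the _tensor_copy logic above: move across devices,
            # clone when already on the target device
            if tensor.device != device:
                return tensor.detach().to(device)
            return tensor.detach().clone()

        torch.Tensor.__deepcopy__ = _copy

    def __exit__(self, exc_type, exc_val, exc_tb):
        # always restore the original hook, even if the block raised
        torch.Tensor.__deepcopy__ = self._saved

t = torch.ones(2, 2)
with tensor_deepcopy_to("cpu"):
    t_cpu = copy.deepcopy(t)
assert t_cpu.device.type == "cpu"

As in the torchrec version, the hook is process-global, so nothing else should be deep-copying tensors concurrently while the context is active.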

copy.py

Source: copy.py (GitHub)

from copy import deepcopy
from .deco import accepts


def empty_copy(obj):
    """Create empty copy of object.

    Parameters
    ----------
    obj : some python object

    Returns
    -------
    obj :
        Empty copy of obj.
    """
    class Empty(obj.__class__):
        def __init__(self):
            pass
    newcopy = Empty()
    newcopy.__class__ = obj.__class__
    return newcopy


@accepts("s", (list, tuple))
def deepcopy_with_sharing(obj, shared_attributes, memo=None):
    """Deepcopy an object, except for a given list of attributes.

    Those attributes are shared between the original object and its copy.
    From: https://stackoverflow.com/q/1500718

    Parameters
    ----------
    obj : some object
    shared_attributes : list
        A list of strings identifying the attributes that should be shared
        between the original and its copy.
    memo : dict
        The dictionary passed into __deepcopy__. Ignore this argument if
        not calling from within __deepcopy__.
    """
    shared_attributes = {k: getattr(obj, k) for k in shared_attributes}
    if hasattr(obj, '__deepcopy__'):
        # Do hack to prevent infinite recursion in call to deepcopy
        deepcopy_method = obj.__deepcopy__
        obj.__deepcopy__ = None
    for attr in shared_attributes:
        try:
            del obj.__dict__[attr]
        except KeyError:
            pass
    clone = deepcopy(obj)
    for attr, val in shared_attributes.items():
        setattr(obj, attr, val)
        setattr(clone, attr, val)
    if hasattr(obj, '__deepcopy__'):
        # Undo hack
        obj.__deepcopy__ = deepcopy_method
        del clone.__deepcopy__
    return clone


@accepts("s", list)
def create_clone(obj, keep_attributes, sharing=False):
    """Create clone of object.

    Attributes in keep_attributes are deepcopied, all other discarded.
    """
    del_attributes = [item for item in obj.__dict__.keys() if item not in
                      keep_attributes]
    clone = deepcopy_with_sharing(obj, shared_attributes=del_attributes,
                                  memo=None)
    if sharing is False:
        for attr in del_attributes:
            if attr in obj.__dict__.keys():
                clone.__dict__[attr] = None
...
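
A hedged usage sketch for deepcopy_with_sharing, assuming the module and its deco.accepts decorator are importable (the Experiment class is invented for illustration): attributes listed in shared_attributes end up aliased between the original and the clone, while everything else is deep-copied.

class Experiment:
    def __init__(self):
        self.results = [1, 2, 3]                 # should be private to each copy
        self.dataset = {"x": list(range(1000))}  # large, safe to share

exp = Experiment()
clone = deepcopy_with_sharing(exp, shared_attributes=["dataset"])

assert clone.dataset is exp.dataset      # aliased, not copied
assert clone.results == exp.results
assert clone.results is not exp.results  # deep-copied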
