How to use the load method in stryker-parent

Code snippets from open-source projects that use a load method, followed by examples of calling load from stryker-parent.
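All of the stryker-parent examples on this page follow the same pattern: require stryker-parent and call its load function with the name of a module, plugin, or configuration file. A minimal sketch of that pattern follows; stryker-parent does not document a public load API, so the function name, its argument, and its return value are assumptions taken from the snippets on this page rather than a confirmed interface.

// Minimal sketch, assuming stryker-parent exposes a load(name) function that
// resolves the named module or config file (as the snippets below suggest).
var parent = require('stryker-parent');

// 'stryker.conf.js' is an illustrative argument taken from the examples below;
// substitute whichever module or config file you actually need.
var loaded = parent.load('stryker.conf.js');

console.log('load returned:', typeof loaded);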

test_base.py

Source: test_base.py (GitHub)


import os
import shutil
import tempfile
import warnings
import numpy
from pickle import loads
from pickle import dumps
from functools import partial
import pytest
import numpy as np
from sklearn.datasets import get_data_home
from sklearn.datasets import clear_data_home
from sklearn.datasets import load_files
from sklearn.datasets import load_sample_images
from sklearn.datasets import load_sample_image
from sklearn.datasets import load_digits
from sklearn.datasets import load_diabetes
from sklearn.datasets import load_linnerud
from sklearn.datasets import load_iris
from sklearn.datasets import load_breast_cancer
from sklearn.datasets import load_boston
from sklearn.datasets import load_wine
from sklearn.utils import Bunch
from sklearn.datasets.tests.test_common import check_return_X_y
from sklearn.datasets.tests.test_common import check_as_frame
from sklearn.datasets.tests.test_common import check_pandas_dependency_message
from sklearn.externals._pilutil import pillow_installed
from sklearn.utils import IS_PYPY
def _remove_dir(path):
    if os.path.isdir(path):
        shutil.rmtree(path)
@pytest.fixture(scope="module")
def data_home(tmpdir_factory):
    tmp_file = str(tmpdir_factory.mktemp("scikit_learn_data_home_test"))
    yield tmp_file
    _remove_dir(tmp_file)
@pytest.fixture(scope="module")
def load_files_root(tmpdir_factory):
    tmp_file = str(tmpdir_factory.mktemp("scikit_learn_load_files_test"))
    yield tmp_file
    _remove_dir(tmp_file)
@pytest.fixture
def test_category_dir_1(load_files_root):
    test_category_dir1 = tempfile.mkdtemp(dir=load_files_root)
    sample_file = tempfile.NamedTemporaryFile(dir=test_category_dir1,
                                              delete=False)
    sample_file.write(b"Hello World!\n")
    sample_file.close()
    yield str(test_category_dir1)
    _remove_dir(test_category_dir1)
@pytest.fixture
def test_category_dir_2(load_files_root):
    test_category_dir2 = tempfile.mkdtemp(dir=load_files_root)
    yield str(test_category_dir2)
    _remove_dir(test_category_dir2)
def test_data_home(data_home):
    # get_data_home will point to a pre-existing folder
    data_home = get_data_home(data_home=data_home)
    assert data_home == data_home
    assert os.path.exists(data_home)
    # clear_data_home will delete both the content and the folder it-self
    clear_data_home(data_home=data_home)
    assert not os.path.exists(data_home)
    # if the folder is missing it will be created again
    data_home = get_data_home(data_home=data_home)
    assert os.path.exists(data_home)
def test_default_empty_load_files(load_files_root):
    res = load_files(load_files_root)
    assert len(res.filenames) == 0
    assert len(res.target_names) == 0
    assert res.DESCR is None
def test_default_load_files(test_category_dir_1, test_category_dir_2,
                            load_files_root):
    if IS_PYPY:
        pytest.xfail('[PyPy] fails due to string containing NUL characters')
    res = load_files(load_files_root)
    assert len(res.filenames) == 1
    assert len(res.target_names) == 2
    assert res.DESCR is None
    assert res.data == [b"Hello World!\n"]
def test_load_files_w_categories_desc_and_encoding(
        test_category_dir_1, test_category_dir_2, load_files_root):
    if IS_PYPY:
        pytest.xfail('[PyPy] fails due to string containing NUL characters')
    category = os.path.abspath(test_category_dir_1).split('/').pop()
    res = load_files(load_files_root, description="test",
                     categories=category, encoding="utf-8")
    assert len(res.filenames) == 1
    assert len(res.target_names) == 1
    assert res.DESCR == "test"
    assert res.data == ["Hello World!\n"]
def test_load_files_wo_load_content(
        test_category_dir_1, test_category_dir_2, load_files_root):
    res = load_files(load_files_root, load_content=False)
    assert len(res.filenames) == 1
    assert len(res.target_names) == 2
    assert res.DESCR is None
    assert res.get('data') is None
def test_load_sample_images():
    try:
        res = load_sample_images()
        assert len(res.images) == 2
        assert len(res.filenames) == 2
        images = res.images
        # assert is china image
        assert np.all(images[0][0, 0, :] ==
                      np.array([174, 201, 231], dtype=np.uint8))
        # assert is flower image
        assert np.all(images[1][0, 0, :] ==
                      np.array([2, 19, 13], dtype=np.uint8))
        assert res.DESCR
    except ImportError:
        warnings.warn("Could not load sample images, PIL is not available.")
def test_load_digits():
    digits = load_digits()
    assert digits.data.shape == (1797, 64)
    assert numpy.unique(digits.target).size == 10
    # test return_X_y option
    check_return_X_y(digits, partial(load_digits))
def test_load_digits_n_class_lt_10():
    digits = load_digits(n_class=9)
    assert digits.data.shape == (1617, 64)
    assert numpy.unique(digits.target).size == 9
def test_load_sample_image():
    try:
        china = load_sample_image('china.jpg')
        assert china.dtype == 'uint8'
        assert china.shape == (427, 640, 3)
    except ImportError:
        warnings.warn("Could not load sample images, PIL is not available.")
def test_load_missing_sample_image_error():
    if pillow_installed:
        with pytest.raises(AttributeError):
            load_sample_image('blop.jpg')
    else:
        warnings.warn("Could not load sample images, PIL is not available.")
def test_load_diabetes():
    res = load_diabetes()
    assert res.data.shape == (442, 10)
    assert res.target.size, 442
    assert len(res.feature_names) == 10
    assert res.DESCR
    # test return_X_y option
    check_return_X_y(res, partial(load_diabetes))
def test_load_linnerud():
    res = load_linnerud()
    assert res.data.shape == (20, 3)
    assert res.target.shape == (20, 3)
    assert len(res.target_names) == 3
    assert res.DESCR
    assert os.path.exists(res.data_filename)
    assert os.path.exists(res.target_filename)
    # test return_X_y option
    check_return_X_y(res, partial(load_linnerud))
def test_load_iris():
    res = load_iris()
    assert res.data.shape == (150, 4)
    assert res.target.size == 150
    assert res.target_names.size == 3
    assert res.DESCR
    assert os.path.exists(res.filename)
    # test return_X_y option
    check_return_X_y(res, partial(load_iris))
def test_load_wine():
    res = load_wine()
    assert res.data.shape == (178, 13)
    assert res.target.size == 178
    assert res.target_names.size == 3
    assert res.DESCR
    # test return_X_y option
    check_return_X_y(res, partial(load_wine))
def test_load_breast_cancer():
    res = load_breast_cancer()
    assert res.data.shape == (569, 30)
    assert res.target.size == 569
    assert res.target_names.size == 2
    assert res.DESCR
    assert os.path.exists(res.filename)
    # test return_X_y option
    check_return_X_y(res, partial(load_breast_cancer))
@pytest.mark.parametrize("loader_func, data_dtype, target_dtype", [
    (load_breast_cancer, np.float64, np.int64),
    (load_diabetes, np.float64, np.float64),
    (load_digits, np.float64, np.int64),
    (load_iris, np.float64, np.int64),
    (load_linnerud, np.float64, np.float64),
    (load_wine, np.float64, np.int64),
])
def test_toy_dataset_as_frame(loader_func, data_dtype, target_dtype):
    default_result = loader_func()
    check_as_frame(default_result, partial(loader_func),
                   expected_data_dtype=data_dtype,
                   expected_target_dtype=target_dtype)
@pytest.mark.parametrize("loader_func", [
    load_breast_cancer,
    load_diabetes,
    load_digits,
    load_iris,
    load_linnerud,
    load_wine,
])
def test_toy_dataset_as_frame_no_pandas(loader_func):
    check_pandas_dependency_message(loader_func)
def test_load_boston():
    res = load_boston()
    assert res.data.shape == (506, 13)
    assert res.target.size == 506
    assert res.feature_names.size == 13
    assert res.DESCR
    assert os.path.exists(res.filename)
    # test return_X_y option
    check_return_X_y(res, partial(load_boston))
def test_loads_dumps_bunch():
    bunch = Bunch(x="x")
    bunch_from_pkl = loads(dumps(bunch))
    bunch_from_pkl.x = "y"
    assert bunch_from_pkl['x'] == bunch_from_pkl.x
def test_bunch_pickle_generated_with_0_16_and_read_with_0_17():
    bunch = Bunch(key='original')
    # This reproduces a problem when Bunch pickles have been created
    # with scikit-learn 0.16 and are read with 0.17. Basically there
    # is a surprising behaviour because reading bunch.key uses
    # bunch.__dict__ (which is non empty for 0.16 Bunch objects)
    # whereas assigning into bunch.key uses bunch.__setattr__. See
    # https://github.com/scikit-learn/scikit-learn/issues/6196 for
    # more details
    bunch.__dict__['key'] = 'set from __dict__'
    bunch_from_pkl = loads(dumps(bunch))
    # After loading from pickle the __dict__ should have been ignored
    assert bunch_from_pkl.key == 'original'
    assert bunch_from_pkl['key'] == 'original'
    # Making sure that changing the attr does change the value
    # associated with __getitem__ as well
    bunch_from_pkl.key = 'changed'
    assert bunch_from_pkl.key == 'changed'
    assert bunch_from_pkl['key'] == 'changed'
def test_bunch_dir():
    # check that dir (important for autocomplete) shows attributes
    data = load_iris()
...


load_balancer_load_balancing_rules_operations.py

Source: load_balancer_load_balancing_rules_operations.py (GitHub)


# coding=utf-8
# --------------------------------------------------------------------------
# Copyright (c) Microsoft Corporation. All rights reserved.
# Licensed under the MIT License. See License.txt in the project root for
# license information.
#
# Code generated by Microsoft (R) AutoRest Code Generator.
# Changes may cause incorrect behavior and will be lost if the code is
# regenerated.
# --------------------------------------------------------------------------
import uuid
from msrest.pipeline import ClientRawResponse
from msrestazure.azure_exceptions import CloudError
from .. import models
class LoadBalancerLoadBalancingRulesOperations(object):
    """LoadBalancerLoadBalancingRulesOperations operations.
    :param client: Client for service requests.
    :param config: Configuration of service client.
    :param serializer: An object model serializer.
    :param deserializer: An objec model deserializer.
    :ivar api_version: Client API version. Constant value: "2017-08-01".
    """
    def __init__(self, client, config, serializer, deserializer):
        self._client = client
        self._serialize = serializer
        self._deserialize = deserializer
        self.api_version = "2017-08-01"
        self.config = config
    def list(
            self, resource_group_name, load_balancer_name, custom_headers=None, raw=False, **operation_config):
        """Gets all the load balancing rules in a load balancer.
        :param resource_group_name: The name of the resource group.
        :type resource_group_name: str
        :param load_balancer_name: The name of the load balancer.
        :type load_balancer_name: str
        :param dict custom_headers: headers that will be added to the request
        :param bool raw: returns the direct response alongside the
         deserialized response
        :param operation_config: :ref:`Operation configuration
         overrides<msrest:optionsforoperations>`.
        :return: An iterator like instance of :class:`LoadBalancingRule
         <azure.mgmt.network.v2017_08_01.models.LoadBalancingRule>`
        :rtype: :class:`LoadBalancingRulePaged
         <azure.mgmt.network.v2017_08_01.models.LoadBalancingRulePaged>`
        :raises: :class:`CloudError<msrestazure.azure_exceptions.CloudError>`
        """
        def internal_paging(next_link=None, raw=False):
            if not next_link:
                # Construct URL
                url = '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.Network/loadBalancers/{loadBalancerName}/loadBalancingRules'
                path_format_arguments = {
                    'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str'),
                    'loadBalancerName': self._serialize.url("load_balancer_name", load_balancer_name, 'str'),
                    'subscriptionId': self._serialize.url("self.config.subscription_id", self.config.subscription_id, 'str')
                }
                url = self._client.format_url(url, **path_format_arguments)
                # Construct parameters
                query_parameters = {}
                query_parameters['api-version'] = self._serialize.query("self.api_version", self.api_version, 'str')
            else:
                url = next_link
                query_parameters = {}
            # Construct headers
            header_parameters = {}
            header_parameters['Content-Type'] = 'application/json; charset=utf-8'
            if self.config.generate_client_request_id:
                header_parameters['x-ms-client-request-id'] = str(uuid.uuid1())
            if custom_headers:
                header_parameters.update(custom_headers)
            if self.config.accept_language is not None:
                header_parameters['accept-language'] = self._serialize.header("self.config.accept_language", self.config.accept_language, 'str')
            # Construct and send request
            request = self._client.get(url, query_parameters)
            response = self._client.send(
                request, header_parameters, **operation_config)
            if response.status_code not in [200]:
                exp = CloudError(response)
                exp.request_id = response.headers.get('x-ms-request-id')
                raise exp
            return response
        # Deserialize response
        deserialized = models.LoadBalancingRulePaged(internal_paging, self._deserialize.dependencies)
        if raw:
            header_dict = {}
            client_raw_response = models.LoadBalancingRulePaged(internal_paging, self._deserialize.dependencies, header_dict)
            return client_raw_response
        return deserialized
    def get(
            self, resource_group_name, load_balancer_name, load_balancing_rule_name, custom_headers=None, raw=False, **operation_config):
        """Gets the specified load balancer load balancing rule.
        :param resource_group_name: The name of the resource group.
        :type resource_group_name: str
        :param load_balancer_name: The name of the load balancer.
        :type load_balancer_name: str
        :param load_balancing_rule_name: The name of the load balancing rule.
        :type load_balancing_rule_name: str
        :param dict custom_headers: headers that will be added to the request
        :param bool raw: returns the direct response alongside the
         deserialized response
        :param operation_config: :ref:`Operation configuration
         overrides<msrest:optionsforoperations>`.
        :return: :class:`LoadBalancingRule
         <azure.mgmt.network.v2017_08_01.models.LoadBalancingRule>` or
         :class:`ClientRawResponse<msrest.pipeline.ClientRawResponse>` if
         raw=true
        :rtype: :class:`LoadBalancingRule
         <azure.mgmt.network.v2017_08_01.models.LoadBalancingRule>` or
         :class:`ClientRawResponse<msrest.pipeline.ClientRawResponse>`
        :raises: :class:`CloudError<msrestazure.azure_exceptions.CloudError>`
        """
        # Construct URL
        url = '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.Network/loadBalancers/{loadBalancerName}/loadBalancingRules/{loadBalancingRuleName}'
        path_format_arguments = {
            'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str'),
            'loadBalancerName': self._serialize.url("load_balancer_name", load_balancer_name, 'str'),
            'loadBalancingRuleName': self._serialize.url("load_balancing_rule_name", load_balancing_rule_name, 'str'),
            'subscriptionId': self._serialize.url("self.config.subscription_id", self.config.subscription_id, 'str')
        }
        url = self._client.format_url(url, **path_format_arguments)
        # Construct parameters
        query_parameters = {}
        query_parameters['api-version'] = self._serialize.query("self.api_version", self.api_version, 'str')
        # Construct headers
        header_parameters = {}
        header_parameters['Content-Type'] = 'application/json; charset=utf-8'
        if self.config.generate_client_request_id:
            header_parameters['x-ms-client-request-id'] = str(uuid.uuid1())
        if custom_headers:
            header_parameters.update(custom_headers)
        if self.config.accept_language is not None:
            header_parameters['accept-language'] = self._serialize.header("self.config.accept_language", self.config.accept_language, 'str')
        # Construct and send request
        request = self._client.get(url, query_parameters)
        response = self._client.send(request, header_parameters, **operation_config)
        if response.status_code not in [200]:
            exp = CloudError(response)
            exp.request_id = response.headers.get('x-ms-request-id')
            raise exp
        deserialized = None
        if response.status_code == 200:
            deserialized = self._deserialize('LoadBalancingRule', response)
        if raw:
            client_raw_response = ClientRawResponse(deserialized, response)
            return client_raw_response
...


yaml_config.py

Source: yaml_config.py (GitHub)


...
    load_sftp_server()
    load_sftp_client()
def configure_logging(path: Path):
    with path.open('rt') as f:
        config = yaml.safe_load(f.read())
    for handler in config['handlers'].values():
        filename = handler.get('filename')
        if filename:
            filename.parent.mkdir(parents=True, exist_ok=True)
    dictConfig(config)
def node_from_file(path: Union[str, Path], paths: Dict[str, Union[str, Path]] = None) -> \
        Union[ReceiverType, SenderType]:
    stream = open(path, 'r')
    parent = Path(path).parent
    return node_from_config(stream, paths=paths, parent=parent)
def node_from_config(conf: TextIO, paths: Dict[str, Union[str, Path]] = None, parent: Path = None) -> \
        Union[ReceiverType, SenderType]:
    paths = paths or get_paths()
    paths['this'] = parent
    load_path(paths)
    configs = list(yaml.safe_load_all(conf))
    node = configs[0]
    if len(configs) > 1:
        misc_config = configs[1]
        log_config_file = misc_config.pop('log_config_file')
        node_name = misc_config.pop('node_name', None)  # 3.8 assignment expression
        if not node_name:
            node_name = node.full_name.replace(' ', '_').replace(':', '_').lower()
        paths['node'] = node_name
        paths['name'] = node.name.replace(' ', '_').lower()
        if '_' in paths['name']:
            paths['type'] = paths['name'].split('_')[1]
        else:
            paths['type'] = paths['name']
        paths['host'] = getattr(node, 'host', '').replace(':', '').lower()
        paths['port'] = getattr(node, 'port', '')
        paths['pipe_path'] = getattr(node, 'path', '')
        paths['pid'] = str(os.getpid())
        configure_logging(log_config_file)
        settings.APP_CONFIG.update(misc_config)
    return node
def node_from_config_file(conf_path: Union[Path, str], paths: Dict[str, Union[str, Path]] = None) -> Union[ReceiverType, SenderType]:
    f = open(str(conf_path), 'r')
    parent_path = Path(conf_path).parent
    return node_from_config(f, paths=paths, parent=parent_path)
def server_from_config_file(conf_path: Union[Path, str], paths: Dict[str, Union[str, Path]] = None) -> ReceiverType:
    return node_from_config_file(conf_path, paths=paths)
def client_from_config_file(conf_path: Union[Path, str], paths: Dict[str, Union[str, Path]] = None) -> SenderType:
    return node_from_config_file(conf_path, paths=paths)
@dataclass
class SignalServerManager:
    conf_path: Union[Path, str]
    server: ReceiverType = field(init=False)
    notify_pid: int = None
    paths: Dict[str, Union[str, Path]] = None
    logger: LoggerType = field(default_factory=get_logger_receiver)
    _last_modified_time: float = field(init=False, default=None)
    def __post_init__(self):
        self._last_modified_time = self.modified_time
        self._stop_event = asyncio.Event()
        self._restart_event = asyncio.Event()
        self._restart_event.set()
        self.server = self.get_server()
        loop_on_close_signal(self.close, self.logger)
        loop_on_user1_signal(self.check_reload, self.logger)
    @property
    def modified_time(self) -> float:
        return os.stat(self.conf_path).st_mtime
    def close(self) -> None:
        self._stop_event.set()
    def check_reload(self) -> None:
        if os.path.exists(self.conf_path):
            last_modified = self.modified_time
            if last_modified > self._last_modified_time:
                send_reloading()
                self._last_modified_time = last_modified
                self.logger.info('Restarting server')
                send_status('Restarting server')
                self._restart_event.set()
            else:
                send_ready()
        else:
            self._stop_event.set()
    def server_is_started(self):
        return self.server.is_started
...


utils.py

Source: utils.py (GitHub)


#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
import os
from wsmanclient.resources import uris
FAKE_ENDPOINT = {
    'host': '1.2.3.4',
    'port': '443',
    'path': '/wsman',
    'protocol': 'https',
    'username': 'admin',
    'password': 's3cr3t'
}
def load_wsman_xml(name):
    """Helper function to load a WSMan XML response from a file."""
    with open(os.path.join(os.path.dirname(__file__), 'wsman_mocks',
                           '%s.xml' % name), 'r') as f:
        xml_body = f.read()
    return xml_body
WSManEnumerations = {
    'context': [
        load_wsman_xml('wsman-enum_context-1'),
        load_wsman_xml('wsman-enum_context-2'),
        load_wsman_xml('wsman-enum_context-3'),
        load_wsman_xml('wsman-enum_context-4'),
    ]
}
BIOSEnumerations = {
    uris.DCIM_BIOSEnumeration: {
        'ok': load_wsman_xml('bios_enumeration-enum-ok')
    },
    uris.DCIM_BIOSInteger: {
        'mutable': load_wsman_xml('bios_integer-enum-mutable'),
        'ok': load_wsman_xml('bios_integer-enum-ok')
    },
    uris.DCIM_BIOSString: {
        'colliding': load_wsman_xml('bios_string-enum-colliding'),
        'ok': load_wsman_xml('bios_string-enum-ok'),
        'regexp': load_wsman_xml('bios_string-enum-regexp')
    },
    uris.DCIM_BootConfigSetting: {
        'ok': load_wsman_xml('boot_config_setting-enum-ok')
    },
    uris.DCIM_BootSourceSetting: {
        'ok': load_wsman_xml('boot_source_setting-enum-ok'),
        'ok-11g': load_wsman_xml('boot_source_setting-enum-ok-11g')
    },
    uris.DCIM_ComputerSystem: {
        'ok': load_wsman_xml('computer_system-enum-ok')
    },
}
BIOSInvocations = {
    uris.DCIM_ComputerSystem: {
        'RequestStateChange': {
            'ok': load_wsman_xml(
                'computer_system-invoke-request_state_change-ok'),
            'error': load_wsman_xml(
                'computer_system-invoke-request_state_change-error'),
        },
    },
    uris.DCIM_BIOSService: {
        'SetAttributes': {
            'ok': load_wsman_xml(
                'bios_service-invoke-set_attributes-ok'),
            'error': load_wsman_xml(
                'bios_service-invoke-set_attributes-error'),
        }
    },
    uris.DCIM_BootConfigSetting: {
        'ChangeBootOrderByInstanceID': {
            'ok': load_wsman_xml(
                'boot_config_setting-invoke-change_boot_order_by_instance_id-'
                'ok'),
            'error': load_wsman_xml(
                'boot_config_setting-invoke-change_boot_order_by_instance_id-'
                'error'),
        }
    }
}
CPUEnumerations = {
    uris.DCIM_CPUView: {
        'ok': load_wsman_xml('cpu-enumeration-enum-ok')
    }
}
PSUEnumerations = {
    uris.DCIM_PowerSupplyView: {
        'ok': load_wsman_xml('power-supply-enumeration-enum-ok')
    }
}
JobEnumerations = {
    uris.DCIM_LifecycleJob: {
        'ok': load_wsman_xml('lifecycle_job-enum-ok'),
        'not_found': load_wsman_xml('lifecycle_job-enum-not_found'),
    },
}
JobInvocations = {
    uris.DCIM_BIOSService: {
        'CreateTargetedConfigJob': {
            'ok': load_wsman_xml(
                'bios_service-invoke-create_targeted_config_job-ok'),
            'error': load_wsman_xml(
                'bios_service-invoke-create_targeted_config_job-error'),
        },
        'DeletePendingConfiguration': {
            'ok': load_wsman_xml(
                'bios_service-invoke-delete_pending_configuration-ok'),
            'error': load_wsman_xml(
                'bios_service-invoke-delete_pending_configuration-error'),
        },
    }
}
LifecycleControllerEnumerations = {
    uris.DCIM_SystemView: {
        'ok': load_wsman_xml('system_view-enum-ok')
    },
}
MemoryEnumerations = {
    uris.DCIM_MemoryView: {
        'ok': load_wsman_xml('memory-enumeration-enum-ok')
    },
}
RAIDEnumerations = {
    uris.DCIM_ControllerView: {
        'ok': load_wsman_xml('controller_view-enum-ok')
    },
    uris.DCIM_PhysicalDiskView: {
        'ok': load_wsman_xml('physical_disk_view-enum-ok')
    },
    uris.DCIM_VirtualDiskView: {
        'ok': load_wsman_xml('virtual_disk_view-enum-ok')
    }
}
RAIDInvocations = {
    uris.DCIM_RAIDService: {
        'CreateVirtualDisk': {
            'ok': load_wsman_xml(
                'raid_service-invoke-create_virtual_disk-ok'),
            'error': load_wsman_xml(
                'raid_service-invoke-create_virtual_disk-error'),
        },
        'DeleteVirtualDisk': {
            'ok': load_wsman_xml(
                'raid_service-invoke-delete_virtual_disk-ok'),
            'error': load_wsman_xml(
                'raid_service-invoke-delete_virtual_disk-error'),
        },
        'ConvertToRAID': {
            'ok': load_wsman_xml(
                'raid_service-invoke-convert_physical_disks-ok'),
            'error': load_wsman_xml(
                'raid_service-invoke-convert_physical_disks-error'),
        }
    }
...


defaults.py

Source: defaults.py (GitHub)


1"""2Default key bindings.::3 registry = load_key_bindings()4 app = Application(key_bindings_registry=registry)5"""6from __future__ import unicode_literals7from prompt_toolkit.key_binding.registry import ConditionalRegistry, MergedRegistry8from prompt_toolkit.key_binding.bindings.basic import load_basic_bindings, load_abort_and_exit_bindings, load_basic_system_bindings, load_auto_suggestion_bindings, load_mouse_bindings9from prompt_toolkit.key_binding.bindings.emacs import load_emacs_bindings, load_emacs_system_bindings, load_emacs_search_bindings, load_emacs_open_in_editor_bindings, load_extra_emacs_page_navigation_bindings10from prompt_toolkit.key_binding.bindings.vi import load_vi_bindings, load_vi_system_bindings, load_vi_search_bindings, load_vi_open_in_editor_bindings, load_extra_vi_page_navigation_bindings11from prompt_toolkit.filters import to_cli_filter12__all__ = (13 'load_key_bindings',14 'load_key_bindings_for_prompt',15)16def load_key_bindings(17 get_search_state=None,18 enable_abort_and_exit_bindings=False,19 enable_system_bindings=False,20 enable_search=False,21 enable_open_in_editor=False,22 enable_extra_page_navigation=False,23 enable_auto_suggest_bindings=False):24 """25 Create a Registry object that contains the default key bindings.26 :param enable_abort_and_exit_bindings: Filter to enable Ctrl-C and Ctrl-D.27 :param enable_system_bindings: Filter to enable the system bindings (meta-!28 prompt and Control-Z suspension.)29 :param enable_search: Filter to enable the search bindings.30 :param enable_open_in_editor: Filter to enable open-in-editor.31 :param enable_open_in_editor: Filter to enable open-in-editor.32 :param enable_extra_page_navigation: Filter for enabling extra page33 navigation. (Bindings for up/down scrolling through long pages, like in34 Emacs or Vi.)35 :param enable_auto_suggest_bindings: Filter to enable fish-style suggestions.36 """37 assert get_search_state is None or callable(get_search_state)38 # Accept both Filters and booleans as input.39 enable_abort_and_exit_bindings = to_cli_filter(enable_abort_and_exit_bindings)40 enable_system_bindings = to_cli_filter(enable_system_bindings)41 enable_search = to_cli_filter(enable_search)42 enable_open_in_editor = to_cli_filter(enable_open_in_editor)43 enable_extra_page_navigation = to_cli_filter(enable_extra_page_navigation)44 enable_auto_suggest_bindings = to_cli_filter(enable_auto_suggest_bindings)45 registry = MergedRegistry([46 # Load basic bindings.47 load_basic_bindings(),48 load_mouse_bindings(),49 ConditionalRegistry(load_abort_and_exit_bindings(),50 enable_abort_and_exit_bindings),51 ConditionalRegistry(load_basic_system_bindings(),52 enable_system_bindings),53 # Load emacs bindings.54 load_emacs_bindings(),55 ConditionalRegistry(load_emacs_open_in_editor_bindings(),56 enable_open_in_editor),57 ConditionalRegistry(load_emacs_search_bindings(get_search_state=get_search_state),58 enable_search),59 ConditionalRegistry(load_emacs_system_bindings(),60 enable_system_bindings),61 ConditionalRegistry(load_extra_emacs_page_navigation_bindings(),62 enable_extra_page_navigation),63 # Load Vi bindings.64 load_vi_bindings(get_search_state=get_search_state),65 ConditionalRegistry(load_vi_open_in_editor_bindings(),66 enable_open_in_editor),67 ConditionalRegistry(load_vi_search_bindings(get_search_state=get_search_state),68 enable_search),69 ConditionalRegistry(load_vi_system_bindings(),70 enable_system_bindings),71 ConditionalRegistry(load_extra_vi_page_navigation_bindings(),72 enable_extra_page_navigation),73 # 
Suggestion bindings.74 # (This has to come at the end, because the Vi bindings also have an75 # implementation for the "right arrow", but we really want the76 # suggestion binding when a suggestion is available.)77 ConditionalRegistry(load_auto_suggestion_bindings(),78 enable_auto_suggest_bindings),79 ])80 return registry81def load_key_bindings_for_prompt(**kw):82 """83 Create a ``Registry`` object with the defaults key bindings for an input84 prompt.85 This activates the key bindings for abort/exit (Ctrl-C/Ctrl-D),86 incremental search and auto suggestions.87 (Not for full screen applications.)88 """89 kw.setdefault('enable_abort_and_exit_bindings', True)90 kw.setdefault('enable_search', True)91 kw.setdefault('enable_auto_suggest_bindings', True)...


__init__.py

Source: __init__.py (GitHub)


1"""2The :mod:`sklearn.datasets` module includes utilities to load datasets,3including methods to load and fetch popular reference datasets. It also4features some artificial data generators.5"""6from .base import load_breast_cancer7from .base import load_boston8from .base import load_diabetes9from .base import load_digits10from .base import load_files11from .base import load_iris12from .base import load_linnerud13from .base import load_sample_images14from .base import load_sample_image15from .base import load_wine16from .base import get_data_home17from .base import clear_data_home18from .covtype import fetch_covtype19from .kddcup99 import fetch_kddcup9920from .mlcomp import load_mlcomp21from .lfw import fetch_lfw_pairs22from .lfw import fetch_lfw_people23from .twenty_newsgroups import fetch_20newsgroups24from .twenty_newsgroups import fetch_20newsgroups_vectorized25from .mldata import fetch_mldata, mldata_filename26from .samples_generator import make_classification27from .samples_generator import make_multilabel_classification28from .samples_generator import make_hastie_10_229from .samples_generator import make_regression30from .samples_generator import make_blobs31from .samples_generator import make_moons32from .samples_generator import make_circles33from .samples_generator import make_friedman134from .samples_generator import make_friedman235from .samples_generator import make_friedman336from .samples_generator import make_low_rank_matrix37from .samples_generator import make_sparse_coded_signal38from .samples_generator import make_sparse_uncorrelated39from .samples_generator import make_spd_matrix40from .samples_generator import make_swiss_roll41from .samples_generator import make_s_curve42from .samples_generator import make_sparse_spd_matrix43from .samples_generator import make_gaussian_quantiles44from .samples_generator import make_biclusters45from .samples_generator import make_checkerboard46from .svmlight_format import load_svmlight_file47from .svmlight_format import load_svmlight_files48from .svmlight_format import dump_svmlight_file49from .olivetti_faces import fetch_olivetti_faces50from .species_distributions import fetch_species_distributions51from .california_housing import fetch_california_housing52from .rcv1 import fetch_rcv153__all__ = ['clear_data_home',54 'dump_svmlight_file',55 'fetch_20newsgroups',56 'fetch_20newsgroups_vectorized',57 'fetch_lfw_pairs',58 'fetch_lfw_people',59 'fetch_mldata',60 'fetch_olivetti_faces',61 'fetch_species_distributions',62 'fetch_california_housing',63 'fetch_covtype',64 'fetch_rcv1',65 'fetch_kddcup99',66 'get_data_home',67 'load_boston',68 'load_diabetes',69 'load_digits',70 'load_files',71 'load_iris',72 'load_breast_cancer',73 'load_linnerud',74 'load_mlcomp',75 'load_sample_image',76 'load_sample_images',77 'load_svmlight_file',78 'load_svmlight_files',79 'load_wine',80 'make_biclusters',81 'make_blobs',82 'make_circles',83 'make_classification',84 'make_checkerboard',85 'make_friedman1',86 'make_friedman2',87 'make_friedman3',88 'make_gaussian_quantiles',89 'make_hastie_10_2',90 'make_low_rank_matrix',91 'make_moons',92 'make_multilabel_classification',93 'make_regression',94 'make_s_curve',95 'make_sparse_coded_signal',96 'make_sparse_spd_matrix',97 'make_sparse_uncorrelated',98 'make_spd_matrix',99 'make_swiss_roll',...


udac_example_dag.py

Source: udac_example_dag.py (GitHub)


from datetime import datetime, timedelta
import os
from airflow import DAG
from airflow.operators.dummy_operator import DummyOperator
from airflow.operators import (StageToRedshiftOperator, LoadFactOperator,
                               LoadDimensionOperator, DataQualityOperator)
from helpers import SqlQueries
# AWS_KEY = os.environ.get('AWS_KEY')
# AWS_SECRET = os.environ.get('AWS_SECRET')
default_args = {
    'owner': 'udacity',
    'start_date': datetime(2019, 1, 12),
    'depends_on_past': False,
    'retries': 3,
    'retry_delay': timedelta(minutes=5),
    'email_on_retry': False
}
dag = DAG('udac_example_dag',
          default_args=default_args,
          description='Load and transform data in Redshift with Airflow',
          schedule_interval='0 * * * *',
          catchup=False
          )
start_operator = DummyOperator(task_id='Begin_execution', dag=dag)
stage_events_to_redshift = StageToRedshiftOperator(
    task_id='Stage_events',
    dag=dag,
    table="staging_events",
    redshift_conn_id="redshift",
    aws_credentials_id="aws_credentials",
    s3_bucket="udacity-dend",
    s3_key="log_data",
    json_path="s3://udacity-dend/log_json_path.json",
    file_type="json"
)
stage_songs_to_redshift = StageToRedshiftOperator(
    task_id='Stage_songs',
    dag=dag,
    table="staging_songs",
    redshift_conn_id="redshift",
    aws_credentials_id="aws_credentials",
    s3_bucket="udacity-dend",
    s3_key="song_data",
    json_path="auto",
    file_type="json"
)
load_songplays_table = LoadFactOperator(
    task_id='Load_songplays_fact_table',
    dag=dag,
    table='songplays',
    redshift_conn_id="redshift",
    load_sql_stmt=SqlQueries.songplay_table_insert
)
load_user_dimension_table = LoadDimensionOperator(
    task_id='Load_user_dim_table',
    dag=dag,
    table='users',
    redshift_conn_id="redshift",
    load_sql_stmt=SqlQueries.user_table_insert
)
load_song_dimension_table = LoadDimensionOperator(
    task_id='Load_song_dim_table',
    dag=dag,
    table='songs',
    redshift_conn_id="redshift",
    load_sql_stmt=SqlQueries.song_table_insert
)
load_artist_dimension_table = LoadDimensionOperator(
    task_id='Load_artist_dim_table',
    dag=dag,
    table='artists',
    redshift_conn_id="redshift",
    load_sql_stmt=SqlQueries.artist_table_insert
)
load_time_dimension_table = LoadDimensionOperator(
    task_id='Load_time_dim_table',
    dag=dag,
    table='time',
    redshift_conn_id="redshift",
    load_sql_stmt=SqlQueries.time_table_insert
)
run_quality_checks = DataQualityOperator(
    task_id='Run_data_quality_checks',
    dag=dag,
    tables=['songplays', 'users', 'songs', 'artists', 'time'],
    redshift_conn_id="redshift"
)
end_operator = DummyOperator(task_id='Stop_execution', dag=dag)
# Task ordering for the defined DAG tasks
start_operator >> stage_events_to_redshift
start_operator >> stage_songs_to_redshift
stage_events_to_redshift >> load_songplays_table
stage_songs_to_redshift >> load_songplays_table
load_songplays_table >> load_user_dimension_table
load_songplays_table >> load_song_dimension_table
load_songplays_table >> load_artist_dimension_table
load_songplays_table >> load_time_dimension_table
load_user_dimension_table >> run_quality_checks
load_song_dimension_table >> run_quality_checks
load_artist_dimension_table >> run_quality_checks
load_time_dimension_table >> run_quality_checks
...


dice.py

Source: dice.py (GitHub)


import pygame
class Dice:
    def __init__(self):
        self.faces = []
        self.faces.append(pygame.image.load("images/dice0.png"))
        self.faces.append(pygame.image.load("images/dice1.png"))
        self.faces.append(pygame.image.load("images/dice2.png"))
        self.faces.append(pygame.image.load("images/dice3.png"))
        self.faces.append(pygame.image.load("images/dice4.png"))
        self.faces.append(pygame.image.load("images/dice5.png"))
        self.faces.append(pygame.image.load("images/dice6.png"))
        self.rfaces = []
        self.rfaces.append(pygame.image.load("images/Rdice0.png"))
        self.rfaces.append(pygame.image.load("images/Rdice1.png"))
        self.rfaces.append(pygame.image.load("images/Rdice2.png"))
        self.rfaces.append(pygame.image.load("images/Rdice3.png"))
        self.rfaces.append(pygame.image.load("images/Rdice4.png"))
        self.rfaces.append(pygame.image.load("images/Rdice5.png"))
        self.rfaces.append(pygame.image.load("images/Rdice6.png"))
        self.rfaces.append(pygame.image.load("images/RdiceRoll.png"))
        self.gfaces = []
        self.gfaces.append(pygame.image.load("images/Gdice0.png"))
        self.gfaces.append(pygame.image.load("images/Gdice1.png"))
        self.gfaces.append(pygame.image.load("images/Gdice2.png"))
        self.gfaces.append(pygame.image.load("images/Gdice3.png"))
        self.gfaces.append(pygame.image.load("images/Gdice4.png"))
        self.gfaces.append(pygame.image.load("images/Gdice5.png"))
        self.gfaces.append(pygame.image.load("images/Gdice6.png"))
        self.gfaces.append(pygame.image.load("images/GdiceRoll.png"))
        self.bfaces = []
        self.bfaces.append(pygame.image.load("images/Bdice0.png"))
        self.bfaces.append(pygame.image.load("images/Bdice1.png"))
        self.bfaces.append(pygame.image.load("images/Bdice2.png"))
        self.bfaces.append(pygame.image.load("images/Bdice3.png"))
        self.bfaces.append(pygame.image.load("images/Bdice4.png"))
        self.bfaces.append(pygame.image.load("images/Bdice5.png"))
        self.bfaces.append(pygame.image.load("images/Bdice6.png"))
        self.bfaces.append(pygame.image.load("images/BdiceRoll.png"))
        self.yfaces = []
        self.yfaces.append(pygame.image.load("images/Ydice0.png"))
        self.yfaces.append(pygame.image.load("images/Ydice1.png"))
        self.yfaces.append(pygame.image.load("images/Ydice2.png"))
        self.yfaces.append(pygame.image.load("images/Ydice3.png"))
        self.yfaces.append(pygame.image.load("images/Ydice4.png"))
        self.yfaces.append(pygame.image.load("images/Ydice5.png"))
        self.yfaces.append(pygame.image.load("images/Ydice6.png"))
        self.yfaces.append(pygame.image.load("images/YdiceRoll.png"))
    def drawDice(self, screen, face, color='-1'):
        if color == '-1':
            screen.blit(self.faces[face], (0, 0))
        elif color == '0':
            screen.blit(self.rfaces[face], (0, 0))
        elif color == '1':
            screen.blit(self.gfaces[face], (0, 0))
        elif color == '2':
            screen.blit(self.bfaces[face], (0, 0))
        elif color == '3':
            screen.blit(self.yfaces[face], (0, 0))
    def hitbox(self):
...


Using AI Code Generation


var parent = require('stryker-parent');
parent.load('test.js');
var parent = require('stryker-parent');
parent.load('stryker.conf.js');
var parent = require('stryker-parent');
parent.load('test.js');
var parent = require('stryker-parent');
parent.load('stryker.conf.js');
var parent = require('stryker-parent');
parent.load('test.js');
var parent = require('stryker-parent');
parent.load('stryker.conf.js');
var parent = require('stryker-parent');
parent.load('test.js');
var parent = require('stryker-parent');
parent.load('stryker.conf.js');
var parent = require('stryker-parent');
parent.load('test.js');
var parent = require('stryker-parent');
parent.load('stryker.conf.js');
var parent = require('stryker-parent');
parent.load('test.js');
var parent = require('stryker-parent');
parent.load('stryker.conf.js');
var parent = require('stryker-parent');
parent.load('test.js');
var parent = require('stryker-parent');
parent.load('stryker.conf.js');


Using AI Code Generation


var stryker = require('stryker-parent');
stryker.load('stryker-mocha-runner');
var stryker = require('stryker-parent');
stryker.load('stryker-mocha-runner');
var stryker = require('stryker-mocha-runner');
var stryker = require('stryker-parent');
stryker.load('stryker-mocha-runner');
var stryker = require('stryker-parent');
stryker.load('stryker-mocha-runner');
var stryker = require('stryker-mocha-runner');
var stryker = require('stryker-parent');
stryker.load('stryker-mocha-runner');
var stryker = require('stryker-parent');
stryker.load('stryker-mocha-runner');
var stryker = require('stryker-mocha-runner');
var stryker = require('stryker-parent');
stryker.load('stryker-mocha-runner');
var stryker = require('stryker-parent');
stryker.load('stryker-mocha-runner');
var stryker = require('stryker-mocha-runner');
var stryker = require('stryker-parent');
stryker.load('stryker-mocha-runner');
var stryker = require('stryker-parent');
stryker.load('stryker-mocha-runner');
var stryker = require('stryker-mocha-runner');
var stryker = require('stryker-parent');
stryker.load('stryker-mocha-runner');
var stryker = require('stryker-parent');
stryker.load('stryker-mocha-runner');
var stryker = require('stryker-mocha-runner');


Using AI Code Generation


var parent = require('stryker-parent');
parent.load('stryker-mocha-runner');
var parent = require('stryker-parent');
parent.load('stryker-mocha-runner');
var parent = require('stryker-parent');
parent.load('stryker-mocha-runner');
var parent = require('stryker-parent');
parent.load('stryker-mocha-runner');
var parent = require('stryker-parent');
parent.load('stryker-mocha-runner');
var parent = require('stryker-parent');
parent.load('stryker-mocha-runner');
var parent = require('stryker-parent');
parent.load('stryker-mocha-runner');
var parent = require('stryker-parent');
parent.load('stryker-mocha-runner');
var parent = require('stryker-parent');
parent.load('stryker-mocha-runner');
var parent = require('stryker-parent');
parent.load('stryker-mocha-runner');
var parent = require('stryker-parent');
parent.load('stryker-mocha-runner');
var parent = require('stryker-parent');
parent.load('stryker-mocha-runner');
var parent = require('stryker-parent');
parent.load('stryker-mocha-runner');
var parent = require('stryker-parent');
parent.load('stryker-mocha-runner');
var parent = require('stryker-parent');
parent.load('stryker-mocha-runner');
var parent = require('stryker


Using AI Code Generation


var parent = require('stryker-parent');
parent.load('stryker-api/core');
var stryker = require('stryker');
stryker.load('stryker-api/core');
var stryker = require('stryker');
stryker.load('stryker-api/core');
var stryker = require('stryker');
stryker.load('stryker-api/core');
var stryker = require('stryker');
stryker.load('stryker-api/core');
var stryker = require('stryker');
stryker.load('stryker-api/core');
var stryker = require('stryker');
stryker.load('stryker-api/core');
var stryker = require('stryker');
stryker.load('stryker-api/core');
var stryker = require('stryker');
stryker.load('stryker-api/core');
var stryker = require('stryker');
stryker.load('stryker-api/core');
var stryker = require('stryker');
stryker.load('stryker-api/core');
var stryker = require('stryker');
stryker.load('stryker-api/core');
var stryker = require('stryker');
stryker.load('stryker-api/core');
var stryker = require('stryker');
stryker.load('stryker-api/core');
var stryker = require('stryker');
stryker.load('stryker-api/core');
var stryker = require('stryker');
stryker.load('stryker-api/core');
var stryker = require('stryker');
stryker.load('stryker-api/core');


Using AI Code Generation


var parent = require('stryker-parent');
var stryker = require('stryker');
parent.load(stryker);
var parent = require('stryker-parent');
var stryker = require('stryker');
parent.load(stryker);
var parent = require('stryker-parent');
var stryker = require('stryker');
parent.load(stryker);
var parent = require('stryker-parent');
var stryker = require('stryker');
parent.load(stryker);
var parent = require('stryker-parent');
var stryker = require('stryker');
parent.load(stryker);
var parent = require('stryker-parent');
var stryker = require('stryker');
parent.load(stryker);
var parent = require('stryker-parent');
var stryker = require('stryker');
parent.load(stryker);
var parent = require('stryker-parent');
var stryker = require('stryker');
parent.load(stryker);
var parent = require('stryker-parent');
var stryker = require('stryker');
parent.load(stryker);
var parent = require('stryker-parent');
var stryker = require('stryker');
parent.load(stryker);
var parent = require('stryker-parent');
var stryker = require('stryker');
parent.load(stryker);
var parent = require('stryker-parent');
var stryker = require('stryker');
parent.load(stryker);
var parent = require('stryker-parent');
var stryker = require('stryker');
parent.load(stryker);
var parent = require('stryker-parent');
var stryker = require('stryker


Using AI Code Generation


var load = require('stryker-parent').load;
var config = load('config');
var log4js = load('log4js');
var log = log4js.getLogger('test');
var log = load('log4js').getLogger('test');
var log = load('log4js').getLogger('test');
log.debug('test');
var log = load('log4js').getLogger('test');
log.debug('test');
log.debug('test');
var log = load('log4js').getLogger('test');
log.debug('test');
log.debug('test');
var log = load('log4js').getLogger('test');
log.debug('test');
log.debug('test');
log.debug('test');
var log = load('log4js').getLogger('test');
log.debug('test');
log.debug('test');
log.debug('test');
log.debug('test');
var log = load('log4js').getLogger('test');
log.debug('test');
log.debug('test');
log.debug('test');
log.debug('test');
log.debug('test');
var log = load('log4js').getLogger('test');
log.debug('test');
log.debug('test');
log.debug('test');
log.debug('test');
log.debug('test');
log.debug('test');
var log = load('log4js').getLogger('test');
log.debug('test');
log.debug('test');
log.debug('test');
log.debug('test');
log.debug('test');
log.debug('test');
log.debug('test');
var log = load('log4js').getLogger('test');
log.debug('tes


Using AI Code Generation


var strykerParent = require('stryker-parent');
strykerParent.load('stryker-mocha-runner');
var strykerMochaRunner = require('stryker-mocha-runner');
var strykerParent = require('stryker-parent');
strykerParent.load('stryker-mocha-runner');
var strykerMochaRunner = require('stryker-mocha-runner');
var strykerParent = require('stryker-parent');
strykerParent.load('stryker-mocha-runner');
var strykerMochaRunner = require('stryker-mocha-runner');


