Best Python code snippet using localstack_python
node.py
Source:node.py  
...
        :type: str
        """
        self._transition_reason = transition_reason

    @property
    def is_container_running(self):
        """
        Gets the is_container_running of this Node.
        :return: The is_container_running of this Node.
        :rtype: bool
        """
        return self._is_container_running

    @is_container_running.setter
    def is_container_running(self, is_container_running):
        """
        Sets the is_container_running of this Node.
        :param is_container_running: The is_container_running of this Node.
        :type: bool
        """
        self._is_container_running = is_container_running

    @property
    def os_version(self):
        """
        Gets the os_version of this Node.
        :return: The os_version of this Node.
        :rtype: str
        """
        return self._os_version
...
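The node.py fragment above is part of a generated API model: each field of the Node object is exposed through a property getter and setter that wrap a private attribute. Below is a minimal, self-contained sketch of that pattern using the same field name; the simplified constructor is an assumption for illustration and is not part of the excerpt.

# Minimal sketch of the getter/setter property pattern used by the generated
# Node model above. The constructor here is a simplification for illustration;
# the real class defines many more fields.
class Node:
    def __init__(self):
        self._is_container_running = False

    @property
    def is_container_running(self):
        # Getter: simply returns the private attribute.
        return self._is_container_running

    @is_container_running.setter
    def is_container_running(self, is_container_running):
        # Setter: assigns to the private attribute.
        self._is_container_running = is_container_running

node = Node()
node.is_container_running = True   # goes through the setter
print(node.is_container_running)   # goes through the getter -> True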
test_elasticsearch_eager.py
Source:test_elasticsearch_eager.py
...
    "Content-Type": "application/json",
    "Authorization": "Basic ZWxhc3RpYzpkZWZhdWx0X3Bhc3N3b3Jk",
}
ATTRS = ["name", "gender", "age", "fare", "vip", "survived"]


def is_container_running():
    """Check whether the elasticsearch container is up and running
    with the correct port being exposed.
    """
    sock = socket.socket(socket.AF_INET, socket.SOCK_STREAM)
    status = sock.connect_ex(("127.0.0.1", 9200))
    if status == 0:
        return True
    else:
        return False


@pytest.mark.skipif(not is_container_running(), reason="The container is not running")
def test_create_index():
    """Create an index in the cluster"""
    create_index_url = "{}/{}".format(NODE, INDEX)
    res = requests.put(create_index_url, headers=HEADERS)
    assert res.status_code == 200


@pytest.mark.parametrize(
    "record",
    [
        (("person1", "Male", 20, 80.52, False, 1)),
        (("person2", "Female", 30, 40.88, True, 0)),
        (("person3", "Male", 40, 20.73, True, 0)),
        (("person4", "Female", 50, 100.99, False, 1)),
    ],
)
@pytest.mark.skipif(not is_container_running(), reason="The container is not running")
def test_populate_data(record):
    """Populate the index with data"""
    put_item_url = "{}/{}/{}".format(NODE, INDEX, DOC_TYPE)
    data = {}
    for idx, attr in enumerate(ATTRS):
        data[attr] = record[idx]
    res = requests.post(put_item_url, json=data, headers=HEADERS)
    # The 201 status code indicates the documents have been properly indexed
    assert res.status_code == 201
    # allow the cluster to index in the background.
    time.sleep(1)


@pytest.mark.skipif(not is_container_running(), reason="The container is not running")
def test_elasticsearch_io_dataset():
    """Test the functionality of the ElasticsearchIODataset"""
    dataset = tfio.experimental.elasticsearch.ElasticsearchIODataset(
        nodes=[NODE], index=INDEX, doc_type=DOC_TYPE, headers=HEADERS
    )
    assert issubclass(type(dataset), tf.data.Dataset)
    for item in dataset:
        for attr in ATTRS:
            assert attr in item


@pytest.mark.skipif(not is_container_running(), reason="The container is not running")
def test_elasticsearch_io_dataset_no_auth():
    """Test the functionality of the ElasticsearchIODataset when basic auth is
    required but the associated header is not passed.
    """
    try:
        dataset = tfio.experimental.elasticsearch.ElasticsearchIODataset(
            nodes=[NODE], index=INDEX, doc_type=DOC_TYPE
        )
    except ConnectionError as e:
        assert str(
            e
        ) == "No healthy node available for the index: {}, please check the cluster config".format(
            INDEX
        )


@pytest.mark.skipif(not is_container_running(), reason="The container is not running")
def test_elasticsearch_io_dataset_batch():
    """Test the functionality of the ElasticsearchIODataset"""
    BATCH_SIZE = 2
    dataset = tfio.experimental.elasticsearch.ElasticsearchIODataset(
        nodes=[NODE], index=INDEX, doc_type=DOC_TYPE, headers=HEADERS
    ).batch(BATCH_SIZE)
    assert issubclass(type(dataset), tf.data.Dataset)
    for item in dataset:
        for attr in ATTRS:
            assert attr in item
            assert len(item[attr]) == BATCH_SIZE


@pytest.mark.skipif(not is_container_running(), reason="The container is not running")
def test_elasticsearch_io_dataset_training():
    """Test the functionality of the ElasticsearchIODataset by training a
    tf.keras model on the structured data.
    """
    BATCH_SIZE = 2
    dataset = tfio.experimental.elasticsearch.ElasticsearchIODataset(
        nodes=[NODE], index=INDEX, doc_type=DOC_TYPE, headers=HEADERS
    )
    dataset = dataset.map(lambda v: (v, v.pop("survived")))
    dataset = dataset.batch(BATCH_SIZE)
    assert issubclass(type(dataset), tf.data.Dataset)
    feature_columns = []
    # Numeric column
    fare_column = feature_column.numeric_column("fare")
    feature_columns.append(fare_column)
    # Bucketized column
    age = feature_column.numeric_column("age")
    age_buckets = feature_column.bucketized_column(age, boundaries=[10, 30])
    feature_columns.append(age_buckets)
    # Categorical column
    gender = feature_column.categorical_column_with_vocabulary_list(
        "gender", ["Male", "Female"]
    )
    gender_indicator = feature_column.indicator_column(gender)
    feature_columns.append(gender_indicator)
    # Convert the feature columns into a tf.keras layer
    feature_layer = tf.keras.layers.DenseFeatures(feature_columns)
    # Build the model
    model = tf.keras.Sequential(
        [
            feature_layer,
            layers.Dense(128, activation="relu"),
            layers.Dense(128, activation="relu"),
            layers.Dropout(0.1),
            layers.Dense(1),
        ]
    )
    # Compile the model
    model.compile(
        optimizer="adam",
        loss=tf.keras.losses.BinaryCrossentropy(from_logits=True),
        metrics=["accuracy"],
    )
    # train the model
    model.fit(dataset, epochs=5)


@pytest.mark.skipif(not is_container_running(), reason="The container is not running")
def test_cleanup():
    """Clean up the index"""
    delete_index_url = "{}/{}".format(NODE, INDEX)
    res = requests.delete(delete_index_url, headers=HEADERS)
...
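The test file is excerpted from line 31 onwards, so its imports and the module-level constants it references (NODE, INDEX, DOC_TYPE and the first line of HEADERS) are not shown; the same setup is shared by the test_elasticsearch.py excerpt below. A plausible sketch of that missing preamble follows; the concrete NODE, INDEX and DOC_TYPE values are illustrative assumptions, not the values from the actual file.

# Hypothetical module-level preamble assumed by the tests above; the
# NODE/INDEX/DOC_TYPE values are placeholders for illustration only.
import socket
import time

import pytest
import requests
import tensorflow as tf
import tensorflow_io as tfio
from tensorflow import feature_column
from tensorflow.keras import layers

NODE = "http://localhost:9200"  # assumed Elasticsearch endpoint
INDEX = "people"                # assumed index name
DOC_TYPE = "_doc"               # assumed document type
HEADERS = {
    "Content-Type": "application/json",
    "Authorization": "Basic ZWxhc3RpYzpkZWZhdWx0X3Bhc3N3b3Jk",
}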
test_elasticsearch.py
Source:test_elasticsearch.py
...
    "Content-Type": "application/json",
    "Authorization": "Basic ZWxhc3RpYzpkZWZhdWx0X3Bhc3N3b3Jk",
}
ATTRS = ["name", "gender", "age", "fare", "vip", "survived"]


def is_container_running():
    """Check whether the elasticsearch container is up and running
    with the correct port being exposed.
    """
    sock = socket.socket(socket.AF_INET, socket.SOCK_STREAM)
    status = sock.connect_ex(("127.0.0.1", 9200))
    if status == 0:
        return True
    else:
        return False


@pytest.mark.skipif(not is_container_running(), reason="The container is not running")
def test_create_index():
    """Create an index in the cluster"""
    create_index_url = f"{NODE}/{INDEX}"
    res = requests.put(create_index_url, headers=HEADERS)
    assert res.status_code == 200


@pytest.mark.parametrize(
    "record",
    [
        (("person1", "Male", 20, 80.52, False, 1)),
        (("person2", "Female", 30, 40.88, True, 0)),
        (("person3", "Male", 40, 20.73, True, 0)),
        (("person4", "Female", 50, 100.99, False, 1)),
    ],
)
@pytest.mark.skipif(not is_container_running(), reason="The container is not running")
def test_populate_data(record):
    """Populate the index with data"""
    put_item_url = f"{NODE}/{INDEX}/{DOC_TYPE}"
    data = {}
    for idx, attr in enumerate(ATTRS):
        data[attr] = record[idx]
    res = requests.post(put_item_url, json=data, headers=HEADERS)
    # The 201 status code indicates the documents have been properly indexed
    assert res.status_code == 201
    # allow the cluster to index in the background.
    time.sleep(1)


@pytest.mark.skipif(not is_container_running(), reason="The container is not running")
def test_elasticsearch_io_dataset():
    """Test the functionality of the ElasticsearchIODataset"""
    dataset = tfio.experimental.elasticsearch.ElasticsearchIODataset(
        nodes=[NODE], index=INDEX, doc_type=DOC_TYPE, headers=HEADERS
    )
    assert issubclass(type(dataset), tf.data.Dataset)
    for item in dataset:
        for attr in ATTRS:
            assert attr in item


@pytest.mark.skipif(not is_container_running(), reason="The container is not running")
def test_elasticsearch_io_dataset_no_auth():
    """Test the functionality of the ElasticsearchIODataset when basic auth is
    required but the associated header is not passed.
    """
    try:
        dataset = tfio.experimental.elasticsearch.ElasticsearchIODataset(
            nodes=[NODE], index=INDEX, doc_type=DOC_TYPE
        )
    except ConnectionError as e:
        assert str(
            e
        ) == "No healthy node available for the index: {}, please check the cluster config".format(
            INDEX
        )


@pytest.mark.skipif(not is_container_running(), reason="The container is not running")
def test_elasticsearch_io_dataset_batch():
    """Test the functionality of the ElasticsearchIODataset"""
    BATCH_SIZE = 2
    dataset = tfio.experimental.elasticsearch.ElasticsearchIODataset(
        nodes=[NODE], index=INDEX, doc_type=DOC_TYPE, headers=HEADERS
    ).batch(BATCH_SIZE)
    assert issubclass(type(dataset), tf.data.Dataset)
    for item in dataset:
        for attr in ATTRS:
            assert attr in item
            assert len(item[attr]) == BATCH_SIZE


@pytest.mark.skipif(not is_container_running(), reason="The container is not running")
def test_elasticsearch_io_dataset_training():
    """Test the functionality of the ElasticsearchIODataset by training a
    tf.keras model on the structured data.
    """
    BATCH_SIZE = 2
    dataset = tfio.experimental.elasticsearch.ElasticsearchIODataset(
        nodes=[NODE], index=INDEX, doc_type=DOC_TYPE, headers=HEADERS
    )
    dataset = dataset.map(lambda v: (v, v.pop("survived")))
    dataset = dataset.batch(BATCH_SIZE)
    assert issubclass(type(dataset), tf.data.Dataset)
    feature_columns = []
    # Numeric column
    fare_column = feature_column.numeric_column("fare")
    feature_columns.append(fare_column)
    # Bucketized column
    age = feature_column.numeric_column("age")
    age_buckets = feature_column.bucketized_column(age, boundaries=[10, 30])
    feature_columns.append(age_buckets)
    # Categorical column
    gender = feature_column.categorical_column_with_vocabulary_list(
        "gender", ["Male", "Female"]
    )
    gender_indicator = feature_column.indicator_column(gender)
    feature_columns.append(gender_indicator)
    # Convert the feature columns into a tf.keras layer
    feature_layer = tf.keras.layers.DenseFeatures(feature_columns)
    # Build the model
    model = tf.keras.Sequential(
        [
            feature_layer,
            layers.Dense(128, activation="relu"),
            layers.Dense(128, activation="relu"),
            layers.Dropout(0.1),
            layers.Dense(1),
        ]
    )
    # Compile the model
    model.compile(
        optimizer="adam",
        loss=tf.keras.losses.BinaryCrossentropy(from_logits=True),
        metrics=["accuracy"],
    )
    # train the model
    model.fit(dataset, epochs=5)


@pytest.mark.skipif(not is_container_running(), reason="The container is not running")
def test_cleanup():
    """Clean up the index"""
    delete_index_url = f"{NODE}/{INDEX}"
    res = requests.delete(delete_index_url, headers=HEADERS)
...
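Every test above is skipped when the socket probe in is_container_running() fails, so the whole module silently no-ops if Elasticsearch has not finished starting. When the container is launched as part of the same job, a small polling wrapper around the identical check can wait for the port to open first; the helper below is an illustrative sketch, not part of either test file.

# Illustrative helper (not part of the test files above): retry the same
# 127.0.0.1:9200 TCP check until the Elasticsearch container is reachable
# or the timeout expires.
import socket
import time

def wait_for_container(host="127.0.0.1", port=9200, timeout=60.0, interval=1.0):
    """Return True once a TCP connection to host:port succeeds, else False."""
    deadline = time.monotonic() + timeout
    while time.monotonic() < deadline:
        with socket.socket(socket.AF_INET, socket.SOCK_STREAM) as sock:
            if sock.connect_ex((host, port)) == 0:
                return True
        time.sleep(interval)
    return False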
