How to use the count method in Playwright Internal

Best JavaScript code snippets using playwright-internal
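Before the code listings below, here is a minimal sketch of how element counting is usually done in current Playwright, using page.locator(selector).count(). The URL and the 'a' selector are placeholders for illustration, not part of any snippet on this page.

const { chromium } = require('playwright');
(async () => {
  const browser = await chromium.launch();
  const page = await browser.newPage();
  // placeholder page and selector; substitute your own
  await page.goto('https://example.com');
  // Locator.count() returns the number of elements the locator currently matches
  const linkCount = await page.locator('a').count();
  console.log(linkCount);
  await browser.close();
})();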

datastore_stats_generator.py

Source: datastore_stats_generator.py (GitHub)


1#!/usr/bin/env python2#3# Copyright 2007 Google Inc.4#5# Licensed under the Apache License, Version 2.0 (the "License");6# you may not use this file except in compliance with the License.7# You may obtain a copy of the License at8#9# http://www.apache.org/licenses/LICENSE-2.010#11# Unless required by applicable law or agreed to in writing, software12# distributed under the License is distributed on an "AS IS" BASIS,13# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.14# See the License for the specific language governing permissions and15# limitations under the License.16#17"""Generate Datastore Stats over Dev mode appserver's datastore."""18import datetime19import logging20from google.appengine.api import datastore21from google.appengine.api import datastore_admin22from google.appengine.api import datastore_types23from google.appengine.api import users24from google.appengine.ext.db import stats25DELETE_BATCH_SIZE = 10026_GLOBAL_KEY = (stats.GlobalStat, 'total_entity_usage', '')27_PROPERTY_TYPE_TO_DSS_NAME = {28 unicode: ('String', 'STRING'),29 bool: ('Boolean', 'BOOLEAN'),30 long: ('Integer', 'INT64'),31 type(None): ('NULL', 'NULL'),32 float: ('Float', 'DOUBLE'),33 datastore_types.Key: ('Key', 'REFERENCE'),34 datastore_types.Blob: ('Blob', 'STRING'),35 datastore_types.EmbeddedEntity: ('EmbeddedEntity', 'STRING'),36 datastore_types.ByteString: ('ShortBlob', 'STRING'),37 datastore_types.Text: ('Text', 'STRING'),38 users.User: ('User', 'USER'),39 datastore_types.Category: ('Category', 'STRING'),40 datastore_types.Link: ('Link', 'STRING'),41 datastore_types.Email: ('Email', 'STRING'),42 datetime.datetime: ('Date/Time', 'INT64'),43 datastore_types.GeoPt: ('GeoPt', 'POINT'),44 datastore_types.IM: ('IM', 'STRING'),45 datastore_types.PhoneNumber: ('PhoneNumber', 'STRING'),46 datastore_types.PostalAddress: ('PostalAddress', 'STRING'),47 datastore_types.Rating: ('Rating', 'INT64'),48 datastore_types.BlobKey: ('BlobKey', 'STRING'),49 }50class DatastoreStatsProcessor(object):51 """Generates datastore stats for an app's an datastore entities."""52 def __init__(self, _app=None):53 self.app_id = datastore_types.ResolveAppId(_app)54 self.whole_app_stats = {}55 self.namespace_stats = {}56 self.found_non_empty_namespace = False57 self.old_stat_keys = []58 self.timestamp = datetime.datetime.utcnow()59 def __ScanAllNamespaces(self):60 """Scans all the namespaces and processes each namespace."""61 namespace_query = datastore.Query('__namespace__', _app=self.app_id)62 for namespace_entity in namespace_query.Run():63 name = namespace_entity.key().name()64 if name is None:65 name = ''66 self.__ProcessNamespace(name)67 def __ProcessNamespace(self, namespace):68 """Process all the entities in a given namespace."""69 all_query = datastore.Query(namespace=namespace, _app=self.app_id)70 for entity in all_query.Run():71 self.found_non_empty_namespace |= (namespace != '')72 proto = entity.ToPb()73 proto_size = len(proto.SerializeToString())74 if entity.key().kind() in stats._DATASTORE_STATS_CLASSES_BY_KIND:75 stat_kind = stats._DATASTORE_STATS_CLASSES_BY_KIND[entity.key().kind()]76 self.old_stat_keys.append(entity.key())77 self.__AggregateTotal(proto_size, entity.key(), proto, namespace,78 stat_kind)79 else:80 self.__ProcessUserEntity(proto_size, entity.key(), proto, namespace)81 def __GetPropertyIndexStat(self, namespace, kind_name,82 entity_key_size, prop):83 """Return the size and count of indexes for a property of an EntityProto."""84 property_index_size = (len(self.app_id) + len(kind_name) 
+85 len(prop.value().SerializeToString()) +86 len(namespace) + entity_key_size)87 return (property_index_size, 2)88 def __GetTypeIndexStat(self, namespace, kind_name, entity_key_size):89 """Return the size and count of indexes by type of an EntityProto."""90 type_index_size = (len(self.app_id) + len(kind_name) + entity_key_size91 + len(namespace))92 return (type_index_size, 1)93 def __ProcessUserEntity(self, proto_size, key, proto, namespace):94 """Increment datastore stats for a non stats record."""95 self.__AggregateTotal(proto_size, key, proto, namespace, None)96 kind_name = key.kind()97 entity_key_size = (len(proto.key().app()) + len(namespace) +98 len(proto.key().path().SerializeToString()) +99 len(proto.entity_group().SerializeToString()))100 self.__AggregateCompositeIndices(proto, namespace, kind_name,101 entity_key_size)102 type_index_size, type_index_count = self.__GetTypeIndexStat(namespace,103 kind_name,104 entity_key_size)105 property_index_count = 0106 property_index_size = 0107 for prop_list in (proto.property_list(), proto.raw_property_list()):108 for prop in prop_list:109 index_size, index_count = self.__GetPropertyIndexStat(namespace,110 kind_name,111 entity_key_size,112 prop)113 property_index_size += index_size114 property_index_count += index_count115 builtin_index_size = type_index_size + property_index_size116 builtin_index_count = type_index_count + property_index_count117 self.__Increment(self.whole_app_stats, 1,118 (stats.KindStat, kind_name, ''),119 proto_size,120 builtin_index_count=builtin_index_count,121 builtin_index_size=builtin_index_size,122 kind_name=kind_name)123 self.__Increment(self.namespace_stats, 1,124 (stats.NamespaceKindStat, kind_name, namespace),125 proto_size,126 builtin_index_count=builtin_index_count,127 builtin_index_size=builtin_index_size,128 kind_name=kind_name)129 if key.parent() is None:130 whole_app_model = stats.KindRootEntityStat131 namespace_model = stats.NamespaceKindRootEntityStat132 else:133 whole_app_model = stats.KindNonRootEntityStat134 namespace_model = stats.NamespaceKindNonRootEntityStat135 self.__Increment(self.whole_app_stats, 1,136 (whole_app_model, kind_name, ''),137 proto_size,138 kind_name=kind_name)139 self.__Increment(self.namespace_stats, 1,140 (namespace_model, kind_name, namespace),141 proto_size,142 kind_name=kind_name)143 self.__ProcessProperties(144 kind_name,145 namespace,146 entity_key_size,147 (proto.property_list(), proto.raw_property_list()))148 def __ProcessProperties(self, kind_name, namespace, entity_key_size,149 prop_lists):150 for prop_list in prop_lists:151 for prop in prop_list:152 try:153 value = datastore_types.FromPropertyPb(prop)154 self.__AggregateProperty(kind_name, namespace, entity_key_size,155 prop, value)156 except (AssertionError, AttributeError, TypeError, ValueError), e:157 logging.error('Cannot process property %r, exception %s' %158 (prop, e))159 def __AggregateProperty(self, kind_name, namespace, entity_key_size,160 prop, value):161 property_name = prop.name()162 property_type = _PROPERTY_TYPE_TO_DSS_NAME[type(value)][0]163 index_property_type = _PROPERTY_TYPE_TO_DSS_NAME[type(value)][1]164 size = len(prop.SerializeToString())165 index_size, index_count = self.__GetPropertyIndexStat(namespace, kind_name,166 entity_key_size, prop)167 self.__Increment(self.whole_app_stats, 1,168 (stats.PropertyTypeStat, property_type, ''),169 size,170 builtin_index_count=0,171 builtin_index_size=0,172 property_type=property_type)173 self.__Increment(self.whole_app_stats, 0,174 (stats.PropertyTypeStat, 
index_property_type, ''),175 0,176 builtin_index_count=index_count,177 builtin_index_size=index_size,178 property_type=index_property_type)179 self.__Increment(self.namespace_stats, 1,180 (stats.NamespacePropertyTypeStat,181 property_type, namespace),182 size,183 builtin_index_count=0,184 builtin_index_size=0,185 property_type=property_type)186 self.__Increment(self.namespace_stats, 0,187 (stats.NamespacePropertyTypeStat,188 index_property_type, namespace),189 0,190 builtin_index_count=index_count,191 builtin_index_size=index_size,192 property_type=index_property_type)193 self.__Increment(self.whole_app_stats, 1,194 (stats.KindPropertyTypeStat,195 property_type + '_' + kind_name, ''),196 size,197 builtin_index_count=0,198 builtin_index_size=0,199 property_type=property_type, kind_name=kind_name)200 self.__Increment(self.whole_app_stats, 0,201 (stats.KindPropertyTypeStat,202 index_property_type + '_' + kind_name, ''),203 0,204 builtin_index_count=index_count,205 builtin_index_size=index_size,206 property_type=index_property_type, kind_name=kind_name)207 self.__Increment(self.namespace_stats, 1,208 (stats.NamespaceKindPropertyTypeStat,209 property_type + '_' + kind_name, namespace),210 size,211 builtin_index_count=0,212 builtin_index_size=0,213 property_type=property_type, kind_name=kind_name)214 self.__Increment(self.namespace_stats, 0,215 (stats.NamespaceKindPropertyTypeStat,216 index_property_type + '_' + kind_name, namespace),217 0,218 builtin_index_count=index_count,219 builtin_index_size=index_size,220 property_type=index_property_type, kind_name=kind_name)221 self.__Increment(self.whole_app_stats, 1,222 (stats.KindPropertyNameStat,223 property_name + '_' + kind_name, ''),224 size,225 builtin_index_count=index_count,226 builtin_index_size=index_size,227 property_name=property_name, kind_name=kind_name)228 self.__Increment(self.namespace_stats, 1,229 (stats.NamespaceKindPropertyNameStat,230 property_name + '_' + kind_name, namespace),231 size,232 builtin_index_count=index_count,233 builtin_index_size=index_size,234 property_name=property_name, kind_name=kind_name)235 self.__Increment(self.whole_app_stats, 1,236 (stats.KindPropertyNamePropertyTypeStat,237 property_type + '_' + property_name + '_' + kind_name,238 ''), size,239 builtin_index_count=0,240 builtin_index_size=0,241 property_type=property_type,242 property_name=property_name, kind_name=kind_name)243 self.__Increment(self.whole_app_stats, 0,244 (stats.KindPropertyNamePropertyTypeStat,245 index_property_type + '_' + property_name + '_' +246 kind_name,247 ''), 0,248 builtin_index_count=index_count,249 builtin_index_size=index_size,250 property_type=index_property_type,251 property_name=property_name, kind_name=kind_name)252 self.__Increment(self.namespace_stats, 1,253 (stats.NamespaceKindPropertyNamePropertyTypeStat,254 property_type + '_' + property_name + '_' + kind_name,255 namespace),256 size,257 builtin_index_count=0,258 builtin_index_size=0,259 property_type=property_type,260 property_name=property_name, kind_name=kind_name)261 self.__Increment(self.namespace_stats, 0,262 (stats.NamespaceKindPropertyNamePropertyTypeStat,263 index_property_type + '_' + property_name + '_' +264 kind_name,265 namespace),266 0,267 builtin_index_count=index_count,268 builtin_index_size=index_size,269 property_type=index_property_type,270 property_name=property_name, kind_name=kind_name)271 def __GetCompositeIndexStat(self, definition, proto, namespace, kind_name,272 entity_key_size):273 """Get statistics of composite index for a index definition 
of an entity."""274 property_list = proto.property_list()275 property_count = []276 property_size = []277 index_count = 1278 for indexed_prop in definition.property_list():279 name = indexed_prop.name()280 count = 0281 prop_size = 0282 for prop in property_list:283 if prop.name() == name:284 count += 1285 prop_size += len(prop.SerializeToString())286 property_count.append(count)287 property_size.append(prop_size)288 index_count *= count289 if index_count == 0:290 return (0, 0)291 index_only_size = 0292 for i in range(len(property_size)):293 index_only_size += property_size[i] * (index_count / property_count[i])294 index_size = (index_count * (entity_key_size + len(kind_name) +295 len(self.app_id) + len(namespace)) +296 index_only_size * 2)297 return (index_size, index_count)298 def __AggregateCompositeIndices(self, proto, namespace, kind_name,299 entity_key_size):300 """Aggregate statistics of composite indexes for an entity."""301 composite_indices = datastore_admin.GetIndices(self.app_id)302 for index in composite_indices:303 definition = index.definition()304 if kind_name != definition.entity_type():305 continue306 index_size, index_count = self.__GetCompositeIndexStat(definition, proto,307 namespace,308 kind_name,309 entity_key_size)310 if index_count == 0:311 continue312 name_id = namespace313 if not name_id:314 name_id = 1315 self.__Increment(self.whole_app_stats, 0, _GLOBAL_KEY, 0,316 composite_index_count=index_count,317 composite_index_size=index_size)318 self.__Increment(self.whole_app_stats, 0,319 (stats.NamespaceStat, name_id, ''), 0,320 composite_index_count=index_count,321 composite_index_size=index_size,322 subject_namespace=namespace)323 self.__Increment(self.namespace_stats, 0,324 (stats.NamespaceGlobalStat, 'total_entity_usage',325 namespace), 0,326 composite_index_count=index_count,327 composite_index_size=index_size)328 self.__Increment(self.whole_app_stats, 0,329 (stats.KindStat, kind_name, ''), 0,330 composite_index_count=index_count,331 composite_index_size=index_size,332 kind_name=kind_name)333 self.__Increment(self.namespace_stats, 0,334 (stats.NamespaceKindStat, kind_name, namespace), 0,335 composite_index_count=index_count,336 composite_index_size=index_size,337 kind_name=kind_name)338 index_id = index.id()339 self.__Increment(self.whole_app_stats, index_count,340 (stats.KindCompositeIndexStat,341 kind_name + '_%s' % index_id, ''), index_size,342 kind_name=kind_name, index_id=index_id)343 self.__Increment(self.namespace_stats, index_count,344 (stats.NamespaceKindCompositeIndexStat,345 kind_name + '_%s' % index_id, namespace), index_size,346 kind_name=kind_name, index_id=index_id)347 def __AggregateTotal(self, size, key, proto, namespace, stat_kind):348 """Aggregate total datastore stats."""349 kind_name = key.kind()350 entity_key_size = (len(proto.key().app()) +351 len(proto.key().path().SerializeToString()) +352 len(proto.entity_group().SerializeToString()))353 type_index_size, type_index_count = self.__GetTypeIndexStat(namespace,354 kind_name,355 entity_key_size)356 property_index_count = 0357 property_index_size = 0358 for prop_list in (proto.property_list(), proto.raw_property_list()):359 for prop in prop_list:360 index_size, index_count = self.__GetPropertyIndexStat(namespace,361 kind_name,362 entity_key_size,363 prop)364 property_index_size += index_size365 property_index_count += index_count366 builtin_index_size = type_index_size + property_index_size367 builtin_index_count = type_index_count + property_index_count368 if stat_kind == stats.GlobalStat:369 
count = 0370 else:371 count = 1372 self.__Increment(self.whole_app_stats, count, _GLOBAL_KEY, size,373 builtin_index_count=builtin_index_count,374 builtin_index_size=builtin_index_size)375 name_id = namespace376 if not name_id:377 name_id = 1378 if (stat_kind == stats.NamespaceStat) and (namespace == ''):379 count = 0380 self.__Increment(self.whole_app_stats, count,381 (stats.NamespaceStat, name_id, ''),382 size,383 builtin_index_count=builtin_index_count,384 builtin_index_size=builtin_index_size,385 subject_namespace=namespace)386 if stat_kind == stats.NamespaceGlobalStat:387 count = 0388 self.__Increment(389 self.namespace_stats, count,390 (stats.NamespaceGlobalStat, 'total_entity_usage', namespace), size,391 builtin_index_count=builtin_index_count,392 builtin_index_size=builtin_index_size)393 def __Increment(self, stats_dict, count, stat_key, size,394 builtin_index_count=0, builtin_index_size=0,395 composite_index_count=0, composite_index_size=0, **kwds):396 """Increment stats for a particular kind.397 Args:398 stats_dict: The dictionary where the entities are held.399 The entities are keyed by stat_key. e.g. The400 __Stat_Total__ entity will be found in stats_dict[_GLOBAL_KEY].401 count: The amount to increment the datastore stat by.402 stat_key: A tuple of (db.Model of the stat, key value, namespace).403 size: The "bytes" to increment the size by.404 builtin_index_count: The bytes of builtin index to add in to a stat.405 builtin_index_size: The count of builtin index to add in to a stat.406 composite_index_count: The bytes of composite index to add in to a stat.407 composite_index_size: The count of composite index to add in to a stat.408 kwds: Name value pairs that are set on the created entities.409 """410 if stat_key not in stats_dict:411 stat_model = stat_key[0](412 key=datastore_types.Key.from_path(stat_key[0].STORED_KIND_NAME,413 stat_key[1],414 namespace=stat_key[2],415 _app=self.app_id),416 _app=self.app_id)417 stats_dict[stat_key] = stat_model418 for field, value in kwds.iteritems():419 setattr(stat_model, field, value)420 stat_model.count = count421 if size:422 stat_model.entity_bytes = size423 if builtin_index_size:424 stat_model.builtin_index_bytes = builtin_index_size425 stat_model.builtin_index_count = builtin_index_count426 if composite_index_size:427 stat_model.composite_index_bytes = composite_index_size428 stat_model.composite_index_count = composite_index_count429 stat_model.bytes = size + builtin_index_size + composite_index_size430 stat_model.timestamp = self.timestamp431 else:432 stat_model = stats_dict[stat_key]433 stat_model.count += count434 if size:435 stat_model.entity_bytes += size436 if builtin_index_size:437 stat_model.builtin_index_bytes += builtin_index_size438 stat_model.builtin_index_count += builtin_index_count439 if composite_index_size:440 stat_model.composite_index_bytes += composite_index_size441 stat_model.composite_index_count += composite_index_count442 stat_model.bytes += size + builtin_index_size + composite_index_size443 def __Finalize(self):444 """Finishes processing, deletes all old stats and writes new ones."""445 for i in range(0, len(self.old_stat_keys), DELETE_BATCH_SIZE):446 datastore.Delete(self.old_stat_keys[i:i+DELETE_BATCH_SIZE])447 self.written = 0448 for stat in self.whole_app_stats.itervalues():449 if stat.count or not (isinstance(stat, stats.GlobalStat) or450 isinstance(stat, stats.NamespaceStat)):451 stat.put()452 self.written += 1453 if self.found_non_empty_namespace:454 for stat in self.namespace_stats.itervalues():455 if 
stat.count or not isinstance(stat, stats.NamespaceGlobalStat):456 stat.put()457 self.written += 1458 def Run(self):459 """Scans the datastore, computes new stats and writes them."""460 self.__ScanAllNamespaces()461 self.__Finalize()462 return self463 def Report(self):464 """Produce a small report about the result."""465 stat = self.whole_app_stats.get(_GLOBAL_KEY, None)466 entity_size = 0467 entity_count = 0468 builtin_index_size = 0469 builtin_index_count = 0470 composite_index_size = 0471 composite_index_count = 0472 if stat:473 entity_size = stat.entity_bytes474 entity_count = stat.count475 builtin_index_size = stat.builtin_index_bytes476 builtin_index_count = stat.builtin_index_count477 composite_index_size = stat.composite_index_bytes478 composite_index_count = stat.composite_index_count479 if not entity_count:480 entity_count = 1481 return ('Scanned %d entities of total %d bytes, %d index entries of total '482 '%d bytes and %d composite index entries of total %d bytes. '483 'Inserted %d new records.'484 % (entity_count, entity_size, builtin_index_count,485 builtin_index_size, composite_index_count, composite_index_size,...


test_database.py

Source: test_database.py (GitHub)


...
# the test cases for database
class database_test_cases(unittest.TestCase):
    def setUp(self):
        self.db = Database()
        self.sql_count_article = 'SELECT count(article_id) FROM HooliASE.articles'
        self.sql_count_question = 'SELECT count(question_id) FROM HooliASE.questions'
        self.sql_count_history = 'SELECT count(history_id) FROM HooliASE.history'
        self.sql_count_feedback = 'SELECT count(id) FROM HooliASE.answer_feedback'
        self.sql_delete_article = 'DELETE from HooliASE.articles WHERE article_id=%s'
        self.sql_delete_question = 'DELETE from HooliASE.questions WHERE question_id=%s'
        self.sql_delete_history = 'DELETE from HooliASE.history WHERE history_id=%s'
        self.sql_delete_feedback = 'DELETE from HooliASE.answer_feedback WHERE id=%s'
    # add_article(title, content)
    def test_add_article(self):
        # count the original row number
        self.db.mycursor.execute(self.sql_count_article)
        ori_row = count_rows(self.db.mycursor)
        # add a new test record into articles table
        article_id = self.db.add_article('ASE', 'We all love ASE')
        # assert the success of insertion
        self.db.mycursor.execute(self.sql_count_article)
        new_row = count_rows(self.db.mycursor)
...


ets_cleaning_nltk.py

Source: ets_cleaning_nltk.py (GitHub)


...84 # Counting85 ZHO_length.append(words)86 ZHO_num_sentences.append(num_sentence)87 ZHO_one_count.append(ones)88 ZHO_the_count.append(determiners.count('the'))89 ZHO_aan_count.append(determiners.count('a') + determiners.count('an'))90 ZHO_this_count.append(determiners.count('this'))91 ZHO_that_count.append(determiners.count('that'))92 ZHO_these_count.append(determiners.count('these'))93 ZHO_those_count.append(determiners.count('those'))94 determiners = [x for x in determiners if x not in det_in_question]95 ZHO_other_det_count.append(len(determiners))96 ZHO_noun_count.append(len(nouns))97 ZHO_verb_count.append(len(verbs))98 ZHO_adj_count.append(len(adjs))99 ZHO_modal_count.append(len(modal))100 ZHO_PRP_count.append(prp)101mandarin_L1['essay_len'] = ZHO_length102mandarin_L1['num_sentence'] = ZHO_num_sentences103mandarin_L1['noun_count'] = ZHO_noun_count104mandarin_L1['verb_count'] = ZHO_verb_count105mandarin_L1['adj_count'] = ZHO_adj_count106mandarin_L1['modal_count'] = ZHO_modal_count107mandarin_L1['noun_per_sentence'] = mandarin_L1['noun_count'] / mandarin_L1['num_sentence']108mandarin_L1['verb_per_sentence'] = mandarin_L1['verb_count'] / mandarin_L1['num_sentence']109mandarin_L1['adj_per_sentence'] = mandarin_L1['adj_count'] / mandarin_L1['num_sentence']110mandarin_L1['modal_per_sentence'] = mandarin_L1['modal_count'] / mandarin_L1['num_sentence']111mandarin_L1['the_count'] = ZHO_the_count112mandarin_L1['aan_count'] = ZHO_aan_count113mandarin_L1['one_count'] = ZHO_one_count114mandarin_L1['this_count'] = ZHO_this_count115mandarin_L1['that_count'] = ZHO_that_count116mandarin_L1['these_count'] = ZHO_these_count117mandarin_L1['those_count'] = ZHO_those_count118mandarin_L1['other_det_count'] = ZHO_other_det_count119mandarin_L1['PRP_count'] = ZHO_PRP_count120mandarin_L1['the_freq'] = mandarin_L1['the_count'] / mandarin_L1['essay_len']121mandarin_L1['aan_freq'] = mandarin_L1['aan_count'] / mandarin_L1['essay_len']122mandarin_L1['one_freq'] = mandarin_L1['one_count'] / mandarin_L1['essay_len']123mandarin_L1['this_freq'] = mandarin_L1['this_count'] / mandarin_L1['essay_len']124mandarin_L1['that_freq'] = mandarin_L1['that_count'] / mandarin_L1['essay_len']125mandarin_L1['these_freq'] = mandarin_L1['these_count'] / mandarin_L1['essay_len']126mandarin_L1['those_freq'] = mandarin_L1['those_count'] / mandarin_L1['essay_len']127mandarin_L1['other_det_freq'] = mandarin_L1['other_det_count'] / mandarin_L1['essay_len']128mandarin_L1['PRP_freq'] = mandarin_L1['PRP_count'] / mandarin_L1['essay_len']129'''SPANISH L1'''130SPA_length = []131SPA_num_sentences = []132SPA_the_count = []133SPA_aan_count = []134SPA_one_count = []135SPA_this_count = []136SPA_that_count = []137SPA_these_count = []138SPA_those_count = []139SPA_other_det_count = []140SPA_noun_count = []141SPA_verb_count = []142SPA_modal_count = []143SPA_adj_count = []144SPA_PRP_count = []145for f in SPA_file:146 f_name = SPA_path + f147 temp = open(f_name, 'r')148 num_sentence = 0149 ones = 0150 determiners = []151 nouns = []152 verbs = []153 adjs = []154 modal = []155 words = 0 # number of words156 prp = 0157 for line in temp:158 arr = line.strip().split()159 if not len(arr) == 0:160 line_tokenized = nltk.word_tokenize(line.lower())161 tagged = [list(ele) for ele in nltk.pos_tag(line_tokenized)]162 tagged = [ele for ele in tagged if ele[0] not in puncs] # leave out punctuations163 words += len(tagged)164 one = np.array([x for x in tagged if x[0] == 'one'])165 dt = np.array([x for x in tagged if x[1] == 'DT'])166 n = np.array([x for x in tagged if 
x[1] in noun_tag])167 v = np.array([x for x in tagged if x[1] in verb_tag])168 a = np.array([x for x in tagged if x[1] in adj_tag])169 m = np.array([x for x in tagged if x[1] == 'MD'])170 ones += len(one)171 prp += find_PRP(tagged)172 if dt.shape[0] != 0:173 determiners += list(dt[:, 0])174 if n.shape[0] != 0:175 nouns += list(n[:, 0])176 if v.shape[0] != 0:177 verbs += list(v[:, 0])178 if a.shape[0] != 0:179 adjs += list(a[:, 0])180 if m.shape[0] != 0:181 modal += list(m[:, 0])182 num_sentence += 1183 temp.close()184 # Counting185 SPA_length.append(words)186 SPA_num_sentences.append(num_sentence)187 SPA_one_count.append(ones)188 SPA_the_count.append(determiners.count('the'))189 SPA_aan_count.append(determiners.count('a') + determiners.count('an'))190 SPA_this_count.append(determiners.count('this'))191 SPA_that_count.append(determiners.count('that'))192 SPA_these_count.append(determiners.count('these'))193 SPA_those_count.append(determiners.count('those'))194 determiners = [x for x in determiners if x not in det_in_question]195 SPA_other_det_count.append(len(determiners))196 SPA_noun_count.append(len(nouns))197 SPA_verb_count.append(len(verbs))198 SPA_adj_count.append(len(adjs))199 SPA_modal_count.append(len(modal))200 SPA_PRP_count.append(prp)201spanish_L1['essay_len'] = SPA_length202spanish_L1['num_sentence'] = SPA_num_sentences203spanish_L1['noun_count'] = SPA_noun_count204spanish_L1['verb_count'] = SPA_verb_count205spanish_L1['adj_count'] = SPA_adj_count206spanish_L1['modal_count'] = SPA_modal_count207spanish_L1['noun_per_sentence'] = spanish_L1['noun_count'] / spanish_L1['num_sentence']...


analysis.py

Source: analysis.py (GitHub)


...34 prov = (each.split())[0]35 data.append(prov)36 count={}37 for i in data:38 count[i]=data.count(i)39 # print(count)40 prov=list(count.keys())41 nums=list(count.values())42 return prov,nums43def prov_plt():44 prov, nums = provience()45 plt.figure(figsize=(8, 4))46 plt.xticks(rotation=0)47 plt.bar(prov, nums, color='g')48 plt.xlabel('省份')49 plt.ylabel('数量')50 plt.title('不同省份数量分布图')51 plt.legend()52 plt.show()53# 词云及根据词云数据进行分析的柱状图54def cloud_plt():55 def cloud_data():56 title=data_analysis('title')57 titles=[]58 # 对每个标题进行分词59 for each in title:60 title_cut=jieba.lcut(each)61 titles.append(title_cut)62 # 剔除不需要的词语63 title_del=[]64 for line in titles:65 line_del=[]66 for word in line:67 if word not in ['2018','妈妈','❤','】','【',' ','Chinism','工作室','倔强']:68 line_del.append(word)69 title_del.append(line_del)70 # print(title_del)71 # 元素去重,每个标题中不含重复元素72 title_clean=[]73 for each in title_del:74 line_dist=[]75 for word in each:76 if word not in line_dist:77 line_dist.append(word)78 title_clean.append(line_dist)79 # 将所有词语转为一个list80 allwords_dist=[]81 for line in title_clean:82 for word in line:83 allwords_dist.append(word)84 # 把列表转为数据框85 allwords_dist=pandas.DataFrame({'allwords':allwords_dist})86 # 对词语进行分类汇总87 word_count=allwords_dist.allwords.value_counts().reset_index()88 # 添加列名89 word_count.columns=['word','count']90 # print(allwords_dist)91 return word_count,title_clean92 def cloud_data_count():93 # 获取商品销量数据94 sell_count = data_analysis('sell_count')95 word_count, title_clean = cloud_data()96 ws_count = []97 # 商品中包含统计的词时,将其销量加入list98 for each in word_count.word:99 i = 0100 s_list = []101 for t in title_clean:102 if each in t:103 s_list.append(int(sell_count[i]))104 # print(s_list)105 i += 1106 # 统计一个关键词所包含商品的销量总数107 ws_count.append(sum(s_list))108 # 把列表转为数据框109 ws_count = pandas.DataFrame({'ws_count': ws_count})110 # 把word_count, ws_count合并为一个表111 word_count = pandas.concat([word_count, ws_count], axis=1, ignore_index=True)112 word_count.columns = ['word', 'count', 'ws_count']113 # 升序排列114 word_count.sort_values('ws_count', inplace=True, ascending=True)115 # 取最大30行数据116 df_ws = word_count.tail(30)117 return df_ws118 # 图云部分119 word_count=cloud_data()[0]120 # 设置字体,背景颜色,字体最大号,121 w_c=WordCloud(font_path='/usr/local/lib/python3.6/dist-packages/matplotlib/mpl-data/fonts/ttf/simhei.ttf',122 background_color='white',123 max_font_size=60,124 margin=1)125 # 取前400个词进行可视化126 wc=w_c.fit_words({x[0]:x[1] for x in word_count.head(1000).values})127 # 设置图优化128 plt.imshow(wc,interpolation='bilinear')129 # 去除边框130 plt.axis('off')131 plt.show()132 # 统计分析柱状图部分133 data = cloud_data_count()134 index = np.arange(data.word.size)135 # plt.figure(figsize=(6,12))136 plt.barh(index, data.ws_count, align='center', alpha=0.8)137 plt.yticks(index, data.word)138 # 添加数据标签139 for y, x in zip(index, data.ws_count):140 plt.text(x, y, '%.0f' % x, ha='left', va='center')141 plt.show()142def impact_analysis():143 sell_count=pandas.DataFrame({'sell_count': data_analysis('sell_count')})144 price=[]145 for i in data_analysis('price'):146 p=i.split('-')147 p_i=p[0].split('.')...


format_ids.py

Source: format_ids.py (GitHub)


1from struct import pack, unpack2def parseIP(string):3 bunch = map(int, string.split('.'))4 # pack to bytes5 p = pack('4B', *bunch)6 # unpack as u167 return unpack('>I', p)[0]8def writeIP(raw):9 # pack to bytes10 p = pack('>I', raw)11 # unpack12 return '.'.join(map(str, unpack('4B', p)))13def writeFloat(raw):14 # this is just how floats get printed...15 return '{0:.6f}'.format(raw)16xml_formats = {17 1 : { 'names' : ['void']},18 2 : { 'type' : 'b', 'count' : 1, 'names' : ['s8']},19 3 : { 'type' : 'B', 'count' : 1, 'names' : ['u8']},20 4 : { 'type' : 'h', 'count' : 1, 'names' : ['s16']},21 5 : { 'type' : 'H', 'count' : 1, 'names' : ['u16']},22 6 : { 'type' : 'i', 'count' : 1, 'names' : ['s32']},23 7 : { 'type' : 'I', 'count' : 1, 'names' : ['u32']},24 8 : { 'type' : 'q', 'count' : 1, 'names' : ['s64']},25 9 : { 'type' : 'Q', 'count' : 1, 'names' : ['u64']},26 10 : { 'type' : 'B', 'count' : -1, 'names' : ['bin', 'binary'], 'fromStr' : None},27 11 : { 'type' : 'B', 'count' : -1, 'names' : ['str', 'string'], 'fromStr' : None},28 12 : { 'type' : 'I', 'count' : 1, 'names' : ['ip4'], 'fromStr' : parseIP, 'toStr' : writeIP},29 13 : { 'type' : 'I', 'count' : 1, 'names' : ['time']}, # unix timestamp30 14 : { 'type' : 'f', 'count' : 1, 'names' : ['float', 'f'], 'fromStr' : float, 'toStr' : writeFloat},31 15 : { 'type' : 'd', 'count' : 1, 'names' : ['double', 'd'], 'fromStr' : float, 'toStr' : writeFloat},32 16 : { 'type' : 'b', 'count' : 2, 'names' : ['2s8']},33 17 : { 'type' : 'B', 'count' : 2, 'names' : ['2u8']},34 18 : { 'type' : 'h', 'count' : 2, 'names' : ['2s16']},35 19 : { 'type' : 'H', 'count' : 2, 'names' : ['2u16']},36 20 : { 'type' : 'i', 'count' : 2, 'names' : ['2s32']},37 21 : { 'type' : 'I', 'count' : 2, 'names' : ['2u32']},38 22 : { 'type' : 'q', 'count' : 2, 'names' : ['2s64', 'vs64']},39 23 : { 'type' : 'Q', 'count' : 2, 'names' : ['2u64', 'vu64']},40 24 : { 'type' : 'f', 'count' : 2, 'names' : ['2f'], 'fromStr' : float, 'toStr' : writeFloat},41 25 : { 'type' : 'd', 'count' : 2, 'names' : ['2d', 'vd'], 'fromStr' : float, 'toStr' : writeFloat},42 26 : { 'type' : 'b', 'count' : 3, 'names' : ['3s8']},43 27 : { 'type' : 'B', 'count' : 3, 'names' : ['3u8']},44 28 : { 'type' : 'h', 'count' : 3, 'names' : ['3s16']},45 29 : { 'type' : 'H', 'count' : 3, 'names' : ['3u16']},46 30 : { 'type' : 'i', 'count' : 3, 'names' : ['3s32']},47 31 : { 'type' : 'I', 'count' : 3, 'names' : ['3u32']},48 32 : { 'type' : 'q', 'count' : 3, 'names' : ['3s64']},49 33 : { 'type' : 'Q', 'count' : 3, 'names' : ['3u64']},50 34 : { 'type' : 'f', 'count' : 3, 'names' : ['3f'], 'fromStr' : float, 'toStr' : writeFloat},51 35 : { 'type' : 'd', 'count' : 3, 'names' : ['3d'], 'fromStr' : float, 'toStr' : writeFloat},52 36 : { 'type' : 'b', 'count' : 4, 'names' : ['4s8']},53 37 : { 'type' : 'B', 'count' : 4, 'names' : ['4u8']},54 38 : { 'type' : 'h', 'count' : 4, 'names' : ['4s16']},55 39 : { 'type' : 'H', 'count' : 4, 'names' : ['4u16']},56 40 : { 'type' : 'i', 'count' : 4, 'names' : ['4s32', 'vs32']},57 41 : { 'type' : 'I', 'count' : 4, 'names' : ['4u32', 'vu32']},58 42 : { 'type' : 'q', 'count' : 4, 'names' : ['4s64']},59 43 : { 'type' : 'Q', 'count' : 4, 'names' : ['4u64']},60 44 : { 'type' : 'f', 'count' : 4, 'names' : ['4f', 'vf'], 'fromStr' : float, 'toStr' : writeFloat},61 45 : { 'type' : 'd', 'count' : 4, 'names' : ['4d'], 'fromStr' : float, 'toStr' : writeFloat},62 46 : { 'names' : ['attr']},63 #47 : { 'names' : ['array']}, # TODO: how does this work?64 48 : { 'type' : 'b', 'count' : 16, 'names' : ['vs8']},65 49 : 
{ 'type' : 'B', 'count' : 16, 'names' : ['vu8']},66 50 : { 'type' : 'h', 'count' : 8, 'names' : ['vs16']},67 51 : { 'type' : 'H', 'count' : 8, 'names' : ['vu16']},68 52 : { 'type' : 'b', 'count' : 1, 'names' : ['bool', 'b']},69 53 : { 'type' : 'b', 'count' : 2, 'names' : ['2b']},70 54 : { 'type' : 'b', 'count' : 3, 'names' : ['3b']},71 55 : { 'type' : 'b', 'count' : 4, 'names' : ['4b']},72 56 : { 'type' : 'b', 'count' : 16, 'names' : ['vb']}73}74# little less boilerplate for writing75for key, val in xml_formats.items():76 xml_formats[key]['name'] = xml_formats[key]['names'][0]77xml_types = {}78for key, val in xml_formats.items():79 for n in val['names']:80 xml_types[n] = key81xml_types['nodeStart'] = 182xml_types['nodeEnd'] = 190...


field.js

Source: field.js (GitHub)


1// export let field = new Map();2// field.set('生长迟缓','szch_count');3// field.set('偏瘦','ps_count');4// field.set('超重','cz_count');5// field.set('肥胖','fp_count');6// field.set('肺结核可疑症状者','fjhkyzzz_count');7// field.set('肺结核密切接触者','fjhmqjcz_count');8// field.set('轻度视力不良','slbl1_count');9// field.set('中度视力不良','slbl2_count');10// field.set('重度视力不良','slbl3_count');11// field.set('血压偏高','xypg_count');12// field.set('血压偏低','xypd_count');13// field.set('色觉异常','sjyc_count');14// field.set('沙眼','sy_count');15// field.set('近视','js_count');16// field.set('结膜炎','jmy_count');17// field.set('龋患','qh_count');18// field.set('龋失','qs_count');19// field.set('齿列','cl_count');20// field.set('龋补','qb_count');21// field.set('牙周','yz_count');22// field.set('淋巴结','lbj_count');23// field.set('头部','tb_count');24// field.set('颈部','jb_count');25// field.set('脊柱','jz_count');26// field.set('皮肤','pf_count');27// field.set('胸部','xb_count');28// field.set('四肢','sz_count');29// field.set('心脏杂音','xzzy_count');30// field.set('肝大','g_count');31// field.set('脾大','p_count');32// field.set('心率','xlu_count');33// field.set('心律','xl_count');34// field.set('肺部罗音','fbly_count');35// field.set('肝功能','ggn_count');36// field.set('血红蛋白','xhdb_count');37// field.set('胸部 X 线检查','xbx_count');38export default {39 "生长迟缓":'szch_count',40 "偏瘦":"ps_count",41 "超重":"cz_count",42 "肥胖": 'fp_count',43 "肺结核可疑症状者":"fjhkyzzz_count",44 "肺结核密切接触者":"fjhmqjcz_count",45 "轻度视力不良":"slbl1_count",46 "中度视力不良":"slbl2_count",47 "重度视力不良":"slbl3_count",48 "血压偏高":"xypg_count",49 "血压偏低":"xypd_count",50 "色觉异常":"sjyc_count",51 "沙眼":"sy_count",52 "近视":"js_count",53 "结膜炎":"jmy_count",54 "龋患":"qh_count",55 "龋失":"qs_count",56 "齿列":"cl_count",57 "龋补":"qb_count",58 "牙周":"yz_count",59 "淋巴结":"lbj_count",60 "头部":"tb_count",61 "颈部":"jb_count",62 "脊柱":"jz_count",63 "皮肤":"pf_count",64 "胸部":"xb_count",65 "四肢":"sz_count",66 "肝":"g_count",67 "脾":"p_count",68 "心":"xlu_count",69 "肺":"fbly_count",70 "肝功能":"ggn_count",71 "血红蛋白":"xhdb_count",72 "胸部X线检查":"xbx_count"...


CounterEvaluation.py

Source: CounterEvaluation.py (GitHub)


'''
Created on 09.01.2012
@author: christian.winkelmann@plista.com
test the maximum amount of counter increments per second
'''
import time
#from pycassa.types import UTF8Type, LongType
#from models.itemModel import itemModel
from contest.packages.models.itemModel import itemModel
from contest.config import config_global
from contest.config import config_local
import cql
import random

dbconn2 = cql.connect(config_local.cassandra_host, config_local.cassandra_port)
item_id = 1
cursor = dbconn2.cursor()
cursor.execute("USE plistaContest")
""" cursor.execute("
    CREATE COLUMN FAMILY CounterCF (KEY text PRIMARY KEY, count_me counter)
    WITH comparator = ascii AND default_validation = counter;
    ")
"""
#cursor.execute("INSERT INTO CounterCF (key) VALUES ('counter1');")
currentTime = time.time()
increments = 1000
for i in xrange(increments):
    #cursor.execute("UPDATE CounterCF USING CONSISTENCY ONE SET count_me = count_me + 2 WHERE key = 'counter1'")
    #cursor.execute("UPDATE CounterCF SET count_me = count_me + 2 WHERE key = 'counter2'")
    #cursor.execute("UPDATE CounterCF SET count_me = count_me + 2 WHERE key = 'counter3'")
    cursor.execute("""BEGIN BATCH USING CONSISTENCY ONE
        UPDATE CounterCF SET count_me = count_me + 2 WHERE key = 'counter1'
        UPDATE CounterCF SET count_me = count_me + 2 WHERE key = 'counter2'
        UPDATE CounterCF SET count_me = count_me + 2 WHERE key = 'counter3'
        UPDATE CounterCF SET count_me = count_me + 2 WHERE key = 'counter4'
        UPDATE CounterCF SET count_me = count_me + 2 WHERE key = 'counter5'
        UPDATE CounterCF SET count_me = count_me + 2 WHERE key = 'counter6'
        UPDATE CounterCF SET count_me = count_me + 2 WHERE key = 'counter7'
        UPDATE CounterCF SET count_me = count_me + 2 WHERE key = 'counter8'
        UPDATE CounterCF SET count_me = count_me + 2 WHERE key = 'counter9'
        UPDATE CounterCF SET count_me = count_me + 2 WHERE key = 'counter10'
        UPDATE CounterCF SET count_me = count_me + 2 WHERE key = 'counter11'
        UPDATE CounterCF SET count_me = count_me + 2 WHERE key = 'counter12'
        UPDATE CounterCF SET count_me = count_me + 2 WHERE key = 'counter13'
        UPDATE CounterCF SET count_me = count_me + 2 WHERE key = 'counter14'
        UPDATE CounterCF SET count_me = count_me + 2 WHERE key = 'counter15'
        UPDATE CounterCF SET count_me = count_me + 2 WHERE key = 'counter16'
        UPDATE CounterCF SET count_me = count_me + 2 WHERE key = 'counter17'
        UPDATE CounterCF SET count_me = count_me + 2 WHERE key = 'counter18'
        UPDATE CounterCF SET count_me = count_me + 2 WHERE key = 'counter19'
        UPDATE CounterCF SET count_me = count_me + 2 WHERE key = 'counter20'
        APPLY BATCH
        """)

cursor.execute("SELECT * FROM CounterCF WHERE KEY = 'counter1'")
print cursor.rowcount
print time.time() - currentTime
print "increments per second :\t" + str(increments / (time.time() - currentTime))
r = cursor.fetchone()
...


count.py

Source: count.py (GitHub)


dataset = "./%s" % "AAN"
file_cit = open(dataset+"/training_cit.txt", 'r')
file_cit_per_ref = open(dataset+"/citation_per_ref.txt", 'w')
file_count_cited = open(dataset+"/count_cited.txt", "w")
file_count_ref = open(dataset+"/count_ref.txt", 'w')
# file_cit=open(dataset+"/test.txt",'r')
# file_cit_per_ref = open(dataset+"/test_av.txt",'w')
# file_count_cited = open(dataset+"/test_count.txt","w")
citation = file_cit.readlines()
cnt_doc = len(citation)
cnt_be_cit = 0  # number of documents be cited
cnt_have_ref = 0  # number of docuements have ref
cnt_total = 0  # number of links
count_cited = [0]*cnt_doc
line = ['a']*cnt_doc
for i in range(cnt_doc):
    line[i] = citation[i]
    line[i] = line[i].strip()
    line[i] = line[i].split()
    for j in range(1, len(line[i])):
        count_cited[int(line[i][j])] += 1
for i in range(cnt_doc):
    cnt_total += count_cited[i]
    if count_cited[i] != 0:
        cnt_be_cit += 1
    file_count_cited.write(str(count_cited[i]))
    file_count_cited.write('\n')
print "Number of doucments which are cited at least one time %d" % (cnt_be_cit)
print "Average cited times per reference %f" % (float(cnt_total)/cnt_be_cit)
for i in range(cnt_doc):
    count_ref = len(line[i]) - 1
    if count_ref != 0:
        cnt_have_ref += 1
    file_count_ref.write(str(count_ref))
    file_count_ref.write('\n')

    count_total = 0
    for j in range(1, len(line[i])):
        count_total += count_cited[int(line[i][j])]
    if count_ref == 0:
        count_av = 0
    else:
        count_av = float(count_total)/count_ref
    file_cit_per_ref.write(str(count_av))
    file_cit_per_ref.write('\n')
print "Number of doucments which have at least one ref %d" % (cnt_have_ref)
file_cit_per_ref.close()
file_count_cited.close()
...


Using AI Code Generation


const { chromium } = require('playwright');
(async () => {
  const browser = await chromium.launch();
  const context = await browser.newContext();
  const page = await context.newPage();
  await page.type('input[name="q"]', 'Playwright');
  await page.click('input[name="btnK"]');
  await page.waitForSelector('text=Playwright - Google Search');
  // page.$$() returns an array of element handles; its .length gives the number of matches
  console.log(await page.$$('text=Playwright'));
  await browser.close();
})();


Using AI Code Generation


const { chromium } = require('playwright');
(async () => {
  const browser = await chromium.launch();
  const context = await browser.newContext();
  const page = await context.newPage();
  // page.count() is not part of the Playwright API; count matches through a locator instead
  const count = await page.locator('css=div').count();
  console.log(count);
  await browser.close();
})();
- [Playwright](


Using AI Code Generation


const { chromium } = require('playwright');
(async () => {
  const browser = await chromium.launch({ headless: false });
  const context = await browser.newContext();
  const page = await context.newPage();
  const input = await page.$('input[name="q"]');
  await input.type('playwright');
  await input.press('Enter');
  await page.waitForSelector('text="Playwright: Node.js library to automate Chromium, Firefox and WebKit with a single API"');
  const results = await page.$$('div.g');
  console.log(results.length);
  await browser.close();
})();
module.exports = {
};
test('adds 1 + 2 to equal 3', () => {
  expect(1 + 2).toBe(3);
});


Using AI Code Generation


const { chromium } = require('playwright');
(async () => {
  const browser = await chromium.launch({ headless: false });
  const page = await browser.newPage();
  await page.type('input[aria-label="Search"]', 'Playwright');
  const searchButton = await page.$('input[aria-label="Google Search"]');
  await searchButton.click();
  await page.waitForSelector('text=Playwright: Node.js library to automate');
  // note: 'text=' is a Playwright selector engine, not CSS, so document.querySelectorAll will not match it
  const count = await page.evaluate(() => {
    return document.querySelectorAll('text=Playwright: Node.js library to automate').length;
  });
  console.log(count);
  await browser.close();
})();
const { chromium } = require('playwright');
(async () => {
  const browser = await chromium.launch({ headless: false });
  const page = await browser.newPage();
  await page.type('input[aria-label="Search"]', 'Playwright');
  const searchButton = await page.$('input[aria-label="Google Search"]');
  await searchButton.click();
  await page.waitForSelector('text=Playwright: Node.js library to automate');
  const count = await page.evaluateHandle(() => {
    return document.querySelectorAll('text=Playwright: Node.js library to automate');
  });
  console.log(count);
  await browser.close();
})();
const { chromium } = require('playwright');
(async () => {
  const browser = await chromium.launch({ headless: false });
  const page = await browser.newPage();
  await page.type('input[aria-label="Search"]', 'Playwright');
  const searchButton = await page.$('input[aria-label="Google Search"]');
  await searchButton.click();
  await page.waitForSelector('text=Playwright: Node.js library to automate');
  const count = await page.$eval('text=Playwright: Node.js library to automate', (element
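A more direct way to count such matches, assuming a recent Playwright version, is to let Playwright resolve the selector itself instead of passing a 'text=' selector to document.querySelectorAll (which only understands CSS). A minimal sketch; the URL and the searched text are placeholders:

const { chromium } = require('playwright');
(async () => {
  const browser = await chromium.launch();
  const page = await browser.newPage();
  // placeholder URL; point this at the page you actually want to inspect
  await page.goto('https://playwright.dev');
  // getByText() builds a locator, and count() returns the number of current matches
  const count = await page.getByText('Playwright').count();
  console.log(count);
  await browser.close();
})();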


Using AI Code Generation


const { chromium } = require('playwright');
(async () => {
  const browser = await chromium.launch();
  const page = await browser.newPage();
  // page.count() is not a Playwright API; use a locator to count matches
  const count = await page.locator('text=Sign in').count();
  console.log(count);
  await browser.close();
})();
const { chromium } = require('playwright');
(async () => {
  const browser = await chromium.launch();
  const page = await browser.newPage();
  const signInButton = await page.waitForSelector('text=Sign in');
  await signInButton.click();
  await browser.close();
})();
const { chromium } = require('playwright');
(async () => {
  const browser = await chromium.launch();
  const page = await browser.newPage();
  await page.waitForLoadState('networkidle');
  await browser.close();
})();
const { chromium } = require('playwright');
(async () => {
  const browser = await chromium.launch();
  const page = await browser.newPage();
  await browser.close();
})();


Using AI Code Generation


const { chromium } = require('playwright');
(async () => {
  const browser = await chromium.launch();
  const page = await browser.newPage();
  const count = await page.evaluate(() => window.count);
  console.log(count);
  await browser.close();
})();
- [Playwright Autocomplete Example](


Using AI Code Generation


const { chromium } = require('playwright');
(async () => {
  const browser = await chromium.launch({ headless: false, slowMo: 50 });
  const page = await browser.newPage();
  await page.screenshot({ path: `example.png` });
  const count = await page.$$eval('text=Get started', elements => elements.length);
  console.log(count);
  await browser.close();
})();
const { chromium } = require('playwright');
(async () => {
  const browser = await chromium.launch({ headless: false, slowMo: 50 });
  const page = await browser.newPage();
  await page.screenshot({ path: `example.png` });
  const count = await page.$$eval('text=Get started', elements => elements.length);
  console.log(count);
  await browser.close();
})();
const { chromium } = require('playwright');
(async () => {
  const browser = await chromium.launch({ headless: false, slowMo: 50 });
  const page = await browser.newPage();
  await page.screenshot({ path: `example.png` });
  const count = await page.$$eval('text=Get started', elements => elements.length);
  console.log(count);
  await browser.close();
})();
const { chromium } = require('playwright');
(async () => {
  const browser = await chromium.launch({ headless: false, slowMo: 50 });
  const page = await browser.newPage();
  await page.screenshot({ path: `example.png` });
  const count = await page.$$eval('text=Get started', elements => elements.length);
  console.log(count);
  await browser.close();
})();


Using AI Code Generation


const playwright = require("playwright");
const { chromium } = playwright;
(async () => {
  const browser = await chromium.launch();
  const page = await browser.newPage();
  // page.count() is not a Playwright API; count matches through a locator instead
  console.log(await page.locator("text=Get started").count());
  await browser.close();
})();
**[⬆ back to top](#table-of-contents)**
const playwright = require("playwright");
const { chromium } = playwright;
(async () => {
  const browser = await chromium.launch();
  const page = await browser.newPage();
  console.log(await page.url());
  await browser.close();
})();
**[⬆ back to top](#table-of-contents)**
const playwright = require("playwright");
const { chromium } = playwright;
(async () => {
  const browser = await chromium.launch();
  const page = await browser.newPage();
  console.log(await page.url());
  await browser.close();
})();
**[⬆ back to top](#table-of-contents)**
const playwright = require("playwright");
const { chromium } = playwright;
(async () => {
  const browser = await chromium.launch();
  const page = await browser.newPage();
  console.log(await page.url());
  await browser.close();
})();
**[⬆ back to top](#table-of-contents)**
const playwright = require("playwright");
const { chromium } = playwright;
(async () => {
  const browser = await chromium.launch();
  const page = await browser.newPage();
  console.log(await page.url());
  await browser.close();
})();
**[⬆ back to top](#


Using AI Code Generation


const { test, expect } = require('@playwright/test');
test('Count Method', async ({ page }) => {
  const numberOfLinks = await page.$$eval('text=Get started', (links) =>
    links.length
  );
  expect(numberOfLinks).toBe(2);
});
const { test, expect } = require('@playwright/test');
test('Count Method', async ({ page }) => {
  const numberOfLinks = await page.$$eval('text=Get started', (links) =>
    links.length
  );
  expect(numberOfLinks).toBe(2);
});
const { test, expect } = require('@playwright/test');
test('Count Method', async ({ page }) => {
  const numberOfLinks = await page.$$eval('text=Get started', (links) =>
    links.length
  );
  expect(numberOfLinks).toBe(2);
});
const { test, expect } = require('@playwright/test');
test('Count Method', async ({ page }) => {
  const numberOfLinks = await page.$$eval('text=Get started', (links) =>
    links.length
  );
  expect(numberOfLinks).toBe(2);
});
const { test, expect } = require('@playwright/test');
test('Count Method', async ({ page }) => {
  const numberOfLinks = await page.$$eval('text=Get started', (links) =>
    links.length
  );
  expect(numberOfLinks).toBe(2);
});
const { test, expect } = require('@playwright/test');
test('Count Method', async ({ page }) => {
  const numberOfLinks = await page.$$eval('text=Get started', (links) =>
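When using @playwright/test, the more idiomatic way to assert on a count is the web-first assertion expect(locator).toHaveCount(), which retries until the locator resolves to the expected number of elements. A minimal sketch; the URL, selector, and expected count are illustrative:

const { test, expect } = require('@playwright/test');
test('counts "Get started" links', async ({ page }) => {
  // placeholder URL; navigate to the page under test
  await page.goto('https://playwright.dev');
  // toHaveCount() polls until exactly this many elements match (or the timeout is reached)
  await expect(page.locator('text=Get started')).toHaveCount(1);
});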


Playwright tutorial

LambdaTest’s Playwright tutorial will give you a broader idea of the Playwright automation framework, its unique features, and its use cases, with examples to deepen your understanding of Playwright testing. The tutorial offers A-to-Z guidance, from installing the Playwright framework to best practices and advanced concepts.

Chapters:

  1. What is Playwright: Playwright is comparatively new but has gained considerable popularity. Get to know some of Playwright's history, along with a few interesting facts about it.
  2. How To Install Playwright: Learn which basic configuration and dependencies are required to install Playwright and run a test, with step-by-step directions for installing the Playwright automation framework.
  3. Playwright Futuristic Features: Launched in 2020, Playwright quickly gained huge popularity because of compelling features such as the Playwright Test Generator and Inspector, the Playwright Reporter, and the Playwright auto-waiting mechanism. Read up on those features to master Playwright testing.
  4. What is Component Testing: Component testing in Playwright is a unique feature that allows a tester to test a single component of a web application without integrating it with other elements. Learn how to perform component testing with the Playwright automation framework.
  5. Inputs And Buttons In Playwright: Every website has input boxes and buttons; learn how to test them across different scenarios, with examples.
  6. Functions and Selectors in Playwright: Learn how to launch the Chromium browser with Playwright, and gain a better understanding of important functions like “BrowserContext,” which lets you run multiple isolated browser sessions, and “newPage,” which creates a page to interact with (see the sketch after this list).
  7. Handling Alerts and Dropdowns in Playwright: Playwright interacts with different types of alerts and pop-ups (simple, confirmation, and prompt) and different types of dropdowns (single-select and multi-select). Get hands-on with handling alerts and dropdowns in Playwright testing.
  8. Playwright vs Puppeteer: Get to know the differences between the two frameworks, which browsers they support, and what features they provide.
  9. Run Playwright Tests on LambdaTest: Playwright testing with LambdaTest maximizes test performance. You can run multiple Playwright tests in parallel on the LambdaTest test cloud. Get a step-by-step guide to running your Playwright tests on the LambdaTest platform.
  10. Playwright Python Tutorial: The Playwright automation framework supports all major languages, including Python, JavaScript, TypeScript, and .NET. Python end-to-end testing with Playwright has particular advantages because of Python's versatility. Get the hang of Playwright Python testing with this chapter.
  11. Playwright End To End Testing Tutorial: Get hands-on with Playwright end-to-end testing and learn to use features such as Trace Viewer, debugging, networking, component testing, visual testing, and more.
  12. Playwright Video Tutorial: Watch video tutorials on Playwright testing from experts and get a sequential, in-depth explanation of Playwright automation testing.
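As a rough illustration of the BrowserContext and newPage functions mentioned in chapter 6, the sketch below opens two isolated sessions in one browser; the URL is a placeholder:

const { chromium } = require('playwright');
(async () => {
  const browser = await chromium.launch();
  // each BrowserContext behaves like a separate, isolated browser profile
  const sessionA = await browser.newContext();
  const sessionB = await browser.newContext();
  // newPage() opens a tab inside a context that the script can interact with
  const pageA = await sessionA.newPage();
  const pageB = await sessionB.newPage();
  await pageA.goto('https://example.com');
  await pageB.goto('https://example.com');
  console.log(await pageA.title(), await pageB.title());
  await browser.close();
})();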

Run Playwright Internal automation tests on the LambdaTest cloud grid

Perform automation testing on 3000+ real desktop and mobile devices online.

Try LambdaTest Now!

Get 100 automation test minutes FREE!

Next-Gen App & Browser Testing Cloud
