Best Python code snippet using robotframework
test_returning.py
Source: test_returning.py
...
        table.drop()

    def test_column_targeting(self):
        result = (
            table.insert()
            .returning(table.c.id, table.c.full)
            .execute({"persons": 1, "full": False})
        )
        row = result.first()
        assert row[table.c.id] == row["id"] == 1
        assert row[table.c.full] == row["full"]
        assert row["full"] is False

        result = (
            table.insert()
            .values(persons=5, full=True, goofy="somegoofy")
            .returning(table.c.persons, table.c.full, table.c.goofy)
            .execute()
        )
        row = result.first()
        assert row[table.c.persons] == row["persons"] == 5
        assert row[table.c.full] == row["full"]
        eq_(row[table.c.goofy], row["goofy"])
        eq_(row["goofy"], "FOOsomegoofyBAR")

    @testing.fails_on("firebird", "fb can't handle returning x AS y")
    def test_labeling(self):
        result = (
            table.insert()
            .values(persons=6)
            .returning(table.c.persons.label("lala"))
            .execute()
        )
        row = result.first()
        assert row["lala"] == 6

    @testing.fails_on(
        "firebird", "fb/kintersbasdb can't handle the bind params"
    )
    @testing.fails_on("oracle+zxjdbc", "JDBC driver bug")
    def test_anon_expressions(self):
        result = (
            table.insert()
            .values(goofy="someOTHERgoofy")
            .returning(func.lower(table.c.goofy, type_=GoofyType))
            .execute()
        )
        row = result.first()
        eq_(row[0], "foosomeothergoofyBAR")

        result = (
            table.insert()
            .values(persons=12)
            .returning(table.c.persons + 18)
            .execute()
        )
        row = result.first()
        eq_(row[0], 30)

    def test_update_returning(self):
        table.insert().execute(
            [{"persons": 5, "full": False}, {"persons": 3, "full": False}]
        )
        result = (
            table.update(table.c.persons > 4, dict(full=True))
            .returning(table.c.id)
            .execute()
        )
        eq_(result.fetchall(), [(1,)])

        result2 = (
            select([table.c.id, table.c.full]).order_by(table.c.id).execute()
        )
        eq_(result2.fetchall(), [(1, True), (2, False)])

    def test_insert_returning(self):
        result = (
            table.insert()
            .returning(table.c.id)
            .execute({"persons": 1, "full": False})
        )
        eq_(result.fetchall(), [(1,)])

    @testing.requires.multivalues_inserts
    def test_multirow_returning(self):
        ins = (
            table.insert()
            .returning(table.c.id, table.c.persons)
            .values(
                [
                    {"persons": 1, "full": False},
                    {"persons": 2, "full": True},
                    {"persons": 3, "full": False},
                ]
            )
        )
        result = testing.db.execute(ins)
        eq_(result.fetchall(), [(1, 1), (2, 2), (3, 3)])

    def test_no_ipk_on_returning(self):
        result = testing.db.execute(
            table.insert().returning(table.c.id), {"persons": 1, "full": False}
        )
        assert_raises_message(
            sa_exc.InvalidRequestError,
            r"Can't call inserted_primary_key when returning\(\) is used.",
            getattr,
            result,
            "inserted_primary_key",
        )

    @testing.fails_on_everything_except("postgresql", "firebird")
    def test_literal_returning(self):
        if testing.against("postgresql"):
            literal_true = "true"
        else:
            literal_true = "1"

        result4 = testing.db.execute(
            'insert into tables (id, persons, "full") '
            "values (5, 10, %s) returning persons" % literal_true
        )
        eq_([dict(row) for row in result4], [{"persons": 10}])

    def test_delete_returning(self):
        table.insert().execute(
            [{"persons": 5, "full": False}, {"persons": 3, "full": False}]
        )
        result = (
            table.delete(table.c.persons > 4).returning(table.c.id).execute()
        )
        eq_(result.fetchall(), [(1,)])

        result2 = (
            select([table.c.id, table.c.full]).order_by(table.c.id).execute()
        )
        eq_(result2.fetchall(), [(2, False)])


class CompositeStatementTest(fixtures.TestBase):
    __requires__ = ("returning",)
    __backend__ = True

    @testing.provide_metadata
    def test_select_doesnt_pollute_result(self):
        class MyType(TypeDecorator):
            impl = Integer

            def process_result_value(self, value, dialect):
                raise Exception("I have not been selected")

        t1 = Table("t1", self.metadata, Column("x", MyType()))
        t2 = Table("t2", self.metadata, Column("x", Integer))

        self.metadata.create_all(testing.db)
        with testing.db.connect() as conn:
            conn.execute(t1.insert().values(x=5))
            stmt = (
                t2.insert()
                .values(x=select([t1.c.x]).as_scalar())
                .returning(t2.c.x)
            )
            result = conn.execute(stmt)
            eq_(result.scalar(), 5)


class SequenceReturningTest(fixtures.TestBase):
    __requires__ = "returning", "sequences"
    __backend__ = True

    def setup(self):
        meta = MetaData(testing.db)
        global table, seq
        seq = Sequence("tid_seq")
        table = Table(
            "tables",
            meta,
            Column("id", Integer, seq, primary_key=True),
            Column("data", String(50)),
        )
        table.create(checkfirst=True)

    def teardown(self):
        table.drop()

    def test_insert(self):
        r = table.insert().values(data="hi").returning(table.c.id).execute()
        assert r.first() == (1,)
        assert seq.execute() == 2


class KeyReturningTest(fixtures.TestBase, AssertsExecutionResults):
    """test returning() works with columns that define 'key'."""

    __requires__ = ("returning",)
    __backend__ = True

    def setup(self):
        meta = MetaData(testing.db)
        global table
        table = Table(
            "tables",
            meta,
            Column(
                "id",
                Integer,
                primary_key=True,
                key="foo_id",
                test_needs_autoincrement=True,
            ),
            Column("data", String(20)),
        )
        table.create(checkfirst=True)

    def teardown(self):
        table.drop()

    @testing.exclude("firebird", "<", (2, 0), "2.0+ feature")
    @testing.exclude("postgresql", "<", (8, 2), "8.2+ feature")
    def test_insert(self):
        result = (
            table.insert().returning(table.c.foo_id).execute(data="somedata")
        )
        row = result.first()
        assert row[table.c.foo_id] == row["id"] == 1

        result = table.select().execute().first()
        assert row[table.c.foo_id] == row["id"] == 1


class ReturnDefaultsTest(fixtures.TablesTest):
    __requires__ = ("returning",)
    run_define_tables = "each"
    __backend__ = True

    @classmethod
    def define_tables(cls, metadata):
        from sqlalchemy.sql import ColumnElement
        from sqlalchemy.ext.compiler import compiles

        counter = itertools.count()

        class IncDefault(ColumnElement):
            pass

        @compiles(IncDefault)
        def compile_(element, compiler, **kw):
            return str(next(counter))

        Table(
            "t1",
            metadata,
            Column(
                "id", Integer, primary_key=True, test_needs_autoincrement=True
            ),
            Column("data", String(50)),
            Column("insdef", Integer, default=IncDefault()),
            Column("upddef", Integer, onupdate=IncDefault()),
        )

    def test_chained_insert_pk(self):
        t1 = self.tables.t1
        result = testing.db.execute(
            t1.insert().values(upddef=1).return_defaults(t1.c.insdef)
        )
        eq_(
            [result.returned_defaults[k] for k in (t1.c.id, t1.c.insdef)],
            [1, 0],
        )

    def test_arg_insert_pk(self):
        t1 = self.tables.t1
        result = testing.db.execute(
            t1.insert(return_defaults=[t1.c.insdef]).values(upddef=1)
        )
        eq_(
            [result.returned_defaults[k] for k in (t1.c.id, t1.c.insdef)],
            [1, 0],
        )

    def test_chained_update_pk(self):
        t1 = self.tables.t1
        testing.db.execute(t1.insert().values(upddef=1))
        result = testing.db.execute(
            t1.update().values(data="d1").return_defaults(t1.c.upddef)
        )
        eq_([result.returned_defaults[k] for k in (t1.c.upddef,)], [1])

    def test_arg_update_pk(self):
        t1 = self.tables.t1
        testing.db.execute(t1.insert().values(upddef=1))
        result = testing.db.execute(
            t1.update(return_defaults=[t1.c.upddef]).values(data="d1")
        )
        eq_([result.returned_defaults[k] for k in (t1.c.upddef,)], [1])

    def test_insert_non_default(self):
        """test that a column not marked at all as a
        default works with this feature."""

        t1 = self.tables.t1
        result = testing.db.execute(
            t1.insert().values(upddef=1).return_defaults(t1.c.data)
        )
        eq_(
            [result.returned_defaults[k] for k in (t1.c.id, t1.c.data)],
            [1, None],
        )

    def test_update_non_default(self):
        """test that a column not marked at all as a
        default works with this feature."""

        t1 = self.tables.t1
        testing.db.execute(t1.insert().values(upddef=1))
        result = testing.db.execute(
            t1.update().values(upddef=2).return_defaults(t1.c.data)
        )
        eq_([result.returned_defaults[k] for k in (t1.c.data,)], [None])

    def test_insert_non_default_plus_default(self):
        t1 = self.tables.t1
        result = testing.db.execute(
            t1.insert()
            .values(upddef=1)
            .return_defaults(t1.c.data, t1.c.insdef)
        )
        eq_(
            dict(result.returned_defaults),
            {"id": 1, "data": None, "insdef": 0},
        )

    def test_update_non_default_plus_default(self):
        t1 = self.tables.t1
        testing.db.execute(t1.insert().values(upddef=1))
        result = testing.db.execute(
            t1.update()
            .values(insdef=2)
            .return_defaults(t1.c.data, t1.c.upddef)
        )
        eq_(dict(result.returned_defaults), {"data": None, "upddef": 1})

    def test_insert_all(self):
        t1 = self.tables.t1
        result = testing.db.execute(
            t1.insert().values(upddef=1).return_defaults()
        )
        eq_(
            dict(result.returned_defaults),
            {"id": 1, "data": None, "insdef": 0},
        )

    def test_update_all(self):
        t1 = self.tables.t1
        testing.db.execute(t1.insert().values(upddef=1))
        result = testing.db.execute(
            t1.update().values(insdef=2).return_defaults()
        )
        eq_(dict(result.returned_defaults), {"upddef": 1})


class ImplicitReturningFlag(fixtures.TestBase):
    __backend__ = True

    def test_flag_turned_off(self):
        e = engines.testing_engine(options={"implicit_returning": False})
        assert e.dialect.implicit_returning is False
        c = e.connect()
        c.close()
        assert e.dialect.implicit_returning is False

    def test_flag_turned_on(self):
        e = engines.testing_engine(options={"implicit_returning": True})
        assert e.dialect.implicit_returning is True
        c = e.connect()
        c.close()
        assert e.dialect.implicit_returning is True

    def test_flag_turned_default(self):
        supports = [False]

        def go():
            supports[0] = True

        testing.requires.returning(go)()
        e = engines.testing_engine()

        # starts as False.  This is because all of Firebird,
        # PostgreSQL, Oracle, SQL Server started supporting RETURNING
        # as of a certain version, and the flag is not set until
        # version detection occurs.  If some DB comes along that has
        # RETURNING in all cases, this test can be adjusted.
        assert e.dialect.implicit_returning is False

        # version detection on connect sets it
        c = e.connect()
        c.close()
...
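The suite above drives RETURNING through SQLAlchemy's legacy implicit-execution API (table.insert().execute(...)). For orientation, here is a minimal sketch of the same insert().returning() round trip written against an explicit connection; the PostgreSQL DSN and the "people" table are hypothetical stand-ins, and the backend must actually support RETURNING:

from sqlalchemy import (
    Column, Integer, MetaData, String, Table, create_engine,
)

# Hypothetical DSN; RETURNING needs a backend such as PostgreSQL.
engine = create_engine("postgresql://user:pass@localhost/demo")
metadata = MetaData()
people = Table(
    "people", metadata,
    Column("id", Integer, primary_key=True),
    Column("name", String(50)),
)
metadata.create_all(engine)

with engine.connect() as conn:
    # RETURNING hands back the generated id in the same round trip,
    # so no separate SELECT or lastrowid lookup is needed.
    result = conn.execute(
        people.insert().returning(people.c.id, people.c.name),
        {"name": "alice"},
    )
    print(result.fetchall())  # e.g. [(1, 'alice')]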
study_definition.py
Source: study_definition.py
from cohortextractor import (
    StudyDefinition,
    patients,
    codelist_from_csv,
    codelist,
    filter_codes_by_category,
    combine_codelists,
)
from codelists import *

study = StudyDefinition(
    # Configure the expectations framework
    default_expectations={
        "date": {"earliest": "1900-01-01", "latest": "today"},
        "rate": "exponential_increase",
    },
    # This line defines the study population
    population=patients.registered_with_one_practice_between(
        "2019-02-01", "2020-12-31"
    ),
    # The rest of the lines define the covariates with associated GitHub issues
    # https://github.com/ebmdatalab/tpp-sql-notebook/issues/33
    age=patients.age_as_of(
        "2020-10-01",
        return_expectations={
            "rate": "universal",
            "int": {"distribution": "population_ages"},
        },
    ),
    # https://github.com/ebmdatalab/tpp-sql-notebook/issues/46
    sex=patients.sex(
        return_expectations={
            "rate": "universal",
            "category": {"ratios": {"M": 0.49, "F": 0.51}},
        }
    ),
    care_home_type=patients.care_home_status_as_of(
        "2020-10-01",
        categorised_as={
            "PC": """
              IsPotentialCareHome
              AND LocationDoesNotRequireNursing='Y'
              AND LocationRequiresNursing='N'
            """,
            "PN": """
              IsPotentialCareHome
              AND LocationDoesNotRequireNursing='N'
              AND LocationRequiresNursing='Y'
            """,
            "PS": "IsPotentialCareHome",
            "U": "DEFAULT",
        },
        return_expectations={
            "rate": "universal",
            "category": {
                "ratios": {
                    "PC": 0.05,
                    "PN": 0.05,
                    "PS": 0.05,
                    "U": 0.85,
                },
            },
        },
    ),
    # Ethnicity in 6 categories
    ethnicity=patients.with_these_clinical_events(
        ethnicity_codes,
        returning="category",
        find_last_match_in_period=True,
        include_date_of_match=False,
        return_expectations={
            "category": {
                "ratios": {"1": 0.2, "2": 0.2, "3": 0.2, "4": 0.2, "5": 0.2}
            },
            "incidence": 0.75,
        },
    ),
    # https://github.com/ebmdatalab/tpp-sql-notebook/issues/54
    stp=patients.registered_practice_as_of(
        "2020-10-01",
        returning="stp_code",
        return_expectations={
            "rate": "universal",
            "category": {"ratios": {"STP1": 0.5, "STP2": 0.5}},
        },
    ),
    msoa=patients.registered_practice_as_of(
        "2020-10-01",
        returning="msoa_code",
        return_expectations={
            "rate": "universal",
            "category": {"ratios": {"MSOA1": 0.5, "MSOA2": 0.5}},
        },
    ),
    # https://github.com/ebmdatalab/tpp-sql-notebook/issues/52
    imd=patients.address_as_of(
        "2020-10-01",
        returning="index_of_multiple_deprivation",
        round_to_nearest=100,
        return_expectations={
            "rate": "universal",
            "category": {"ratios": {"100": 0.1, "200": 0.2, "300": 0.7}},
        },
    ),
    rural_urban=patients.address_as_of(
        "2020-10-01",
        returning="rural_urban_classification",
        return_expectations={
            "rate": "universal",
            "category": {"ratios": {"rural": 0.1, "urban": 0.9}},
        },
    ),
    ####### HIGH RISK CODELISTS #######
    # https://github.com/opensafely/codelist-development/issues/9
    solid_organ_transplantation=patients.with_these_clinical_events(
        solid_organ_transplantation_codes,
        returning="binary_flag",
        return_expectations={
            "incidence": 0.01,
        },
    ),
    # https://github.com/opensafely/codelist-development/issues/10
    chemo_or_radio=patients.with_these_clinical_events(
        chemotherapy_or_radiotherapy_codes,
        returning="binary_flag",
        return_expectations={
            "incidence": 0.01,
        },
    ),
    # https://github.com/opensafely/codelist-development/issues/10
    lung_cancer=patients.with_these_clinical_events(
        lung_cancer_codes,
        returning="binary_flag",
        return_expectations={
            "incidence": 0.01,
        },
    ),
    # https://github.com/opensafely/codelist-development/issues/10
    cancer_excl_lung_and_haem=patients.with_these_clinical_events(
        cancer_excluding_lung_and_haematological_codes,
        returning="binary_flag",
        return_expectations={
            "incidence": 0.01,
        },
    ),
    # https://github.com/opensafely/codelist-development/issues/10
    haematological_cancer=patients.with_these_clinical_events(
        haematological_cancer_codes,
        returning="binary_flag",
        return_expectations={
            "incidence": 0.01,
        },
    ),
    # In last 6 months
    # https://github.com/opensafely/codelist-development/issues/10
    bone_marrow_transplant=patients.with_these_clinical_events(
        bone_marrow_transplant_codes,
        between=["2020-07-01", "2020-12-31"],
        returning="binary_flag",
        return_expectations={
            "incidence": 0.01,
        },
    ),
    # https://github.com/opensafely/codelist-development/issues/30
    cystic_fibrosis=patients.with_these_clinical_events(
        cystic_fibrosis_codes,
        returning="binary_flag",
        return_expectations={
            "incidence": 0.01,
        },
    ),
    # https://github.com/opensafely/codelist-development/issues/15
    # Severe Asthma - NOT DEFINED YET
    severe_asthma=patients.with_these_clinical_events(
        asthma_diagnosis_codes,
        returning="binary_flag",
        return_expectations={
            "incidence": 0.01,
        },
    ),
    # https://github.com/opensafely/ics-research/issues/12
    current_copd=patients.with_these_clinical_events(
        current_copd_codes,
        returning="binary_flag",
        return_expectations={
            "incidence": 0.01,
        },
    ),
    # https://github.com/opensafely/codelist-development/issues/4
    sickle_cell_disease=patients.with_these_clinical_events(
        sickle_cell_disease_codes,
        returning="binary_flag",
        return_expectations={
            "incidence": 0.01,
        },
    ),
    # https://github.com/opensafely/codelist-development/issues/11
    permanant_immunosuppression=patients.with_these_clinical_events(
        permanent_immunosuppression_codes,
        returning="binary_flag",
        return_expectations={
            "incidence": 0.01,
        },
    ),
    # https://github.com/opensafely/codelist-development/issues/11
    temporary_immunosuppression=patients.with_these_clinical_events(
        temporary_immunosuppression_codes,
        returning="binary_flag",
        return_expectations={
            "incidence": 0.01,
        },
    ),
    # https://github.com/opensafely/hydroxychloroquine-research/issues/2
    dmards=patients.with_these_medications(
        dmards_codes,
        returning="binary_flag",
        return_expectations={
            "incidence": 0.01,
        },
    ),
    # https://github.com/opensafely/codelist-development/issues/2
    chronic_cardiac_disease=patients.with_these_clinical_events(
        chronic_cardiac_disease_codes,
        returning="binary_flag",
        return_expectations={
            "incidence": 0.01,
        },
    ),
    # https://github.com/opensafely/codelist-development/issues/29
    intel_dis_incl_downs_syndrome=patients.with_these_clinical_events(
        intellectual_disability_including_downs_syndrome_codes,
        returning="binary_flag",
        return_expectations={
            "incidence": 0.01,
        },
    ),
    # https://github.com/opensafely/codelist-development/issues/28
    dialysis=patients.with_these_clinical_events(
        dialysis_codes,
        returning="binary_flag",
        return_expectations={
            "incidence": 0.01,
        },
    ),
    ####### MODERATE RISK CODELISTS #######
    # non-severe asthma - NOT DEFINED YET
    # https://github.com/opensafely/codelist-development/issues/15
    non_severe_asthma=patients.with_these_clinical_events(
        asthma_diagnosis_codes,
        returning="binary_flag",
        return_expectations={
            "incidence": 0.01,
        },
    ),
    # https://github.com/opensafely/ics-research/issues/10
    other_respiratory_conditions=patients.with_these_clinical_events(
        other_respiratory_conditions_codes,
        returning="binary_flag",
        return_expectations={
            "incidence": 0.01,
        },
    ),
    # https://github.com/ebmdatalab/tpp-sql-notebook/issues/80
    heart_failure=patients.with_these_clinical_events(
        heart_failure_codes,
        returning="binary_flag",
        return_expectations={
            "incidence": 0.01,
        },
    ),
    # https://github.com/ebmdatalab/tpp-sql-notebook/issues/83
    other_heart_disease=patients.with_these_clinical_events(
        other_heart_disease_codes,
        returning="binary_flag",
        return_expectations={
            "incidence": 0.01,
        },
    ),
    # https://github.com/opensafely/codelist-development/issues/8
    diabetes=patients.with_these_clinical_events(
        diabetes_codes,
        returning="binary_flag",
        return_expectations={
            "incidence": 0.01,
        },
    ),
    # https://github.com/opensafely/risk-factors-research/issues/50
    chronic_kidney_disease=patients.with_these_clinical_events(
        chronic_kidney_disease_codes,
        returning="binary_flag",
        return_expectations={
            "incidence": 0.01,
        },
    ),
    # https://github.com/ebmdatalab/tpp-sql-notebook/issues/12
    chronic_liver_disease=patients.with_these_clinical_events(
        chronic_liver_disease_codes,
        returning="binary_flag",
        return_expectations={
            "incidence": 0.01,
        },
    ),
    # https://github.com/opensafely/codelist-development/issues/5
    other_neuro=patients.with_these_clinical_events(
        other_neuro_codes,
        returning="binary_flag",
        return_expectations={
            "incidence": 0.01,
        },
    ),
    # https://github.com/opensafely/codelist-development/issues/5
    #dementia=patients.with_these_clinical_events(
    #    dementia_codes,
    #    returning="binary_flag",
    #    return_expectations={
    #        "incidence": 0.01,
    #    },
    #),
    # https://github.com/opensafely/codelist-development/issues/20
    #stroke=patients.with_these_clinical_events(
    #    stroke_codes,
    #    returning="binary_flag",
    #    return_expectations={
    #        "incidence": 0.01,
    #    },
    #),
    # https://github.com/ebmdatalab/tpp-sql-notebook/issues/10
    bmi=patients.most_recent_bmi(
        on_or_after="2020-02-01",
        minimum_age_at_measurement=16,
        include_measurement_date=False,
        include_month=True,
        return_expectations={
            "incidence": 0.9,
            "float": {"distribution": "normal", "mean": 28, "stddev": 10},
        },
    ),
    # https://codelists.opensafely.org/codelist/opensafely/asthma-inhaler-salbutamol-medication/2020-04-15/
    recent_salbutamol_count=patients.with_these_medications(
        salbutamol_codes,
        returning="binary_flag",
        return_expectations={
            "incidence": 0.01,
        },
    ),
    #
    psychosis_schiz_bipolar=patients.with_these_clinical_events(
        psychosis_schizophrenia_bipolar_affective_disease_codes,
        returning="binary_flag",
        return_expectations={
            "incidence": 0.01,
        },
    ),
    # https://github.com/opensafely/codelist-development/issues/4
    asplenia=patients.with_these_clinical_events(
        asplenia_codes,
        returning="binary_flag",
        return_expectations={
            "incidence": 0.01,
        },
    ),

    covid_vaccine_tpp_table=patients.with_tpp_vaccination_record(
        target_disease_matches="SARS-2 CORONAVIRUS",
        on_or_after="2020-12-01",
        find_first_match_in_period=True,
        returning="date",
        date_format="YYYY-MM",
        return_expectations={
            "incidence": 0.01,
            "date": {
                "earliest": "2020-12-08",  # first vaccine administered on the 8/12
                "latest": "2020-12-14",
            }
        },
    ),
...
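Every covariate in this study definition repeats one pattern: a patients.* query plus a return_expectations block that drives dummy-data generation. A stripped-down sketch of that pattern, built only from the population and age definitions already present above (it assumes the cohortextractor package is installed):

from cohortextractor import StudyDefinition, patients

# Minimal sketch: one population plus one covariate, following the
# pattern used throughout the full study definition above.
study = StudyDefinition(
    default_expectations={
        "date": {"earliest": "1900-01-01", "latest": "today"},
        "rate": "exponential_increase",
    },
    # Who is in the cohort
    population=patients.registered_with_one_practice_between(
        "2019-02-01", "2020-12-31"
    ),
    # One covariate; return_expectations shapes the generated dummy data
    age=patients.age_as_of(
        "2020-10-01",
        return_expectations={
            "rate": "universal",
            "int": {"distribution": "population_ages"},
        },
    ),
)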
play_level_data.py
Source: play_level_data.py
# Gather play level data.
# Go through NGS data and get info like events, etc.
import pandas as pd
import numpy as np
import tracemalloc
import gc

tracemalloc.start()

# Read in non-NGS data sources
ppd = pd.read_csv('../input/player_punt_data.csv')
gd = pd.read_csv('../input/game_data.csv')
pprd = pd.read_csv('../input/play_player_role_data.csv')
vr = pd.read_csv('../input/video_review.csv')
vfi = pd.read_csv('../input/video_footage-injury.csv')
pi = pd.read_csv('../input/play_information.csv')
all_dfs = [ppd, gd, pprd, vr, vfi, pi]
for mydf in all_dfs:
    mydf.columns = [col.lower() for col in mydf.columns]

# 'NGS-2016-post.csv',
NGS_csv_files = [
    'NGS-2016-pre.csv',
    'NGS-2016-reg-wk1-6.csv',
    'NGS-2016-reg-wk13-17.csv',
    'NGS-2016-reg-wk7-12.csv',
    'NGS-2017-post.csv',
    'NGS-2017-pre.csv',
    'NGS-2017-reg-wk1-6.csv',
    'NGS-2017-reg-wk13-17.csv',
    'NGS-2017-reg-wk7-12.csv',
]
ppd_unique = ppd.groupby('gsisid').agg(lambda x: ', '.join(x)).reset_index()

# Detailed role info
# I made this myself and it may include errors requiring me to rerun later
role_info_dict = {'GL': ['Gunner', 'Punting_Team'],
                  'GLi': ['Gunner', 'Punting_Team'],
                  'GLo': ['Gunner', 'Punting_Team'],
                  'GR': ['Gunner', 'Punting_Team'],
                  'GRi': ['Gunner', 'Punting_Team'],
                  'GRo': ['Gunner', 'Punting_Team'],
                  'P': ['Punter', 'Punting_Team'],
                  'PC': ['Punter_Protector', 'Punting_Team'],
                  'PPR': ['Punter_Protector', 'Punting_Team'],
                  'PPRi': ['Punter_Protector', 'Punting_Team'],
                  'PPRo': ['Punter_Protector', 'Punting_Team'],
                  'PDL1': ['Defensive_Lineman', 'Returning_Team'],
                  'PDL2': ['Defensive_Lineman', 'Returning_Team'],
                  'PDL3': ['Defensive_Lineman', 'Returning_Team'],
                  'PDR1': ['Defensive_Lineman', 'Returning_Team'],
                  'PDR2': ['Defensive_Lineman', 'Returning_Team'],
                  'PDR3': ['Defensive_Lineman', 'Returning_Team'],
                  'PDL5': ['Defensive_Lineman', 'Returning_Team'],
                  'PDL6': ['Defensive_Lineman', 'Returning_Team'],
                  'PFB': ['PuntFullBack', 'Punting_Team'],
                  'PLG': ['Punting_Lineman', 'Punting_Team'],
                  'PLL': ['Defensive_Backer', 'Returning_Team'],
                  'PLL1': ['Defensive_Backer', 'Returning_Team'],
                  'PLL3': ['Defensive_Backer', 'Returning_Team'],
                  'PLS': ['Punting_Longsnapper', 'Punting_Team'],
                  'PLT': ['Punting_Lineman', 'Punting_Team'],
                  'PLW': ['Punting_Wing', 'Punting_Team'],
                  'PRW': ['Punting_Wing', 'Punting_Team'],
                  'PR': ['Punt_Returner', 'Returning_Team'],
                  'PRG': ['Punting_Lineman', 'Punting_Team'],
                  'PRT': ['Punting_Lineman', 'Punting_Team'],
                  'VLo': ['Jammer', 'Returning_Team'],
                  'VR': ['Jammer', 'Returning_Team'],
                  'VL': ['Jammer', 'Returning_Team'],
                  'VRo': ['Jammer', 'Returning_Team'],
                  'VRi': ['Jammer', 'Returning_Team'],
                  'VLi': ['Jammer', 'Returning_Team'],
                  'PPL': ['Punter_Protector', 'Punting_Team'],
                  'PPLo': ['Punter_Protector', 'Punting_Team'],
                  'PPLi': ['Punter_Protector', 'Punting_Team'],
                  'PLR': ['Defensive_Backer', 'Returning_Team'],
                  'PRRo': ['Defensive_Backer', 'Returning_Team'],
                  'PDL4': ['Defensive_Lineman', 'Returning_Team'],
                  'PDR4': ['Defensive_Lineman', 'Returning_Team'],
                  'PLM': ['Defensive_Backer', 'Returning_Team'],
                  'PLM1': ['Defensive_Backer', 'Returning_Team'],
                  'PLR1': ['Defensive_Backer', 'Returning_Team'],
                  'PLR2': ['Defensive_Backer', 'Returning_Team'],
                  'PLR3': ['Defensive_Backer', 'Returning_Team'],
                  'PLL2': ['Defensive_Backer', 'Returning_Team'],
                  'PDM': ['Defensive_Lineman', 'Returning_Team'],
                  'PDR5': ['Defensive_Lineman', 'Returning_Team'],
                  'PDR6': ['Defensive_Lineman', 'Returning_Team'],
                  }
role_info = pd.DataFrame.from_dict(role_info_dict,
                                   orient='index',
                                   columns=['generalized_role', 'punting_returning_team']) \
    .reset_index() \
    .rename(columns={'index': 'role'})
pprd_detailed = pd.merge(pprd, role_info, how='left', on='role')

play_count = 0
for ngs_file in NGS_csv_files:
    # Loop through each file so that we save space
    ngs = pd.read_csv('../input/{}'.format(ngs_file))
    ngs.columns = [col.lower() for col in ngs.columns]
    # groupby and loop through each play
    grouped = ngs.groupby(['season_year', 'gamekey', 'playid'])
    count = 0
    for s_gk_pid, df in grouped:
        try:
            play_count += 1
            print('========RUNNING FOR PLAY NUMBER {} =============='.format(play_count))
            count += 1
            print('Running for season year gamekey playid: {}'.format(s_gk_pid))
            try:
                print(pd.merge(df, pi)['playdescription'].values[0])
            except Exception as e:
                print('No play info')
                print('exception {}'.format(e))
                with open("broke_plays.txt", "a") as myfile:
                    myfile.write("NO PLAY INFO {} \n".format(s_gk_pid))
                continue
            rows_before = len(df)
            # Merge possible player jersey number and position
            df = pd.merge(df, ppd_unique, how='left', on='gsisid')
            if len(df) != rows_before:
                # raising a bare string is invalid in Python 3; use an exception type
                raise ValueError('Shape has changed! This is not right')
            # Merge player punt role. Drop any player that does not have a role in the play.
            # This includes players on the sideline who are captured on the field during the play.
            df = pd.merge(df, pprd_detailed,
                          on=['season_year', 'gamekey', 'playid', 'gsisid'], how='inner')
            df = pd.merge(df, vr, on=['season_year', 'gamekey', 'playid'],
                          how='left', suffixes=('', '_injured'))
            # Get all events and the event times within the play
            events = df.groupby(['event', 'time'])
            for event, d in events:
                df[event[0]] = event[1]  # Save event as column with time of event
            df['mph'] = df['dis'] * 20.4545455  # miles per hour
            df['injured_player'] = df.apply(
                lambda row: True if row['gsisid'] == row['gsisid_injured'] else False, axis=1)
            df['primary_partner_player'] = df.apply(
                lambda row: True if row['gsisid'] == row['primary_partner_gsisid'] else False, axis=1)
            # Find out if play is left to right - or right to left
            try:
                punt_returner_x_at_snap = df.loc[(df['role'] == 'PR') & (df['event'] == 'ball_snap')]['x'].values[0]
                long_snapper_x_at_snap = df.loc[(df['role'] == 'PLS') & (df['event'] == 'ball_snap')]['x'].values[0]
                if punt_returner_x_at_snap < long_snapper_x_at_snap:
                    df['left_to_right'] = False
                else:
                    df['left_to_right'] = True
            except Exception as e:
                df['left_to_right'] = np.nan
                with open("broke_plays.txt", "a") as myfile:
                    myfile.write("COULDNT DETERMINE LEFT TO RIGHT {} \n".format(s_gk_pid))
            # Join play information
            # Commented out because unnecessary
            # df = pd.merge(df, pi, on=['season_year',
            #               'gamekey', 'playid'], how='left')
            df.to_csv('../working/playlevel/all_data/{}-{}-{}.csv'.format(s_gk_pid[0], s_gk_pid[1], s_gk_pid[2]),
                      index=False)
            ##############################################
            # Cut off from start of play to end of play
            ##############################################
            # Only keep time within the play that matters
            print(df['event'].unique())
            if len(df.loc[df['event'] == 'ball_snap']['time'].values) == 0:
                print('........No Snap for this play')
                ball_snap_time = df['time'].min()
            else:
                ball_snap_time = df.loc[df['event'] == 'ball_snap']['time'].values.min()
            try:
                end_time = df.loc[(df['event'] == 'out_of_bounds') |
                                  (df['event'] == 'downed') |
                                  (df['event'] == 'tackle') |
                                  (df['event'] == 'punt_downed') |
                                  (df['event'] == 'fair_catch') |
                                  (df['event'] == 'touchback') |
                                  (df['event'] == 'touchdown')]['time'].values.max()
            except ValueError:
                print('.......No end to play')
                end_time = df['time'].values.max()
                with open("broke_plays.txt", "a") as myfile:
                    myfile.write("NO END TO THE PLAY {} \n".format(s_gk_pid))
            df = df.loc[(df['time'] >= ball_snap_time) & (df['time'] <= end_time)]
            if len(df) == 0:
                print('BROKE FOR {}'.format(s_gk_pid))
            else:
                df.to_csv('../working/playlevel/during_play/{}-{}-{}.csv'.format(
                    s_gk_pid[0], s_gk_pid[1], s_gk_pid[2]))
        except Exception as e:
            print('It broke for this one.............')
            with open("broke_plays.txt", "a") as myfile:
                myfile.write("BROKE SOMEWHERE ELSE {} \n".format(s_gk_pid))
    # Remove data from memory
    del ngs
    gc.collect()
...
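The memory strategy here is the part worth isolating: read one NGS file at a time, iterate plays via groupby, write each play out, then free the frame before loading the next file. A condensed sketch of just that skeleton, with the file list and output paths as placeholders taken from the script:

import gc

import pandas as pd

# Condensed sketch of the per-play loop above; one file at a time
# keeps peak memory bounded to a single NGS file.
for ngs_file in ['NGS-2016-pre.csv']:  # placeholder list
    ngs = pd.read_csv('../input/{}'.format(ngs_file))
    ngs.columns = [col.lower() for col in ngs.columns]
    for (season, gamekey, playid), play in ngs.groupby(
            ['season_year', 'gamekey', 'playid']):
        # 'dis' is distance covered per 0.1 s in yards, so
        # 1 unit = 10 yd/s = 36000 yd/h / 1760 = 20.4545 mph
        play = play.assign(mph=play['dis'] * 20.4545455)
        play.to_csv('../working/{}-{}-{}.csv'.format(season, gamekey, playid),
                    index=False)
    del ngs  # release the file's memory before loading the next one
    gc.collect()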
zxjdbc.py
Source: zxjdbc.py
# oracle/zxjdbc.py
# Copyright (C) 2005-2019 the SQLAlchemy authors and contributors
# <see AUTHORS file>
#
# This module is part of SQLAlchemy and is released under
# the MIT License: http://www.opensource.org/licenses/mit-license.php
"""
.. dialect:: oracle+zxjdbc
    :name: zxJDBC for Jython
    :dbapi: zxjdbc
    :connectstring: oracle+zxjdbc://user:pass@host/dbname
    :driverurl: http://www.oracle.com/technetwork/database/features/jdbc/index-091264.html

    .. note:: Jython is not supported by current versions of SQLAlchemy.  The
       zxjdbc dialect should be considered as experimental.

"""  # noqa
import collections
import decimal
import re

from .base import OracleCompiler
from .base import OracleDialect
from .base import OracleExecutionContext
from ... import sql
from ... import types as sqltypes
from ... import util
from ...connectors.zxJDBC import ZxJDBCConnector
from ...engine import result as _result
from ...sql import expression

SQLException = zxJDBC = None


class _ZxJDBCDate(sqltypes.Date):
    def result_processor(self, dialect, coltype):
        def process(value):
            if value is None:
                return None
            else:
                return value.date()

        return process


class _ZxJDBCNumeric(sqltypes.Numeric):
    def result_processor(self, dialect, coltype):
        # XXX: does the dialect return Decimal or not???
        # if it does (in all cases), we could use a None processor as well as
        # the to_float generic processor
        if self.asdecimal:

            def process(value):
                if isinstance(value, decimal.Decimal):
                    return value
                else:
                    return decimal.Decimal(str(value))

        else:

            def process(value):
                if isinstance(value, decimal.Decimal):
                    return float(value)
                else:
                    return value

        return process


class OracleCompiler_zxjdbc(OracleCompiler):
    def returning_clause(self, stmt, returning_cols):
        self.returning_cols = list(
            expression._select_iterables(returning_cols)
        )

        # within_columns_clause=False so that labels (foo AS bar) don't render
        columns = [
            self.process(c, within_columns_clause=False)
            for c in self.returning_cols
        ]

        if not hasattr(self, "returning_parameters"):
            self.returning_parameters = []

        binds = []
        for i, col in enumerate(self.returning_cols):
            dbtype = col.type.dialect_impl(self.dialect).get_dbapi_type(
                self.dialect.dbapi
            )
            self.returning_parameters.append((i + 1, dbtype))
            bindparam = sql.bindparam(
                "ret_%d" % i, value=ReturningParam(dbtype)
            )
            self.binds[bindparam.key] = bindparam
            binds.append(
                self.bindparam_string(self._truncate_bindparam(bindparam))
            )

        return "RETURNING " + ", ".join(columns) + " INTO " + ", ".join(binds)


class OracleExecutionContext_zxjdbc(OracleExecutionContext):
    def pre_exec(self):
        if hasattr(self.compiled, "returning_parameters"):
            # prepare a zxJDBC statement so we can grab its underlying
            # OraclePreparedStatement's getReturnResultSet later
            self.statement = self.cursor.prepare(self.statement)

    def get_result_proxy(self):
        if hasattr(self.compiled, "returning_parameters"):
            rrs = None
            try:
                try:
                    rrs = self.statement.__statement__.getReturnResultSet()
                    next(rrs)
                except SQLException as sqle:
                    msg = "%s [SQLCode: %d]" % (
                        sqle.getMessage(),
                        sqle.getErrorCode(),
                    )
                    if sqle.getSQLState() is not None:
                        msg += " [SQLState: %s]" % sqle.getSQLState()
                    raise zxJDBC.Error(msg)
                else:
                    row = tuple(
                        self.cursor.datahandler.getPyObject(rrs, index, dbtype)
                        for index, dbtype in self.compiled.returning_parameters
                    )
                    return ReturningResultProxy(self, row)
            finally:
                if rrs is not None:
                    try:
                        rrs.close()
                    except SQLException:
                        pass
                self.statement.close()

        return _result.ResultProxy(self)

    def create_cursor(self):
        cursor = self._dbapi_connection.cursor()
        cursor.datahandler = self.dialect.DataHandler(cursor.datahandler)
        return cursor


class ReturningResultProxy(_result.FullyBufferedResultProxy):
    """ResultProxy backed by the RETURNING ResultSet results."""

    def __init__(self, context, returning_row):
        self._returning_row = returning_row
        super(ReturningResultProxy, self).__init__(context)

    def _cursor_description(self):
        ret = []
        for c in self.context.compiled.returning_cols:
            if hasattr(c, "name"):
                ret.append((c.name, c.type))
            else:
                ret.append((c.anon_label, c.type))
        return ret

    def _buffer_rows(self):
        return collections.deque([self._returning_row])


class ReturningParam(object):
    """A bindparam value representing a RETURNING parameter.

    Specially handled by OracleReturningDataHandler.
    """

    def __init__(self, type_):
        self.type = type_

    def __eq__(self, other):
        if isinstance(other, ReturningParam):
            return self.type == other.type
        return NotImplemented

    def __ne__(self, other):
        if isinstance(other, ReturningParam):
            return self.type != other.type
        return NotImplemented

    def __repr__(self):
        kls = self.__class__
        return "<%s.%s object at 0x%x type=%s>" % (
            kls.__module__,
            kls.__name__,
            id(self),
            self.type,
        )


class OracleDialect_zxjdbc(ZxJDBCConnector, OracleDialect):
    jdbc_db_name = "oracle"
    jdbc_driver_name = "oracle.jdbc.OracleDriver"

    statement_compiler = OracleCompiler_zxjdbc
    execution_ctx_cls = OracleExecutionContext_zxjdbc

    colspecs = util.update_copy(
        OracleDialect.colspecs,
        {sqltypes.Date: _ZxJDBCDate, sqltypes.Numeric: _ZxJDBCNumeric},
    )

    def __init__(self, *args, **kwargs):
        super(OracleDialect_zxjdbc, self).__init__(*args, **kwargs)
        global SQLException, zxJDBC
        from java.sql import SQLException
        from com.ziclix.python.sql import zxJDBC
        from com.ziclix.python.sql.handler import OracleDataHandler

        class OracleReturningDataHandler(OracleDataHandler):
            """zxJDBC DataHandler that specially handles ReturningParam."""

            def setJDBCObject(self, statement, index, object_, dbtype=None):
                if type(object_) is ReturningParam:
                    statement.registerReturnParameter(index, object_.type)
                elif dbtype is None:
                    OracleDataHandler.setJDBCObject(
                        self, statement, index, object_
                    )
                else:
                    OracleDataHandler.setJDBCObject(
                        self, statement, index, object_, dbtype
                    )

        self.DataHandler = OracleReturningDataHandler

    def initialize(self, connection):
        super(OracleDialect_zxjdbc, self).initialize(connection)
        self.implicit_returning = connection.connection.driverversion >= "10.2"

    def _create_jdbc_url(self, url):
        return "jdbc:oracle:thin:@%s:%s:%s" % (
            url.host,
            url.port or 1521,
            url.database,
        )

    def _get_server_version_info(self, connection):
        version = re.search(
            r"Release ([\d\.]+)", connection.connection.dbversion
        ).group(1)
        return tuple(int(x) for x in version.split("."))
...
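returning_clause() above is a dialect-internal compilation hook; the same per-dialect rendering mechanism is exposed to applications through the @compiles decorator that test_returning.py's IncDefault also uses. As a sketch of that mechanism, adapted from the pattern in SQLAlchemy's compiler-extension documentation (the greatest construct here is illustrative, not part of the files above):

from sqlalchemy.ext.compiler import compiles
from sqlalchemy.sql.expression import FunctionElement
from sqlalchemy.types import Numeric


class greatest(FunctionElement):
    """Render GREATEST(a, b), with a per-dialect fallback."""
    type = Numeric()
    name = "greatest"


@compiles(greatest)
def _default_greatest(element, compiler, **kw):
    # most backends have a native GREATEST function
    return compiler.visit_function(element)


@compiles(greatest, "mssql")
def _case_greatest(element, compiler, **kw):
    # SQL Server has no GREATEST; emit a CASE expression instead
    arg1, arg2 = list(element.clauses)
    return "CASE WHEN %s > %s THEN %s ELSE %s END" % (
        compiler.process(arg1, **kw),
        compiler.process(arg2, **kw),
        compiler.process(arg1, **kw),
        compiler.process(arg2, **kw),
    )

# usage: select([greatest(tbl.c.x, tbl.c.y)]) compiles differently
# depending on the dialect the statement is executed against.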
