How to use the test_combined method in grail

Best Python code snippets using grail_python
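Three snippets are shown below: a keystroke-dynamics model that builds test_Combined / train_Combined feature tables, a Japanese word-shortening test suite whose test_combined helper checks abbreviated titles, and a metadata test module that uses a test_combined dict fixture. As a quick orientation, here is a minimal sketch of driving the helper from the test.py snippet further down; the import path is an assumption, and CombinedShortenedWord / test_common come from that project, not from this page.

# Minimal sketch (assumptions: the module is importable as `test`, and its
# CombinedShortenedWord / test_common helpers are available).
from test import test_combined  # hypothetical import path

test_combined('真夜中のプリンス', '真夜プリ')   # expect the abbreviation '真夜プリ'
test_combined('鋼の錬金術師', 'ハガレン')       # split, converted, then joined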

keystroke_dynamics_model.py

Source: keystroke_dynamics_model.py (GitHub)


import numpy as np
import pandas as pd
import matplotlib.pyplot as plt
import seaborn as sns
from sklearn.metrics import accuracy_score
from sklearn.neighbors import KNeighborsClassifier
from sklearn.model_selection import GridSearchCV, StratifiedShuffleSplit
from xgboost.sklearn import XGBClassifier
import xgboost as xgb

train = pd.read_csv('model/train.csv')
test = pd.read_csv('model/test.csv')
test_Combined = None
train_Combined = None
nr_bins = 10
HDMax, RPDMax, PPDMax = None, None, None
HDBins, RPDBins, PPDBins = None, None, None

# Derive hold duration (HD), press-press delay (PPD) and release-press delay (RPD)
# features from the raw press/release timestamps.
train_1 = train
for i in range(1, 13):
    train_1['PPD-' + str(i)] = train_1['press-' + str(i)] - train_1['press-' + str(i - 1)]
    train_1['RPD-' + str(i)] = train_1['release-' + str(i)] - train_1['press-' + str(i - 1)]
for i in range(13):
    train_1['HD-' + str(i)] = train_1['release-' + str(i)] - train_1['press-' + str(i)]
test_1 = test
for i in range(1, 13):
    test_1['PPD-' + str(i)] = test_1['press-' + str(i)] - test_1['press-' + str(i - 1)]
    test_1['RPD-' + str(i)] = test_1['release-' + str(i)] - test_1['press-' + str(i - 1)]
for i in range(13):
    test_1['HD-' + str(i)] = test_1['release-' + str(i)] - test_1['press-' + str(i)]

def train():  # note: shadows the `train` DataFrame loaded above
    global train_Combined, test_Combined, HDBins, RPDBins, PPDBins
    # Training data: reshape each timing feature into long format.
    drop_cols_HD_analysis = ['PPD-' + str(i) for i in range(1, 13)] + ['RPD-' + str(i) for i in range(1, 13)] + ['release-' + str(i) for i in range(13)]
    train_HD_analysis = train_1.drop(columns=drop_cols_HD_analysis)
    train_HD_analysis['id'] = train_HD_analysis.index
    train_HD_analysis = pd.wide_to_long(train_HD_analysis, ['press-', 'HD-'], i='id', j='key_no').sort_values(by=['user', 'id', 'key_no'])
    drop_cols_PPD_analysis = ['HD-' + str(i) for i in range(13)] + ['RPD-' + str(i) for i in range(1, 13)] + ['release-' + str(i) for i in range(13)] + ['press-0']
    train_PPD_analysis = train_1.drop(columns=drop_cols_PPD_analysis)
    train_PPD_analysis['id'] = train_PPD_analysis.index
    train_PPD_analysis = pd.wide_to_long(train_PPD_analysis, ['press-', 'PPD-'], i='id', j='key_no').sort_values(by=['user', 'id', 'key_no'])
    drop_cols_RPD_analysis = ['HD-' + str(i) for i in range(13)] + ['PPD-' + str(i) for i in range(1, 13)] + ['release-' + str(i) for i in range(13)] + ['press-0']
    train_RPD_analysis = train_1.drop(columns=drop_cols_RPD_analysis)
    train_RPD_analysis['id'] = train_RPD_analysis.index
    train_RPD_analysis = pd.wide_to_long(train_RPD_analysis, ['press-', 'RPD-'], i='id', j='key_no').sort_values(by=['user', 'id', 'key_no'])
    # Test data: same reshaping, without the user column in the sort.
    test_HD_analysis = test_1.drop(columns=drop_cols_HD_analysis)
    test_HD_analysis['id'] = test_HD_analysis.index
    test_HD_analysis = pd.wide_to_long(test_HD_analysis, ['press-', 'HD-'], i='id', j='key_no').sort_values(by=['id', 'key_no'])
    test_PPD_analysis = test_1.drop(columns=drop_cols_PPD_analysis)
    test_PPD_analysis['id'] = test_PPD_analysis.index
    test_PPD_analysis = pd.wide_to_long(test_PPD_analysis, ['press-', 'PPD-'], i='id', j='key_no').sort_values(by=['id', 'key_no'])
    test_RPD_analysis = test_1.drop(columns=drop_cols_RPD_analysis)
    test_RPD_analysis['id'] = test_RPD_analysis.index
    test_RPD_analysis = pd.wide_to_long(test_RPD_analysis, ['press-', 'RPD-'], i='id', j='key_no').sort_values(by=['id', 'key_no'])
    # Join these individual tables together
    test_Combined = test_HD_analysis.join(test_RPD_analysis.drop(columns=['press-']), rsuffix='RPD_').join(test_PPD_analysis.drop(columns=['press-']), rsuffix='PPD_')
    train_Combined = train_HD_analysis.join(train_RPD_analysis.drop(columns=['user', 'press-']), rsuffix='RPD_').join(train_PPD_analysis.drop(columns=['user', 'press-']), rsuffix='PPD_')
    # print('Max values in train are: HDMax:', HDMax, 'RPDMax:', RPDMax, 'PPDMax:', PPDMax)
    # Quantile-bin each timing feature and keep the bin edges for use in predict().
    labels = [i for i in range(nr_bins)]
    train_Combined['HDEnc'], HDBins = pd.qcut(train_Combined['HD-'], retbins=True, labels=labels, q=nr_bins)
    train_Combined['PPDEnc'], RPDBins = pd.qcut(train_Combined['PPD-'], retbins=True, labels=labels, q=nr_bins)
    train_Combined['RPDEnc'], PPDBins = pd.qcut(train_Combined['RPD-'], retbins=True, labels=labels, q=nr_bins)
    train_Combined['HDEnc'] = train_Combined['HDEnc'].astype(str).replace('nan', -1).astype(int)
    train_Combined['PPDEnc'] = train_Combined['PPDEnc'].astype(str).replace('nan', -1).astype(float)
    train_Combined['RPDEnc'] = train_Combined['RPDEnc'].astype(str).replace('nan', -1).astype(float)

def predict(pressed_t, released_t):
    HDMax = test_Combined['HD-'].max()
    RPDMax = test_Combined['RPD-'].max()
    PPDMax = test_Combined['PPD-'].max()
    # print('Max values in test are: HDMax:', HDMax, 'RPDMax:', RPDMax, 'PPDMax:', PPDMax)
    # Encode the test timings with the bin edges learned in train().
    labels = [i for i in range(nr_bins)]
    test_Combined['HDEnc'] = pd.cut(test_Combined['HD-'], labels=labels, bins=HDBins)
    test_Combined['PPDEnc'] = pd.cut(test_Combined['PPD-'], labels=labels, bins=RPDBins)
    test_Combined['RPDEnc'] = pd.cut(test_Combined['RPD-'], labels=labels, bins=PPDBins)
    test_Combined['HDEnc'] = test_Combined['HDEnc'].astype(str).replace('nan', -1).astype(float)
    test_Combined['PPDEnc'] = test_Combined['PPDEnc'].astype(str).replace('nan', -1).astype(float)
    test_Combined['RPDEnc'] = test_Combined['RPDEnc'].astype(str).replace('nan', -1).astype(float)
    # Per-user, per-key averages on the training side.
    train_Combined_HDAvg = train_Combined.reset_index().groupby(['user', 'key_no'])['HDEnc'].mean()
    train_Combined_PPDAvg = train_Combined.reset_index().groupby(['user', 'key_no'])['PPDEnc'].mean()
    train_Combined_RPDAvg = train_Combined.reset_index().groupby(['user', 'key_no'])['RPDEnc'].mean()
    temp = pd.DataFrame({'HD': train_Combined_HDAvg, 'PPD': train_Combined_PPDAvg, 'RPD': train_Combined_RPDAvg})
    train_HDProperties = temp.reset_index().groupby('user')['HD'].apply(np.array)
    train_PPDProperties = temp.reset_index().groupby('user')['PPD'].apply(np.array)
    train_RPDProperties = temp.reset_index().groupby('user')['RPD'].apply(np.array)
    train_UserProps = pd.DataFrame({'HD': train_HDProperties, 'PPD': train_PPDProperties, 'RPD': train_RPDProperties})
    pressed_t = released_t   # NOTE: as written this is not a swap; both names end up
    released_t = pressed_t   # holding released_t, and neither is used below.
    # Per-sample averages on the test side.
    train_Combined_HDAvg = test_Combined.reset_index().groupby(['id', 'key_no'])['HDEnc'].mean()
    train_Combined_PPDAvg = test_Combined.reset_index().groupby(['id', 'key_no'])['PPDEnc'].mean()
    train_Combined_RPDAvg = test_Combined.reset_index().groupby(['id', 'key_no'])['RPDEnc'].mean()
    temp = pd.DataFrame({'HD': train_Combined_HDAvg, 'PPD': train_Combined_PPDAvg, 'RPD': train_Combined_RPDAvg})
    train_HDProperties = temp.reset_index().groupby('id')['HD'].apply(np.array)
    train_PPDProperties = temp.reset_index().groupby('id')['PPD'].apply(np.array)
    train_RPDProperties = temp.reset_index().groupby('id')['RPD'].apply(np.array)
    test_UserProps = pd.DataFrame({'HD': train_HDProperties, 'PPD': train_PPDProperties, 'RPD': train_RPDProperties})
    test_UserProps = pd.DataFrame(test_UserProps.HD.tolist(), index=test_UserProps.index).add_prefix('HD_').join(
        pd.DataFrame(test_UserProps.PPD.tolist(), index=test_UserProps.index).add_prefix('PPD_')).join(
        pd.DataFrame(test_UserProps.RPD.tolist(), index=test_UserProps.index).add_prefix('RPD_'))
    train_HDTemp = train_Combined.reset_index().groupby(['user', 'id'])['HDEnc'].apply(np.array)
    train_PPDTemp = train_Combined.reset_index().groupby(['user', 'id'])['PPDEnc'].apply(np.array)
    train_RPDTemp = train_Combined.reset_index().groupby(['user', 'id'])['RPDEnc'].apply(np.array)
    train_User_AllSampleProps = pd.DataFrame({'HD': train_HDTemp, 'PPD': train_PPDTemp, 'RPD': train_RPDTemp})
    train_User_AllSampleProps = pd.DataFrame(train_User_AllSampleProps.HD.tolist(), index=train_User_AllSampleProps.index).add_prefix('HD_').join(
        pd.DataFrame(train_User_AllSampleProps.PPD.tolist(), index=train_User_AllSampleProps.index).add_prefix('PPD_')).join(
        pd.DataFrame(train_User_AllSampleProps.RPD.tolist(), index=train_User_AllSampleProps.index).add_prefix('RPD_')).reset_index().set_index('user').drop(columns=['id'])
    train_HDTemp = test_Combined.reset_index().groupby(['id'])['HDEnc'].apply(np.array)
    train_PPDTemp = test_Combined.reset_index().groupby(['id'])['PPDEnc'].apply(np.array)
    train_RPDTemp = test_Combined.reset_index().groupby(['id'])['RPDEnc'].apply(np.array)
    test_User_AllSampleProps = pd.DataFrame({'HD': train_HDTemp, 'PPD': train_PPDTemp, 'RPD': train_RPDTemp})
    test_User_AllSampleProps = pd.DataFrame(test_User_AllSampleProps.HD.tolist(), index=test_User_AllSampleProps.index).add_prefix('HD_').join(
        pd.DataFrame(test_User_AllSampleProps.PPD.tolist(), index=test_User_AllSampleProps.index).add_prefix('PPD_')).join(
        pd.DataFrame(test_User_AllSampleProps.RPD.tolist(), index=test_User_AllSampleProps.index).add_prefix('RPD_'))
    trainX_allSamples = train_User_AllSampleProps.reset_index().drop(columns=['user'])
    trainY_allSamples = train_User_AllSampleProps.index
    # Load the pre-trained XGBoost model and write out the submission skeleton.
    model = xgb.XGBClassifier()
    model.load_model('keystroke_model.bst')
    testX_allSamples = test_User_AllSampleProps.reset_index().drop(columns=['id'])
    pd.DataFrame({'idx': testX_allSamples.index}, index=testX_allSamples.index).to_csv('submission_x.csv', index=False)
...
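Nothing in the snippet actually calls train() or predict(). A minimal sketch of how the two functions might be driven, assuming model/train.csv, model/test.csv, and keystroke_model.bst exist at the paths referenced above:

# Sketch only: train() must run first so the bin edges and the combined
# train_Combined / test_Combined frames exist before predict() is called.
if __name__ == '__main__':
    train()
    # pressed_t / released_t are accepted but not used by predict() in this
    # excerpt, so placeholder lists are passed here.
    predict([], [])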


test.py

Source: test.py (GitHub)


...
    '''
    single_word = SingleShortenedWord(text)
    single_word.generate()
    test_common(text, target_word, single_word)

def test_combined(text, target_word):
    '''Test for shortened words (combined words)
    '''
    combined_word = CombinedShortenedWord(text)
    combined_word.generate()
    test_common(text, target_word, combined_word)

def test_yuragi(text, target_word):
    '''Test for yuragi (fluctuation) words
    '''
    yuragi_word = Yuragi(text)
    yuragi_word.generate()
    test_common(text, target_word, yuragi_word)

def main():
    # # -------------------------------
    # # Single-word tests
    # # -------------------------------
    # # Single word (split)
    # test_single('CRISIS 公安機動捜査隊特捜班', 'CRISIS')
    # test_single('SRサイタマノラッパー~マイクの細道~', 'サイタマノラッパー')
    # test_single('中居正広のミになる図書館', 'ミになる図書館')
    # test_single('ファイナルファンタジーXIV 光のお父さん', '光のお父さん')
    # test_single('関ジャム 完全燃SHOW', '関ジャム')
    # # Single word (split and converted)
    # test_single('CRISIS 公安機動捜査隊特捜班', 'クライシス')
    # # Single word (catchphrase removed, rest kept as-is)
    # test_single('1億人の大質問!?笑ってコラえて!', '笑ってコラえて')
    # # Single word (subject kept as-is)
    # test_single('櫻子さんの足下には死体が埋まっている', '櫻子さん')
    # # Single word (subject kept as-is)
    # test_single('警視庁捜査一課9係 season12', '9係')
    # # Single word (katakana word kept as-is)
    # test_single('幸せ!ボンビーガール', 'ボンビーガール')
    # # Single word (noise removed, katakana word kept as-is)
    # test_single('ユーリ!!! on ICE', 'ユーリ')
    # # Single word (series name removed)
    # test_single('進撃の巨人 Season 2」', '進撃の巨人')
    # # -------------------------------
    # # Combined-word tests
    # # -------------------------------
    # # Combined words
    # # test_combined('マッサージ探偵 ジョー」', '探偵ジョー')  # fails: three-character katakana is not supported
    # test_combined('ボク、運命の人です。', 'ボク運')
    # test_combined('真夜中のプリンス', '真夜プリ')
    # test_combined('緊急取調室 第2シリーズ」', 'キントリ')
    # test_combined('中居正広の金曜日のスマイルたちへ」', '金スマ')
    # test_combined('この素晴らしい世界に祝福を!2」', 'このすば')
    # test_combined('あなたのことはそれほど」', 'あなそれ')
    # test_combined('ボク、運命の人です。」', 'ボク運')
    # test_combined('人は見た目が100パーセント」', 'ひとパー')
    # test_combined('恋がヘタでも生きてます」', '恋ヘタ')
    # test_combined('あなたのことはそれほど」', 'あなそれ')
    # test_combined('3人のパパ」', '3パパ')
    # test_combined('警視庁捜査一課9係 season12」', '9係')
    # # Combined word (split, converted, then joined)
    # test_combined('鋼の錬金術師', 'ハガレン')
    # -------------------------------
    # Yuragi (fluctuation) candidate-word tests
    # -------------------------------
    # Single word (split)
    test_yuragi('ダンジョンに出会いを求めるのは間違っているだろうか', 'ダンまち')
    test_yuragi('Re:ゼロから始める異世界生活', 'リゼロ')
    test_yuragi('転生したらスライムだった件', '転スラ')
...
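test_common is called by every helper above but is not part of this excerpt. Purely as an illustration, here is one way it could look; the `words` attribute is a hypothetical name for the generated candidate list, not something confirmed by the source.

# Hypothetical sketch of test_common; `words` is an assumed attribute name.
def test_common(text, target_word, word_obj):
    candidates = getattr(word_obj, 'words', [])
    status = 'OK' if target_word in candidates else 'NG'
    print('{}: {} -> {} (candidates: {})'.format(status, text, target_word, candidates))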


test_meta_data.py

Source: test_meta_data.py (GitHub)


import anvil.meta_data as md
from base_test import TestBase

class TestBaseMetaData(TestBase):
    test_meta_data = {'foo': 'moo'}
    test_other_meta_data = {'bar': 'larp'}
    test_combined = {}
    test_combined.update(test_meta_data)
    test_combined.update(test_other_meta_data)
    test_total_meta_data_overwrite = {'foo': 'boo', 'bar': 'farp'}
    test_other_meta_data_overwrite = {'bar': 'marp'}
    test_overwritten = test_combined.copy()
    test_overwritten.update(test_other_meta_data_overwrite)
    def build_dependencies(cls):
        pass

class TestMetaDataMergeDicts(TestBaseMetaData):
    def test_meta_data_double(self):
        merged_dict = md.MetaData().merge(self.test_meta_data, self.test_other_meta_data)
        self.assertEquals(merged_dict, self.test_combined)
    def test_meta_data_single(self):
        merged_dict = md.MetaData().merge(self.test_meta_data)
        self.assertEquals(merged_dict, self.test_meta_data)
    def test_meta_data_empty(self):
        test_meta_data = {}
        actual = {}
        actual.update(test_meta_data)
        merged_dict = md.MetaData().merge(test_meta_data)
        self.assertEquals(merged_dict, actual)
    def test_meta_data_none(self):
        test_meta_data = None
        merged_dict = md.MetaData().merge(test_meta_data)
        self.assertEquals(merged_dict, {})

class TestKeys(TestBaseMetaData):
    def test_default_merge(self):
        meta_data_object = md.MetaData()
        meta_data_object.merge(self.test_meta_data, self.test_other_meta_data)
        self.assertEquals(meta_data_object.keys(), list(self.test_combined))
    def test_initialize_with_dicts(self):
        meta_data_object = md.MetaData(self.test_meta_data, self.test_other_meta_data)
        self.assertEquals(meta_data_object.keys(), list(self.test_combined))
    def test_initialize_with_dict_and_splat(self):
        meta_data_object = md.MetaData(self.test_meta_data, **self.test_other_meta_data)
        self.assertEquals(meta_data_object.keys(), list(self.test_combined))

class TestSplatting(TestBaseMetaData):
    @staticmethod
    def single_splat_returner(*args):
        return args
    @staticmethod
    def double_splat_returner(**kwargs):
        return kwargs
    def test_single_from_merge(self):
        meta_data_object = md.MetaData()
        meta_data_object.merge(self.test_meta_data, self.test_other_meta_data)
        self.assertEquals(tuple(meta_data_object.keys()), self.single_splat_returner(*meta_data_object))
    def test_double_from_init(self):
        meta_data_object = md.MetaData(self.test_meta_data, self.test_other_meta_data)
        self.assertEquals(meta_data_object, self.double_splat_returner(**meta_data_object.to_dict()))

class TestMerge(TestBaseMetaData):
    def test_default_merge(self):
        meta_data_object = md.MetaData()
        meta_data_object.merge(self.test_meta_data, self.test_other_meta_data)
        self.assertEquals(meta_data_object, self.test_combined)
    def test_initialize_with_dicts(self):
        meta_data_object = md.MetaData(self.test_meta_data, self.test_other_meta_data)
        self.assertEquals(meta_data_object, self.test_combined)
    def test_initialize_with_dict_and_splat(self):
        meta_data_object = md.MetaData(self.test_meta_data, **self.test_other_meta_data)
        self.assertEquals(meta_data_object, self.test_combined)

class TestProtection(TestBaseMetaData):
    def test_overwrite_merge(self):
        meta_data_object = md.MetaData(self.test_combined)
        meta_data_object.merge(self.test_other_meta_data_overwrite)
        self.assertEquals(meta_data_object, self.test_overwritten)
    def test_protected_merge_force(self):
        meta_data_object = md.MetaData(self.test_combined, protected='bar')
        meta_data_object.merge(self.test_other_meta_data_overwrite, force=True)
        self.assertEquals(meta_data_object, self.test_overwritten)
    def test_protected_multi_merge_force(self):
        meta_data_object = md.MetaData(self.test_combined, protected=['foo', 'bar'])
        meta_data_object.merge(self.test_total_meta_data_overwrite, force=True)
        self.assertEquals(meta_data_object, self.test_total_meta_data_overwrite)
    def test_protected_single_merge_force(self):
        meta_data_object = md.MetaData(self.test_combined, protected=['foo'])
        meta_data_object.merge(self.test_total_meta_data_overwrite, force=True)
        self.assertEquals(meta_data_object, self.test_total_meta_data_overwrite)
    def test_protected_merge(self):
        meta_data_object = md.MetaData(self.test_combined, protected='bar')
        meta_data_object.merge(self.test_other_meta_data_overwrite)
        self.assertEquals(meta_data_object, self.test_combined)
    def test_protected_merge_as_list(self):
        meta_data_object = md.MetaData(self.test_combined, protected=['bar'])
        meta_data_object.merge(self.test_other_meta_data_overwrite)
        self.assertEquals(meta_data_object, self.test_combined)
    def test_add_protection_via_method(self):
        meta_data_object = md.MetaData(self.test_combined, protected=['bar'])
        meta_data_object.merge(self.test_other_meta_data_overwrite)
        self.assertEquals(meta_data_object, self.test_combined)
    def test_add_protection_manually(self):
        meta_data_object = md.MetaData(self.test_combined)
        meta_data_object.protected |= {'bar'}
        meta_data_object.merge(self.test_other_meta_data_overwrite)
        self.assertEquals(meta_data_object, self.test_combined)
    def test_add_protection_manually_overwrite(self):
        meta_data_object = md.MetaData(self.test_combined)
        meta_data_object.protected = ['bar']
        meta_data_object.merge(self.test_other_meta_data_overwrite)
...
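The test_combined fixture in TestBaseMetaData is simply the union of the two sample dicts; the two update() calls are equivalent to a one-line dict merge:

test_meta_data = {'foo': 'moo'}
test_other_meta_data = {'bar': 'larp'}
# Same result as the two update() calls in TestBaseMetaData
test_combined = {**test_meta_data, **test_other_meta_data}
assert test_combined == {'foo': 'moo', 'bar': 'larp'}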


Automation Testing Tutorials

Learn to execute automation testing from scratch with the LambdaTest Learning Hub: from setting up the prerequisites and running your first automation test to following best practices and diving deeper into advanced test scenarios. The LambdaTest Learning Hubs compile step-by-step guides to help you become proficient with different test automation frameworks such as Selenium, Cypress, and TestNG.


YouTube

You can also refer to the video tutorials on the LambdaTest YouTube channel for step-by-step demonstrations from industry experts.

Run grail automation tests on LambdaTest cloud grid

Perform automation testing on 3000+ real desktop and mobile devices online.
