How to use the update_table method in LocalStack

Best Python code snippets using localstack_python
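
The snippets below show update_table in the wild; for LocalStack itself, the usual entry point is boto3's DynamoDB update_table call pointed at a running LocalStack endpoint. Here is a minimal sketch, assuming LocalStack is listening on its default edge port (4566); the table name demo-table is illustrative:

import boto3

# LocalStack accepts dummy credentials; the endpoint_url redirects
# the AWS SDK to the local emulator instead of real AWS.
dynamodb = boto3.client(
    "dynamodb",
    endpoint_url="http://localhost:4566",
    region_name="us-east-1",
    aws_access_key_id="test",
    aws_secret_access_key="test",
)

# Create a table to operate on.
dynamodb.create_table(
    TableName="demo-table",
    KeySchema=[{"AttributeName": "pk", "KeyType": "HASH"}],
    AttributeDefinitions=[{"AttributeName": "pk", "AttributeType": "S"}],
    ProvisionedThroughput={"ReadCapacityUnits": 1, "WriteCapacityUnits": 1},
)

# update_table changes table-level settings, e.g. provisioned throughput.
dynamodb.update_table(
    TableName="demo-table",
    ProvisionedThroughput={"ReadCapacityUnits": 5, "WriteCapacityUnits": 5},
)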

039_grant_to_assignment.py

Source: 039_grant_to_assignment.py (GitHub)


# Copyright 2014 IBM Corp.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
import json
import sqlalchemy as sql
from keystone.assignment.backends import sql as assignment_sql

USER_PROJECT_TABLE = 'user_project_metadata'
GROUP_PROJECT_TABLE = 'group_project_metadata'
USER_DOMAIN_TABLE = 'user_domain_metadata'
GROUP_DOMAIN_TABLE = 'group_domain_metadata'
ASSIGNMENT_TABLE = 'assignment'
GRANT_TABLES = [USER_PROJECT_TABLE, USER_DOMAIN_TABLE,
                GROUP_PROJECT_TABLE, GROUP_DOMAIN_TABLE]


def migrate_grant_table(meta, migrate_engine, session, table_name):
    def extract_actor_and_target(table_name, composite_grant):
        if table_name == USER_PROJECT_TABLE:
            return {'type': assignment_sql.AssignmentType.USER_PROJECT,
                    'actor_id': composite_grant.user_id,
                    'target_id': composite_grant.project_id}
        elif table_name == GROUP_PROJECT_TABLE:
            return {'type': assignment_sql.AssignmentType.GROUP_PROJECT,
                    'actor_id': composite_grant.group_id,
                    'target_id': composite_grant.project_id}
        elif table_name == USER_DOMAIN_TABLE:
            return {'type': assignment_sql.AssignmentType.USER_DOMAIN,
                    'actor_id': composite_grant.user_id,
                    'target_id': composite_grant.domain_id}
        else:
            return {'type': assignment_sql.AssignmentType.GROUP_DOMAIN,
                    'actor_id': composite_grant.group_id,
                    'target_id': composite_grant.domain_id}

    def grant_to_grant_dict_list(table_name, composite_grant):
        """Make each role in the list of this entry a separate assignment."""
        json_metadata = json.loads(composite_grant.data)
        role_dict_list = []
        if 'roles' in json_metadata:
            for x in json_metadata['roles']:
                if x.get('id') is None:
                    # Looks like an invalid role, drop it
                    break
                grant = extract_actor_and_target(table_name, composite_grant)
                grant['role_id'] = x.get('id')
                grant['inherited'] = False
                if x.get('inherited_to') == 'projects':
                    grant['inherited'] = True
                role_dict_list.append(grant)
        return role_dict_list

    upgrade_table = sql.Table(table_name, meta, autoload=True)
    assignment_table = sql.Table(ASSIGNMENT_TABLE, meta, autoload=True)
    # For each grant in this table, expand it out to be an assignment entry
    # for each role in the metadata
    for grant in session.query(upgrade_table).all():
        for grant_role in grant_to_grant_dict_list(table_name, grant):
            new_entry = assignment_table.insert().values(
                type=grant_role['type'],
                actor_id=grant_role['actor_id'],
                target_id=grant_role['target_id'],
                role_id=grant_role['role_id'],
                inherited=grant_role['inherited'])
            migrate_engine.execute(new_entry)
    # Delete all the rows
    migrate_engine.execute(upgrade_table.delete())


def downgrade_assignment_table(meta, migrate_engine):
    def add_to_dict_list(metadata, assignment_row):
        """Update a metadata dict list with the role.

        For the assignment row supplied, we need to append the role_id into
        the metadata list of dicts. If the row is inherited, then we mark
        it so in the dict we append.
        """
        new_entry = {'id': assignment_row.role_id}
        if assignment_row.inherited and (
                assignment_row.type ==
                assignment_sql.AssignmentType.USER_DOMAIN or
                assignment_row.type ==
                assignment_sql.AssignmentType.GROUP_DOMAIN):
            new_entry['inherited_to'] = 'projects'
        if metadata is not None:
            json_metadata = json.loads(metadata)
        else:
            json_metadata = {}
        if json_metadata.get('roles') is None:
            json_metadata['roles'] = []
        json_metadata['roles'].append(new_entry)
        return json.dumps(json_metadata)

    def build_user_project_entry(meta, session, row):
        update_table = sql.Table(USER_PROJECT_TABLE, meta, autoload=True)
        q = session.query(update_table)
        q = q.filter_by(user_id=row.actor_id)
        q = q.filter_by(project_id=row.target_id)
        ref = q.first()
        if ref is not None:
            values = {'data': add_to_dict_list(ref.data, row)}
            update = update_table.update().where(
                update_table.c.user_id == ref.user_id).where(
                update_table.c.project_id == ref.project_id).values(values)
        else:
            values = {'user_id': row.actor_id,
                      'project_id': row.target_id,
                      'data': add_to_dict_list(None, row)}
            update = update_table.insert().values(values)
        return update

    def build_group_project_entry(meta, session, row):
        update_table = sql.Table(GROUP_PROJECT_TABLE, meta, autoload=True)
        q = session.query(update_table)
        q = q.filter_by(group_id=row.actor_id)
        q = q.filter_by(project_id=row.target_id)
        ref = q.first()
        if ref is not None:
            values = {'data': add_to_dict_list(ref.data, row)}
            update = update_table.update().where(
                update_table.c.group_id == ref.group_id).where(
                update_table.c.project_id == ref.project_id).values(values)
        else:
            values = {'group_id': row.actor_id,
                      'project_id': row.target_id,
                      'data': add_to_dict_list(None, row)}
            update = update_table.insert().values(values)
        return update

    def build_user_domain_entry(meta, session, row):
        update_table = sql.Table(USER_DOMAIN_TABLE, meta, autoload=True)
        q = session.query(update_table)
        q = q.filter_by(user_id=row.actor_id)
        q = q.filter_by(domain_id=row.target_id)
        ref = q.first()
        if ref is not None:
            values = {'data': add_to_dict_list(ref.data, row)}
            update = update_table.update().where(
                update_table.c.user_id == ref.user_id).where(
                update_table.c.domain_id == ref.domain_id).values(values)
        else:
            values = {'user_id': row.actor_id,
                      'domain_id': row.target_id,
                      'data': add_to_dict_list(None, row)}
            update = update_table.insert().values(values)
        return update

    def build_group_domain_entry(meta, session, row):
        update_table = sql.Table(GROUP_DOMAIN_TABLE, meta, autoload=True)
        q = session.query(update_table)
        q = q.filter_by(group_id=row.actor_id)
        q = q.filter_by(domain_id=row.target_id)
        ref = q.first()
        if ref is not None:
            values = {'data': add_to_dict_list(ref.data, row)}
            update = update_table.update().where(
                update_table.c.group_id == ref.group_id).where(
                update_table.c.domain_id == ref.domain_id).values(values)
        else:
            values = {'group_id': row.actor_id,
                      'domain_id': row.target_id,
                      'data': add_to_dict_list(None, row)}
            update = update_table.insert().values(values)
        return update

    def build_update(meta, session, row):
        """Build an update or an insert to the correct metadata table."""
        if row.type == assignment_sql.AssignmentType.USER_PROJECT:
            return build_user_project_entry(meta, session, row)
        elif row.type == assignment_sql.AssignmentType.GROUP_PROJECT:
            return build_group_project_entry(meta, session, row)
        elif row.type == assignment_sql.AssignmentType.USER_DOMAIN:
            return build_user_domain_entry(meta, session, row)
        elif row.type == assignment_sql.AssignmentType.GROUP_DOMAIN:
            return build_group_domain_entry(meta, session, row)
        # If the row type doesn't match any that we understand we drop
        # the data.

    session = sql.orm.sessionmaker(bind=migrate_engine)()
    downgrade_table = sql.Table(ASSIGNMENT_TABLE, meta, autoload=True)
    for assignment in session.query(downgrade_table).all():
        update = build_update(meta, session, assignment)
        if update is not None:
            migrate_engine.execute(update)
            session.commit()
    # Delete all the rows
    migrate_engine.execute(downgrade_table.delete())
    session.commit()
    session.close()


def upgrade(migrate_engine):
    meta = sql.MetaData()
    meta.bind = migrate_engine
    session = sql.orm.sessionmaker(bind=migrate_engine)()
    for table_name in GRANT_TABLES:
        migrate_grant_table(meta, migrate_engine, session, table_name)
    session.commit()
    session.close()


def downgrade(migrate_engine):
    meta = sql.MetaData()
    meta.bind = migrate_engine
...
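
In this snippet, update_table is simply a reflected SQLAlchemy Table; the reusable pattern is building either an update() or an insert() depending on whether the row already exists. Below is a self-contained sketch of that update-or-insert decision; the table and column names are illustrative, and it uses SQLAlchemy's current connection API in place of the legacy migrate_engine.execute style:

import sqlalchemy as sql

engine = sql.create_engine("sqlite:///:memory:")
meta = sql.MetaData()
metadata_table = sql.Table(
    "user_project_metadata", meta,
    sql.Column("user_id", sql.String(64), primary_key=True),
    sql.Column("project_id", sql.String(64), primary_key=True),
    sql.Column("data", sql.Text),
)
meta.create_all(engine)

def build_update(conn, user_id, project_id, data):
    # Mirror the migration's decision: update the row if it already
    # exists, otherwise insert a new one.
    existing = conn.execute(
        sql.select(metadata_table).where(
            metadata_table.c.user_id == user_id,
            metadata_table.c.project_id == project_id)).first()
    if existing is not None:
        return metadata_table.update().where(
            metadata_table.c.user_id == user_id).where(
            metadata_table.c.project_id == project_id).values(data=data)
    return metadata_table.insert().values(
        user_id=user_id, project_id=project_id, data=data)

with engine.begin() as conn:
    conn.execute(build_update(conn, "u1", "p1", '{"roles": []}'))   # insert
    conn.execute(build_update(conn, "u1", "p1", '{"roles": [{"id": "r1"}]}'))  # update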


mian.py

Source: mian.py (GitHub)


import pdfplumber
from pdf_annotate import PdfAnnotator, Location, Appearance
import pandas as pd
import numpy as np
import time
import re
import io
import sys

start_time = time.time()
check_version = "developer"
test_times = 1
# Open a PDF file.
if check_version == "developer":
    spec_number = r"acrf_CRAD001Y2301-SPEC"
    pdf_path = r"F:\Novartis\ReaderSpec\files/" + spec_number + '.pdf'
    test_times = test_times + 1
    output_path = r"F:\Novartis\ReaderSpec\result/" + spec_number + "_" + str(test_times) + ".xlsx"
    offset = 1.5
    annotator = PdfAnnotator(pdf_path)
else:
    offset = 1.5
# Read the PDF with pdfplumber.
df = pd.DataFrame()
with pdfplumber.open(pdf_path) as pdf:
    all_pages = pdf.pages
    for curpage in range(len(all_pages)):
        page = all_pages[curpage]
        axis_info_ = page.tmp_find_tables(table_settings={"keep_blank_chars": True})
        counttable = 0
        for table in page.extract_tables(table_settings={"keep_blank_chars": True}):
            axis_info_1 = axis_info_[counttable]
            counttable += 1
            header_index = 0
            variable_column_index = 0
            axis_info = []
            IsNone = [0] * (len(table) * len(table[0]))
            for i in range(len(table)):
                for j in range(len(table[0])):
                    if table[i][j]:
                        table[i][j] = table[i][j].replace("\n", "@@").replace(" ", "_").replace("!", " ")
                    if table[i][j] is None:
                        header_index = i + 1
                        IsNone[j * len(table) + i] = 1
                    if table[i][j] == "Variable":
                        variable_column_index = i + 1
                    if table[i][j] is not None:
                        axis_info.append(axis_info_1[j * len(table) + i - sum(IsNone[:j * len(table) + i])])
            if None in table[0]:
                DatasetName = table[0][0]
                if "(" in DatasetName:
                    DatasetName = DatasetName[: DatasetName.index("(")]
            else:
                DatasetName = "Dataset"
            if variable_column_index != 0:
                variable_column_ = np.array(table[variable_column_index:])[:, 0].repeat(len(table[variable_column_index]))
            columns_name_ = table[header_index] * (len(table) - header_index)
            table = np.array(table).flatten("C").reshape([-1, 1])
            aim_index = np.where(table == None)[0]
            table = np.delete(table, aim_index).reshape([-1, 1])
            if variable_column_index == 0 and DatasetName != "Dataset":
                variable_column = np.array("Codelist").repeat(len(table))
            elif DatasetName == "Dataset":
                variable_column = np.array("DatasetList").repeat(len(table))
            else:
                variable_column = np.concatenate((np.array(" ").repeat(len(table) - len(variable_column_)),
                                                  variable_column_), axis=0)
            columns_name = [" "] * (len(table) - len(columns_name_)) + columns_name_
            index_info = np.concatenate((np.array([page.page_number, DatasetName] * len(table)).reshape([-1, 2]),
                                         np.array(columns_name).reshape([-1, 1]),
                                         variable_column.reshape([-1, 1])), axis=1)
            update_table = np.concatenate((index_info, np.array(table)), axis=1)
            update_table = np.concatenate((update_table, np.array(axis_info)), axis=1)
            df = df.append(pd.DataFrame(update_table))
if check_version == "developer":
    # The path of the reader file.
    reader_path = output_path
    # The path of the translation file.
    translation_path = r"F:\Novartis\ReaderSpec\result\crad001y2301 spec.xlsx"
    # The path of the merged file.
    anotation_path = r"F:\Novartis\ReaderSpec\result/" + spec_number + "_Anotation.xlsx"
else:
    reader_path = " "
    translation_path = " "
    anotation_path = ""
Header = ["Page", "Dataset", "ToColumn", "Variable", "String", "X1", "Y1", "X2", "Y2"]
df.to_excel(output_path, index=True, header=Header)
writer = pd.ExcelWriter(output_path)
# Translation file.
reader_tale_ = pd.read_excel(reader_path)
reader_tale = reader_tale_[(reader_tale_.ToColumn == "Label") & (reader_tale_.Variable != " ")]
reader_tale.iloc[:, -1] = reader_tale.iloc[:, -1] - offset
translation_table = pd.read_excel(translation_path, sheet_name="Variables", usecols=[0, 1, 2, 3])
final_table = pd.merge(reader_tale, translation_table, how="outer", on=["Dataset", "Variable"])
# Column order of the merged file.
columns_order = ["Page", "ToColumn", "Dataset", "Variable", "Label", "String", "TARGET_LABEL", "X1", "Y1", "X2",
                 "Y2"]
final_table = final_table[columns_order]
# Fine-tune the coordinates.
final_table.loc[:, ("X2", "Y2")] = final_table.loc[:, ("X2", "Y2")].copy() - offset
final_table.loc[:, ("X1", "Y1")] = final_table.loc[:, ("X1", "Y1")].copy() + offset
fill_values = {"Page": float('inf'), "TARGET_LABEL": "未被翻译"}  # "未被翻译" = "not translated"
update_table = final_table.fillna(value=fill_values)
if check_version == "developer":
    update_table.to_excel(anotation_path, index=True)
    pd.ExcelWriter(anotation_path)
else:
    update_table["STUDYID"] = "CRAD001Y2301"
    update_table["FORM"] = "INC"
    update_table["PAGE"] = update_table["Page"]
    update_table["SOURCE"] = update_table["String"]
    update_table["TARGET"] = update_table["TARGET_LABEL"]
    update_table["SEQ"] = "1"
    update_table["FLAG"] = "label"
    update_table["STATUS"] = " "
    update_table["LOC"] = " "
    update_table["NOTES"] = " "
    update_table["Y1"] = 792 - update_table["Y1"]
    update_table["Y2"] = 792 - update_table["Y2"]
    update_table["WIDTH"] = update_table["X2"] - update_table["X1"]
    update_table["HEIGHT"] = update_table["Y2"] - update_table["Y1"]
    # Reorder columns.
    Header_obj = ["STUDYID", "PAGE", "FORM", "SEQ", "FLAG", "SOURCE",
                  "TARGET", "STATUS", "X1", "Y1", "X2", "Y2", "WIDTH", "HEIGHT", "LOC", "NOTES"]
    update_table = update_table[Header_obj]
    update_table.to_excel(anotation_path, index=True, header=Header)
...
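
Most of the work above sits on top of pdfplumber's extract_tables (tmp_find_tables looks like a locally patched helper rather than a published pdfplumber API). A minimal sketch of the extraction core, assuming an illustrative sample.pdf containing at least one table, with the same keep_blank_chars setting the snippet uses:

import pdfplumber

# Each extracted table is a list of rows; each row is a list of
# cell strings (None for empty cells).
with pdfplumber.open("sample.pdf") as pdf:
    for page in pdf.pages:
        for table in page.extract_tables(table_settings={"keep_blank_chars": True}):
            for row in table:
                print(row)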


Automation Testing Tutorials

Learn to execute automation testing from scratch with the LambdaTest Learning Hub, from setting up the prerequisites and running your first automation test to following best practices and diving deeper into advanced test scenarios. The Learning Hub compiles step-by-step guides to help you become proficient with different test automation frameworks such as Selenium, Cypress, and TestNG.


YouTube

You can also refer to the video tutorials on the LambdaTest YouTube channel for step-by-step demonstrations from industry experts.

Run localstack automation tests on the LambdaTest cloud grid

Perform automation testing on 3000+ real desktop and mobile devices online.

