Best Python code snippet using localstack_python
train_lr.py
Source: train_lr.py
1"""Private training on combined data from several data owners"""2import tf_encrypted as tfe3import json4# from common_private import  ModelOwner, LogisticRegression, XOwner, YOwner5from tfe_keeper.common_private import LogisticRegression6from tfe_keeper.read_data_tf import get_data_xy, get_data_x, get_data_y7from tf_encrypted.keras import backend as KE8import tensorflow as tf9# import sys10import time11import platform12import os13from commonutils.common_config import CommonConfig14if platform.system() == "Darwin":15    absolute_path = "/Users/qizhi.zqz/projects/TFE_zqz/tf-encrypted"16else:17    absolute_path = "/app/file"18def run(taskId, conf, modelFileMachine, modelFilePath, modelFilePlainTextPath, tf_config_file=None):19    progress_file = os.path.join(absolute_path, "tfe/" + taskId + "/train_progress")20    CommonConfig.http_logger.info("progress_file:" + str(progress_file))21    with open(progress_file, "w") as f:22        f.write(str(0.0) + "\n")23        f.flush()24    trainParams = conf.get("trainParams")25    CommonConfig.http_logger.info("train_lr/run:  trainParams:" + str(trainParams))26    learningRate = float(trainParams.get("learningRate"))27    batch_size = int(trainParams.get("batchSize"))28    epoch_num = int(trainParams.get("maxIter"))29    # epsilon = float(trainParams.get("epsilon"))30    # regularizationL1=float(trainParams.get("regularizationL1"))31    # regularizationL2=float(trainParams.get("regularizationL2"))32    dataSet = conf.get("dataSet")33    CommonConfig.http_logger.info("dataSet:" + str(dataSet))34    try:35        node_list = list(dataSet.keys())36        node_key_id1 = node_list.pop()37        node_key_id2 = node_list.pop()38        node_id1 = dataSet.get(node_key_id1)39        node_id2 = dataSet.get(node_key_id2)40    except Exception as e:41        CommonConfig.error_logger.exception(42            'get node from dataSet {} error, exception msg:{}'.format(str(dataSet), str(e)))43    # node_id1=dataSet.get("node_id1")44    # node_id2=dataSet.get("node_id2")45    # print("node1_containY:", node_id1.get("isContainY"))46    CommonConfig.http_logger.info("node1_containY:" + str(node_id1.get("isContainY")))47    try:48        if (node_id1.get("isContainY")):49            featureNumX = int(node_id2.get("featureNum"))50            matchColNumX = int(node_id2.get("matchColNum"))51            path_x = node_id2.get("storagePath")52            record_num = int(node_id2.get("fileRecord"))53            featureNumY = int(node_id1.get("featureNum"))54            matchColNumY = int(node_id1.get("matchColNum"))55            path_y = node_id1.get("storagePath")56        else:57            if not node_id2.get("isContainY"):58                CommonConfig.error_logger.error("both isContainY are False")59            featureNumY = int(node_id2.get("featureNum"))60            matchColNumY = int(node_id2.get("matchColNum"))61            path_y = node_id2.get("storagePath")62            record_num = int(node_id2.get("fileRecord"))63            featureNumX = int(node_id1.get("featureNum"))64            matchColNumX = int(node_id1.get("matchColNum"))65            path_x = node_id1.get("storagePath")66        CommonConfig.http_logger.info("path_x:" + str(path_x))67        CommonConfig.http_logger.info("path_y:" + str(path_y))68        path_x = os.path.join(absolute_path, path_x)69        path_y = os.path.join(absolute_path, path_y)70        train_batch_num = epoch_num * record_num // batch_size + 171        feature_num = featureNumX + featureNumY72        
CommonConfig.http_logger.info("path_x:" + str(path_x))73        CommonConfig.http_logger.info("path_y:" + str(path_y))74        CommonConfig.http_logger.info("train_batch_num:" + str(train_batch_num))75        CommonConfig.http_logger.info("feature_num:" + str(feature_num))76        # if len(sys.argv) >= 2:77        #   # config file was specified78        #   config_file = sys.argv[1]79        if tf_config_file:80            config = tfe.RemoteConfig.load(tf_config_file)81        else:82            # default to using local config83            config = tfe.LocalConfig([84                'XOwner',85                'YOwner',86                'RS'])87        CommonConfig.http_logger.info("train_lr/run:  config:" + str(config))88        tfe.set_config(config)89        players = ['XOwner', 'YOwner', 'RS']90        prot = tfe.protocol.SecureNN(*tfe.get_config().get_players(players))91        tfe.set_protocol(prot)92        # session_target = sys.argv[2] if len(sys.argv) > 2 else None93        if (featureNumY == 0):94            x_train = prot.define_local_computation(player='XOwner', computation_fn=get_data_x,95                                                    arguments=(batch_size, path_x, featureNumX,96                                                               matchColNumX, epoch_num * 2, 3.0, 1))97            y_train = prot.define_local_computation(player='YOwner', computation_fn=get_data_y, 98                                                    arguments=(batch_size, path_y, matchColNumY,99                                                               epoch_num * 2, 1))100        else:101            x_train1, y_train = prot.define_local_computation(player='YOwner', computation_fn=get_data_xy,102                                                              arguments=(batch_size, path_y, featureNumY,103                                                                         matchColNumY, epoch_num * 2, 3.0, 1))104            x_train0 = prot.define_local_computation(player='XOwner', computation_fn=get_data_x,105                                                     arguments=(batch_size, path_x, featureNumX, matchColNumX,106                                                                epoch_num * 2, 3.0, 1))107            x_train = prot.concat([x_train0, x_train1], axis=1)108        # print("x_train:", x_train)109        # print("y_train:", y_train)110        CommonConfig.http_logger.info("x_train:" + str(x_train))111        CommonConfig.http_logger.info("y_train:" + str(y_train))112        model = LogisticRegression(feature_num, learning_rate=learningRate)113        CommonConfig.http_logger.info("modelFilePath:" + str(modelFilePath))114        CommonConfig.http_logger.info("modelFileMachine:" + str(modelFileMachine))115        CommonConfig.http_logger.info("modelFilePlainTextPath:" + str(modelFilePlainTextPath))116        save_op = model.save(modelFilePath, modelFileMachine)117        save_as_plaintext_op = model.save_as_plaintext(modelFilePlainTextPath, modelFileMachine)118        # load_op = model.load(modelFilePath, modelFileMachine)119        CommonConfig.http_logger.info("save_op:" + str(save_op))120        # with tfe.Session() as sess:121        try:122            sess = KE.get_session()123            # sess.run(tfe.global_variables_initializer(), tag='init')124            sess.run(tf.global_variables_initializer())125            # sess.run(tf.local_variables_initializer())126        except Exception as e:127            CommonConfig.error_logger.exception(128                
'global_variables_initializer error, exception msg:{}'.format(str(e)))129        CommonConfig.http_logger.info("start_time:")130        start_time = time.time()131        CommonConfig.http_logger.info("start_time:" + str(start_time))132        CommonConfig.http_logger.info("train_lr/run: x_train:" + str(x_train))133        CommonConfig.http_logger.info("train_lr/run: y_train:" + str(y_train))134        CommonConfig.http_logger.info("train_lr/run: train_batch_num:" + str(train_batch_num))135        model.fit(sess, x_train, y_train, train_batch_num, progress_file)136        train_time = time.time() - start_time137        print("train_time=", train_time)138        print("Saving model...")139        sess.run(save_op)140        sess.run(save_as_plaintext_op)141        print("Save OK.")142        with open(progress_file, "w") as f:143            f.write("1.00")144            f.flush()145        # sess.close()146    except Exception as e:147        CommonConfig.error_logger.exception(148            'train.run() error, exception msg:{}'.format(str(e)))149if __name__ == '__main__':150    with open('./qqq/conf', 'r') as f:151        conf = f.read()152        print(conf)153    conf = conf.replace("True", "true").replace("False", "false")154    # print(input)155    conf = json.loads(conf)156    print(conf)157    import time158    start_time=time.time()159    # run(taskId="qqq", conf=conf, modelFileMachine="YOwner",160    #     modelFilePath="./qqq/model",161    #     modelFilePlainTextPath="./qqq/model/plaintext_model", tf_config_file="/Users/qizhi.zqz/projects/TFE_zqz/tf-encrypted/tfe_keeper/qqq/config.json")162    run(taskId="qqq", conf=conf, modelFileMachine="YOwner",163        modelFilePath="./qqq/model",164        modelFilePlainTextPath="./qqq/model/plaintext_model")165    end_time=time.time()166    print("time=", end_time-start_time)167    # run(taskId="qqq", conf=conf, modelFileMachine="YOwner",168    # modelFilePath="./qqq/model", modelFilePlainTextPath="./qqq/model/plaintext_model",...Postgres.py
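For orientation, below is a minimal sketch of the conf dictionary that run() reads and how it might be invoked. The key names (trainParams, dataSet, isContainY, featureNum, matchColNum, storagePath, fileRecord) come from the snippet itself; the node ids, values, paths, and the module path in the import are hypothetical.

# Hypothetical driver for run(); values and paths are illustrative only.
from tfe_keeper.train_lr import run  # assumed module location, matching the imports above

conf = {
    "trainParams": {
        "learningRate": "0.01",  # parsed with float() in run()
        "batchSize": "128",      # parsed with int()
        "maxIter": "5",          # number of epochs, parsed with int()
    },
    "dataSet": {
        # one node holds the labels (isContainY true), the other only features
        "node_a": {"isContainY": True, "featureNum": "10", "matchColNum": "1",
                   "storagePath": "data/y_owner.csv", "fileRecord": "10000"},
        "node_b": {"isContainY": False, "featureNum": "20", "matchColNum": "1",
                   "storagePath": "data/x_owner.csv", "fileRecord": "10000"},
    },
}

# run() writes its progress file under <absolute_path>/tfe/<taskId>/, so that
# directory must already exist for the taskId used here.
run(taskId="demo", conf=conf, modelFileMachine="YOwner",
    modelFilePath="./demo/model",
    modelFilePlainTextPath="./demo/model/plaintext_model")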
Postgres.py
Source: Postgres.py
#!/usr/bin/env python
# -*- coding: UTF-8 -*-
from sys import stderr as http_logger
from psycopg2 import connect, Error
from io import StringIO
from config import DatabaseConfig, Config


class UserModel:
    """
    Store a user query history
    """
    def __init__(self):
        db_info = Config()
        try:
            self.con = connect("dbname={0} user={1} host={2} password={3}\
                ".format(db_info.get_db(), db_info.get_user(),
                         db_info.get_host(), db_info.get_password()))
            self.cur = self.con.cursor()
        except Error as error:
            http_logger.write("Error connecting to the user database: {0}\
                              ".format(error))

    def __del__(self):
        try:
            self.con.close()
        except Error as error:
            http_logger.write("Error deleting connection of the user to the \
                DB: {0}".format(error))

    def get_queries_from_email(self, email):
        """
        Return the user query history
        """
        try:
            self.cur.execute("""SELECT "Query" FROM "Builder"."Queries" WHERE
                "email" = '%s' order by "Date" desc limit 10""" % email)
            return self.cur.fetchall()
        except Error as error:
            http_logger.write("\t*** ERROR *** Error getting the last \
                              queries,\nEmail: {0}\nError: {1}\n".format(
                                  email, error))
            return False

    def record_query(self, email, query):
        """
        Record the query used by the user
        """
        try:
            if "'" in query:
                query = query.replace("'", "''")
            self.cur.execute("""INSERT INTO "Builder"."Queries" ("email",
                             "Query") VALUES ('%s', '%s')""" % (email, query))
            self.con.commit()
            return True
        except Error as error:
            http_logger.write("\t*** ERROR *** Error inserting the query,\n\
                Email: {0}\nQuery: {1}\nError: {2}\n".format(email, query,
                                                             error))
            return False


class Database():
    """
    MIMIC database
    """
    def __init__(self):
        db_info = DatabaseConfig()
        try:
            self.con = connect("dbname={0} user={1} host={2} password={3} \
                               options='-c statement_timeout=15min'".format(
                                   db_info.get_db(), db_info.get_user(),
                                   db_info.get_host(), db_info.get_password()))
            self.cur = self.con.cursor()
        except Error as error:
            if self.con:
                self.con.rollback()
            http_logger.write("Error connecting: {0}".format(error))

    def __del__(self):
        try:
            self.con.close()
        except Error as error:
            http_logger.write("Error disconnecting: {0}".format(error))

    def get_tables(self, db_name):
        """
        Get table names from postgres using a schema
        """
        try:
            self.cur.execute("SELECT table_name FROM \
                information_schema.tables WHERE table_schema = '%s' ORDER BY \
                table_name" % db_name)
            return self.cur.fetchall()
        except Error as error:
            http_logger.write("Error with tables: {0}".format(error))
            return False

    def describe_tables(self, table):
        """
        Get table's information
        """
        try:
            self.cur.execute("SELECT ordinal_position, column_name, \
                data_type, is_nullable FROM information_schema.columns WHERE \
                table_name ='%s'" % table)
            return self.cur.fetchall()
        except Error as error:
            http_logger.write("Error describing tables: {0}".format(error))
            return False

    def random_query(self, query):
        """
        Execute a random query done by a user
        """
        try:
            http_logger.write("Query -- : {0}".format(query))
            if "information_schema" in query or "pg_" in query:
                return [["Bad Query"], ], ("Bad Query",), False
            try:
                self.cur.execute("%s" % query)
            except Error as error:
                http_logger.write("Error describing tables: {0}".format(error))
                return False, False, error
            listing = []
            if self.cur.description is not None:
                for item in self.cur.description:
                    listing.append(item[0])
            result = self.cur.fetchmany(500)
            return result, listing, False
        except Error as error:
            http_logger.write("Error describing tables: %s" % error)
            return False, False, error

    def get_table_preview(self, table):
        """
        Get 100 lines of preview from DB
        """
        try:
            query = "SELECT * FROM " + table + " LIMIT 100"
            self.cur.execute(query)
            listing = []
            if self.cur.description is not None:
                for item in self.cur.description:
                    listing.append(item[0])
            return self.cur.fetchmany(100), listing, False
        except Error as error:
            http_logger.write("Error describing tables: %s" % error)
            return False, False, error

    def random_query_download(self, query):
        """
        Execute a random query done by a user to download
        """
        try:
            if "information_schema" in query or "pg_" in query:
                return [["Bad Query"], ], ("Bad Query",), False
            text_stream = StringIO()
            if query.rstrip()[-1] == ';':
                query = query.rstrip()[:-1]
            copy_query = "COPY ({}) TO STDOUT WITH CSV HEADER DELIMITER ','\
                         ".format(query)
            self.cur.copy_expert(copy_query, text_stream)
            tmp = 0
            temp = u''
            text_stream.seek(0)
            while tmp < 5000:
                try:
                    temp += next(text_stream)
                    tmp += 1
                except:
                    tmp = 5000
            return temp, False
        except Error as error:
            http_logger.write("\t*** ERROR *** Error executing a random query \
                              to download.\nQuery: {0}\nError: {1}".format(
                                  query, error))
            return False, error

    def set_schema(self, schema):
        """
        Set the database schema
        """
        try:
            self.cur.execute("SET search_path TO " + schema)
            self.cur.execute("SET statement_timeout = '15min'")
            self.con.commit()
        except Error as error:...
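A brief usage sketch of the classes above, assuming a local config.py that provides Config and DatabaseConfig with valid PostgreSQL credentials; the schema name, table name, query strings, and email address are illustrative only.

# Illustrative only: requires config.py with working credentials for psycopg2.
from Postgres import Database, UserModel  # module name assumed from the file name

db = Database()
db.set_schema("mimiciii")                    # hypothetical schema name
tables = db.get_tables("mimiciii")           # list of table names, or False on error
preview, columns, err = db.get_table_preview("patients")   # hypothetical table

# random_query() returns (rows, column_names, error); error is False on success
rows, columns, err = db.random_query("SELECT count(*) FROM patients")
if err:
    print("query failed:", err)

history = UserModel()
history.record_query("user@example.com", "SELECT count(*) FROM patients")
print(history.get_queries_from_email("user@example.com"))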
