How to use the _load_extensions method in lisa

Best Python code snippet using lisa_python

cli.py

Source: cli.py (GitHub)

copy

Full Screen

...144 views=False,145):146 """List the tables in the database"""147 db = sqlite_utils.Database(path)148 _load_extensions(db, load_extension)149 headers = ["view" if views else "table"]150 if counts:151 headers.append("count")152 if columns:153 headers.append("columns")154 if schema:155 headers.append("schema")156 def _iter():157 if views:158 items = db.view_names()159 else:160 items = db.table_names(fts4=fts4, fts5=fts5)161 for name in items:162 row = [name]163 if counts:164 row.append(db[name].count)165 if columns:166 cols = [c.name for c in db[name].columns]167 if csv:168 row.append("\n".join(cols))169 else:170 row.append(cols)171 if schema:172 row.append(db[name].schema)173 yield row174 if table:175 print(tabulate.tabulate(_iter(), headers=headers, tablefmt=fmt))176 elif csv or tsv:177 writer = csv_std.writer(sys.stdout, dialect="excel-tab" if tsv else "excel")178 if not no_headers:179 writer.writerow(headers)180 for row in _iter():181 writer.writerow(row)182 else:183 for line in output_rows(_iter(), headers, nl, arrays, json_cols):184 click.echo(line)185@cli.command()186@click.argument(187 "path",188 type=click.Path(exists=True, file_okay=True, dir_okay=False, allow_dash=False),189 required=True,190)191@click.option(192 "--counts", help="Include row counts per view", default=False, is_flag=True193)194@output_options195@click.option(196 "--columns",197 help="Include list of columns for each view",198 is_flag=True,199 default=False,200)201@click.option(202 "--schema",203 help="Include schema for each view",204 is_flag=True,205 default=False,206)207@load_extension_option208def views(209 path,210 counts,211 nl,212 arrays,213 csv,214 tsv,215 no_headers,216 table,217 fmt,218 json_cols,219 columns,220 schema,221 load_extension,222):223 """List the views in the database"""224 tables.callback(225 path=path,226 fts4=False,227 fts5=False,228 counts=counts,229 nl=nl,230 arrays=arrays,231 csv=csv,232 tsv=tsv,233 no_headers=no_headers,234 table=table,235 fmt=fmt,236 
json_cols=json_cols,237 columns=columns,238 schema=schema,239 load_extension=load_extension,240 views=True,241 )242@cli.command()243@click.argument(244 "path",245 type=click.Path(exists=True, file_okay=True, dir_okay=False, allow_dash=False),246 required=True,247)248@click.argument("tables", nargs=-1)249@click.option("--no-vacuum", help="Don't run VACUUM", default=False, is_flag=True)250@load_extension_option251def optimize(path, tables, no_vacuum, load_extension):252 """Optimize all full-text search tables and then run VACUUM - should shrink the database file"""253 db = sqlite_utils.Database(path)254 _load_extensions(db, load_extension)255 if not tables:256 tables = db.table_names(fts4=True) + db.table_names(fts5=True)257 with db.conn:258 for table in tables:259 db[table].optimize()260 if not no_vacuum:261 db.vacuum()262@cli.command(name="rebuild-fts")263@click.argument(264 "path",265 type=click.Path(exists=True, file_okay=True, dir_okay=False, allow_dash=False),266 required=True,267)268@click.argument("tables", nargs=-1)269@load_extension_option270def rebuild_fts(path, tables, load_extension):271 """Rebuild all or specific full-text search tables"""272 db = sqlite_utils.Database(path)273 _load_extensions(db, load_extension)274 if not tables:275 tables = db.table_names(fts4=True) + db.table_names(fts5=True)276 with db.conn:277 for table in tables:278 db[table].rebuild_fts()279@cli.command()280@click.argument(281 "path",282 type=click.Path(exists=True, file_okay=True, dir_okay=False, allow_dash=False),283 required=True,284)285def vacuum(path):286 """Run VACUUM against the database"""287 sqlite_utils.Database(path).vacuum()288@cli.command()289@click.argument(290 "path",291 type=click.Path(exists=True, file_okay=True, dir_okay=False, allow_dash=False),292 required=True,293)294@load_extension_option295def dump(path, load_extension):296 """Output a SQL dump of the schema and full contents of the database"""297 db = sqlite_utils.Database(path)298 _load_extensions(db, 
load_extension)299 for line in db.conn.iterdump():300 click.echo(line)301@cli.command(name="add-column")302@click.argument(303 "path",304 type=click.Path(exists=True, file_okay=True, dir_okay=False, allow_dash=False),305 required=True,306)307@click.argument("table")308@click.argument("col_name")309@click.argument(310 "col_type",311 type=click.Choice(312 ["integer", "float", "blob", "text", "INTEGER", "FLOAT", "BLOB", "TEXT"]313 ),314 required=False,315)316@click.option(317 "--fk", type=str, required=False, help="Table to reference as a foreign key"318)319@click.option(320 "--fk-col",321 type=str,322 required=False,323 help="Referenced column on that foreign key table - if omitted will automatically use the primary key",324)325@click.option(326 "--not-null-default",327 type=str,328 required=False,329 help="Add NOT NULL DEFAULT 'TEXT' constraint",330)331@load_extension_option332def add_column(333 path, table, col_name, col_type, fk, fk_col, not_null_default, load_extension334):335 "Add a column to the specified table"336 db = sqlite_utils.Database(path)337 _load_extensions(db, load_extension)338 db[table].add_column(339 col_name, col_type, fk=fk, fk_col=fk_col, not_null_default=not_null_default340 )341@cli.command(name="add-foreign-key")342@click.argument(343 "path",344 type=click.Path(exists=True, file_okay=True, dir_okay=False, allow_dash=False),345 required=True,346)347@click.argument("table")348@click.argument("column")349@click.argument("other_table", required=False)350@click.argument("other_column", required=False)351@click.option(352 "--ignore",353 is_flag=True,354 help="If foreign key already exists, do nothing",355)356@load_extension_option357def add_foreign_key(358 path, table, column, other_table, other_column, ignore, load_extension359):360 """361 Add a new foreign key constraint to an existing table. Example usage:362 $ sqlite-utils add-foreign-key my.db books author_id authors id363 WARNING: Could corrupt your database! 
Back up your database file first.364 """365 db = sqlite_utils.Database(path)366 _load_extensions(db, load_extension)367 try:368 db[table].add_foreign_key(column, other_table, other_column, ignore=ignore)369 except AlterError as e:370 raise click.ClickException(e)371@cli.command(name="add-foreign-keys")372@click.argument(373 "path",374 type=click.Path(exists=True, file_okay=True, dir_okay=False, allow_dash=False),375 required=True,376)377@click.argument("foreign_key", nargs=-1)378@load_extension_option379def add_foreign_keys(path, foreign_key, load_extension):380 """381 Add multiple new foreign key constraints to a database. Example usage:382 \b383 sqlite-utils add-foreign-keys my.db \\384 books author_id authors id \\385 authors country_id countries id386 """387 db = sqlite_utils.Database(path)388 _load_extensions(db, load_extension)389 if len(foreign_key) % 4 != 0:390 raise click.ClickException(391 "Each foreign key requires four values: table, column, other_table, other_column"392 )393 tuples = []394 for i in range(len(foreign_key) // 4):395 tuples.append(tuple(foreign_key[i * 4 : (i * 4) + 4]))396 try:397 db.add_foreign_keys(tuples)398 except AlterError as e:399 raise click.ClickException(e)400@cli.command(name="index-foreign-keys")401@click.argument(402 "path",403 type=click.Path(exists=True, file_okay=True, dir_okay=False, allow_dash=False),404 required=True,405)406@load_extension_option407def index_foreign_keys(path, load_extension):408 """409 Ensure every foreign key column has an index on it.410 """411 db = sqlite_utils.Database(path)412 _load_extensions(db, load_extension)413 db.index_foreign_keys()414@cli.command(name="create-index")415@click.argument(416 "path",417 type=click.Path(exists=True, file_okay=True, dir_okay=False, allow_dash=False),418 required=True,419)420@click.argument("table")421@click.argument("column", nargs=-1, required=True)422@click.option("--name", help="Explicit name for the new index")423@click.option("--unique", help="Make this a 
unique index", default=False, is_flag=True)424@click.option(425 "--if-not-exists",426 help="Ignore if index already exists",427 default=False,428 is_flag=True,429)430@load_extension_option431def create_index(path, table, column, name, unique, if_not_exists, load_extension):432 """433 Add an index to the specified table covering the specified columns.434 Use "sqlite-utils create-index mydb -- -column" to specify descending435 order for a column.436 """437 db = sqlite_utils.Database(path)438 _load_extensions(db, load_extension)439 # Treat -prefix as descending for columns440 columns = []441 for col in column:442 if col.startswith("-"):443 col = DescIndex(col[1:])444 columns.append(col)445 db[table].create_index(446 columns, index_name=name, unique=unique, if_not_exists=if_not_exists447 )448@cli.command(name="enable-fts")449@click.argument(450 "path",451 type=click.Path(exists=True, file_okay=True, dir_okay=False, allow_dash=False),452 required=True,453)454@click.argument("table")455@click.argument("column", nargs=-1, required=True)456@click.option("--fts4", help="Use FTS4", default=False, is_flag=True)457@click.option("--fts5", help="Use FTS5", default=False, is_flag=True)458@click.option("--tokenize", help="Tokenizer to use, e.g. 
porter")459@click.option(460 "--create-triggers",461 help="Create triggers to update the FTS tables when the parent table changes.",462 default=False,463 is_flag=True,464)465@load_extension_option466def enable_fts(467 path, table, column, fts4, fts5, tokenize, create_triggers, load_extension468):469 "Enable full-text search for specific table and columns"470 fts_version = "FTS5"471 if fts4 and fts5:472 click.echo("Can only use one of --fts4 or --fts5", err=True)473 return474 elif fts4:475 fts_version = "FTS4"476 db = sqlite_utils.Database(path)477 _load_extensions(db, load_extension)478 db[table].enable_fts(479 column,480 fts_version=fts_version,481 tokenize=tokenize,482 create_triggers=create_triggers,483 )484@cli.command(name="populate-fts")485@click.argument(486 "path",487 type=click.Path(exists=True, file_okay=True, dir_okay=False, allow_dash=False),488 required=True,489)490@click.argument("table")491@click.argument("column", nargs=-1, required=True)492@load_extension_option493def populate_fts(path, table, column, load_extension):494 "Re-populate full-text search for specific table and columns"495 db = sqlite_utils.Database(path)496 _load_extensions(db, load_extension)497 db[table].populate_fts(column)498@cli.command(name="disable-fts")499@click.argument(500 "path",501 type=click.Path(exists=True, file_okay=True, dir_okay=False, allow_dash=False),502 required=True,503)504@click.argument("table")505@load_extension_option506def disable_fts(path, table, load_extension):507 "Disable full-text search for specific table"508 db = sqlite_utils.Database(path)509 _load_extensions(db, load_extension)510 db[table].disable_fts()511@cli.command(name="enable-wal")512@click.argument(513 "path",514 nargs=-1,515 type=click.Path(exists=True, file_okay=True, dir_okay=False, allow_dash=False),516 required=True,517)518@load_extension_option519def enable_wal(path, load_extension):520 "Enable WAL for database files"521 for path_ in path:522 db = sqlite_utils.Database(path_)523 
_load_extensions(db, load_extension)524 db.enable_wal()525@cli.command(name="disable-wal")526@click.argument(527 "path",528 nargs=-1,529 type=click.Path(exists=True, file_okay=True, dir_okay=False, allow_dash=False),530 required=True,531)532@load_extension_option533def disable_wal(path, load_extension):534 "Disable WAL for database files"535 for path_ in path:536 db = sqlite_utils.Database(path_)537 _load_extensions(db, load_extension)538 db.disable_wal()539@cli.command(name="enable-counts")540@click.argument(541 "path",542 type=click.Path(exists=True, file_okay=True, dir_okay=False, allow_dash=False),543 required=True,544)545@click.argument("tables", nargs=-1)546@load_extension_option547def enable_counts(path, tables, load_extension):548 "Configure triggers to update a _counts table with row counts"549 db = sqlite_utils.Database(path)550 _load_extensions(db, load_extension)551 if not tables:552 db.enable_counts()553 else:554 # Check all tables exist555 bad_tables = [table for table in tables if not db[table].exists()]556 if bad_tables:557 raise click.ClickException("Invalid tables: {}".format(bad_tables))558 for table in tables:559 db[table].enable_counts()560@cli.command(name="reset-counts")561@click.argument(562 "path",563 type=click.Path(exists=True, file_okay=True, dir_okay=False, allow_dash=False),564 required=True,565)566@load_extension_option567def reset_counts(path, load_extension):568 "Reset calculated counts in the _counts table"569 db = sqlite_utils.Database(path)570 _load_extensions(db, load_extension)571 db.reset_counts()572def insert_upsert_options(fn):573 for decorator in reversed(574 (575 click.argument(576 "path",577 type=click.Path(file_okay=True, dir_okay=False, allow_dash=False),578 required=True,579 ),580 click.argument("table"),581 click.argument("json_file", type=click.File("rb"), required=True),582 click.option(583 "--pk", help="Columns to use as the primary key, e.g. 
id", multiple=True584 ),585 click.option("--nl", is_flag=True, help="Expect newline-delimited JSON"),586 click.option("--flatten", is_flag=True, help="Flatten nested JSON objects"),587 click.option("-c", "--csv", is_flag=True, help="Expect CSV"),588 click.option("--tsv", is_flag=True, help="Expect TSV"),589 click.option("--delimiter", help="Delimiter to use for CSV files"),590 click.option("--quotechar", help="Quote character to use for CSV/TSV"),591 click.option(592 "--sniff", is_flag=True, help="Detect delimiter and quote character"593 ),594 click.option(595 "--no-headers", is_flag=True, help="CSV file has no header row"596 ),597 click.option(598 "--batch-size", type=int, default=100, help="Commit every X records"599 ),600 click.option(601 "--alter",602 is_flag=True,603 help="Alter existing table to add any missing columns",604 ),605 click.option(606 "--not-null",607 multiple=True,608 help="Columns that should be created as NOT NULL",609 ),610 click.option(611 "--default",612 multiple=True,613 type=(str, str),614 help="Default value that should be set for a column",615 ),616 click.option(617 "--encoding",618 help="Character encoding for input, defaults to utf-8",619 ),620 click.option(621 "-d",622 "--detect-types",623 is_flag=True,624 envvar="SQLITE_UTILS_DETECT_TYPES",625 help="Detect types for columns in CSV/TSV data",626 ),627 load_extension_option,628 click.option("--silent", is_flag=True, help="Do not show progress bar"),629 )630 ):631 fn = decorator(fn)632 return fn633def insert_upsert_implementation(634 path,635 table,636 json_file,637 pk,638 nl,639 flatten,640 csv,641 tsv,642 delimiter,643 quotechar,644 sniff,645 no_headers,646 batch_size,647 alter,648 upsert,649 ignore=False,650 replace=False,651 truncate=False,652 not_null=None,653 default=None,654 encoding=None,655 detect_types=None,656 load_extension=None,657 silent=False,658):659 db = sqlite_utils.Database(path)660 _load_extensions(db, load_extension)661 if (delimiter or quotechar or sniff or 
no_headers) and not tsv:662 csv = True663 if (nl + csv + tsv) >= 2:664 raise click.ClickException("Use just one of --nl, --csv or --tsv")665 if (csv or tsv) and flatten:666 raise click.ClickException("--flatten cannot be used with --csv or --tsv")667 if encoding and not (csv or tsv):668 raise click.ClickException("--encoding must be used with --csv or --tsv")669 if pk and len(pk) == 1:670 pk = pk[0]671 encoding = encoding or "utf-8-sig"672 buffered = io.BufferedReader(json_file, buffer_size=4096)673 decoded = io.TextIOWrapper(buffered, encoding=encoding)674 tracker = None675 if csv or tsv:676 if sniff:677 # Read first 2048 bytes and use that to detect678 first_bytes = buffered.peek(2048)679 dialect = csv_std.Sniffer().sniff(first_bytes.decode(encoding, "ignore"))680 else:681 dialect = "excel-tab" if tsv else "excel"682 with file_progress(decoded, silent=silent) as decoded:683 csv_reader_args = {"dialect": dialect}684 if delimiter:685 csv_reader_args["delimiter"] = delimiter686 if quotechar:687 csv_reader_args["quotechar"] = quotechar688 reader = csv_std.reader(decoded, **csv_reader_args)689 first_row = next(reader)690 if no_headers:691 headers = ["untitled_{}".format(i + 1) for i in range(len(first_row))]692 reader = itertools.chain([first_row], reader)693 else:694 headers = first_row695 docs = (dict(zip(headers, row)) for row in reader)696 if detect_types:697 tracker = TypeTracker()698 docs = tracker.wrap(docs)699 else:700 try:701 if nl:702 docs = (json.loads(line) for line in decoded)703 else:704 docs = json.load(decoded)705 if isinstance(docs, dict):706 docs = [docs]707 except json.decoder.JSONDecodeError:708 raise click.ClickException(709 "Invalid JSON - use --csv for CSV or --tsv for TSV files"710 )711 if flatten:712 docs = (dict(_flatten(doc)) for doc in docs)713 extra_kwargs = {"ignore": ignore, "replace": replace, "truncate": truncate}714 if not_null:715 extra_kwargs["not_null"] = set(not_null)716 if default:717 extra_kwargs["defaults"] = dict(default)718 
if upsert:719 extra_kwargs["upsert"] = upsert720 # Apply {"$base64": true, ...} decoding, if needed721 docs = (decode_base64_values(doc) for doc in docs)722 try:723 db[table].insert_all(724 docs, pk=pk, batch_size=batch_size, alter=alter, **extra_kwargs725 )726 except Exception as e:727 if (728 isinstance(e, sqlite3.OperationalError)729 and e.args730 and "has no column named" in e.args[0]731 ):732 raise click.ClickException(733 "{}\n\nTry using --alter to add additional columns".format(e.args[0])734 )735 # If we can find sql= and parameters= arguments, show those736 variables = _find_variables(e.__traceback__, ["sql", "parameters"])737 if "sql" in variables and "parameters" in variables:738 raise click.ClickException(739 "{}\n\nsql = {}\nparameters = {}".format(740 str(e), variables["sql"], variables["parameters"]741 )742 )743 else:744 raise745 if tracker is not None:746 db[table].transform(types=tracker.types)747def _flatten(d):748 for key, value in d.items():749 if isinstance(value, dict):750 for key2, value2 in _flatten(value):751 yield key + "_" + key2, value2752 else:753 yield key, value754def _find_variables(tb, vars):755 to_find = list(vars)756 found = {}757 for var in to_find:758 if var in tb.tb_frame.f_locals:759 vars.remove(var)760 found[var] = tb.tb_frame.f_locals[var]761 if vars and tb.tb_next:762 found.update(_find_variables(tb.tb_next, vars))763 return found764@cli.command()765@insert_upsert_options766@click.option(767 "--ignore", is_flag=True, default=False, help="Ignore records if pk already exists"768)769@click.option(770 "--replace",771 is_flag=True,772 default=False,773 help="Replace records if pk already exists",774)775@click.option(776 "--truncate",777 is_flag=True,778 default=False,779 help="Truncate table before inserting records, if table already exists",780)781def insert(782 path,783 table,784 json_file,785 pk,786 nl,787 flatten,788 csv,789 tsv,790 delimiter,791 quotechar,792 sniff,793 no_headers,794 batch_size,795 alter,796 encoding,797 
detect_types,798 load_extension,799 silent,800 ignore,801 replace,802 truncate,803 not_null,804 default,805):806 """807 Insert records from JSON file into a table, creating the table if it808 does not already exist.809 Input should be a JSON array of objects, unless --nl or --csv is used.810 """811 try:812 insert_upsert_implementation(813 path,814 table,815 json_file,816 pk,817 nl,818 flatten,819 csv,820 tsv,821 delimiter,822 quotechar,823 sniff,824 no_headers,825 batch_size,826 alter=alter,827 upsert=False,828 ignore=ignore,829 replace=replace,830 truncate=truncate,831 encoding=encoding,832 detect_types=detect_types,833 load_extension=load_extension,834 silent=silent,835 not_null=not_null,836 default=default,837 )838 except UnicodeDecodeError as ex:839 raise click.ClickException(UNICODE_ERROR.format(ex))840@cli.command()841@insert_upsert_options842def upsert(843 path,844 table,845 json_file,846 pk,847 nl,848 flatten,849 csv,850 tsv,851 batch_size,852 delimiter,853 quotechar,854 sniff,855 no_headers,856 alter,857 not_null,858 default,859 encoding,860 detect_types,861 load_extension,862 silent,863):864 """865 Upsert records based on their primary key. 
Works like 'insert' but if866 an incoming record has a primary key that matches an existing record867 the existing record will be updated.868 """869 try:870 insert_upsert_implementation(871 path,872 table,873 json_file,874 pk,875 nl,876 flatten,877 csv,878 tsv,879 delimiter,880 quotechar,881 sniff,882 no_headers,883 batch_size,884 alter=alter,885 upsert=True,886 not_null=not_null,887 default=default,888 encoding=encoding,889 load_extension=load_extension,890 silent=silent,891 )892 except UnicodeDecodeError as ex:893 raise click.ClickException(UNICODE_ERROR.format(ex))894@cli.command(name="create-table")895@click.argument(896 "path",897 type=click.Path(file_okay=True, dir_okay=False, allow_dash=False),898 required=True,899)900@click.argument("table")901@click.argument("columns", nargs=-1, required=True)902@click.option("--pk", help="Column to use as primary key")903@click.option(904 "--not-null",905 multiple=True,906 help="Columns that should be created as NOT NULL",907)908@click.option(909 "--default",910 multiple=True,911 type=(str, str),912 help="Default value that should be set for a column",913)914@click.option(915 "--fk",916 multiple=True,917 type=(str, str, str),918 help="Column, other table, other column to set as a foreign key",919)920@click.option(921 "--ignore",922 is_flag=True,923 help="If table already exists, do nothing",924)925@click.option(926 "--replace",927 is_flag=True,928 help="If table already exists, replace it",929)930@load_extension_option931def create_table(932 path, table, columns, pk, not_null, default, fk, ignore, replace, load_extension933):934 """935 Add a table with the specified columns. 
Columns should be specified using936 name, type pairs, for example:937 \b938 sqlite-utils create-table my.db people \\939 id integer \\940 name text \\941 height float \\942 photo blob --pk id943 """944 db = sqlite_utils.Database(path)945 _load_extensions(db, load_extension)946 if len(columns) % 2 == 1:947 raise click.ClickException(948 "columns must be an even number of 'name' 'type' pairs"949 )950 coltypes = {}951 columns = list(columns)952 while columns:953 name = columns.pop(0)954 ctype = columns.pop(0)955 if ctype.upper() not in VALID_COLUMN_TYPES:956 raise click.ClickException(957 "column types must be one of {}".format(VALID_COLUMN_TYPES)958 )959 coltypes[name] = ctype.upper()960 # Does table already exist?961 if table in db.table_names():962 if ignore:963 return964 elif replace:965 db[table].drop()966 else:967 raise click.ClickException(968 'Table "{}" already exists. Use --replace to delete and replace it.'.format(969 table970 )971 )972 db[table].create(973 coltypes, pk=pk, not_null=not_null, defaults=dict(default), foreign_keys=fk974 )975@cli.command(name="drop-table")976@click.argument(977 "path",978 type=click.Path(file_okay=True, dir_okay=False, allow_dash=False),979 required=True,980)981@click.argument("table")982@click.option("--ignore", is_flag=True)983@load_extension_option984def drop_table(path, table, ignore, load_extension):985 "Drop the specified table"986 db = sqlite_utils.Database(path)987 _load_extensions(db, load_extension)988 try:989 db[table].drop(ignore=ignore)990 except sqlite3.OperationalError:991 raise click.ClickException('Table "{}" does not exist'.format(table))992@cli.command(name="create-view")993@click.argument(994 "path",995 type=click.Path(file_okay=True, dir_okay=False, allow_dash=False),996 required=True,997)998@click.argument("view")999@click.argument("select")1000@click.option(1001 "--ignore",1002 is_flag=True,1003 help="If view already exists, do nothing",1004)1005@click.option(1006 "--replace",1007 is_flag=True,1008 
help="If view already exists, replace it",1009)1010@load_extension_option1011def create_view(path, view, select, ignore, replace, load_extension):1012 "Create a view for the provided SELECT query"1013 db = sqlite_utils.Database(path)1014 _load_extensions(db, load_extension)1015 # Does view already exist?1016 if view in db.view_names():1017 if ignore:1018 return1019 elif replace:1020 db[view].drop()1021 else:1022 raise click.ClickException(1023 'View "{}" already exists. Use --replace to delete and replace it.'.format(1024 view1025 )1026 )1027 db.create_view(view, select)1028@cli.command(name="drop-view")1029@click.argument(1030 "path",1031 type=click.Path(file_okay=True, dir_okay=False, allow_dash=False),1032 required=True,1033)1034@click.argument("view")1035@click.option("--ignore", is_flag=True)1036@load_extension_option1037def drop_view(path, view, ignore, load_extension):1038 "Drop the specified view"1039 db = sqlite_utils.Database(path)1040 _load_extensions(db, load_extension)1041 try:1042 db[view].drop(ignore=ignore)1043 except sqlite3.OperationalError:1044 raise click.ClickException('View "{}" does not exist'.format(view))1045@cli.command()1046@click.argument(1047 "path",1048 type=click.Path(file_okay=True, dir_okay=False, allow_dash=False),1049 required=True,1050)1051@click.argument("sql")1052@click.option(1053 "--attach",1054 type=(str, click.Path(file_okay=True, dir_okay=False, allow_dash=False)),1055 multiple=True,1056 help="Additional databases to attach - specify alias and filepath",1057)1058@output_options1059@click.option("-r", "--raw", is_flag=True, help="Raw output, first column of first row")1060@click.option(1061 "-p",1062 "--param",1063 multiple=True,1064 type=(str, str),1065 help="Named :parameters for SQL query",1066)1067@load_extension_option1068def query(1069 path,1070 sql,1071 attach,1072 nl,1073 arrays,1074 csv,1075 tsv,1076 no_headers,1077 table,1078 fmt,1079 json_cols,1080 raw,1081 param,1082 load_extension,1083):1084 "Execute SQL query 
and return the results as JSON"1085 db = sqlite_utils.Database(path)1086 for alias, attach_path in attach:1087 db.attach(alias, attach_path)1088 _load_extensions(db, load_extension)1089 db.register_fts4_bm25()1090 _execute_query(1091 db, sql, param, raw, table, csv, tsv, no_headers, fmt, nl, arrays, json_cols1092 )1093@cli.command()1094@click.argument(1095 "paths",1096 type=click.Path(file_okay=True, dir_okay=False, allow_dash=True),1097 required=False,1098 nargs=-1,1099)1100@click.argument("sql")1101@click.option(1102 "--attach",1103 type=(str, click.Path(file_okay=True, dir_okay=False, allow_dash=False)),1104 multiple=True,1105 help="Additional databases to attach - specify alias and filepath",1106)1107@output_options1108@click.option("-r", "--raw", is_flag=True, help="Raw output, first column of first row")1109@click.option(1110 "-p",1111 "--param",1112 multiple=True,1113 type=(str, str),1114 help="Named :parameters for SQL query",1115)1116@click.option(1117 "--encoding",1118 help="Character encoding for CSV input, defaults to utf-8",1119)1120@click.option(1121 "-n",1122 "--no-detect-types",1123 is_flag=True,1124 help="Treat all CSV/TSV columns as TEXT",1125)1126@click.option("--schema", is_flag=True, help="Show SQL schema for in-memory database")1127@click.option("--dump", is_flag=True, help="Dump SQL for in-memory database")1128@click.option(1129 "--save",1130 type=click.Path(file_okay=True, dir_okay=False, allow_dash=False),1131 help="Save in-memory database to this file",1132)1133@click.option(1134 "--analyze",1135 is_flag=True,1136 help="Analyze resulting tables and output results",1137)1138@load_extension_option1139def memory(1140 paths,1141 sql,1142 attach,1143 nl,1144 arrays,1145 csv,1146 tsv,1147 no_headers,1148 table,1149 fmt,1150 json_cols,1151 raw,1152 param,1153 encoding,1154 no_detect_types,1155 schema,1156 dump,1157 save,1158 analyze,1159 load_extension,1160):1161 """Execute SQL query against an in-memory database, optionally populated by imported 
data1162 To import data from CSV, TSV or JSON files pass them on the command-line:1163 \b1164 sqlite-utils memory one.csv two.json \\1165 "select * from one join two on one.two_id = two.id"1166 For data piped into the tool from standard input, use "-" or "stdin":1167 \b1168 cat animals.csv | sqlite-utils memory - \\1169 "select * from stdin where species = 'dog'"1170 The format of the data will be automatically detected. You can specify the format1171 explicitly using :json, :csv, :tsv or :nl (for newline-delimited JSON) - for example:1172 \b1173 cat animals.csv | sqlite-utils memory stdin:csv places.dat:nl \\1174 "select * from stdin where place_id in (select id from places)"1175 Use --schema to view the SQL schema of any imported files:1176 \b1177 sqlite-utils memory animals.csv --schema1178 """1179 db = sqlite_utils.Database(memory=True)1180 # If --dump or --save or --analyze used but no paths detected, assume SQL query is a path:1181 if (dump or save or schema or analyze) and not paths:1182 paths = [sql]1183 sql = None1184 for i, path in enumerate(paths):1185 # Path may have a :format suffix1186 if ":" in path and path.rsplit(":", 1)[-1].upper() in Format.__members__:1187 path, suffix = path.rsplit(":", 1)1188 format = Format[suffix.upper()]1189 else:1190 format = None1191 if path in ("-", "stdin"):1192 csv_fp = sys.stdin.buffer1193 csv_table = "stdin"1194 else:1195 csv_path = pathlib.Path(path)1196 csv_table = csv_path.stem1197 csv_fp = csv_path.open("rb")1198 rows, format_used = rows_from_file(csv_fp, format=format, encoding=encoding)1199 tracker = None1200 if format_used in (Format.CSV, Format.TSV) and not no_detect_types:1201 tracker = TypeTracker()1202 rows = tracker.wrap(rows)1203 db[csv_table].insert_all(rows, alter=True)1204 if tracker is not None:1205 db[csv_table].transform(types=tracker.types)1206 # Add convenient t / t1 / t2 views1207 view_names = ["t{}".format(i + 1)]1208 if i == 0:1209 view_names.append("t")1210 for view_name in view_names:1211 if 
not db[view_name].exists():1212 db.create_view(view_name, "select * from [{}]".format(csv_table))1213 if analyze:1214 _analyze(db, tables=None, columns=None, save=False)1215 return1216 if dump:1217 for line in db.conn.iterdump():1218 click.echo(line)1219 return1220 if schema:1221 click.echo(db.schema)1222 return1223 if save:1224 db2 = sqlite_utils.Database(save)1225 for line in db.conn.iterdump():1226 db2.execute(line)1227 return1228 for alias, attach_path in attach:1229 db.attach(alias, attach_path)1230 _load_extensions(db, load_extension)1231 db.register_fts4_bm25()1232 _execute_query(1233 db, sql, param, raw, table, csv, tsv, no_headers, fmt, nl, arrays, json_cols1234 )1235def _execute_query(1236 db, sql, param, raw, table, csv, tsv, no_headers, fmt, nl, arrays, json_cols1237):1238 with db.conn:1239 try:1240 cursor = db.execute(sql, dict(param))1241 except sqlite3.OperationalError as e:1242 raise click.ClickException(str(e))1243 if cursor.description is None:1244 # This was an update/insert1245 headers = ["rows_affected"]1246 cursor = [[cursor.rowcount]]1247 else:1248 headers = [c[0] for c in cursor.description]1249 if raw:1250 data = cursor.fetchone()[0]1251 if isinstance(data, bytes):1252 sys.stdout.buffer.write(data)1253 else:1254 sys.stdout.write(str(data))1255 elif table:1256 print(tabulate.tabulate(list(cursor), headers=headers, tablefmt=fmt))1257 elif csv or tsv:1258 writer = csv_std.writer(sys.stdout, dialect="excel-tab" if tsv else "excel")1259 if not no_headers:1260 writer.writerow(headers)1261 for row in cursor:1262 writer.writerow(row)1263 else:1264 for line in output_rows(cursor, headers, nl, arrays, json_cols):1265 click.echo(line)1266@cli.command()1267@click.argument(1268 "path",1269 type=click.Path(file_okay=True, dir_okay=False, allow_dash=False),1270 required=True,1271)1272@click.argument("dbtable")1273@click.argument("q")1274@click.option("-o", "--order", type=str, help="Order by ('column' or 'column desc')")1275@click.option("-c", "--column", 
type=str, multiple=True, help="Columns to return")1276@click.option(1277 "--limit",1278 type=int,1279 help="Number of rows to return - defaults to everything",1280)1281@click.option(1282 "--sql", "show_sql", is_flag=True, help="Show SQL query that would be run"1283)1284@click.option("--quote", is_flag=True, help="Apply FTS quoting rules to search term")1285@output_options1286@load_extension_option1287@click.pass_context1288def search(1289 ctx,1290 path,1291 dbtable,1292 q,1293 order,1294 show_sql,1295 quote,1296 column,1297 limit,1298 nl,1299 arrays,1300 csv,1301 tsv,1302 no_headers,1303 table,1304 fmt,1305 json_cols,1306 load_extension,1307):1308 "Execute a full-text search against this table"1309 db = sqlite_utils.Database(path)1310 _load_extensions(db, load_extension)1311 # Check table exists1312 table_obj = db[dbtable]1313 if not table_obj.exists():1314 raise click.ClickException("Table '{}' does not exist".format(dbtable))1315 if not table_obj.detect_fts():1316 raise click.ClickException(1317 "Table '{}' is not configured for full-text search".format(dbtable)1318 )1319 if column:1320 # Check they all exist1321 table_columns = table_obj.columns_dict1322 for c in column:1323 if c not in table_columns:1324 raise click.ClickException(1325 "Table '{}' has no column '{}".format(dbtable, c)1326 )1327 sql = table_obj.search_sql(columns=column, order_by=order, limit=limit)1328 if show_sql:1329 click.echo(sql)1330 return1331 if quote:1332 q = db.quote_fts(q)1333 try:1334 ctx.invoke(1335 query,1336 path=path,1337 sql=sql,1338 nl=nl,1339 arrays=arrays,1340 csv=csv,1341 tsv=tsv,1342 no_headers=no_headers,1343 table=table,1344 fmt=fmt,1345 json_cols=json_cols,1346 param=[("query", q)],1347 load_extension=load_extension,1348 )1349 except click.ClickException as e:1350 if "malformed MATCH expression" in str(e) or "unterminated string" in str(e):1351 raise click.ClickException(1352 "{}\n\nTry running this again with the --quote option".format(str(e))1353 )1354 else:1355 
raise1356@cli.command()1357@click.argument(1358 "path",1359 type=click.Path(file_okay=True, dir_okay=False, allow_dash=False),1360 required=True,1361)1362@click.argument("dbtable")1363@click.option("-c", "--column", type=str, multiple=True, help="Columns to return")1364@output_options1365@load_extension_option1366@click.pass_context1367def rows(1368 ctx,1369 path,1370 dbtable,1371 column,1372 nl,1373 arrays,1374 csv,1375 tsv,1376 no_headers,1377 table,1378 fmt,1379 json_cols,1380 load_extension,1381):1382 "Output all rows in the specified table"1383 columns = "*"1384 if column:1385 columns = ", ".join("[{}]".format(c) for c in column)1386 ctx.invoke(1387 query,1388 path=path,1389 sql="select {} from [{}]".format(columns, dbtable),1390 nl=nl,1391 arrays=arrays,1392 csv=csv,1393 tsv=tsv,1394 no_headers=no_headers,1395 table=table,1396 fmt=fmt,1397 json_cols=json_cols,1398 load_extension=load_extension,1399 )1400@cli.command()1401@click.argument(1402 "path",1403 type=click.Path(file_okay=True, dir_okay=False, allow_dash=False),1404 required=True,1405)1406@click.argument("tables", nargs=-1)1407@output_options1408@load_extension_option1409@click.pass_context1410def triggers(1411 ctx,1412 path,1413 tables,1414 nl,1415 arrays,1416 csv,1417 tsv,1418 no_headers,1419 table,1420 fmt,1421 json_cols,1422 load_extension,1423):1424 "Show triggers configured in this database"1425 sql = "select name, tbl_name as [table], sql from sqlite_master where type = 'trigger'"1426 if tables:1427 quote = sqlite_utils.Database(memory=True).quote1428 sql += " and [table] in ({})".format(1429 ", ".join(quote(table) for table in tables)1430 )1431 ctx.invoke(1432 query,1433 path=path,1434 sql=sql,1435 nl=nl,1436 arrays=arrays,1437 csv=csv,1438 tsv=tsv,1439 no_headers=no_headers,1440 table=table,1441 fmt=fmt,1442 json_cols=json_cols,1443 load_extension=load_extension,1444 )1445@cli.command()1446@click.argument(1447 "path",1448 type=click.Path(file_okay=True, dir_okay=False, allow_dash=False),1449 
required=True,1450)1451@click.argument("tables", nargs=-1)1452@click.option("--aux", is_flag=True, help="Include auxiliary columns")1453@output_options1454@load_extension_option1455@click.pass_context1456def indexes(1457 ctx,1458 path,1459 tables,1460 aux,1461 nl,1462 arrays,1463 csv,1464 tsv,1465 no_headers,1466 table,1467 fmt,1468 json_cols,1469 load_extension,1470):1471 "Show indexes for this database"1472 sql = """1473 select1474 sqlite_master.name as "table",1475 indexes.name as index_name,1476 xinfo.*1477 from sqlite_master1478 join pragma_index_list(sqlite_master.name) indexes1479 join pragma_index_xinfo(index_name) xinfo1480 where1481 sqlite_master.type = 'table'1482 """1483 if tables:1484 quote = sqlite_utils.Database(memory=True).quote1485 sql += " and sqlite_master.name in ({})".format(1486 ", ".join(quote(table) for table in tables)1487 )1488 if not aux:1489 sql += " and xinfo.key = 1"1490 ctx.invoke(1491 query,1492 path=path,1493 sql=sql,1494 nl=nl,1495 arrays=arrays,1496 csv=csv,1497 tsv=tsv,1498 no_headers=no_headers,1499 table=table,1500 fmt=fmt,1501 json_cols=json_cols,1502 load_extension=load_extension,1503 )1504@cli.command()1505@click.argument(1506 "path",1507 type=click.Path(file_okay=True, dir_okay=False, allow_dash=False),1508 required=True,1509)1510@click.argument("tables", nargs=-1, required=False)1511@load_extension_option1512def schema(1513 path,1514 tables,1515 load_extension,1516):1517 "Show full schema for this database or for specified tables"1518 db = sqlite_utils.Database(path)1519 _load_extensions(db, load_extension)1520 if tables:1521 for table in tables:1522 click.echo(db[table].schema)1523 else:1524 click.echo(db.schema)1525@cli.command()1526@click.argument(1527 "path",1528 type=click.Path(file_okay=True, dir_okay=False, allow_dash=False),1529 required=True,1530)1531@click.argument("table")1532@click.option(1533 "--type",1534 type=(1535 str,1536 click.Choice(["INTEGER", "TEXT", "FLOAT", "BLOB"], case_sensitive=False),1537 ),1538 
multiple=True,1539 help="Change column type to INTEGER, TEXT, FLOAT or BLOB",1540)1541@click.option("--drop", type=str, multiple=True, help="Drop this column")1542@click.option(1543 "--rename", type=(str, str), multiple=True, help="Rename this column to X"1544)1545@click.option("-o", "--column-order", type=str, multiple=True, help="Reorder columns")1546@click.option("--not-null", type=str, multiple=True, help="Set this column to NOT NULL")1547@click.option(1548 "--not-null-false", type=str, multiple=True, help="Remove NOT NULL from this column"1549)1550@click.option("--pk", type=str, multiple=True, help="Make this column the primary key")1551@click.option(1552 "--pk-none", is_flag=True, help="Remove primary key (convert to rowid table)"1553)1554@click.option(1555 "--default",1556 type=(str, str),1557 multiple=True,1558 help="Set default value for this column",1559)1560@click.option(1561 "--default-none", type=str, multiple=True, help="Remove default from this column"1562)1563@click.option(1564 "--drop-foreign-key",1565 type=str,1566 multiple=True,1567 help="Drop this foreign key constraint",1568)1569@click.option("--sql", is_flag=True, help="Output SQL without executing it")1570@load_extension_option1571def transform(1572 path,1573 table,1574 type,1575 drop,1576 rename,1577 column_order,1578 not_null,1579 not_null_false,1580 pk,1581 pk_none,1582 default,1583 default_none,1584 drop_foreign_key,1585 sql,1586 load_extension,1587):1588 "Transform a table beyond the capabilities of ALTER TABLE"1589 db = sqlite_utils.Database(path)1590 _load_extensions(db, load_extension)1591 types = {}1592 kwargs = {}1593 for column, ctype in type:1594 if ctype.upper() not in VALID_COLUMN_TYPES:1595 raise click.ClickException(1596 "column types must be one of {}".format(VALID_COLUMN_TYPES)1597 )1598 types[column] = ctype.upper()1599 not_null_dict = {}1600 for column in not_null:1601 not_null_dict[column] = True1602 for column in not_null_false:1603 not_null_dict[column] = False1604 
default_dict = {}1605 for column, value in default:1606 default_dict[column] = value1607 for column in default_none:1608 default_dict[column] = None1609 kwargs["types"] = types1610 kwargs["drop"] = set(drop)1611 kwargs["rename"] = dict(rename)1612 kwargs["column_order"] = column_order or None1613 kwargs["not_null"] = not_null_dict1614 if pk:1615 if len(pk) == 1:1616 kwargs["pk"] = pk[0]1617 else:1618 kwargs["pk"] = pk1619 elif pk_none:1620 kwargs["pk"] = None1621 kwargs["defaults"] = default_dict1622 if drop_foreign_key:1623 kwargs["drop_foreign_keys"] = drop_foreign_key1624 if sql:1625 for line in db[table].transform_sql(**kwargs):1626 click.echo(line)1627 else:1628 db[table].transform(**kwargs)1629@cli.command()1630@click.argument(1631 "path",1632 type=click.Path(file_okay=True, dir_okay=False, allow_dash=False),1633 required=True,1634)1635@click.argument("table")1636@click.argument("columns", nargs=-1, required=True)1637@click.option(1638 "--table", "other_table", help="Name of the other table to extract columns to"1639)1640@click.option("--fk-column", help="Name of the foreign key column to add to the table")1641@click.option(1642 "--rename",1643 type=(str, str),1644 multiple=True,1645 help="Rename this column in extracted table",1646)1647@load_extension_option1648def extract(1649 path,1650 table,1651 columns,1652 other_table,1653 fk_column,1654 rename,1655 load_extension,1656):1657 "Extract one or more columns into a separate table"1658 db = sqlite_utils.Database(path)1659 _load_extensions(db, load_extension)1660 kwargs = dict(1661 columns=columns,1662 table=other_table,1663 fk_column=fk_column,1664 rename=dict(rename),1665 )1666 db[table].extract(**kwargs)1667@cli.command(name="insert-files")1668@click.argument(1669 "path",1670 type=click.Path(file_okay=True, dir_okay=False, allow_dash=False),1671 required=True,1672)1673@click.argument("table")1674@click.argument(1675 "file_or_dir",1676 nargs=-1,1677 required=True,1678 type=click.Path(file_okay=True, 
dir_okay=True, allow_dash=True),1679)1680@click.option(1681 "-c",1682 "--column",1683 type=str,1684 multiple=True,1685 help="Column definitions for the table",1686)1687@click.option("--pk", type=str, help="Column to use as primary key")1688@click.option("--alter", is_flag=True, help="Alter table to add missing columns")1689@click.option("--replace", is_flag=True, help="Replace files with matching primary key")1690@click.option("--upsert", is_flag=True, help="Upsert files with matching primary key")1691@click.option("--name", type=str, help="File name to use")1692@click.option("--text", is_flag=True, help="Store file content as TEXT, not BLOB")1693@click.option(1694 "--encoding",1695 help="Character encoding for input, defaults to utf-8",1696)1697@click.option("-s", "--silent", is_flag=True, help="Don't show a progress bar")1698@load_extension_option1699def insert_files(1700 path,1701 table,1702 file_or_dir,1703 column,1704 pk,1705 alter,1706 replace,1707 upsert,1708 name,1709 text,1710 encoding,1711 silent,1712 load_extension,1713):1714 """1715 Insert one or more files using BLOB columns in the specified table1716 Example usage:1717 \b1718 sqlite-utils insert-files pics.db images *.gif \\1719 -c name:name \\1720 -c content:content \\1721 -c content_hash:sha256 \\1722 -c created:ctime_iso \\1723 -c modified:mtime_iso \\1724 -c size:size \\1725 --pk name1726 """1727 if not column:1728 if text:1729 column = ["path:path", "content_text:content_text", "size:size"]1730 else:1731 column = ["path:path", "content:content", "size:size"]1732 if not pk:1733 pk = "path"1734 def yield_paths_and_relative_paths():1735 for f_or_d in file_or_dir:1736 path = pathlib.Path(f_or_d)1737 if f_or_d == "-":1738 yield "-", "-"1739 elif path.is_dir():1740 for subpath in path.rglob("*"):1741 if subpath.is_file():1742 yield subpath, subpath.relative_to(path)1743 elif path.is_file():1744 yield path, path1745 # Load all paths so we can show a progress bar1746 paths_and_relative_paths = 
list(yield_paths_and_relative_paths())1747 with progressbar(paths_and_relative_paths, silent=silent) as bar:1748 def to_insert():1749 for path, relative_path in bar:1750 row = {}1751 # content_text is special case as it considers 'encoding'1752 def _content_text(p):1753 resolved = p.resolve()1754 try:1755 return resolved.read_text(encoding=encoding)1756 except UnicodeDecodeError as e:1757 raise UnicodeDecodeErrorForPath(e, resolved)1758 lookups = dict(FILE_COLUMNS, content_text=_content_text)1759 if path == "-":1760 stdin_data = sys.stdin.buffer.read()1761 # We only support a subset of columns for this case1762 lookups = {1763 "name": lambda p: name or "-",1764 "path": lambda p: name or "-",1765 "content": lambda p: stdin_data,1766 "content_text": lambda p: stdin_data.decode(1767 encoding or "utf-8"1768 ),1769 "sha256": lambda p: hashlib.sha256(stdin_data).hexdigest(),1770 "md5": lambda p: hashlib.md5(stdin_data).hexdigest(),1771 "size": lambda p: len(stdin_data),1772 }1773 for coldef in column:1774 if ":" in coldef:1775 colname, coltype = coldef.rsplit(":", 1)1776 else:1777 colname, coltype = coldef, coldef1778 try:1779 value = lookups[coltype](path)1780 row[colname] = value1781 except KeyError:1782 raise click.ClickException(1783 "'{}' is not a valid column definition - options are {}".format(1784 coltype, ", ".join(lookups.keys())1785 )1786 )1787 # Special case for --name1788 if coltype == "name" and name:1789 row[colname] = name1790 yield row1791 db = sqlite_utils.Database(path)1792 _load_extensions(db, load_extension)1793 try:1794 with db.conn:1795 db[table].insert_all(1796 to_insert(), pk=pk, alter=alter, replace=replace, upsert=upsert1797 )1798 except UnicodeDecodeErrorForPath as e:1799 raise click.ClickException(1800 UNICODE_ERROR.format(1801 "Could not read file '{}' as text\n\n{}".format(e.path, e.exception)1802 )1803 )1804@cli.command(name="analyze-tables")1805@click.argument(1806 "path",1807 type=click.Path(file_okay=True, dir_okay=False, 
allow_dash=False, exists=True),1808 required=True,1809)1810@click.argument("tables", nargs=-1)1811@click.option(1812 "-c",1813 "--column",1814 "columns",1815 type=str,1816 multiple=True,1817 help="Specific columns to analyze",1818)1819@click.option("--save", is_flag=True, help="Save results to _analyze_tables table")1820@load_extension_option1821def analyze_tables(1822 path,1823 tables,1824 columns,1825 save,1826 load_extension,1827):1828 "Analyze the columns in one or more tables"1829 db = sqlite_utils.Database(path)1830 _load_extensions(db, load_extension)1831 _analyze(db, tables, columns, save)1832def _analyze(db, tables, columns, save):1833 if not tables:1834 tables = db.table_names()1835 todo = []1836 table_counts = {}1837 for table in tables:1838 table_counts[table] = db[table].count1839 for column in db[table].columns:1840 if not columns or column.name in columns:1841 todo.append((table, column.name))1842 # Now we now how many we need to do1843 for i, (table, column) in enumerate(todo):1844 column_details = db[table].analyze_column(1845 column, total_rows=table_counts[table], value_truncate=801846 )1847 if save:1848 db["_analyze_tables_"].insert(1849 column_details._asdict(), pk=("table", "column"), replace=True1850 )1851 most_common_rendered = _render_common(1852 "\n\n Most common:", column_details.most_common1853 )1854 least_common_rendered = _render_common(1855 "\n\n Least common:", column_details.least_common1856 )1857 details = (1858 (1859 textwrap.dedent(1860 """1861 {table}.{column}: ({i}/{total})1862 Total rows: {total_rows}1863 Null rows: {num_null}1864 Blank rows: {num_blank}1865 Distinct values: {num_distinct}{most_common_rendered}{least_common_rendered}1866 """1867 )1868 .strip()1869 .format(1870 i=i + 1,1871 total=len(todo),1872 most_common_rendered=most_common_rendered,1873 least_common_rendered=least_common_rendered,1874 **column_details._asdict()1875 )1876 )1877 + "\n"1878 )1879 click.echo(details)1880def _generate_convert_help():1881 help = 
textwrap.dedent(1882 """1883 Convert columns using Python code you supply. For example:1884 \b1885 $ sqlite-utils convert my.db mytable mycolumn \\1886 '"\\n".join(textwrap.wrap(value, 10))' \\1887 --import=textwrap1888 "value" is a variable with the column value to be converted.1889 The following common operations are available as recipe functions:1890 """1891 ).strip()1892 recipe_names = [1893 n for n in dir(recipes) if not n.startswith("_") and n not in ("json", "parser")1894 ]1895 for name in recipe_names:1896 fn = getattr(recipes, name)1897 help += "\n\nr.{}{}\n\n {}".format(1898 name, str(inspect.signature(fn)), fn.__doc__1899 )1900 help += "\n\n"1901 help += textwrap.dedent(1902 """1903 You can use these recipes like so:1904 \b1905 $ sqlite-utils convert my.db mytable mycolumn \\1906 'r.jsonsplit(value, delimiter=":")'1907 """1908 ).strip()1909 return help1910@cli.command(help=_generate_convert_help())1911@click.argument(1912 "db_path",1913 type=click.Path(file_okay=True, dir_okay=False, allow_dash=False),1914 required=True,1915)1916@click.argument("table", type=str)1917@click.argument("columns", type=str, nargs=-1, required=True)1918@click.argument("code", type=str)1919@click.option(1920 "--import", "imports", type=str, multiple=True, help="Python modules to import"1921)1922@click.option(1923 "--dry-run", is_flag=True, help="Show results of running this against first 10 rows"1924)1925@click.option(1926 "--multi", is_flag=True, help="Populate columns for keys in returned dictionary"1927)1928@click.option("--where", help="Optional where clause")1929@click.option(1930 "-p",1931 "--param",1932 multiple=True,1933 type=(str, str),1934 help="Named :parameters for where clause",1935)1936@click.option("--output", help="Optional separate column to populate with the output")1937@click.option(1938 "--output-type",1939 help="Column type to use for the output column",1940 default="text",1941 type=click.Choice(["integer", "float", "blob", 
"text"]),1942)1943@click.option("--drop", is_flag=True, help="Drop original column afterwards")1944@click.option("-s", "--silent", is_flag=True, help="Don't show a progress bar")1945def convert(1946 db_path,1947 table,1948 columns,1949 code,1950 imports,1951 dry_run,1952 multi,1953 where,1954 param,1955 output,1956 output_type,1957 drop,1958 silent,1959):1960 sqlite3.enable_callback_tracebacks(True)1961 db = sqlite_utils.Database(db_path)1962 if output is not None and len(columns) > 1:1963 raise click.ClickException("Cannot use --output with more than one column")1964 if multi and len(columns) > 1:1965 raise click.ClickException("Cannot use --multi with more than one column")1966 if drop and not (output or multi):1967 raise click.ClickException("--drop can only be used with --output or --multi")1968 # If single line and no 'return', add the return1969 if "\n" not in code and not code.strip().startswith("return "):1970 code = "return {}".format(code)1971 where_args = dict(param) if param else []1972 # Compile the code into a function body called fn(value)1973 new_code = ["def fn(value):"]1974 for line in code.split("\n"):1975 new_code.append(" {}".format(line))1976 code_o = compile("\n".join(new_code), "<string>", "exec")1977 locals = {}1978 globals = {"r": recipes, "recipes": recipes}1979 for import_ in imports:1980 globals[import_] = __import__(import_)1981 exec(code_o, globals, locals)1982 fn = locals["fn"]1983 if dry_run:1984 # Pull first 20 values for first column and preview them1985 db.conn.create_function("preview_transform", 1, lambda v: fn(v) if v else v)1986 sql = """1987 select1988 [{column}] as value,1989 preview_transform([{column}]) as preview1990 from [{table}]{where} limit 101991 """.format(1992 column=columns[0],1993 table=table,1994 where=" where {}".format(where) if where is not None else "",1995 )1996 for row in db.conn.execute(sql, where_args).fetchall():1997 click.echo(str(row[0]))1998 click.echo(" --- becomes:")1999 
click.echo(str(row[1]))2000 click.echo()2001 count = db[table].count_where(2002 where=where,2003 where_args=where_args,2004 )2005 click.echo("Would affect {} row{}".format(count, "" if count == 1 else "s"))2006 else:2007 try:2008 db[table].convert(2009 columns,2010 fn,2011 where=where,2012 where_args=where_args,2013 output=output,2014 output_type=output_type,2015 drop=drop,2016 multi=multi,2017 show_progress=not silent,2018 )2019 except BadMultiValues as e:2020 raise click.ClickException(2021 "When using --multi code must return a Python dictionary - returned: {}".format(2022 repr(e.values)2023 )2024 )2025def _render_common(title, values):2026 if values is None:2027 return ""2028 lines = [title]2029 for value, count in values:2030 lines.append(" {}: {}".format(count, value))2031 return "\n".join(lines)2032class UnicodeDecodeErrorForPath(Exception):2033 def __init__(self, exception, path):2034 self.exception = exception2035 self.path = path2036FILE_COLUMNS = {2037 "name": lambda p: p.name,2038 "path": lambda p: str(p),2039 "fullpath": lambda p: str(p.resolve()),2040 "sha256": lambda p: hashlib.sha256(p.resolve().read_bytes()).hexdigest(),2041 "md5": lambda p: hashlib.md5(p.resolve().read_bytes()).hexdigest(),2042 "mode": lambda p: p.stat().st_mode,2043 "content": lambda p: p.resolve().read_bytes(),2044 "mtime": lambda p: p.stat().st_mtime,2045 "ctime": lambda p: p.stat().st_ctime,2046 "mtime_int": lambda p: int(p.stat().st_mtime),2047 "ctime_int": lambda p: int(p.stat().st_ctime),2048 "mtime_iso": lambda p: datetime.utcfromtimestamp(p.stat().st_mtime).isoformat(),2049 "ctime_iso": lambda p: datetime.utcfromtimestamp(p.stat().st_ctime).isoformat(),2050 "size": lambda p: p.stat().st_size,2051}2052def output_rows(iterator, headers, nl, arrays, json_cols):2053 # We have to iterate two-at-a-time so we can know if we2054 # should output a trailing comma or if we have reached2055 # the last row.2056 current_iter, next_iter = itertools.tee(iterator, 2)2057 next(next_iter, 
None)2058 first = True2059 for row, next_row in itertools.zip_longest(current_iter, next_iter):2060 is_last = next_row is None2061 data = row2062 if json_cols:2063 # Any value that is a valid JSON string should be treated as JSON2064 data = [maybe_json(value) for value in data]2065 if not arrays:2066 data = dict(zip(headers, data))2067 line = "{firstchar}{serialized}{maybecomma}{lastchar}".format(2068 firstchar=("[" if first else " ") if not nl else "",2069 serialized=json.dumps(data, default=json_binary),2070 maybecomma="," if (not nl and not is_last) else "",2071 lastchar="]" if (is_last and not nl) else "",2072 )2073 yield line2074 first = False2075def maybe_json(value):2076 if not isinstance(value, str):2077 return value2078 stripped = value.strip()2079 if not (stripped.startswith("{") or stripped.startswith("[")):2080 return value2081 try:2082 return json.loads(stripped)2083 except ValueError:2084 return value2085def json_binary(value):2086 if isinstance(value, bytes):2087 return {"$base64": True, "encoded": base64.b64encode(value).decode("latin-1")}2088 else:2089 raise TypeError2090def _load_extensions(db, load_extension):2091 if load_extension:2092 db.conn.enable_load_extension(True)2093 for ext in load_extension:2094 if ext == "spatialite" and not os.path.exists(ext):2095 ext = find_spatialite()...

Full Screen

Full Screen

bot.py

Source:bot.py Github

copy

Full Screen

...14 command_prefix=prefix,15 description=DESCRIPTION,16 intents=intents,17 )18 self._load_extensions()19 def _load_extensions(self) -> None:20 cogs_dir = os.listdir("cogs")21 for filename in cogs_dir:22 if filename.endswith(".py"):23 cog = filename[:-3]...

Full Screen

Full Screen

Automation Testing Tutorials

Learn to execute automation testing from scratch with the LambdaTest Learning Hub. Right from setting up the prerequisites to running your first automation test, to following best practices and diving deeper into advanced test scenarios, the LambdaTest Learning Hub compiles a list of step-by-step guides to help you become proficient with different test automation frameworks, i.e. Selenium, Cypress, TestNG, etc.

LambdaTest Learning Hubs:

YouTube

You can also refer to the video tutorials on the LambdaTest YouTube channel to get step-by-step demonstrations from industry experts.

Run lisa automation tests on LambdaTest cloud grid

Perform automation testing on 3000+ real desktop and mobile devices online.

Try LambdaTest Now !!

Get 100 minutes of automation test minutes FREE!!

Next-Gen App & Browser Testing Cloud

Was this article helpful?

Helpful

Not Helpful