Permalink
Cannot retrieve contributors at this time
Name already in use
A tag already exists with the provided branch name. Many Git commands accept both tag and branch names, so creating this branch may cause unexpected behavior. Are you sure you want to create this branch?
SpecDB/arch/specdb
Go to fileThis commit does not belong to any branch on this repository, and may belong to a fork outside of the repository.
executable file
413 lines (354 sloc)
14 KB
This file contains bidirectional Unicode text that may be interpreted or compiled differently than what appears below. To review, open the file in an editor that reveals hidden Unicode characters.
Learn more about bidirectional Unicode characters
#!/usr/bin/env python3 | |
""" | |
SpecDB Command Line Interface. | |
""" | |
import argparse | |
import json | |
import os | |
import sys | |
import specdb | |
# Environment-variable names recognized in a specdb .env file.
env_keys = ['SPECDB_DB', 'SPECDB_ROOT', 'SPECDB_BACKUP', 'SPECDB_GS_CRED']
# Epilog shown by `specdb --help`; %(prog)s is expanded by argparse.
# BUG FIX: the examples previously used flags the parsers do not define
# (`insert --file`/`--env`; the real flags are `--form`/`--db`), invalid
# SQL (`FROM table users`), and omitted `--out` for `query`.
examples = """
example command lines:
%(prog)s create --db my.db --backup /backups/my.backup.db
%(prog)s query --db my.db --sql "SELECT * FROM users LIMIT 10" --out results
%(prog)s insert --form specdb.yaml --db my.db --write
%(prog)s backup --db my.db --backup /backups/my.backup.db
%(prog)s forms --table user project --num 3 1
%(prog)s summary --table user --db my.db
"""
# Top-level argparse parser; every sub-command registers itself on `sdb_subs`.
sdb_top = argparse.ArgumentParser(
    prog='specdb',
    description='Command line tool for interfacing with SpecDB',
    formatter_class=argparse.RawDescriptionHelpFormatter,
    epilog=examples,
)
sdb_subs = sdb_top.add_subparsers(dest='command', help='command description')
# Invoking `specdb` with no sub-command is permitted; the dispatch below
# then simply does nothing.
sdb_subs.required = False
# create level parser: instantiate a new database plus its backup twin.
sdb_configure = sdb_subs.add_parser('create',
    help='instantiate a new database')
# BUG FIX: help text read "to be made created".
sdb_configure.add_argument('--db', type=str, metavar='<path>', required=True,
    help='path and name for a new database to be created')
sdb_configure.add_argument('--backup', type=str, metavar='<path>',
    required=True, help='path where incremental backup will be saved')
# forms level parser: emit blank form templates for the requested tables.
# BUG FIX: the sub-command help and the --table help previously listed
# inconsistent table names (`batch` vs `purification_batch`; one list was
# missing `buffer_components` and `batch_components`) and both lacked the
# comma after `target`.  One shared, corrected list is used for both.
_FORMS_TABLES = (
    'tables to generate forms for are: `user`, `project`, `target`, '
    '`construct`, `expression`, `purification_batch`, `buffer`, '
    '`buffer_components`, `pst`, `batch_components`, `spectrometer`,'
    ' and `session`')
sdb_forms = sdb_subs.add_parser('forms',
    help='generate a template form for requested table\n' + _FORMS_TABLES)
sdb_forms.add_argument('--table', nargs='+', metavar='<str>', required=True,
    help='tables to generate a form for\n' + _FORMS_TABLES)
# NOTE(review): --num is advertised as <int> but no type=int is applied, so
# specdb.forms receives strings — confirm it converts them itself.
sdb_forms.add_argument('--num', nargs='+', metavar='<int>', required=False,
    default=None,
    help='number of forms to generate for each requested table')
# insert level parser: load one form file into the database.
# NOTE(review): the sub-command help says "json file" while --form's help
# says "yaml file" — confirm which format specdb.insert actually expects.
sdb_insert = sdb_subs.add_parser(
    'insert', help='insert a single json file into SpecDB')
sdb_insert.add_argument(
    '--form', type=str, metavar='<path>', required=True,
    help='path to a single yaml file to insert')
sdb_insert.add_argument(
    '--db', type=str, metavar='<path>', required=True,
    help=('path to specific specdb database file to use for'
          ' insertion. use `specdb create` to create the .db file'))
sdb_insert.add_argument(
    '--write', action='store_true', required=False,
    help='write new/altered information in JSON to database, used only once')
# summary level parser (fixed comment typo: "sumary")
sdb_summary = sdb_subs.add_parser('summary',
    help=('make a summary report for SpecDB database. if run with no'
          ' table provided, then a summary for every table is made.'))
sdb_summary.add_argument('--db', type=str, metavar='<path>', required=True,
    help=('path to specific specdb database file to use.'
          ' use `specdb create` to create a new database file'))
# BUG FIX: --table was required=True, contradicting the sub-command help
# above, which promises an all-tables summary when no table is given.
# NOTE(review): assumes specdb.summary(table=None) implements the
# all-tables case — confirm before release.
sdb_summary.add_argument('--table', type=str, metavar='<str>',
    required=False, default=None,
    help='provide which table a summary is needed for')
# query level parser
# BUG FIX: help referred to a nonexistent `--output` flag (the real flag
# is `--out`) and claimed results print to screen when it is omitted,
# while --out was required=True.  Flag name corrected and --out made
# optional to match the documented behavior.
sdb_query = sdb_subs.add_parser('query',
    help=('query records from SpecDB summary table. If no --out is given'
          ' then results are simply printed to screen'))
sdb_query.add_argument('--sql', nargs='+', type=str, metavar='<str>',
    required=True,
    help=('query using sql syntax. The query can be on any table.'
          ' If no --out is given, results are printed to screen.'))
sdb_query.add_argument('--star', action='store_true', required=False,
    help='whether to write a NMR-STAR file for each FID in the query results')
sdb_query.add_argument('--db', type=str, metavar='<path>', required=True,
    help=('path to specific specdb database file to use.'
          ' use `specdb create` to create a new database file'))
# NOTE(review): assumes specdb.query(output_dir=None) prints to screen
# — confirm.
sdb_query.add_argument('--out', type=str, metavar='<path>',
    required=False, default=None,
    help='directory to place results of the query')
# backup level parser
# BUG FIX: help referenced a nonexistent `specdb configure` command; the
# database is created with `specdb create` ("ran" -> "run" as well).
sdb_backup = sdb_subs.add_parser('backup',
    help='perform incremental backup. specdb create must be run first')
sdb_backup.add_argument('--db', type=str, metavar='<path>', required=True,
    help='path to specific specdb database to backup')
sdb_backup.add_argument('--objects', type=str, metavar='<path>',
    required=False, default='objects',
    help=('path where incremental object_dir is located\n'
          'by default, an objects/ directory is made in the location where '
          'database to be backed resides.'))
sdb_backup.add_argument('--shafile', type=str, metavar='<path>',
    required=False, default='backup.txt',
    help=('path where sha256 hashes for SQLite pages are stored\n'
          'by default a backup.txt file is made'))
# restore level parser: rebuild a database from a SpecDB backup.
sdb_restore = sdb_subs.add_parser(
    'restore', help='perform database restoration from a SpecDB backup')
sdb_restore.add_argument(
    '--backup', type=str, metavar='<path>', required=True,
    help=('provide path to the backup database (not the database to be '
          ' backed up. Ex: specdb.backup.db, not '
          ' specdb.db)'))
sdb_restore.add_argument(
    '--shafile', type=str, metavar='<path>', required=False,
    default='backup.txt',
    help='location of text file for sha256 hash values for SQLite database')
sdb_restore.add_argument(
    '--objects', type=str, metavar='<path>', required=False,
    default='objects', help='path to objects directory for backup')
# Parse the command line; `sdb.command` drives the dispatch below.
sdb = sdb_top.parse_args()
## Commands ##
if sdb.command == 'create':
    # Both the live database and its backup must carry a .db suffix.
    for path_arg in (sdb.db, sdb.backup):
        if not path_arg.endswith('.db'):
            print(f'{path_arg} does not end in .db')
            print('change database name with .db at the end')
            print('Aborting')
            sys.exit(1)  # BUG FIX: exit nonzero on error (was sys.exit())
    specdb_path = os.path.abspath(sdb.db)
    backup_path = os.path.abspath(sdb.backup)
    # Refuse to clobber existing files.
    # BUG FIX: the second existence check tested specdb_path again instead
    # of backup_path, so an existing backup file was never detected.
    for raw, resolved in ((sdb.db, specdb_path), (sdb.backup, backup_path)):
        if os.path.isfile(resolved):
            print(f'db file {raw} already exists')
            print(f'is there an already existing db at {raw}?')
            print('Aborting')
            sys.exit(1)
    # initialize SQLite databases at both locations
    specdb.init(location=specdb_path)
    specdb.init(location=backup_path)
    print('db configured and initialized')
    print('Done')
elif sdb.command == 'forms':
    form = specdb.forms(table=sdb.table, num=sdb.num)
    print(form)
elif sdb.command == 'insert':
    specdb.insert(file=os.path.abspath(sdb.form), db=sdb.db, write=sdb.write)
elif sdb.command == 'summary':
    specdb.summary(db=sdb.db, table=sdb.table)
elif sdb.command == 'query':
    # BUG FIX: only sdb.sql[0] was passed before, silently dropping the
    # remaining tokens of an unquoted multi-word query; join them all.
    specdb.query(db=sdb.db, sql=' '.join(sdb.sql), star=sdb.star,
        output_dir=sdb.out)
elif sdb.command == 'backup':
    specdb.backup(db=sdb.db, object_dir=sdb.objects, backup_file=sdb.shafile)
elif sdb.command == 'restore':
    specdb.restore(
        backup=sdb.backup,
        backup_file=sdb.shafile,
        object_dir=sdb.objects)
""" | |
# register level parser | |
sdb_register = sdb_subs.add_parser('register', | |
help='register a new record for a requested table') | |
sdb_register.add_argument('--env', type=str, metavar='<path>', required=False, | |
help='provide a specdb environment file') | |
sdb_register.add_argument('--user', type=str, metavar='<name>', required=False, | |
help='register a new user_id') | |
sdb_register.add_argument('--project', type=str, nargs='+', metavar='<name>', | |
required=False, help='register a new project_id') | |
sdb_register.add_argument('--target', type=str, metavar='<name>', | |
required=False, help='register a new target_id') | |
sdb_register.add_argument('--construct', type=str, metavar='<name>', | |
required=False, help='register a new construct_id') | |
sdb_register.add_argument('--expression', type=str, metavar='<name>', | |
required=False, help='register a new expression_id') | |
sdb_register.add_argument('--batch', type=str, metavar='<name>', required=False, | |
help='register a new batch_id') | |
sdb_register.add_argument('--pst', type=str, metavar='<name>', | |
required=False, help='register a new pst_id') | |
sdb_reg_subs = sdb_register.add_subparsers(dest='subcommand') | |
sdb_reg_subs.required = False | |
sdb_buffer_component = sdb_reg_subs.add_parser('buffer-component', | |
help='add a buffer component') | |
sdb_buffer_component.add_argument('--buffer', type=str, metavar='<name>', | |
help='buffer-id this component goes to') | |
sdb_buffer_component.add_argument('--component', type=str, nargs='+', | |
metavar='<name>', help='name of buffer component') | |
sdb_buffer_component.add_argument('--conc-value', type=float, metavar='<float>', | |
help='concentration of component value') | |
sdb_buffer_component.add_argument('--unit', type=str, metavar='<str>', | |
help='concentration unit for buffer component') | |
sdb_register.add_argument('--spectrometer', type=str, metavar='<name>', | |
help='add an new spectrometer id') | |
sdb_register.add_argument('--probe', type=str, metavar='<name>', | |
help='add a new probe id') | |
# update level parser | |
sdb_update = sdb_subs.add_parser('update', | |
help='insert all new specdb.json files into SpecDB') | |
sdb_update.add_argument('--env', type=str, metavar='<path>', required=True, | |
help=''.join(( | |
('path to specific specdb database environment file to be updated.'), | |
(' use `specdb configure` to create .env file')))) | |
sdb_update.add_argument('--overwrite', action='store_true', | |
required=False, help=''.join(( | |
('if a specdb.json conflicts with SpecDB, overwite SpecDB to match.'), | |
(' Otherwise, a conflict resuslts in the specdb.json not being'), | |
(' inserted into SpecDB and is filed in specdb.log')))) | |
def register(table=None, table_id=None, env=None): | |
environ = read_config(file=env) | |
sql = f"SELECT table, id FROM register WHERE table == '{table}' AND", | |
sql += f" id == '{table_id}'" | |
results = cursor.exectute(sql) | |
# if results empty INSERT | |
if len(results) == 0: | |
sql = f"INSERT register (table, id) VALUES({table}, {table_id})" | |
cursor.execute(sql) | |
print("Registered") | |
return | |
else: | |
print(f"id: {table_id} for table: {table} already registered") | |
print("Done") | |
return | |
def update(overwrite=False): | |
# read environment | |
config = read_config() | |
cmd = f'find {SPECDB_ROOT} -type f -name "{SPECDB_JSON}" -newermt {SPECB_LAST}' | |
new_jsons = subprocess.check_output(cmd, shell=True) | |
for line in new_jsons: | |
line = line.rstrip() | |
insert(json=linef, overwrite=overwrite) | |
if 'INSERT' in line: continue | |
if 'FROM' in line: continue | |
if 'VALUES' in line: continue | |
if 'PRAGMA' in line: continue | |
if len(line) == 0: continue | |
if table == 'session': | |
if table_name != 'summary' and table_name != 'tubes': | |
form_dic[table_name] = list() | |
form_dic[table_name].append(dict()) | |
form_dic[table_name] | |
if table == table_name: | |
out = False | |
continue | |
else: | |
out = True | |
elif table_name == 'tubes': continue | |
elif table_name == 'summary': | |
config['SPECDB_DB'] = os.path.abspath(sdb.db) | |
config['SPECDB_NAME'] = specdb_name | |
config['SPECDB_BACKUP'] = os.path.join( | |
os.path.abspath(sdb.backup), | |
f'{specdb_name}.backup.db') | |
config['SPECDB_LOG'] = os.path.join( | |
os.path.abspath(sdb.log), | |
f'{specdb_name}.log.json') | |
specdb_env = os.path.join(sdb.log, f'{specdb_name}.specdb.env') | |
with open(specdb_env, 'w') as fp: | |
for k,v in config.items(): | |
fp.write(f'{k}={v}\n') | |
log = [] | |
with open(config['SPECDB_LOG'], 'w') as fp: | |
json.dump(log, fp, indent=2) | |
def read_config(file=None): | |
"""""" | |
Read specdb.env and return dictionary with keys being SpecDB environemnt | |
variables. | |
Parameters | |
---------- | |
file path to .env file to read | |
Returns | |
------- | |
env dict, key/value pairs for SpecDB environemnt variables | |
"""""" | |
with open(file, 'r') as fp: | |
env = {} | |
for line in fp.readlines(): | |
line = line.rstrip() | |
if '=' not in line: | |
print(f'{file} not formatted correctly') | |
print('Aborting') | |
sys.exit() | |
params = line.split('=') | |
if params[0] not in env_keys: | |
print(f'unexpected environment variable {params[0]}') | |
print('Aborting') | |
sys.exit() | |
env[params[0]] = params[1] | |
return env | |
def read_log(environment=None): | |
"""""" | |
Read the SpecDB log file to check which specdb.json files experienced | |
problems. | |
Parameters | |
---------- | |
+ environment path to environment file for SpecDB | |
Returns | |
------- | |
+ log dictionary of specdb.json paths listed in log file | |
"""""" | |
environ = read_config(file=environment) | |
log = [] | |
with open(environ['SPECDB_LOG'], 'r') as fp: | |
log = json.load(fp) | |
return log | |
sys.exit() | |
return | |
# # read template json file to check if provided json has expected keys | |
# specdb_path = os.path.abspath(os.path.dirname(__file__)) | |
# temp_json = os.path.join(specdb_path, '../sql/template_specdb.json') | |
# with open(temp_json, 'r') as fp: | |
# template_json = json.load(fp) | |
for k1 in record: | |
if k1 not in template_json: | |
print(f'unknown key {k1} in {json}') | |
print('Aborting') | |
sys.exit() | |
if type(record[k1]) is dict: | |
for k2 in record[k1]: | |
if k2 not in template_json[k1]: | |
print(f'unknown key {k2} in {json}') | |
print('Aborting') | |
sys.exit() | |
return False | |
elif type(record[k1]) is list: | |
for item in record[k1]: | |
assert(type(item) is dict) | |
for k2 in item: | |
if k2 not in template_json[k1][0]: | |
print(f'unknown {k2} in {json}') | |
print('Aborting') | |
sys.exit() | |
return False | |
new_fid['experiment_date'] = datetime.datetime.fromtimestamp( | |
os.path.getmtime( | |
os.path.join(zipped_path, time_domain[0]))) | |
# if 'session' in table: | |
# if 'time_domain_dataset' not in table: | |
# print('session and time_domain_dataset must be together in a form') | |
# print('only session requested') | |
# print('Aborting') | |
# sys.exit() | |
# | |
# if 'time_domain_dataset' in table: | |
# if 'session' not in table: | |
# print('session and time_domain_dataset must be together in a form') | |
# print('only time_domain_dataset requested') | |
# print('Aborting') | |
# sys.exit() | |
""" | |