Here are examples of the Python API sqlalchemy.create_engine, taken from open source projects.
182 Examples
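Before the project examples, a minimal sketch of the call itself: create_engine() takes a database URL and returns an Engine that opens connections lazily, on first use. The URL below is a placeholder, not taken from any of the examples.
from sqlalchemy import create_engine, text

engine = create_engine('sqlite:///example.db')  # hypothetical URL
with engine.connect() as conn:
    print(conn.execute(text('SELECT 1')).scalar())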
Example 101
Project: ocf Source File: fsdb.py
def __init__ (self):
self.engine = create_engine('sqlite:///:memory:', echo=True)
self.md = MetaData()
self.tables = []
self.tables.append(Table('dpports', self.md,
Column('dpid', String(23)),
Column('in_port', String(10))))
self.tables.append(Table('dlsrc', self.md,
Column('dl_src', String(23))))
self.tables.append(Table('dldst', self.md,
Column('dl_dst', String(23))))
self.dltype = Table('dltype', self.md,
Column('dl_type', String(6)))
self.vlanid = Table('vlanid', self.md,
Column('dl_vlan', String(6)))
self.nwsrc = Table('nwsrc', self.md,
Column('nw_src', String(18)))
self.nwdst = Table('nwdst', self.md,
Column('nw_dst', String(18)))
self.nwproto = Table('nwproto', self.md,
Column('nw_proto', String(3)))
self.nwtos = Table('nwtos', self.md,
Column('nw_tos', String(3)))
self.tpsrc = Table('tpsrc', self.md,
Column('tp_src', String(5)))
self.tpdst = Table('tpdst', self.md,
Column('tp_dst', String(5)))
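The constructor above only declares tables on the MetaData; nothing in the snippet emits CREATE TABLE, so presumably that happens elsewhere. A self-contained sketch of the missing step, assuming create_all() is the intended mechanism:
from sqlalchemy import create_engine, MetaData, Table, Column, String

engine = create_engine('sqlite:///:memory:', echo=True)
md = MetaData()
Table('dpports', md, Column('dpid', String(23)), Column('in_port', String(10)))
md.create_all(engine)  # emits CREATE TABLE for every table registered on md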
Example 102
Project: nova-billing Source File: migrate.py
def migrate_instances(old_db_url):
engine1 = create_engine(old_db_url)
tariffs = db_api.tariff_map()
instance_resources = ("local_gb", "memory_mb", "vcpus")
instance_info_attrs = (
"id", "instance_id", "project_id",
"local_gb", "memory_mb", "vcpus")
instance_segment_attrs = (
"id", "instance_info_id",
"segment_type", "begin_at",
"end_at")
instance_infos = {}
accounts = {}
for inst1 in engine1.execute(
"select distinct project_id from billing_instance_info"):
accounts[inst1.project_id] = \
db_api.account_get_or_create(inst1.project_id).id
for inst1 in engine1.execute(
"select %s from billing_instance_info" %
", ".join(instance_info_attrs)):
account_id = accounts[inst1.project_id]
inst2 = db_api.resource_get_or_create(
account_id, None,
ResourceTypes.Instance,
inst1.instance_id
)
inst_dict = {
"inst1": inst1,
"inst2": inst2,
}
for rtype in instance_resources:
inst_dict[rtype + "_id"] = db_api.resource_get_or_create(
account_id, inst2.id,
rtype,
None
)
instance_infos[inst1.id] = inst_dict
for iseg in engine1.execute(
"select %s from billing_instance_segment" %
", ".join(instance_segment_attrs)):
inst_dict = instance_infos[iseg.instance_info_id]
inst1 = inst_dict["inst1"]
begin_at = utils.str_to_datetime(iseg.begin_at)
end_at = utils.str_to_datetime(iseg.end_at)
inst_dict["begin_at"] = (min(inst_dict["begin_at"], begin_at)
if "begin_at" in inst_dict else begin_at)
try:
prev = inst_dict["end_at"]
except KeyError:
inst_dict["end_at"] = end_at
else:
inst_dict["end_at"] = (
max(prev, end_at) if prev
else None)
for rtype in instance_resources:
seg = Segment(
resource_id=inst_dict[rtype + "_id"].id,
cost=getattr(inst1, rtype) * tariffs.get(rtype, 1),
begin_at=begin_at,
end_at=end_at)
db.session.add(seg)
for inst_dict in instance_infos.values():
seg = Segment(
resource_id=inst_dict["inst2"].id,
cost=tariffs.get("nova/instance", 0),
begin_at=inst_dict.get("begin_at", None),
end_at=inst_dict.get("end_at", None))
db.session.add(seg)
db.session.commit()
Example 103
Project: superset Source File: sql_lab.py
@celery_app.task(bind=True)
def get_sql_results(self, query_id, return_results=True, store_results=False):
"""Executes the sql query returns the results."""
if not self.request.called_directly:
engine = sqlalchemy.create_engine(
app.config.get('SQLALCHEMY_DATABASE_URI'), poolclass=NullPool)
session_class = sessionmaker()
session_class.configure(bind=engine)
session = session_class()
else:
session = db.session()
session.commit() # HACK
query = session.query(models.Query).filter_by(id=query_id).one()
database = query.database
executed_sql = query.sql.strip().strip(';')
db_engine_spec = database.db_engine_spec
def handle_error(msg):
"""Local method handling error while processing the SQL"""
query.error_message = msg
query.status = QueryStatus.FAILED
query.tmp_table_name = None
session.commit()
raise Exception(query.error_message)
# Limit enforced only for retrieving the data, not for the CTA queries.
is_select = is_query_select(executed_sql)
if not is_select and not database.allow_dml:
handle_error(
"Only `SELECT` statements are allowed against this database")
if query.select_as_cta:
if not is_select:
handle_error(
"Only `SELECT` statements can be used with the CREATE TABLE "
"feature.")
if not query.tmp_table_name:
start_dttm = datetime.fromtimestamp(query.start_time)
query.tmp_table_name = 'tmp_{}_table_{}'.format(
query.user_id,
start_dttm.strftime('%Y_%m_%d_%H_%M_%S'))
executed_sql = create_table_as(
executed_sql, query.tmp_table_name, database.force_ctas_schema)
query.select_as_cta_used = True
elif (
query.limit and is_select and
db_engine_spec.limit_method == LimitMethod.WRAP_SQL):
executed_sql = database.wrap_sql_limit(executed_sql, query.limit)
query.limit_used = True
engine = database.get_sqla_engine(schema=query.schema)
try:
template_processor = get_template_processor(
database=database, query=query)
executed_sql = template_processor.process_template(executed_sql)
except Exception as e:
logging.exception(e)
msg = "Template rendering failed: " + utils.error_msg_from_exception(e)
handle_error(msg)
try:
query.executed_sql = executed_sql
logging.info("Running query: \n{}".format(executed_sql))
result_proxy = engine.execute(query.executed_sql, schema=query.schema)
except Exception as e:
logging.exception(e)
handle_error(utils.error_msg_from_exception(e))
cursor = result_proxy.cursor
query.status = QueryStatus.RUNNING
session.flush()
db_engine_spec.handle_cursor(cursor, query, session)
cdf = None
if result_proxy.cursor:
column_names = [col[0] for col in result_proxy.cursor.description]
if db_engine_spec.limit_method == LimitMethod.FETCH_MANY:
data = result_proxy.fetchmany(query.limit)
else:
data = result_proxy.fetchall()
cdf = dataframe.SupersetDataFrame(
pd.DataFrame(data, columns=column_names))
query.rows = result_proxy.rowcount
query.progress = 100
query.status = QueryStatus.SUCCESS
if query.rows == -1 and cdf:
# Presto doesn't provide result_proxy.rowcount
query.rows = cdf.size
if query.select_as_cta:
query.select_sql = '{}'.format(database.select_star(
query.tmp_table_name, limit=query.limit))
query.end_time = utils.now_as_float()
session.flush()
payload = {
'query_id': query.id,
'status': query.status,
'data': [],
}
payload['data'] = cdf.data if cdf else []
payload['columns'] = cdf.columns_dict if cdf else []
payload['query'] = query.to_dict()
payload = json.dumps(payload, default=utils.json_iso_dttm_ser)
if store_results and results_backend:
key = '{}'.format(uuid.uuid4())
logging.info("Storing results in results backend, key: {}".format(key))
results_backend.set(key, zlib.compress(payload))
query.results_key = key
session.flush()
session.commit()
if return_results:
return payload
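The poolclass=NullPool argument at the top of this task is the detail worth copying: Celery workers often fork, and pooled DBAPI connections must not be shared across forks, so NullPool opens and closes a real connection on every checkout instead of pooling. A minimal sketch of the same pattern (the URL is invented):
import sqlalchemy
from sqlalchemy.pool import NullPool

# Every connect() opens a fresh DBAPI connection; nothing is held in a pool.
engine = sqlalchemy.create_engine('postgresql://user:pass@localhost/appdb',
                                  poolclass=NullPool)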
Example 104
Project: portingdb Source File: get-history.py
@click.command(help=__doc__)
@click.option('-u', '--update', help='CSV file with existing data')
def main(update):
excluded = set(BAD_COMMITS)
tmpdir = tempfile.mkdtemp()
writer = csv.DictWriter(sys.stdout,
['commit', 'date', 'status', 'num_packages'],
lineterminator='\n')
writer.writeheader()
all_statuses = [
"blocked", "dropped", "idle", "in-progress", "released", "mispackaged"]
prev_date = None
prev_commit = None
if update:
with open(update) as f:
for row in csv.DictReader(f):
excluded.add(row['commit'])
prev_date = row['date']
prev_commit = row['commit']
writer.writerow(row)
try:
tmpclone = os.path.join(tmpdir, 'tmp_clone')
tmpdata = os.path.join(tmpclone, 'data')
tmpdb = os.path.join(tmpclone, 'tmp-portingdb.sqlite')
run(['git', 'clone', '.', tmpclone])
prev_data_hash = None
prev_batch = []
for commit in reversed(git_history()):
date = run(['git', 'log', '-n1', '--pretty=%ci', commit]).strip()
if prev_date and prev_date > date:
continue
data_hash = run(['git', 'rev-parse', commit + ':' + 'data'])
if (commit in excluded) or (data_hash == prev_data_hash):
prev_data_hash = data_hash
continue
if prev_date and prev_date[:11] != date[:11]:
prev_date = date
prev_commit = commit
for row in prev_batch:
writer.writerow(row)
else:
prev_commit = commit
print('{},{} - skipping'.format(prev_commit, prev_date),
file=sys.stderr)
prev_batch = []
# Note: we don't remove files that didn't exist in the old
# version.
run(['git', 'checkout', commit, '--', 'data'], cwd=tmpclone)
run(['python3', '-m', 'portingdb',
'--datadir', tmpdata,
'--db', tmpdb,
'load'])
engine = create_engine('sqlite:///' + os.path.abspath(tmpdb))
db = get_db(engine=engine)
columns = [tables.Package.status, func.count()]
query = select(columns).select_from(tables.Package.__table__)
query = query.group_by(tables.Package.status)
package_numbers = {status: num_packages
for status, num_packages
in db.execute(query)}
for status in all_statuses:
row = {
'commit': commit,
'date': date,
'status': status,
'num_packages': package_numbers.get(status, 0),
}
prev_batch.append(row)
os.unlink(tmpdb)
prev_data_hash = data_hash
for row in prev_batch:
writer.writerow(row)
finally:
shutil.rmtree(tmpdir)
return
Example 105
Project: mittn Source File: test_dbtools.py
def test_add_false_positive(self):
# Add a false positive to the database and check that all fields
# get populated and can be compared back to the originals
response = {'scenario_id': '1',
'req_headers': 'headers',
'req_body': 'body',
'url': 'url',
'timestamp': datetime.datetime.utcnow(),
'req_method': 'method',
'server_protocol_error': None,
'server_timeout': False,
'server_error_text_detected': False,
'server_error_text_matched': 'matched_text',
'resp_statuscode': 'statuscode',
'resp_headers': 'resp_headers',
'resp_body': 'resp_body',
'resp_history': 'resp_history'}
dbtools.add_false_positive(self.context, response)
# Connect directly to the database and check the data is there
db_engine = sqlalchemy.create_engine(self.context.dburl)
dbconn = db_engine.connect()
db_metadata = sqlalchemy.MetaData()
httpfuzzer_issues = Table('httpfuzzer_issues', db_metadata,
Column('new_issue', types.Boolean),
Column('issue_no', types.Integer, primary_key=True, nullable=False),
Column('timestamp', types.DateTime(timezone=True)),
Column('test_runner_host', types.Text),
Column('scenario_id', types.Text),
Column('url', types.Text),
Column('server_protocol_error', types.Text),
Column('server_timeout', types.Boolean),
Column('server_error_text_detected', types.Boolean),
Column('server_error_text_matched', types.Text),
Column('req_method', types.Text),
Column('req_headers', types.LargeBinary),
Column('req_body', types.LargeBinary),
Column('resp_statuscode', types.Text),
Column('resp_headers', types.LargeBinary),
Column('resp_body', types.LargeBinary),
Column('resp_history', types.LargeBinary))
db_select = sqlalchemy.sql.select([httpfuzzer_issues])
db_result = dbconn.execute(db_select)
result = db_result.fetchone()
for key, value in response.iteritems():
self.assertEqual(result[key], value,
'%s not found in database after add' % key)
self.assertEqual(result['test_runner_host'], socket.gethostbyname(socket.getfqdn()),
'Test runner host name not correct in database')
self.assertLessEqual(result['timestamp'], datetime.datetime.utcnow(),
'Timestamp not correctly stored in database')
dbconn.close()
Example 106
Project: alchemist Source File: _engine.py
@utils.memoize
def __getitem__(self, name):
if 'DATABASES' not in settings:
raise exceptions.ImproperlyConfigured(
'DATABASES not configured in project settings.')
if name not in settings['DATABASES']:
raise exceptions.ImproperlyConfigured(
'%r not present in DATABASES configuration.' % name)
config = settings['DATABASES'][name]
if isinstance(config, six.string_types):
url = make_url(config)
options = {}
else:
config = dict(map(lambda i: (i[0].lower(), i[1]), config.items()))
options = config.get('options', {})
url = URL(
config['engine'],
username=config.get('username', config.get('user')),
password=config.get('password', config.get('pass')),
host=config.get('hostname', config.get('host')),
port=config.get('port'),
database=config.get('name', config.get('database')))
# If alchemist is invoked by a test runner we should switch to using
# testing databases.
if settings['TESTING']:
if url.drivername.startswith('sqlite'):
# Switch to using an in-memory database for sqlite.
url.database = ':memory:'
else:
# Switch to using a named testing database for other dialects.
ident = threading.current_thread().ident
url.database = 'test_%s_%s' % (url.database, ident)
# Apply MySQL hacks to make MySQL play nice.
pool_size = None
pool_recycle = None
if url.drivername.startswith('mysql'):
pool_size = 10
pool_recycle = 7200
# Get "global" options for the database engine.
pool_size = settings.get('DATABASE_POOL_SIZE', pool_size)
if pool_size:
options.setdefault('pool_size', pool_size)
pool_recycle = settings.get('DATABASE_POOL_RECYCLE', pool_recycle)
if pool_recycle:
options.setdefault('pool_recycle', pool_recycle)
pool_timeout = settings.get('DATABASE_POOL_TIMEOUT')
if pool_timeout:
options.setdefault('pool_timeout', pool_timeout)
# Forward configuration to sqlalchemy and create the engine.
engine = sa.create_engine(url, **options)
if settings["DEBUG"]:
# Create a no-op listener if we're in debug mode.
from sqlalchemy.event import listen
listen(engine, "after_cursor_execute", lambda *a, **kw: None)
# Return the created engine.
return engine
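The pool settings gathered above are ordinary create_engine keyword arguments. A hedged sketch of the call they amount to for a MySQL URL, reusing the example's own defaults (the URL and the pool_timeout value are invented):
import sqlalchemy as sa

# pool_recycle=7200 refreshes connections before MySQL's default 8-hour
# idle timeout can close them on the server side.
engine = sa.create_engine('mysql://user:pass@localhost/appdb',
                          pool_size=10, pool_recycle=7200, pool_timeout=30)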
Example 107
Project: pycsw Source File: admin.py
def setup_db(database, table, home, create_sfsql_tables=True, create_plpythonu_functions=True, postgis_geometry_column='wkb_geometry', extra_columns=[], language='english'):
"""Setup database tables and indexes"""
from sqlalchemy import Column, create_engine, Integer, MetaData, \
Table, Text
from sqlalchemy.orm import create_session
LOGGER.info('Creating database %s', database)
if database.startswith('sqlite'):
dbtype, filepath = database.split('sqlite:///')
dirname = os.path.dirname(filepath)
if not os.path.exists(dirname):
raise RuntimeError('SQLite directory %s does not exist' % dirname)
dbase = create_engine(database)
schema, table = util.sniff_table(table)
mdata = MetaData(dbase, schema=schema)
create_postgis_geometry = False
# If PostGIS 2.x detected, do not create sfsql tables.
if dbase.name == 'postgresql':
try:
dbsession = create_session(dbase)
for row in dbsession.execute('select(postgis_lib_version())'):
postgis_lib_version = row[0]
create_sfsql_tables=False
create_postgis_geometry = True
LOGGER.info('PostGIS %s detected: Skipping SFSQL tables creation', postgis_lib_version)
except:
pass
if create_sfsql_tables:
LOGGER.info('Creating table spatial_ref_sys')
srs = Table(
'spatial_ref_sys', mdata,
Column('srid', Integer, nullable=False, primary_key=True),
Column('auth_name', Text),
Column('auth_srid', Integer),
Column('srtext', Text)
)
srs.create()
i = srs.insert()
i.execute(srid=4326, auth_name='EPSG', auth_srid=4326, srtext='GEOGCS["WGS 84",DATUM["WGS_1984",SPHEROID["WGS 84",6378137,298.257223563,AUTHORITY["EPSG","7030"]],AUTHORITY["EPSG","6326"]],PRIMEM["Greenwich",0,AUTHORITY["EPSG","8901"]],UNIT["degree",0.01745329251994328,AUTHORITY["EPSG","9122"]],AUTHORITY["EPSG","4326"]]')
LOGGER.info('Creating table geometry_columns')
geom = Table(
'geometry_columns', mdata,
Column('f_table_catalog', Text, nullable=False),
Column('f_table_schema', Text, nullable=False),
Column('f_table_name', Text, nullable=False),
Column('f_geometry_column', Text, nullable=False),
Column('geometry_type', Integer),
Column('coord_dimension', Integer),
Column('srid', Integer, nullable=False),
Column('geometry_format', Text, nullable=False),
)
geom.create()
i = geom.insert()
i.execute(f_table_catalog='public', f_table_schema='public',
f_table_name=table, f_geometry_column='wkt_geometry',
geometry_type=3, coord_dimension=2,
srid=4326, geometry_format='WKT')
# abstract metadata information model
LOGGER.info('Creating table %s', table)
records = Table(
table, mdata,
# core; nothing happens without these
Column('identifier', Text, primary_key=True),
Column('typename', Text,
default='csw:Record', nullable=False, index=True),
Column('schema', Text,
default='http://www.opengis.net/cat/csw/2.0.2', nullable=False,
index=True),
Column('mdsource', Text, default='local', nullable=False,
index=True),
Column('insert_date', Text, nullable=False, index=True),
Column('xml', Text, nullable=False),
Column('anytext', Text, nullable=False),
Column('language', Text, index=True),
# identification
Column('type', Text, index=True),
Column('title', Text, index=True),
Column('title_alternate', Text, index=True),
Column('abstract', Text, index=True),
Column('keywords', Text, index=True),
Column('keywordstype', Text, index=True),
Column('parentidentifier', Text, index=True),
Column('relation', Text, index=True),
Column('time_begin', Text, index=True),
Column('time_end', Text, index=True),
Column('topicategory', Text, index=True),
Column('resourcelanguage', Text, index=True),
# attribution
Column('creator', Text, index=True),
Column('publisher', Text, index=True),
Column('contributor', Text, index=True),
Column('organization', Text, index=True),
# security
Column('securityconstraints', Text, index=True),
Column('accessconstraints', Text, index=True),
Column('otherconstraints', Text, index=True),
# date
Column('date', Text, index=True),
Column('date_revision', Text, index=True),
Column('date_creation', Text, index=True),
Column('date_publication', Text, index=True),
Column('date_modified', Text, index=True),
Column('format', Text, index=True),
Column('source', Text, index=True),
# geospatial
Column('crs', Text, index=True),
Column('geodescode', Text, index=True),
Column('denominator', Text, index=True),
Column('distancevalue', Text, index=True),
Column('distanceuom', Text, index=True),
Column('wkt_geometry', Text),
# service
Column('servicetype', Text, index=True),
Column('servicetypeversion', Text, index=True),
Column('operation', Text, index=True),
Column('couplingtype', Text, index=True),
Column('operateson', Text, index=True),
Column('operatesonidentifier', Text, index=True),
Column('operatesoname', Text, index=True),
# additional
Column('degree', Text, index=True),
Column('classification', Text, index=True),
Column('conditionapplyingtoaccessanduse', Text, index=True),
Column('lineage', Text, index=True),
Column('responsiblepartyrole', Text, index=True),
Column('specificationtitle', Text, index=True),
Column('specificationdate', Text, index=True),
Column('specificationdatetype', Text, index=True),
# distribution
# links: format "name,description,protocol,url[^,,,[^,,,]]"
Column('links', Text, index=True),
)
# add extra columns that may have been passed via extra_columns
# extra_columns is a list of sqlalchemy.Column objects
if extra_columns:
LOGGER.info('Extra column definitions detected')
for extra_column in extra_columns:
LOGGER.info('Adding extra column: %s', extra_column)
records.append_column(extra_column)
records.create()
conn = dbase.connect()
if create_plpythonu_functions and not create_postgis_geometry:
if dbase.name == 'postgresql': # create plpythonu functions within db
LOGGER.info('Setting plpythonu functions')
pycsw_home = home
function_get_anytext = '''
CREATE OR REPLACE FUNCTION get_anytext(xml text)
RETURNS text
AS $$
import sys
sys.path.append('%s')
from pycsw import util
return util.get_anytext(xml)
$$ LANGUAGE plpythonu;
''' % pycsw_home
function_query_spatial = '''
CREATE OR REPLACE FUNCTION query_spatial(bbox_data_wkt text, bbox_input_wkt text, predicate text, distance text)
RETURNS text
AS $$
import sys
sys.path.append('%s')
from pycsw import util
return util.query_spatial(bbox_data_wkt, bbox_input_wkt, predicate, distance)
$$ LANGUAGE plpythonu;
''' % pycsw_home
function_update_xpath = '''
CREATE OR REPLACE FUNCTION update_xpath(nsmap text, xml text, recprops text)
RETURNS text
AS $$
import sys
sys.path.append('%s')
from pycsw import util
return util.update_xpath(nsmap, xml, recprops)
$$ LANGUAGE plpythonu;
''' % pycsw_home
function_get_geometry_area = '''
CREATE OR REPLACE FUNCTION get_geometry_area(geom text)
RETURNS text
AS $$
import sys
sys.path.append('%s')
from pycsw import util
return util.get_geometry_area(geom)
$$ LANGUAGE plpythonu;
''' % pycsw_home
function_get_spatial_overlay_rank = '''
CREATE OR REPLACE FUNCTION get_spatial_overlay_rank(target_geom text, query_geom text)
RETURNS text
AS $$
import sys
sys.path.append('%s')
from pycsw import util
return util.get_spatial_overlay_rank(target_geom, query_geom)
$$ LANGUAGE plpythonu;
''' % pycsw_home
conn.execute(function_get_anytext)
conn.execute(function_query_spatial)
conn.execute(function_update_xpath)
conn.execute(function_get_geometry_area)
conn.execute(function_get_spatial_overlay_rank)
if dbase.name == 'postgresql':
LOGGER.info('Creating PostgreSQL Free Text Search (FTS) GIN index')
tsvector_fts = "alter table %s add column anytext_tsvector tsvector" % table
conn.execute(tsvector_fts)
index_fts = "create index fts_gin_idx on %s using gin(anytext_tsvector)" % table
conn.execute(index_fts)
# This needs to run if records exist "UPDATE records SET anytext_tsvector = to_tsvector('english', anytext)"
trigger_fts = "create trigger ftsupdate before insert or update on %s for each row execute procedure tsvector_update_trigger('anytext_tsvector', 'pg_catalog.%s', 'anytext')" % (table, language)
conn.execute(trigger_fts)
if dbase.name == 'postgresql' and create_postgis_geometry:
# create native geometry column within db
LOGGER.info('Creating native PostGIS geometry column')
if postgis_lib_version < '2':
create_column_sql = "SELECT AddGeometryColumn('%s', '%s', 4326, 'POLYGON', 2)" % (table, postgis_geometry_column)
else:
create_column_sql = "ALTER TABLE %s ADD COLUMN %s geometry(Geometry,4326);" % (table, postgis_geometry_column)
create_insert_update_trigger_sql = '''
DROP TRIGGER IF EXISTS %(table)s_update_geometry ON %(table)s;
DROP FUNCTION IF EXISTS %(table)s_update_geometry();
CREATE FUNCTION %(table)s_update_geometry() RETURNS trigger AS $%(table)s_update_geometry$
BEGIN
IF NEW.wkt_geometry IS NULL THEN
RETURN NEW;
END IF;
NEW.%(geometry)s := ST_GeomFromText(NEW.wkt_geometry,4326);
RETURN NEW;
END;
$%(table)s_update_geometry$ LANGUAGE plpgsql;
CREATE TRIGGER %(table)s_update_geometry BEFORE INSERT OR UPDATE ON %(table)s
FOR EACH ROW EXECUTE PROCEDURE %(table)s_update_geometry();
''' % {'table': table, 'geometry': postgis_geometry_column}
create_spatial_index_sql = 'CREATE INDEX %(geometry)s_idx ON %(table)s USING GIST (%(geometry)s);' \
% {'table': table, 'geometry': postgis_geometry_column}
conn.execute(create_column_sql)
conn.execute(create_insert_update_trigger_sql)
conn.execute(create_spatial_index_sql)
Example 108
Project: python-pyhsm Source File: db_import.py
def main():
parser = argparse.ArgumentParser(description='Import AEADs into the database')
parser.add_argument('path', help='filesystem path of where to find AEADs')
parser.add_argument('dburl', help='connection URL for the database')
args = parser.parse_args()
path = args.path
databaseUrl = args.dburl
if not os.path.isdir(path):
print("\nInvalid path, check your spelling.\n")
return 2
try:
engine = sqlalchemy.create_engine(databaseUrl)
#SQLAlchemy voodoo
metadata = sqlalchemy.MetaData()
aeadobj = sqlalchemy.Table('aead_table', metadata, autoload=True, autoload_with=engine)
connection = engine.connect()
except:
print("FATAL: Database connect failure")
return 1
for root, subFolders, files in os.walk(path):
if files:
if not re.match(r'^[cbdefghijklnrtuv]+$', files[0]):
continue
#build file path
filepath = os.path.join(root, files[0])
#extract the key handle from the path
keyhandle = extract_keyhandle(path, filepath)
kh_int = pyhsm.util.key_handle_to_int(keyhandle)
#instantiate a new aead object
aead = pyhsm.aead_cmd.YHSM_GeneratedAEAD(None, kh_int, '')
aead.load(filepath)
#set the public_id
public_id = str(files[0])
#check it is old format aead
if not aead.nonce:
#configure values for oldformat
aead.nonce = pyhsm.yubikey.modhex_decode(public_id).decode('hex')
aead.key_handle = key_handle_to_int(keyhandle)
if not insert_query(connection, public_id, aead, keyhandle, aeadobj):
print("WARNING: could not insert %s" % public_id)
#close sqlalchemy
connection.close()
Example 109
Project: beeswarm Source File: database_setup.py
def setup_db(connection_string):
"""
Sets up the database schema and adds defaults.
:param connection_string: Database URL, e.g. sqlite:///filename.db
This is usually taken from the config file.
"""
global DB_Session, engine
new_database = False
if connection_string == 'sqlite://' or not database_exists(connection_string):
new_database = True
engine = create_engine(connection_string, connect_args={'timeout': 20})
entities.Base.metadata.create_all(engine)
DB_Session = sessionmaker(bind=engine)
db_path = os.path.dirname(__file__)
if new_database:
# bootstrapping the db with classifications types.
json_file = open(os.path.join(db_path, 'bootstrap.json'))
data = json.load(json_file)
session = get_session()
session.execute('PRAGMA user_version = {0}'.format(beeswarm.server.db.DATABASE_VERSION))
for entry in data['classifications']:
c = session.query(Classification).filter(Classification.type == entry['type']).first()
if not c:
classification = Classification(type=entry['type'], description_short=entry['description_short'],
description_long=entry['description_long'])
session.add(classification)
else:
c.description_short = entry['description_short']
c.description_long = entry['description_long']
for username in data['bait_users']:
u = session.query(BaitUser).filter(BaitUser.username == username).first()
if not u:
logger.debug('Creating default BaitUser: {}'.format(username))
password = data['bait_users'][username]
bait_user = BaitUser(username=username, password=password)
session.add(bait_user)
session.commit()
else:
result = engine.execute("PRAGMA user_version;")
version = result.fetchone()[0]
result.close()
logger.info('Database is at version {0}.'.format(version))
if version != beeswarm.server.db.DATABASE_VERSION:
logger.error('Incompatible database version detected. This version of Beeswarm is compatible with '
'database version {0}, but {1} was found. Please delete the database, restart the Beeswarm '
'server and reconnect the drones.'.format(beeswarm.server.db.DATABASE_VERSION,
version))
sys.exit(1)
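PRAGMA user_version, used twice above, is SQLite's built-in integer slot for schema versioning; it lives in the database file itself. A small sketch of the read/write round trip on a 1.x-style engine that still supports engine.execute() (the file name is invented):
from sqlalchemy import create_engine

engine = create_engine('sqlite:///beeswarm.db')  # hypothetical file
engine.execute('PRAGMA user_version = 3')        # store the schema version
version = engine.execute('PRAGMA user_version').fetchone()[0]  # read it back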
Example 110
Project: odo Source File: sql.py
@resource.register(r'(.*sql.*|oracle|redshift)(\+\w+)?://.+')
def resource_sql(uri, *args, **kwargs):
engine = create_engine(
uri,
# roundtrip through a frozenset of tuples so we can cache the dict
connect_args=kwargs.pop('connect_args', {}),
**filter_kwargs(sa.create_engine, kwargs)
)
ds = kwargs.pop('dshape', None)
schema = kwargs.pop('schema', None)
foreign_keys = kwargs.pop('foreign_keys', None)
primary_key = kwargs.pop('primary_key', None)
# we were also given a table name
if args and isinstance(args[0], (str, unicode)):
table_name, args = args[0], args[1:]
metadata = metadata_of_engine(engine, schema=schema)
with ignoring(sa.exc.NoSuchTableError):
return attach_schema(
sa.Table(
table_name,
metadata,
autoload_with=engine,
),
schema,
)
if ds:
t = dshape_to_table(table_name, ds, metadata=metadata,
foreign_keys=foreign_keys,
primary_key=primary_key)
t.create()
return t
else:
raise ValueError("Table does not exist and no dshape provided")
# We were not given a table name
if ds:
create_from_datashape(engine, ds, schema=schema,
foreign_keys=foreign_keys)
return engine
Example 111
Project: pkgdb2 Source File: update_package_info.py
def main():
working_dir = tempfile.mkdtemp()
print working_dir
UNKNOWN = set()
KNOWN = set()
UPDATED = 0
for name, version in VERSIONS:
print '%s: %s' % (name, version)
base_url = BASE_URL % version
primary_db_location = get_primary_db_location(base_url)
if not primary_db_location:
print 'No primary db found at %s' % base_url
continue
db_ext = primary_db_location.split('primary.')[1]
dbfile_xz = os.path.join(working_dir, 'primary_db.%s' % db_ext)
download_primary_db(base_url, primary_db_location, dbfile_xz)
dbfile = os.path.join(working_dir, 'primary_db_%s.sqlite' % name)
decompress_primary_db(dbfile_xz, dbfile)
db_url = 'sqlite:///%s' % dbfile
db_session = sessionmaker(bind=create_engine(db_url))
session = db_session()
# Update the package in pkgdb
count = 0
if name == 'rawhide':
for pkg in pkgdb2.lib.search_package(
pkgdb2.SESSION, 'rpms', '*', status='Approved'):
try:
pkgobj = get_pkg_info(session, pkg.name)
except Exception, err:
UNKNOWN.add(pkg.name)
continue
if not pkgobj:
UNKNOWN.add(pkg.name)
continue
KNOWN.add(pkg.name)
msg = pkgdb2.lib.edit_package(
session=pkgdb2.SESSION,
package=pkg,
pkg_summary=pkgobj.summary,
pkg_description=pkgobj.description,
pkg_upstream_url=pkgobj.url,
user=User()
)
if msg:
UPDATED += 1
else:
tmp = set()
for pkgname in UNKNOWN:
pkg = pkgdb2.lib.search_package(
pkgdb2.SESSION, 'rpms', pkgname, status='Approved')
if len(pkg) == 1:
pkg = pkg[0]
else:
print pkgname, pkg
try:
pkgobj = get_pkg_info(session, pkg.name)
except Exception, err:
tmp.add(pkg.name)
continue
if not pkgobj:
tmp.add(pkg.name)
continue
KNOWN.add(pkg.name)
msg = pkgdb2.lib.edit_package(
session=pkgdb2.SESSION,
package=pkg,
pkg_summary=pkgobj.summary,
pkg_description=pkgobj.description,
pkg_upstream_url=pkgobj.url,
user=User()
)
if msg:
UPDATED += 1
# Add the package we didn't find here (in case)
UNKNOWN.update(tmp)
# Remove the ones we found
UNKNOWN.difference_update(KNOWN)
pkgdb2.SESSION.commit()
print '%s packages found' % len(KNOWN)
print '%s packages updated' % UPDATED
print '%s packages not found\n' % len(UNKNOWN)
for pkg in sorted(UNKNOWN):
print "No such package %s found in yum's metadata." % pkg
# Drop the temp directory
shutil.rmtree(working_dir)
Example 112
@stellar.command()
def init():
"""Initializes Stellar configuration."""
while True:
url = click.prompt(
"Please enter the url for your database.\n\n"
"For example:\n"
"PostgreSQL: postgresql://localhost:5432/\n"
"MySQL: mysql+pymysql://root@localhost/"
)
if url.count('/') == 2 and not url.endswith('/'):
url = url + '/'
if (
url.count('/') == 3 and
url.endswith('/') and
url.startswith('postgresql://')
):
connection_url = url + 'template1'
else:
connection_url = url
engine = create_engine(connection_url, echo=False)
try:
conn = engine.connect()
except OperationalError as err:
click.echo("Could not connect to database: %s" % url)
click.echo("Error message: %s" % err.message)
click.echo('')
else:
break
if engine.dialect.name not in SUPPORTED_DIALECTS:
click.echo("Your engine dialect %s is not supported." % (
engine.dialect.name
))
click.echo("Supported dialects: %s" % (
', '.join(SUPPORTED_DIALECTS)
))
if url.count('/') == 3 and url.endswith('/'):
while True:
click.echo("You have the following databases: %s" % ', '.join([
db for db in list_of_databases(conn)
if not db.startswith('stellar_')
]))
db_name = click.prompt(
"Please enter the name of the database (eg. projectdb)"
)
if database_exists(conn, db_name):
break
else:
click.echo("Could not find database %s" % db_name)
click.echo('')
else:
db_name = url.rsplit('/', 1)[-1]
url = url.rsplit('/', 1)[0] + '/'
name = click.prompt(
'Please enter your project name (used internally, eg. %s)' % db_name,
default=db_name
)
raw_url = url
if engine.dialect.name == 'postgresql':
raw_url = raw_url + 'template1'
with open('stellar.yaml', 'w') as project_file:
project_file.write(
"""
project_name: '%(name)s'
tracked_databases: ['%(db_name)s']
url: '%(raw_url)s'
stellar_url: '%(url)sstellar_data'
""".strip() %
{
'name': name,
'raw_url': raw_url,
'url': url,
'db_name': db_name
}
)
click.echo("Wrote stellar.yaml")
click.echo('')
if engine.dialect.name == 'mysql':
click.echo("Warning: MySQL support is still in beta.")
click.echo("Tip: You probably want to take a snapshot: stellar snapshot")
Example 113
Project: CloudBot Source File: bot.py
def __init__(self, loop=asyncio.get_event_loop()):
# basic variables
self.loop = loop
self.start_time = time.time()
self.running = True
# future which will be called when the bot stops
self.stopped_future = asyncio.Future(loop=self.loop)
# stores each bot server connection
self.connections = {}
# for plugins
self.logger = logger
# for plugins to abuse
self.memory = collections.defaultdict()
# declare and create data folder
self.data_dir = os.path.abspath('data')
if not os.path.exists(self.data_dir):
logger.debug("Data folder not found, creating.")
os.mkdir(self.data_dir)
# set up config
self.config = Config(self)
logger.debug("Config system initialised.")
# set values for reloading
self.plugin_reloading_enabled = self.config.get("reloading", {}).get("plugin_reloading", False)
self.config_reloading_enabled = self.config.get("reloading", {}).get("config_reloading", True)
# this doesn't REALLY need to be here but it's nice
self.user_agent = self.config.get('user_agent', 'CloudBot/3.0 - CloudBot Refresh '
'<https://github.com/CloudBotIRC/CloudBot/>')
# setup db
db_path = self.config.get('database', 'sqlite:///cloudbot.db')
self.db_engine = create_engine(db_path)
self.db_factory = sessionmaker(bind=self.db_engine)
self.db_session = scoped_session(self.db_factory)
self.db_metadata = MetaData()
self.db_base = declarative_base(metadata=self.db_metadata, bind=self.db_engine)
# create web interface
if self.config.get("web", {}).get("enabled", False) and web_installed:
self.web = WebInterface(self)
# set botvars so plugins can access when loading
database.metadata = self.db_metadata
database.base = self.db_base
logger.debug("Database system initialised.")
# Bot initialisation complete
logger.debug("Bot setup completed.")
# create bot connections
self.create_connections()
if self.plugin_reloading_enabled:
self.reloader = PluginReloader(self)
self.plugin_manager = PluginManager(self)
Example 114
Project: taxtastic Source File: update_taxids.py
def action(args):
rows = pandas.read_csv(args.infile, dtype='str')
columns = rows.columns.tolist() # preserve column order
if args.taxid_column not in columns:
raise ValueError("No column " + args.taxid_column)
if args.name_column:
if args.name_column not in columns:
msg = '"No "' + args.name_column + '" column'
raise ValueError(msg)
con = 'sqlite:///{0}'.format(args.database_file)
e = sqlalchemy.create_engine(con)
tax = Taxonomy(e, ncbi.RANKS)
merged = pandas.read_sql_table('merged', con, index_col='old_tax_id')
log.info('updating tax_ids')
rows = rows.join(merged, on=args.taxid_column)
# overwrite tax_ids where there is a new_tax_id
inew_tax_ids = ~rows['new_tax_id'].isnull()
rows.loc[inew_tax_ids, args.taxid_column] = \
rows[inew_tax_ids]['new_tax_id']
rows = rows.drop('new_tax_id', axis=1)
log.info('loading names table')
names = pandas.read_sql_table(
'names', con, columns=['tax_id', 'tax_name', 'is_primary'])
if args.name_column:
"""
use the args.name_column to do a string comparison with
names.tax_name column to find a suitable tax_id
"""
unknowns = rows[~rows[args.taxid_column].isin(names['tax_id'])]
if not unknowns.empty:
"""
Take any tax_id associated with a string match
to tax_name prioritizing is_primary=True
"""
unknowns = unknowns.drop(args.taxid_column, axis=1)
names = names.sort_values('is_primary', ascending=False)
names = names.drop_duplicates(subset='tax_name', keep='first')
names = names.set_index('tax_name')
found = unknowns.join(names, on=args.name_column, how='inner')
rows.loc[found.index, args.taxid_column] = found['tax_id']
if not args.ignore_unknowns:
unknowns = rows[~rows[args.taxid_column].isin(names['tax_id'])]
if args.unknowns:
"""
Output unknown tax_ids
"""
unknowns.to_csv(
args.unknowns,
index=False,
columns=columns,
quoting=csv.QUOTE_NONNUMERIC)
elif not unknowns.empty:
raise ValueError('Unknown or missing tax_ids present')
rows = rows[~rows.index.isin(unknowns.index)]
if args.taxid_classified:
"""
"""
if 'taxid_classified' in columns:
rows = rows.drop('taxid_classified', axis=1)
else:
columns.append('taxid_classified')
def is_classified(row):
row['taxid_classified'] = species_is_classified(
row[args.taxid_column], tax)
return row
msg = 'validating tax_ids:'
rows = utils.apply_df_status(is_classified, rows, msg)
if args.append_lineage:
"""
Append a column from the taxonomy to seq_info
"""
if args.append_lineage in columns:
rows = rows.drop(args.append_lineage, axis=1)
else:
columns.append(args.append_lineage)
def add_rank_column(row):
try:
lineage = tax.lineage(row[args.taxid_column])
except ValueError as e:
log.warn(e)
lineage = {}
row[args.append_lineage] = lineage.get(args.append_lineage, None)
return row
msg = 'appending {} column'.format(args.append_lineage)
rows = utils.apply_df_status(add_rank_column, rows, msg)
# output seq_info with new tax_ids
rows.to_csv(
args.out_file,
index=False,
columns=columns,
quoting=csv.QUOTE_NONNUMERIC)
Example 115
Project: pyspider Source File: taskdb.py
def __init__(self, url):
self.table = Table('__tablename__', MetaData(),
Column('taskid', String(64), primary_key=True, nullable=False),
Column('project', String(64)),
Column('url', String(1024)),
Column('status', Integer),
Column('schedule', LargeBinary),
Column('fetch', LargeBinary),
Column('process', LargeBinary),
Column('track', LargeBinary),
Column('lastcrawltime', Float(32)),
Column('updatetime', Float(32)),
mysql_engine='InnoDB',
mysql_charset='utf8'
)
self.url = make_url(url)
if self.url.database:
database = self.url.database
self.url.database = None
try:
engine = create_engine(self.url, pool_recycle=3600)
conn = engine.connect()
conn.execute("commit")
conn.execute("CREATE DATABASE %s" % database)
except sqlalchemy.exc.SQLAlchemyError:
pass
self.url.database = database
self.engine = create_engine(url, pool_recycle=3600)
self._list_project()
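The two-step connect above (first with the database name stripped from the URL, then with it restored) is the usual way to bootstrap a schema that may not exist yet. A condensed sketch of the same dance, assuming an older SQLAlchemy whose URL objects are mutable, as in the example (the URL is invented):
from sqlalchemy import create_engine
from sqlalchemy.engine.url import make_url

url = make_url('mysql://user:pass@localhost/projectdb')  # hypothetical URL
database, url.database = url.database, None  # drop the db name for now
tmp = create_engine(url)
tmp.execute('COMMIT')
tmp.execute('CREATE DATABASE IF NOT EXISTS %s' % database)
url.database = database
engine = create_engine(url, pool_recycle=3600)  # reconnect with it selected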
Example 116
def test_storage():
# Get resources
articles_descriptor = json.load(io.open('data/articles.json', encoding='utf-8'))
comments_descriptor = json.load(io.open('data/comments.json', encoding='utf-8'))
articles_rows = Stream('data/articles.csv', headers=1).open().read()
comments_rows = Stream('data/comments.csv', headers=1).open().read()
# Engine
engine = create_engine(os.environ['DATABASE_URL'])
# Storage
storage = Storage(engine=engine, prefix='test_storage_')
# Delete buckets
storage.delete()
# Create buckets
storage.create(
['articles', 'comments'],
[articles_descriptor, comments_descriptor])
# Recreate bucket
storage.create('comments', comments_descriptor, force=True)
# Write data to buckets
storage.write('articles', articles_rows)
storage.write('comments', comments_rows)
# Create new storage to use reflection only
storage = Storage(engine=engine, prefix='test_storage_')
# Create existent bucket
with pytest.raises(RuntimeError):
storage.create('articles', articles_descriptor)
# Assert representation
assert repr(storage).startswith('Storage')
# Assert buckets
assert storage.buckets == ['articles', 'comments']
# Assert descriptors
assert storage.describe('articles') == sync_descriptor(articles_descriptor)
assert storage.describe('comments') == sync_descriptor(comments_descriptor)
# Assert rows
assert list(storage.read('articles')) == sync_rows(articles_descriptor, articles_rows)
assert list(storage.read('comments')) == sync_rows(comments_descriptor, comments_rows)
# Delete non existent bucket
with pytest.raises(RuntimeError):
storage.delete('non_existent')
# Delete buckets
storage.delete()
Example 117
def __init__(self, url=None, engine=None, metadata=None, **options):
"""
The options are:
Required (one of the two, `engine` takes precedence):
* `url` - database URL in form of:
``backend://user:password@host:port/database``
* `sqlalchemy_options` - this backend accepts options for SQLAlchemy
in the form: ``option1=value1[&option2=value2]...``
* `engine` - SQLAlchemy engine - either this or URL should be provided
Optional:
* `schema` - default schema, where all tables are located (if not
explicitly stated otherwise)
* `fact_prefix` - used by the snowflake mapper to find fact table for a
cube, when no explicit fact table name is specified
* `dimension_prefix` - used by snowflake mapper to find dimension
tables when no explicit mapping is specified
* `fact_suffix` - used by the snowflake mapper to find fact table for a
cube, when no explicit fact table name is specified
* `dimension_suffix` - used by snowflake mapper to find dimension
tables when no explicit mapping is specified
* `dimension_schema` – schema where dimension tables are stored, if
different than common schema.
Options for denormalized views:
* `use_denormalization` - browser will use denormalized view instead
of snowflake
* `denormalized_prefix` - if denormalization is used, then this
prefix is added for cube name to find corresponding cube view
* `denormalized_schema` - schema where denormalized views are
located (use this if the views are in different schema than fact
tables, otherwise default schema is going to be used)
"""
super(SQLStore, self).__init__(**options)
if not engine and not url:
raise ConfigurationError("No URL or engine specified in options, "
"provide at least one")
if engine and url:
raise ConfigurationError("Both engine and URL specified. Use only one.")
# Create a copy of options, because we will be popping from it
self.options = coalesce_options(options, OPTION_TYPES)
self.naming = distill_naming(self.options)
if not engine:
# Process SQLAlchemy options
sa_options = sqlalchemy_options(options)
engine = sa.create_engine(url, **sa_options)
self.logger = get_logger(name=__name__)
self.connectable = engine
self.schema = self.naming.schema
# Load metadata here. This might be too expensive an operation to be
# performed on every request, so it is recommended to keep one shared
# open store per process. SQLAlchemy will take care of the necessary
# connections.
if metadata:
self.metadata = metadata
else:
self.metadata = sa.MetaData(bind=self.connectable,
schema=self.schema)
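The docstring's rule that engine takes precedence over url comes down to very little code. A sketch of the two construction paths plus the shared-metadata binding the closing comment recommends, written for 1.x-style SQLAlchemy where MetaData accepts bind= (the URL and schema name are invented):
import sqlalchemy as sa

engine = sa.create_engine('postgresql://user:pass@localhost/dw')  # from a URL...
# ...or accept a prebuilt engine from the caller; either way, bind the
# metadata once per process and reuse it across requests.
metadata = sa.MetaData(bind=engine, schema='analytics')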
Example 118
Project: petl Source File: test_db_server.py
def test_mysql():
import pymysql
connect = pymysql.connect
# assume database already created
dbapi_connection = connect(host=host,
user=user,
password=password,
database=database)
# exercise using a dbapi_connection
_setup_mysql(dbapi_connection)
_test_dbo(dbapi_connection)
# exercise using a dbapi_cursor
_setup_mysql(dbapi_connection)
dbapi_cursor = dbapi_connection.cursor()
_test_dbo(dbapi_cursor)
dbapi_cursor.close()
# exercise sqlalchemy dbapi_connection
_setup_mysql(dbapi_connection)
from sqlalchemy import create_engine
sqlalchemy_engine = create_engine('mysql+pymysql://%s:%s@%s/%s' %
(user, password, host, database))
sqlalchemy_connection = sqlalchemy_engine.connect()
sqlalchemy_connection.execute('SET SQL_MODE=ANSI_QUOTES')
_test_dbo(sqlalchemy_connection)
sqlalchemy_connection.close()
# exercise sqlalchemy session
_setup_mysql(dbapi_connection)
from sqlalchemy.orm import sessionmaker
Session = sessionmaker(bind=sqlalchemy_engine)
sqlalchemy_session = Session()
_test_dbo(sqlalchemy_session)
sqlalchemy_session.close()
# other exercises
_test_with_schema(dbapi_connection, database)
utf8_connection = connect(host=host, user=user,
password=password,
database=database,
charset='utf8')
utf8_connection.cursor().execute('SET SQL_MODE=ANSI_QUOTES')
_test_unicode(utf8_connection)
Example 119
Project: mapchete Source File: vector_io.py
def write_vector(
process,
metadata,
features,
pixelbuffer=0,
overwrite=False
):
assert isinstance(metadata["output"].schema, dict)
assert isinstance(metadata["output"].driver, str)
assert isinstance(features, list)
if process.output.is_db:
# connect to db
db_url = 'postgresql://%s:%s@%s:%s/%s' %(
metadata["output"].db_params["user"],
metadata["output"].db_params["password"],
metadata["output"].db_params["host"],
metadata["output"].db_params["port"],
metadata["output"].db_params["db"]
)
engine = create_engine(db_url, poolclass=NullPool)
meta = MetaData()
meta.reflect(bind=engine)
TargetTable = Table(
metadata["output"].db_params["table"],
meta,
autoload=True,
autoload_with=engine
)
Session = sessionmaker(bind=engine)
session = Session()
if overwrite:
delete_old = TargetTable.delete(and_(
TargetTable.c.zoom == process.tile.zoom,
TargetTable.c.row == process.tile.row,
TargetTable.c.col == process.tile.col)
)
session.execute(delete_old)
for feature in features:
try:
geom = from_shape(
shape(feature["geometry"]).intersection(
process.tile.bbox(pixelbuffer=pixelbuffer)
),
srid=process.tile.srid
)
# else:
# continue
except Exception as e:
warnings.warn("corrupt geometry: %s" %(e))
continue
properties = {}
properties.update(
zoom=process.tile.zoom,
row=process.tile.row,
col=process.tile.col,
geom=geom
)
properties.update(feature["properties"])
insert = TargetTable.insert().values(properties)
session.execute(insert)
session.commit()
session.close()
engine.dispose()
else:
process.tile.prepare_paths()
if process.tile.exists():
os.remove(process.tile.path)
try:
write_vector_window(
process.tile.path,
process.tile,
metadata,
features,
pixelbuffer=pixelbuffer
)
except:
if process.tile.exists():
os.remove(process.tile.path)
raise
Example 120
Project: home-assistant Source File: db_migrator.py
def run(script_args: List) -> int:
"""The actual script body."""
# pylint: disable=invalid-name
from sqlalchemy import create_engine
from sqlalchemy.orm import sessionmaker
from homeassistant.components.recorder import models
parser = argparse.ArgumentParser(
description="Migrate legacy DB to SQLAlchemy format.")
parser.add_argument(
'-c', '--config',
metavar='path_to_config_dir',
default=config_util.get_default_config_dir(),
help="Directory that contains the Home Assistant configuration")
parser.add_argument(
'-a', '--append',
action='store_true',
default=False,
help="Append to existing new format SQLite database")
parser.add_argument(
'--uri',
type=str,
help="Connect to URI and import (implies --append)"
"eg: mysql://localhost/homeassistant")
parser.add_argument(
'--script',
choices=['db_migrator'])
args = parser.parse_args()
config_dir = os.path.join(os.getcwd(), args.config) # type: str
# Test if configuration directory exists
if not os.path.isdir(config_dir):
if config_dir != config_util.get_default_config_dir():
print(('Fatal Error: Specified configuration directory does '
'not exist {} ').format(config_dir))
return 1
src_db = '{}/home-assistant.db'.format(config_dir)
dst_db = '{}/home-assistant_v2.db'.format(config_dir)
if not os.path.exists(src_db):
print("Fatal Error: Old format database '{}' does not exist".format(
src_db))
return 1
if not args.uri and (os.path.exists(dst_db) and not args.append):
print("Fatal Error: New format database '{}' exists already - "
"Remove it or use --append".format(dst_db))
print("Note: --append must maintain an ID mapping and is much slower"
"and requires sufficient memory to track all event IDs")
return 1
conn = sqlite3.connect(src_db)
uri = args.uri or "sqlite:///{}".format(dst_db)
engine = create_engine(uri, echo=False)
models.Base.metadata.create_all(engine)
session_factory = sessionmaker(bind=engine)
session = session_factory()
append = args.append or args.uri
c = conn.cursor()
c.execute("SELECT count(*) FROM recorder_runs")
num_rows = c.fetchone()[0]
print("Converting {} recorder_runs".format(num_rows))
c.close()
c = conn.cursor()
n = 0
for row in c.execute("SELECT * FROM recorder_runs"): # type: ignore
n += 1
session.add(models.RecorderRuns(
start=ts_to_dt(row[1]),
end=ts_to_dt(row[2]),
closed_incorrect=row[3],
created=ts_to_dt(row[4])
))
if n % 1000 == 0:
session.commit()
print_progress(n, num_rows)
print_progress(n, num_rows)
session.commit()
c.close()
c = conn.cursor()
c.execute("SELECT count(*) FROM events")
num_rows = c.fetchone()[0]
print("Converting {} events".format(num_rows))
c.close()
id_mapping = {}
c = conn.cursor()
n = 0
for row in c.execute("SELECT * FROM events"): # type: ignore
n += 1
o = models.Events(
event_type=row[1],
event_data=row[2],
origin=row[3],
created=ts_to_dt(row[4]),
time_fired=ts_to_dt(row[5]),
)
session.add(o)
if append:
session.flush()
id_mapping[row[0]] = o.event_id
if n % 1000 == 0:
session.commit()
print_progress(n, num_rows)
print_progress(n, num_rows)
session.commit()
c.close()
c = conn.cursor()
c.execute("SELECT count(*) FROM states")
num_rows = c.fetchone()[0]
print("Converting {} states".format(num_rows))
c.close()
c = conn.cursor()
n = 0
for row in c.execute("SELECT * FROM states"): # type: ignore
n += 1
session.add(models.States(
entity_id=row[1],
state=row[2],
attributes=row[3],
last_changed=ts_to_dt(row[4]),
last_updated=ts_to_dt(row[5]),
event_id=id_mapping.get(row[6], row[6]),
domain=row[7]
))
if n % 1000 == 0:
session.commit()
print_progress(n, num_rows)
print_progress(n, num_rows)
session.commit()
c.close()
return 0
Example 121
def init_sqlalchemy():
global engine, session
if engine is None:
try:
with open("db.url") as f:
db = f.read().strip()
except Exception:
raise ValueError("It looks like you don't have a `db.url` file. Please contact someone on the Open Source team to help you out.")
engine = create_engine(db)
meta = MetaData(engine)
mapper(PullRequest, Table('github_pull_request', meta, autoload=True))
mapper(PullRequestFile, Table('github_pull_request_file', meta, autoload=True))
mapper(Repository, Table('github_repository', meta, autoload=True))
mapper(User, Table('github_user', meta, autoload=True))
if 0:
print("PullRequest")
print(dir(PullRequest))
print()
print("PullRequestFile")
print(dir(PullRequestFile))
print()
print("Repository")
print(dir(Repository))
print()
print("User")
print(dir(User))
Session = sessionmaker(bind=engine)
session = Session()
return session
Example 122
Project: mittn Source File: test_dbtools.py
def test_add_false_positive(self):
# Add a false positive to the database and check that all fields
# get populated and can be compared back to the originals
issue = {'scenario_id': '1',
'url': 'testurl',
'severity': 'testseverity',
'issuetype': 'testissuetype',
'issuename': 'testissuename',
'issuedetail': 'testissuedetail',
'confidence': 'testconfidence',
'host': 'testhost',
'port': 'testport',
'protocol': 'testprotocol',
'messages': '{foo=bar}'}
dbtools.add_false_positive(self.context, issue)
# Connect directly to the database and check the data is there
db_engine = sqlalchemy.create_engine(self.context.dburl)
dbconn = db_engine.connect()
db_metadata = sqlalchemy.MetaData()
headlessscanner_issues = Table(
'headlessscanner_issues',
db_metadata,
Column('new_issue', types.Boolean),
Column('issue_no', types.Integer, primary_key=True, nullable=False), # Implicit autoincrement
Column('timestamp', types.DateTime(timezone=True)),
Column('test_runner_host', types.Text),
Column('scenario_id', types.Text),
Column('url', types.Text),
Column('severity', types.Text),
Column('issuetype', types.Text),
Column('issuename', types.Text),
Column('issuedetail', types.Text),
Column('confidence', types.Text),
Column('host', types.Text),
Column('port', types.Text),
Column('protocol', types.Text),
Column('messages', types.LargeBinary)
)
db_select = sqlalchemy.sql.select([headlessscanner_issues])
db_result = dbconn.execute(db_select)
result = db_result.fetchone()
for key, value in issue.iteritems():
if key == 'messages':
self.assertEqual(result[key], json.dumps(value))
else:
self.assertEqual(result[key], value,
'%s not found in database after add' % key)
self.assertEqual(result['test_runner_host'], socket.gethostbyname(socket.getfqdn()),
'Test runner host name not correct in database')
self.assertLessEqual(result['timestamp'], datetime.datetime.utcnow(),
'Timestamp not correctly stored in database')
dbconn.close()
Example 123
Project: pokedex Source File: test_schema.py
def test_i18n_table_creation():
"""Creates and manipulates a magical i18n table, completely independent of
the existing schema and data. Makes sure that the expected behavior of the
various proxies and columns works.
"""
Base = declarative_base()
engine = create_engine("sqlite:///:memory:", echo=True)
Base.metadata.bind = engine
# Need this for the foreign keys to work!
class Language(Base):
__tablename__ = 'languages'
id = Column(Integer, primary_key=True, nullable=False)
identifier = Column(String(2), nullable=False, unique=True)
class Foo(Base):
__tablename__ = 'foos'
__singlename__ = 'foo'
id = Column(Integer, primary_key=True, nullable=False)
translation_classes = []
FooText = create_translation_table('foo_text', Foo, 'texts',
language_class=Language,
name = Column(String(100)),
)
# OK, create all the tables and gimme a session
Base.metadata.create_all()
sm = sessionmaker(class_=MultilangSession)
sess = MultilangScopedSession(sm)
# Create some languages and foos to bind together
lang_en = Language(identifier='en')
sess.add(lang_en)
lang_jp = Language(identifier='jp')
sess.add(lang_jp)
lang_ru = Language(identifier='ru')
sess.add(lang_ru)
foo = Foo()
sess.add(foo)
# Commit so the above get primary keys filled in, then give the
# session the language id
sess.commit()
# Note that this won't apply to sessions created in other threads, but that
# ought not be a problem!
sess.default_language_id = lang_en.id
# Give our foo some names, as directly as possible
foo_text = FooText()
foo_text.foreign_id = foo.id
foo_text.local_language_id = lang_en.id
foo_text.name = 'english'
sess.add(foo_text)
foo_text = FooText()
foo_text.foo_id = foo.id
foo_text.local_language_id = lang_jp.id
foo_text.name = 'nihongo'
sess.add(foo_text)
# Commit! This will expire all of the above.
sess.commit()
### Test 1: re-fetch foo and check its attributes
foo = sess.query(Foo).params(_default_language_id=lang_en.id).one()
# Dictionary of language identifiers => names
assert foo.name_map[lang_en] == 'english'
assert foo.name_map[lang_jp] == 'nihongo'
# Default language, currently English
assert foo.name == 'english'
sess.expire_all()
### Test 2: querying by default language name should work
foo = sess.query(Foo).filter_by(name='english').one()
assert foo.name == 'english'
sess.expire_all()
### Test 3: joinedload on the default name should appear to work
# THIS SHOULD WORK SOMEDAY
# .options(joinedload(Foo.name)) \
foo = sess.query(Foo) \
.options(joinedload(Foo.texts_local)) \
.one()
assert foo.name == 'english'
sess.expire_all()
### Test 4: joinedload on all the names should appear to work
# THIS SHOULD ALSO WORK SOMEDAY
# .options(joinedload(Foo.name_map)) \
foo = sess.query(Foo) \
.options(joinedload(Foo.texts)) \
.one()
assert foo.name_map[lang_en] == 'english'
assert foo.name_map[lang_jp] == 'nihongo'
sess.expire_all()
### Test 5: Mutating the dict collection should work
foo = sess.query(Foo).one()
foo.name_map[lang_en] = 'different english'
foo.name_map[lang_ru] = 'new russian'
sess.commit()
assert foo.name_map[lang_en] == 'different english'
assert foo.name_map[lang_ru] == 'new russian'
Example 124
Project: PyDev.Debugger Source File: sql.py
def __init__(self, url, creator = None):
"""
Connect to the database using the given connection URL.
The current implementation uses SQLAlchemy and so it will support
whatever database said module supports.
@type url: str
@param url:
URL that specifies the database to connect to.
Some examples:
- Opening an SQLite file:
C{dao = CrashDAO("sqlite:///C:\\some\\path\\database.sqlite")}
- Connecting to a locally installed SQL Express database:
C{dao = CrashDAO("mssql://.\\SQLEXPRESS/Crashes?trusted_connection=yes")}
- Connecting to a MySQL database running locally, using the
C{oursql} library, authenticating as the "winappdbg" user with
no password:
C{dao = CrashDAO("mysql+oursql://winappdbg@localhost/Crashes")}
- Connecting to a Postgres database running locally,
authenticating with user and password:
C{dao = CrashDAO("postgresql://winappdbg:winappdbg@localhost/Crashes")}
For more information see the C{SQLAlchemy} documentation online:
U{http://docs.sqlalchemy.org/en/latest/core/engines.html}
Note that in all dialects except for SQLite the database
must already exist. The tables schema, however, is created
automatically when connecting for the first time.
To create the database in MSSQL, you can use the
U{SQLCMD<http://msdn.microsoft.com/en-us/library/ms180944.aspx>}
command::
sqlcmd -Q "CREATE DATABASE Crashes"
In MySQL you can use something like the following::
mysql -u root -e "CREATE DATABASE Crashes;"
And in Postgres::
createdb Crashes -h localhost -U winappdbg -p winappdbg -O winappdbg
Some small changes to the schema may be tolerated (for example,
increasing the maximum length of string columns, or adding new
columns with default values). Of course, it's best to test it
first before making changes in a live database. This all depends
very much on the SQLAlchemy version you're using, but it's best
to use the latest version always.
@type creator: callable
@param creator: (Optional) Callback function that creates the SQL
database connection.
Normally it's not necessary to use this argument. However in some
odd cases you may need to customize the database connection.
"""
# Parse the connection URL.
parsed_url = URL(url)
schema = parsed_url.drivername
if '+' in schema:
dialect, driver = schema.split('+')
else:
dialect, driver = schema, 'base'
dialect = dialect.strip().lower()
driver = driver.strip()
# Prepare the database engine arguments.
arguments = {'echo' : self._echo}
if dialect == 'sqlite':
arguments['module'] = sqlite3.dbapi2
arguments['listeners'] = [_SQLitePatch()]
if creator is not None:
arguments['creator'] = creator
# Load the database engine.
engine = create_engine(url, **arguments)
# Create a new session.
session = self._new_session(bind = engine)
# Create the required tables if they don't exist.
BaseDTO.metadata.create_all(engine)
# TODO: create a dialect specific index on the "signature" column.
# Set the instance properties.
self._url = parsed_url
self._driver = driver
self._dialect = dialect
self._session = session
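For reference, the dialect/driver split the constructor performs on the URL can be sketched with SQLAlchemy's own URL parser (a hedged illustration, not CrashDAO code):
from sqlalchemy.engine.url import make_url

parsed = make_url('mysql+oursql://winappdbg@localhost/Crashes')
# drivername is '<dialect>+<driver>', or just '<dialect>' for the default
dialect, _, driver = parsed.drivername.partition('+')
assert dialect == 'mysql'
assert (driver or 'base') == 'oursql'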
0
Example 125
Project: bitex Source File: test_model.py
def setUp(self):
from models import Base, db_bootstrap
self.engine = create_engine('sqlite://', echo=False)
Base.metadata.create_all(self.engine)
self.db_session = scoped_session(sessionmaker(bind=self.engine))
TradeApplication.instance().db_session = self.db_session
db_bootstrap(self.db_session)
currencies = [
[ "BTC" , u"฿" , "Bitcoin" , True, 10000, "{:,.8f}", u"฿ #,##0.00000000;(฿ #,##0.00000000)" , "{:,.8f}", u"฿ #,##0.00000000;(฿ #,##0.0000000)" ],
[ "USD" , u"$" , "Dollar" , False, 100 , "{:,.2f}", u"¤ #,##0.00;(¤ #,##0.00)" , "{:,.2f}", u"¤ #,##0.00;(¤ #,##0.00)" ]
]
for c in currencies:
e = Currency(code = c[0],
sign = c[1],
description = c[2],
is_crypto = c[3],
pip = c[4],
format_python = c[5],
format_js = c[6],
human_format_python = c[7],
human_format_js = c[8] )
self.db_session.add(e)
self.db_session.commit()
instruments = [
["BTCUSD", "USD", "BTC / USD"]
]
for currency_description in instruments:
e = Instrument(symbol=currency_description[0],
currency=currency_description[1],
description=currency_description[2])
self.db_session.add(e)
self.db_session.commit()
# user root
e = User(id = -1,
username = 'root',
email = '[email protected]',
password = 'abc12345',
country_code = 'US',
state = 'NY',
transaction_fee_buy = 0,
transaction_fee_sell = 0,
verified = 3,
is_staff = True,
is_system = True,
is_broker = True,
email_lang = 'en')
self.db_session.add(e)
# user blinktrade
e = User(id = 8999999,
username = 'blinktrade',
email = '[email protected]',
password = 'abc12345',
country_code = 'US',
state = 'NY',
transaction_fee_buy = 0,
transaction_fee_sell = 0,
verified = 3,
is_staff = True,
is_broker = True,
email_lang = 'en')
self.db_session.add(e)
# user exchange
e = User(id = 5,
username = 'exchange',
email = '[email protected]',
broker_id = 8999999,
broker_username = 'blinktrade',
password = 'abc12345',
country_code = 'US',
state = 'NY',
transaction_fee_buy = 0,
transaction_fee_sell = 0,
verified = 5,
is_broker = True,
email_lang = 'en')
self.db_session.add(e)
self.db_session.commit()
# user exchange bonus
e = User(id = 90000000,
username = 'exchange_bonus',
email = '[email protected]',
broker_id = 5,
broker_username = 'exchange',
password = 'abc12345',
country_code = 'US',
state = 'NY',
transaction_fee_buy = 0,
transaction_fee_sell = 0,
verified = 5,
is_broker = True,
email_lang = 'en')
self.db_session.add(e)
self.db_session.commit()
self.user_exchange_bonus = e
# user exchange fees
e = User(id = 90000001,
username = 'exchange_fees',
email = '[email protected]',
broker_id = 5,
broker_username = 'exchange',
password = 'abc12345',
country_code = 'US',
state = 'NY',
transaction_fee_buy = 0,
transaction_fee_sell = 0,
verified = 5,
is_broker = True,
email_lang = 'en')
self.db_session.add(e)
self.db_session.commit()
self.user_exchange_fees = e
# broker exchange
e = Broker(id = 5,
short_name = 'exchange',
business_name = 'BlinkTrade Demo Exchange',
address = '21 Bitcoin Ave',
signup_label = 'BlinkTrade Demo Exchange',
city = 'New York',
state = 'NY',
zip_code = '10000',
country_code = 'US',
lang = 'en',
country = 'United States',
mandrill_api_key = None,
mailer_from_name = 'BlinkTrade',
mailer_from_email = '[email protected]',
mailer_signature = 'BlinkTrade Demo Exchange',
mailchimp_list_id = '5c7b7818d8',
phone_number_1 = None,
phone_number_2 = None,
skype = 'blinktrade',
email = '[email protected]',
verification_jotform = 'https://secure.jotform.co/form/42336230941852?user_id={{UserID}}&username={{Username}}&broker_id={{BrokerID}}&broker_username={{BrokerUsername}}&email={{Email}}&phoneNumber[country]=1&address[state]={{State}}&address[country]=United+States',
upload_jotform = 'https://secure.jotform.co/form/42344880060854?user_id={{UserID}}&username={{Username}}&broker_id={{BrokerID}}&broker_username={{BrokerUsername}}&deposit_method={{DepositMethod}}&control_number={{ControlNumber}}&deposit_id={{DepositID}}',
currencies = 'USD',
withdraw_structure = json.dumps(
{
"BTC": [
{
"method":"bitcoin",
"description":"Bitcoin withdrawal",
"disclaimer": "",
"percent_fee":0,
"fixed_fee":0,
"limits": {
"0": {"enabled": True, "min": 500000, "max": 100000000},
"1": {"enabled": True, "min": 500000, "max": 100000000},
"2": {"enabled": True, "min": 500000, "max": 100000000},
"3": {"enabled": True, "min": 500000},
"4": {"enabled": True, "min": 500000},
"5": {"enabled": True, "min": 500000}
},
"fields": [
{"side":"client", "name": "Wallet" , "validator":"validateAddress", "type":"text" , "value":"" , "label":"Wallet", "placeholder":"" },
{"side":"broker", "name": "TransactionID" , "validator":"validateAlphaNum", "type":"text" , "value":"" , "label":"TransactionID", "placeholder":"" }
]
}
],
"USD": [
{
"method":"swift",
"description":"Swift International Transfer",
"disclaimer":"84 hours, 1% fee + $25",
"percent_fee": 1,
"fixed_fee": 2500000000,
"limits": {
"0": {"enabled": False},
"1": {"enabled": False},
"2": {"enabled": False},
"3": {"enabled": True, "min": 3500000000, "max": 280000000000},
"4": {"enabled": True, "min": 3500000000, "max": 5000000000000},
"5": {"enabled": True, "min": 3500000000}
},
"fields": [
{"side":"client", "name": "BankName" , "validator":"validateAlphaNum", "type":"text" , "value":"" , "label":"Banco name", "placeholder": "ex. JPMORGAN CHASE BANK, N.A" },
{"side":"client", "name": "BankSwift" , "validator":"validateAlphaNum", "type":"text" , "value":"" , "label":"Swift code", "placeholder": "ex. CHASUS33" },
{"side":"client", "name": "RoutingNumber", "validator":"validateAlphaNum", "type":"text" , "value":"" , "label":"Routing Number", "placeholder":"ex. 021000021" },
{"side":"client", "name": "AccountNumber", "validator":"validateAlphaNum", "type":"text" , "value":"" , "label":"Account Number", "placeholder":"ex. 88888-8" },
{"side":"broker", "name": "TransactionID", "validator":"validateAlphaNum", "type":"text" , "value":"" , "label":"TransactionID", "placeholder":"" }
]
}
]
}
).decode('utf-8'),
crypto_currencies = json.dumps([
{
"Wallets": [
{
"managed_by": "BlinkTrade, Exchange Operator, Mediator ",
"signatures": [],
"type": "cold",
"multisig": False,
"address": "n3yyGwzyfTxbKB8hkkv2AsQ9nBQgEozsV4"
},
{
"managed_by": "Exchange Operator ",
"signatures": [],
"type": "hot",
"multisig": False,
"address": "msQRdMPcwLr3rWsLzG56ABhHtfavHH2yVW"
}
],
"CurrencyCode": "BTC",
"Confirmations": [
[ 0, 200000000, 1],
[ 200000000, 20000000000, 3],
[20000000000, 2100000000000000, 6]
],
"CurrencyDescription": "Bitcoin"
}
]).decode('utf-8'),
accept_customers_from = json.dumps([["*"],[ "CU", "SO", "SD", "NG", "IR", "KP" ]]).decode('utf-8'),
is_broker_hub = False,
support_url = 'mailto:[email protected]',
tos_url = 'https://docs.google.com/a/blinktrade.com/docuement/d/1HyFRs_2Seh4LGZYjPk8bmbxueUjF7RMz-koAM3rG2Pc/pub?embedded=true',
fee_structure = json.dumps([
{ "Operation" : "Wire transfer", "Fee":"1%" , "Terms":"Exchange operator decides its fees" }
] ).decode('utf-8'),
transaction_fee_buy = 60,
transaction_fee_sell = 60,
accounts = json.dumps({
"bonus":[ 90000000, "exchange_bonus", [ "USD", 100000000 ] ] ,
"fees":[ 90000001, "exchange_fees" ]
}).decode('utf-8'),
status = '1',
ranking = 5 )
self.db_session.add(e)
self.db_session.commit()
e = DepositMethods(id = 501,
broker_id = 5,
name = 'usps',
description = 'USPS Money order',
disclaimer = '1 business day',
type = 'DTP',
percent_fee = 0,
fixed_fee = 500000000,
broker_deposit_ctrl_num = 501000001,
currency = 'USD',
deposit_limits = json.dumps({
"0": {"enabled": False},
"1": {"enabled": False},
"2": {"enabled": False},
"3": {"enabled": True, "min" : 1000000000, "max": 280000000000 },
"4": {"enabled": True, "min" : 1000000000, "max": 5000000000000 },
"5": {"enabled": True, "min" : 1000000000 }
}).decode('utf-8'),
html_template = """
<!DOCTYPE html>
<html>
<head>
<meta charset="utf-8">
<link href="//maxcdn.bootstrapcdn.com/bootstrap/2.3.0/css/bootstrap.min.css" rel="stylesheet">
</head>
<body style="background-color: #ffffff">
<div class="container">
<div class="content-fluid">
<table class="table table-condensed">
<tr>
<td>Order ID:</td>
<td>*|control_number|*</td>
</tr>
<tr>
<td>Created:</td>
<td>*|created|*</td>
</tr>
<tr>
<td>Deposit Method:</td>
<td>Money Order</td>
</tr>
<tr>
<td>Instructions:</td>
<td>
1. Head to your local United States Postal Service and purchase a money order slip for the correct amount. Learn more about USPS money orders <a href="https://www.usps.com/shop/money-orders.htm">here</a><br/>
2. Fill out the money order form. <b>Important: Make sure to write your confirmation code directly on it!</b><br/>
3. Take a picture of the filled out money order<br/>
4. Upload the photo of the money order in the system<br/>
5. Send the money order to :
<strong>Satoshi Nakamoto</strong><br/>
<strong>21 Bitcoin Street</strong><br/>
<strong>New York - NY - 10001</strong><br/>
</td>
</tr>
<tr>
<td>Total Deposit:</td>
<td>$ *|value|*</td>
</tr>
<tr>
<td>Notes:</td>
<td> <small>
Please complete your deposit according to your preferred method. Be sure to send a copy of the Order ID with the receipt of completed payment to us.
</small> </td>
</tr>
</table>
</div>
</div>
</body>
</html>
""",
parameters = '{}')
self.db_session.add(e)
self.db_session.commit()
self.deposit_method_501 = e
self.deposit_method_501.generate_deposit(self.db_session,
self.user_exchange_bonus,
100000000000,
None)
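Stripped of the fixture data, the setUp above reduces to a common test pattern: an in-memory SQLite engine, schema creation from the declarative metadata, and a scoped_session handed to the code under test. A minimal sketch with a stand-in model:
from sqlalchemy import Column, Integer, create_engine
from sqlalchemy.ext.declarative import declarative_base
from sqlalchemy.orm import scoped_session, sessionmaker

Base = declarative_base()

class Thing(Base):
    __tablename__ = 'thing'
    id = Column(Integer, primary_key=True)

engine = create_engine('sqlite://', echo=False)  # 'sqlite://' is in-memory
Base.metadata.create_all(engine)
db_session = scoped_session(sessionmaker(bind=engine))
db_session.add(Thing())
db_session.commit()
assert db_session.query(Thing).count() == 1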
0
Example 126
def create_tables(config, debug=False):
""" Create the tables in the database using the information from the
url obtained.
:arg db_url, URL used to connect to the database. The URL contains
information with regards to the database engine, the host to
connect to, the user and password and the database name.
ie: <engine>://<user>:<password>@<host>/<dbname>
:kwarg alembic_ini, path to the alembic ini file. This is necessary
to be able to use alembic correctly, but not for the unit-tests.
:kwarg debug, a boolean specifying whether we should have the verbose
output of sqlalchemy or not.
:return a session that can be used to query the database.
"""
db_url = config['SQLALCHEMY_DATABASE_URI']
engine = sa.create_engine(db_url, echo=debug)
models.Base.metadata.create_all(engine)
# engine.execute(collection_package_create_view(driver=engine.driver))
if db_url.startswith('sqlite:'):
# Ignore the warning about con_record
# pylint: disable=W0613
def _fk_pragma_on_connect(dbapi_con, con_record):
''' Tries to enforce referential constraints on sqlite. '''
dbapi_con.execute('pragma foreign_keys=ON')
sa.event.listen(engine, 'connect', _fk_pragma_on_connect)
# Generate the Alembic version table and "stamp" it with the latest rev
alembic.command.stamp(get_alembic_config(db_url), "head")
# Add missing distributions
session = create_session(db_url, debug=debug)
for d_id, d_name in config["DISTRIBUTIONS"].items():
distro = session.query(models.Distribution).get(d_id)
if distro:
continue
session.add(models.Distribution(id=d_id, name=d_name))
session.commit()
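The _fk_pragma_on_connect hook above is worth isolating: SQLite disables referential constraints per connection, so the 'connect' event re-enables them on every new DBAPI connection the pool opens. A self-contained sketch (SQLAlchemy 1.x style, matching the example):
import sqlalchemy as sa

engine = sa.create_engine('sqlite:///:memory:')

@sa.event.listens_for(engine, 'connect')
def _fk_pragma_on_connect(dbapi_con, con_record):
    ''' Tries to enforce referential constraints on sqlite. '''
    dbapi_con.execute('pragma foreign_keys=ON')

assert engine.execute('pragma foreign_keys').scalar() == 1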
0
Example 127
Project: bottle-cork Source File: sqlalchemy_backend.py
def __init__(self, db_full_url, users_tname='users', roles_tname='roles',
pending_reg_tname='register', initialize=False, **kwargs):
if not sqlalchemy_available:
raise RuntimeError("The SQLAlchemy library is not available.")
self._metadata = MetaData()
if initialize:
# Create new database if needed.
db_url, db_name = db_full_url.rsplit('/', 1)
if is_py3 and db_url.startswith('mysql'):
print("WARNING: MySQL is not supported under Python3")
self._engine = create_engine(db_url, encoding='utf-8', **kwargs)
try:
self._engine.execute("CREATE DATABASE %s" % db_name)
except Exception as e:
log.info("Failed DB creation: %s" % e)
# SQLite in-memory database URL: "sqlite:///:memory:"
if db_name != ':memory:' and not db_url.startswith('postgresql'):
self._engine.execute("USE %s" % db_name)
else:
self._engine = create_engine(db_full_url, encoding='utf-8', **kwargs)
self._users = Table(users_tname, self._metadata,
Column('username', Unicode(128), primary_key=True),
Column('role', ForeignKey(roles_tname + '.role')),
Column('hash', String(256), nullable=False),
Column('email_addr', String(128)),
Column('desc', String(128)),
Column('creation_date', String(128), nullable=False),
Column('last_login', String(128), nullable=False)
)
self._roles = Table(roles_tname, self._metadata,
Column('role', String(128), primary_key=True),
Column('level', Integer, nullable=False)
)
self._pending_reg = Table(pending_reg_tname, self._metadata,
Column('code', String(128), primary_key=True),
Column('username', Unicode(128), nullable=False),
Column('role', ForeignKey(roles_tname + '.role')),
Column('hash', String(256), nullable=False),
Column('email_addr', String(128)),
Column('desc', String(128)),
Column('creation_date', String(128), nullable=False)
)
self.users = SqlTable(self._engine, self._users, 'username')
self.roles = SqlSingleValueTable(self._engine, self._roles, 'role', 'level')
self.pending_registrations = SqlTable(self._engine, self._pending_reg, 'code')
if initialize:
self._initialize_storage(db_name)
log.debug("Tables created")
0
Example 128
Project: bitex Source File: bootstrap.py
def main():
candidates = ['bootstrap.ini']
if len(sys.argv) > 1:
candidates.append(os.path.expanduser(sys.argv[1]))
config = ConfigParser.SafeConfigParser()
config.read( candidates )
from trade.models import Base, Currency, Instrument, User, Broker, DepositMethods
db_engine = config.get('database','sqlalchemy_engine') + ':///' + os.path.expanduser(config.get('database','sqlalchemy_connection_string'))
engine = create_engine( db_engine, echo=True)
#engine.raw_connection().connection.text_factory = str
Base.metadata.create_all(engine)
session = scoped_session(sessionmaker(bind=engine))
for section_name in config.sections():
if section_name == 'currencies':
for id, currency_json in config.items(section_name):
c = json.loads(currency_json)
if Currency.get_currency(session,c[0]) :
continue
e = Currency(code = c[0],
sign = c[1],
description = c[2],
is_crypto = c[3],
pip = c[4],
format_python = c[5],
format_js = c[6],
human_format_python = c[7],
human_format_js = c[8] )
session.add(e)
session.commit()
if section_name == 'instruments':
for id, instrument_json in config.items(section_name):
currency_description = json.loads(instrument_json)
if Instrument.get_instrument(session, currency_description[0]):
continue
e = Instrument(symbol=currency_description[0],
currency=currency_description[1],
description=currency_description[2])
session.add(e)
session.commit()
if section_name[:4] == 'user':
broker_id = None
try:
broker_id = config.getint(section_name, 'broker_id')
except Exception,e:
pass
broker_username = None
try:
broker_username = config.get(section_name, 'broker_username')
except Exception,e:
pass
if not User.get_user(session,broker_id, config.get(section_name, 'username')):
password = base64.b32encode(os.urandom(10))
try:
password = config.get(section_name, 'password')
except Exception,e:
pass
transaction_fee_buy = None
try:
transaction_fee_buy = config.getint(section_name, 'transaction_fee_buy')
except Exception,e:
pass
transaction_fee_sell = None
try:
transaction_fee_sell = config.getint(section_name, 'transaction_fee_sell')
except Exception,e:
pass
verified = 0
try:
verified = config.getint(section_name, 'verified')
except Exception,e:
pass
is_system = False
try:
is_system = config.getboolean(section_name, 'is_system')
except Exception,e:
pass
is_staff = False
try:
is_staff = config.getboolean(section_name, 'is_staff')
except Exception,e:
pass
is_broker = False
try:
is_broker = config.getboolean(section_name, 'is_broker')
except Exception,e:
pass
state = None
try:
state = config.get(section_name, 'state')
except Exception,e:
pass
e = User(id = config.getint(section_name, 'id'),
username = config.get(section_name, 'username'),
email = config.get(section_name, 'email'),
broker_id = broker_id,
broker_username = broker_username,
password = password,
country_code = config.get(section_name, 'country_code'),
state = state,
transaction_fee_buy = transaction_fee_buy,
transaction_fee_sell = transaction_fee_sell,
verified = verified,
is_staff = is_staff,
is_system = is_system,
is_broker = is_broker,
email_lang = config.get(section_name, 'email_lang'))
session.add(e)
session.commit()
if section_name[:6] == 'broker':
if not Broker.get_broker(session, config.getint(section_name, 'id')):
phone_number_1 = None
try:
phone_number_1 = config.get(section_name, 'phone_number_1')
except Exception,e:
pass
phone_number_2 = None
try:
phone_number_2 = config.get(section_name, 'phone_number_2')
except Exception,e:
pass
skype = None
try:
skype = config.get(section_name, 'skype')
except Exception,e:
pass
transaction_fee_buy = 0
try:
transaction_fee_buy = config.getint(section_name, 'transaction_fee_buy')
except Exception,e:
pass
transaction_fee_sell = 0
try:
transaction_fee_sell = config.getint(section_name, 'transaction_fee_sell')
except Exception,e:
pass
e = Broker(id = config.getint(section_name, 'id'),
short_name = config.get(section_name, 'short_name'),
business_name = config.get(section_name, 'business_name'),
address = config.get(section_name, 'address'),
signup_label = config.get(section_name, 'signup_label'),
city = config.get(section_name, 'city'),
state = config.get(section_name, 'state'),
zip_code = config.get(section_name, 'zip_code'),
country_code = config.get(section_name, 'country_code'),
lang = config.get(section_name, 'lang'),
country = config.get(section_name, 'country'),
mandrill_api_key = config.get(section_name, 'mandrill_api_key'),
mailer_from_name = config.get(section_name, 'mailer_from_name'),
mailer_from_email = config.get(section_name, 'mailer_from_email'),
mailer_signature = config.get(section_name, 'mailer_signature'),
mailchimp_list_id = config.get(section_name, 'mailchimp_list_id'),
phone_number_1 = phone_number_1,
phone_number_2 = phone_number_2,
skype = skype,
email = config.get(section_name, 'email'),
verification_jotform = config.get(section_name, 'verification_jotform'),
upload_jotform = config.get(section_name, 'upload_jotform'),
currencies = config.get(section_name, 'currencies'),
withdraw_structure = json.dumps(json.loads(config.get(section_name, 'withdraw_structure', raw=True))).decode('utf-8'),
crypto_currencies = json.dumps(json.loads(config.get(section_name, 'crypto_currencies', raw=True))).decode('utf-8'),
accept_customers_from = json.dumps(json.loads(config.get(section_name, 'accept_customers_from', raw=True))).decode('utf-8'),
is_broker_hub = config.getboolean(section_name, 'is_broker_hub'),
support_url = config.get(section_name, 'support_url'),
tos_url = config.get(section_name, 'tos_url'),
fee_structure = json.dumps(json.loads(config.get(section_name, 'fee_structure', raw=True))).decode('utf-8'),
transaction_fee_buy = transaction_fee_buy,
transaction_fee_sell = transaction_fee_sell,
accounts = json.dumps(json.loads(config.get(section_name, 'accounts', raw=True))).decode('utf-8'),
status = config.get(section_name, 'status'),
ranking = config.getint(section_name, 'ranking'))
session.add(e)
session.commit()
if section_name[:14] == 'deposit_method':
if not DepositMethods.get_deposit_method(session, config.getint(section_name, 'id')):
e = DepositMethods(id = config.getint(section_name, 'id'),
broker_id = config.getint(section_name, 'broker_id'),
name = config.get(section_name, 'name').decode('utf-8'),
description = config.get(section_name, 'description').decode('utf-8'),
disclaimer = config.get(section_name, 'disclaimer').decode('utf-8'),
type = config.get(section_name, 'type'),
percent_fee = config.getfloat(section_name, 'percent_fee'),
fixed_fee = config.getint(section_name, 'fixed_fee'),
broker_deposit_ctrl_num = config.getint(section_name, 'broker_deposit_ctrl_num'),
currency = config.get(section_name, 'currency'),
deposit_limits = json.dumps(json.loads(config.get(section_name, 'deposit_limits', raw=True))).decode('utf-8'),
html_template = config.get(section_name, 'html_template', raw=True).decode('utf-8'),
parameters = json.dumps(json.loads(config.get(section_name, 'parameters', raw=True))).decode('utf-8') )
session.add(e)
session.commit()
0
Example 129
Project: portingdb Source File: htmlreport.py
def create_app(db_url, cache_config=None):
if cache_config is None:
cache_config = {'backend': 'dogpile.cache.null'}
cache = make_region().configure(**cache_config)
app = Flask(__name__)
app.config['DB'] = sessionmaker(bind=create_engine(db_url))
db = app.config['DB']()
app.config['Cache'] = cache
app.config['CONFIG'] = {c.key: json.loads(c.value)
for c in db.query(tables.Config)}
app.jinja_env.undefined = StrictUndefined
app.jinja_env.filters['md'] = markdown_filter
app.jinja_env.filters['format_rpm_name'] = format_rpm_name
app.jinja_env.filters['format_quantity'] = format_quantity
app.jinja_env.filters['format_percent'] = format_percent
app.jinja_env.filters['format_time_ago'] = format_time_ago
@app.context_processor
def add_template_globals():
return {
'cache_tag': uuid.uuid4(),
'len': len,
'log': math.log,
'config': app.config['CONFIG'],
}
def _add_route(url, func, get_keys=()):
@functools.wraps(func)
def decorated(*args, **kwargs):
creator = functools.partial(func, *args, **kwargs)
key_dict = {'url': url,
'args': args,
'kwargs': kwargs,
'get': {k: request.args.get(k) for k in get_keys}}
key = json.dumps(key_dict, sort_keys=True)
print(key)
return cache.get_or_create(key, creator)
app.route(url)(decorated)
_add_route("/", hello)
_add_route("/stats.json", jsonstats)
_add_route("/pkg/<pkg>/", package)
_add_route("/grp/<grp>/", group)
_add_route("/graph/", graph)
_add_route("/graph/portingdb.json", graph_json)
_add_route("/piechart.svg", piechart_svg)
_add_route("/grp/<grp>/piechart.svg", piechart_grp)
_add_route("/pkg/<pkg>/piechart.svg", piechart_pkg)
_add_route("/grp/<grp>/graph/", graph_grp)
_add_route("/grp/<grp>/graph/data.json", graph_json_grp)
_add_route("/pkg/<pkg>/graph/", graph_pkg)
_add_route("/pkg/<pkg>/graph/data.json", graph_json_pkg)
_add_route("/by_loc/", by_loc, get_keys={'sort', 'reverse'})
_add_route("/by_loc/grp/<grp>/", group_by_loc, get_keys={'sort', 'reverse'})
_add_route("/mispackaged/", mispackaged, get_keys={'requested'})
_add_route("/history/", history, get_keys={'expand'})
_add_route("/history/data.csv", history_csv)
_add_route("/howto/", howto)
return app
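The _add_route closure above leans entirely on dogpile.cache's get_or_create(): the first call with a given key invokes the creator and stores the result, later calls return the cached value. A minimal sketch with an in-memory backend:
from dogpile.cache import make_region

cache = make_region().configure('dogpile.cache.memory')

def expensive_view():
    print('rendering...')  # runs only on a cache miss
    return '<html>...</html>'

first = cache.get_or_create('/', expensive_view)
second = cache.get_or_create('/', expensive_view)  # cache hit, no rendering
assert first == second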
0
Example 130
def setUp(self):
Model = declarative_base()
student_course = Table(
'student_course', Model.metadata,
Column('student_id', sqla_types.Integer, ForeignKey('student.id')),
Column('course_id', sqla_types.Integer, ForeignKey('course.id'))
)
class Course(Model):
__tablename__ = "course"
id = Column(sqla_types.Integer, primary_key=True)
name = Column(sqla_types.String(255), nullable=False)
# These are for better model form testing
cost = Column(sqla_types.Numeric(5, 2), nullable=False)
description = Column(sqla_types.Text, nullable=False)
level = Column(sqla_types.Enum('Primary', 'Secondary'))
has_prereqs = Column(sqla_types.Boolean, nullable=False)
started = Column(sqla_types.DateTime, nullable=False)
grade = Column(AnotherInteger, nullable=False)
class School(Model):
__tablename__ = "school"
id = Column(sqla_types.Integer, primary_key=True)
name = Column(sqla_types.String(255), nullable=False)
class Student(Model):
__tablename__ = "student"
id = Column(sqla_types.Integer, primary_key=True)
full_name = Column(sqla_types.String(255), nullable=False, unique=True)
dob = Column(sqla_types.Date(), nullable=True)
current_school_id = Column(sqla_types.Integer, ForeignKey(School.id), nullable=False)
current_school = relationship(School, backref=backref('students'))
courses = relationship(
"Course",
secondary=student_course,
backref=backref("students", lazy='dynamic')
)
self.School = School
self.Student = Student
self.Course = Course
engine = create_engine('sqlite:///:memory:', echo=False)
Session = sessionmaker(bind=engine)
self.metadata = Model.metadata
self.metadata.create_all(bind=engine)
self.sess = Session()
0
Example 131
Project: taxtastic Source File: pare_database.py
def main():
parser = argparse.ArgumentParser(description="""Generate SQL statements to
create a smaller version of a taxonomy database, keeping only
lineages associated with a set of tax_ids. Use as:
python devtools/pare_database.py -k testfiles/keep_taxids.txt | sqlite3 test_output/ncbi_master.db""")
parser.add_argument('-d', help='source database',
default='ncbi_taxonomy.db')
parser.add_argument('-k', '--keep-taxids', help="""file containing
whitespace-delimited list of taxids to keep""",
type=argparse.FileType('r'))
parser.add_argument('-o', '--output-file',
default=sys.stdout, type=argparse.FileType('w'))
a = parser.parse_args()
with a.keep_taxids as fp:
lines = (i for i in fp if not i.startswith('#'))
keep_ids = frozenset(i for i in re.split(
r'[\s\n]+', ''.join(lines), flags=re.MULTILINE) if i)
e = sqlalchemy.create_engine('sqlite:///{0}'.format(a.d))
t = taxonomy.Taxonomy(e, ncbi.ranks)
# Get lineages
lineages = (lineage(t, i) for i in keep_ids)
keep_taxa = frozenset(i[1] for l in lineages for i in l)
with tempfile.NamedTemporaryFile() as tf:
with open(a.d) as fp:
print >> sys.stderr, "copying db"
shutil.copyfileobj(fp, tf, 20 << 10)
tf.flush()
con = sqlite3.connect(tf.name)
cur = con.cursor()
ic = in_clause(keep_taxa)
print >> sys.stderr, "pruning nodes"
cur.execute("DELETE FROM nodes WHERE tax_id NOT IN {0}".format(
ic), list(keep_taxa))
print >> sys.stderr, "pruning names"
cur.execute("DELETE FROM names WHERE tax_id NOT IN {0}".format(
ic), list(keep_taxa))
print >> sys.stderr, "pruning merged"
cur.execute("DELETE FROM merged WHERE old_tax_id NOT IN {0} AND new_tax_id NOT IN {0}".format(ic),
list(keep_taxa) + list(keep_taxa))
with a.output_file as fp:
for stmt in con.iterdump():
print >> fp, stmt
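The dump at the end relies on sqlite3's iterdump(), which yields the entire database as SQL text, ready to be piped into another sqlite3 process. In isolation:
import sqlite3

con = sqlite3.connect(':memory:')
con.execute("CREATE TABLE nodes (tax_id TEXT)")
con.execute("INSERT INTO nodes VALUES ('9606')")
for stmt in con.iterdump():
    print(stmt)  # BEGIN TRANSACTION; CREATE TABLE ...; INSERT ...; COMMIT;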
0
Example 132
Project: rdbms-subsetter Source File: subsetter.py
def __init__(self, sqla_conn, args, schemas=[None]):
self.args = args
self.sqla_conn = sqla_conn
self.schemas = schemas
self.engine = sa.create_engine(sqla_conn)
self.inspector = Inspector(bind=self.engine)
self.conn = self.engine.connect()
self.tables = OrderedDict()
for schema in self.schemas:
meta = sa.MetaData(bind=self.engine) # excised schema=schema to prevent errors
meta.reflect(schema=schema)
for tbl in meta.sorted_tables:
if args.tables and not _table_matches_any_pattern(tbl.schema, tbl.name, self.args.tables):
continue
if _table_matches_any_pattern(tbl.schema, tbl.name, self.args.exclude_tables):
continue
tbl.db = self
# TODO: Replace all these monkeypatches with an instance assignment
tbl.find_n_rows = types.MethodType(_find_n_rows, tbl)
tbl.random_row_func = types.MethodType(_random_row_func, tbl)
tbl.fks = self.inspector.get_foreign_keys(tbl.name, schema=tbl.schema)
tbl.pk = self.inspector.get_primary_keys(tbl.name, schema=tbl.schema)
if not tbl.pk:
tbl.pk = [d['name'] for d in self.inspector.get_columns(tbl.name)]
tbl.filtered_by = types.MethodType(_filtered_by, tbl)
tbl.by_pk = types.MethodType(_by_pk, tbl)
tbl.pk_val = types.MethodType(_pk_val, tbl)
tbl.child_fks = []
estimate_rows = not _table_matches_any_pattern(tbl.schema, tbl.name, self.args.full_tables)
tbl.find_n_rows(estimate=estimate_rows)
self.tables[(tbl.schema, tbl.name)] = tbl
all_constraints = args.config.get('constraints', {})
for ((tbl_schema, tbl_name), tbl) in self.tables.items():
qualified = "{}.{}".format(tbl_schema, tbl_name)
if qualified in all_constraints:
constraints = all_constraints[qualified]
else:
constraints=all_constraints.get(tbl_name, [])
tbl.constraints = constraints
for fk in (tbl.fks + constraints):
fk['constrained_schema'] = tbl_schema
fk['constrained_table'] = tbl_name # TODO: check against constrained_table
self.tables[(fk['referred_schema'], fk['referred_table'])].child_fks.append(fk)
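The constructor above is built on SQLAlchemy reflection: MetaData.reflect() pulls table definitions from the live database, and the Inspector reports keys per table. The core of it, as a standalone sketch (SQLAlchemy 1.x style):
import sqlalchemy as sa
from sqlalchemy.engine.reflection import Inspector

engine = sa.create_engine('sqlite:///:memory:')
engine.execute("CREATE TABLE parent (id INTEGER PRIMARY KEY)")
engine.execute("CREATE TABLE child (id INTEGER PRIMARY KEY, "
               "parent_id INTEGER REFERENCES parent(id))")
meta = sa.MetaData(bind=engine)
meta.reflect()
inspector = Inspector.from_engine(engine)
for tbl in meta.sorted_tables:  # dependency order: parent before child
    fks = inspector.get_foreign_keys(tbl.name)
    print(tbl.name, [fk['referred_table'] for fk in fks])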
0
Example 133
def main():
working_dir = tempfile.mkdtemp(prefix='repotojson-')
output = {}
dbfiles = find_primary_sqlite(upload_dir)
for dbfile_xz in dbfiles:
cur_fold = os.path.join(*dbfile_xz.rsplit(os.sep, 2)[:-2])
dbfile = os.path.join(working_dir, 'primary_db_.sqlite')
decompress_primary_db(dbfile_xz, dbfile)
if not os.path.isfile(dbfile):
print '%s was incorrectly decompressed -- ignoring' % dbfile
continue
db_url = 'sqlite:///%s' % dbfile
db_session = sessionmaker(bind=create_engine(db_url))
session = db_session()
cnt = 0
new = 0
for pkg in session.query(Package).all():
if pkg.basename in output:
if pkg.arch not in output[pkg.basename]['arch']:
output[pkg.basename]['arch'].append(pkg.arch)
else:
new += 1
output[pkg.basename] = {
'arch': [pkg.arch],
'version': pkg.version,
'summary': pkg.summary,
}
cnt += 1
outputfile = os.getcwd() + '/yumapi/repo.json'
with open(outputfile, 'w') as stream:
stream.write(json.dumps(output, sort_keys=True, indent=2, separators=(',', ': ')))
shutil.rmtree(working_dir)
0
Example 134
Project: osm4routing Source File: osm4routing.py
def parse(file, output="csv", edges_name="edges", nodes_name="nodes", spatial=False):
if not os.path.exists(file):
raise IOError("File {0} not found".format(file))
if output != "csv":
metadata = MetaData()
if(spatial):
node_geom = Point(2)
edge_geom = LineString(2)
else:
node_geom = String
edge_geom = String
nodes_table = Table(nodes_name, metadata,
Column('id', Integer, primary_key = True),
Column('original_id', BigInteger, index = True),
Column('elevation', Integer),
Column('lon', Float, index = True),
Column('lat', Float, index = True),
Column('the_geom', node_geom)
)
edges_table = Table(edges_name, metadata,
Column('id', Integer, primary_key=True),
Column('source', BigInteger, index=True),
Column('target', BigInteger, index=True),
Column('length', Float),
Column('car', SmallInteger),
Column('car_rev', SmallInteger),
Column('bike', SmallInteger),
Column('bike_rev', SmallInteger),
Column('foot', SmallInteger),
Column('the_geom', edge_geom)
)
GeometryDDL(nodes_table)
GeometryDDL(edges_table)
engine = create_engine(output)
metadata.drop_all(engine)
metadata.create_all(engine)
mapper(Node, nodes_table)
mapper(Edge, edges_table)
Session = sessionmaker(bind=engine)
session = Session()
extension = os.path.splitext(file)[1]
if extension == '.bz2':
print "Recognized as bzip2 file"
f = bz2.BZ2File(file, 'r')
elif extension == '.gz':
print "Recognized as gzip2 file"
f = gzip.open(file, 'r')
else:
print "Supposing OSM/xml file"
filesize = os.path.getsize(file)
f = open(file, 'r')
buffer_size = 4096
p = Parser()
eof = False
print "Step 1: reading file {0}".format(file)
read = 0
while not eof:
s = f.read(buffer_size)
eof = len(s) != buffer_size
p.read(s, len(s), eof)
read += len(s)
print " Read {0} nodes and {1} ways\n".format(p.get_osm_nodes(), p.get_osm_ways())
print "Step 2: saving the nodes"
nodes = p.get_nodes()
if output == "csv":
n = open(nodes_name + '.csv', 'w')
n.write('"node_id","longitude","latitude"\n')
count = 0
for node in nodes:
if output == "csv":
n.write("{0},{1},{2}\n".format(node.id, node.lon, node.lat))
else:
session.add(Node(node.id, node.lon, node.lat, spatial=spatial))
count += 1
if output == "csv":
n.close()
else:
session.commit()
print " Wrote {0} nodes\n".format(count)
print "Step 3: saving the edges"
edges = p.get_edges()
count = 0
if output == "csv":
e = open(edges_name + '.csv', 'w')
e.write('"edge_id","source","target","length","car","car reverse","bike","bike reverse","foot","WKT"\n')
for edge in edges:
if output == "csv":
e.write('{0},{1},{2},{3},{4},{5},{6},{7},{8},LINESTRING({9})\n'.format(edge.edge_id, edge.source, edge.target, edge.length, edge.car, edge.car_d, edge.bike, edge.bike_d, edge.foot, edge.geom))
else:
session.add(Edge(edge.edge_id, edge.source, edge.target, edge.length, edge.car, edge.car_d, edge.bike, edge.bike_d, edge.foot, edge.geom, spatial=spatial))
count += 1
if output == "csv":
e.close()
else:
session.commit()
print " Wrote {0} edges\n".format(count)
print "Happy routing :) and please give some feedback!"
0
Example 135
Project: AndroidAppTranslate Source File: Import.py
def importFile(self, window, path, languageTo):
self.deleteAllData()
file = open(path)
gs = goslate.Goslate()
doc = minidom.parse(file)
data = doc.getElementsByTagName('string')
engine = create_engine('sqlite:///data.sqlite',connect_args={'check_same_thread':True}, poolclass=StaticPool)
session = sessionmaker()
session.configure(bind=engine)
s = session()
for d in range(len(data)):
window.lbProcess.setText('Processing element %d/%d'%(d, len(data)))
attr_name = data[d].attributes['name'].value
text = data[d].firstChild.nodeValue
translation = gs.translate(text, languageTo)
print "---------------------------"
print "Len : ", len(data)
print "Attribute Name : ", attr_name
print "Text : ", text
print "Translated: ", translation
print "Language to: ", languageTo
try:
ins = DataString(name=attr_name,
value=text,
translation=translation,
language_translation=languageTo)
s.add(ins)
s.commit()
except Exception as e:
window.lbProcess.setText(str(e))
print e
s.rollback()
window.lbProcess.setText('Finished!')
window.btExportFile.setEnabled(True)
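The engine line above combines two SQLite-specific knobs: check_same_thread=False lets the DBAPI connection cross threads, and StaticPool pins a single shared connection, which also keeps an in-memory database alive across checkouts. Isolated sketch (SQLAlchemy 1.x style):
from sqlalchemy import create_engine
from sqlalchemy.pool import StaticPool

engine = create_engine('sqlite://',
                       connect_args={'check_same_thread': False},
                       poolclass=StaticPool)
engine.execute("CREATE TABLE t (x INTEGER)")
engine.execute("INSERT INTO t VALUES (1)")
# a later checkout reuses the same connection, so the data is still there
assert engine.execute("SELECT x FROM t").scalar() == 1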
0
Example 136
Project: crate-python Source File: tests.py
def setUpCrateLayerAndSqlAlchemy(test):
setUpWithCrateLayer(test)
import sqlalchemy as sa
from sqlalchemy.ext.declarative import declarative_base
from sqlalchemy.orm import sessionmaker
with connect(crate_host) as conn:
cursor = conn.cursor()
cursor.execute("""create table characters (
id string primary key,
name string,
quote string,
details object,
more_details array(object),
INDEX name_ft using fulltext(name) with (analyzer = 'english'),
INDEX quote_ft using fulltext(quote) with (analyzer = 'english')
) """)
engine = sa.create_engine('crate://{0}'.format(crate_host))
Base = declarative_base()
class Location(Base):
__tablename__ = 'locations'
name = sa.Column(sa.String, primary_key=True)
kind = sa.Column(sa.String)
date = sa.Column(sa.Date, default=date.today)
datetime = sa.Column(sa.DateTime, default=datetime.utcnow)
nullable_datetime = sa.Column(sa.DateTime)
nullable_date = sa.Column(sa.Date)
flag = sa.Column(sa.Boolean)
details = sa.Column(ObjectArray)
Session = sessionmaker(engine)
session = Session()
test.globs['sa'] = sa
test.globs['engine'] = engine
test.globs['Location'] = Location
test.globs['Base'] = Base
test.globs['session'] = session
test.globs['Session'] = Session
test.globs['CrateDialect'] = CrateDialect
0
Example 137
Project: labmanager Source File: db.py
def init_db(drop = False, silence = False):
# import all modules here that might define models so that
# they will be registered properly on the metadata. Otherwise
# you will have to import them first before calling init_db()
from labmanager.models import LabManagerUser
engine = create_engine(app.config['SQLALCHEMY_DATABASE_URI'])
if drop:
print "Droping Database"
Base.metadata.drop_all(bind=engine)
meta = MetaData(engine)
meta.reflect()
if 'alembic_version' in meta.tables:
meta.drop_all()
alembic_config = create_alembic_config(silence)
alembic_config.set_section_option('logger_alembic', 'level', 'WARN')
with app.app_context():
db.create_all()
command.stamp(alembic_config, "head")
with app.app_context():
password = unicode(hashlib.new('sha', 'password').hexdigest())
admin_user = LabManagerUser(u'admin', u'Administrator', password)
db.session.add(admin_user)
db.session.commit()
0
Example 138
Project: python-pyhsm Source File: db_export.py
def main():
parser = argparse.ArgumentParser(description='Import AEADs into the database')
parser.add_argument('path', help='filesystem path of where to put AEADs')
parser.add_argument('dburl', help='connection URL for the database')
args = parser.parse_args()
#set the path
path = args.path
if not os.path.isdir(path):
print("\nInvalid path, make sure it exists.\n")
return 2
#mysql url
databaseUrl = args.dburl
try:
#check database connection
engine = sqlalchemy.create_engine(databaseUrl)
#SQLAlchemy voodoo
metadata = sqlalchemy.MetaData()
aeadobj = sqlalchemy.Table('aead_table', metadata, autoload=True, autoload_with=engine)
connection = engine.connect()
except:
print("FATAL: Database connect failure")
return 1
aead = None
nonce = None
key_handle = None
aead = pyhsm.aead_cmd.YHSM_GeneratedAEAD(nonce, key_handle, aead)
#get data from the database
result = connection.execute("SELECT * from aead_table")
#cycle through results
for row in result:
#read values row by row
aead.data = row['aead']
publicId = row['public_id']
aead.key_handle = row['keyhandle']
aead.nonce = row['nonce']
aead_dir = os.path.join(path, str(hex(aead.key_handle)).rstrip('L'), insert_slash(publicId))
#sanitize path
aead_dir = os.path.normpath(aead_dir)
#create path
mkdir_p(aead_dir)
#write the file in the path
pyhsm.aead_cmd.YHSM_GeneratedAEAD.save(aead, os.path.join(aead_dir, publicId))
#close connection
connection.close()
0
Example 139
Project: ghini.desktop Source File: db.py
def open(uri, verify=True, show_error_dialogs=False):
"""
Open a database connection. This function sets bauble.db.engine to
the opened engine.
Return bauble.db.engine on success; otherwise return None and leave
bauble.db.engine unchanged.
:param uri: The URI of the database to open.
:type uri: str
:param verify: Whether the database we connect to should be verified
as one created by Ghini. This flag is used mostly for
testing.
:type verify: bool
:param show_error_dialogs: A flag to indicate whether the error
dialogs should be displayed. This is used mostly for testing.
:type show_error_dialogs: bool
"""
# ** WARNING: this can print your passwd
logger.debug('db.open(%s)' % uri)
from sqlalchemy.orm import sessionmaker
global engine
new_engine = None
from sqlalchemy.pool import NullPool, SingletonThreadPool
from bauble.prefs import testing
poolclass = testing and SingletonThreadPool or NullPool
poolclass = SingletonThreadPool
new_engine = sa.create_engine(uri, echo=SQLALCHEMY_DEBUG,
implicit_returning=False,
poolclass=poolclass, pool_size=20)
# TODO: there is a problem here: the code may cause an exception, but we
# immediately lose the 'new_engine', which should know about the
# encoding used in the exception string.
try:
new_engine.connect().close() # make sure we can connect
except Exception:
logger.info('about to forget about encoding of exception text.')
raise
def _bind():
"""bind metadata to engine and create sessionmaker """
global Session, engine
engine = new_engine
metadata.bind = engine # make engine implicit for metadata
def temp():
import inspect
logger.debug('creating session %s' % str(inspect.stack()[1]))
return sessionmaker(bind=engine, autoflush=False)()
Session = sessionmaker(bind=engine, autoflush=False)
Session = temp
if new_engine is not None and not verify:
_bind()
return engine
elif new_engine is None:
return None
verify_connection(new_engine, show_error_dialogs)
_bind()
return engine
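The pool selection above is the interesting bit: NullPool opens a fresh connection per checkout and closes it on release (predictable for a desktop app), while SingletonThreadPool keeps one connection per thread (what SQLite tests need). Distilled:
import sqlalchemy as sa
from sqlalchemy.pool import NullPool, SingletonThreadPool

testing = False  # stand-in for bauble.prefs.testing
poolclass = SingletonThreadPool if testing else NullPool
new_engine = sa.create_engine('sqlite:///:memory:', poolclass=poolclass)
new_engine.connect().close()  # make sure we can connect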
0
Example 140
Project: caravel Source File: sql_lab.py
@celery_app.task(bind=True)
def get_sql_results(self, query_id, return_results=True, store_results=False):
"""Executes the sql query returns the results."""
if not self.request.called_directly:
engine = sqlalchemy.create_engine(
app.config.get('SQLALCHEMY_DATABASE_URI'), poolclass=NullPool)
session_class = sessionmaker()
session_class.configure(bind=engine)
session = session_class()
else:
session = db.session()
session.commit() # HACK
query = session.query(models.Query).filter_by(id=query_id).one()
database = query.database
executed_sql = query.sql.strip().strip(';')
db_engine_spec = database.db_engine_spec
def handle_error(msg):
"""Local method handling error while processing the SQL"""
query.error_message = msg
query.status = QueryStatus.FAILED
query.tmp_table_name = None
session.commit()
raise Exception(query.error_message)
# Limit enforced only for retrieving the data, not for the CTA queries.
is_select = is_query_select(executed_sql)
if not is_select and not database.allow_dml:
handle_error(
"Only `SELECT` statements are allowed against this database")
if query.select_as_cta:
if not is_select:
handle_error(
"Only `SELECT` statements can be used with the CREATE TABLE "
"feature.")
if not query.tmp_table_name:
start_dttm = datetime.fromtimestamp(query.start_time)
query.tmp_table_name = 'tmp_{}_table_{}'.format(
query.user_id,
start_dttm.strftime('%Y_%m_%d_%H_%M_%S'))
executed_sql = create_table_as(
executed_sql, query.tmp_table_name, database.force_ctas_schema)
query.select_as_cta_used = True
elif (
query.limit and is_select and
db_engine_spec.limit_method == LimitMethod.WRAP_SQL):
executed_sql = database.wrap_sql_limit(executed_sql, query.limit)
query.limit_used = True
engine = database.get_sqla_engine(schema=query.schema)
try:
template_processor = get_template_processor(
database=database, query=query)
executed_sql = template_processor.process_template(executed_sql)
except Exception as e:
logging.exception(e)
msg = "Template rendering failed: " + utils.error_msg_from_exception(e)
handle_error(msg)
try:
query.executed_sql = executed_sql
logging.info("Running query: \n{}".format(executed_sql))
result_proxy = engine.execute(query.executed_sql, schema=query.schema)
except Exception as e:
logging.exception(e)
handle_error(utils.error_msg_from_exception(e))
cursor = result_proxy.cursor
query.status = QueryStatus.RUNNING
session.flush()
db_engine_spec.handle_cursor(cursor, query, session)
cdf = None
if result_proxy.cursor:
column_names = [col[0] for col in result_proxy.cursor.description]
if db_engine_spec.limit_method == LimitMethod.FETCH_MANY:
data = result_proxy.fetchmany(query.limit)
else:
data = result_proxy.fetchall()
cdf = dataframe.CaravelDataFrame(
pd.DataFrame(data, columns=column_names))
query.rows = result_proxy.rowcount
query.progress = 100
query.status = QueryStatus.SUCCESS
if query.rows == -1 and cdf:
# Presto doesn't provide result_proxy.row_count
query.rows = cdf.size
if query.select_as_cta:
query.select_sql = '{}'.format(database.select_star(
query.tmp_table_name, limit=query.limit))
query.end_time = utils.now_as_float()
session.flush()
payload = {
'query_id': query.id,
'status': query.status,
'data': [],
}
payload['data'] = cdf.data if cdf else []
payload['columns'] = cdf.columns_dict if cdf else []
payload['query'] = query.to_dict()
payload = json.dumps(payload, default=utils.json_iso_dttm_ser)
if store_results and results_backend:
key = '{}'.format(uuid.uuid4())
logging.info("Storing results in results backend, key: {}".format(key))
results_backend.set(key, zlib.compress(payload))
query.results_key = key
session.flush()
session.commit()
if return_results:
return payload
0
Example 141
Project: petl Source File: test_db_server.py
def test_postgresql():
import psycopg2
import psycopg2.extensions
psycopg2.extensions.register_type(psycopg2.extensions.UNICODE)
psycopg2.extensions.register_type(psycopg2.extensions.UNICODEARRAY)
# assume database already created
dbapi_connection = psycopg2.connect(
'host=%s dbname=%s user=%s password=%s'
% (host, database, user, password)
)
# exercise using a dbapi_connection
_setup_postgresql(dbapi_connection)
_test_dbo(dbapi_connection)
# exercise using a dbapi_cursor
_setup_postgresql(dbapi_connection)
dbapi_cursor = dbapi_connection.cursor()
_test_dbo(dbapi_cursor)
dbapi_cursor.close()
# exercise sqlalchemy dbapi_connection
_setup_postgresql(dbapi_connection)
from sqlalchemy import create_engine
sqlalchemy_engine = create_engine('postgresql+psycopg2://%s:%s@%s/%s' %
(user, password, host, database))
sqlalchemy_connection = sqlalchemy_engine.connect()
_test_dbo(sqlalchemy_connection)
sqlalchemy_connection.close()
# exercise sqlalchemy session
_setup_postgresql(dbapi_connection)
from sqlalchemy.orm import sessionmaker
Session = sessionmaker(bind=sqlalchemy_engine)
sqlalchemy_session = Session()
_test_dbo(sqlalchemy_session)
sqlalchemy_session.close()
# other exercises
_test_dbo(dbapi_connection,
lambda: dbapi_connection.cursor(name='arbitrary'))
_test_with_schema(dbapi_connection, 'public')
_test_unicode(dbapi_connection)
0
Example 142
Project: deblaze Source File: test_sqlalchemy.py
def setUp(self):
# Create DB and map objects
self.metadata = MetaData()
self.engine = create_engine('sqlite:///:memory:', echo=False)
Session = sessionmaker(bind=self.engine)
self.session = Session()
self.tables = {}
self.tables['users'] = Table('users', self.metadata,
Column('id', Integer, primary_key=True),
Column('name', String(64)))
self.tables['addresses'] = Table('addresses', self.metadata,
Column('id', Integer, primary_key=True),
Column('user_id', Integer, ForeignKey('users.id')),
Column('email_address', String(128)))
self.tables['lazy_loaded'] = Table('lazy_loaded', self.metadata,
Column('id', Integer, primary_key=True),
Column('user_id', Integer, ForeignKey('users.id')))
self.tables['another_lazy_loaded'] = Table('another_lazy_loaded', self.metadata,
Column('id', Integer, primary_key=True),
Column('user_id', Integer, ForeignKey('users.id')))
self.mappers = {}
self.mappers['user'] = mapper(User, self.tables['users'], properties={
'addresses': relation(Address, backref='user', lazy=False),
'lazy_loaded': relation(LazyLoaded, lazy=True),
'another_lazy_loaded': relation(AnotherLazyLoaded, lazy=True)
})
self.mappers['addresses'] = mapper(Address, self.tables['addresses'])
self.mappers['lazy_loaded'] = mapper(LazyLoaded,
self.tables['lazy_loaded'])
self.mappers['another_lazy_loaded'] = mapper(AnotherLazyLoaded,
self.tables['another_lazy_loaded'])
self.metadata.create_all(self.engine)
pyamf.register_class(User, 'server.User')
pyamf.register_class(Address, 'server.Address')
pyamf.register_class(LazyLoaded, 'server.LazyLoaded')
0
Example 143
def create_app(env_name='prod'):
# rendering is handled by MessageMiddleware
routes = PUBLIC_ROUTES + JUROR_ROUTES + ADMIN_ROUTES + META_ROUTES
print '== creating WSGI app using env name: %s' % (env_name,)
config_file_name = 'config.%s.yaml' % env_name
config_file_path = os.path.join(PROJ_PATH, config_file_name)
print '== loading config file: %s' % (config_file_path,)
config = yaml.load(open(config_file_path))
logging.basicConfig()
logging.getLogger('sqlalchemy.engine').setLevel(logging.WARN)
engine = create_engine(config.get('db_url', DEFAULT_DB_URL), pool_recycle=60)
session_type = sessionmaker()
session_type.configure(bind=engine)
tmp_rdb_session = session_type()
schema_errors = get_schema_errors(Base, tmp_rdb_session)
if not schema_errors:
print '++ schema validated ok'
else:
for err in schema_errors:
print '!! ', err
print '!! recreate the database and update the code, then try again'
sys.exit(2)
# create maintainer users if they don't exist yet
musers = bootstrap_maintainers(tmp_rdb_session)
if musers:
print '++ created new users for maintainers: %r' % (musers,)
tmp_rdb_session.commit()
engine.echo = config.get('db_echo', False)
if not config.get('db_disable_ping'):
event.listen(engine, 'engine_connect', ping_connection)
cookie_secret = config['cookie_secret']
assert cookie_secret
root_path = config.get('root_path', '/')
scm_secure = env_name == 'prod' # https only in prod
scm_mw = SignedCookieMiddleware(secret_key=cookie_secret,
path=root_path,
http_only=True,
secure=scm_secure)
if not scm_secure:
scm_mw.data_expiry = NEVER
def get_engine():
engine = create_engine(config.get('db_url', DEFAULT_DB_URL), pool_recycle=60)
engine.echo = config.get('db_echo', False)
if not config.get('db_disable_ping'):
event.listen(engine, 'engine_connect', ping_connection)
return engine
blank_session_type = sessionmaker()
middlewares = [MessageMiddleware(),
TimingMiddleware(),
scm_mw,
DBSessionMiddleware(blank_session_type, get_engine),
UserMiddleware()]
api_log_path = config.get('api_log_path')
if api_log_path:
log_mw = LoggingMiddleware(api_log_path)
middlewares.insert(0, log_mw)
# hack
config['api_exc_log_path'] = getattr(log_mw, 'exc_log_path', None)
replay_log_path = config.get('replay_log_path')
if replay_log_path:
replay_log_mw = ReplayLogMiddleware(replay_log_path)
middlewares.append(replay_log_mw)
consumer_token = ConsumerToken(config['oauth_consumer_token'],
config['oauth_secret_token'])
resources = {'config': config,
'consumer_token': consumer_token,
'root_path': root_path}
app = Application(routes, resources, middlewares=middlewares)
static_app = StaticApplication(STATIC_PATH)
root_app = Application([StaticFileRoute('/', STATIC_PATH + '/index.html'),
('/', static_app),
('/', app),
('/meta', MetaApplication())])
return root_app
0
Example 144
Project: time_trial Source File: time_trial.py
def __init__(self):
################
# Data Init
################
logger = logging.getLogger('time_trial')
logger.setLevel(logging.DEBUG)
ch = logging.StreamHandler()
formatter = logging.Formatter('%(asctime)s - %(name)s - %(levelname)s - %(message)s')
ch.setFormatter(formatter)
logger.addHandler(ch)
logger.info("Time Trial starting up...")
#####################
# Connect Database
#####################
self.engine = create_engine('sqlite:///data.sqlite', connect_args={'check_same_thread':False}, echo=False)
Base.metadata.create_all(self.engine, checkfirst=True)
Session = sessionmaker(bind=self.engine)
self.session = Session()
################################
# Launching Background Threads
################################
# thread to check for results
self.processor = RqResultsProcessor()
ThreadSession = sessionmaker(bind=self.engine)
self.processor.session = ThreadSession()
self.processor.start()
################
# G U I
################
QtGui.QMainWindow.__init__(self)
self.setAttribute(QtCore.Qt.WA_DeleteOnClose)
self.setWindowTitle("Time Trial")
self.setWindowIcon(QtGui.QIcon('images/clock.png'))
################
# M E N U
################
self.file_menu = QtGui.QMenu('&File', self)
self.file_menu.addAction('&Quit', self.fileQuit,
QtCore.Qt.CTRL + QtCore.Qt.Key_Q)
self.menuBar().addMenu(self.file_menu)
self.help_menu = QtGui.QMenu('&Help', self)
self.menuBar().addSeparator()
self.menuBar().addMenu(self.help_menu)
self.help_menu.addAction('&About', self.about)
################
# Main Window
################
self.main_widget = QtGui.QWidget(self)
self.main_layout = QtGui.QGridLayout(self.main_widget)
self.tab_widget = QtGui.QTabWidget(self.main_widget)
self.main_layout.addWidget(self.tab_widget,0,0)
self.experiment_tab = ExperimentsTab(session = self.session, parent = self.main_widget)
QtCore.QObject.connect(self.experiment_tab,
QtCore.SIGNAL("shorter_trial_set(PyQt_PyObject)") ,
self.shorter_trial_set)
QtCore.QObject.connect(self.experiment_tab,
QtCore.SIGNAL("longer_trial_set(PyQt_PyObject)") ,
self.longer_trial_set)
self.tab_widget.addTab(self.experiment_tab, "Experiments")
self.feasibility_tab = FeasibilityTab(session = self.session, parent = self.main_widget)
self.tab_widget.addTab(self.feasibility_tab, "Feasibility Analysis")
#
# self.plotter_tab = PlotterTab(self.main_widget)
# self.tab_widget.addTab(self.plotter_tab, "Plotter")
#
self.settings_tab = SettingsTab(self.main_widget, session = self.session)
self.tab_widget.addTab(self.settings_tab, "Settings")
self.main_widget.setFocus()
self.setCentralWidget(self.main_widget)
0
Example 145
def mock_engine(dialect_name=None):
"""Provides a mocking engine based on the current testing.db.
This is normally used to test DDL generation flow as emitted
by an Engine.
It should not be used in other cases, as assert_compile() and
assert_sql_execution() are much better choices with fewer
moving parts.
"""
from sqlalchemy import create_engine
if not dialect_name:
dialect_name = config.db.name
buffer = []
def executor(sql, *a, **kw):
buffer.append(sql)
def assert_sql(stmts):
recv = [re.sub(r'[\n\t]', '', str(s)) for s in buffer]
assert recv == stmts, recv
def print_sql():
d = engine.dialect
return "\n".join(
str(s.compile(dialect=d))
for s in engine.mock
)
engine = create_engine(dialect_name + '://',
strategy='mock', executor=executor)
assert not hasattr(engine, 'mock')
engine.mock = buffer
engine.assert_sql = assert_sql
engine.print_sql = print_sql
return engine
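Usage sketch for mock_engine(): DDL emitted against it lands in engine.mock instead of a database, so generated SQL can be inspected or asserted. (Note the 'mock' strategy was removed in SQLAlchemy 1.4; sqlalchemy.create_mock_engine is the modern replacement.)
from sqlalchemy import Column, Integer, MetaData, Table

meta = MetaData()
Table('t', meta, Column('id', Integer, primary_key=True))
engine = mock_engine('sqlite')  # the helper defined above
meta.create_all(engine)         # DDL is buffered, nothing is executed
print(engine.print_sql())       # shows the captured CREATE TABLE statement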
0
Example 146
Project: iktomi Source File: sqla.py
def test_schema_several_meta(self):
Base1 = declarative_base()
class A1(Base1):
__tablename__ = 'A'
id = Column(Integer, primary_key=True)
class B1(Base1):
__tablename__ = 'B'
id = Column(Integer, primary_key=True)
Base2 = declarative_base()
class A2(Base2):
__tablename__ = 'A'
id = Column(Integer, primary_key=True)
engine1 = create_engine('sqlite://')
engine2 = create_engine('sqlite://')
binds = {
A1.__table__: engine1,
B1.__table__: engine1,
A2.__table__: engine2,
}
meta = {
'm1': Base1.metadata,
'm2': Base2.metadata,
'm3': MetaData(),
}
cli = Sqla(orm.sessionmaker(binds=binds), metadata=meta)
output = StringIO()
with mock.patch.object(sys, 'stdout', output):
cli.command_schema()
created = self._created_tables(output.getvalue())
self.assertEqual(len(created), 3)
self.assertEqual(created.count('A'), 2)
self.assertEqual(created.count('B'), 1)
output = StringIO()
with mock.patch.object(sys, 'stdout', output):
cli.command_schema('m1')
created = self._created_tables(output.getvalue())
self.assertEqual(len(created), 2)
self.assertEqual(created.count('A'), 1)
self.assertEqual(created.count('B'), 1)
output = StringIO()
with mock.patch.object(sys, 'stdout', output):
cli.command_schema('m1.B')
created = self._created_tables(output.getvalue())
self.assertEqual(created, ['B'])
output = StringIO()
with mock.patch.object(sys, 'stdout', output):
try:
cli.command_schema('m2.B')
except SystemExit:
pass
created = self._created_tables(output.getvalue())
self.assertEqual(created, [])
output = StringIO()
with mock.patch.object(sys, 'stdout', output):
try:
cli.command_schema('m3.A')
except SystemExit:
pass
created = self._created_tables(output.getvalue())
self.assertEqual(created, [])
0
Example 147
Project: DockCI Source File: gunicorn.py
@MANAGER.option("-w", "--workers",
help="Number of gunicorn workers to start",
default=10)
@MANAGER.option("--bind",
help="Interface, and port to listen on",
default="127.0.0.1:5000")
@MANAGER.option("--debug",
help="Turn debug mode on for Flask, and stops app preload for "
"auto reloading",
default=False, action='store_true')
@MANAGER.option("--db-migrate",
default=False, action='store_true',
help="Migrate the DB on load")
@MANAGER.option("--timeout",
default=0, type=int,
help="Time to wait for the resources to be available")
@MANAGER.option("--collect-static",
default=False, action='store_true',
help="Collect static dependencies before start")
def run(**kwargs):
""" Run the Gunicorn worker """
kwargs['reload'] = kwargs['debug']
kwargs['preload'] = not kwargs['debug']
APP.debug = kwargs['debug']
if kwargs['collect_static']:
subprocess.check_call('./_deps_collectstatic.sh',
cwd=project_root().strpath)
if kwargs['timeout'] != 0:
start_time = time.time()
db_engine = create_engine(
get_db_uri(),
connect_args=dict(connect_timeout=2),
)
db_conn = None
mq_conn = None
while time.time() - start_time < kwargs['timeout']:
try:
if db_conn is None or db_conn.closed:
db_conn = db_engine.connect()
except OperationalError:
time.sleep(2)
continue
try:
if mq_conn is None:
mq_conn = get_pika_conn()
except AMQPError:
time.sleep(2)
continue
break
if db_conn is None or db_conn.closed:
stderr.write("Timed out waiting for the database to be ready\n")
return 1
if mq_conn is None:
stderr.write("Timed out waiting for RabbitMQ to be ready\n")
return 1
# Setup the exchange
channel = mq_conn.channel()
channel.exchange_declare(exchange='dockci.job', type='topic')
channel.exchange_declare(exchange='dockci.queue', type='topic')
channel.queue_declare(queue='dockci.agent')
channel.queue_bind(exchange='dockci.queue',
queue='dockci.agent',
routing_key='*')
mq_conn.close()
if kwargs['db_migrate']:
db_upgrade( # doesn't return anything
local(__file__).dirpath().join('../../alembic').strpath
)
else:
# Migrate will init the app for us
app_init()
GunicornWrapper(kwargs).run()
0
Example 148
def execute(self):
    connection_url = "mysql://%(USER)s:%(PASS)s@%(HOST)s/%(NAME)s" % {
        "USER": self.user,
        "PASS": self.password,
        "HOST": "localhost",
        "NAME": self.db}
    self.engine = create_engine(connection_url, convert_unicode=True, echo=False)
    Session = sessionmaker(bind=self.engine)
    session = Session()
    try:
        connection = dbi.connect(host="localhost", user=self.user, passwd=self.password, db=self.db)
        try:
            cursor = connection.cursor()
            try:
                print "Checking %s..." % type(self).__name__,
                if self.CHECK_FORMAT == self.SQL_FORMAT:
                    check_arg = cursor
                else:
                    check_arg = session
                try:
                    applicable = self.check(check_arg)
                except:
                    print "[ERROR CHECKING]"
                    print
                    traceback.print_exc()
                    print
                    applicable = False
                if applicable:
                    print "[NOT APPLIED]"
                    print "Applying %s..." % type(self).__name__,
                    try:
                        if self.APPLY_FORMAT == self.SQL_FORMAT:
                            self.apply(cursor)
                            connection.commit()
                        else:
                            self.apply(session)
                            session.commit()
                    except:
                        print "[FAIL]"
                        print
                        traceback.print_exc()
                        print
                    else:
                        print "[OK]"
                else:
                    print "[already applied]"
            finally:
                cursor.close()
        finally:
            connection.close()
    finally:
        session.close()
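One caveat with this example: interpolating credentials into the URL string breaks as soon as the password contains characters such as @ or /, and convert_unicode is a legacy flag that was removed in SQLAlchemy 1.4. A sketch of the same connection built with URL.create instead (SQLAlchemy 1.4+; the credential values are hypothetical):

from sqlalchemy import create_engine
from sqlalchemy.engine import URL  # SQLAlchemy 1.4+

# URL.create quotes each component, so special characters in the
# password cannot corrupt the URL the way "%s" interpolation can.
url = URL.create(
    drivername='mysql',
    username='app_user',
    password='s3cr3t@/pass',  # hypothetical; note the '@' and '/'
    host='localhost',
    database='appdb',
)
engine = create_engine(url, echo=False)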
0
Example 149
Project: bitex Source File: main.py
def __init__(self, opt, instance_name):
    self.options = opt
    self.instance_name = instance_name
    handlers = [
        (r'/', WebSocketHandler),
        (r'/get_deposit(.*)', DepositHandler),
        (r'/_webhook/verification_form', VerificationWebHookHandler),
        (r'/_webhook/deposit_receipt', DepositReceiptWebHookHandler),
        (r'/process_deposit(.*)', ProcessDepositHandler),
        (r'/api/(?P<version>[^\/]+)/(?P<symbol>[^\/]+)/(?P<resource>[^\/]+)', RestApiHandler)
    ]
    settings = dict(
        cookie_secret='cookie_secret'
    )
    tornado.web.Application.__init__(self, handlers, **settings)
    self.allowed_origins = json.loads(self.options.allowed_origins)
    self.allow_all_origins = self.allowed_origins[0] == '*'
    input_log_file_handler = logging.handlers.TimedRotatingFileHandler(
        os.path.expanduser(self.options.gateway_log), when='MIDNIGHT')
    formatter = logging.Formatter('%(asctime)s - %(message)s')
    input_log_file_handler.setFormatter(formatter)
    self.replay_logger = logging.getLogger(self.instance_name)
    self.replay_logger.setLevel(logging.INFO)
    self.replay_logger.addHandler(input_log_file_handler)
    ch = logging.StreamHandler(sys.stdout)
    ch.setLevel(logging.DEBUG)
    ch.setFormatter(logging.Formatter('%(asctime)s - %(name)s - %(levelname)s - %(message)s'))
    self.replay_logger.addHandler(ch)
    self.replay_logger.info('START')
    self.log_start_data()
    self.update_tor_nodes()
    from models import Base, db_bootstrap
    db_engine = self.options.sqlalchemy_engine + ':///' + \
        os.path.expanduser(self.options.sqlalchemy_connection_string)
    engine = create_engine(db_engine, echo=self.options.db_echo)
    Base.metadata.create_all(engine)
    self.db_session = scoped_session(sessionmaker(bind=engine))
    db_bootstrap(self.db_session)
    self.zmq_context = zmq.Context()
    self.trade_in_socket = self.zmq_context.socket(zmq.REQ)
    self.trade_in_socket.connect(self.options.trade_in)
    self.application_trade_client = TradeClient(
        self.zmq_context,
        self.trade_in_socket)
    self.application_trade_client.connect()
    self.security_list = self.application_trade_client.getSecurityList()
    self.md_subscriber = {}
    for instrument in self.security_list.get('Instruments'):
        symbol = instrument['Symbol']
        self.md_subscriber[symbol] = MarketDataSubscriber.get(symbol, self)
        self.md_subscriber[symbol].subscribe(
            self.zmq_context,
            self.options.trade_pub,
            self.application_trade_client)
    last_trade_id = Trade.get_last_trade_id(self.db_session)
    trade_list = self.application_trade_client.getLastTrades(last_trade_id)
    for trade in trade_list:
        msg = dict()
        msg['id'] = trade[0]
        msg['symbol'] = trade[1]
        msg['side'] = trade[2]
        msg['price'] = trade[3]
        msg['size'] = trade[4]
        msg['buyer_id'] = trade[5]
        msg['seller_id'] = trade[6]
        msg['buyer_username'] = trade[7]
        msg['seller_username'] = trade[8]
        msg['created'] = trade[9]
        msg['trade_date'] = trade[9][:10]
        msg['trade_time'] = trade[9][11:]
        msg['order_id'] = trade[10]
        msg['counter_order_id'] = trade[11]
        Trade.create(self.db_session, msg)
    all_trades = Trade.get_all_trades(self.db_session)
    for t in all_trades:
        trade_info = dict()
        trade_info['price'] = t.price
        trade_info['size'] = t.size
        trade_info['trade_date'] = t.created.strftime('%Y-%m-%d')
        trade_info['trade_time'] = t.created.strftime('%H:%M:%S')
        self.md_subscriber[t.symbol].inst_status.push_trade(trade_info)
    for symbol, subscriber in self.md_subscriber.iteritems():
        subscriber.ready()
    self.connections = {}
    self.heart_beat_timer = tornado.ioloop.PeriodicCallback(
        self.send_heartbeat_to_trade,
        30000)
    self.heart_beat_timer.start()
    self.update_tor_nodes_timer = tornado.ioloop.PeriodicCallback(
        self.update_tor_nodes,
        3600000)
    self.update_tor_nodes_timer.start()
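Stripped of the Tornado and ZeroMQ wiring, the persistence setup in this example is a common three-step recipe: build an engine, emit the schema with Base.metadata.create_all, and hand callers a scoped_session. A minimal sketch of just that recipe, assuming SQLAlchemy 1.4+ and a hypothetical Trade model:

from sqlalchemy import Column, Integer, String, create_engine
from sqlalchemy.orm import declarative_base, scoped_session, sessionmaker

Base = declarative_base()

class Trade(Base):                      # hypothetical stand-in model
    __tablename__ = 'trade'
    id = Column(Integer, primary_key=True)
    symbol = Column(String(12))

engine = create_engine('sqlite:///:memory:', echo=False)
Base.metadata.create_all(engine)        # emits CREATE TABLE for all models
db_session = scoped_session(sessionmaker(bind=engine))

db_session.add(Trade(symbol='BTCUSD'))  # the proxy dispatches to a
db_session.commit()                     # thread-local Session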
0
Example 150
Project: anitya Source File: __init__.py
def init(db_url, alembic_ini=None, debug=False, create=False):
    """ Create the tables in the database using the information from the
    url obtained.
    :arg db_url, URL used to connect to the database. The URL contains
        information with regards to the database engine, the host to
        connect to, the user and password and the database name.
        ie: <engine>://<user>:<password>@<host>/<dbname>
    :kwarg alembic_ini, path to the alembic ini file. This is necessary
        to be able to use alembic correctly, but not for the unit-tests.
    :kwarg debug, a boolean specifying whether we should have the verbose
        output of sqlalchemy or not.
    :return a session that can be used to query the database.
    """
    engine = create_engine(db_url, echo=debug)
    if create:
        anitya.lib.model.BASE.metadata.create_all(engine)
    # Source: http://docs.sqlalchemy.org/en/latest/dialects/sqlite.html
    # see section 'sqlite-foreign-keys'
    if db_url.startswith('sqlite:'):
        def _fk_pragma_on_connect(dbapi_con, con_record):
            dbapi_con.execute("PRAGMA foreign_keys=ON")
        sa.event.listen(engine, 'connect', _fk_pragma_on_connect)
    if alembic_ini is not None:  # pragma: no cover
        # then, load the Alembic configuration and generate the
        # version table, "stamping" it with the most recent rev:
        from alembic.config import Config
        from alembic import command
        alembic_cfg = Config(alembic_ini)
        command.stamp(alembic_cfg, "head")
    scopedsession = scoped_session(sessionmaker(bind=engine))
    if create:
        anitya.lib.plugins.load_plugins(scopedsession)
    return scopedsession
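The SQLite branch here matters because SQLite ships with foreign-key enforcement turned off; the 'connect' event re-enables it on every new DBAPI connection the pool creates. A small sketch that verifies the listener takes effect, using the decorator form sa.event.listens_for, which is equivalent to the sa.event.listen call above (SQLAlchemy 1.4+ text() usage):

import sqlalchemy as sa
from sqlalchemy import create_engine

engine = create_engine('sqlite:///:memory:')

@sa.event.listens_for(engine, 'connect')
def _fk_pragma_on_connect(dbapi_con, con_record):
    # Runs once per new DBAPI connection checked out of the pool.
    dbapi_con.execute('PRAGMA foreign_keys=ON')

with engine.connect() as conn:
    # PRAGMA foreign_keys returns 1 when enforcement is on.
    assert conn.execute(sa.text('PRAGMA foreign_keys')).scalar() == 1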