sqlalchemy.engine.reflection.Inspector.from_engine

Here are examples of the Python API sqlalchemy.engine.reflection.Inspector.from_engine, taken from open source projects. Each example shows how a real project obtains an Inspector from an Engine or Connection and uses it to reflect schema information such as tables, columns, indexes, and constraints.
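
As a quick orientation before the project examples, here is a minimal sketch of the pattern these snippets share: obtain an Engine (or Connection), build an Inspector from it, and query schema metadata. The connection URL and the specific reflection calls below are illustrative placeholders, not taken from any of the projects; note also that SQLAlchemy 1.4 and later deprecate Inspector.from_engine() in favor of sqlalchemy.inspect().

from sqlalchemy import create_engine, inspect
from sqlalchemy.engine.reflection import Inspector

# Placeholder URL; substitute your own database.
engine = create_engine('sqlite:///example.db')

# Classic form used throughout the examples below
# (deprecated since SQLAlchemy 1.4 in favor of sqlalchemy.inspect(engine)).
inspector = Inspector.from_engine(engine)

# Typical reflection calls seen in these examples.
for table_name in inspector.get_table_names():
    columns = [col['name'] for col in inspector.get_columns(table_name)]
    foreign_keys = inspector.get_foreign_keys(table_name)
    indexes = inspector.get_indexes(table_name)
    print(table_name, columns, len(foreign_keys), len(indexes))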

96 Examples

Example 1

Project: group-based-policy Source File: 2f3834ea746b_ep_redirect_action.py
def upgrade(active_plugins=None, options=None):
    inspector = reflection.Inspector.from_engine(op.get_bind())
    fk_name = [fk['name'] for fk in
               inspector.get_foreign_keys('gpm_ptgs_servicechain_mapping')
               if 'consumer_ptg_id' in fk['constrained_columns']]
    op.drop_constraint(fk_name[0], 'gpm_ptgs_servicechain_mapping',
                       'foreignkey')

Example 2

Project: tacker Source File: __init__.py
@contextlib.contextmanager
def modify_foreign_keys_constraint(table_names):
    inspector = reflection.Inspector.from_engine(op.get_bind())
    try:
        for table in table_names:
            fk_constraints = inspector.get_foreign_keys(table)
            drop_foreign_key_constraint(table, fk_constraints)
        yield
    finally:
        for table in table_names:
            fk_constraints = inspector.get_foreign_keys(table)
            create_foreign_key_constraint(table, fk_constraints)

Example 3

Project: alembic Source File: test_mysql.py
Function: compare_default
    def _compare_default(
        self,
        t1, t2, col,
        rendered
    ):
        t1.create(self.bind)
        insp = Inspector.from_engine(self.bind)
        cols = insp.get_columns(t1.name)
        ctx = self.autogen_context['context']
        return ctx.impl.compare_server_default(
            None,
            col,
            rendered,
            cols[0]['default'])

Example 4

Project: geoalchemy2 Source File: test_functional.py
    def test_LakeIndex(self):
        """ Make sure the Lake table has an index on the geom column """

        inspector = reflection.Inspector.from_engine(engine)
        indices = inspector.get_indexes(Lake.__tablename__, schema='gis')
        assert len(indices) == 1

        index = indices[0]
        assert not index.get('unique')
        assert index.get('column_names') == [u'geom']

Example 5

Project: sqlalchemy Source File: test_reflection.py
    @testing.provide_metadata
    def test_inspect_enums(self):
        enum_type = postgresql.ENUM(
            'cat', 'dog', 'rat', name='pet', metadata=self.metadata)
        enum_type.create(testing.db)
        inspector = reflection.Inspector.from_engine(testing.db)
        eq_(inspector.get_enums(), [
            {
                'visible': True,
                'labels': ['cat', 'dog', 'rat'],
                'name': 'pet',
                'schema': 'public'
            }])

Example 6

Project: group-based-policy Source File: 1fadeb573886_es_ip_allocation_fix_constraints.py
def upgrade(active_plugins=None, options=None):
    inspector = reflection.Inspector.from_engine(op.get_bind())
    unique_constraints = inspector.get_unique_constraints(
        'gp_es_to_l3p_associations')
    for constraint in unique_constraints:
        if constraint['column_names'] == ['external_segment_id',
                                          'allocated_address']:
            with migration.remove_fks_from_table(
                    'gp_es_to_l3p_associations'):
                op.drop_constraint(constraint['name'],
                                   'gp_es_to_l3p_associations',
                                   'unique')
            break

Example 7

Project: neutron-lbaas Source File: 4b4dc6d5d843_rename_tenant_to_project.py
def get_inspector():
    """Reuse inspector"""

    global _INSPECTOR

    if _INSPECTOR:
        return _INSPECTOR

    else:
        bind = op.get_bind()
        _INSPECTOR = sa.engine.reflection.Inspector.from_engine(bind)

    return _INSPECTOR

Example 8

Project: heat Source File: test_migrations.py
    def _check_073(self, engine, data):
        # check if column still exists and is not nullable.
        self.assertColumnIsNotNullable(engine, 'resource_data', 'resource_id')
        # Ensure that only one foreign key exists and is created as expected.
        inspector = sqlalchemy.engine.reflection.Inspector.from_engine(engine)
        resource_data_fkeys = inspector.get_foreign_keys('resource_data')
        self.assertEqual(1, len(resource_data_fkeys))
        fk = resource_data_fkeys[0]
        self.assertEqual('fk_resource_id', fk['name'])
        self.assertEqual(['resource_id'], fk['constrained_columns'])
        self.assertEqual('resource', fk['referred_table'])
        self.assertEqual(['id'], fk['referred_columns'])

Example 9

Project: vmware-nsx Source File: d49ac91b560e_nsxv_lbaasv2_shared_pools.py
def change_pk_constraint(table_name, columns):
    inspector = reflection.Inspector.from_engine(op.get_bind())
    pk_constraint = inspector.get_pk_constraint(table_name)
    op.drop_constraint(pk_constraint.get('name'), table_name, type_='primary')
    op.drop_column(table_name, 'listener_id')
    op.create_primary_key(None, table_name, columns)

Example 10

Project: alembic Source File: test_autogen_diffs.py
    def test_dont_barf_on_already_reflected(self):
        from sqlalchemy.util import OrderedSet
        inspector = Inspector.from_engine(self.bind)
        uo = ops.UpgradeOps(ops=[])
        autogenerate.compare._compare_tables(
            OrderedSet([(None, 'extra'), (None, 'user')]),
            OrderedSet(), inspector,
            MetaData(), uo, self.autogen_context
        )
        eq_(
            [(rec[0], rec[1].name) for rec in uo.as_diffs()],
            [('remove_table', 'extra'), ('remove_table', 'user')]
        )

Example 11

Project: Flask-AppBuilder Source File: manager.py
Function: create_db
    def create_db(self):
        try:
            engine = self.get_session.get_bind(mapper=None, clause=None)
            inspector = Inspector.from_engine(engine)
            if 'ab_user' not in inspector.get_table_names():
                log.info(c.LOGMSG_INF_SEC_NO_DB)
                Base.metadata.create_all(engine)
                log.info(c.LOGMSG_INF_SEC_ADD_DB)
            super(SecurityManager, self).create_db()
        except Exception as e:
            log.error(c.LOGMSG_ERR_SEC_CREATE_DB.format(str(e)))
            exit(1)

Example 12

Project: alembic Source File: test_batch.py
Function: test_change_type_boolean_to_int
    def test_change_type_boolean_to_int(self):
        self._boolean_fixture()
        with self.op.batch_alter_table(
                "hasbool"
        ) as batch_op:
            batch_op.alter_column(
                'x', type_=Integer, existing_type=Boolean(
                    create_constraint=True, name='ck1'))
        insp = Inspector.from_engine(config.db)

        eq_(
            [c['type']._type_affinity for c in insp.get_columns('hasbool')
             if c['name'] == 'x'],
            [Integer]
        )

Example 13

Project: alembic Source File: test_batch.py
    def test_ix_existing(self):
        self._table_w_index_fixture()

        with self.op.batch_alter_table("t_w_ix") as batch_op:
            batch_op.alter_column('data', type_=String(30))
            batch_op.create_index("ix_data", ["data"])

        insp = Inspector.from_engine(config.db)
        eq_(
            set(
                (ix['name'], tuple(ix['column_names'])) for ix in
                insp.get_indexes('t_w_ix')
            ),
            set([
                ('ix_data', ('data',)),
                ('ix_thing', ('thing', ))
            ])
        )

Example 14

Project: fuel-web Source File: utils.py
def is_buffer_table_exist(connection):
    """Performs check if buffer table exists in the database.

    :returns: True if table exists, False otherwise
    """
    inspector = Inspector.from_engine(connection)
    return (extensions_migration_buffer_table_name in
            inspector.get_table_names())

Example 15

Project: ODM2 Source File: cvload.py
Function: get_schema
def _getSchema(engine):
    from sqlalchemy.engine import reflection

    insp = reflection.Inspector.from_engine(engine)

    for name in insp.get_schema_names():
        if name.lower() == 'odm2':
            return name
    else:
        return insp.default_schema_name

Example 16

Project: networking-bgpvpn Source File: 23ce05e0a19f_rename_tenant_to_project.py
def get_inspector():
    """Reuse inspector"""

    global _INSPECTOR

    if _INSPECTOR:
        return _INSPECTOR

    else:
        bind = op.get_bind()
        _INSPECTOR = reflection.Inspector.from_engine(bind)

    return _INSPECTOR

Example 17

Project: networking-sfc Source File: 06382790fb2c_fix_foreign_constraints.py
def upgrade():
    inspector = reflection.Inspector.from_engine(op.get_bind())

    fks_to_cascade = {
        'sfc_flow_classifier_l7_parameters': 'classifier_id',
        'sfc_chain_group_associations': 'portchain_id',
        'sfc_port_chain_parameters': 'chain_id',
        'sfc_service_function_params': 'pair_id',
        'sfc_chain_classifier_associations': 'portchain_id'
    }

    for table, column in fks_to_cascade.items():
        fk_constraints = inspector.get_foreign_keys(table)
        for fk in fk_constraints:
            if column in fk['constrained_columns']:
                fk['options']['ondelete'] = 'CASCADE'
                migration.remove_foreign_keys(table, fk_constraints)
                migration.create_foreign_keys(table, fk_constraints)

Example 18

Project: neutron-vpnaas Source File: b6a2519ab7dc_rename_tenant_to_project.py
def get_inspector():
    """Reuse inspector"""

    global _INSPECTOR

    if _INSPECTOR:
        return _INSPECTOR

    bind = op.get_bind()
    _INSPECTOR = reflection.Inspector.from_engine(bind)
    return _INSPECTOR

Example 19

Project: piecash Source File: test_book.py
    def test_create_without_FK(self):
        # create without FK
        b = create_book(uri_conn=db_sqlite_uri, keep_foreign_keys=False, overwrite=True)
        b.session.close()

        insp = Inspector.from_engine(create_engine(db_sqlite_uri))
        for tbl in insp.get_table_names():
            fk = insp.get_foreign_keys(tbl)
            assert len(fk) == 0

Example 20

Project: tacker Source File: __init__.py
def modify_foreign_keys_constraint_with_col_change(
        table_name, old_local_col, new_local_col, existing_type,
        nullable=False):
    inspector = reflection.Inspector.from_engine(op.get_bind())
    fk_constraints = inspector.get_foreign_keys(table_name)
    for fk in fk_constraints:
        if old_local_col in fk['constrained_columns']:
            drop_foreign_key_constraint(table_name, [fk])
    op.alter_column(table_name, old_local_col,
                    new_column_name=new_local_col,
                    existing_type=existing_type,
                    nullable=nullable)
    fk_constraints = inspector.get_foreign_keys(table_name)
    for fk in fk_constraints:
        for i in range(len(fk['constrained_columns'])):
            if old_local_col == fk['constrained_columns'][i]:
                fk['constrained_columns'][i] = new_local_col
                create_foreign_key_constraint(table_name, [fk])
                break

Example 21

Project: alembic Source File: test_postgresql.py
Function: compare_default
    def _compare_default(
        self,
        t1, t2, col,
        rendered
    ):
        t1.create(self.bind, checkfirst=True)
        insp = Inspector.from_engine(self.bind)
        cols = insp.get_columns(t1.name)
        ctx = self.autogen_context.migration_context

        return ctx.impl.compare_server_default(
            None,
            col,
            rendered,
            cols[0]['default'])

Example 22

Project: sqlalchemy Source File: test_reflection.py
    @testing.provide_metadata
    def test_inspect_enums_schema(self):
        conn = testing.db.connect()
        enum_type = postgresql.ENUM(
            'sad', 'ok', 'happy', name='mood',
            schema='test_schema',
            metadata=self.metadata)
        enum_type.create(conn)
        inspector = reflection.Inspector.from_engine(conn.engine)
        eq_(
            inspector.get_enums('test_schema'), [{
                'visible': False,
                'name': 'mood',
                'schema': 'test_schema',
                'labels': ['sad', 'ok', 'happy']
            }])

Example 23

Project: rack Source File: utils.py
Function: index_exists
def _index_exists(migrate_engine, table_name, index_name):
    inspector = reflection.Inspector.from_engine(migrate_engine)
    indexes = inspector.get_indexes(table_name)
    index_names = [index['name'] for index in indexes]

    return index_name in index_names

Example 24

Project: piecash Source File: test_book.py
    def test_create_with_FK(self):
        # create and keep FK
        b = create_book(uri_conn=db_sqlite_uri, keep_foreign_keys=True, overwrite=True)
        b.session.close()

        insp = Inspector.from_engine(create_engine(db_sqlite_uri))
        fk_total = []
        for tbl in insp.get_table_names():
            fk_total.append(insp.get_foreign_keys(tbl))
        assert len(fk_total) == 25

Example 25

Project: luigi Source File: db_task_history.py
Function: upgrade_schema
def _upgrade_schema(engine):
    """
    Ensure the database schema is up to date with the codebase.

    :param engine: SQLAlchemy engine of the underlying database.
    """
    inspector = reflection.Inspector.from_engine(engine)
    conn = engine.connect()

    # Upgrade 1.  Add task_id column and index to tasks
    if 'task_id' not in [x['name'] for x in inspector.get_columns('tasks')]:
        logger.warn('Upgrading DbTaskHistory schema: Adding tasks.task_id')
        conn.execute('ALTER TABLE tasks ADD COLUMN task_id VARCHAR(200)')
        conn.execute('CREATE INDEX ix_task_id ON tasks (task_id)')

Example 26

Project: DIRAC Source File: InstalledComponentsDB.py
  def __initializeConnection( self, dbPath ):

    result = getDBParameters( dbPath )
    if not result[ 'OK' ]:
      raise Exception( 'Cannot get database parameters: %s' % result['Message'] )

    dbParameters = result[ 'Value' ]
    self.host = dbParameters[ 'Host' ]
    self.port = dbParameters[ 'Port' ]
    self.user = dbParameters[ 'User' ]
    self.password = dbParameters[ 'Password' ]
    self.dbName = dbParameters[ 'DBName' ]

    self.engine = create_engine( 'mysql://%s:%s@%s:%s/%s' % ( self.user, self.password, self.host, self.port, self.dbName ),
                                 pool_recycle = 3600, echo_pool = True)
    self.session = scoped_session( sessionmaker( bind = self.engine ) )
    self.inspector = Inspector.from_engine( self.engine )

Example 27

Project: pdfhook Source File: views.py
@blueprint.before_app_first_request
def make_sure_there_is_a_working_database(*args, **kwargs):
    if current_app.config.get('ENV') != 'dev':
        return
    inspector = Inspector.from_engine(db.engine)
    tables = inspector.get_table_names()
    required_tables = [models.PDFForm.__tablename__]
    if not (set(required_tables) < set(tables)):
        current_app.logger.warning(
            "database tables {} not found. Creating tables".format(required_tables))
        db.create_all()

Example 28

Project: pyramid_sqlalchemy Source File: test_testing.py
    def test_tables_exist(self):
        from sqlalchemy.engine.reflection import Inspector

        testcase = self.DatabaseTestCase()
        try:
            testcase.setUp()
            inspector = Inspector.from_engine(Session.bind)
            self.assertTrue('dummy' in inspector.get_table_names())
        finally:
            testcase.tearDown()

Example 29

Project: Flask-AppBuilder Source File: test_base.py
Function: test_model_creation
    def test_model_creation(self):
        """
            Test Model creation
        """
        from sqlalchemy.engine.reflection import Inspector

        engine = self.db.session.get_bind(mapper=None, clause=None)
        inspector = Inspector.from_engine(engine)
        # Check if tables exist
        ok_('model1' in inspector.get_table_names())
        ok_('model2' in inspector.get_table_names())

Example 30

Project: oslo.db Source File: utils.py
Function: index_exists
def index_exists(migrate_engine, table_name, index_name):
    """Check if given index exists.

    :param migrate_engine: sqlalchemy engine
    :param table_name:     name of the table
    :param index_name:     name of the index
    """
    inspector = reflection.Inspector.from_engine(migrate_engine)
    indexes = inspector.get_indexes(table_name)
    index_names = [index['name'] for index in indexes]
    return index_name in index_names

Example 31

Project: tap-as-a-service Source File: 4086b3cffc01_rename_tenant_to_project.py
def get_inspector():
    """Reuse inspector."""

    global _INSPECTOR

    if _INSPECTOR:
        return _INSPECTOR

    else:
        bind = op.get_bind()
        _INSPECTOR = reflection.Inspector.from_engine(bind)

    return _INSPECTOR

Example 32

Project: alembic Source File: test_batch.py
    def test_drop_col_schematype(self):
        self._boolean_fixture()
        with self.op.batch_alter_table(
                "hasbool"
        ) as batch_op:
            batch_op.drop_column('x')
        insp = Inspector.from_engine(config.db)

        assert 'x' not in (c['name'] for c in insp.get_columns('hasbool'))

Example 33

Project: cubes Source File: store.py
Function: init
    def __init__(self, engine, naming, metadata=None):
        """Creates an inspector that discovers tables in a database according
        to specified configuration and naming conventions."""
        self.engine = engine
        self.naming = naming
        self.metadata = metadata or MetaData(engine)

        self.inspector = reflection.Inspector.from_engine(engine)

Example 34

Project: tvb-framework Source File: model_manager.py
Function: reset_database
def reset_database():
    """
    Remove all tables in DB.
    """
    LOGGER.warning("Your Database tables will be deleted.")
    try:
        session = SA_SESSIONMAKER()
        LOGGER.debug("Delete connection initiated.")
        inspector = reflection.Inspector.from_engine(session.connection())
        for table in inspector.get_table_names():
            try:
                LOGGER.debug("Removing:" + table)
                session.execute(text("DROP TABLE \"%s\" CASCADE" % table))
            except Exception:
                try:
                    session.execute(text("DROP TABLE %s" % table))
                except Exception as excep1:
                    LOGGER.error("Could not drop table %s", table)
                    LOGGER.exception(excep1)
        session.commit()
        LOGGER.info("Database cleanup finished!")
    except Exception as excep:
        LOGGER.warning(excep)
    finally:
        session.close()

Example 35

Project: alembic Source File: test_batch.py
    def test_change_type_int_to_boolean(self):
        self._int_to_boolean_fixture()
        with self.op.batch_alter_table(
                "hasbool"
        ) as batch_op:
            batch_op.alter_column(
                'x', type_=Boolean(create_constraint=True, name='ck1'))
        insp = Inspector.from_engine(config.db)

        if exclusions.against(config, "sqlite"):
            eq_(
                [c['type']._type_affinity for
                 c in insp.get_columns('hasbool') if c['name'] == 'x'],
                [Boolean]
            )
        elif exclusions.against(config, "mysql"):
            eq_(
                [c['type']._type_affinity for
                 c in insp.get_columns('hasbool') if c['name'] == 'x'],
                [Integer]
            )

Example 36

Project: alembic Source File: test_batch.py
    def _test_fk_points_to_me(self, recreate):
        bar = Table(
            'bar', self.metadata,
            Column('id', Integer, primary_key=True),
            Column('foo_id', Integer, ForeignKey('foo.id')),
            mysql_engine='InnoDB'
        )
        bar.create(self.conn)
        self.conn.execute(bar.insert(), {'id': 1, 'foo_id': 3})

        with self.op.batch_alter_table("foo", recreate=recreate) as batch_op:
            batch_op.alter_column(
                'data', new_column_name='newdata', existing_type=String(50))

        insp = Inspector.from_engine(self.conn)
        eq_(
            [(key['referred_table'],
             key['referred_columns'], key['constrained_columns'])
             for key in insp.get_foreign_keys('bar')],
            [('foo', ['id'], ['foo_id'])]
        )

Example 37

Project: alembic Source File: test_batch.py
    def _test_selfref_fk(self, recreate):
        bar = Table(
            'bar', self.metadata,
            Column('id', Integer, primary_key=True),
            Column('bar_id', Integer, ForeignKey('bar.id')),
            Column('data', String(50)),
            mysql_engine='InnoDB'
        )
        bar.create(self.conn)
        self.conn.execute(bar.insert(), {'id': 1, 'data': 'x', 'bar_id': None})
        self.conn.execute(bar.insert(), {'id': 2, 'data': 'y', 'bar_id': 1})

        with self.op.batch_alter_table("bar", recreate=recreate) as batch_op:
            batch_op.alter_column(
                'data', new_column_name='newdata', existing_type=String(50))

        insp = Inspector.from_engine(self.conn)
        eq_(
            [(key['referred_table'],
             key['referred_columns'], key['constrained_columns'])
             for key in insp.get_foreign_keys('bar')],
            [('bar', ['id'], ['bar_id'])]
        )

Example 38

Project: alembic Source File: api.py
Function: inspector
    @util.memoized_property
    def inspector(self):
        return Inspector.from_engine(self.connection)

Example 39

Project: alembic Source File: test_batch.py
    def test_create_drop_index(self):
        insp = Inspector.from_engine(config.db)
        eq_(
            insp.get_indexes('foo'), []
        )

        with self.op.batch_alter_table("foo", recreate='always') as batch_op:
            batch_op.create_index(
                'ix_data', ['data'], unique=True)

        self._assert_data([
            {"id": 1, "data": "d1", "x": 5},
            {"id": 2, "data": "22", "x": 6},
            {"id": 3, "data": "8.5", "x": 7},
            {"id": 4, "data": "9.46", "x": 8},
            {"id": 5, "data": "d5", "x": 9}
        ])

        insp = Inspector.from_engine(config.db)
        eq_(
            [
                dict(unique=ix['unique'],
                     name=ix['name'],
                     column_names=ix['column_names'])
                for ix in insp.get_indexes('foo')
            ],
            [{'unique': True, 'name': 'ix_data', 'column_names': ['data']}]
        )

        with self.op.batch_alter_table("foo", recreate='always') as batch_op:
            batch_op.drop_index('ix_data')

        insp = Inspector.from_engine(config.db)
        eq_(
            insp.get_indexes('foo'), []
        )

Example 40

Project: frontera Source File: __init__.py
    @classmethod
    def db_worker(cls, manager):
        b = cls(manager)
        settings = manager.settings
        drop = settings.get('SQLALCHEMYBACKEND_DROP_ALL_TABLES')
        clear_content = settings.get('SQLALCHEMYBACKEND_CLEAR_CONTENT')
        inspector = Inspector.from_engine(b.engine)

        metadata_m = b.models['MetadataModel']
        queue_m = b.models['QueueModel']
        if drop:
            existing = inspector.get_table_names()
            if metadata_m.__table__.name in existing:
                metadata_m.__table__.drop(bind=b.engine)
            if queue_m.__table__.name in existing:
                queue_m.__table__.drop(bind=b.engine)
        metadata_m.__table__.create(bind=b.engine)
        queue_m.__table__.create(bind=b.engine)

        if clear_content:
            session = b.session_cls()
            session.execute(metadata_m.__table__.delete())
            session.execute(queue_m.__table__.delete())
            session.close()

        b._metadata = Metadata(b.session_cls, metadata_m,
                               settings.get('SQLALCHEMYBACKEND_CACHE_SIZE'))
        b._queue = Queue(b.session_cls, queue_m, settings.get('SPIDER_FEED_PARTITIONS'))
        return b

Example 41

Project: iktomi Source File: sqla.py
def drop_everything(engine):
    '''Dropping all tables and custom types (enums) using `engine`.
    Taken from http://www.sqlalchemy.org/trac/wiki/UsageRecipes/DropEverything

    This method is more robust than `metadata.drop_all(engine)` because when
    you change a table or a type name, `drop_all` does not consider the old one,
    so the DB ends up holding unused entities.'''
    conn = engine.connect()
    # the transaction only applies if the DB supports
    # transactional DDL, i.e. Postgresql, MS SQL Server
    trans = conn.begin()
    inspector = reflection.Inspector.from_engine(engine)
    metadata = MetaData()
    tbs = []
    all_fks = []
    types = []
    for table_name in inspector.get_table_names():
        fks = []
        for fk in inspector.get_foreign_keys(table_name):
            if not fk['name']:
                continue
            fks.append(ForeignKeyConstraint((), (), name=fk['name']))
        for col in inspector.get_columns(table_name):
            if isinstance(col['type'], SchemaType):
                types.append(col['type'])
        t = Table(table_name, metadata, *fks)
        tbs.append(t)
        all_fks.extend(fks)
    try:
        for fkc in all_fks:
            conn.execute(DropConstraint(fkc))
        for table in tbs:
            conn.execute(DropTable(table))
        for custom_type in types:
            custom_type.drop(conn)
        trans.commit()
    except: # pragma: no cover
        trans.rollback()
        raise

Example 42

Project: alembic Source File: compare.py
Function: produce_net_changes
def _produce_net_changes(autogen_context, upgrade_ops):

    connection = autogen_context.connection
    include_schemas = autogen_context.opts.get('include_schemas', False)

    inspector = Inspector.from_engine(connection)

    default_schema = connection.dialect.default_schema_name
    if include_schemas:
        schemas = set(inspector.get_schema_names())
        # replace default schema name with None
        schemas.discard("information_schema")
        # replace the "default" schema with None
        schemas.discard(default_schema)
        schemas.add(None)
    else:
        schemas = [None]

    comparators.dispatch("schema", autogen_context.dialect.name)(
        autogen_context, upgrade_ops, schemas
    )

Example 43

Project: alembic Source File: test_postgresql.py
Function: compare_default_roundtrip
    def _compare_default_roundtrip(
            self, type_, orig_default, alternate=None, diff_expected=None):
        diff_expected = diff_expected \
            if diff_expected is not None \
            else alternate is not None
        if alternate is None:
            alternate = orig_default

        t1 = Table("test", self.metadata,
                   Column("somecol", type_, server_default=orig_default))
        t2 = Table("test", MetaData(),
                   Column("somecol", type_, server_default=alternate))

        t1.create(self.bind)

        insp = Inspector.from_engine(self.bind)
        cols = insp.get_columns(t1.name)
        insp_col = Column("somecol", cols[0]['type'],
                          server_default=text(cols[0]['default']))
        op = ops.AlterColumnOp("test", "somecol")
        _compare_server_default(
            self.autogen_context, op,
            None, "test", "somecol", insp_col, t2.c.somecol)

        diffs = op.to_diff_tuple()
        eq_(bool(diffs), diff_expected)

Example 44

Project: alembic Source File: test_postgresql.py
    @provide_metadata
    def _expect_default(self, c_expected, col, seq=None):
        Table('t', self.metadata, col)

        if seq:
            seq._set_metadata(self.metadata)
        self.metadata.create_all(config.db)

        insp = Inspector.from_engine(config.db)

        uo = ops.UpgradeOps(ops=[])
        _compare_tables(
            set([(None, 't')]), set([]),
            insp, self.metadata, uo, self.autogen_context)
        diffs = uo.as_diffs()
        tab = diffs[0][1]

        eq_(_render_server_default_for_compare(
            tab.c.x.server_default, tab.c.x, self.autogen_context),
            c_expected)

        insp = Inspector.from_engine(config.db)
        uo = ops.UpgradeOps(ops=[])
        m2 = MetaData()
        Table('t', m2, Column('x', BigInteger()))
        _compare_tables(
            set([(None, 't')]), set([(None, 't')]),
            insp, m2, uo, self.autogen_context)
        diffs = uo.as_diffs()
        server_default = diffs[0][0][4]['existing_server_default']
        eq_(_render_server_default_for_compare(
            server_default, tab.c.x, self.autogen_context),
            c_expected)

Example 45

Project: alembic Source File: plugin_base.py
@post
def _prep_testing_database(options, file_config):
    from alembic.testing import config
    from alembic.testing.exclusions import against
    from sqlalchemy import schema
    from alembic import util

    if util.sqla_08:
        from sqlalchemy import inspect
    else:
        from sqlalchemy.engine.reflection import Inspector
        inspect = Inspector.from_engine

    if options.dropfirst:
        for cfg in config.Config.all_configs():
            e = cfg.db
            inspector = inspect(e)
            try:
                view_names = inspector.get_view_names()
            except NotImplementedError:
                pass
            else:
                for vname in view_names:
                    e.execute(schema._DropView(
                        schema.Table(vname, schema.MetaData())
                    ))

            if config.requirements.schemas.enabled_for_config(cfg):
                try:
                    view_names = inspector.get_view_names(
                        schema="test_schema")
                except NotImplementedError:
                    pass
                else:
                    for vname in view_names:
                        e.execute(schema._DropView(
                            schema.Table(vname, schema.MetaData(),
                                         schema="test_schema")
                        ))

            for tname in reversed(inspector.get_table_names(
                    order_by="foreign_key")):
                e.execute(schema.DropTable(
                    schema.Table(tname, schema.MetaData())
                ))

            if config.requirements.schemas.enabled_for_config(cfg):
                for tname in reversed(inspector.get_table_names(
                        order_by="foreign_key", schema="test_schema")):
                    e.execute(schema.DropTable(
                        schema.Table(tname, schema.MetaData(),
                                     schema="test_schema")
                    ))

            if against(cfg, "postgresql") and util.sqla_100:
                from sqlalchemy.dialects import postgresql
                for enum in inspector.get_enums("*"):
                    e.execute(postgresql.DropEnumType(
                        postgresql.ENUM(
                            name=enum['name'],
                            schema=enum['schema'])))

Example 46

Project: sqlalchemy Source File: test_reflection.py
    def test_inspect_view_definition(self):
        inspector = Inspector.from_engine(testing.db)
        view_def = inspector.get_view_definition("huge_named_view")
        eq_(view_def, self.view_str)

Example 47

Project: frontera Source File: __init__.py
    @classmethod
    def strategy_worker(cls, manager):
        b = cls(manager)
        settings = manager.settings
        drop_all_tables = settings.get('SQLALCHEMYBACKEND_DROP_ALL_TABLES')
        clear_content = settings.get('SQLALCHEMYBACKEND_CLEAR_CONTENT')
        model = b.models['StateModel']
        inspector = Inspector.from_engine(b.engine)

        if drop_all_tables:
            if model.__table__.name in inspector.get_table_names():
                model.__table__.drop(bind=b.engine)
        model.__table__.create(bind=b.engine)

        if clear_content:
            session = b.session_cls()
            session.execute(model.__table__.delete())
            session.close()
        b._states = States(b.session_cls, model,
                           settings.get('STATE_CACHE_SIZE_LIMIT'))
        return b

Example 48

Project: tvb-framework Source File: model_manager.py
def initialize_startup():
    """ Force DB tables create, in case no data is already found."""
    is_db_empty = False
    session = SA_SESSIONMAKER()
    inspector = reflection.Inspector.from_engine(session.connection())
    if len(inspector.get_table_names()) < 1:
        LOGGER.debug("Database access exception, maybe DB is empty")
        is_db_empty = True
    session.close()

    versions_repo = TvbProfile.current.db.DB_VERSIONING_REPO
    if is_db_empty:
        LOGGER.info("Initializing Database")
        if os.path.exists(versions_repo):
            shutil.rmtree(versions_repo)
        migratesqlapi.create(versions_repo, os.path.split(versions_repo)[1])
        _update_sql_scripts()
        migratesqlapi.version_control(TvbProfile.current.db.DB_URL, versions_repo,
                                      version=TvbProfile.current.version.DB_STRUCTURE_VERSION)
        session = SA_SESSIONMAKER()
        model.Base.metadata.create_all(bind=session.connection())
        session.commit()
        session.close()
        LOGGER.info("Database Default Tables created successfully!")
    else:
        _update_sql_scripts()
        migratesqlapi.upgrade(TvbProfile.current.db.DB_URL, versions_repo,
                              version=TvbProfile.current.version.DB_STRUCTURE_VERSION)
        LOGGER.info("Database already has some data, will not be re-created!")
    return is_db_empty

Example 49

Project: sqlalchemy Source File: test_reflection.py
    @testing.provide_metadata
    def test_inspect_enums_star(self):
        enum_type = postgresql.ENUM(
            'cat', 'dog', 'rat', name='pet', metadata=self.metadata)
        schema_enum_type = postgresql.ENUM(
            'sad', 'ok', 'happy', name='mood',
            schema='test_schema',
            metadata=self.metadata)
        enum_type.create(testing.db)
        schema_enum_type.create(testing.db)
        inspector = reflection.Inspector.from_engine(testing.db)

        eq_(inspector.get_enums(), [
            {
                'visible': True,
                'labels': ['cat', 'dog', 'rat'],
                'name': 'pet',
                'schema': 'public'
            }])

        eq_(inspector.get_enums('*'), [
            {
                'visible': True,
                'labels': ['cat', 'dog', 'rat'],
                'name': 'pet',
                'schema': 'public'
            },
            {
                'visible': False,
                'name': 'mood',
                'schema': 'test_schema',
                'labels': ['sad', 'ok', 'happy']
            }])

Example 50

Project: gamification-engine Source File: 2351a64b05ef_added_cascades.py
def upgrade():
    ### commands auto generated by Alembic - please adjust! ###
    from sqlalchemy.engine.reflection import Inspector
    insp = Inspector.from_engine(op.get_bind())
    tables = insp.get_table_names()

    for table in tables:
        fks = insp.get_foreign_keys(table)

        for fk in fks:
            op.execute("ALTER TABLE " + table + " DROP CONSTRAINT " + fk["name"])

    op.create_foreign_key(None, 'achievements', 'achievementcategories', ['achievementcategory_id'], ['id'], ondelete="SET NULL")
    op.create_foreign_key(None, 'achievements_properties', 'translationvariables', ['value_translation_id'], ['id'], ondelete="RESTRICT")
    op.create_foreign_key(None, 'achievements_properties', 'properties', ['property_id'], ['id'], ondelete="CASCADE")
    op.create_foreign_key(None, 'achievements_properties', 'achievements', ['achievement_id'], ['id'], ondelete="CASCADE")
    op.create_foreign_key(None, 'achievements_rewards', 'achievements', ['achievement_id'], ['id'], ondelete="CASCADE")
    op.create_foreign_key(None, 'achievements_rewards', 'translationvariables', ['value_translation_id'], ['id'], ondelete="RESTRICT")
    op.create_foreign_key(None, 'achievements_rewards', 'rewards', ['reward_id'], ['id'], ondelete="CASCADE")
    op.create_foreign_key(None, 'achievements_users', 'users', ['user_id'], ['id'], ondelete="CASCADE")
    op.create_foreign_key(None, 'achievements_users', 'achievements', ['achievement_id'], ['id'], ondelete="CASCADE")
    op.create_foreign_key(None, 'denials', 'achievements', ['from_id'], ['id'], ondelete="CASCADE")
    op.create_foreign_key(None, 'denials', 'achievements', ['to_id'], ['id'], ondelete="CASCADE")
    op.create_foreign_key(None, 'goal_evaluation_cache', 'goals', ['goal_id'], ['id'], ondelete="CASCADE")
    op.create_foreign_key(None, 'goal_evaluation_cache', 'users', ['user_id'], ['id'], ondelete="CASCADE")
    op.create_foreign_key(None, 'goals', 'translationvariables', ['name_translation_id'], ['id'], ondelete="RESTRICT")
    op.create_foreign_key(None, 'goals', 'achievements', ['achievement_id'], ['id'], ondelete="CASCADE")
    op.create_foreign_key(None, 'requirements', 'achievements', ['to_id'], ['id'], ondelete="CASCADE")
    op.create_foreign_key(None, 'requirements', 'achievements', ['from_id'], ['id'], ondelete="CASCADE")
    op.create_foreign_key(None, 'translations', 'languages', ['language_id'], ['id'], ondelete="CASCADE")
    op.create_foreign_key(None, 'translations', 'translationvariables', ['translationvariable_id'], ['id'], ondelete="CASCADE")
    op.create_foreign_key(None, 'users_groups', 'users', ['user_id'], ['id'], ondelete="CASCADE")
    op.create_foreign_key(None, 'users_groups', 'groups', ['group_id'], ['id'], ondelete="CASCADE")
    op.create_foreign_key(None, 'users_users', 'users', ['to_id'], ['id'], ondelete="CASCADE")
    op.create_foreign_key(None, 'users_users', 'users', ['from_id'], ['id'], ondelete="CASCADE")
    op.create_foreign_key(None, 'values', 'users', ['user_id'], ['id'], ondelete="CASCADE")
    op.create_foreign_key(None, 'values', 'variables', ['variable_id'], ['id'], ondelete="CASCADE")