sqlalchemy.schema.ForeignKeyConstraint

Here are examples of the Python API `sqlalchemy.schema.ForeignKeyConstraint`, taken from open source projects. By voting up, you can indicate which examples are most useful and appropriate.

17 Examples

Example 1

Project: iktomi Source File: sqla.py
def drop_everything(engine):
    '''Dropping all tables and custom types (enums) using `engine`.
    Taken from http://www.sqlalchemy.org/trac/wiki/UsageRecipes/DropEverything

    This method is more robust than `metadata.drop_all(engine)` because when
    you change a table or a type name, `drop_all` does not consider the old
    one.  Thus, the DB would keep some unused entities.'''
    conn = engine.connect()
    # the transaction only applies if the DB supports
    # transactional DDL, i.e. Postgresql, MS SQL Server
    trans = conn.begin()
    inspector = reflection.Inspector.from_engine(engine)
    metadata = MetaData()
    tbs = []
    all_fks = []
    types = []
    for table_name in inspector.get_table_names():
        fks = []
        for fk in inspector.get_foreign_keys(table_name):
            # unnamed constraints cannot be dropped by name; skip them
            if not fk['name']:
                continue
            fks.append(ForeignKeyConstraint((), (), name=fk['name']))
        for col in inspector.get_columns(table_name):
            # remember custom column types (e.g. Postgres enums)
            # so they can be dropped after their tables
            if isinstance(col['type'], SchemaType):
                types.append(col['type'])
        t = Table(table_name, metadata, *fks)
        tbs.append(t)
        all_fks.extend(fks)
    try:
        # drop FK constraints first so tables can be dropped in any order
        for fkc in all_fks:
            conn.execute(DropConstraint(fkc))
        for table in tbs:
            conn.execute(DropTable(table))
        for custom_type in types:
            custom_type.drop(conn)
        trans.commit()
    except: # pragma: no cover
        trans.rollback()
        raise
    finally:
        # return the checked-out connection to the pool in every case
        conn.close()

Example 2

Project: open-event-orga-server Source File: drop_db.py
def db_drop_everything(db):
    """Drop every table and every named foreign-key constraint in ``db``.

    From http://www.sqlalchemy.org/trac/wiki/UsageRecipes/DropEverything
    """
    conn = db.engine.connect()

    # the transaction only applies if the DB supports
    # transactional DDL, i.e. Postgresql, MS SQL Server
    trans = conn.begin()

    inspector = reflection.Inspector.from_engine(db.engine)

    # gather all data first before dropping anything.
    # some DBs lock after things have been dropped in
    # a transaction.
    metadata = MetaData()

    tbs = []
    all_fks = []

    for table_name in inspector.get_table_names():
        fks = []
        for fk in inspector.get_foreign_keys(table_name):
            # unnamed constraints cannot be dropped by name; skip them
            if not fk['name']:
                continue
            fks.append(
                ForeignKeyConstraint((), (), name=fk['name'])
            )
        t = Table(table_name, metadata, *fks)
        tbs.append(t)
        all_fks.extend(fks)

    try:
        # drop FK constraints first so tables can go in any order
        for fkc in all_fks:
            conn.execute(DropConstraint(fkc))

        for table in tbs:
            conn.execute(DropTable(table))

        trans.commit()
    except Exception:
        # don't leave the connection in a half-finished DDL transaction
        trans.rollback()
        raise
    finally:
        conn.close()

Example 3

Project: depsy Source File: utils.py
def drop_everything(db, app):
    """Drop every table and named foreign-key constraint in the app DB."""
    from sqlalchemy.engine import reflection
    from sqlalchemy import create_engine
    from sqlalchemy.schema import (
        MetaData,
        Table,
        DropTable,
        ForeignKeyConstraint,
        DropConstraint,
        )

    session = db.session()

    engine = create_engine(app.config['SQLALCHEMY_DATABASE_URI'])
    inspector = reflection.Inspector.from_engine(engine)

    # Collect everything up front: some backends lock as soon as
    # objects start being dropped inside a transaction.
    metadata = MetaData()

    tables = []
    fk_constraints = []

    for name in inspector.get_table_names():
        named_fks = [
            ForeignKeyConstraint((), (), name=fk['name'])
            for fk in inspector.get_foreign_keys(name)
            if fk['name']
        ]
        tables.append(Table(name, metadata, *named_fks))
        fk_constraints.extend(named_fks)

    # Drop FK constraints first so table drop order is irrelevant.
    for constraint in fk_constraints:
        session.execute(DropConstraint(constraint))

    for table in tables:
        session.execute(DropTable(table))

    db.session.commit()

Example 4

Project: total-impact-core Source File: utils.py
def drop_everything(db, app):
    """Drop the min_biblio view, then every table and named FK constraint."""
    from sqlalchemy.engine import reflection
    from sqlalchemy import create_engine
    from sqlalchemy.schema import (
        MetaData,
        Table,
        DropTable,
        ForeignKeyConstraint,
        DropConstraint,
        )

    session = db.session()

    engine = create_engine(app.config['SQLALCHEMY_DATABASE_URI'])
    inspector = reflection.Inspector.from_engine(engine)

    # Collect everything up front: some backends lock as soon as
    # objects start being dropped inside a transaction.
    metadata = MetaData()

    tables = []
    fk_constraints = []

    for name in inspector.get_table_names():
        named_fks = [
            ForeignKeyConstraint((), (), name=fk['name'])
            for fk in inspector.get_foreign_keys(name)
            if fk['name']
        ]
        tables.append(Table(name, metadata, *named_fks))
        fk_constraints.extend(named_fks)

    # The view depends on the tables, so it must go before them.
    session.execute("""drop view if exists min_biblio""")

    # Drop FK constraints first so table drop order is irrelevant.
    for constraint in fk_constraints:
        session.execute(DropConstraint(constraint))

    for table in tables:
        session.execute(DropTable(table))

    db.session.commit()

Example 5

Project: amivapi Source File: bootstrap.py
Function: clear_database
def clear_database(engine):
    """Remove every table from the database.

    All named ForeignKey constraints are dropped first, then the tables
    themselves, so cyclic references cannot block a drop.

    Code is from
    https://bitbucket.org/zzzeek/sqlalchemy/wiki/UsageRecipes/DropEverything

    :param engine: SQLalchemy engine to use
    """
    connection = engine.connect()

    # Only effective on backends with transactional DDL
    # (PostgreSQL, MS SQL Server).
    ddl_txn = connection.begin()

    inspector = reflection.Inspector.from_engine(engine)

    # Gather everything before dropping anything; some databases lock
    # once drops begin inside a transaction.
    metadata = MetaData()

    tables = []
    fk_constraints = []

    for name in inspector.get_table_names():
        named_fks = [
            ForeignKeyConstraint((), (), name=fk['name'])
            for fk in inspector.get_foreign_keys(name)
            if fk['name']
        ]
        tables.append(Table(name, metadata, *named_fks))
        fk_constraints.extend(named_fks)

    for constraint in fk_constraints:
        connection.execute(DropConstraint(constraint))

    for table in tables:
        connection.execute(DropTable(table))

    ddl_txn.commit()

Example 6

Project: JARR Source File: __init__.py
def db_empty(db):
    """Drop every table (and its named FK constraints) stored in ``db``."""
    # From http://www.sqlalchemy.org/trac/wiki/UsageRecipes/DropEverything
    connection = db.engine.connect()

    # Only effective on backends with transactional DDL
    # (PostgreSQL, MS SQL Server).
    ddl_txn = connection.begin()

    inspector = reflection.Inspector.from_engine(db.engine)

    # Gather everything before dropping anything; some databases lock
    # once drops begin inside a transaction.
    metadata = MetaData()

    tables = []
    fk_constraints = []

    for name in inspector.get_table_names():
        named_fks = [
            ForeignKeyConstraint((), (), name=fk['name'])
            for fk in inspector.get_foreign_keys(name)
            if fk['name']
        ]
        tables.append(Table(name, metadata, *named_fks))
        fk_constraints.extend(named_fks)

    # FK constraints first so table drop order does not matter.
    for constraint in fk_constraints:
        connection.execute(DropConstraint(constraint))

    for table in tables:
        connection.execute(DropTable(table))

    ddl_txn.commit()

Example 7

Project: sqlalchemy-i18n Source File: manager.py
def translation_base(
    parent_cls,
    base_class_factory=None,
    foreign_key_args=None
):
    """Build an abstract declarative mixin holding translations of
    ``parent_cls``.

    The returned mixin copies every primary-key column of ``parent_cls``,
    adds a ``locale`` column to the composite key, and declares a composite
    ForeignKeyConstraint back to the parent table.

    :param parent_cls: mapped class whose rows are being translated
    :param base_class_factory: callable returning the declarative base to
        inherit from; defaults to ``get_declarative_base``
    :param foreign_key_args: extra kwargs for the ForeignKeyConstraint
    """
    if base_class_factory is None:
        base_class_factory = get_declarative_base

    if foreign_key_args is None:
        foreign_key_args = {}
        # NOTE(review): this setdefault only runs when the caller passed
        # None — an explicitly supplied dict does NOT get
        # ondelete='CASCADE' defaulted in. Confirm that is intended.
        foreign_key_args.setdefault('ondelete', 'CASCADE')

    class TranslationMixin(
        base_class_factory(parent_cls),
        BaseTranslationMixin
    ):
        __abstract__ = True
        __parent_class__ = parent_cls

        @declared_attr
        def __table_args__(cls):
            # Subclasses of a concrete translation table (joined-table
            # inheritance) must not re-declare the FK constraint.
            if has_inherited_table(cls):
                return tuple()
            else:
                names = list(get_primary_keys(parent_cls).keys())

                # Composite FK: all parent PK columns -> parent table.
                return (
                    sa.schema.ForeignKeyConstraint(
                        names,
                        [
                            '%s.%s' % (parent_cls.__tablename__, name)
                            for name in names
                        ],
                        **foreign_key_args
                    ),
                )

    # Mirror each parent primary-key column onto the mixin.
    for column in parent_cls.__table__.c:
        if column.primary_key:
            setattr(
                TranslationMixin,
                column.key,
                column.copy()
            )

    # The locale completes the composite primary key: one row per
    # (parent pk, locale) pair.
    TranslationMixin.locale = sa.Column(
        sa.String(10), primary_key=True
    )

    return TranslationMixin

Example 8

Project: adhocracy Source File: drop_data.py
def main():
    # NOTE(review): Python 2 code (print statement, raw_input); a port to
    # Python 3 would need print()/input().
    # Drops every table and named FK constraint in the configured database,
    # asking for confirmation unless -f/--force is given.
    parser = create_parser(description=__doc__, use_instance=False)
    parser.add_argument('-f', dest='force', default=False, action='store_true',
                        help="force deletion without asking for confirmation")
    args = parser.parse_args()

    if not args.force:
        # NOTE(review): `input` shadows the builtin of the same name.
        input = raw_input('Delete all data? No backup will be done! '
                          'If so type "yes": ')
        if input != 'yes':
            print 'Answer not "yes", but: "%s"\nAborting.' % input
            exit(1)

    config = config_from_args(args)
    engine = get_engine(config, echo=True)
    conn = engine.connect()

    # the transaction only applies if the DB supports
    # transactional DDL, i.e. Postgresql, MS SQL Server
    trans = conn.begin()

    inspector = reflection.Inspector.from_engine(engine)

    # gather all data first before dropping anything.
    # some DBs lock after things have been dropped in
    # a transaction.

    metadata = MetaData()

    tbs = []
    all_fks = []

    for table_name in inspector.get_table_names():
        fks = []
        for fk in inspector.get_foreign_keys(table_name):
            # unnamed constraints cannot be dropped by name; skip them
            if not fk['name']:
                continue
            fks.append(
                ForeignKeyConstraint((), (), name=fk['name'])
                )
        t = Table(table_name, metadata, *fks)
        tbs.append(t)
        all_fks.extend(fks)

    # FK constraints first so tables can be dropped in any order.
    for fkc in all_fks:
        conn.execute(DropConstraint(fkc))

    for table in tbs:
        conn.execute(DropTable(table))

    trans.commit()

Example 9

Project: sync-engine Source File: db.py
def drop_everything(engine, keep_tables=None, reset_columns=None):
    """ Drops all tables in the db unless their name is in `keep_tables`.
        `reset_columns` is used to specify the columns that should be reset to
        default value in the tables that we're keeping -
        provided as a dict of table_name: list_of_column_names.
    """
    # Mutable default arguments ([] / {}) are shared across calls in
    # Python; use None sentinels instead.
    keep_tables = [] if keep_tables is None else keep_tables
    reset_columns = {} if reset_columns is None else reset_columns

    conn = engine.connect()
    trans = conn.begin()

    inspector = reflection.Inspector.from_engine(engine)

    # gather all data first before dropping anything.
    # some DBs lock after things have been dropped in
    # a transaction.

    metadata = MetaData()

    tbs = []
    all_fks = []

    for table_name in inspector.get_table_names():
        if table_name in keep_tables:
            # Reset certain columns in certain tables we're keeping
            if table_name in reset_columns:
                t = Table(table_name, metadata)

                column_names = reset_columns[table_name]
                for c in inspector.get_columns(table_name):
                    if c['name'] in column_names:
                        assert c['default']

                        # NOTE(review): SQL assembled by string formatting.
                        # Names/defaults come from the inspector (trusted),
                        # but this is not parameterized — keep it that way
                        # only for internal, trusted schemas.
                        q = "UPDATE {0} SET {1}={2};".\
                            format(table_name, c['name'], c['default'])
                        conn.execute(q)
            continue

        fks = []
        for fk in inspector.get_foreign_keys(table_name):
            # unnamed constraints cannot be dropped by name; skip them
            if not fk['name']:
                continue
            fks.append(ForeignKeyConstraint((), (), name=fk['name']))
        t = Table(table_name, metadata, *fks)
        tbs.append(t)
        all_fks.extend(fks)

    # FK constraints first so tables can be dropped in any order.
    for fkc in all_fks:
        conn.execute(DropConstraint(fkc))

    for table in tbs:
        conn.execute(DropTable(table))

    trans.commit()

Example 10

Project: fuel-ostf Source File: nailgun_hooks.py
def clear_db(db_path):
    """Drop all tables, named FK constraints and (on PostgreSQL) enum types."""
    db_engine = create_engine(db_path, poolclass=NullPool)
    with db_engine.begin() as conn:
        meta = MetaData()
        meta.reflect(bind=db_engine)
        inspector = reflection.Inspector.from_engine(db_engine)

        tables = []
        fk_constraints = []

        for table_name in inspector.get_table_names():
            named_fks = [
                schema.ForeignKeyConstraint(tuple(), tuple(), name=fk['name'])
                for fk in inspector.get_foreign_keys(table_name)
                if fk['name']
            ]
            tables.append(
                schema.Table(
                    table_name,
                    meta,
                    *named_fks,
                    extend_existing=True
                )
            )
            fk_constraints.extend(named_fks)

        # FK constraints first so table drop order does not matter.
        for constraint in fk_constraints:
            conn.execute(schema.DropConstraint(constraint))

        for table in tables:
            conn.execute(schema.DropTable(table))

        # such construction is available only for postgresql
        if db_engine.name == "postgresql":
            for en in _get_enums(conn):
                conn.execute("DROP TYPE {0}".format(en))

Example 11

Project: fuel-web Source File: __init__.py
def dropdb():
    """Drop all tables, custom Postgres types and migration metadata.

    Operates on the module-level ``engine``. Named FK constraints are
    dropped first so tables can be removed in any order; afterwards every
    user-defined Postgres type (e.g. enums) is dropped too.
    """
    from nailgun.db import migration
    conn = engine.connect()
    trans = conn.begin()
    meta = MetaData()
    meta.reflect(bind=engine)
    inspector = reflection.Inspector.from_engine(engine)

    tbs = []
    all_fks = []

    for table_name in inspector.get_table_names():
        fks = []
        for fk in inspector.get_foreign_keys(table_name):
            # unnamed constraints cannot be dropped by name; skip them
            if not fk['name']:
                continue
            fks.append(
                schema.ForeignKeyConstraint((), (), name=fk['name'])
            )
        t = schema.Table(
            table_name,
            meta,
            *fks,
            extend_existing=True
        )
        tbs.append(t)
        all_fks.extend(fks)

    # FK constraints first so tables can be dropped in any order.
    for fkc in all_fks:
        conn.execute(schema.DropConstraint(fkc))

    for table in tbs:
        conn.execute(schema.DropTable(table))

    # Enumerate every user-defined (non-catalog) type; Postgres-specific.
    custom_types = conn.execute(
        "SELECT n.nspname as schema, t.typname as type "
        "FROM pg_type t LEFT JOIN pg_catalog.pg_namespace n "
        "ON n.oid = t.typnamespace "
        "WHERE (t.typrelid = 0 OR (SELECT c.relkind = 'c' "
        "FROM pg_catalog.pg_class c WHERE c.oid = t.typrelid)) "
        "AND NOT EXISTS(SELECT 1 FROM pg_catalog.pg_type el "
        "WHERE el.oid = t.typelem AND el.typarray = t.oid) "
        "AND     n.nspname NOT IN ('pg_catalog', 'information_schema')"
    )

    # Row shape is (schema, type); drop by type name.
    for tp in custom_types:
        conn.execute("DROP TYPE {0}".format(tp[1]))
    trans.commit()
    migration.drop_migration_meta(engine)
    conn.close()
    engine.dispose()

Example 12

Project: glance Source File: 037_add_changes_to_satisfy_models.py
def upgrade(migrate_engine):
    """Migration 037: align image_members/images/image_properties with the
    models.

    Makes ``image_members.status`` and ``images.protected`` NOT NULL (after
    backfilling NULLs), and normalizes the unique constraint / index on
    ``image_properties`` per backend. MySQL- and PostgreSQL-only.
    """
    meta = sqlalchemy.MetaData()
    meta.bind = migrate_engine

    # Other backends are intentionally untouched.
    if migrate_engine.name not in ['mysql', 'postgresql']:
        return

    image_properties = Table('image_properties', meta, autoload=True)
    image_members = Table('image_members', meta, autoload=True)
    images = Table('images', meta, autoload=True)

    # We have to ensure that we doesn't have `nulls` values since we are going
    # to set nullable=False
    migrate_engine.execute(
        update(image_members)
        .where(image_members.c.status == sql.expression.null())
        .values(status='pending'))

    migrate_engine.execute(
        update(images)
        .where(images.c.protected == sql.expression.null())
        .values(protected=sql.expression.false()))

    image_members.c.status.alter(nullable=False, server_default='pending')
    images.c.protected.alter(
        nullable=False, server_default=sql.expression.false())

    if migrate_engine.name == 'postgresql':
        # The index is replaced by a unique constraint of the same name below.
        Index('ix_image_properties_image_id_name',
              image_properties.c.image_id,
              image_properties.c.name).drop()

        # We have different names of this constraint in different versions of
        # postgresql. Since we have only one constraint on this table, we can
        # get it in the following way.
        name = migrate_engine.execute(
            """SELECT conname
               FROM pg_constraint
               WHERE conrelid =
                   (SELECT oid
                    FROM pg_class
                    WHERE relname LIKE 'image_properties')
                  AND contype = 'u';""").scalar()

        constraint = UniqueConstraint(image_properties.c.image_id,
                                      image_properties.c.name,
                                      name='%s' % name)
        migrate_engine.execute(DropConstraint(constraint))

        # Recreate under the canonical name.
        constraint = UniqueConstraint(image_properties.c.image_id,
                                      image_properties.c.name,
                                      name='ix_image_properties_image_id_name')
        migrate_engine.execute(AddConstraint(constraint))

        images.c.id.alter(server_default=None)
    if migrate_engine.name == 'mysql':
        # MySQL auto-named the constraint after its first column.
        constraint = UniqueConstraint(image_properties.c.image_id,
                                      image_properties.c.name,
                                      name='image_id')
        migrate_engine.execute(DropConstraint(constraint))
        image_locations = Table('image_locations', meta, autoload=True)
        # Add the FK only if it is missing (idempotent on re-run).
        if len(image_locations.foreign_keys) == 0:
            migrate_engine.execute(AddConstraint(ForeignKeyConstraint(
                [image_locations.c.image_id], [images.c.id])))

Example 13

Project: rack Source File: utils.py
def _drop_unique_constraint_in_sqlite(migrate_engine, table_name, uc_name,
                                      **col_name_col_instance):
    """Drop unique constraint ``uc_name`` from a SQLite table.

    SQLite cannot ALTER a constraint away, so the table is rebuilt: create
    ``<name>__tmp__`` without the constraint, copy all rows across, drop the
    original, recreate indexes and FKs, then rename the copy back.

    :param col_name_col_instance: replacement Column objects for columns
        whose reflected type is NullType (unsupported by reflection).
    """
    insp = reflection.Inspector.from_engine(migrate_engine)
    meta = MetaData(bind=migrate_engine)

    table = Table(table_name, meta, autoload=True)
    columns = []
    for column in table.columns:
        # Reflection could not determine the type; substitute the
        # caller-provided column instead.
        if isinstance(column.type, NullType):
            new_column = _get_not_supported_column(col_name_col_instance,
                                                   column.name)
            columns.append(new_column)
        else:
            columns.append(column.copy())

    uniques = _get_unique_constraints_in_sqlite(migrate_engine, table_name)
    table.constraints.update(uniques)

    # Keep every constraint except the one being dropped; FKs are
    # re-created explicitly below so they bind to the new table.
    constraints = [constraint for constraint in table.constraints
                   if not constraint.name == uc_name and
                   not isinstance(constraint, schema.ForeignKeyConstraint)]

    new_table = Table(table_name + "__tmp__", meta, *(columns + constraints))
    new_table.create()

    # Rebuild the original table's indexes against the new table.
    indexes = []
    for index in insp.get_indexes(table_name):
        column_names = [new_table.c[c] for c in index['column_names']]
        indexes.append(Index(index["name"],
                             *column_names,
                             unique=index["unique"]))
    # Rebuild the original table's FK constraints against the new table.
    f_keys = []
    for fk in insp.get_foreign_keys(table_name):
        refcolumns = [fk['referred_table'] + '.' + col
                      for col in fk['referred_columns']]
        f_keys.append(ForeignKeyConstraint(fk['constrained_columns'],
                                           refcolumns, table=new_table,
                                           name=fk['name']))

    # Copy all rows, then swap the tables.
    ins = InsertFromSelect(new_table, table.select())
    migrate_engine.execute(ins)
    table.drop()

    [index.create(migrate_engine) for index in indexes]
    for fkey in f_keys:
        fkey.create()
    new_table.rename(table_name)

Example 14

Project: refstack Source File: __init__.py
    def drop_all_tables_and_constraints(self):
        """Drop tables and cyclical constraints between tables."""
        engine = create_engine(self.connection)
        conn = engine.connect()
        trans = conn.begin()

        inspector = reflection.Inspector.from_engine(engine)
        metadata = MetaData()

        tables = []
        fk_constraints = []

        try:
            for name in inspector.get_table_names():
                named_fks = [
                    ForeignKeyConstraint((), (), name=fk['name'])
                    for fk in inspector.get_foreign_keys(name)
                    if fk['name']
                ]

                tables.append(Table(name, metadata, *named_fks))
                fk_constraints.extend(named_fks)

            # FK constraints first so table drop order does not matter.
            for constraint in fk_constraints:
                conn.execute(DropConstraint(constraint))

            for table in tables:
                conn.execute(DropTable(table))

            trans.commit()
            trans.close()
            conn.close()
        except:
            trans.rollback()
            conn.close()
            raise

Example 15

Project: sqlalchemy-migrate Source File: sqlite.py
Function: get_constraints
    def _get_constraints(self, table):
        """Retrieve information about existing constraints of the table

        This feature is needed for recreate_table() to work properly.

        SQLite exposes no rich constraint introspection, so the raw
        CREATE TABLE statement is parsed out of ``sqlite_master``.
        """

        data = table.metadata.bind.execute(
            """SELECT sql
               FROM sqlite_master
               WHERE
                   type='table' AND
                   name=:table_name""",
            table_name=table.name
        ).fetchone()[0]

        # Raw strings so \w and \( are regex escapes, not (invalid)
        # string escapes.
        UNIQUE_PATTERN = r"CONSTRAINT (\w+) UNIQUE \(([^\)]+)\)"
        constraints = []
        for name, cols in re.findall(UNIQUE_PATTERN, data):
            # Filter out any columns that were dropped from the table.
            columns = self._filter_columns(cols, table)
            if columns:
                # append, not extend: a single constraint object is added
                # (extend would iterate the constraint's columns instead).
                constraints.append(UniqueConstraint(*columns, name=name))

        FKEY_PATTERN = r"CONSTRAINT (\w+) FOREIGN KEY \(([^\)]+)\)"
        for name, cols in re.findall(FKEY_PATTERN, data):
            # Filter out any columns that were dropped from the table.
            columns = self._filter_columns(cols, table)
            if columns:
                constraints.append(ForeignKeyConstraint(*columns, name=name))

        return constraints

Example 16

Project: sqlalchemy-migrate Source File: test_changeset.py
    @fixture.usedb()
    def test_fk(self):
        """Can create columns with foreign keys"""
        # create FK's target
        reftable = Table('tmp_ref', self.meta,
            Column('id', Integer, primary_key=True),
        )
        # clean up any leftover table from a previous run
        if self.engine.has_table(reftable.name):
            reftable.drop()
        reftable.create()

        # create column with fk
        col = Column('data', Integer, ForeignKey(reftable.c.id, name='testfk'))
        col.create(self.table)

        # check if constraint is added
        for cons in self.table.constraints:
            if isinstance(cons, sqlalchemy.schema.ForeignKeyConstraint):
                break
        else:
            # for/else: reached only when no FK constraint was found
            self.fail('No constraint found')

        # TODO: test on db level if constraints work

        # SQLAlchemy >= 0.7 stores foreign_keys as a set, older as a list.
        if SQLA_07:
            self.assertEqual(reftable.c.id.name,
                list(col.foreign_keys)[0].column.name)
        else:
            self.assertEqual(reftable.c.id.name,
                col.foreign_keys[0].column.name)

        # MySQL requires the FK constraint to be dropped explicitly
        # before the column can be dropped.
        if self.engine.name == 'mysql':
            constraint.ForeignKeyConstraint([self.table.c.data],
                                            [reftable.c.id],
                                            name='testfk').drop()
        col.drop(self.table)

        # tear down the FK target table
        if self.engine.has_table(reftable.name):
            reftable.drop()

Example 17

Project: sqlalchemy-migrate Source File: test_changeset.py
    @fixture.usedb()
    def test_drop_with_complex_foreign_keys(self):
        """Dropping a column that is part of a composite FK removes the FK."""
        from sqlalchemy.schema import ForeignKeyConstraint
        from sqlalchemy.schema import UniqueConstraint

        # start from a clean slate
        self.table.drop()
        self.meta.clear()

        # NOTE(mriedem): DB2 does not currently support unique constraints
        # on nullable columns, so the columns that are used to create the
        # foreign keys here need to be non-nullable for testing with DB2
        # to work.

        # create FK's target
        reftable = Table('tmp_ref', self.meta,
            Column('id', Integer, primary_key=True),
            Column('jd', Integer, nullable=False),
            UniqueConstraint('id','jd')
            )
        if self.engine.has_table(reftable.name):
            reftable.drop()
        reftable.create()

        # add a table with a complex foreign key constraint
        self.table = Table(
            self.table_name, self.meta,
            Column('id', Integer, primary_key=True),
            Column('r1', Integer, nullable=False),
            Column('r2', Integer, nullable=False),
            ForeignKeyConstraint(['r1','r2'],
                                 [reftable.c.id,reftable.c.jd],
                                 name='test_fk')
            )
        self.table.create()

        # paranoid check
        self.assertEqual([['r1','r2']],
                         self._actual_foreign_keys())

        # delete one
        # MySQL requires the composite FK to be dropped explicitly before
        # one of its columns can be dropped.
        if self.engine.name == 'mysql':
            constraint.ForeignKeyConstraint([self.table.c.r1, self.table.c.r2],
                                            [reftable.c.id, reftable.c.jd],
                                            name='test_fk').drop()
        self.table.c.r2.drop()

        # check the constraint is gone, since part of it
        # is no longer there - if people hit this,
        # they may be confused, maybe we should raise an error
        # and insist that the constraint is deleted first, separately?
        self.assertEqual([],
                         self._actual_foreign_keys())