sqlalchemy.schema.Table

Here are examples of the Python API `sqlalchemy.schema.Table`, taken from open-source projects. By voting up, you can indicate which examples are most useful and appropriate.

78 Examples

Example 1

Project: pushmanager
Source File: test_core_db.py
View license
    def test_init_db(self):
        """Verify that initialization produced a SQLAlchemy Table for each push table."""
        tables = (
            db.push_checklist,
            db.push_requests,
            db.push_plans,
            db.push_pushes,
            db.push_pushcontents,
            db.push_removals,
        )
        for table in tables:
            assert isinstance(table, SA.schema.Table)

Example 2

Project: cruzdb
Source File: sqlsoup.py
View license
def _selectable_name(selectable):
    """Derive a readable name for an arbitrary selectable."""
    if isinstance(selectable, sql.Alias):
        # An alias is named after whatever it wraps.
        return _selectable_name(selectable.element)
    if isinstance(selectable, sql.Select):
        # A SELECT is named by concatenating the names of its FROM items.
        return ''.join(_selectable_name(from_obj) for from_obj in selectable.froms)
    if isinstance(selectable, schema.Table):
        return selectable.name
    # Fall back to the class name, dropping a single leading underscore.
    class_name = selectable.__class__.__name__
    if class_name[0] == '_':
        class_name = class_name[1:]
    return class_name

Example 3

Project: CouchPotatoV1
Source File: sqlsoup.py
View license
def _selectable_name(selectable):
    """Compute a display name for *selectable*; table names are capitalized."""
    if isinstance(selectable, sql.Alias):
        return _selectable_name(selectable.element)
    elif isinstance(selectable, sql.Select):
        # Name a SELECT after everything it selects from.
        parts = [_selectable_name(from_item) for from_item in selectable.froms]
        return ''.join(parts)
    elif isinstance(selectable, schema.Table):
        return selectable.name.capitalize()
    else:
        cls_name = selectable.__class__.__name__
        # Strip one leading underscore from private class names.
        if cls_name[0] == '_':
            cls_name = cls_name[1:]
        return cls_name

Example 4

Project: Flexget
Source File: sqlalchemy_utils.py
View license
def table_schema(name, session):
    """
    :returns: Table schema using SQLAlchemy reflect as it currently exists in the db
    :rtype: Table
    """
    # Bind a fresh MetaData to the session's engine, then reflect the table.
    metadata = MetaData(bind=session.bind)
    return Table(name, metadata, autoload=True)

Example 5

Project: sandman
Source File: utils.py
View license
def add_pk_if_required(db, table, name):
    """Build a model class for *table*, synthesizing a primary key if absent.

    Every column of a key-less table is promoted to primary-key status and the
    table is re-registered with the metadata before the model type is created.

    :param db: Flask-SQLAlchemy database handle
    :param `sqlalchemy.schema.Table` table: table to create a primary key for
    :param name: table name, used as ``__tablename__`` and the class name
    """
    db.metadata.reflect(bind=db.engine)
    attributes = {'__tablename__': name}
    if not table.primary_key:
        # No primary key: promote every column so the mapper can work.
        for col in table.columns:
            col.primary_key = True
        Table(name, db.metadata, *table.columns, extend_existing=True)
        attributes['__table__'] = table
        db.metadata.create_all(bind=db.engine)
    return type(str(name), (sandman_model, db.Model), attributes)

Example 6

Project: sandman
Source File: utils.py
View license
def add_pk_if_required(db, table, name):
    """Create a model class for *table*, adding a primary key when missing.

    Tables reflected without a primary key get all of their columns promoted
    to the key so SQLAlchemy's mapper can handle them.

    :param db: database handle whose metadata and engine are used
    :param `sqlalchemy.schema.Table` table: table to create a primary key for
    :param name: table name used for ``__tablename__`` and the new class
    """
    db.metadata.reflect(bind=db.engine)
    class_attrs = {'__tablename__': name}
    has_pk = bool(table.primary_key)
    if not has_pk:
        for column in table.columns:
            column.primary_key = True
        # Re-register the (now keyed) table definition with the metadata.
        Table(name, db.metadata, *table.columns, extend_existing=True)
        class_attrs['__table__'] = table
        db.metadata.create_all(bind=db.engine)
    return type(str(name), (sandman_model, db.Model), class_attrs)

Example 7

View license
    def __call__(self, extends=None):
        """
        Builds version table.

        When *extends* is given, an existing table is extended in place and
        no base columns are added; otherwise a fresh table is created.
        """
        if extends is None:
            table_name = self.table_name
            columns = self.columns
        else:
            table_name = extends.name
            columns = []
        # Let plugins contribute extra version-table columns.
        self.manager.plugins.after_build_version_table_columns(self, columns)
        return sa.schema.Table(
            table_name,
            self.parent_table.metadata,
            *columns,
            extend_existing=extends is not None
        )

Example 8

View license
    def __call__(self, extends=None):
        """
        Builds version table.
        """
        extending = extends is not None
        # Extending an existing table starts with no base columns.
        columns = [] if extending else self.columns
        # Plugins may append additional version-table columns.
        self.manager.plugins.after_build_version_table_columns(self, columns)
        return sa.schema.Table(
            extends.name if extending else self.table_name,
            self.parent_table.metadata,
            *columns,
            extend_existing=extending
        )

Example 9

Project: maraschino
Source File: sqlsoup.py
View license
def _selectable_name(selectable):
    """Return a human-readable name for any selectable object."""
    if isinstance(selectable, sql.Alias):
        # Aliases are named after the thing they alias.
        return _selectable_name(selectable.element)
    if isinstance(selectable, sql.Select):
        return ''.join(_selectable_name(s) for s in selectable.froms)
    if isinstance(selectable, schema.Table):
        return selectable.name.capitalize()
    # Fallback: the class name without a single leading underscore.
    name = selectable.__class__.__name__
    if name[0] == '_':
        name = name[1:]
    return name

Example 10

Project: maraschino
Source File: sqlsoup.py
View license
def _selectable_name(selectable):
    """Derive a name for *selectable* (table names are capitalized)."""
    if isinstance(selectable, sql.Alias):
        return _selectable_name(selectable.element)
    elif isinstance(selectable, sql.Select):
        # A SELECT's name is the concatenation of its FROM-item names.
        pieces = [_selectable_name(part) for part in selectable.froms]
        return ''.join(pieces)
    elif isinstance(selectable, schema.Table):
        return selectable.name.capitalize()
    fallback = selectable.__class__.__name__
    if fallback[0] == '_':
        fallback = fallback[1:]
    return fallback

Example 11

Project: sqlaload
Source File: schema.py
View license
def create_table(engine, table_name):
    """Create a fresh table with a serial ``id`` primary key and cache it."""
    with lock:
        log.debug("Creating table: %s on %r" % (table_name, engine))
        table = Table(table_name, engine._metadata)
        # Every sqlaload table carries an auto-increment integer key.
        table.append_column(Column('id', Integer, primary_key=True))
        table.create(engine)
        engine._tables[table_name] = table
        return table

Example 12

Project: sqlaload
Source File: schema.py
View license
def load_table(engine, table_name):
    """Reflect an existing table from the database and cache it on the engine."""
    with lock:
        log.debug("Loading table: %s on %r" % (table_name, engine))
        reflected = Table(table_name, engine._metadata, autoload=True)
        engine._tables[table_name] = reflected
        return reflected

Example 13

Project: sqlaload
Source File: schema.py
View license
def drop_table(engine, table_name):
    """Drop *table_name* from the database and the engine's table cache."""
    # Connection objects expose their Engine via .engine; unwrap them.
    if hasattr(engine, 'engine'):
        engine = engine.engine

    with lock:
        table = engine._tables.get(table_name)
        if table is None:
            if not engine.has_table(table_name):
                # Unknown table: nothing to do.
                return
            table = Table(table_name, engine._metadata)
        table.drop(engine)
        engine._tables.pop(table_name, None)

Example 14

Project: babbage
Source File: cube.py
View license
    def _load_table(self, name):
        """ Reflect a given table from the database, caching the result. """
        cached = self._tables.get(name)
        if cached is not None:
            return cached
        if not self.engine.has_table(name):
            raise BindingException('Table does not exist: %r' % name,
                                   table=name)
        reflected = Table(name, self.meta, autoload=True)
        self._tables[name] = reflected
        return reflected

Example 15

Project: babbage
Source File: util.py
View license
def create_table(table_name, columns):
    """(Re)create *table_name* with an ``_id`` primary key plus *columns*."""
    meta = MetaData()
    meta.bind = engine

    # Replace any pre-existing table of the same name.
    if engine.has_table(table_name):
        existing = schema.Table(table_name, meta, autoload=True)
        existing.drop()

    table = schema.Table(table_name, meta)
    table.append_column(schema.Column('_id', types.Integer, primary_key=True))
    # Columns are added in sorted spec order for a deterministic layout.
    for (_, name, typ) in sorted(column_specs(columns)):
        table.append_column(schema.Column(name, typ))

    table.create(engine)
    return table

Example 16

Project: spendb
Source File: fact_table.py
View license
    @property
    def table(self):
        """ Generate an appropriate table representation to mirror the
        fields known for this table. """
        if self._table is None:
            # Lazily build the table: fixed _id / _json columns first,
            # then whatever fields are currently known.
            self._table = Table(self.table_name, self.meta)
            for fixed_col in (Column('_id', Unicode(42), primary_key=True),
                              Column('_json', Unicode())):
                self._table.append_column(fixed_col)
            self._fields_columns(self._table)
        return self._table

Example 17

View license
def upgrade(migrate_engine):
    """Rename pool_manager_statuses.domain_id to zone_id."""
    meta.bind = migrate_engine

    pool_manager_statuses = Table('pool_manager_statuses', meta, autoload=True)
    pool_manager_statuses.c.domain_id.alter(name='zone_id')

Example 18

View license
def upgrade(migrate_engine):
    """Resize the managed_* metadata columns on the records table."""
    meta.bind = migrate_engine
    records = Table('records', meta, autoload=True)

    # (column name, new String length) for each managed_* column.
    resized = (
        ('managed_extra', 100),
        ('managed_plugin_type', 50),
        ('managed_plugin_name', 50),
        ('managed_resource_type', 50),
        ('managed_resource_region', 100),
        ('managed_tenant_id', 36),
    )
    for column_name, length in resized:
        records.columns[column_name].alter(type=String(length))

Example 19

View license
def upgrade(migrate_engine):
    """Add an indexed boolean delayed_notify column to the zones table."""
    LOG.info(_LI("Adding boolean column delayed_notify to table 'zones'"))
    meta.bind = migrate_engine
    zones = Table('zones', meta, autoload=True)
    delayed_notify = Column('delayed_notify', Boolean(), default=False)
    delayed_notify.create(zones)
    # Index the new column so notification queries stay cheap.
    Index('delayed_notify', zones.c.delayed_notify).create(migrate_engine)

Example 20

Project: designate
Source File: 086_new_pools_tables.py
View license
def upgrade(migrate_engine):
    """Create the new pool-related tables."""
    meta.bind = migrate_engine

    # Load the pool_attributes_table table schema for relations
    Table('pools', meta, autoload=True)

    new_tables = (pool_nameservers, pool_targets, pool_target_options,
                  pool_target_masters, pool_also_notifies)
    for new_table in new_tables:
        new_table.create()

Example 21

Project: designate
Source File: 097_add_services.py
View license
def upgrade(migrate_engine):
    """Add the service_statuses table and its backing status enum."""
    meta.bind = migrate_engine

    # The enum type must exist before the column that references it.
    status_enum = Enum(name='service_statuses', metadata=meta, *SERVICE_STATES)
    status_enum.create()

    service_statuses = Table(
        'service_statuses', meta,
        Column('id', UUID(), default=utils.generate_uuid, primary_key=True),
        Column('created_at', DateTime),
        Column('updated_at', DateTime),

        Column('service_name', String(40), nullable=False),
        Column('hostname', String(255), nullable=False),
        Column('heartbeated_at', DateTime, nullable=True),
        Column('status', status_enum, nullable=False),
        Column('stats', Text, nullable=False),
        Column('capabilities', Text, nullable=False),
    )
    service_statuses.create()

Example 22

View license
def upgrade(migrate_engine):
    """Add lookup indexes to the recordsets table."""
    meta.bind = migrate_engine
    recordsets = Table('recordsets', meta, autoload=True)

    # (index name, indexed column) pairs.
    wanted = (
        ('rrset_updated_at', recordsets.c.updated_at),
        ('rrset_zoneid', recordsets.c.zone_id),
        ('rrset_type', recordsets.c.type),
        ('rrset_ttl', recordsets.c.ttl),
        ('rrset_tenant_id', recordsets.c.tenant_id),
    )
    for index_name, column in wanted:
        Index(index_name, column).create(migrate_engine)

Example 23

Project: glance
Source File: 032_add_task_info_table.py
View license
def define_task_info_table(meta):
    """Define and return the task_info table, foreign-keyed to tasks.id."""
    # tasks must be reflected first so the ForeignKey below can resolve.
    Table('tasks', meta, autoload=True)
    # NOTE(nikhil): input and result are stored as text in the DB.
    # SQLAlchemy marshals the data to/from JSON using custom type
    # JSONEncodedDict. It uses simplejson underneath.
    return Table('task_info',
                 meta,
                 Column('task_id', String(36),
                        ForeignKey('tasks.id'),
                        primary_key=True,
                        nullable=False),
                 Column('input', Text()),
                 Column('result', Text()),
                 Column('message', Text()),
                 mysql_engine='InnoDB',
                 mysql_charset='utf8')

Example 24

View license
def upgrade(migrate_engine):
    """Rename the 'schema' column (a reserved word) to 'json_schema'."""
    meta = MetaData(bind=migrate_engine)
    # Both metadef tables carry the same misnamed column.
    for table_name in ('metadef_objects', 'metadef_properties'):
        reflected = Table(table_name, meta, autoload=True)
        reflected.c.schema.alter(name='json_schema')

Example 25

Project: iktomi
Source File: sqla.py
View license
def drop_everything(engine):
    '''Dropping all tables and custom types (enums) using `engine`.
    Taken from http://www.sqlalchemy.org/trac/wiki/UsageRecipes/DropEverything

    This method is more robust than `metadata.drop_all(engine)`. B.c. when
    you change a table or a type name, `drop_all` does not consider the old one.
    Thus, DB holds some unused entities.'''
    conn = engine.connect()
    # the transaction only applies if the DB supports
    # transactional DDL, i.e. Postgresql, MS SQL Server
    trans = conn.begin()
    inspector = reflection.Inspector.from_engine(engine)
    metadata = MetaData()
    tbs = []
    all_fks = []
    types = []
    # Gather tables, FK constraints and custom column types *before* dropping
    # anything; some databases lock once drops begin.
    for table_name in inspector.get_table_names():
        fks = []
        for fk in inspector.get_foreign_keys(table_name):
            if not fk['name']:
                continue
            fks.append(ForeignKeyConstraint((), (), name=fk['name']))
        for col in inspector.get_columns(table_name):
            if isinstance(col['type'], SchemaType):
                types.append(col['type'])
        t = Table(table_name, metadata, *fks)
        tbs.append(t)
        all_fks.extend(fks)
    try:
        # Drop FK constraints first so tables can be dropped in any order.
        for fkc in all_fks:
            conn.execute(DropConstraint(fkc))
        for table in tbs:
            conn.execute(DropTable(table))
        for custom_type in types:
            custom_type.drop(conn)
        trans.commit()
    except: # pragma: no cover
        trans.rollback()
        raise
    finally:
        # BUG FIX: the connection was previously leaked; always return it
        # to the pool, whether the drops succeeded or not.
        conn.close()

Example 26

Project: bubbles
Source File: objects.py
View license
    def table(self, table, schema=None, autoload=True):
        """Returns a table with `name`. If schema is not provided, then
        store's default schema is used."""
        if table is None:
            raise Exception("Table name should not be None")
        if isinstance(table, sqlalchemy.schema.Table):
            # Already a Table object; hand it back untouched.
            return table

        schema = schema or self.schema

        try:
            return sqlalchemy.Table(table, self.metadata,
                                    autoload=autoload, schema=schema,
                                    autoload_with=self.connectable)
        except sqlalchemy.exc.NoSuchTableError:
            # Include the schema in the error message only when one was used.
            slabel = " in schema '%s'" % schema if schema else ""
            raise NoSuchObjectError("Unable to find table '%s'%s" % \
                                    (table, slabel))

Example 27

Project: quick_orm
Source File: core.py
View license
    @staticmethod
    def many_to_many(ref_model, ref_name = None, backref_name = None, middle_table_name = None):
        """Class Decorator, add a many-to-many relationship between two SQLAlchemy models.
        Parameters:
            ref_table_name is the name of the destination table, it is NOT the one decorated by this method.
            ref_name is how this model reference the destination models.
            backref_name is how the destination model reference this model.
            middle_table_name is the middle table name of this many-to-many relationship.
        """
        # ref_model may be given as a class or as its name (string);
        # normalize to the model name for table naming and relationship().
        if isinstance(ref_model, str):
            ref_model_name = ref_model
        else:
            ref_model_name = ref_model.__name__
        ref_table_name = StringUtil.camelcase_to_underscore(ref_model_name)
        ref_name = ref_name or '{0}s'.format(ref_table_name)
        def ref_table(cls):
            # The actual decorator applied to the referencing model class.
            if backref_name:
                cls._readable_names = backref_name
            if not isinstance(ref_model, str):
                # Record the association in both directions for later use.
                ref_model._readable_names = ref_name
                cls._many_to_models.append(ref_model)
                ref_model._many_to_models.append(cls)
            table_name = cls._readable_name

            my_middle_table_name = middle_table_name or '{0}_{1}'.format(table_name, ref_table_name)
            # Self-referencing relationships need distinct generic column
            # names; otherwise both FK columns would be '<table>_id'.
            if table_name == ref_table_name:
                left_column_name = 'left_id'
                right_column_name = 'right_id'
            else:
                left_column_name = '{0}_id'.format(table_name)
                right_column_name = '{0}_id'.format(ref_table_name)
            middle_table = Table(my_middle_table_name, Database.Base.metadata,
                Column(left_column_name, Integer, ForeignKey('{0}.id'.format(table_name), ondelete = "CASCADE"), primary_key = True),
                Column(right_column_name, Integer, ForeignKey('{0}.id'.format(ref_table_name), ondelete = "CASCADE"), primary_key = True))

            my_backref_name = backref_name or '{0}s'.format(table_name)
            parameters = dict(secondary = middle_table, lazy = 'dynamic', backref = backref(my_backref_name, lazy = 'dynamic'))
            if table_name == ref_table_name:
                # Self-referencing: tell SQLAlchemy which join side is which.
                parameters['primaryjoin'] = cls.id == middle_table.c.left_id
                parameters['secondaryjoin'] = cls.id == middle_table.c.right_id

            setattr(cls, ref_name, relationship(ref_model_name, **parameters))

            return cls
        return ref_table

Example 28

Project: pokedex
Source File: multilang.py
View license
def create_translation_table(_table_name, foreign_class, relation_name,
    language_class, relation_lazy='select', **kwargs):
    """Creates a table that represents some kind of data attached to the given
    foreign class, but translated across several languages.  Returns the new
    table's mapped class.  It won't be declarative, but it will have a
    `__table__` attribute so you can retrieve the Table object.

    `foreign_class` must have a `__singlename__`, currently only used to create
    the name of the foreign key column.

    Also supports the notion of a default language, which is attached to the
    session.  This is English by default, for historical and practical reasons.

    Usage looks like this:

        class Foo(Base): ...

        create_translation_table('foo_bars', Foo, 'bars',
            name = Column(...),
        )

        # Now you can do the following:
        foo.name
        foo.name_map['en']
        foo.foo_bars['en']

        foo.name_map['en'] = "new name"
        del foo.name_map['en']

        q.options(joinedload(Foo.bars_local))
        q.options(joinedload(Foo.bars))

    The following properties are added to the passed class:

    - `(relation_name)`, a relation to the new table.  It uses a dict-based
      collection class, where the keys are language identifiers and the values
      are rows in the created tables.
    - `(relation_name)_local`, a relation to the row in the new table that
      matches the current default language.
    - `(relation_name)_table`, the class created by this function.

    Note that these are distinct relations.  Even though the former necessarily
    includes the latter, SQLAlchemy doesn't treat them as linked; loading one
    will not load the other.  Modifying both within the same transaction has
    undefined behavior.

    For each column provided, the following additional attributes are added to
    Foo:

    - `(column)_map`, an association proxy onto `foo_bars`.
    - `(column)`, an association proxy onto `foo_bars_local`.

    Pardon the naming disparity, but the grammar suffers otherwise.

    Modifying these directly is not likely to be a good idea.

    For Markdown-formatted columns, `(column)_map` and `(column)` will give
    Markdown objects.
    """
    # n.b.: language_class only exists for the sake of tests, which sometimes
    # want to create tables entirely separate from the pokedex metadata

    foreign_key_name = foreign_class.__singlename__ + '_id'

    Translations = type(_table_name, (object,), {
        '_language_identifier': association_proxy('local_language', 'identifier'),
        'relation_name': relation_name,
        '__tablename__': _table_name,
    })

    # Create the table object
    table = Table(_table_name, foreign_class.__table__.metadata,
        Column(foreign_key_name, Integer, ForeignKey(foreign_class.id),
            primary_key=True, nullable=False,
            doc=u"ID of the %s these texts relate to" % foreign_class.__singlename__),
        Column('local_language_id', Integer, ForeignKey(language_class.id),
            primary_key=True, nullable=False,
            doc=u"Language these texts are in"),
    )
    Translations.__table__ = table

    # Add ye columns
    # Column objects have a _creation_order attribute in ascending order; use
    # this to get the (unordered) kwargs sorted correctly
    kwitems = list(kwargs.items())
    kwitems.sort(key=lambda kv: kv[1]._creation_order)
    for name, column in kwitems:
        column.name = name
        table.append_column(column)

    # Construct ye mapper
    mapper(Translations, table, properties={
        'foreign_id': synonym(foreign_key_name),
        'local_language': relationship(language_class,
            primaryjoin=table.c.local_language_id == language_class.id,
            innerjoin=True),
    })

    # Add full-table relations to the original class
    # Foo.bars_table
    setattr(foreign_class, relation_name + '_table', Translations)
    # Foo.bars
    setattr(foreign_class, relation_name, relationship(Translations,
        primaryjoin=foreign_class.id == Translations.foreign_id,
        collection_class=attribute_mapped_collection('local_language'),
    ))
    # Foo.bars_local
    # This is a bit clever; it uses bindparam() to make the join clause
    # modifiable on the fly.  db sessions know the current language and
    # populate the bindparam.
    # The 'dummy' value is to trick SQLA; without it, SQLA thinks this
    # bindparam is just its own auto-generated clause and everything gets
    # fucked up.
    local_relation_name = relation_name + '_local'
    setattr(foreign_class, local_relation_name, relationship(Translations,
        primaryjoin=and_(
            Translations.foreign_id == foreign_class.id,
            Translations.local_language_id == bindparam('_default_language_id',
                value='dummy', type_=Integer, required=True),
        ),
        foreign_keys=[Translations.foreign_id, Translations.local_language_id],
        uselist=False,
        lazy=relation_lazy,
    ))

    # Add per-column proxies to the original class
    for name, column in kwitems:
        getset_factory = None
        string_getter = column.info.get('string_getter')
        if string_getter:
            getset_factory = _getset_factory_factory(
                column.name, string_getter)

        # Class.(column) -- accessor for the default language's value
        setattr(foreign_class, name,
            LocalAssociationProxy(local_relation_name, name,
                    getset_factory=getset_factory))

        # Class.(column)_map -- accessor for the language dict
        # Need a custom creator since Translations doesn't have an init, and
        # these are passed as *args anyway
        # BUG FIX: bind `name` as a default argument.  Previously `creator`
        # closed over the loop variable `name` late-bound, so every
        # `(column)_map` proxy's creator set the *last* column's attribute.
        def creator(language, value, name=name):
            row = Translations()
            row.local_language = language
            setattr(row, name, value)
            return row
        setattr(foreign_class, name + '_map',
            association_proxy(relation_name, name, creator=creator,
                    getset_factory=getset_factory))

    # Add to the list of translation classes
    foreign_class.translation_classes.append(Translations)

    # Done
    return Translations

Example 29

Project: wtforms-sqlalchemy
Source File: tests.py
View license
    def _do_tables(self, mapper, engine):
        """Build the test/pk_test tables, map Test/PKTest onto them, and
        create the schema on *engine*."""
        metadata = MetaData()

        test_table = Table(
            'test', metadata,
            Column('id', sqla_types.Integer, primary_key=True, nullable=False),
            Column('name', sqla_types.String, nullable=False),
        )
        pk_test_table = Table(
            'pk_test', metadata,
            Column('foobar', sqla_types.String, primary_key=True, nullable=False),
            Column('baz', sqla_types.String, nullable=False),
        )

        # Minimal mapped classes; PKTest stringifies via its baz column.
        self.Test = type(str('Test'), (Base, ), {})
        self.PKTest = type(str('PKTest'), (Base, ), {
            '__unicode__': lambda x: x.baz,
            '__str__': lambda x: x.baz,
        })

        mapper(self.Test, test_table, order_by=[test_table.c.name])
        mapper(self.PKTest, pk_test_table, order_by=[pk_test_table.c.baz])

        metadata.create_all(bind=engine)

Example 30

Project: wtforms-sqlalchemy
Source File: tests.py
View license
    def setUp(self):
        """Build an in-memory SQLite schema (Course/School/Student plus a
        student_course many-to-many table) and open a session for tests."""
        Model = declarative_base()

        # Association table backing the Student<->Course many-to-many.
        student_course = Table(
            'student_course', Model.metadata,
            Column('student_id', sqla_types.Integer, ForeignKey('student.id')),
            Column('course_id', sqla_types.Integer, ForeignKey('course.id'))
        )

        class Course(Model):
            __tablename__ = "course"
            id = Column(sqla_types.Integer, primary_key=True)
            name = Column(sqla_types.String(255), nullable=False)
            # These are for better model form testing
            cost = Column(sqla_types.Numeric(5, 2), nullable=False)
            description = Column(sqla_types.Text, nullable=False)
            level = Column(sqla_types.Enum('Primary', 'Secondary'))
            has_prereqs = Column(sqla_types.Boolean, nullable=False)
            started = Column(sqla_types.DateTime, nullable=False)
            grade = Column(AnotherInteger, nullable=False)

        class School(Model):
            __tablename__ = "school"
            id = Column(sqla_types.Integer, primary_key=True)
            name = Column(sqla_types.String(255), nullable=False)

        class Student(Model):
            __tablename__ = "student"
            id = Column(sqla_types.Integer, primary_key=True)
            full_name = Column(sqla_types.String(255), nullable=False, unique=True)
            dob = Column(sqla_types.Date(), nullable=True)
            current_school_id = Column(sqla_types.Integer, ForeignKey(School.id), nullable=False)

            current_school = relationship(School, backref=backref('students'))
            # Many-to-many through student_course; courses expose a lazy
            # 'students' backref for query-side filtering.
            courses = relationship(
                "Course",
                secondary=student_course,
                backref=backref("students", lazy='dynamic')
            )

        # Expose the model classes to the individual test methods.
        self.School = School
        self.Student = Student
        self.Course = Course

        engine = create_engine('sqlite:///:memory:', echo=False)
        Session = sessionmaker(bind=engine)
        self.metadata = Model.metadata
        self.metadata.create_all(bind=engine)
        self.sess = Session()

Example 31

Project: query
Source File: core.py
View license
    def __init__(self, orm_object, db):
        """Wrap either a Table or a column-like ORM object for rich display."""
        self._db = db
        is_table = orm_object.__class__ == sqlalchemy.schema.Table
        self._is_table = is_table
        if is_table:
            self.table = orm_object
            self.column = None

            # Support custom styling of Pandas dataframe by calling .to_html() over _repr_html()
            # incl. not displaying dimensions, for example
            column_rows = [(c.name, c.type, c.primary_key)
                           for c in self.table.columns.values()]
            self._column_df = pd.DataFrame(
                column_rows,
                columns=["Column", "Type", "Primary Key"]
            )
            self._html = df_to_html(self._column_df, ("Column Information for the %s Table"
                                                      % self.table.name))
        else:
            self.table = orm_object.table
            self.column = orm_object
            self._html = ("<em>Inspecting column %s of the %s table. "
                          "Try the .head(), .tail(), and .where() "
                          "methods to further explore.<em>" %
                          (self.column.name, self.table.name))

Example 32

Project: django-sqlalchemy
Source File: base.py
View license
def _as_declarative(cls, classname, dict_):
    """Wire a Django model class up as an SQLAlchemy declarative mapping.

    Collects Column/MapperProperty attributes, converts Django fields to SA
    columns, builds (or reuses) the Table, and finally creates the mapper.
    NOTE(review): uses Python 2 constructs (``iteritems``, ``cmp``-style
    sort) — this block is not Python 3 compatible.
    """
    cls._decl_class_registry[classname] = cls

    # this sets up our_stuff which reads in the attributes and converts
    # them to SA columns, etc...
    our_stuff = util.OrderedDict()

    # just here to handle SA declarative, not needed for Django
    for k in dict_:
        value = dict_[k]
        if (isinstance(value, tuple) and len(value) == 1 and
            isinstance(value[0], (Column, MapperProperty))):
            util.warn("Ignoring declarative-like tuple value of attribute "
                      "%s: possibly a copy-and-paste error with a comma "
                      "left at the end of the line?" % k)
            continue
        if not isinstance(value, (Column, MapperProperty)):
            continue
        prop = _deferred_relation(cls, value)
        our_stuff[k] = prop

    # Django will *always* have set the pk before we get here. Check if
    # it is a Django AutoField so we can override it with our own. This
    # will allow for a custom primary key to just work.
    if isinstance(cls._meta.pk, AutoField):
        # we need to add in the django-sqlalchemy version of the AutoField
        # because the one that Django adds will not work for our purposes.
        auto_field = DSAutoField(verbose_name='ID', primary_key=True, auto_created=True)
        # this might seem redundant but without it the name is not set 
        # for SA
        auto_field.name = "id"
        # Call set_attributes_from_name as it normally only gets called
        # during Django's metaclass.
        auto_field.set_attributes_from_name(auto_field.name)
        # HACK: we need to force the use of our AutoField over Django's
        # AutoField.
        cls._meta.pk = auto_field
        for i, field in enumerate(cls._meta.fields):
            if isinstance(field, AutoField):
                cls._meta.fields[i] = auto_field
    # Convert every Django field (incl. many-to-many) to its SA counterpart.
    for field in cls._meta.fields + cls._meta.many_to_many:
        sa_field = field.create_column()
        # A ManyToManyField will return None for the column as it does
        # not need a column.
        if sa_field is not None:
            # this allows us to build up more complex structures
            if isinstance(sa_field, dict):
                our_stuff.update(sa_field)
            else:
                our_stuff[sa_field.name] = sa_field

    # set up attributes in the order they were created
    our_stuff.sort(lambda x, y: cmp(our_stuff[x]._creation_order,
                                    our_stuff[y]._creation_order))

    table = None
    tablename = cls._meta.db_table

    # this is to support SA's declarative to support declaring a Table
    if '__table__' not in cls.__dict__:
        # this is just to support SA's declarative of allowing the
        # specification of the table name using this syntax
        if '__tablename__' in cls.__dict__:
            tablename = cls.__tablename__

        # SA supports autoloading the model from database, but that will
        # not work for Django. We're leaving this here just for future
        # consideration.
        autoload = cls.__dict__.get('__autoload__')
        if autoload:
            table_kw = {'autoload': True}
        else:
            table_kw = {}

        # this allows us to pick up only the Column types for our table
        # definition.
        cols = []
        for key, c in our_stuff.iteritems():
            if isinstance(c, ColumnProperty):
                for col in c.columns:
                    if isinstance(col, Column) and col.table is None:
                        _undefer_column_name(key, col)
                        cols.append(col)
            elif isinstance(c, Column):
                _undefer_column_name(key, c)
                cols.append(c)
        cls.__table__ = table = Table(tablename, cls.metadata,
                                      *cols, **table_kw)
    else:
        table = cls.__table__

    mapper_args = getattr(cls, '__mapper_args__', {})
    if 'inherits' not in mapper_args:
        # Walk one step up the MRO to detect single/joined table inheritance.
        inherits = cls.__mro__[1]
        inherits = cls._decl_class_registry.get(inherits.__name__, None)
        if inherits:
            mapper_args['inherits'] = inherits
            if not mapper_args.get('concrete', False) and table:
                # figure out the inherit condition with relaxed rules
                # about nonexistent tables, to allow for ForeignKeys to
                # not-yet-defined tables (since we know for sure that our
                # parent table is defined within the same MetaData)
                mapper_args['inherit_condition'] = sql_util.join_condition(
                    inherits.__table__, table,
                    ignore_nonexistent_tables=True)

    # declarative allows you to specify a mapper as well
    if hasattr(cls, '__mapper_cls__'):
        mapper_cls = util.unbound_method_to_callable(cls.__mapper_cls__)
    else:
        mapper_cls = mapper

    cls.__mapper__ = mapper_cls(cls, table, properties=our_stuff,
                                **mapper_args)
    cls.query = session.query_property()

Example 33

Project: kardboard
Source File: ext_sqlalchemy.py
View license
    def _do_tables(self, mapper, engine):
        """Create the test/pk_test tables and their mapped classes."""
        metadata = MetaData()

        test_table = Table('test', metadata,
            Column('id', Integer, primary_key=True, nullable=False),
            Column('name', String, nullable=False),
        )
        pk_test_table = Table('pk_test', metadata,
            Column('foobar', String, primary_key=True, nullable=False),
            Column('baz', String, nullable=False),
        )

        # PKTest stringifies via its baz column for form display.
        Test = type('Test', (Base, ), {})
        PKTest = type('PKTest', (Base, ), {
            '__unicode__': lambda x: x.baz,
            '__str__': lambda x: x.baz,
        })

        mapper(Test, test_table, order_by=[test_table.c.name])
        mapper(PKTest, pk_test_table, order_by=[pk_test_table.c.baz])

        self.Test = Test
        self.PKTest = PKTest

        metadata.create_all(bind=engine)

Example 34

Project: open-event-orga-server
Source File: drop_db.py
View license
def db_drop_everything(db):
    """Drop every table in *db*, breaking foreign-key cycles first.

    Recipe from http://www.sqlalchemy.org/trac/wiki/UsageRecipes/DropEverything:
    reflect all table names and their named FK constraints, drop the
    constraints, then drop the tables, so circular FK dependencies
    cannot block the drops.

    :param db: object exposing an ``engine`` attribute (e.g. Flask-SQLAlchemy)
    """
    conn = db.engine.connect()

    # the transaction only applies if the DB supports
    # transactional DDL, i.e. Postgresql, MS SQL Server
    trans = conn.begin()

    try:
        inspector = reflection.Inspector.from_engine(db.engine)

        # gather all data first before dropping anything.
        # some DBs lock after things have been dropped in
        # a transaction.
        metadata = MetaData()

        tbs = []
        all_fks = []

        for table_name in inspector.get_table_names():
            fks = []
            for fk in inspector.get_foreign_keys(table_name):
                # unnamed constraints cannot be dropped individually
                if not fk['name']:
                    continue
                fks.append(
                    ForeignKeyConstraint((), (), name=fk['name'])
                )
            t = Table(table_name, metadata, *fks)
            tbs.append(t)
            all_fks.extend(fks)

        # drop FK constraints first so table drop order does not matter
        for fkc in all_fks:
            conn.execute(DropConstraint(fkc))

        for table in tbs:
            conn.execute(DropTable(table))

        trans.commit()
    except Exception:
        # BUG FIX: roll back instead of leaving a dangling transaction
        trans.rollback()
        raise
    finally:
        # BUG FIX: the connection was previously leaked on every call
        conn.close()

Example 35

Project: depsy
Source File: utils.py
View license
def drop_everything(db, app):
    """Drop every table from the database configured in *app*.

    Reflects all table names and their named foreign-key constraints,
    drops the constraints first (so circular FK dependencies cannot
    block the drops), then drops each table.

    :param db: object exposing a ``session()`` factory used to execute DDL
    :param app: config source providing ``SQLALCHEMY_DATABASE_URI``

    NOTE(review): the session obtained from ``db.session()`` is never
    closed here and the DDL runs without an explicit ``begin()`` —
    confirm callers clean up the session afterwards.
    """
    from sqlalchemy.engine import reflection
    from sqlalchemy import create_engine
    from sqlalchemy.schema import (
        MetaData,
        Table,
        DropTable,
        ForeignKeyConstraint,
        DropConstraint,
        )

    conn = db.session()

    engine = create_engine(app.config['SQLALCHEMY_DATABASE_URI'])
    inspector = reflection.Inspector.from_engine(engine)

    # gather all data first before dropping anything.
    # some DBs lock after things have been dropped in
    # a transaction.

    metadata = MetaData()

    tbs = []
    all_fks = []

    for table_name in inspector.get_table_names():
        fks = []
        for fk in inspector.get_foreign_keys(table_name):
            # unnamed constraints cannot be dropped individually
            if not fk['name']:
                continue
            fks.append(
                ForeignKeyConstraint((),(),name=fk['name'])
                )
        t = Table(table_name,metadata,*fks)
        tbs.append(t)
        all_fks.extend(fks)

    # drop FK constraints first so table drop order does not matter
    for fkc in all_fks:
        conn.execute(DropConstraint(fkc))

    for table in tbs:
        conn.execute(DropTable(table))

    db.session.commit()

Example 36

Project: total-impact-core
Source File: utils.py
View license
def drop_everything(db, app):
    """Drop the ``min_biblio`` view and every table in the configured DB.

    Named foreign-key constraints are dropped first so circular table
    dependencies cannot block the subsequent table drops.

    :param db: object exposing a ``session()`` factory used to execute DDL
    :param app: config source providing ``SQLALCHEMY_DATABASE_URI``
    """
    from sqlalchemy.engine import reflection
    from sqlalchemy import create_engine
    from sqlalchemy.schema import (
        MetaData,
        Table,
        DropTable,
        ForeignKeyConstraint,
        DropConstraint,
        )

    session = db.session()

    engine = create_engine(app.config['SQLALCHEMY_DATABASE_URI'])
    inspector = reflection.Inspector.from_engine(engine)

    # Collect all table/constraint info up front; some backends lock
    # once drops start happening inside a transaction.
    scratch_metadata = MetaData()

    tables = []
    fk_constraints = []

    for name in inspector.get_table_names():
        named_fks = [
            ForeignKeyConstraint((), (), name=fk['name'])
            for fk in inspector.get_foreign_keys(name)
            if fk['name']
        ]
        tables.append(Table(name, scratch_metadata, *named_fks))
        fk_constraints.extend(named_fks)

    session.execute("""drop view if exists min_biblio""")

    # Constraints first, then tables.
    for constraint in fk_constraints:
        session.execute(DropConstraint(constraint))

    for tbl in tables:
        session.execute(DropTable(tbl))

    db.session.commit()

Example 37

Project: total-impact-webapp
Source File: utils.py
View license
def drop_everything(db, app):
    """Drop the ``min_biblio`` view and every table in the configured DB.

    Reflects all table names and their named foreign-key constraints,
    drops the constraints first (so circular FK dependencies cannot
    block the drops), then drops each table.

    :param db: object exposing a ``session()`` factory used to execute DDL
    :param app: config source providing ``SQLALCHEMY_DATABASE_URI``

    NOTE(review): the session obtained from ``db.session()`` is never
    closed here — confirm callers clean up the session afterwards.
    """
    from sqlalchemy.engine import reflection
    from sqlalchemy import create_engine
    from sqlalchemy.schema import (
        MetaData,
        Table,
        DropTable,
        ForeignKeyConstraint,
        DropConstraint,
        )

    conn = db.session()

    engine = create_engine(app.config['SQLALCHEMY_DATABASE_URI'])
    inspector = reflection.Inspector.from_engine(engine)

    # gather all data first before dropping anything.
    # some DBs lock after things have been dropped in
    # a transaction.

    metadata = MetaData()

    tbs = []
    all_fks = []

    for table_name in inspector.get_table_names():
        fks = []
        for fk in inspector.get_foreign_keys(table_name):
            # unnamed constraints cannot be dropped individually
            if not fk['name']:
                continue
            fks.append(
                ForeignKeyConstraint((),(),name=fk['name'])
                )
        t = Table(table_name,metadata,*fks)
        tbs.append(t)
        all_fks.extend(fks)

    conn.execute("""drop view if exists min_biblio""")

    # drop FK constraints first so table drop order does not matter
    for fkc in all_fks:
        conn.execute(DropConstraint(fkc))

    for table in tbs:
        conn.execute(DropTable(table))

    db.session.commit()

Example 38

Project: amivapi
Source File: bootstrap.py
View license
def clear_database(engine):
    """ Clears all the tables from the database
    To do this first all ForeignKey constraints are removed,
    then all tables are dropped.

    Code is from
    https://bitbucket.org/zzzeek/sqlalchemy/wiki/UsageRecipes/DropEverything

    :param engine: SQLalchemy engine to use
    """

    conn = engine.connect()

    # the transaction only applies if the DB supports
    # transactional DDL, i.e. Postgresql, MS SQL Server
    trans = conn.begin()

    try:
        inspector = reflection.Inspector.from_engine(engine)

        # gather all data first before dropping anything.
        # some DBs lock after things have been dropped in
        # a transaction.

        metadata = MetaData()

        tbs = []
        all_fks = []

        for table_name in inspector.get_table_names():
            fks = []
            for fk in inspector.get_foreign_keys(table_name):
                # unnamed constraints cannot be dropped individually
                if not fk['name']:
                    continue
                fks.append(
                    ForeignKeyConstraint((), (), name=fk['name'])
                )
            t = Table(table_name, metadata, *fks)
            tbs.append(t)
            all_fks.extend(fks)

        # drop FK constraints first so table drop order does not matter
        for fkc in all_fks:
            conn.execute(DropConstraint(fkc))

        for table in tbs:
            conn.execute(DropTable(table))

        trans.commit()
    except Exception:
        # BUG FIX: roll back on failure instead of leaving the
        # transaction dangling
        trans.rollback()
        raise
    finally:
        # BUG FIX: the connection was previously never closed (leak)
        conn.close()

Example 39

Project: JARR
Source File: __init__.py
View license
def db_empty(db):
    """Drop all data stored in *db*.

    Recipe from http://www.sqlalchemy.org/trac/wiki/UsageRecipes/DropEverything:
    drop named foreign-key constraints first, then every table, so that
    circular FK dependencies cannot block the drops.
    """
    connection = db.engine.connect()

    # Transactional DDL only takes effect on backends that support it
    # (PostgreSQL, MS SQL Server).
    transaction = connection.begin()

    inspector = reflection.Inspector.from_engine(db.engine)

    # Reflect everything up front: some databases lock as soon as drops
    # start happening inside a transaction.
    scratch = MetaData()

    tables = []
    fk_constraints = []

    for name in inspector.get_table_names():
        named = [
            ForeignKeyConstraint((), (), name=fk['name'])
            for fk in inspector.get_foreign_keys(name)
            if fk['name']
        ]
        tables.append(Table(name, scratch, *named))
        fk_constraints.extend(named)

    # Constraints first so circular FK references cannot block the drops.
    for constraint in fk_constraints:
        connection.execute(DropConstraint(constraint))

    for tbl in tables:
        connection.execute(DropTable(tbl))

    transaction.commit()

Example 40

View license
def apply():
    """
    Triggers the patch execution.

    Removes the legacy ``migrations`` directory and drops the migrate
    version tables from the application's SQLite database.

    :return: Definition success.
    :rtype: bool
    """

    database_directory = os.path.join(RuntimeGlobals.user_application_data_directory, Constants.database_directory)

    migrations_directory = os.path.join(database_directory, "migrations")
    if foundations.common.path_exists(migrations_directory):
        foundations.io.remove(migrations_directory)

    database_file = os.path.join(database_directory, Constants.database_file)
    engine = create_engine("sqlite:///{0}".format(database_file))
    connection = engine.connect()
    transaction = connection.begin()
    try:
        inspector = reflection.Inspector.from_engine(engine)
        metadata = MetaData()
        # NOTE(review): the second name below looks like an unrendered
        # template placeholder rather than a real table name — confirm
        # the intended migrate version-table name.
        for name in inspector.get_table_names():
            if name in ("Migrate", "{{ locals().pop('version_table') }}"):
                connection.execute(DropTable(Table(name, metadata)))
        transaction.commit()
    except Exception:
        # BUG FIX: roll back on failure instead of leaving the
        # transaction dangling
        transaction.rollback()
        raise
    finally:
        # BUG FIX: the connection was previously never closed (leak)
        connection.close()

    return True

Example 41

View license
def apply():
    """
    Triggers the patch execution.

    Removes the legacy ``migrations`` directory and drops the migrate
    version tables from the application's SQLite database.

    :return: Definition success.
    :rtype: bool
    """

    database_directory = os.path.join(RuntimeGlobals.user_application_data_directory, Constants.database_directory)

    # Remove any leftover sqlalchemy-migrate "migrations" directory.
    migrations_directory = os.path.join(database_directory, "migrations")
    if foundations.common.path_exists(migrations_directory):
        foundations.io.remove(migrations_directory)

    database_file = os.path.join(database_directory, Constants.database_file)
    engine = create_engine("sqlite:///{0}".format(database_file))
    connection = engine.connect()
    transaction = connection.begin()
    inspector = reflection.Inspector.from_engine(engine)
    metadata = MetaData()
    for name in inspector.get_table_names():
        # NOTE(review): the second entry looks like an unrendered template
        # placeholder rather than an actual table name — confirm the
        # intended version-table name.  Also the connection is never
        # closed here — verify callers do not rely on it being released.
        if name in ("Migrate", "{{ locals().pop('version_table') }}"):
            connection.execute(DropTable(Table(name, metadata)))
    transaction.commit()

    return True

Example 42

Project: sqlalchemy-utils
Source File: foreign_keys.py
View license
def non_indexed_foreign_keys(metadata, engine=None):
    """Return the foreign keys of *metadata*'s tables that lack an index.

    Very useful for optimizing postgresql databases: spots the foreign
    keys that need indexes.

    :param metadata: MetaData object to inspect tables from
    :param engine: engine used for reflection when *metadata* is unbound
    :return: dict mapping table name -> list of un-indexed FK constraints
    """
    reflected = MetaData()

    if metadata.bind is None and engine is None:
        raise Exception(
            'Either pass a metadata object with bind or '
            'pass engine as a second parameter'
        )

    bind = metadata.bind or engine
    found = defaultdict(list)

    for name in metadata.tables.keys():
        # Reflect the live table so reflected constraints/indexes are seen.
        live_table = Table(
            name,
            reflected,
            autoload=True,
            autoload_with=bind
        )

        fk_constraints = (
            c for c in live_table.constraints
            if isinstance(c, ForeignKeyConstraint)
        )
        for constraint in fk_constraints:
            if not has_index(constraint):
                found[live_table.name].append(constraint)

    return dict(found)

Example 43

Project: sqlalchemy-utils
Source File: foreign_keys.py
View license
def non_indexed_foreign_keys(metadata, engine=None):
    """
    Finds all non indexed foreign keys from all tables of given MetaData.

    Very useful for optimizing postgresql database and finding out which
    foreign keys need indexes.

    :param metadata: MetaData object to inspect tables from
    :param engine: engine used for reflection when *metadata* is unbound
    :return: dict mapping table name to a list of its un-indexed
        ``ForeignKeyConstraint`` objects
    :raises Exception: if neither ``metadata.bind`` nor *engine* is set
    """
    # Tables are reflected onto a scratch MetaData so reflected indexes
    # and constraints are visible.
    reflected_metadata = MetaData()

    if metadata.bind is None and engine is None:
        raise Exception(
            'Either pass a metadata object with bind or '
            'pass engine as a second parameter'
        )

    constraints = defaultdict(list)

    for table_name in metadata.tables.keys():
        table = Table(
            table_name,
            reflected_metadata,
            autoload=True,
            autoload_with=metadata.bind or engine
        )

        for constraint in table.constraints:
            if not isinstance(constraint, ForeignKeyConstraint):
                continue

            # has_index presumably checks for an index covering the FK
            # columns — confirm against its definition.
            if not has_index(constraint):
                constraints[table.name].append(constraint)

    return dict(constraints)

Example 44

Project: wtforms-alchemy
Source File: utils.py
View license
def table(model):
    """Return the SQLAlchemy ``Table`` behind *model*.

    Accepts either a ``Table`` (returned unchanged) or a declarative
    mapped class, whose ``__table__`` is returned.
    """
    if not isinstance(model, sa.schema.Table):
        return model.__table__
    return model

Example 45

Project: wtforms-alchemy
Source File: utils.py
View license
def table(model):
    """Return the SQLAlchemy ``Table`` behind *model*.

    :param model: either a ``Table`` (returned as-is) or a declarative
        mapped class whose ``__table__`` attribute is returned.
    """
    if isinstance(model, sa.schema.Table):
        return model
    else:
        return model.__table__

Example 46

Project: adhocracy
Source File: drop_data.py
View license
def main():
    """Drop all tables from the configured database.

    Asks for interactive confirmation unless ``-f`` is given, then
    applies the SQLAlchemy "DropEverything" recipe: drop all named
    foreign-key constraints first, then every table.  (Python 2 code:
    uses ``raw_input`` and the ``print`` statement.)
    """
    parser = create_parser(description=__doc__, use_instance=False)
    parser.add_argument('-f', dest='force', default=False, action='store_true',
                        help="force deletion without asking for confirmation")
    args = parser.parse_args()

    if not args.force:
        # NOTE: shadows the `input` builtin (harmless here, py2 style).
        input = raw_input('Delete all data? No backup will be done! '
                          'If so type "yes": ')
        if input != 'yes':
            print 'Answer not "yes", but: "%s"\nAborting.' % input
            exit(1)

    config = config_from_args(args)
    engine = get_engine(config, echo=True)
    conn = engine.connect()

    # the transaction only applies if the DB supports
    # transactional DDL, i.e. Postgresql, MS SQL Server
    trans = conn.begin()

    inspector = reflection.Inspector.from_engine(engine)

    # gather all data first before dropping anything.
    # some DBs lock after things have been dropped in
    # a transaction.

    metadata = MetaData()

    tbs = []
    all_fks = []

    for table_name in inspector.get_table_names():
        fks = []
        for fk in inspector.get_foreign_keys(table_name):
            # unnamed constraints cannot be dropped individually
            if not fk['name']:
                continue
            fks.append(
                ForeignKeyConstraint((), (), name=fk['name'])
                )
        t = Table(table_name, metadata, *fks)
        tbs.append(t)
        all_fks.extend(fks)

    # drop FK constraints first so table drop order does not matter
    for fkc in all_fks:
        conn.execute(DropConstraint(fkc))

    for table in tbs:
        conn.execute(DropTable(table))

    trans.commit()

Example 47

Project: adhocracy
Source File: drop_data.py
View license
def main():
    parser = create_parser(description=__doc__, use_instance=False)
    parser.add_argument('-f', dest='force', default=False, action='store_true',
                        help="force deletion without asking for confirmation")
    args = parser.parse_args()

    if not args.force:
        input = raw_input('Delete all data? No backup will be done! '
                          'If so type "yes": ')
        if input != 'yes':
            print 'Answer not "yes", but: "%s"\nAborting.' % input
            exit(1)

    config = config_from_args(args)
    engine = get_engine(config, echo=True)
    conn = engine.connect()

    # the transaction only applies if the DB supports
    # transactional DDL, i.e. Postgresql, MS SQL Server
    trans = conn.begin()

    inspector = reflection.Inspector.from_engine(engine)

    # gather all data first before dropping anything.
    # some DBs lock after things have been dropped in
    # a transaction.

    metadata = MetaData()

    tbs = []
    all_fks = []

    for table_name in inspector.get_table_names():
        fks = []
        for fk in inspector.get_foreign_keys(table_name):
            if not fk['name']:
                continue
            fks.append(
                ForeignKeyConstraint((), (), name=fk['name'])
                )
        t = Table(table_name, metadata, *fks)
        tbs.append(t)
        all_fks.extend(fks)

    for fkc in all_fks:
        conn.execute(DropConstraint(fkc))

    for table in tbs:
        conn.execute(DropTable(table))

    trans.commit()

Example 48

Project: newspipe
Source File: __init__.py
View license
def db_empty(db):
    """Drop all data stored in *db*.

    Recipe from http://www.sqlalchemy.org/trac/wiki/UsageRecipes/DropEverything:
    drop named foreign-key constraints first, then every table, so that
    circular FK dependencies cannot block the drops.
    """
    conn = db.engine.connect()

    # the transaction only applies if the DB supports
    # transactional DDL, i.e. Postgresql, MS SQL Server
    trans = conn.begin()

    inspector = reflection.Inspector.from_engine(db.engine)

    # gather all data first before dropping anything.
    # some DBs lock after things have been dropped in
    # a transaction.
    metadata = MetaData()

    tbs = []
    all_fks = []

    for table_name in inspector.get_table_names():
        fks = []
        for fk in inspector.get_foreign_keys(table_name):
            # unnamed constraints cannot be dropped individually
            if not fk['name']:
                continue
            fks.append(ForeignKeyConstraint((), (), name=fk['name']))
        t = Table(table_name, metadata, *fks)
        tbs.append(t)
        all_fks.extend(fks)

    # drop FK constraints first so table drop order does not matter
    for fkc in all_fks:
        conn.execute(DropConstraint(fkc))

    for table in tbs:
        conn.execute(DropTable(table))

    trans.commit()

Example 49

Project: newspipe
Source File: __init__.py
View license
def db_empty(db):
    """Drop all data stored in *db*.

    Recipe from http://www.sqlalchemy.org/trac/wiki/UsageRecipes/DropEverything:
    drop named foreign-key constraints first, then every table, so that
    circular FK dependencies cannot block the drops.
    """
    conn = db.engine.connect()

    # the transaction only applies if the DB supports
    # transactional DDL, i.e. Postgresql, MS SQL Server
    trans = conn.begin()

    inspector = reflection.Inspector.from_engine(db.engine)

    # gather all data first before dropping anything.
    # some DBs lock after things have been dropped in
    # a transaction.
    metadata = MetaData()

    tbs = []
    all_fks = []

    for table_name in inspector.get_table_names():
        fks = []
        for fk in inspector.get_foreign_keys(table_name):
            # unnamed constraints cannot be dropped individually
            if not fk['name']:
                continue
            fks.append(ForeignKeyConstraint((), (), name=fk['name']))
        t = Table(table_name, metadata, *fks)
        tbs.append(t)
        all_fks.extend(fks)

    # drop FK constraints first so table drop order does not matter
    for fkc in all_fks:
        conn.execute(DropConstraint(fkc))

    for table in tbs:
        conn.execute(DropTable(table))

    trans.commit()

Example 50

Project: sync-engine
Source File: db.py
View license
def drop_everything(engine, keep_tables=None, reset_columns=None):
    """ Drops all tables in the db unless their name is in `keep_tables`.
        `reset_columns` is used to specify the columns that should be reset to
        default value in the tables that we're keeping -
        provided as a dict of table_name: list_of_column_names.
    """
    # BUG FIX: the signature previously used mutable defaults
    # (keep_tables=[], reset_columns={}), which are shared across calls;
    # None sentinels are backward-compatible and safe.
    if keep_tables is None:
        keep_tables = []
    if reset_columns is None:
        reset_columns = {}

    conn = engine.connect()
    trans = conn.begin()

    inspector = reflection.Inspector.from_engine(engine)

    # gather all data first before dropping anything.
    # some DBs lock after things have been dropped in
    # a transaction.

    metadata = MetaData()

    tbs = []
    all_fks = []

    for table_name in inspector.get_table_names():
        if table_name in keep_tables:
            # Reset certain columns in certain tables we're keeping
            if table_name in reset_columns:
                column_names = reset_columns[table_name]
                for c in inspector.get_columns(table_name):
                    if c['name'] in column_names:
                        # only columns with a server default can be reset
                        assert c['default']

                        q = "UPDATE {0} SET {1}={2};".\
                            format(table_name, c['name'], c['default'])
                        conn.execute(q)
            continue

        fks = []
        for fk in inspector.get_foreign_keys(table_name):
            # unnamed constraints cannot be dropped individually
            if not fk['name']:
                continue
            fks.append(ForeignKeyConstraint((), (), name=fk['name']))
        t = Table(table_name, metadata, *fks)
        tbs.append(t)
        all_fks.extend(fks)

    # drop FK constraints first so table drop order does not matter
    for fkc in all_fks:
        conn.execute(DropConstraint(fkc))

    for table in tbs:
        conn.execute(DropTable(table))

    trans.commit()