sqlalchemy.sa_schema.DefaultClause

Here are examples of the Python API sqlalchemy.sa_schema.DefaultClause taken from open source projects. In these projects, sa_schema is simply an alias for the sqlalchemy.schema module (e.g. from sqlalchemy import schema as sa_schema), so the class in question is sqlalchemy.schema.DefaultClause.

5 Examples
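
Before diving into the examples, here is a minimal standalone sketch (my own illustration, not taken from any of the projects below; the table and column names are made up) of what DefaultClause represents: it wraps a column's server-side default, and its .arg attribute holds either a plain string or a SQL expression. The rendering and reflection code in the examples branches on exactly that distinction.

from sqlalchemy import Column, Integer, MetaData, Table, text
from sqlalchemy.schema import DefaultClause

metadata = MetaData()
item = Table(
    "item", metadata,
    Column("id", Integer, primary_key=True),
    # equivalent to server_default="0"; .arg is the plain string "0"
    Column("qty", Integer, DefaultClause("0")),
    # here .arg is a SQL expression (a text() construct)
    Column("created", Integer, DefaultClause(text("strftime('%s','now')"))),
)

print(type(item.c.qty.server_default).__name__)   # DefaultClause
print(item.c.qty.server_default.arg)              # 0
print(item.c.created.server_default.arg)          # strftime('%s','now')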

Example 1

Project: alembic Source File: render.py
Function: render_server_default
def _render_server_default(default, autogen_context, repr_=True):
    rendered = _user_defined_render("server_default", default, autogen_context)
    if rendered is not False:
        return rendered

    if isinstance(default, sa_schema.DefaultClause):
        if isinstance(default.arg, compat.string_types):
            default = default.arg
        else:
            return _render_potential_expr(default.arg, autogen_context)

    if isinstance(default, string_types) and repr_:
        default = repr(re.sub(r"^'|'$", "", default))

    return default
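
To make the string-versus-expression branch above concrete, here is a small self-contained sketch (not alembic's API; render_default_sketch is a hypothetical name) that mirrors how a DefaultClause argument ends up rendered:

import re

from sqlalchemy import text
from sqlalchemy.schema import DefaultClause


def render_default_sketch(default, dialect=None):
    # string .arg: strip surrounding quotes, then render as a Python literal;
    # anything else: treat it as a SQL expression and compile it
    if isinstance(default, DefaultClause):
        if isinstance(default.arg, str):
            return repr(re.sub(r"^'|'$", "", default.arg))
        return str(default.arg.compile(dialect=dialect))
    return default


print(render_default_sketch(DefaultClause("'draft'")))      # 'draft'
print(render_default_sketch(DefaultClause(text("now()"))))  # now()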

Example 2

Project: kokoropy Source File: render.py
Function: render_server_default
def _render_server_default(default, autogen_context):
    rendered = _user_defined_render("server_default", default, autogen_context)
    if rendered is not False:
        return rendered

    if isinstance(default, sa_schema.DefaultClause):
        if isinstance(default.arg, string_types):
            default = default.arg
        else:
            default = str(default.arg.compile(
                            dialect=autogen_context['dialect']))
    if isinstance(default, string_types):
        # TODO: this is just a hack to get
        # tests to pass until we figure out
        # WTF sqlite is doing
        default = re.sub(r"^'|'$", "", default)
        return repr(default)
    else:
        return None

Example 3

Project: alembic Source File: compare.py
def _render_server_default_for_compare(metadata_default,
                                       metadata_col, autogen_context):
    rendered = _user_defined_render(
        "server_default", metadata_default, autogen_context)
    if rendered is not False:
        return rendered

    if isinstance(metadata_default, sa_schema.DefaultClause):
        if isinstance(metadata_default.arg, compat.string_types):
            metadata_default = metadata_default.arg
        else:
            metadata_default = str(metadata_default.arg.compile(
                dialect=autogen_context.dialect))
    if isinstance(metadata_default, compat.string_types):
        if metadata_col.type._type_affinity is sqltypes.String:
            metadata_default = re.sub(r"^'|'$", "", metadata_default)
            return repr(metadata_default)
        else:
            return metadata_default
    else:
        return None

Example 4

Project: CouchPotatoV1 Source File: reflection.py
    def reflecttable(self, table, include_columns):
        """Given a Table object, load its internal constructs based on introspection.
        
        This is the underlying method used by most dialects to produce 
        table reflection.  Direct usage is like::
        
            from sqlalchemy import create_engine, MetaData, Table
            from sqlalchemy.engine import reflection
            
            engine = create_engine('...')
            meta = MetaData()
            user_table = Table('user', meta)
            insp = Inspector.from_engine(engine)
            insp.reflecttable(user_table, None)
            
        :param table: a :class:`~sqlalchemy.schema.Table` instance.
        :param include_columns: a list of string column names to include
          in the reflection process.  If ``None``, all columns are reflected.
            
        """
        dialect = self.bind.dialect

        # MySQL dialect does this.  Applicable with other dialects?
        if hasattr(dialect, '_connection_charset') \
                                        and hasattr(dialect, '_adjust_casing'):
            charset = dialect._connection_charset
            dialect._adjust_casing(table)

        # table attributes we might need.
        reflection_options = dict(
            (k, table.kwargs.get(k)) for k in dialect.reflection_options if k in table.kwargs)

        schema = table.schema
        table_name = table.name

        # apply table options
        tbl_opts = self.get_table_options(table_name, schema, **table.kwargs)
        if tbl_opts:
            table.kwargs.update(tbl_opts)

        # table.kwargs will need to be passed to each reflection method.  Make
        # sure keywords are strings.
        tblkw = table.kwargs.copy()
        for (k, v) in tblkw.items():
            del tblkw[k]
            tblkw[str(k)] = v

        # Py2K
        if isinstance(schema, str):
            schema = schema.decode(dialect.encoding)
        if isinstance(table_name, str):
            table_name = table_name.decode(dialect.encoding)
        # end Py2K

        # columns
        found_table = False
        for col_d in self.get_columns(table_name, schema, **tblkw):
            found_table = True
            name = col_d['name']
            if include_columns and name not in include_columns:
                continue

            coltype = col_d['type']
            col_kw = {
                'nullable':col_d['nullable'],
            }
            if 'autoincrement' in col_d:
                col_kw['autoincrement'] = col_d['autoincrement']
            if 'quote' in col_d:
                col_kw['quote'] = col_d['quote']
                
            colargs = []
            if col_d.get('default') is not None:
                # the "default" value is assumed to be a literal SQL expression,
                # so is wrapped in text() so that no quoting occurs on re-issuance.
                colargs.append(sa_schema.DefaultClause(sql.text(col_d['default'])))
                
            if 'sequence' in col_d:
                # TODO: mssql, maxdb and sybase are using this.
                seq = col_d['sequence']
                sequence = sa_schema.Sequence(seq['name'], 1, 1)
                if 'start' in seq:
                    sequence.start = seq['start']
                if 'increment' in seq:
                    sequence.increment = seq['increment']
                colargs.append(sequence)
                
            col = sa_schema.Column(name, coltype, *colargs, **col_kw)
            table.append_column(col)

        if not found_table:
            raise exc.NoSuchTableError(table.name)

        # Primary keys
        pk_cons = self.get_pk_constraint(table_name, schema, **tblkw)
        if pk_cons:
            primary_key_constraint = sa_schema.PrimaryKeyConstraint(name=pk_cons.get('name'), 
                *[table.c[pk] for pk in pk_cons['constrained_columns']
                if pk in table.c]
            )

            table.append_constraint(primary_key_constraint)

        # Foreign keys
        fkeys = self.get_foreign_keys(table_name, schema, **tblkw)
        for fkey_d in fkeys:
            conname = fkey_d['name']
            constrained_columns = fkey_d['constrained_columns']
            referred_schema = fkey_d['referred_schema']
            referred_table = fkey_d['referred_table']
            referred_columns = fkey_d['referred_columns']
            refspec = []
            if referred_schema is not None:
                sa_schema.Table(referred_table, table.metadata,
                                autoload=True, schema=referred_schema,
                                autoload_with=self.bind,
                                **reflection_options
                                )
                for column in referred_columns:
                    refspec.append(".".join(
                        [referred_schema, referred_table, column]))
            else:
                sa_schema.Table(referred_table, table.metadata, autoload=True,
                                autoload_with=self.bind,
                                **reflection_options
                                )
                for column in referred_columns:
                    refspec.append(".".join([referred_table, column]))
            table.append_constraint(
                sa_schema.ForeignKeyConstraint(constrained_columns, refspec,
                                               conname, link_to_name=True))
        # Indexes 
        indexes = self.get_indexes(table_name, schema)
        for index_d in indexes:
            name = index_d['name']
            columns = index_d['column_names']
            unique = index_d['unique']
            flavor = index_d.get('type', 'unknown type')
            if include_columns and \
                            not set(columns).issubset(include_columns):
                util.warn(
                    "Omitting %s KEY for (%s), key covers omitted columns." %
                    (flavor, ', '.join(columns)))
                continue
            sa_schema.Index(name, *[table.columns[c] for c in columns], 
                         **dict(unique=unique))
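
To see what this reflection path attaches to a column in practice, here is a small sketch of my own using an in-memory SQLite database and the modern autoload_with reflection API (rather than the older insp.reflecttable call shown above): the reflected default arrives on the column as a DefaultClause wrapping a text() construct, just as the colargs block builds it.

from sqlalchemy import MetaData, Table, create_engine, text

engine = create_engine("sqlite://")
with engine.begin() as conn:
    conn.execute(text(
        "CREATE TABLE item (id INTEGER PRIMARY KEY, qty INTEGER DEFAULT 0)"
    ))

metadata = MetaData()
item = Table("item", metadata, autoload_with=engine)  # reflect the table

default = item.c.qty.server_default
print(type(default).__name__)   # DefaultClause
print(default.arg)              # the reflected default text, e.g. 0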

Example 5

Project: maraschino Source File: reflection.py
    def reflecttable(self, table, include_columns):
        """Given a Table object, load its internal constructs based on introspection.

        This is the underlying method used by most dialects to produce 
        table reflection.  Direct usage is like::

            from sqlalchemy import create_engine, MetaData, Table
            from sqlalchemy.engine import reflection

            engine = create_engine('...')
            meta = MetaData()
            user_table = Table('user', meta)
            insp = Inspector.from_engine(engine)
            insp.reflecttable(user_table, None)

        :param table: a :class:`~sqlalchemy.schema.Table` instance.
        :param include_columns: a list of string column names to include
          in the reflection process.  If ``None``, all columns are reflected.

        """
        dialect = self.bind.dialect

        # table attributes we might need.
        reflection_options = dict(
            (k, table.kwargs.get(k)) for k in dialect.reflection_options if k in table.kwargs)

        schema = table.schema
        table_name = table.name

        # apply table options
        tbl_opts = self.get_table_options(table_name, schema, **table.kwargs)
        if tbl_opts:
            table.kwargs.update(tbl_opts)

        # table.kwargs will need to be passed to each reflection method.  Make
        # sure keywords are strings.
        tblkw = table.kwargs.copy()
        for (k, v) in tblkw.items():
            del tblkw[k]
            tblkw[str(k)] = v

        # Py2K
        if isinstance(schema, str):
            schema = schema.decode(dialect.encoding)
        if isinstance(table_name, str):
            table_name = table_name.decode(dialect.encoding)
        # end Py2K

        # columns
        found_table = False
        for col_d in self.get_columns(table_name, schema, **tblkw):
            found_table = True
            table.dispatch.column_reflect(table, col_d)

            name = col_d['name']
            if include_columns and name not in include_columns:
                continue

            coltype = col_d['type']
            col_kw = {
                'nullable':col_d['nullable'],
            }
            for k in ('autoincrement', 'quote', 'info', 'key'):
                if k in col_d:
                    col_kw[k] = col_d[k]

            colargs = []
            if col_d.get('default') is not None:
                # the "default" value is assumed to be a literal SQL expression,
                # so is wrapped in text() so that no quoting occurs on re-issuance.
                colargs.append(
                    sa_schema.DefaultClause(
                        sql.text(col_d['default']), _reflected=True
                    )
                )

            if 'sequence' in col_d:
                # TODO: mssql, maxdb and sybase are using this.
                seq = col_d['sequence']
                sequence = sa_schema.Sequence(seq['name'], 1, 1)
                if 'start' in seq:
                    sequence.start = seq['start']
                if 'increment' in seq:
                    sequence.increment = seq['increment']
                colargs.append(sequence)

            col = sa_schema.Column(name, coltype, *colargs, **col_kw)
            table.append_column(col)

        if not found_table:
            raise exc.NoSuchTableError(table.name)

        # Primary keys
        pk_cons = self.get_pk_constraint(table_name, schema, **tblkw)
        if pk_cons:
            primary_key_constraint = sa_schema.PrimaryKeyConstraint(name=pk_cons.get('name'), 
                *[table.c[pk] for pk in pk_cons['constrained_columns']
                if pk in table.c]
            )

            table.append_constraint(primary_key_constraint)

        # Foreign keys
        fkeys = self.get_foreign_keys(table_name, schema, **tblkw)
        for fkey_d in fkeys:
            conname = fkey_d['name']
            constrained_columns = fkey_d['constrained_columns']
            referred_schema = fkey_d['referred_schema']
            referred_table = fkey_d['referred_table']
            referred_columns = fkey_d['referred_columns']
            refspec = []
            if referred_schema is not None:
                sa_schema.Table(referred_table, table.metadata,
                                autoload=True, schema=referred_schema,
                                autoload_with=self.bind,
                                **reflection_options
                                )
                for column in referred_columns:
                    refspec.append(".".join(
                        [referred_schema, referred_table, column]))
            else:
                sa_schema.Table(referred_table, table.metadata, autoload=True,
                                autoload_with=self.bind,
                                **reflection_options
                                )
                for column in referred_columns:
                    refspec.append(".".join([referred_table, column]))
            table.append_constraint(
                sa_schema.ForeignKeyConstraint(constrained_columns, refspec,
                                               conname, link_to_name=True))
        # Indexes 
        indexes = self.get_indexes(table_name, schema)
        for index_d in indexes:
            name = index_d['name']
            columns = index_d['column_names']
            unique = index_d['unique']
            flavor = index_d.get('type', 'unknown type')
            if include_columns and \
                            not set(columns).issubset(include_columns):
                util.warn(
                    "Omitting %s KEY for (%s), key covers omitted columns." %
                    (flavor, ', '.join(columns)))
                continue
            sa_schema.Index(name, *[table.columns[c] for c in columns], 
                         **dict(unique=unique))
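
One difference from Example 4 is the table.dispatch.column_reflect(table, col_d) call, which is the hook behind SQLAlchemy's "column_reflect" event. Below is a small sketch of my own showing how a listener can adjust the reflected default before it is wrapped in DefaultClause; note that the modern event signature carries an extra inspector argument compared with the two-argument dispatch call in this older excerpt.

from sqlalchemy import Table, event


@event.listens_for(Table, "column_reflect")
def normalize_reflected_default(inspector, table, column_info):
    # column_info is the col_d dict seen above; its "default" entry is the raw
    # server-default text that reflection later wraps in
    # sa_schema.DefaultClause(sql.text(...), _reflected=True)
    default = column_info.get("default")
    if isinstance(default, str):
        column_info["default"] = default.strip()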