sqlalchemy.Table

Here are the examples of the python api sqlalchemy.Table taken from open source projects. By voting up you can indicate which examples are most useful and appropriate.

1906 Examples

3 Source : db.py
with MIT License
from Aidbox

def create_table(table_name):
    """Return the standard Aidbox resource table registered under ``table_metadata``."""
    columns = [
        Column("id", Text, primary_key=True),
        Column("txid", BigInteger, nullable=False),
        # creation/update timestamps default to the server clock
        Column("ts", DateTime(True), server_default=text("CURRENT_TIMESTAMP")),
        Column("cts", DateTime(True), server_default=text("CURRENT_TIMESTAMP")),
        Column("resource_type", Text, server_default=text("'App'::text")),
        Column(
            "status",
            Enum("created", "updated", "deleted", "recreated", name="resource_status"),
            nullable=False,
        ),
        # full resource body stored as indexed JSONB
        Column("resource", _JSONB(astext_type=Text()), nullable=False, index=True),
    ]
    return Table(table_name, table_metadata, *columns)


class DBProxy(object):

3 Source : client.py
with MIT License
from aiguofer

    def get_table(self, name, schema=None):
        """Fetch metadata for the given table name. This will add it to the current
        metadata and save it for later use.

        :param str name: Name of the table, can include schema name with dot notation
        :param str schema: Explicitly give schema name (Default value = None)

        """
        # dot notation in `name` wins over the explicit/default schema
        resolved_name, resolved_schema = _parse_table_name(
            name, schema or self.default_schema
        )
        return Table(resolved_name, self.metadata, autoload=True, schema=resolved_schema)

    def table_factory(self, name, schema=None, primarykey=None):

3 Source : relations.py
with Apache License 2.0
from amakelov

    def make_table(self, name:str, db_meta:MetaData, schema:str) -> Table:
        """Build the SQLAlchemy table for this relation, attaching its columns
        and any multi-column unique indices declared in ``self.indices``.

        :param name: table name
        :param db_meta: metadata collection the table is registered into
        :param schema: target schema name
        :return: the constructed :class:`Table`
        """
        extra_args = []
        # note that index names must be unique DB-wide; the original named every
        # index 'multicol_{name}', which collides as soon as a relation declares
        # more than one index — include the position to keep names distinct
        for pos, index_cols in enumerate(self.indices):
            extra_args.append(Index(f'multicol_{name}_{pos}', *index_cols, unique=True))
        table = Table(name, db_meta, *self.col_objs, *extra_args,
                      extend_existing=self.extend_existing, schema=schema)
        return table


class RelStorage(ABC):

3 Source : psql_impl.py
with Apache License 2.0
from amakelov

    def init_schema(self, conn:Connection=None):
        """Create the initial graph/metadata schemas and the key-value metadata table."""
        for schema_name in (self.GRAPH_SCHEMA, self.METADATA_SCHEMA):
            self.create_schema(name=schema_name, conn=conn)
        # registering the table in sql_meta is all that's needed; it works as a dict
        Table(self.METADATA_TABLE, self.sql_meta,
              Column('key', String(), primary_key=True),
              Column('value', LargeBinary()),
              schema=self.METADATA_SCHEMA)
        self.sql_meta.create_all(bind=conn)

    @property

3 Source : sqlite_impl.py
with Apache License 2.0
from amakelov

    def init_schema(self, conn:Connection=None):
        """Register and create the key-value metadata table in SQLite's 'main' schema."""
        key_column = Column('key', String(), primary_key=True)
        value_column = Column('value', LargeBinary())
        Table(self.METADATA_TABLE, self.sql_meta, key_column, value_column,
              schema='main', extend_existing=True)
        self.sql_meta.create_all(bind=conn)

    @transaction()

3 Source : test_ddl.py
with MIT License
from analyzeDFIR

    def _simple_fixture(self):
        """Return a plain two-column fixture table bound to ``self.metadata``."""
        return Table(
            'test_table',
            self.metadata,
            Column('id', Integer, primary_key=True, autoincrement=False),
            Column('data', String(50)),
        )

    def _underscore_fixture(self):

3 Source : test_ddl.py
with MIT License
from analyzeDFIR

    def _underscore_fixture(self):
        """Return a fixture table whose table and column names start with underscores."""
        return Table(
            '_test_table',
            self.metadata,
            Column('id', Integer, primary_key=True, autoincrement=False),
            Column('_data', String(50)),
        )

    def _simple_roundtrip(self, table):

3 Source : database_explorer.py
with Apache License 2.0
from arkhn

def table_exists(sql_engine, table_name):
    """Return ``(exists, table)`` by reflecting a single table from the database.

    Deliberately avoids the SQLAlchemy Inspector: full reflection is very slow
    on Oracle, so only the one requested table is loaded.
    """
    metadata = MetaData(bind=sql_engine)
    try:
        reflected = Table(table_name, metadata, autoload=True)
    except NoSuchTableError:
        return False, None
    return True, reflected


@contextmanager

3 Source : builder.py
with GNU General Public License v3.0
from Artikash

    def remove(self):
        """
        Removes the table provided by the TableBuilder from the database.
        """
        table = Table(self.PROVIDES, self.db.metadata)
        # issue the DROP TABLE statement
        table.drop()
        # forget the table in the metadata so that recreating it with a
        # different schema won't raise — especially for tables created via
        # plain SQL CREATE commands
        self.db.metadata.remove(table)

    def findFile(self, fileNames, fileType=None):

3 Source : dbconnector.py
with GNU General Public License v3.0
from Artikash

    def _tableGetter(self):
        """
        Returns a function that retrieves a SQLAlchemy Table object for a given
        table name.
        """
        def getTable(tableName):
            # guard clause: unknown tables raise immediately
            schema = self._findTable(tableName)
            if schema is None:
                raise KeyError("Table '%s' not found in any database" % tableName)
            return Table(tableName, self.metadata, autoload=True,
                autoload_with=self.engine, schema=schema)

        return getTable

    def _findTable(self, tableName):

3 Source : __init__.py
with GNU General Public License v3.0
from Artikash

    def version(self):
        """Version (date) of the dictionary. ``None`` if not available."""
        try:
            dictSchema = self.db.tables[self.DICTIONARY_TABLE].schema
            versionTable = Table('Version', self.db.metadata, autoload=True,
                autoload_with=self.db.engine, schema=dictSchema)
            releaseDateQuery = select([versionTable.c.ReleaseDate],
                versionTable.c.TableName == self.DICTIONARY_TABLE)
            return self.db.selectScalar(releaseDateQuery)
        except NoSuchTableError:
            # dictionary ships without a Version table — fall through to None
            pass

    def _search(self, whereClause, filters, limit, orderBy):

3 Source : test_models.py
with BSD 3-Clause "New" or "Revised" License
from awesometoolbox

def test_model_class():
    """Check field metadata and the SQLAlchemy mapping of the User model."""
    fields = User.__fields__
    assert fields.keys() == {"id", "name"}

    id_type = fields["id"].type_
    assert issubclass(id_type, int)
    assert issubclass(id_type, orm.fields.ColumnFactory)
    assert id_type.primary_key is True

    name_type = fields["name"].type_
    assert issubclass(name_type, str)
    assert issubclass(name_type, orm.fields.ColumnFactory)
    assert name_type.max_length == 100

    assert isinstance(User.Mapping.table, sqlalchemy.Table)
    assert User.Mapping.pk_name == "id"


def test_model_pk():

3 Source : table_builder.py
with MIT License
from BazaroZero

    def build_table(self, meta: Optional[MetaData] = None) -> Table:
        """Creates a new table with columns that were added via
        :meth:`~.TableBuilder.add_column`.

        :param meta: metadata context
        :return: new table
        """
        metadata = MetaData() if meta is None else meta
        return Table(self._table_name, metadata, *self._columns.values())


# TODO: maybe inherit it from ``defaultdict``?
class BuilderGroup:

3 Source : queries.py
with MIT License
from bihealth

    def __init__(self, *args, **kwargs):
        """Resolve the per-query-type results table during construction."""
        super().__init__(*args, **kwargs)
        results_table_name = "variants_%squery_query_results" % self._get_query_type()
        self.query_results = Table(results_table_name, get_meta())

    def _get_query_type(self):

3 Source : test_postgres.py
with MIT License
from biocatchltd

def test_alchemy_usage(docker_client):
    """Round-trip rows through a throwaway PostgreSQL service via SQLAlchemy."""
    with PostgreSQLService.run(docker_client) as service:
        table = Table('foo', MetaData(),
                      Column('x', Integer),
                      Column('y', String))

        with service.connection() as connection:
            connection.execute("""
            CREATE TABLE foo (x INTEGER, y TEXT);
            INSERT INTO foo VALUES (1,'one'), (2, 'two'), (3, 'three'), (10, 'ten');
            """)
            query = select([table.c.x]).where(table.c.y.like('t%'))
            results = connection.execute(query)
        # only y-values starting with 't' should match
        assert [row['x'] for row in results] == [2, 3, 10]


def test_remote_connection_string(docker_client, create_and_pull):

3 Source : db.py
with MIT License
from CiscoDevNet

    def __init__(self, dbname, username="", password=""):
        """Open the SQLite schedule database, creating any missing tables.

        :param dbname: path to the SQLite database file
        :param username: unused; kept for interface compatibility
        :param password: unused; kept for interface compatibility
        """
        # NOTE: the defaults were originally written as `"" """Not used"""`,
        # which Python concatenates into the literal default "Not used" —
        # an inline-doc attempt that silently changed the default values.
        self._db_engine = create_engine(f"sqlite:///{dbname}")
        metadata = MetaData(self._db_engine)
        missing_table = False
        for table, tdef in TABLES.items():
            if not self._db_engine.dialect.has_table(self._db_engine, table):
                missing_table = True
                Table(table, metadata, *tdef)

        # only emit DDL when at least one table was absent
        if missing_table:
            metadata.create_all()

    def get_scheduled_labs(self, starting=None):

3 Source : core.py
with Apache License 2.0
from CloudmindsRobot

    def get_table(self, table_name: str, schema: Optional[str] = None) -> Table:
        """Reflect ``table_name`` from the database into a fresh MetaData."""
        extra = self.get_extra()
        metadata_params = extra.get("metadata_params", {})
        meta = MetaData(**metadata_params)
        return Table(
            table_name,
            meta,
            schema=schema or None,
            autoload=True,
            autoload_with=self.get_sqla_engine(),
        )

    def get_columns(

3 Source : core.py
with Apache License 2.0
from CloudmindsRobot

def generic_find_constraint_name(
    table: str, columns: Set[str], referenced: str, database: SQLA
) -> Optional[str]:
    """Utility to find a constraint name in alembic migrations"""
    tbl = sa.Table(
        table, database.metadata, autoload=True, autoload_with=database.engine
    )

    # first FK that points at `referenced` over exactly `columns`, else None
    matches = (
        fk.name
        for fk in tbl.foreign_key_constraints
        if fk.referred_table.name == referenced and set(fk.column_keys) == columns
    )
    return next(matches, None)


def generic_find_fk_constraint_name(  # pylint: disable=invalid-name

3 Source : test_models.py
with MIT License
from collerek

def test_model_class():
    """Sanity-check the declared fields and backing table of the User model."""
    model_fields = User.Meta.model_fields
    assert list(model_fields.keys()) == ["id", "name"]
    assert issubclass(model_fields["id"].__class__, pydantic.fields.FieldInfo)
    assert model_fields["id"].primary_key is True
    assert isinstance(model_fields["name"], pydantic.fields.FieldInfo)
    assert model_fields["name"].max_length == 100
    assert isinstance(User.Meta.table, sqlalchemy.Table)


def test_wrong_field_name():

3 Source : test_model_definition.py
with MIT License
from collerek

def test_sqlalchemy_table_is_created(example):
    """The model's Meta.table must be a sqlalchemy Table covering all fields."""
    table = example.Meta.table
    assert issubclass(table.__class__, sqlalchemy.Table)
    assert all(field in table.columns for field in fields_to_check)


@typing.no_type_check

3 Source : passivessldb.py
with GNU Affero General Public License v3.0
from D4-project

    def connect(self):
        """ Connect to the database server """
        try:
            # connect to the PostgreSQL server
            print('Connecting to the PostgreSQL database...')
            self.conn = engine_from_config(self.params, prefix='sqlalchemy.')
            self.meta = MetaData(self.conn)
            # reflect every table this store works with
            reflected = {
                'pkTable': 'public_key',
                'certTable': 'certificate',
                'pkcLink': 'many_certificate_has_many_public_key',
                'sessionTable': 'sessionRecord',
                'srcLink': 'many_sessionRecord_has_many_certificate',
            }
            for attr, table_name in reflected.items():
                setattr(self, attr, Table(table_name, self.meta, autoload=True))

        except Exception as error:
            # best-effort: report the failure and leave attributes unset
            print(error)

    def disconnect(self):

3 Source : db_ops.py
with GNU General Public License v3.0
from deepdivesec

    def get_table(self, table_name, engine):
        """Get table metadata.

        Reflects the schema from ``engine`` and returns the named table.
        """
        metadata = MetaData(engine)
        metadata.reflect()
        reflected_table = Table(table_name, metadata, autoload=True)
        return reflected_table

    def test_str_length(self):

3 Source : import_cci.py
with GNU General Public License v3.0
from dermatologist

    def get_cci():
        """Download the CIHI CCI code workbook and stage it as a resource CSV.

        Creates the CCI lookup table in the CDM user schema if absent, fetches
        the official xlsx from CIHI, and converts the 2018 codes sheet to CSV.
        """
        engine = pgsql.get_schema_engine(C.CDM_USER_SCHEMA)  # Access the DB Engine
        if not engine.dialect.has_table(engine, C.CDM_USER_CCI_TABLE):  # If table don't exist, Create.
            metadata = MetaData(engine)
            # Create a table with the appropriate Columns
            Table(C.CDM_USER_CCI_TABLE, metadata,
                  Column('cci_id', Integer, primary_key=True, nullable=False),
                  Column('cci_code', String, nullable=False),
                  Column('cci_short', String),
                  Column('cci_long', String))
            metadata.create_all()
        _respath = pkg_resources.resource_filename('hephaestus', 'resources') + '/'
        url = 'https://secure.cihi.ca/free_products/ICD-10-CA-and-CCI-Trending-Evolution2-en.xlsx'
        print('Downloading cci xlsx...')
        # NOTE(review): writes into the installed package's resources directory —
        # requires write access there; confirm this is intended for deployments
        urllib.request.urlretrieve(url, _respath + 'ICD-10-CA-and-CCI-Trending-Evolution2-en.xlsx')
        df = pd.read_excel(_respath + 'ICD-10-CA-and-CCI-Trending-Evolution2-en.xlsx'
                           , sheet_name='2. 2018 CCI Codes', skiprows=5
                           )  # for an earlier version of Excel, you may need to use the file extension of 'xls'
        df.to_csv(_respath + C.SOURCE_USER_CCI_FILE)

3 Source : relational_meta_store.py
with MIT License
from DHI-GRAS

    def db_version(self) -> str:
        """Terracotta version used to create the database"""
        terracotta_table = sqla.Table(
            'terracotta', self.sqla_metadata, autoload_with=self.sqla_engine
        )
        version_stmt = sqla.select(terracotta_table.c.version)
        return self.connection.execute(version_stmt).scalar()

    @convert_exceptions('Could not create database')

3 Source : relational_meta_store.py
with MIT License
from DHI-GRAS

    def get_keys(self) -> OrderedDict:
        """Return key names mapped to their descriptions, in declared order."""
        keys_table = sqla.Table('key_names', self.sqla_metadata, autoload_with=self.sqla_engine)
        name_col = keys_table.c.get('key_name')
        description_col = keys_table.c.get('description')
        stmt = sqla.select(name_col, description_col).order_by(keys_table.c.get('index'))
        rows = self.connection.execute(stmt)
        return OrderedDict(rows.all())

    @property

3 Source : relational_meta_store.py
with MIT License
from DHI-GRAS

    def delete(self, keys: KeysType) -> None:
        """Delete the dataset identified by ``keys`` plus its metadata rows."""
        if not self.get_datasets(keys):
            raise exceptions.DatasetNotFoundError(f'No dataset found with keys {keys}')

        # remove matching rows from both tables, datasets first
        for table_name in ('datasets', 'metadata'):
            table = sqla.Table(table_name, self.sqla_metadata, autoload_with=self.sqla_engine)
            conditions = [table.c.get(column) == value for column, value in keys.items()]
            self.connection.execute(table.delete().where(*conditions))

    @staticmethod

3 Source : clickhouse.py
with Apache License 2.0
from droher

    def metadata_transform(metadata: MetaData) -> MetaData:
        """Copy ``metadata`` into a ClickHouse-friendly MetaData.

        Autoincrement columns are dropped; remaining columns are wrapped in
        Nullable via the type lookup, and each table uses the Memory engine.
        """
        new_metadata = MetaData(schema=metadata.schema)

        for table in metadata.tables.values():
            new_cols = [
                Column(col.name, types.Nullable(type_lookup[type(col.type)]))
                for col in table.columns.values()
                if col.autoincrement is not True
            ]
            clickhouse_table = Table(table.name, new_metadata, *new_cols)
            clickhouse_table.engine = Memory()
        return new_metadata

    def make_copy_ddl(self, metadata: MetaData) -> DdlString:

3 Source : sqlite.py
with Apache License 2.0
from droher

    def metadata_transform(metadata: MetaData) -> MetaData:
        """Copy ``metadata`` into a SQLite-friendly MetaData.

        SQLite has no schemas, so the schema is folded into the table name;
        autoincrement (dummy PK) columns are dropped and Boolean columns become
        SmallInteger so no CHECK constraints are generated in the DDL.
        """
        new_metadata = MetaData()
        for table in metadata.tables.values():
            namespaced_name = "{}_{}".format(metadata.schema, table.name)
            new_cols = []
            for col in table.columns.values():
                if col.autoincrement is True:
                    continue
                col_type = SmallInteger if isinstance(col.type, Boolean) else col.type
                new_cols.append(Column(col.name, col_type))

            Table(namespaced_name, new_metadata, *new_cols)
        return new_metadata

    def make_copy_ddl(self, metadata: MetaData) -> DdlString:

3 Source : table_per_association.py
with Apache License 2.0
from gethue

    def addresses(cls):
        """Build the per-class association table and the Address relationship."""
        owner_fk_target = "%s.id" % cls.__tablename__
        address_association = Table(
            "%s_addresses" % cls.__tablename__,
            cls.metadata,
            # composite PK over both sides of the association
            Column("address_id", ForeignKey("address.id"), primary_key=True),
            Column("%s_id" % cls.__tablename__, ForeignKey(owner_fk_target),
                   primary_key=True),
        )
        return relationship(Address, secondary=address_association)


class Customer(HasAddresses, Base):

3 Source : test_compiler.py
with Apache License 2.0
from gethue

    def test_select_with_nolock_schema(self):
        """Schema-qualified SELECT should carry the WITH (NOLOCK) table hint."""
        meta = MetaData()
        sometable = Table(
            "sometable", meta, Column("somecolumn", Integer), schema="test_schema"
        )
        stmt = sometable.select().with_hint(sometable, "WITH (NOLOCK)")
        self.assert_compile(
            stmt,
            "SELECT test_schema.sometable.somecolumn "
            "FROM test_schema.sometable WITH (NOLOCK)",
        )

    def test_select_w_order_by_collate(self):

3 Source : test_compiler.py
with Apache License 2.0
from gethue

    def test_select_w_order_by_collate(self):
        """ORDER BY with an explicit COLLATE renders the collation name."""
        meta = MetaData()
        sometable = Table("sometable", meta, Column("somecolumn", String))

        ordering = sometable.c.somecolumn.collate(
            "Latin1_General_CS_AS_KS_WS_CI"
        ).asc()
        self.assert_compile(
            select([sometable]).order_by(ordering),
            "SELECT sometable.somecolumn FROM sometable "
            "ORDER BY sometable.somecolumn COLLATE "
            "Latin1_General_CS_AS_KS_WS_CI ASC",
        )

    def test_join_with_hint(self):

3 Source : test_compiler.py
with Apache License 2.0
from gethue

    def test_force_schema_quoted_name_w_dot_case_insensitive(self):
        """quoted_name forces bracket-quoting of a dotted, lowercase schema."""
        md = MetaData()
        test_table = Table(
            "test", md,
            Column("id", Integer, primary_key=True),
            schema=quoted_name("foo.dbo", True),
        )
        self.assert_compile(
            select([test_table]),
            "SELECT [foo.dbo].test.id FROM [foo.dbo].test",
        )

    def test_force_schema_quoted_w_dot_case_insensitive(self):

3 Source : test_compiler.py
with Apache License 2.0
from gethue

    def test_force_schema_quoted_w_dot_case_insensitive(self):
        """A quoted dotted schema stays one bracketed identifier (lowercase)."""
        md = MetaData()
        test_table = Table(
            "test", md,
            Column("id", Integer, primary_key=True),
            schema=quoted_name("foo.dbo", True),
        )
        self.assert_compile(
            select([test_table]),
            "SELECT [foo.dbo].test.id FROM [foo.dbo].test",
        )

    def test_force_schema_quoted_name_w_dot_case_sensitive(self):

3 Source : test_compiler.py
with Apache License 2.0
from gethue

    def test_force_schema_quoted_name_w_dot_case_sensitive(self):
        """quoted_name forces bracket-quoting of a dotted, mixed-case schema."""
        md = MetaData()
        test_table = Table(
            "test", md,
            Column("id", Integer, primary_key=True),
            schema=quoted_name("Foo.dbo", True),
        )
        self.assert_compile(
            select([test_table]),
            "SELECT [Foo.dbo].test.id FROM [Foo.dbo].test",
        )

    def test_force_schema_quoted_w_dot_case_sensitive(self):

3 Source : test_compiler.py
with Apache License 2.0
from gethue

    def test_force_schema_quoted_w_dot_case_sensitive(self):
        """An already-bracketed dotted schema is preserved verbatim."""
        md = MetaData()
        test_table = Table(
            "test", md,
            Column("id", Integer, primary_key=True),
            schema="[Foo.dbo]",
        )
        self.assert_compile(
            select([test_table]),
            "SELECT [Foo.dbo].test.id FROM [Foo.dbo].test",
        )

    def test_schema_autosplit_w_dot_case_insensitive(self):

3 Source : test_compiler.py
with Apache License 2.0
from gethue

    def test_schema_autosplit_w_dot_case_insensitive(self):
        """An unquoted lowercase dotted schema splits into db.owner, unbracketed."""
        md = MetaData()
        test_table = Table(
            "test", md,
            Column("id", Integer, primary_key=True),
            schema="foo.dbo",
        )
        self.assert_compile(
            select([test_table]),
            "SELECT foo.dbo.test.id FROM foo.dbo.test",
        )

    def test_schema_autosplit_w_dot_case_sensitive(self):

3 Source : test_compiler.py
with Apache License 2.0
from gethue

    def test_schema_autosplit_w_dot_case_sensitive(self):
        """A mixed-case dotted schema splits with the db part bracket-quoted."""
        md = MetaData()
        test_table = Table(
            "test", md,
            Column("id", Integer, primary_key=True),
            schema="Foo.dbo",
        )
        self.assert_compile(
            select([test_table]),
            "SELECT [Foo].dbo.test.id FROM [Foo].dbo.test",
        )

    def test_delete_schema(self):

3 Source : test_compiler.py
with Apache License 2.0
from gethue

    def test_function(self):
        """Generic and built-in SQL functions compile with bound params/parens."""
        self.assert_compile(func.foo(1, 2), "foo(:foo_1, :foo_2)")
        self.assert_compile(func.current_time(), "CURRENT_TIME")
        self.assert_compile(func.foo(), "foo()")
        meta = MetaData()
        sometable = Table(
            "sometable", meta, Column("col1", Integer), Column("col2", Integer)
        )
        self.assert_compile(
            select([func.max(sometable.c.col1)]),
            "SELECT max(sometable.col1) AS max_1 FROM sometable",
        )

    def test_function_overrides(self):

3 Source : test_compiler.py
with Apache License 2.0
from gethue

    def test_primary_key_no_identity(self):
        """A non-autoincrement integer PK must not render IDENTITY."""
        md = MetaData()
        test_table = Table(
            "test", md,
            Column("id", Integer, autoincrement=False, primary_key=True),
        )
        self.assert_compile(
            schema.CreateTable(test_table),
            "CREATE TABLE test (id INTEGER NOT NULL, PRIMARY KEY (id))",
        )

    def test_primary_key_defaults_to_identity(self):

3 Source : test_compiler.py
with Apache License 2.0
from gethue

    def test_primary_key_defaults_to_identity(self):
        """A plain integer PK defaults to IDENTITY(1,1)."""
        md = MetaData()
        test_table = Table("test", md, Column("id", Integer, primary_key=True))
        self.assert_compile(
            schema.CreateTable(test_table),
            "CREATE TABLE test (id INTEGER NOT NULL IDENTITY(1,1), "
            "PRIMARY KEY (id))",
        )

    def test_identity_no_primary_key(self):

3 Source : test_compiler.py
with Apache License 2.0
from gethue

    def test_identity_no_primary_key(self):
        """autoincrement=True renders IDENTITY even without a primary key."""
        md = MetaData()
        test_table = Table("test", md, Column("id", Integer, autoincrement=True))
        self.assert_compile(
            schema.CreateTable(test_table),
            "CREATE TABLE test (id INTEGER NOT NULL IDENTITY(1,1))",
        )

    def test_identity_separate_from_primary_key(self):

3 Source : test_compiler.py
with Apache License 2.0
from gethue

    def test_identity_separate_from_primary_key(self):
        """IDENTITY can live on a non-PK column when the PK opts out."""
        md = MetaData()
        test_table = Table(
            "test", md,
            Column("id", Integer, autoincrement=False, primary_key=True),
            Column("x", Integer, autoincrement=True),
        )
        self.assert_compile(
            schema.CreateTable(test_table),
            "CREATE TABLE test (id INTEGER NOT NULL, "
            "x INTEGER NOT NULL IDENTITY(1,1), "
            "PRIMARY KEY (id))",
        )

    def test_identity_illegal_two_autoincrements(self):

3 Source : test_compiler.py
with Apache License 2.0
from gethue

    def test_identity_illegal_two_autoincrements(self):
        """Two autoincrement columns both render IDENTITY in the DDL."""
        md = MetaData()
        test_table = Table(
            "test", md,
            Column("id", Integer, autoincrement=True),
            Column("id2", Integer, autoincrement=True),
        )
        # this will be rejected by the database, just asserting this is what
        # the two autoincrements will do right now
        self.assert_compile(
            schema.CreateTable(test_table),
            "CREATE TABLE test (id INTEGER NOT NULL IDENTITY(1,1), "
            "id2 INTEGER NOT NULL IDENTITY(1,1))",
        )

    def test_identity_start_0(self):

3 Source : test_compiler.py
with Apache License 2.0
from gethue

    def test_identity_start_0(self):
        """mssql_identity_start=0 renders IDENTITY(0,1)."""
        md = MetaData()
        test_table = Table(
            "test", md,
            Column("id", Integer, mssql_identity_start=0, primary_key=True),
        )
        self.assert_compile(
            schema.CreateTable(test_table),
            "CREATE TABLE test (id INTEGER NOT NULL IDENTITY(0,1), "
            "PRIMARY KEY (id))",
        )

    def test_identity_increment_5(self):

3 Source : test_compiler.py
with Apache License 2.0
from gethue

    def test_identity_increment_5(self):
        """mssql_identity_increment=5 renders IDENTITY(1,5)."""
        md = MetaData()
        test_table = Table(
            "test", md,
            Column("id", Integer, mssql_identity_increment=5, primary_key=True),
        )
        self.assert_compile(
            schema.CreateTable(test_table),
            "CREATE TABLE test (id INTEGER NOT NULL IDENTITY(1,5), "
            "PRIMARY KEY (id))",
        )

    def test_sequence_start_0(self):

3 Source : test_compiler.py
with Apache License 2.0
from gethue

    def test_sequence_start_0(self):
        """A Sequence start of 0 maps to IDENTITY(0,1) (deprecated usage)."""
        md = MetaData()
        test_table = Table(
            "test", md,
            Column("id", Integer, Sequence("", 0), primary_key=True),
        )
        with testing.expect_deprecated(
            "Use of Sequence with SQL Server in order to affect "
        ):
            self.assert_compile(
                schema.CreateTable(test_table),
                "CREATE TABLE test (id INTEGER NOT NULL IDENTITY(0,1), "
                "PRIMARY KEY (id))",
            )

    def test_sequence_non_primary_key(self):

3 Source : test_compiler.py
with Apache License 2.0
from gethue

    def test_sequence_non_primary_key(self):
        """A Sequence on a non-PK column still maps to IDENTITY (deprecated)."""
        md = MetaData()
        test_table = Table(
            "test", md,
            Column("id", Integer, Sequence("", start=5), primary_key=False),
        )
        with testing.expect_deprecated(
            "Use of Sequence with SQL Server in order to affect "
        ):
            self.assert_compile(
                schema.CreateTable(test_table),
                "CREATE TABLE test (id INTEGER NOT NULL IDENTITY(5,1))",
            )

    def test_sequence_ignore_nullability(self):

3 Source : test_compiler.py
with Apache License 2.0
from gethue

    def test_sequence_ignore_nullability(self):
        """An IDENTITY column compiles NOT NULL even when declared nullable."""
        md = MetaData()
        test_table = Table(
            "test", md,
            Column("id", Integer, Sequence("", start=5), nullable=True),
        )
        with testing.expect_deprecated(
            "Use of Sequence with SQL Server in order to affect "
        ):
            self.assert_compile(
                schema.CreateTable(test_table),
                "CREATE TABLE test (id INTEGER NOT NULL IDENTITY(5,1))",
            )

    def test_table_pkc_clustering(self):

3 Source : test_compiler.py
with Apache License 2.0
from gethue

    def test_table_pkc_clustering(self):
        """mssql_clustered=True renders PRIMARY KEY CLUSTERED."""
        md = MetaData()
        test_table = Table(
            "test", md,
            Column("x", Integer, autoincrement=False),
            Column("y", Integer, autoincrement=False),
            PrimaryKeyConstraint("x", "y", mssql_clustered=True),
        )
        self.assert_compile(
            schema.CreateTable(test_table),
            "CREATE TABLE test (x INTEGER NOT NULL, y INTEGER NOT NULL, "
            "PRIMARY KEY CLUSTERED (x, y))",
        )

    def test_table_pkc_explicit_nonclustered(self):

3 Source : test_compiler.py
with Apache License 2.0
from gethue

    def test_table_pkc_explicit_nonclustered(self):
        """mssql_clustered=False renders PRIMARY KEY NONCLUSTERED."""
        md = MetaData()
        test_table = Table(
            "test", md,
            Column("x", Integer, autoincrement=False),
            Column("y", Integer, autoincrement=False),
            PrimaryKeyConstraint("x", "y", mssql_clustered=False),
        )
        self.assert_compile(
            schema.CreateTable(test_table),
            "CREATE TABLE test (x INTEGER NOT NULL, y INTEGER NOT NULL, "
            "PRIMARY KEY NONCLUSTERED (x, y))",
        )

    def test_table_idx_explicit_nonclustered(self):

See More Examples