Here are examples of the Python API sqlalchemy.schema.Table taken from open source projects. By voting up, you can indicate which examples are most useful and appropriate.
148 Examples
3
Source : sqlsoup.py
with GNU General Public License v3.0
from Artikash
def _selectable_name(selectable):
    """Derive a class-name-like string for an arbitrary selectable."""
    # Aliases delegate to the element they wrap.
    if isinstance(selectable, sql.Alias):
        return _selectable_name(selectable.element)
    # SELECTs concatenate the names of everything in their FROM list.
    if isinstance(selectable, sql.Select):
        return ''.join(_selectable_name(frm) for frm in selectable.froms)
    # Plain tables use their capitalized name.
    if isinstance(selectable, schema.Table):
        return selectable.name.capitalize()
    # Fallback: the class name, minus any leading underscore.
    cls_name = selectable.__class__.__name__
    return cls_name[1:] if cls_name.startswith('_') else cls_name
def _class_for_table(session, engine, selectable, base_cls, mapper_kwargs):
3
Source : schemaobj.py
with MIT License
from bkerler
with MIT License
from bkerler
def index(self, name, tablename, columns, schema=None, **kw):
    """Build a standalone Index object from textual column names.

    A placeholder Table (named ``no_table`` when no table name is
    given) is created so the column names can be resolved.
    """
    placeholder = sa_schema.Table(
        tablename or 'no_table',
        self.metadata(),
        schema=schema,
    )
    index_columns = [
        util.sqla_compat._textual_index_column(placeholder, colname)
        for colname in columns
    ]
    return sa_schema.Index(name, *index_columns, **kw)
def _parse_table_key(self, table_key):
3
Source : schemaobj.py
with MIT License
from bkerler
with MIT License
from bkerler
def _ensure_table_for_fk(self, metadata, fk):
    """create a placeholder Table object for the referent of a
    ForeignKey.
    """
    # Only string column specs ("table.col") need resolution.
    if not isinstance(fk._colspec, string_types):
        return
    table_key, cname = fk._colspec.rsplit('.', 1)
    sname, tname = self._parse_table_key(table_key)
    try:
        rel_t = metadata.tables[table_key]
    except KeyError:
        # Referent not yet known; register a stub table for it.
        rel_t = sa_schema.Table(tname, metadata, schema=sname)
    if cname not in rel_t.c:
        rel_t.append_column(sa_schema.Column(cname, NULLTYPE))
3
Source : table.py
with MIT License
from bkerler
with MIT License
from bkerler
def _reflect_table(self):
    """Load the tables definition from the database."""
    with self.db.lock:
        try:
            self._table = SQLATable(
                self.name,
                self.db.metadata,
                schema=self.db.schema,
                autoload=True,
            )
        except NoSuchTableError:
            # Table does not exist yet; leave self._table untouched.
            pass
def _threading_warn(self):
3
Source : schemaobj.py
with MIT License
from DiptoChakrabarty
with MIT License
from DiptoChakrabarty
def index(self, name, tablename, columns, schema=None, **kw):
    """Return a new Index over textual column names.

    The names are resolved against a throwaway Table (called
    "no_table" when no table name is supplied).
    """
    stub_table = sa_schema.Table(
        tablename or "no_table", self.metadata(), schema=schema
    )
    resolved = [
        util.sqla_compat._textual_index_column(stub_table, colname)
        for colname in columns
    ]
    return sa_schema.Index(name, *resolved, **kw)
def _parse_table_key(self, table_key):
3
Source : schemaobj.py
with MIT License
from DiptoChakrabarty
with MIT License
from DiptoChakrabarty
def _ensure_table_for_fk(self, metadata, fk):
    """create a placeholder Table object for the referent of a
    ForeignKey.
    """
    if not isinstance(fk._colspec, string_types):
        return
    table_key, cname = fk._colspec.rsplit(".", 1)
    sname, tname = self._parse_table_key(table_key)
    if table_key in metadata.tables:
        rel_t = metadata.tables[table_key]
    else:
        # Referent not yet present; register a stub table for it.
        rel_t = sa_schema.Table(tname, metadata, schema=sname)
    if cname not in rel_t.c:
        rel_t.append_column(sa_schema.Column(cname, NULLTYPE))
3
Source : Version.py
with MIT License
from elbakramer
with MIT License
from elbakramer
def delete(self):
    """Delete this Version row, dropping its backing data table when
    this is the last Version that references it.
    """
    session = object_session(self)
    if self.table_name is not None:
        table_reference_count = (
            session.query(Version)
            .filter(Version.table_name == self.table_name)
            .count()
        )
        # Drop the data table only when no other Version row points at it.
        # BUG FIX: the comparison was written "< =" (a space inside the
        # operator), which is a Python syntax error; it must be "<=".
        if table_reference_count <= 1:
            session.execute(
                DropTable(Table(self.table_name, MetaData()), if_exists=True)
            )
    session.delete(self)
3
Source : test_reflection.py
with Apache License 2.0
from gethue
with Apache License 2.0
from gethue
def test_autoload_with_imply_autoload(self,):
    # Create a table with a few typed columns...
    meta = self.metadata
    original = Table(
        "t",
        meta,
        Column("id", sa.Integer, primary_key=True),
        Column("x", sa.String(20)),
        Column("y", sa.Integer),
    )
    meta.create_all()
    # ...then reflect it into a fresh MetaData (autoload_with alone
    # implies autoload=True) and compare against the original.
    reflected = Table("t", MetaData(), autoload_with=testing.db)
    self.assert_tables_equal(original, reflected)
@testing.provide_metadata
3
Source : test_reflection.py
with Apache License 2.0
from gethue
with Apache License 2.0
from gethue
def test_autoload_replace_primary_key(self):
    # Table "a" exists in the database without a primary key.
    Table("a", self.metadata, Column("id", Integer))
    self.metadata.create_all()
    # A second MetaData declares "a" *with* a primary key, then reflects
    # with autoload_replace=False so the declared column must win.
    m2 = MetaData()
    declared = Table("a", m2, Column("id", Integer, primary_key=True))
    Table(
        "a",
        m2,
        autoload=True,
        autoload_with=testing.db,
        autoload_replace=False,
        extend_existing=True,
    )
    eq_(list(declared.primary_key), [declared.c.id])
def test_autoload_replace_arg(self):
3
Source : test_reflection.py
with Apache License 2.0
from gethue
with Apache License 2.0
from gethue
def test_fk_error(self):
    # Declare a FK to a table ("pkgs") that is never defined, so
    # create_all must fail with a descriptive error.
    metadata = MetaData(testing.db)
    Table(
        "slots",
        metadata,
        Column("slot_id", sa.Integer, primary_key=True),
        Column("pkg_id", sa.Integer, sa.ForeignKey("pkgs.pkg_id")),
        Column("slot", sa.String(128)),
    )
    expected_message = (
        "Foreign key associated with column 'slots.pkg_id' "
        "could not find table 'pkgs' with which to generate "
        "a foreign key to target column 'pkg_id'"
    )
    assert_raises_message(
        sa.exc.InvalidRequestError, expected_message, metadata.create_all
    )
def test_composite_pks(self):
3
Source : test_reflection.py
with Apache License 2.0
from gethue
with Apache License 2.0
from gethue
def test_schema_translation(self):
    # The table really lives in config.test_schema...
    Table(
        "foob",
        self.metadata,
        Column("q", Integer),
        schema=config.test_schema,
    )
    self.metadata.create_all()
    # ...but is reflected under the fake schema name "foob", mapped to
    # the real one by a schema_translate_map on the connection.
    translate = {"foob": config.test_schema}
    with config.db.connect().execution_options(
        schema_translate_map=translate
    ) as conn:
        reflected = Table("foob", MetaData(), schema="foob", autoload_with=conn)
        eq_(reflected.schema, "foob")
        eq_(reflected.c.keys(), ["q"])
@testing.requires.schemas
3
Source : test_reflection.py
with Apache License 2.0
from gethue
with Apache License 2.0
from gethue
def test_direct_quoting(self):
    # Reflect a table with mixed-case column names and verify the
    # compiler quotes exactly the case-sensitive ones.
    reflected = Table("weird_casing", MetaData(testing.db), autoload=True)
    self.assert_compile(
        reflected.select(),
        "SELECT weird_casing.col1, "
        'weird_casing."Col2", weird_casing."col3" '
        "FROM weird_casing",
    )
class CaseSensitiveTest(fixtures.TablesTest):
3
Source : test_reflection.py
with Apache License 2.0
from gethue
with Apache License 2.0
from gethue
def define_tables(cls, metadata):
    # Parent table with a mixed-case name (exercises case handling).
    Table(
        "SomeTable",
        metadata,
        Column("x", Integer, primary_key=True),
        test_needs_fk=True,
    )
    # Child table referencing the parent via a foreign key.
    Table(
        "SomeOtherTable",
        metadata,
        Column("x", Integer, primary_key=True),
        Column("y", Integer, sa.ForeignKey("SomeTable.x")),
        test_needs_fk=True,
    )
@testing.fails_if(testing.requires._has_mysql_on_windows)
3
Source : test_reflection.py
with Apache License 2.0
from gethue
with Apache License 2.0
from gethue
def test_reflect_exact_name(self):
    # Reflection must preserve the mixed-case table name exactly.
    reflected = Table("SomeTable", MetaData(), autoload=True, autoload_with=testing.db)
    eq_(reflected.name, "SomeTable")
    assert reflected.c.x is not None
@testing.fails_if(
3
Source : test_reflection.py
with Apache License 2.0
from gethue
with Apache License 2.0
from gethue
def test_reflect_via_fk(self):
    m = MetaData()
    # Reflecting the child pulls the FK-referenced parent in as well.
    child = Table("SomeOtherTable", m, autoload=True, autoload_with=testing.db)
    eq_(child.name, "SomeOtherTable")
    assert "SomeTable" in m.tables
@testing.fails_if(testing.requires._has_mysql_fully_case_sensitive)
3
Source : test_reflection.py
with Apache License 2.0
from gethue
with Apache License 2.0
from gethue
def test_reflect_case_insensitive(self):
    # On case-insensitive backends any casing of the name reflects fine.
    reflected = Table("sOmEtAbLe", MetaData(), autoload=True, autoload_with=testing.db)
    eq_(reflected.name, "sOmEtAbLe")
class ColumnEventsTest(fixtures.RemovesEvents, fixtures.TestBase):
3
Source : test_reflection.py
with Apache License 2.0
from gethue
with Apache License 2.0
from gethue
def setup_class(cls):
    # Build a parent/child table pair plus an index, created once per class.
    cls.metadata = MetaData()
    # Parent: primary key "x" plus an indexed column "y".
    cls.to_reflect = Table(
        "to_reflect",
        cls.metadata,
        Column("x", sa.Integer, primary_key=True),
        Column("y", sa.Integer),
        test_needs_fk=True,
    )
    # Child referencing the parent's primary key.
    cls.related = Table(
        "related",
        cls.metadata,
        Column("q", sa.Integer, sa.ForeignKey("to_reflect.x")),
        test_needs_fk=True,
    )
    sa.Index("some_index", cls.to_reflect.c.y)
    cls.metadata.create_all(testing.db)
@classmethod
3
Source : test_reflection.py
with Apache License 2.0
from gethue
with Apache License 2.0
from gethue
def test_table_reflection(self):
    # Reflect the computed-columns table and check each variant.
    table = Table("computed_column_table", MetaData(), autoload_with=config.db)
    # The no-flag column may reflect as persisted or virtual, depending
    # on the backend's default.
    self.check_table_column(
        table,
        "computed_no_flag",
        "normal+42",
        testing.requires.computed_columns_default_persisted.enabled,
    )
    if testing.requires.computed_columns_virtual.enabled:
        self.check_table_column(table, "computed_virtual", "normal+2", False)
    if testing.requires.computed_columns_stored.enabled:
        self.check_table_column(table, "computed_stored", "normal-42", True)
3
Source : test_operators.py
with Apache License 2.0
from gethue
with Apache License 2.0
from gethue
def test_column_proxy(self):
    # Operator overrides must survive proxying through select().c.
    tbl = Table("t", MetaData(), Column("foo", self._add_override_factory()))
    proxied_col = tbl.select().c.foo
    self._assert_add_override(proxied_col)
    self._assert_and_override(proxied_col)
def test_alias_proxy(self):
3
Source : test_operators.py
with Apache License 2.0
from gethue
with Apache License 2.0
from gethue
def test_alias_proxy(self):
    # Operator overrides must survive proxying through an alias.
    tbl = Table("t", MetaData(), Column("foo", self._add_override_factory()))
    proxied_col = tbl.alias().c.foo
    self._assert_add_override(proxied_col)
    self._assert_and_override(proxied_col)
def test_binary_propagate(self):
3
Source : test_operators.py
with Apache License 2.0
from gethue
with Apache License 2.0
from gethue
def t_fixture(self):
    # Fixture table: an ARRAY column alongside a plain integer column.
    return Table(
        "tab1",
        MetaData(),
        Column("arrval", ARRAY(Integer)),
        Column("data", Integer),
    )
def test_any_array(self, t_fixture):
3
Source : schemaobj.py
with MIT License
from sqlalchemy
with MIT License
from sqlalchemy
def _ensure_table_for_fk(
    self, metadata: "MetaData", fk: "ForeignKey"
) -> None:
    """create a placeholder Table object for the referent of a
    ForeignKey.
    """
    colspec = fk._colspec  # type:ignore[attr-defined]
    # Only string column specs ("table.col") need resolution.
    if not isinstance(colspec, str):
        return
    table_key, cname = colspec.rsplit(".", 1)
    sname, tname = self._parse_table_key(table_key)
    if table_key in metadata.tables:
        rel_t = metadata.tables[table_key]
    else:
        # Referent not yet present; register a stub table for it.
        rel_t = sa_schema.Table(tname, metadata, schema=sname)
    if cname not in rel_t.c:
        rel_t.append_column(sa_schema.Column(cname, NULLTYPE))
3
Source : test_reflection.py
with MIT License
from sqlalchemy
with MIT License
from sqlalchemy
def test_autoload_with_imply_autoload(self, metadata, connection):
    original = Table(
        "t",
        metadata,
        Column("id", sa.Integer, primary_key=True),
        Column("x", sa.String(20)),
        Column("y", sa.Integer),
    )
    metadata.create_all(connection)
    # Passing autoload_with alone implies reflection; compare the
    # reflected table against the original definition.
    reflected = Table("t", MetaData(), autoload_with=connection)
    self.assert_tables_equal(original, reflected)
def test_two_foreign_keys(self, metadata, connection):
3
Source : test_reflection.py
with MIT License
from sqlalchemy
with MIT License
from sqlalchemy
def test_autoload_replace_primary_key(self, connection, metadata):
    # The table in the database has no primary key...
    Table("a", metadata, Column("id", Integer))
    metadata.create_all(connection)
    # ...but the declared definition does; with autoload_replace=False
    # the declared column must survive reflection.
    m2 = MetaData()
    declared = Table("a", m2, Column("id", Integer, primary_key=True))
    Table(
        "a",
        m2,
        autoload_with=connection,
        autoload_replace=False,
        extend_existing=True,
    )
    eq_(list(declared.primary_key), [declared.c.id])
def test_autoload_replace_arg(self):
3
Source : test_reflection.py
with MIT License
from sqlalchemy
with MIT License
from sqlalchemy
def test_unknown_types(self, connection, metadata):
    """Test the handling of unknown types for the given dialect.
    sqlite is skipped because it has special rules for unknown types using
    'affinity types' - this feature is tested in that dialect's test spec.
    """
    created = Table("test", metadata, Column("foo", sa.DateTime))
    created.create(connection)
    # Blank out the dialect's type lookup so reflection cannot resolve
    # the column type, then confirm we warn and fall back to NullType.
    with mock.patch.object(connection.dialect, "ischema_names", {}):
        m2 = MetaData()
        with testing.expect_warnings("Did not recognize type"):
            reflected = Table("test", m2, autoload_with=connection)
        is_(reflected.c.foo.type.__class__, sa.types.NullType)
def test_basic_override(self, connection, metadata):
3
Source : test_reflection.py
with MIT License
from sqlalchemy
with MIT License
from sqlalchemy
def test_fk_error(self, connection, metadata):
    # "pkgs" is referenced but never defined, so create_all must fail.
    Table(
        "slots",
        metadata,
        Column("slot_id", sa.Integer, primary_key=True),
        Column("pkg_id", sa.Integer, sa.ForeignKey("pkgs.pkg_id")),
        Column("slot", sa.String(128)),
    )
    expected_message = (
        "Foreign key associated with column 'slots.pkg_id' "
        "could not find table 'pkgs' with which to generate "
        "a foreign key to target column 'pkg_id'"
    )
    assert_raises_message(
        sa.exc.InvalidRequestError,
        expected_message,
        metadata.create_all,
        connection,
    )
def test_composite_pks(self, connection, metadata):
3
Source : test_reflection.py
with MIT License
from sqlalchemy
with MIT License
from sqlalchemy
def test_schema_translation(self, connection, metadata):
    # The table really lives in config.test_schema...
    Table(
        "foob",
        metadata,
        Column("q", Integer),
        schema=config.test_schema,
    )
    metadata.create_all(connection)
    # ...but is reflected under the fake schema name "foob", translated
    # to the real one by the connection's schema_translate_map.
    translated = connection.execution_options(
        schema_translate_map={"foob": config.test_schema}
    )
    reflected = Table("foob", MetaData(), schema="foob", autoload_with=translated)
    eq_(reflected.schema, "foob")
    eq_(reflected.c.keys(), ["q"])
@testing.requires.schemas
3
Source : test_reflection.py
with MIT License
from sqlalchemy
with MIT License
from sqlalchemy
def test_direct_quoting(self, connection):
    # Mixed-case column names must come back quoted in compiled SQL.
    reflected = Table("weird_casing", MetaData(), autoload_with=connection)
    self.assert_compile(
        reflected.select(),
        "SELECT weird_casing.col1, "
        'weird_casing."Col2", weird_casing."col3" '
        "FROM weird_casing",
    )
class CaseSensitiveTest(fixtures.TablesTest):
3
Source : test_reflection.py
with MIT License
from sqlalchemy
with MIT License
from sqlalchemy
def test_reflect_exact_name(self, connection):
    # Reflection must hand back the mixed-case name untouched.
    reflected = Table("SomeTable", MetaData(), autoload_with=connection)
    eq_(reflected.name, "SomeTable")
    assert reflected.c.x is not None
@testing.fails_if(
3
Source : test_reflection.py
with MIT License
from sqlalchemy
with MIT License
from sqlalchemy
def test_reflect_via_fk(self, connection):
    m = MetaData()
    child = Table("SomeOtherTable", m, autoload_with=connection)
    eq_(child.name, "SomeOtherTable")
    # The FK target is reflected into the same MetaData as a side effect.
    assert "SomeTable" in m.tables
@testing.fails_if(testing.requires._has_mysql_fully_case_sensitive)
3
Source : test_reflection.py
with MIT License
from sqlalchemy
with MIT License
from sqlalchemy
def test_reflect_case_insensitive(self, connection):
    # Any casing of the name reflects fine on case-insensitive backends.
    reflected = Table("sOmEtAbLe", MetaData(), autoload_with=connection)
    eq_(reflected.name, "sOmEtAbLe")
class ColumnEventsTest(fixtures.RemovesEvents, fixtures.TablesTest):
3
Source : test_reflection.py
with MIT License
from sqlalchemy
with MIT License
from sqlalchemy
def define_tables(cls, metadata):
    # Parent table; autoincrement is disabled on the PK column.
    to_reflect = Table(
        "to_reflect",
        metadata,
        Column("x", sa.Integer, primary_key=True, autoincrement=False),
        Column("y", sa.Integer),
        test_needs_fk=True,
    )
    # Child table referencing the parent's primary key.
    Table(
        "related",
        metadata,
        Column("q", sa.Integer, sa.ForeignKey("to_reflect.x")),
        test_needs_fk=True,
    )
    sa.Index("some_index", to_reflect.c.y)
def _do_test(
3
Source : test_reflection.py
with MIT License
from sqlalchemy
with MIT License
from sqlalchemy
def test_listen_metadata_obj(self, connection):
    # Two separate MetaData objects; the listener is attached to m1 only.
    m1 = MetaData()
    m2 = MetaData()
    canary = []
    @event.listens_for(m1, "column_reflect")
    def go(insp, table, info):
        canary.append(info["name"])
    Table("related", m1, autoload_with=connection)
    # Reflecting into m2 must NOT fire the m1-scoped listener.
    Table("related", m2, autoload_with=connection)
    # Only the m1 reflection fired the event, once per reflected column.
    eq_(canary, ["q", "x", "y"])
def test_listen_metadata_cls(self, connection):
3
Source : test_reflection.py
with MIT License
from sqlalchemy
with MIT License
from sqlalchemy
def define_tables(cls, metadata):
    # "id1" carries an Identity with explicit start/increment values for
    # the reflection test to verify; "normal" has no identity.
    Table(
        "t1",
        metadata,
        Column("normal", Integer),
        Column("id1", Integer, Identity(start=2, increment=3)),
    )
def test_table_reflection(self):
3
Source : test_reflection.py
with MIT License
from sqlalchemy
with MIT License
from sqlalchemy
def test_table_reflection(self):
    reflected = Table("t1", MetaData(), autoload_with=config.db)
    # The plain column carries no identity...
    eq_(reflected.c.normal.identity, None)
    # ...while id1 reflects its start/increment configuration.
    identity = reflected.c.id1.identity
    is_true(identity is not None)
    eq_(identity.start, 2)
    eq_(identity.increment, 3)
3
Source : test_lambdas.py
with MIT License
from sqlalchemy
with MIT License
from sqlalchemy
def user_address_fixture(self, metadata):
    # Classic users/addresses table pair shared by the lambda tests.
    users = Table(
        "users",
        metadata,
        Column("id", Integer, primary_key=True),
        Column("name", String(50)),
    )
    # addresses.user_id references users.id (type inferred from the FK).
    addresses = Table(
        "addresses",
        metadata,
        Column("id", Integer),
        Column("user_id", ForeignKey("users.id")),
        Column("email", String(50)),
    )
    return users, addresses
@testing.metadata_fixture()
3
Source : test_lambdas.py
with MIT License
from sqlalchemy
with MIT License
from sqlalchemy
def boolean_table_fixture(self, metadata):
    # Minimal table with a Boolean column for boolean-handling tests.
    return Table(
        "boolean_data",
        metadata,
        Column("id", Integer, primary_key=True),
        Column("data", Boolean),
    )
def test_adapt_select(self, user_address_fixture):
3
Source : test_operators.py
with MIT License
from sqlalchemy
with MIT License
from sqlalchemy
def test_column_proxy(self):
    tbl = Table("t", MetaData(), Column("foo", self._add_override_factory()))
    # SelectBase.c is deprecated; accessing it must emit the warning.
    with testing.expect_deprecated(
        "The SelectBase.c and SelectBase.columns attributes "
        "are deprecated"
    ):
        proxied_col = tbl.select().c.foo
    self._assert_add_override(proxied_col)
    self._assert_and_override(proxied_col)
def test_subquery_proxy(self):
3
Source : test_operators.py
with MIT License
from sqlalchemy
with MIT License
from sqlalchemy
def test_subquery_proxy(self):
    # Overrides must survive proxying through a subquery's columns.
    tbl = Table("t", MetaData(), Column("foo", self._add_override_factory()))
    proxied_col = tbl.select().subquery().c.foo
    self._assert_add_override(proxied_col)
    self._assert_and_override(proxied_col)
def test_alias_proxy(self):
3
Source : test_operators.py
with MIT License
from sqlalchemy
with MIT License
from sqlalchemy
def test_scalar_subquery_wo_type(self):
    """test for :ticket:`6181`"""
    m = MetaData()
    t = Table("t", m, Column("a", Integer))
    # the scalar subquery of this will have no type; coercions will
    # want to call _with_binary_element_type(); that has to return
    # a scalar select
    req = select(column("scan"))
    # IN against the untyped subquery must still compile cleanly.
    self.assert_compile(
        select(t.c.a).where(t.c.a.in_(req)),
        "SELECT t.a FROM t WHERE t.a IN (SELECT scan)",
    )
def test_type_inference_one(self):
3
Source : test_operators.py
with MIT License
from sqlalchemy
with MIT License
from sqlalchemy
def t_fixture(self):
    # Fixture table: an ARRAY column plus a scalar integer column.
    return Table(
        "tab1",
        MetaData(),
        Column("arrval", ARRAY(Integer)),
        Column("data", Integer),
    )
@testing.combinations(
0
Source : database.py
with MIT License
from analyzeDFIR
with MIT License
from analyzeDFIR
def create_view(name, selectable, metadata, materialized=False):
    '''
    Args:
        name: String => name of materialized view to create
        selectable: FromClause => query to create view as
        metadata: MetaData => metadata to listen for events on
        materialized: Boolean => whether to create standard or materialized view
    Returns:
        Table
        Table object bound to temporary MetaData object with columns as
        columns returned from selectable (essentially creates table as view)
    NOTE:
        For non-postgresql backends, creating a materialized view
        will result in a standard view, which cannot be indexed
    Preconditions:
        name is of type String
        selectable is of type FromClause
        metadata is of type Metadata
        materialized is of type Boolean
    '''
    assert isinstance(name, str), 'Name is not of type String'
    assert isinstance(selectable, FromClause), 'Selectable is not of type FromClause'
    assert isinstance(metadata, MetaData), 'Metadata is not of type MetaData'
    assert isinstance(materialized, bool), 'Materialized is not of type Boolean'
    # Mirror the selectable's columns on a table bound to throwaway metadata,
    # so the "view table" never gets created as a real table.
    view_table = Table(name, MetaData())
    for col in selectable.c:
        view_table.append_column(Column(col.name, col.type, primary_key=col.primary_key))
    # Pick DDL expressions up front, then hook them to the metadata events.
    if materialized:
        create_ddl = CreateMaterializedViewExpression(name, selectable)
        drop_ddl = DropMaterializedViewExpression(name)
    else:
        create_ddl = CreateViewExpression(name, selectable)
        drop_ddl = DropViewExpression(name)
    listen(metadata, 'after_create', create_ddl)
    listen(metadata, 'before_drop', drop_ddl)
    return view_table
0
Source : declarative.py
with GNU General Public License v3.0
from Artikash
with GNU General Public License v3.0
from Artikash
def _as_declarative(cls, classname, dict_):
    """Map *cls* declaratively: scan its MRO for columns, mapper
    properties and dunder configuration, assemble a Table if needed,
    and finally construct the mapper.

    NOTE(review): indentation reconstructed from a flattened source;
    logic and tokens are unchanged.
    """
    # dict_ will be a dictproxy, which we can't write to, and we need to!
    dict_ = dict(dict_)
    column_copies = {}
    potential_columns = {}
    mapper_args = {}
    table_args = inherited_table_args = None
    tablename = None
    parent_columns = ()
    declarative_props = (declared_attr, util.classproperty)
    # Walk the full MRO so mixin-supplied configuration is picked up.
    for base in cls.__mro__:
        class_mapped = _is_mapped_class(base)
        if class_mapped:
            parent_columns = base.__table__.c.keys()
        for name, obj in vars(base).items():
            if name == '__mapper_args__':
                # First (most-derived) definition wins.
                if not mapper_args and (
                    not class_mapped or
                    isinstance(obj, declarative_props)
                ):
                    mapper_args = cls.__mapper_args__
            elif name == '__tablename__':
                if not tablename and (
                    not class_mapped or
                    isinstance(obj, declarative_props)
                ):
                    tablename = cls.__tablename__
            elif name == '__table_args__':
                if not table_args and (
                    not class_mapped or
                    isinstance(obj, declarative_props)
                ):
                    table_args = cls.__table_args__
                    if not isinstance(table_args, (tuple, dict, type(None))):
                        raise exc.ArgumentError(
                            "__table_args__ value must be a tuple, "
                            "dict, or None")
                    if base is not cls:
                        inherited_table_args = True
            elif class_mapped:
                # Attributes of already-mapped bases are handled by
                # normal inheritance mechanics; skip them here.
                continue
            elif base is not cls:
                # we're a mixin.
                if isinstance(obj, Column):
                    if obj.foreign_keys:
                        raise exc.InvalidRequestError(
                            "Columns with foreign keys to other columns "
                            "must be declared as @declared_attr callables "
                            "on declarative mixin classes. ")
                    # Copy the mixin column so each subclass gets its own,
                    # preserving declaration order for later sorting.
                    if name not in dict_ and not (
                        '__table__' in dict_ and
                        (obj.name or name) in dict_['__table__'].c
                    ) and name not in potential_columns:
                        potential_columns[name] = \
                            column_copies[obj] = \
                            obj.copy()
                        column_copies[obj]._creation_order = \
                            obj._creation_order
                elif isinstance(obj, MapperProperty):
                    raise exc.InvalidRequestError(
                        "Mapper properties (i.e. deferred,"
                        "column_property(), relationship(), etc.) must "
                        "be declared as @declared_attr callables "
                        "on declarative mixin classes.")
                elif isinstance(obj, declarative_props):
                    # Evaluate @declared_attr per subclass.
                    dict_[name] = ret = \
                        column_copies[obj] = getattr(cls, name)
                    if isinstance(ret, (Column, MapperProperty)) and \
                            ret.doc is None:
                        ret.doc = obj.__doc__
    # apply inherited columns as we should
    for k, v in potential_columns.items():
        if tablename or (v.name or k) not in parent_columns:
            dict_[k] = v
    if inherited_table_args and not tablename:
        table_args = None
    # make sure that column copies are used rather
    # than the original columns from any mixins
    for k in ('version_id_col', 'polymorphic_on',):
        if k in mapper_args:
            v = mapper_args[k]
            mapper_args[k] = column_copies.get(v, v)
    if classname in cls._decl_class_registry:
        util.warn("The classname %r is already in the registry of this"
                  " declarative base, mapped to %r" % (
                      classname,
                      cls._decl_class_registry[classname]
                  ))
    cls._decl_class_registry[classname] = cls
    our_stuff = util.OrderedDict()
    # Collect Column / MapperProperty attributes from the class dict.
    for k in dict_:
        value = dict_[k]
        if isinstance(value, declarative_props):
            value = getattr(cls, k)
        if (isinstance(value, tuple) and len(value) == 1 and
                isinstance(value[0], (Column, MapperProperty))):
            util.warn("Ignoring declarative-like tuple value of attribute "
                      "%s: possibly a copy-and-paste error with a comma "
                      "left at the end of the line?" % k)
            continue
        if not isinstance(value, (Column, MapperProperty)):
            continue
        if k == 'metadata':
            raise exc.InvalidRequestError(
                "Attribute name 'metadata' is reserved "
                "for the MetaData instance when using a "
                "declarative base class."
            )
        prop = _deferred_relationship(cls, value)
        our_stuff[k] = prop
    # set up attributes in the order they were created
    our_stuff.sort(key=lambda key: our_stuff[key]._creation_order)
    # extract columns from the class dict
    cols = []
    for key, c in our_stuff.iteritems():
        if isinstance(c, ColumnProperty):
            for col in c.columns:
                if isinstance(col, Column) and col.table is None:
                    _undefer_column_name(key, col)
                    cols.append(col)
        elif isinstance(c, Column):
            _undefer_column_name(key, c)
            cols.append(c)
            # if the column is the same name as the key,
            # remove it from the explicit properties dict.
            # the normal rules for assigning column-based properties
            # will take over, including precedence of columns
            # in multi-column ColumnProperties.
            if key == c.key:
                del our_stuff[key]
    table = None
    if '__table__' not in dict_:
        if tablename is not None:
            # Normalize __table_args__ into positional args + kwargs.
            if isinstance(table_args, dict):
                args, table_kw = (), table_args
            elif isinstance(table_args, tuple):
                args = table_args[0:-1]
                table_kw = table_args[-1]
                if len(table_args) < 2 or not isinstance(table_kw, dict):
                    raise exc.ArgumentError(
                        "Tuple form of __table_args__ is "
                        "(arg1, arg2, arg3, ..., {'kw1':val1, "
                        "'kw2':val2, ...})"
                    )
            else:
                args, table_kw = (), {}
            autoload = dict_.get('__autoload__')
            if autoload:
                table_kw['autoload'] = True
            cls.__table__ = table = Table(tablename, cls.metadata,
                                          *(tuple(cols) + tuple(args)),
                                          **table_kw)
    else:
        table = cls.__table__
        # An explicit __table__ must already contain every declared column.
        if cols:
            for c in cols:
                if not table.c.contains_column(c):
                    raise exc.ArgumentError(
                        "Can't add additional column %r when "
                        "specifying __table__" % c.key
                    )
    if 'inherits' not in mapper_args:
        # Infer the inherited mapped class from the direct bases.
        for c in cls.__bases__:
            if _is_mapped_class(c):
                mapper_args['inherits'] = cls._decl_class_registry.get(
                    c.__name__, None)
                break
    if hasattr(cls, '__mapper_cls__'):
        mapper_cls = util.unbound_method_to_callable(cls.__mapper_cls__)
    else:
        mapper_cls = mapper
    if table is None and 'inherits' not in mapper_args:
        raise exc.InvalidRequestError(
            "Class %r does not have a __table__ or __tablename__ "
            "specified and does not inherit from an existing "
            "table-mapped class." % cls
        )
    elif 'inherits' in mapper_args and not mapper_args.get('concrete', False):
        inherited_mapper = class_mapper(mapper_args['inherits'],
                                        compile=False)
        inherited_table = inherited_mapper.local_table
        if table is None:
            # single table inheritance.
            # ensure no table args
            if table_args:
                raise exc.ArgumentError(
                    "Can't place __table_args__ on an inherited class "
                    "with no table."
                )
            # add any columns declared here to the inherited table.
            for c in cols:
                if c.primary_key:
                    raise exc.ArgumentError(
                        "Can't place primary key columns on an inherited "
                        "class with no table."
                    )
                if c.name in inherited_table.c:
                    raise exc.ArgumentError(
                        "Column '%s' on class %s conflicts with "
                        "existing column '%s'" %
                        (c, cls, inherited_table.c[c.name])
                    )
                inherited_table.append_column(c)
        # single or joined inheritance
        # exclude any cols on the inherited table which are not mapped on the
        # parent class, to avoid
        # mapping columns specific to sibling/nephew classes
        inherited_mapper = class_mapper(mapper_args['inherits'],
                                        compile=False)
        inherited_table = inherited_mapper.local_table
        if 'exclude_properties' not in mapper_args:
            mapper_args['exclude_properties'] = exclude_properties = \
                set([c.key for c in inherited_table.c
                     if c not in inherited_mapper._columntoproperty])
            exclude_properties.difference_update([c.key for c in cols])
        # look through columns in the current mapper that
        # are keyed to a propname different than the colname
        # (if names were the same, we'd have popped it out above,
        # in which case the mapper makes this combination).
        # See if the superclass has a similar column property.
        # If so, join them together.
        for k, col in our_stuff.items():
            if not isinstance(col, expression.ColumnElement):
                continue
            if k in inherited_mapper._props:
                p = inherited_mapper._props[k]
                if isinstance(p, ColumnProperty):
                    # note here we place the superclass column
                    # first.  this corresponds to the
                    # append() in mapper._configure_property().
                    # change this ordering when we do [ticket:1892]
                    our_stuff[k] = p.columns + [col]
    cls.__mapper__ = mapper_cls(cls,
                                table,
                                properties=our_stuff,
                                **mapper_args)
class DeclarativeMeta(type):
0
Source : query.py
with GNU General Public License v3.0
from Artikash
with GNU General Public License v3.0
from Artikash
def delete(self, synchronize_session='evaluate'):
    """Perform a bulk delete query.
    Deletes rows matched by this query from the database.
    :param synchronize_session: chooses the strategy for the removal of
    matched objects from the session. Valid values are:
    False - don't synchronize the session. This option is the most
    efficient and is reliable once the session is expired, which
    typically occurs after a commit(), or explicitly using
    expire_all(). Before the expiration, objects may still remain in
    the session which were in fact deleted which can lead to confusing
    results if they are accessed via get() or already loaded
    collections.
    'fetch' - performs a select query before the delete to find
    objects that are matched by the delete query and need to be
    removed from the session. Matched objects are removed from the
    session.
    'evaluate' - Evaluate the query's criteria in Python straight on
    the objects in the session. If evaluation of the criteria isn't
    implemented, an error is raised. In that case you probably
    want to use the 'fetch' strategy as a fallback.
    The expression evaluator currently doesn't account for differing
    string collations between the database and Python.
    Returns the number of rows deleted, excluding any cascades.
    The method does *not* offer in-Python cascading of relationships - it
    is assumed that ON DELETE CASCADE is configured for any foreign key
    references which require it. The Session needs to be expired (occurs
    automatically after commit(), or call expire_all()) in order for the
    state of dependent objects subject to delete or delete-orphan cascade
    to be correctly represented.
    Also, the ``before_delete()`` and ``after_delete()``
    :class:`~sqlalchemy.orm.interfaces.MapperExtension` methods are not
    called from this method. For a delete hook here, use the
    :meth:`.SessionExtension.after_bulk_delete()` event hook.
    """
    #TODO: lots of duplication and ifs - probably needs to be
    # refactored to strategies
    #TODO: cascades need handling.
    if synchronize_session not in [False, 'evaluate', 'fetch']:
        raise sa_exc.ArgumentError(
            "Valid strategies for session "
            "synchronization are False, 'evaluate' and "
            "'fetch'")
    self._no_select_modifiers("delete")
    self = self.enable_eagerloads(False)
    context = self._compile_context()
    # Bulk delete only works against a single plain table.
    if len(context.statement.froms) != 1 or \
            not isinstance(context.statement.froms[0], schema.Table):
        raise sa_exc.ArgumentError("Only deletion via a single table "
                                   "query is currently supported")
    primary_table = context.statement.froms[0]
    session = self.session
    # Build the Python-side predicate up front so an unevaluatable
    # criterion fails before any SQL is emitted.
    if synchronize_session == 'evaluate':
        try:
            evaluator_compiler = evaluator.EvaluatorCompiler()
            if self.whereclause is not None:
                eval_condition = evaluator_compiler.process(
                    self.whereclause)
            else:
                def eval_condition(obj):
                    return True
        except evaluator.UnevaluatableError:
            raise sa_exc.InvalidRequestError(
                "Could not evaluate current criteria in Python. "
                "Specify 'fetch' or False for the synchronize_session "
                "parameter.")
    delete_stmt = sql.delete(primary_table, context.whereclause)
    if synchronize_session == 'fetch':
        #TODO: use RETURNING when available
        # Snapshot matched primary keys BEFORE the delete runs.
        select_stmt = context.statement.with_only_columns(
            primary_table.primary_key)
        matched_rows = session.execute(
            select_stmt,
            params=self._params).fetchall()
    if self._autoflush:
        session._autoflush()
    result = session.execute(delete_stmt, params=self._params)
    if synchronize_session == 'evaluate':
        target_cls = self._mapper_zero().class_
        #TODO: detect when the where clause is a trivial primary key match
        # Expunge every identity-map entry whose object satisfies the
        # evaluated criterion.
        objs_to_expunge = [
            obj for (cls, pk), obj in
            session.identity_map.iteritems()
            if issubclass(cls, target_cls) and
            eval_condition(obj)]
        for obj in objs_to_expunge:
            session._remove_newly_deleted(attributes.instance_state(obj))
    elif synchronize_session == 'fetch':
        # Expunge by the primary keys captured before the delete.
        target_mapper = self._mapper_zero()
        for primary_key in matched_rows:
            identity_key = target_mapper.identity_key_from_primary_key(
                list(primary_key))
            if identity_key in session.identity_map:
                session._remove_newly_deleted(
                    attributes.instance_state(
                        session.identity_map[identity_key]
                    )
                )
    for ext in session.extensions:
        ext.after_bulk_delete(session, self, context, result)
    return result.rowcount
def update(self, values, synchronize_session='evaluate'):
0
Source : query.py
with GNU General Public License v3.0
from Artikash
with GNU General Public License v3.0
from Artikash
def update(self, values, synchronize_session='evaluate'):
    """Perform a bulk update query.

    Updates rows matched by this query in the database.

    :param values: a dictionary with attributes names as keys and literal
      values or sql expressions as values.

    :param synchronize_session: chooses the strategy to update the
      attributes on objects in the session. Valid values are:

        False - don't synchronize the session. This option is the most
        efficient and is reliable once the session is expired, which
        typically occurs after a commit(), or explicitly using
        expire_all(). Before the expiration, updated objects may still
        remain in the session with stale values on their attributes,
        which can lead to confusing results.

        'fetch' - performs a select query before the update to find
        objects that are matched by the update query. The updated
        attributes are expired on matched objects.

        'evaluate' - Evaluate the Query's criteria in Python straight on
        the objects in the session. If evaluation of the criteria isn't
        implemented, an exception is raised.

        The expression evaluator currently doesn't account for differing
        string collations between the database and Python.

    Returns the number of rows matched by the update.

    The method does *not* offer in-Python cascading of relationships - it
    is assumed that ON UPDATE CASCADE is configured for any foreign key
    references which require it.

    The Session needs to be expired (occurs automatically after commit(),
    or call expire_all()) in order for the state of dependent objects
    subject foreign key cascade to be correctly represented.

    Also, the ``before_update()`` and ``after_update()``
    :class:`~sqlalchemy.orm.interfaces.MapperExtension` methods are not
    called from this method. For an update hook here, use the
    :meth:`.SessionExtension.after_bulk_update()` event hook.
    """
    #TODO: value keys need to be mapped to corresponding sql cols and
    # instr.attr.s to string keys
    #TODO: updates of manytoone relationships need to be converted to
    # fk assignments
    #TODO: cascades need handling.

    # 'expire' is the deprecated spelling of the 'fetch' strategy.
    if synchronize_session == 'expire':
        util.warn_deprecated("The 'expire' value as applied to "
                            "the synchronize_session argument of "
                            "query.update() is now called 'fetch'")
        synchronize_session = 'fetch'

    if synchronize_session not in [False, 'evaluate', 'fetch']:
        raise sa_exc.ArgumentError(
                        "Valid strategies for session synchronization "
                        "are False, 'evaluate' and 'fetch'")
    self._no_select_modifiers("update")

    # Compile without eager loads so the statement maps onto a single
    # table, which is the only form bulk UPDATE supports.
    self = self.enable_eagerloads(False)

    context = self._compile_context()
    if len(context.statement.froms) != 1 or \
                not isinstance(context.statement.froms[0], schema.Table):
        raise sa_exc.ArgumentError(
                        "Only update via a single table query is "
                        "currently supported")
    primary_table = context.statement.froms[0]

    session = self.session

    if synchronize_session == 'evaluate':
        # Build Python-side evaluators for the WHERE criterion and for
        # each value expression, so in-session objects can be updated
        # without a second database round trip.
        try:
            evaluator_compiler = evaluator.EvaluatorCompiler()
            if self.whereclause is not None:
                eval_condition = evaluator_compiler.process(
                                                    self.whereclause)
            else:
                def eval_condition(obj):
                    return True

            value_evaluators = {}
            for key,value in values.iteritems():
                key = _attr_as_key(key)
                value_evaluators[key] = evaluator_compiler.process(
                                expression._literal_as_binds(value))
        except evaluator.UnevaluatableError:
            raise sa_exc.InvalidRequestError(
                    "Could not evaluate current criteria in Python. "
                    "Specify 'fetch' or False for the "
                    "synchronize_session parameter.")

    update_stmt = sql.update(primary_table, context.whereclause, values)

    if synchronize_session == 'fetch':
        # Capture the primary keys of the to-be-updated rows *before*
        # the UPDATE runs, so the matching identities can be expired
        # afterwards.
        select_stmt = context.statement.with_only_columns(
                                            primary_table.primary_key)
        matched_rows = session.execute(
                                    select_stmt,
                                    params=self._params).fetchall()

    if self._autoflush:
        session._autoflush()
    result = session.execute(update_stmt, params=self._params)

    if synchronize_session == 'evaluate':
        target_cls = self._mapper_zero().class_

        for (cls, pk),obj in session.identity_map.iteritems():
            evaluated_keys = value_evaluators.keys()

            if issubclass(cls, target_cls) and eval_condition(obj):
                state, dict_ = attributes.instance_state(obj),\
                                        attributes.instance_dict(obj)

                # only evaluate unmodified attributes
                to_evaluate = state.unmodified.intersection(
                                                        evaluated_keys)
                for key in to_evaluate:
                    dict_[key] = value_evaluators[key](obj)

                state.commit(dict_, list(to_evaluate))

                # expire attributes with pending changes
                # (there was no autoflush, so they are overwritten)
                state.expire_attributes(dict_,
                                set(evaluated_keys).
                                    difference(to_evaluate))

    elif synchronize_session == 'fetch':
        # Expire the updated attributes on every matched identity that
        # is currently present in the session.
        target_mapper = self._mapper_zero()

        for primary_key in matched_rows:
            identity_key = target_mapper.identity_key_from_primary_key(
                                            list(primary_key))
            if identity_key in session.identity_map:
                session.expire(
                            session.identity_map[identity_key],
                            [_attr_as_key(k) for k in values]
                            )

    for ext in session.extensions:
        ext.after_bulk_update(session, self, context, result)

    return result.rowcount
def _compile_context(self, labels=True):
0
Source : compiler.py
with GNU General Public License v3.0
from Artikash
with GNU General Public License v3.0
from Artikash
def visit_ddl(self, ddl, **kwargs):
    """Render a DDL statement by %-interpolating its context dict.

    When the DDL targets a :class:`~sqlalchemy.schema.Table`, the
    substitution keys ``table``, ``schema`` and ``fullname`` are filled
    in from the target's formatted name unless the caller already
    supplied them.
    """
    # table events can substitute table and schema name
    params = ddl.context
    if isinstance(ddl.target, schema.Table):
        # Copy so the defaults never leak back into ddl.context.
        params = params.copy()

        preparer = self.dialect.identifier_preparer
        name_seq = preparer.format_table_seq(ddl.target)
        if len(name_seq) > 1:
            # schema-qualified name: first element is the schema.
            sch_name, tbl_name = name_seq[0], name_seq[-1]
        else:
            sch_name, tbl_name = '', name_seq[0]

        params.setdefault('table', tbl_name)
        params.setdefault('schema', sch_name)
        params.setdefault('fullname', preparer.format_table(ddl.target))

    return ddl.statement % params
def visit_create_table(self, create):
0
Source : table.py
with MIT License
from bkerler
with MIT License
from bkerler
def _sync_table(self, columns):
    """Lazy load, create or adapt the table structure in the database.

    ``columns`` is an iterable of SQLAlchemy ``Column`` objects that must
    exist after this call.  Three cases are handled: reflect an existing
    table, create a brand-new table, or ALTER an already-loaded table to
    add missing columns.
    """
    if self._table is None:
        # Load an existing table from the database.
        self._reflect_table()
    if self._table is None:
        # Create the table with an initial set of columns.
        if not self._auto_create:
            raise DatasetException("Table does not exist: %s" % self.name)
        # Keep the lock scope small because this is run very often.
        with self.db.lock:
            self._threading_warn()
            self._table = SQLATable(self.name,
                                    self.db.metadata,
                                    schema=self.db.schema)
            # _primary_id is False means "no surrogate primary key".
            if self._primary_id is not False:
                # This can go wrong on DBMS like MySQL and SQLite where
                # tables cannot have no columns.
                primary_id = self._primary_id or self.PRIMARY_DEFAULT
                primary_type = self._primary_type or Types.integer
                # Auto-increment only makes sense for integer keys.
                increment = primary_type in [Types.integer, Types.bigint]
                column = Column(primary_id, primary_type,
                                primary_key=True,
                                autoincrement=increment)
                self._table.append_column(column)
            # Add the requested columns, skipping a duplicate of the
            # primary key column added above.
            for column in columns:
                if not column.name == self._primary_id:
                    self._table.append_column(column)
            self._table.create(self.db.executable, checkfirst=True)
    elif len(columns):
        # Table already loaded: add any columns it does not yet have.
        with self.db.lock:
            self._reflect_table()
            self._threading_warn()
            for column in columns:
                if not self.has_column(column.name):
                    self.db.op.add_column(self.name, column, self.db.schema)
            # Re-reflect so self._table picks up the new columns.
            self._reflect_table()
def _sync_columns(self, row, ensure, types=None):
0
Source : __init__.py
with GNU Affero General Public License v3.0
from CASES-LU
with GNU Affero General Public License v3.0
from CASES-LU
def db_empty(db):
    """Drop every table (and all data) stored in ``db``.

    Foreign-key constraints are dropped first so tables can then be
    dropped in arbitrary order.

    :param db: an object exposing a SQLAlchemy ``engine`` attribute.
    """
    # From http://www.sqlalchemy.org/trac/wiki/UsageRecipes/DropEverything
    conn = db.engine.connect()
    try:
        # the transaction only applies if the DB supports
        # transactional DDL, i.e. Postgresql, MS SQL Server
        with conn.begin():
            inspector = reflection.Inspector.from_engine(db.engine)

            # gather all data first before dropping anything.
            # some DBs lock after things have been dropped in
            # a transaction.
            metadata = MetaData()
            tbs = []
            all_fks = []

            for table_name in inspector.get_table_names():
                fks = []
                for fk in inspector.get_foreign_keys(table_name):
                    # Unnamed FK constraints cannot be dropped explicitly.
                    if not fk["name"]:
                        continue
                    fks.append(ForeignKeyConstraint((), (), name=fk["name"]))
                t = Table(table_name, metadata, *fks)
                tbs.append(t)
                all_fks.extend(fks)

            # Drop FK constraints first so table drop order is irrelevant.
            for fkc in all_fks:
                conn.execute(DropConstraint(fkc))
            for table in tbs:
                conn.execute(DropTable(table))
            # Note: ``with conn.begin()`` commits on success and rolls
            # back on error; the previous explicit ``trans.commit()``
            # inside the block was redundant.
    finally:
        # Fix: the connection was never closed, leaking it from the pool.
        conn.close()
0
Source : db.py
with GNU Affero General Public License v3.0
from closeio
with GNU Affero General Public License v3.0
from closeio
def drop_everything(engine, keep_tables=None, reset_columns=None):
    """ Drops all tables in the db unless their name is in `keep_tables`.

    `reset_columns` is used to specify the columns that should be reset
    to default value in the tables that we're keeping - provided as a
    dict of table_name: list_of_column_names.
    """
    keep_tables = keep_tables or []
    reset_columns = reset_columns or {}
    conn = engine.connect()
    trans = conn.begin()
    try:
        inspector = reflection.Inspector.from_engine(engine)
        # gather all data first before dropping anything.
        # some DBs lock after things have been dropped in
        # a transaction.
        metadata = MetaData()
        tbs = []
        all_fks = []

        for table_name in inspector.get_table_names():
            if table_name in keep_tables:
                # Reset certain columns in certain tables we're keeping.
                if table_name in reset_columns:
                    t = Table(table_name, metadata)
                    column_names = reset_columns[table_name]
                    for c in inspector.get_columns(table_name):
                        if c["name"] in column_names:
                            assert c["default"]
                            # Identifiers and defaults come from the
                            # database's own metadata (inspector), not
                            # from user input.
                            q = "UPDATE {} SET {}={};".format(
                                table_name, c["name"], c["default"]
                            )
                            conn.execute(q)
                continue
            fks = []
            for fk in inspector.get_foreign_keys(table_name):
                # Unnamed FK constraints cannot be dropped explicitly.
                if not fk["name"]:
                    continue
                fks.append(ForeignKeyConstraint((), (), name=fk["name"]))
            t = Table(table_name, metadata, *fks)
            tbs.append(t)
            all_fks.extend(fks)

        # Drop FK constraints first so table drop order is irrelevant.
        for fkc in all_fks:
            conn.execute(DropConstraint(fkc))
        for table in tbs:
            conn.execute(DropTable(table))
        trans.commit()
    except Exception:
        # Fix: previously an error left the transaction open.
        trans.rollback()
        raise
    finally:
        # Fix: the connection was never closed, leaking it from the pool.
        conn.close()
0
Source : SQLiteFileLibrary.py
with MIT License
from elbakramer
with MIT License
from elbakramer
def read_as_cursor(self, symbol, time_column=None, start_time=None, end_time=None):
    """Select rows from *symbol*'s table, optionally time-filtered.

    :param symbol: table name to reflect and read from.
    :param time_column: name (or index) of the time column used for
        ordering and filtering; when omitted but a time bound is given,
        the first column is assumed to be the time column.
    :param start_time: inclusive lower bound, converted to UTC.
    :param end_time: inclusive upper bound, converted to UTC.
    :returns: the DBAPI-style result cursor from ``engine.execute``.
    """
    records = Table(symbol, MetaData(), autoload_with=self._engine)
    statement = select(records)

    def _to_utc(ts):
        # Normalize bounds: naive timestamps are assumed local time,
        # then everything is converted to UTC for comparison.
        ts = pd.Timestamp(ts)
        if Timestamp.is_naive(ts):
            ts = ts.tz_localize(Timestamp.local_timezone)
        return ts.astimezone(Timestamp.utc)

    # Resolve the time column exactly once (the original re-resolved it
    # and re-applied ORDER BY in every filter branch).
    time_col = None
    if time_column is not None:
        time_col = records.columns[
            time_column
        ]  # pylint: disable=unsubscriptable-object
    elif start_time is not None or end_time is not None:
        # No column named: fall back to the first column.
        time_col = records.columns[0]  # pylint: disable=unsubscriptable-object

    if time_col is not None:
        statement = statement.order_by(time_col)
    if start_time is not None:
        statement = statement.where(time_col >= _to_utc(start_time))
    if end_time is not None:
        # Fix: the original read ``< =`` (a syntax error); the intended
        # inclusive upper bound is ``<=``.
        statement = statement.where(time_col <= _to_utc(end_time))

    data = self._engine.execute(statement)
    return data
def read(self, *args, **kwargs):
See More Examples