Here are examples of the Python API `sqlalchemy.testing.in_` taken from open-source projects. By voting up, you can indicate which examples are most useful and appropriate.
60 Examples
3
Source : test_utils.py
with Apache License 2.0
from gethue
with Apache License 2.0
from gethue
# Verify that keys of the wrapped dict appear as attributes in dir() of a Properties object.
def test_keys_in_dir(self):
data = {"hello": "bla"}
props = util.Properties(data)
in_("hello", dir(props))
def test_pickle_immuatbleprops(self):
3
Source : test_indexable.py
with Apache License 2.0
from gethue
with Apache License 2.0
from gethue
# Verify that setting an index_property-backed attribute marks the instance
# (and the underlying "json" attribute) as modified in the inspection API.
def test_modified(self):
Json = self.classes.Json
s = Session(testing.db)
j = Json(json={})
s.add(j)
s.commit()
i = inspect(j)
# freshly committed: nothing modified yet
is_(i.modified, False)
in_("json", i.unmodified)
j.other = 42
# mutation through the indexed attribute dirties "json"
is_(i.modified, True)
not_in_("json", i.unmodified)
def test_cast_type(self):
3
Source : test_resultset.py
with Apache License 2.0
from gethue
with Apache License 2.0
from gethue
# Verify row-membership fallback when a label collides with an unrelated
# column name: only the lightweight string column matches, not the Table columns.
def test_column_label_overlap_fallback(self):
content, bar = self.tables.content, self.tables.bar
row = testing.db.execute(
select([content.c.type.label("content_type")])
).first()
not_in_(content.c.type, row)
not_in_(bar.c.content_type, row)
in_(sql.column("content_type"), row)
row = testing.db.execute(
select([func.now().label("content_type")])
).first()
not_in_(content.c.type, row)
not_in_(bar.c.content_type, row)
in_(sql.column("content_type"), row)
def test_column_label_overlap_fallback_2(self):
3
Source : test_resultset.py
with Apache License 2.0
from gethue
with Apache License 2.0
from gethue
# With use_labels=True, the real Table column targets the row; neither the
# unrelated table's column nor a bare string column matches.
def test_column_label_overlap_fallback_2(self):
content, bar = self.tables.content, self.tables.bar
row = testing.db.execute(content.select(use_labels=True)).first()
in_(content.c.type, row)
not_in_(bar.c.content_type, row)
not_in_(sql.column("content_type"), row)
def test_columnclause_schema_column_one(self):
3
Source : test_resultset.py
with Apache License 2.0
from gethue
with Apache License 2.0
from gethue
# Lightweight ColumnClause objects in the statement allow liberal
# name-based matching against schema columns in the result row.
def test_columnclause_schema_column_one(self):
keyed2 = self.tables.keyed2
# this is addressed by [ticket:2932]
# ColumnClause._compare_name_for_result allows the
# columns which the statement is against to be lightweight
# cols, which results in a more liberal comparison scheme
a, b = sql.column("a"), sql.column("b")
stmt = select([a, b]).select_from(table("keyed2"))
row = testing.db.execute(stmt).first()
in_(keyed2.c.a, row)
in_(keyed2.c.b, row)
in_(a, row)
in_(b, row)
def test_columnclause_schema_column_two(self):
3
Source : test_resultset.py
with Apache License 2.0
from gethue
with Apache License 2.0
from gethue
# The converse of column_one: selecting real schema columns still lets
# same-named lightweight ColumnClause objects match the row.
def test_columnclause_schema_column_two(self):
keyed2 = self.tables.keyed2
a, b = sql.column("a"), sql.column("b")
stmt = select([keyed2.c.a, keyed2.c.b])
row = testing.db.execute(stmt).first()
in_(keyed2.c.a, row)
in_(keyed2.c.b, row)
in_(a, row)
in_(b, row)
def test_columnclause_schema_column_three(self):
3
Source : test_resultset.py
with Apache License 2.0
from gethue
with Apache License 2.0
from gethue
# text().columns() with keyword type mapping: both the textual statement's
# columns and same-named schema / lightweight columns target the row.
def test_columnclause_schema_column_three(self):
keyed2 = self.tables.keyed2
# this is also addressed by [ticket:2932]
a, b = sql.column("a"), sql.column("b")
stmt = text("select a, b from keyed2").columns(a=CHAR, b=CHAR)
row = testing.db.execute(stmt).first()
in_(keyed2.c.a, row)
in_(keyed2.c.b, row)
in_(a, row)
in_(b, row)
in_(stmt.c.a, row)
in_(stmt.c.b, row)
def test_columnclause_schema_column_four(self):
3
Source : test_resultset.py
with Apache License 2.0
from gethue
with Apache License 2.0
from gethue
# text().columns() with positional ColumnClause objects whose names carry a
# table prefix ("keyed2_a"): matching still resolves schema columns.
def test_columnclause_schema_column_four(self):
keyed2 = self.tables.keyed2
# this is also addressed by [ticket:2932]
a, b = sql.column("keyed2_a"), sql.column("keyed2_b")
stmt = text("select a AS keyed2_a, b AS keyed2_b from keyed2").columns(
a, b
)
row = testing.db.execute(stmt).first()
in_(keyed2.c.a, row)
in_(keyed2.c.b, row)
in_(a, row)
in_(b, row)
in_(stmt.c.keyed2_a, row)
in_(stmt.c.keyed2_b, row)
def test_columnclause_schema_column_five(self):
3
Source : test_resultset.py
with Apache License 2.0
from gethue
with Apache License 2.0
from gethue
# Same as column_four but declaring the textual columns via keyword/type
# mapping instead of positional ColumnClause objects.
def test_columnclause_schema_column_five(self):
keyed2 = self.tables.keyed2
# this is also addressed by [ticket:2932]
stmt = text("select a AS keyed2_a, b AS keyed2_b from keyed2").columns(
keyed2_a=CHAR, keyed2_b=CHAR
)
row = testing.db.execute(stmt).first()
in_(keyed2.c.a, row)
in_(keyed2.c.b, row)
in_(stmt.c.keyed2_a, row)
in_(stmt.c.keyed2_b, row)
class PositionalTextTest(fixtures.TablesTest):
3
Source : test_autogen_diffs.py
with MIT License
from sqlalchemy
with MIT License
from sqlalchemy
def test_all_existings_filled_in_server_default(self, pk):
m1 = MetaData()
m2 = MetaData()
Table(
"a", m1, Column("x", Integer, server_default="5", primary_key=pk)
)
Table(
"a",
m2,
Column(
"x", Integer, server_default="5", comment="new", primary_key=pk
),
)
alter_col = self._assert_alter_col(m1, m2, pk)
in_("5", alter_col.existing_server_default.arg.text)
def _assert_alter_col(self, m1, m2, pk, nullable=None):
3
Source : test_dialect.py
with MIT License
from sqlalchemy
with MIT License
from sqlalchemy
# MySQL dialect: a connection made with a given charset should report that
# charset back; "utf8" may be normalized to "utf8mb3" by newer servers.
def test_special_encodings(self, enc):
eng = engines.testing_engine(
options={"connect_args": {"charset": enc, "use_unicode": 0}}
)
conn = eng.connect()
detected = conn.dialect._connection_charset
if enc == "utf8mb4":
eq_(detected, enc)
else:
in_(detected, ["utf8", "utf8mb3"])
@testing.only_on("mariadb+mariadbconnector")
3
Source : test_indexable.py
with MIT License
from sqlalchemy
with MIT License
from sqlalchemy
# 1.4-era variant of the indexable "modified" test; uses not_in (without
# trailing underscore), the renamed assertion in modern SQLAlchemy.
def test_modified(self):
Json = self.classes.Json
s = Session(testing.db)
j = Json(json={})
s.add(j)
s.commit()
i = inspect(j)
is_(i.modified, False)
in_("json", i.unmodified)
j.other = 42
is_(i.modified, True)
not_in("json", i.unmodified)
def test_cast_type(self):
3
Source : test_resultset.py
with MIT License
from sqlalchemy
with MIT License
from sqlalchemy
# 1.4 Row API: _mapping.keys() and _fields expose column names; membership
# works for both string names and Column objects via the mapping view.
def test_row_mapping_keys(self, connection):
users = self.tables.users
connection.execute(users.insert(), dict(user_id=1, user_name="foo"))
result = connection.execute(users.select())
eq_(result.keys(), ["user_id", "user_name"])
row = result.first()
eq_(list(row._mapping.keys()), ["user_id", "user_name"])
eq_(row._fields, ("user_id", "user_name"))
in_("user_id", row._fields)
not_in("foo", row._fields)
in_(users.c.user_id, row._mapping.keys())
def test_row_keys_legacy_dont_warn(self, connection):
3
Source : test_resultset.py
with MIT License
from sqlalchemy
with MIT License
from sqlalchemy
# 1.4 behavior: only the exact ColumnClause objects used in the statement
# target row._mapping; the liberal schema-column matching is gone.
def test_columnclause_schema_column_one(self, connection):
# originally addressed by [ticket:2932], however liberalized
# Column-targeting rules are deprecated
a, b = sql.column("a"), sql.column("b")
stmt = select(a, b).select_from(table("keyed2"))
row = connection.execute(stmt).first()
in_(a, row._mapping)
in_(b, row._mapping)
keyed2 = self.tables.keyed2
not_in(keyed2.c.a, row._mapping)
not_in(keyed2.c.b, row._mapping)
def test_columnclause_schema_column_two(self, connection):
3
Source : test_resultset.py
with MIT License
from sqlalchemy
with MIT License
from sqlalchemy
# 1.4 behavior: schema columns used in the select target the mapping, while
# same-named lightweight ColumnClause objects no longer match.
def test_columnclause_schema_column_two(self, connection):
keyed2 = self.tables.keyed2
stmt = select(keyed2.c.a, keyed2.c.b)
row = connection.execute(stmt).first()
in_(keyed2.c.a, row._mapping)
in_(keyed2.c.b, row._mapping)
# in 1.x, would warn for string match, but return a result
a, b = sql.column("a"), sql.column("b")
not_in(a, row._mapping)
not_in(b, row._mapping)
def test_columnclause_schema_column_three(self, connection):
0
Source : test_reflection.py
with Apache License 2.0
from gethue
with Apache License 2.0
from gethue
def test_max_ident_in_varchar_not_present(self):
"""test [ticket:3504].
Here we are testing not just that the "max" token comes back
as None, but also that these types accept "max" as the value
of "length" on construction, which isn't a directly documented
pattern however is likely in common use.
"""
metadata = self.metadata
Table(
"t",
metadata,
Column("t1", types.String),
Column("t2", types.Text("max")),
Column("t3", types.Text("max")),
Column("t4", types.LargeBinary("max")),
Column("t5", types.VARBINARY("max")),
)
metadata.create_all()
# reflected length is None, but the compiled DDL type still renders "max"
for col in inspect(testing.db).get_columns("t"):
is_(col["type"].length, None)
in_("max", str(col["type"].compile(dialect=testing.db.dialect)))
class InfoCoerceUnicodeTest(fixtures.TestBase, AssertsCompiledSQL):
0
Source : test_reflection.py
with Apache License 2.0
from gethue
with Apache License 2.0
from gethue
# resolve_fks=False on Table reflection: the referenced table "t2" is not
# pulled into the MetaData, and FK resolution raises until t2 is reflected.
def test_resolve_fks_false_table(self):
meta = self.metadata
Table(
"t1",
meta,
Column("id", sa.Integer, primary_key=True),
Column("t2id", sa.Integer, sa.ForeignKey("t2.id")),
test_needs_fk=True,
)
Table(
"t2",
meta,
Column("id", sa.Integer, primary_key=True),
test_needs_fk=True,
)
meta.create_all()
meta2 = MetaData()
t1 = Table("t1", meta2, resolve_fks=False, autoload_with=testing.db)
in_("t1", meta2.tables)
not_in_("t2", meta2.tables)
# FK target is missing, so resolving the remote column fails
assert_raises(
sa.exc.NoReferencedTableError,
lambda: list(t1.c.t2id.foreign_keys)[0].column,
)
t2 = Table("t2", meta2, autoload_with=testing.db)
# now it resolves
is_true(t1.c.t2id.references(t2.c.id))
@testing.provide_metadata
0
Source : test_reflection.py
with Apache License 2.0
from gethue
with Apache License 2.0
from gethue
# Same as test_resolve_fks_false_table but reflecting into a pre-declared
# Table via extend_existing=True.
def test_resolve_fks_false_extend_existing(self):
meta = self.metadata
Table(
"t1",
meta,
Column("id", sa.Integer, primary_key=True),
Column("t2id", sa.Integer, sa.ForeignKey("t2.id")),
test_needs_fk=True,
)
Table(
"t2",
meta,
Column("id", sa.Integer, primary_key=True),
test_needs_fk=True,
)
meta.create_all()
meta2 = MetaData()
Table("t1", meta2)
in_("t1", meta2.tables)
t1 = Table(
"t1",
meta2,
resolve_fks=False,
autoload_with=testing.db,
extend_existing=True,
)
not_in_("t2", meta2.tables)
assert_raises(
sa.exc.NoReferencedTableError,
lambda: list(t1.c.t2id.foreign_keys)[0].column,
)
t2 = Table("t2", meta2, autoload_with=testing.db)
# now it resolves
is_true(t1.c.t2id.references(t2.c.id))
@testing.provide_metadata
0
Source : test_reflection.py
with Apache License 2.0
from gethue
with Apache License 2.0
from gethue
# resolve_fks=False via MetaData.reflect(only=[...]): referenced tables stay
# out until a later reflect() call brings them in.
def test_resolve_fks_false_metadata(self):
meta = self.metadata
Table(
"t1",
meta,
Column("id", sa.Integer, primary_key=True),
Column("t2id", sa.Integer, sa.ForeignKey("t2.id")),
test_needs_fk=True,
)
Table(
"t2",
meta,
Column("id", sa.Integer, primary_key=True),
test_needs_fk=True,
)
meta.create_all()
meta2 = MetaData()
meta2.reflect(testing.db, resolve_fks=False, only=["t1"])
in_("t1", meta2.tables)
not_in_("t2", meta2.tables)
t1 = meta2.tables["t1"]
assert_raises(
sa.exc.NoReferencedTableError,
lambda: list(t1.c.t2id.foreign_keys)[0].column,
)
# second reflect picks up the remaining table; FK now resolves
meta2.reflect(testing.db, resolve_fks=False)
t2 = meta2.tables["t2"]
is_true(t1.c.t2id.references(t2.c.id))
def test_nonexistent(self):
0
Source : test_indexable.py
with Apache License 2.0
from gethue
with Apache License 2.0
from gethue
# Indexable extension (Array): writing through the index_property "first"
# dirties the underlying "array" attribute.
def test_modified(self):
from sqlalchemy import inspect
Array = self.classes.Array
s = Session(testing.db)
a = Array(array=[1, 2, 3])
s.add(a)
s.commit()
i = inspect(a)
is_(i.modified, False)
in_("array", i.unmodified)
a.first = 10
is_(i.modified, True)
not_in_("array", i.unmodified)
class IndexPropertyJsonTest(fixtures.DeclarativeMappedTest):
0
Source : test_deprecations.py
with Apache License 2.0
from gethue
with Apache License 2.0
from gethue
# cascade="expunge" on a viewonly relationship: expunging the parent also
# expunges the loaded child Orders from the session (deprecated behavior).
def test_expunge_cascade(self):
User, Order, orders, users = (
self.classes.User,
self.classes.Order,
self.tables.orders,
self.tables.users,
)
mapper(Order, orders)
mapper(
User,
users,
properties={
"orders": relationship(
Order,
primaryjoin=(
self.tables.users.c.id
== foreign(self.tables.orders.c.user_id)
),
cascade="expunge",
viewonly=True,
)
},
)
sess = Session()
u = User(id=1, name="jack")
sess.add(u)
sess.add_all(
[
Order(id=1, user_id=1, description="someorder"),
Order(id=2, user_id=1, description="someotherorder"),
]
)
sess.commit()
u1 = sess.query(User).first()
orders = u1.orders
eq_(len(orders), 2)
in_(orders[0], sess)
in_(orders[1], sess)
sess.expunge(u1)
# expunge cascades across the viewonly relationship
not_in_(orders[0], sess)
not_in_(orders[1], sess)
def test_default_save_update_cascade(self):
0
Source : test_deprecations.py
with Apache License 2.0
from gethue
with Apache License 2.0
from gethue
# Default save-update cascade on a viewonly relationship: appending children
# to the collection still cascades them into the session (changes in 1.4).
def test_default_save_update_cascade(self):
User, Order, orders, users = (
self.classes.User,
self.classes.Order,
self.tables.orders,
self.tables.users,
)
mapper(Order, orders)
mapper(
User,
users,
properties={
"orders": relationship(
Order,
primaryjoin=(
self.tables.users.c.id
== foreign(self.tables.orders.c.user_id)
),
viewonly=True,
)
},
)
sess = Session()
u1 = User(id=1, name="jack")
sess.add(u1)
o1, o2 = (
Order(id=1, user_id=1, description="someorder"),
Order(id=2, user_id=1, description="someotherorder"),
)
u1.orders.append(o1)
u1.orders.append(o2)
# in 1.4, this becomes "not_in_"
in_(o1, sess)
in_(o2, sess)
def test_default_merge_cascade(self):
0
Source : test_eager_relations.py
with Apache License 2.0
from gethue
with Apache License 2.0
from gethue
# contains_eager along two join paths to the same target class C: both
# paths populate, and C's "d" attribute lands in the loaded instance dict.
def test_multi_path_load(self):
A, B, C, D = self.classes("A", "B", "C", "D")
s = Session()
c = C(d=D())
s.add(A(b=B(c=c), c=c))
s.commit()
c_alias_1 = aliased(C)
c_alias_2 = aliased(C)
q = s.query(A)
q = q.join(A.b).join(c_alias_1, B.c).join(c_alias_1.d)
q = q.options(
contains_eager(A.b)
.contains_eager(B.c, alias=c_alias_1)
.contains_eager(C.d)
)
q = q.join(c_alias_2, A.c)
q = q.options(contains_eager(A.c, alias=c_alias_2))
a1 = q.all()[0]
# ensure 'd' key was populated in dict. Varies based on
# PYTHONHASHSEED
in_("d", a1.c.__dict__)
def test_multi_path_load_of_type(self):
0
Source : test_eager_relations.py
with Apache License 2.0
from gethue
with Apache License 2.0
from gethue
# Same multi-path contains_eager scenario, expressed with the of_type()
# aliasing API instead of the alias= keyword.
def test_multi_path_load_of_type(self):
A, B, C, D = self.classes("A", "B", "C", "D")
s = Session()
c = C(d=D())
s.add(A(b=B(c=c), c=c))
s.commit()
c_alias_1 = aliased(C)
c_alias_2 = aliased(C)
q = s.query(A)
q = q.join(A.b).join(B.c.of_type(c_alias_1)).join(c_alias_1.d)
q = q.options(
contains_eager(A.b)
.contains_eager(B.c.of_type(c_alias_1))
.contains_eager(c_alias_1.d)
)
q = q.join(A.c.of_type(c_alias_2))
q = q.options(contains_eager(A.c.of_type(c_alias_2)))
a1 = q.all()[0]
# ensure 'd' key was populated in dict. Varies based on
# PYTHONHASHSEED
in_("d", a1.c.__dict__)
class EntityViaMultiplePathTestTwo(fixtures.DeclarativeMappedTest):
0
Source : test_eager_relations.py
with Apache License 2.0
from gethue
with Apache License 2.0
from gethue
# contains_eager chained over string-named paths with explicit aliases:
# the eagerly-loaded "user" attribute must be present on the related LD.
def test_multi_path_load(self):
User, LD, A, LDA = self.classes("User", "LD", "A", "LDA")
s = Session()
u0 = User(data=42)
l0 = LD(user=u0)
z0 = A(ld=l0)
lz0 = LDA(ld=l0, a=z0)
s.add_all([u0, l0, z0, lz0])
s.commit()
l_ac = aliased(LD)
u_ac = aliased(User)
# these paths don't work out correctly?
lz_test = (
s.query(LDA)
.join("ld")
.options(contains_eager("ld"))
.join("a", (l_ac, "ld"), (u_ac, "user"))
.options(
contains_eager("a")
.contains_eager("ld", alias=l_ac)
.contains_eager("user", alias=u_ac)
)
.first()
)
in_("user", lz_test.a.ld.__dict__)
def test_multi_path_load_of_type(self):
0
Source : test_eager_relations.py
with Apache License 2.0
from gethue
with Apache License 2.0
from gethue
# Same scenario using attribute-bound paths and of_type() aliases rather
# than string path names.
def test_multi_path_load_of_type(self):
User, LD, A, LDA = self.classes("User", "LD", "A", "LDA")
s = Session()
u0 = User(data=42)
l0 = LD(user=u0)
z0 = A(ld=l0)
lz0 = LDA(ld=l0, a=z0)
s.add_all([u0, l0, z0, lz0])
s.commit()
l_ac = aliased(LD)
u_ac = aliased(User)
lz_test = (
s.query(LDA)
.join(LDA.ld)
.options(contains_eager(LDA.ld))
.join(LDA.a)
.join(LDA.ld.of_type(l_ac))
.join(l_ac.user.of_type(u_ac))
.options(
contains_eager(LDA.a),
contains_eager(LDA.ld.of_type(l_ac)).contains_eager(
l_ac.user.of_type(u_ac)
),
)
.first()
)
in_("user", lz_test.a.ld.__dict__)
class LazyLoadOptSpecificityTest(fixtures.DeclarativeMappedTest):
0
Source : test_merge.py
with Apache License 2.0
from gethue
with Apache License 2.0
from gethue
# merge(load=False) with a mapping-level deferred column: the merged copy
# loads the expired "summary" and the deferred "excerpt" lazily, each with
# its own SELECT, asserted via sql_eq_.
def test_deferred_column_mapping(self):
# defer 'excerpt' at mapping level instead of query level
Book, book = self.classes.Book, self.tables.book
mapper(Book, book, properties={"excerpt": deferred(book.c.excerpt)})
sess = sessionmaker()()
b = Book(
id=1,
title="Essential SQLAlchemy",
summary="some summary",
excerpt="some excerpt",
)
sess.add(b)
sess.commit()
b1 = sess.query(Book).first()
sess.expire(b1, ["summary"])
sess.close()
def go():
b2 = sess.merge(b1, load=False)
# should not emit load for deferred 'excerpt'
eq_(b2.summary, "some summary")
not_in_("excerpt", b2.__dict__)
# now it should emit load for deferred 'excerpt'
eq_(b2.excerpt, "some excerpt")
in_("excerpt", b2.__dict__)
self.sql_eq_(
go,
[
(
"SELECT book.summary AS book_summary "
"FROM book WHERE book.id = :param_1",
{"param_1": 1},
),
(
"SELECT book.excerpt AS book_excerpt "
"FROM book WHERE book.id = :param_1",
{"param_1": 1},
),
],
)
def test_deferred_column_query(self):
0
Source : test_merge.py
with Apache License 2.0
from gethue
with Apache License 2.0
from gethue
# Same as test_deferred_column_mapping but deferring "excerpt" with a
# query-level defer() option rather than in the mapping.
def test_deferred_column_query(self):
Book, book = self.classes.Book, self.tables.book
mapper(Book, book)
sess = sessionmaker()()
b = Book(
id=1,
title="Essential SQLAlchemy",
summary="some summary",
excerpt="some excerpt",
)
sess.add(b)
sess.commit()
# defer 'excerpt' at query level instead of mapping level
b1 = sess.query(Book).options(defer(Book.excerpt)).first()
sess.expire(b1, ["summary"])
sess.close()
def go():
b2 = sess.merge(b1, load=False)
# should not emit load for deferred 'excerpt'
eq_(b2.summary, "some summary")
not_in_("excerpt", b2.__dict__)
# now it should emit load for deferred 'excerpt'
eq_(b2.excerpt, "some excerpt")
in_("excerpt", b2.__dict__)
self.sql_eq_(
go,
[
(
"SELECT book.summary AS book_summary "
"FROM book WHERE book.id = :param_1",
{"param_1": 1},
),
(
"SELECT book.excerpt AS book_excerpt "
"FROM book WHERE book.id = :param_1",
{"param_1": 1},
),
],
)
class MutableMergeTest(fixtures.MappedTest):
0
Source : test_relationships.py
with Apache License 2.0
from gethue
with Apache License 2.0
from gethue
# back_populates with lazy="noload" on the many-to-one side: assigning
# a1.user still back-populates u1.addresses in memory.
def test_m2o(self):
users, Address, addresses, User = (
self.tables.users,
self.classes.Address,
self.tables.addresses,
self.classes.User,
)
mapper(User, users, properties={"addresses": relationship(Address)})
mapper(
Address,
addresses,
properties={
"user": relationship(
User, back_populates="addresses", lazy="noload"
)
},
)
u1 = User()
a1 = Address()
a1.user = u1
in_(a1, u1.addresses)
class JoinConditionErrorTest(fixtures.TestBase):
0
Source : test_deprecations.py
with Apache License 2.0
from gethue
with Apache License 2.0
from gethue
# GenericFunction registration: a second registration differing only in
# letter case flips the name into case-sensitive mode with a deprecation
# warning; registry state is checked before and after.
def test_case_sensitive(self):
reg = functions._registry["_default"]
cs_reg = functions._case_sensitive_registry["_default"]
class MYFUNC(GenericFunction):
type = DateTime
# while only one case-variant exists, lookup is case-insensitive
assert isinstance(func.MYFUNC().type, DateTime)
assert isinstance(func.MyFunc().type, DateTime)
assert isinstance(func.mYfUnC().type, DateTime)
assert isinstance(func.myfunc().type, DateTime)
in_("myfunc", reg)
not_in_("MYFUNC", reg)
not_in_("MyFunc", reg)
in_("myfunc", cs_reg)
eq_(set(cs_reg["myfunc"].keys()), set(["MYFUNC"]))
with testing.expect_deprecated(
"GenericFunction 'MyFunc' is already registered with"
" different letter case, so the previously registered function "
"'MYFUNC' is switched into case-sensitive mode. "
"GenericFunction objects will be fully case-insensitive in a "
"future release.",
regex=False,
):
class MyFunc(GenericFunction):
type = Integer
# now case-sensitive: only the exact registered spellings resolve
assert isinstance(func.MYFUNC().type, DateTime)
assert isinstance(func.MyFunc().type, Integer)
with pytest.raises(AssertionError):
assert isinstance(func.mYfUnC().type, Integer)
with pytest.raises(AssertionError):
assert isinstance(func.myfunc().type, Integer)
eq_(reg["myfunc"], functions._CASE_SENSITIVE)
not_in_("MYFUNC", reg)
not_in_("MyFunc", reg)
in_("myfunc", cs_reg)
eq_(set(cs_reg["myfunc"].keys()), set(["MYFUNC", "MyFunc"]))
def test_replace_function_case_sensitive(self):
0
Source : test_deprecations.py
with Apache License 2.0
from gethue
with Apache License 2.0
from gethue
# GenericFunction replacement under case-sensitive mode: re-registering the
# same identifier in each letter-case variant emits override warnings /
# deprecations and each spelling keeps its own return type.
def test_replace_function_case_sensitive(self):
reg = functions._registry["_default"]
cs_reg = functions._case_sensitive_registry["_default"]
class replaceable_func(GenericFunction):
type = Integer
identifier = "REPLACEABLE_FUNC"
# single registration: all case variants resolve to Integer
assert isinstance(func.REPLACEABLE_FUNC().type, Integer)
assert isinstance(func.Replaceable_Func().type, Integer)
assert isinstance(func.RePlAcEaBlE_fUnC().type, Integer)
assert isinstance(func.replaceable_func().type, Integer)
in_("replaceable_func", reg)
not_in_("REPLACEABLE_FUNC", reg)
not_in_("Replaceable_Func", reg)
in_("replaceable_func", cs_reg)
eq_(set(cs_reg["replaceable_func"].keys()), set(["REPLACEABLE_FUNC"]))
with testing.expect_deprecated(
"GenericFunction 'Replaceable_Func' is already registered with"
" different letter case, so the previously registered function "
"'REPLACEABLE_FUNC' is switched into case-sensitive mode. "
"GenericFunction objects will be fully case-insensitive in a "
"future release.",
regex=False,
):
class Replaceable_Func(GenericFunction):
type = DateTime
identifier = "Replaceable_Func"
# case-sensitive mode: unknown spellings fall back to NullType
assert isinstance(func.REPLACEABLE_FUNC().type, Integer)
assert isinstance(func.Replaceable_Func().type, DateTime)
assert isinstance(func.RePlAcEaBlE_fUnC().type, NullType)
assert isinstance(func.replaceable_func().type, NullType)
eq_(reg["replaceable_func"], functions._CASE_SENSITIVE)
not_in_("REPLACEABLE_FUNC", reg)
not_in_("Replaceable_Func", reg)
in_("replaceable_func", cs_reg)
eq_(
set(cs_reg["replaceable_func"].keys()),
set(["REPLACEABLE_FUNC", "Replaceable_Func"]),
)
with testing.expect_warnings(
"The GenericFunction 'REPLACEABLE_FUNC' is already registered and "
"is going to be overriden.",
regex=False,
):
class replaceable_func_override(GenericFunction):
type = DateTime
identifier = "REPLACEABLE_FUNC"
with testing.expect_deprecated(
"GenericFunction(s) '['REPLACEABLE_FUNC', 'Replaceable_Func']' "
"are already registered with different letter cases and might "
"interact with 'replaceable_func'. GenericFunction objects will "
"be fully case-insensitive in a future release.",
regex=False,
):
class replaceable_func_lowercase(GenericFunction):
type = String
identifier = "replaceable_func"
with testing.expect_warnings(
"The GenericFunction 'Replaceable_Func' is already registered and "
"is going to be overriden.",
regex=False,
):
class Replaceable_Func_override(GenericFunction):
type = Integer
identifier = "Replaceable_Func"
# each case variant now carries the type of its latest registration
assert isinstance(func.REPLACEABLE_FUNC().type, DateTime)
assert isinstance(func.Replaceable_Func().type, Integer)
assert isinstance(func.RePlAcEaBlE_fUnC().type, NullType)
assert isinstance(func.replaceable_func().type, String)
eq_(reg["replaceable_func"], functions._CASE_SENSITIVE)
not_in_("REPLACEABLE_FUNC", reg)
not_in_("Replaceable_Func", reg)
in_("replaceable_func", cs_reg)
eq_(
set(cs_reg["replaceable_func"].keys()),
set(["REPLACEABLE_FUNC", "Replaceable_Func", "replaceable_func"]),
)
class DDLListenerDeprecationsTest(fixtures.TestBase):
0
Source : test_resultset.py
with Apache License 2.0
from gethue
with Apache License 2.0
from gethue
# Standalone variant that builds its own tables: label collisions between
# "content.type AS content_type" and bar.content_type only match the
# lightweight string column (and the real column when actually selected).
def test_column_label_overlap_fallback(self):
content = Table("content", self.metadata, Column("type", String(30)))
bar = Table("bar", self.metadata, Column("content_type", String(30)))
self.metadata.create_all(testing.db)
testing.db.execute(content.insert().values(type="t1"))
row = testing.db.execute(content.select(use_labels=True)).first()
in_(content.c.type, row)
not_in_(bar.c.content_type, row)
in_(sql.column("content_type"), row)
row = testing.db.execute(
select([content.c.type.label("content_type")])
).first()
in_(content.c.type, row)
not_in_(bar.c.content_type, row)
in_(sql.column("content_type"), row)
row = testing.db.execute(
select([func.now().label("content_type")])
).first()
not_in_(content.c.type, row)
not_in_(bar.c.content_type, row)
in_(sql.column("content_type"), row)
def test_pickled_rows(self):
0
Source : test_resultset.py
with Apache License 2.0
from gethue
with Apache License 2.0
from gethue
# Default (case-sensitive) row keymap: exact-case labels are present,
# other casings are absent and raise KeyError on access.
def test_row_case_sensitive(self):
row = testing.db.execute(
select(
[
literal_column("1").label("case_insensitive"),
literal_column("2").label("CaseSensitive"),
]
)
).first()
eq_(list(row.keys()), ["case_insensitive", "CaseSensitive"])
in_("case_insensitive", row._keymap)
in_("CaseSensitive", row._keymap)
not_in_("casesensitive", row._keymap)
eq_(row["case_insensitive"], 1)
eq_(row["CaseSensitive"], 2)
assert_raises(KeyError, lambda: row["Case_insensitive"])
assert_raises(KeyError, lambda: row["casesensitive"])
def test_row_case_sensitive_unoptimized(self):
0
Source : test_resultset.py
with Apache License 2.0
from gethue
with Apache License 2.0
from gethue
# Case-sensitive engine with a textual column forcing the unoptimized
# result-metadata path; behavior matches test_row_case_sensitive.
def test_row_case_sensitive_unoptimized(self):
ins_db = engines.testing_engine(options={"case_sensitive": True})
row = ins_db.execute(
select(
[
literal_column("1").label("case_insensitive"),
literal_column("2").label("CaseSensitive"),
text("3 AS screw_up_the_cols"),
]
)
).first()
eq_(
list(row.keys()),
["case_insensitive", "CaseSensitive", "screw_up_the_cols"],
)
in_("case_insensitive", row._keymap)
in_("CaseSensitive", row._keymap)
not_in_("casesensitive", row._keymap)
eq_(row["case_insensitive"], 1)
eq_(row["CaseSensitive"], 2)
eq_(row["screw_up_the_cols"], 3)
assert_raises(KeyError, lambda: row["Case_insensitive"])
assert_raises(KeyError, lambda: row["casesensitive"])
assert_raises(KeyError, lambda: row["screw_UP_the_cols"])
def test_row_case_insensitive(self):
0
Source : test_resultset.py
with Apache License 2.0
from gethue
with Apache License 2.0
from gethue
# case_sensitive=False engine: any casing of a label resolves in the
# row keymap and in item access.
def test_row_case_insensitive(self):
ins_db = engines.testing_engine(options={"case_sensitive": False})
row = ins_db.execute(
select(
[
literal_column("1").label("case_insensitive"),
literal_column("2").label("CaseSensitive"),
]
)
).first()
eq_(list(row.keys()), ["case_insensitive", "CaseSensitive"])
in_("case_insensitive", row._keymap)
in_("CaseSensitive", row._keymap)
in_("casesensitive", row._keymap)
eq_(row["case_insensitive"], 1)
eq_(row["CaseSensitive"], 2)
eq_(row["Case_insensitive"], 1)
eq_(row["casesensitive"], 2)
def test_row_case_insensitive_unoptimized(self):
0
Source : test_resultset.py
with Apache License 2.0
from gethue
with Apache License 2.0
from gethue
# Case-insensitive engine on the unoptimized metadata path (textual column
# present): all casings still resolve.
def test_row_case_insensitive_unoptimized(self):
ins_db = engines.testing_engine(options={"case_sensitive": False})
row = ins_db.execute(
select(
[
literal_column("1").label("case_insensitive"),
literal_column("2").label("CaseSensitive"),
text("3 AS screw_up_the_cols"),
]
)
).first()
eq_(
list(row.keys()),
["case_insensitive", "CaseSensitive", "screw_up_the_cols"],
)
in_("case_insensitive", row._keymap)
in_("CaseSensitive", row._keymap)
in_("casesensitive", row._keymap)
eq_(row["case_insensitive"], 1)
eq_(row["CaseSensitive"], 2)
eq_(row["screw_up_the_cols"], 3)
eq_(row["Case_insensitive"], 1)
eq_(row["casesensitive"], 2)
eq_(row["screw_UP_the_cols"], 3)
def test_row_as_args(self):
0
Source : test_reflection.py
with MIT License
from sqlalchemy
with MIT License
from sqlalchemy
def test_max_ident_in_varchar_not_present(self, metadata, connection):
"""test [ticket:3504].
Here we are testing not just that the "max" token comes back
as None, but also that these types accept "max" as the value
of "length" on construction, which isn't a directly documented
pattern however is likely in common use.
"""
Table(
"t",
metadata,
Column("t1", types.String),
Column("t2", types.Text("max")),
Column("t3", types.Text("max")),
Column("t4", types.LargeBinary("max")),
Column("t5", types.VARBINARY("max")),
)
metadata.create_all(connection)
# reflected length is None, but the compiled DDL type still renders "max"
for col in inspect(connection).get_columns("t"):
is_(col["type"].length, None)
in_("max", str(col["type"].compile(dialect=connection.dialect)))
class InfoCoerceUnicodeTest(fixtures.TestBase, AssertsCompiledSQL):
0
Source : test_reflection.py
with MIT License
from sqlalchemy
with MIT License
from sqlalchemy
# 1.4 fixture-injected variant: resolve_fks=False keeps "t2" out of the
# MetaData until it is reflected explicitly.
def test_resolve_fks_false_table(self, connection, metadata):
meta = metadata
Table(
"t1",
meta,
Column("id", sa.Integer, primary_key=True),
Column("t2id", sa.Integer, sa.ForeignKey("t2.id")),
test_needs_fk=True,
)
Table(
"t2",
meta,
Column("id", sa.Integer, primary_key=True),
test_needs_fk=True,
)
meta.create_all(connection)
meta2 = MetaData()
t1 = Table("t1", meta2, resolve_fks=False, autoload_with=connection)
in_("t1", meta2.tables)
not_in("t2", meta2.tables)
assert_raises(
sa.exc.NoReferencedTableError,
lambda: list(t1.c.t2id.foreign_keys)[0].column,
)
t2 = Table("t2", meta2, autoload_with=connection)
# now it resolves
is_true(t1.c.t2id.references(t2.c.id))
def test_resolve_fks_false_extend_existing(self, connection, metadata):
0
Source : test_reflection.py
with MIT License
from sqlalchemy
with MIT License
from sqlalchemy
# 1.4 variant: extend_existing reflection with resolve_fks=False leaves the
# FK target unreflected until reflected directly.
def test_resolve_fks_false_extend_existing(self, connection, metadata):
meta = metadata
Table(
"t1",
meta,
Column("id", sa.Integer, primary_key=True),
Column("t2id", sa.Integer, sa.ForeignKey("t2.id")),
test_needs_fk=True,
)
Table(
"t2",
meta,
Column("id", sa.Integer, primary_key=True),
test_needs_fk=True,
)
meta.create_all(connection)
meta2 = MetaData()
Table("t1", meta2)
in_("t1", meta2.tables)
t1 = Table(
"t1",
meta2,
resolve_fks=False,
autoload_with=connection,
extend_existing=True,
)
not_in("t2", meta2.tables)
assert_raises(
sa.exc.NoReferencedTableError,
lambda: list(t1.c.t2id.foreign_keys)[0].column,
)
t2 = Table("t2", meta2, autoload_with=connection)
# now it resolves
is_true(t1.c.t2id.references(t2.c.id))
def test_resolve_fks_false_metadata(self, connection, metadata):
0
Source : test_reflection.py
with MIT License
from sqlalchemy
with MIT License
from sqlalchemy
# 1.4 variant: MetaData.reflect(resolve_fks=False, only=["t1"]) skips the
# referenced table; a second reflect() makes the FK resolvable.
def test_resolve_fks_false_metadata(self, connection, metadata):
meta = metadata
Table(
"t1",
meta,
Column("id", sa.Integer, primary_key=True),
Column("t2id", sa.Integer, sa.ForeignKey("t2.id")),
test_needs_fk=True,
)
Table(
"t2",
meta,
Column("id", sa.Integer, primary_key=True),
test_needs_fk=True,
)
meta.create_all(connection)
meta2 = MetaData()
meta2.reflect(connection, resolve_fks=False, only=["t1"])
in_("t1", meta2.tables)
not_in("t2", meta2.tables)
t1 = meta2.tables["t1"]
assert_raises(
sa.exc.NoReferencedTableError,
lambda: list(t1.c.t2id.foreign_keys)[0].column,
)
meta2.reflect(connection, resolve_fks=False)
t2 = meta2.tables["t2"]
is_true(t1.c.t2id.references(t2.c.id))
def test_nonexistent(self, connection):
0
Source : test_indexable.py
with MIT License
from sqlalchemy
with MIT License
from sqlalchemy
# 1.4 variant of the indexable Array "modified" test (uses not_in).
def test_modified(self):
from sqlalchemy import inspect
Array = self.classes.Array
s = Session(testing.db)
a = Array(array=[1, 2, 3])
s.add(a)
s.commit()
i = inspect(a)
is_(i.modified, False)
in_("array", i.unmodified)
a.first = 10
is_(i.modified, True)
not_in("array", i.unmodified)
class IndexPropertyJsonTest(fixtures.DeclarativeMappedTest):
0
Source : test_cascade.py
with MIT License
from sqlalchemy
with MIT License
from sqlalchemy
# 2.0-style variant (map_imperatively / fixture_session) of the viewonly
# expunge-cascade test: expunging the parent expunges the loaded Orders.
def test_expunge_cascade(self):
User, Order, orders, users = (
self.classes.User,
self.classes.Order,
self.tables.orders,
self.tables.users,
)
self.mapper_registry.map_imperatively(Order, orders)
self.mapper_registry.map_imperatively(
User,
users,
properties={
"orders": relationship(
Order,
primaryjoin=(
self.tables.users.c.id
== foreign(self.tables.orders.c.user_id)
),
cascade="expunge",
viewonly=True,
)
},
)
sess = fixture_session()
u = User(id=1, name="jack")
sess.add(u)
sess.add_all(
[
Order(id=1, user_id=1, description="someorder"),
Order(id=2, user_id=1, description="someotherorder"),
]
)
sess.commit()
u1 = sess.query(User).first()
orders = u1.orders
eq_(len(orders), 2)
in_(orders[0], sess)
in_(orders[1], sess)
sess.expunge(u1)
not_in(orders[0], sess)
not_in(orders[1], sess)
def test_default_none_cascade(self):
0
Source : test_eager_relations.py
with MIT License
from sqlalchemy
with MIT License
from sqlalchemy
# fixture_session variant of the two-path contains_eager load test.
def test_multi_path_load(self):
A, B, C, D = self.classes("A", "B", "C", "D")
s = fixture_session()
c = C(d=D())
s.add(A(b=B(c=c), c=c))
s.commit()
c_alias_1 = aliased(C)
c_alias_2 = aliased(C)
q = s.query(A)
q = q.join(A.b).join(c_alias_1, B.c).join(c_alias_1.d)
q = q.options(
contains_eager(A.b)
.contains_eager(B.c, alias=c_alias_1)
.contains_eager(C.d)
)
q = q.join(c_alias_2, A.c)
q = q.options(contains_eager(A.c, alias=c_alias_2))
a1 = q.all()[0]
# ensure 'd' key was populated in dict. Varies based on
# PYTHONHASHSEED
in_("d", a1.c.__dict__)
def test_multi_path_load_of_type(self):
0
Source : test_eager_relations.py
with MIT License
from sqlalchemy
def test_multi_path_load_of_type(self):
    """Same multi-path contains_eager scenario, expressed with the
    ``of_type()`` form instead of the legacy ``alias=`` form."""
    A, B, C, D = self.classes("A", "B", "C", "D")
    sess = fixture_session()

    shared_c = C(d=D())
    sess.add(A(b=B(c=shared_c), c=shared_c))
    sess.commit()

    c_via_b = aliased(C)
    c_direct = aliased(C)

    query = (
        sess.query(A)
        .join(A.b)
        .join(B.c.of_type(c_via_b))
        .join(c_via_b.d)
        .options(
            contains_eager(A.b)
            .contains_eager(B.c.of_type(c_via_b))
            .contains_eager(c_via_b.d)
        )
        .join(A.c.of_type(c_direct))
        .options(contains_eager(A.c.of_type(c_direct)))
    )
    loaded = query.all()[0]

    # ensure 'd' key was populated in dict. Varies based on
    # PYTHONHASHSEED
    in_("d", loaded.c.__dict__)
class EntityViaMultiplePathTestTwo(fixtures.DeclarativeMappedTest):
0
Source : test_eager_relations.py
with MIT License
from sqlalchemy
def test_multi_path_load_legacy_join_style(self):
    """Entity reached through multiple join paths, using the
    legacy ``alias=`` contains_eager form."""
    User, LD, A, LDA = self.classes("User", "LD", "A", "LDA")
    sess = fixture_session()

    user = User(data=42)
    ld = LD(user=user)
    a = A(ld=ld)
    lda = LDA(ld=ld, a=a)
    sess.add_all([user, ld, a, lda])
    sess.commit()

    ld_alias = aliased(LD)
    user_alias = aliased(User)

    # these paths don't work out correctly?
    result = (
        sess.query(LDA)
        .join(LDA.ld)
        .options(contains_eager(LDA.ld))
        .join(LDA.a)
        .join(A.ld.of_type(ld_alias))
        .join(ld_alias.user.of_type(user_alias))
        .options(
            contains_eager(LDA.a)
            .contains_eager(A.ld, alias=ld_alias)
            .contains_eager(LD.user, alias=user_alias)
        )
        .first()
    )
    in_("user", result.a.ld.__dict__)
def test_multi_path_load_of_type(self):
0
Source : test_eager_relations.py
with MIT License
from sqlalchemy
def test_multi_path_load_of_type(self):
    """Entity reached through multiple join paths, using the
    ``of_type()`` contains_eager form."""
    User, LD, A, LDA = self.classes("User", "LD", "A", "LDA")
    sess = fixture_session()

    user = User(data=42)
    ld = LD(user=user)
    a = A(ld=ld)
    lda = LDA(ld=ld, a=a)
    sess.add_all([user, ld, a, lda])
    sess.commit()

    ld_alias = aliased(LD)
    user_alias = aliased(User)

    result = (
        sess.query(LDA)
        .join(LDA.ld)
        # this conflicts in 2.0
        # .options(contains_eager(LDA.ld))
        .join(LDA.a)
        .join(LDA.ld.of_type(ld_alias))
        .join(ld_alias.user.of_type(user_alias))
        .options(
            contains_eager(LDA.a),
            contains_eager(LDA.ld.of_type(ld_alias)).contains_eager(
                ld_alias.user.of_type(user_alias)
            ),
        )
        .first()
    )
    in_("user", result.a.ld.__dict__)
class LazyLoadOptSpecificityTest(fixtures.DeclarativeMappedTest):
0
Source : test_merge.py
with MIT License
from sqlalchemy
def test_deferred_column_mapping(self):
    """merge(load=False) keeps a mapping-level deferred column
    deferred; access then emits exactly one targeted SELECT."""
    # defer 'excerpt' at mapping level instead of query level
    Book, book = self.classes.Book, self.tables.book
    self.mapper_registry.map_imperatively(
        Book, book, properties={"excerpt": deferred(book.c.excerpt)}
    )

    sess = fixture_session()
    source = Book(
        id=1,
        title="Essential SQLAlchemy",
        summary="some summary",
        excerpt="some excerpt",
    )
    sess.add(source)
    sess.commit()

    loaded = sess.query(Book).first()
    sess.expire(loaded, ["summary"])
    sess.close()

    def go():
        merged = sess.merge(loaded, load=False)
        # should not emit load for deferred 'excerpt'
        eq_(merged.summary, "some summary")
        not_in("excerpt", merged.__dict__)
        # now it should emit load for deferred 'excerpt'
        eq_(merged.excerpt, "some excerpt")
        in_("excerpt", merged.__dict__)

    # one SELECT for the expired 'summary', one for 'excerpt'
    self.sql_eq_(
        go,
        [
            (
                "SELECT book.summary AS book_summary "
                "FROM book WHERE book.id = :pk_1",
                {"pk_1": 1},
            ),
            (
                "SELECT book.excerpt AS book_excerpt "
                "FROM book WHERE book.id = :pk_1",
                {"pk_1": 1},
            ),
        ],
    )
def test_deferred_column_query(self):
0
Source : test_merge.py
with MIT License
from sqlalchemy
def test_deferred_column_query(self):
    """merge(load=False) keeps a query-level deferred column
    deferred; access then emits exactly one targeted SELECT."""
    Book, book = self.classes.Book, self.tables.book
    self.mapper_registry.map_imperatively(Book, book)

    sess = fixture_session()
    source = Book(
        id=1,
        title="Essential SQLAlchemy",
        summary="some summary",
        excerpt="some excerpt",
    )
    sess.add(source)
    sess.commit()

    # defer 'excerpt' at query level instead of mapping level
    loaded = sess.query(Book).options(defer(Book.excerpt)).first()
    sess.expire(loaded, ["summary"])
    sess.close()

    def go():
        merged = sess.merge(loaded, load=False)
        # should not emit load for deferred 'excerpt'
        eq_(merged.summary, "some summary")
        not_in("excerpt", merged.__dict__)
        # now it should emit load for deferred 'excerpt'
        eq_(merged.excerpt, "some excerpt")
        in_("excerpt", merged.__dict__)

    # one SELECT for the expired 'summary', one for 'excerpt'
    self.sql_eq_(
        go,
        [
            (
                "SELECT book.summary AS book_summary "
                "FROM book WHERE book.id = :pk_1",
                {"pk_1": 1},
            ),
            (
                "SELECT book.excerpt AS book_excerpt "
                "FROM book WHERE book.id = :pk_1",
                {"pk_1": 1},
            ),
        ],
    )
class MutableMergeTest(fixtures.MappedTest):
0
Source : test_relationships.py
with MIT License
from sqlalchemy
def test_m2o(self):
    """Setting the noload many-to-one still back-populates the
    one-to-many collection in memory."""
    users_table = self.tables.users
    addresses_table = self.tables.addresses
    User = self.classes.User
    Address = self.classes.Address

    self.mapper_registry.map_imperatively(
        User,
        users_table,
        properties={"addresses": relationship(Address)},
    )
    self.mapper_registry.map_imperatively(
        Address,
        addresses_table,
        properties={
            "user": relationship(
                User, back_populates="addresses", lazy="noload"
            )
        },
    )

    user = User()
    addr = Address()
    addr.user = user
    # back_populates mirrors the assignment onto User.addresses
    in_(addr, user.addresses)
class JoinConditionErrorTest(fixtures.TestBase):
0
Source : test_update_delete.py
with MIT License
from sqlalchemy
def test_delete_fetch_returning(self):
    """Bulk delete with synchronize_session='fetch' uses RETURNING
    where the dialect supports it, else a pre-SELECT, and expunges
    only the matched rows."""
    User = self.classes.User
    sess = fixture_session()

    john, jack, jill, jane = sess.query(User).order_by(User.id).all()
    in_(john, sess)
    in_(jack, sess)

    with self.sql_execution_asserter() as asserter:
        sess.query(User).filter(User.age > 29).delete(
            synchronize_session="fetch"
        )

    if testing.db.dialect.full_returning:
        # single round trip: DELETE .. RETURNING
        asserter.assert_(
            CompiledSQL(
                "DELETE FROM users WHERE users.age_int > %(age_int_1)s "
                "RETURNING users.id",
                [{"age_int_1": 29}],
                dialect="postgresql",
            ),
        )
    else:
        # two round trips: SELECT matched ids, then DELETE
        asserter.assert_(
            CompiledSQL(
                "SELECT users.id FROM users "
                "WHERE users.age_int > :age_int_1",
                [{"age_int_1": 29}],
            ),
            CompiledSQL(
                "DELETE FROM users WHERE users.age_int > :age_int_1",
                [{"age_int_1": 29}],
            ),
        )

    # only the rows matching the criteria were expunged
    in_(john, sess)
    not_in(jack, sess)
    in_(jill, sess)
    not_in(jane, sess)
def test_delete_fetch_returning_lambda(self):
See More Examples