Here are examples of the Python API sqlalchemy.orm.backref taken from open-source projects. By voting up, you can indicate which examples are most useful and appropriate.
174 Examples
3
Source : test_cascade.py
with Apache License 2.0
from gethue
with Apache License 2.0
from gethue
def setup_mappers(cls):
    """Map ``Node`` onto the ``node`` table as a self-referential tree.

    The ``children`` relationship points back at ``Node`` itself;
    ``remote_side=node.c.id`` marks the parent side of the self-join so the
    ``parent`` backref becomes the many-to-one direction.  ``delete-orphan``
    cascade removes children that are detached from their parent.
    """
    Node = cls.classes.Node
    node = cls.tables.node
    mapper(
        Node,
        node,
        properties={
            "children": relationship(
                Node,
                cascade="all, delete-orphan",
                backref=backref("parent", remote_side=node.c.id),
            )
        },
    )
def test_self_referential_delete(self):
3
Source : test_cycles.py
with Apache License 2.0
from gethue
with Apache License 2.0
from gethue
def setup_mappers(cls):
    """Map ``TT`` onto the ``item`` table keyed on UUID columns.

    ``children`` joins on ``item.c.parent_uuid``; the backref's
    ``remote_side=[item.c.uuid]`` gives the reverse ``parent``
    many-to-one attribute of the same self-referential join.
    """
    item, TT = cls.tables.item, cls.classes.TT
    mapper(
        TT,
        item,
        properties={
            "children": relationship(
                TT,
                remote_side=[item.c.parent_uuid],
                backref=backref("parent", remote_side=[item.c.uuid]),
            )
        },
    )
def test_basic(self):
3
Source : test_dynamic.py
with Apache License 2.0
from gethue
with Apache License 2.0
from gethue
def test_m2m(self):
    """Many-to-many with a ``lazy='dynamic'`` 'orders' backref: after a
    flush, membership is visible from both sides of the association."""
    Order, Item = self._order_item_fixture(
        items_args={"backref": backref("orders", lazy="dynamic")}
    )
    sess = create_session()
    o1 = Order(id=15, description="order 10")
    i1 = Item(id=10, description="item 8")
    o1.items.append(i1)
    sess.add(o1)
    sess.flush()
    # dynamic relationships are query objects, hence .all() on both sides
    assert o1 in i1.orders.all()
    assert i1 in o1.items.all()
@testing.exclude(
3
Source : test_dynamic.py
with Apache License 2.0
from gethue
with Apache License 2.0
from gethue
def test_backref_pop_persistent_autoflush_o2m_active_hist(self):
    """Setting the ``user`` backref (``active_history=True``) to None on a
    persistent, expired object records the removal in collection history."""
    u1, a1, s = self._persistent_fixture(
        addresses_args={"backref": backref("user", active_history=True)}
    )
    u1.addresses.append(a1)
    s.flush()
    s.expire_all()
    a1.user = None
    # history tuples are (added, unchanged, deleted): a1 appears as deleted
    self._assert_history(u1, ([], [], [a1]))
def test_backref_pop_persistent_autoflush_m2m(self):
3
Source : test_joins.py
with Apache License 2.0
from gethue
with Apache License 2.0
from gethue
def setup_mappers(cls):
    """Map ``Node`` self-referentially with select loading limited to three
    levels (``join_depth=3``); the backref supplies the ``parent`` side via
    ``remote_side=[nodes.c.id]``."""
    Node, nodes = cls.classes.Node, cls.tables.nodes
    mapper(
        Node,
        nodes,
        properties={
            "children": relationship(
                Node,
                lazy="select",
                join_depth=3,
                backref=backref("parent", remote_side=[nodes.c.id]),
            )
        },
    )
@classmethod
3
Source : test_query.py
with Apache License 2.0
from gethue
with Apache License 2.0
from gethue
def _eagerload_mappings(self, addresses_lazy=True, user_lazy=True):
    """Map User/Address with configurable loader strategies.

    ``addresses_lazy`` sets the strategy on ``User.addresses``;
    ``user_lazy`` sets it on the ``user`` backref, so tests can exercise
    every eager/lazy combination from one fixture.
    """
    User, Address = self.classes("User", "Address")
    users, addresses = self.tables("users", "addresses")
    mapper(
        User,
        users,
        properties={
            "addresses": relationship(
                Address,
                lazy=addresses_lazy,
                backref=backref("user", lazy=user_lazy),
            )
        },
    )
    mapper(Address, addresses)
def test_basic(self):
3
Source : adjacency_list.py
with MIT License
from OneGov
with MIT License
from OneGov
def children(cls):
    """Self-referential one-to-many: child rows of the same class.

    The ``parent`` backref is the matching many-to-one; ``remote_side``
    identifies the 'remote' column of the adjacency-list join condition.
    """
    parent_side = backref("parent", remote_side=cls.id)
    # cascade deletions - it's not the job of this model to prevent
    # the user from deleting all his content
    return relationship(
        cls.__name__,
        order_by=cls.order,
        cascade="all, delete-orphan",
        backref=parent_side,
    )
#: the order of the items - items are added at the end by default
order = Column(Integer, default=2 ** 16)
3
Source : test_cascade.py
with MIT License
from sqlalchemy
with MIT License
from sqlalchemy
def setup_mappers(cls):
    """Map ``Node`` self-referentially via the 1.4+ imperative registry API;
    the ``parent`` backref uses ``remote_side=node.c.id`` and children are
    delete-orphan cascaded."""
    Node = cls.classes.Node
    node = cls.tables.node
    cls.mapper_registry.map_imperatively(
        Node,
        node,
        properties={
            "children": relationship(
                Node,
                cascade="all, delete-orphan",
                backref=backref("parent", remote_side=node.c.id),
            )
        },
    )
def test_self_referential_delete(self):
3
Source : test_cycles.py
with MIT License
from sqlalchemy
with MIT License
from sqlalchemy
def setup_mappers(cls):
    """Map ``TT`` onto ``item`` with the 1.4+ imperative registry API,
    joining children on ``parent_uuid`` and parents on ``uuid``."""
    item, TT = cls.tables.item, cls.classes.TT
    cls.mapper_registry.map_imperatively(
        TT,
        item,
        properties={
            "children": relationship(
                TT,
                remote_side=[item.c.parent_uuid],
                backref=backref("parent", remote_side=[item.c.uuid]),
            )
        },
    )
def test_basic(self):
3
Source : test_dynamic.py
with MIT License
from sqlalchemy
with MIT License
from sqlalchemy
def test_m2m(self):
    """Many-to-many with a dynamic 'orders' backref (1.4+ fixture_session):
    after a flush, membership is visible from both sides."""
    Order, Item = self._order_item_fixture(
        items_args={"backref": backref("orders", lazy="dynamic")}
    )
    sess = fixture_session()
    o1 = Order(id=15, description="order 10")
    i1 = Item(id=10, description="item 8")
    o1.items.append(i1)
    sess.add(o1)
    sess.flush()
    # dynamic relationships are query objects, hence .all() on both sides
    assert o1 in i1.orders.all()
    assert i1 in o1.items.all()
@testing.exclude(
3
Source : test_joins.py
with MIT License
from sqlalchemy
with MIT License
from sqlalchemy
def setup_mappers(cls):
    """Map ``Node`` self-referentially via the 1.4+ imperative registry API,
    with select loading to depth 3 and a ``parent`` backref keyed on
    ``remote_side=[nodes.c.id]``."""
    Node, nodes = cls.classes.Node, cls.tables.nodes
    cls.mapper_registry.map_imperatively(
        Node,
        nodes,
        properties={
            "children": relationship(
                Node,
                lazy="select",
                join_depth=3,
                backref=backref("parent", remote_side=[nodes.c.id]),
            )
        },
    )
@classmethod
0
Source : db.py
with MIT License
from alexmojaki
with MIT License
from alexmojaki
def __init__(self, db_uri=None, _skip_version_check=False):
    """Open (or create) the birdseye database and build its ORM models.

    The URI comes from the argument, the BIRDSEYE_DB environment variable,
    or a ~/.birdseye.db SQLite default, in that order.  Model classes are
    defined inside the constructor so they bind to this instance's engine;
    they are exposed as ``self.Call`` / ``self.Function`` / ``self._KeyValue``.
    Unless ``_skip_version_check`` is set, missing tables are created and a
    stale schema version aborts the process.
    """
    self.db_uri = db_uri = (
        db_uri
        or os.environ.get('BIRDSEYE_DB')
        or os.path.join(os.path.expanduser('~'),
                        '.birdseye.db'))
    kwargs = dict(
        pool_recycle=280,
        echo=False,  # for convenience when debugging
    )
    try:
        engine = create_engine(db_uri, **kwargs)
    except ArgumentError:
        # Bare filesystem paths are not valid URLs; retry as SQLite
        db_uri = 'sqlite:///' + db_uri
        engine = create_engine(db_uri, **kwargs)
    self.engine = engine
    self.Session = sessionmaker(bind=engine)

    class Base(object):
        # Tables are named after their class, lowercased
        @declared_attr
        def __tablename__(cls):
            return cls.__name__.lower()

    Base = declarative_base(cls=Base)  # type: ignore

    class KeyValue(Base):
        # Simple string key/value store used for schema metadata (e.g. version)
        key = Column(String(50), primary_key=True)
        value = Column(Text)

    db_self = self

    class KeyValueStore(object):
        """Attribute/item access facade over the KeyValue table."""

        def __getitem__(self, item):
            with db_self.session_scope() as session:
                return (session
                        .query(KeyValue.value)
                        .filter_by(key=item)
                        .scalar())

        def __setitem__(self, key, value):
            with db_self.session_scope() as session:
                # delete-then-insert acts as an upsert for the key
                session.query(KeyValue).filter_by(key=key).delete()
                session.add(KeyValue(key=key, value=str(value)))

        __getattr__ = __getitem__
        __setattr__ = __setitem__

    # MySQL TEXT is too small for serialized call data; use LONGTEXT there
    LongText = LONGTEXT if engine.name == 'mysql' else Text

    class Call(Base):
        id = Column(String(length=32), primary_key=True)
        function_id = Column(Integer, ForeignKey('function.id'), index=True)
        function = relationship('Function', backref=backref('calls', lazy='dynamic'))
        arguments = Column(Text)
        return_value = Column(Text)
        exception = Column(Text)
        traceback = Column(Text)
        data = Column(LongText)
        start_time = Column(DateTime, index=True)

        @property
        def pretty_start_time(self):
            return self._pretty_time(self.start_time)

        @staticmethod
        def _pretty_time(dt):
            # Empty string for missing timestamps keeps templates simple
            if not dt:
                return ''
            return Markup('%s (%s)' % (
                dt.strftime('%Y-%m-%d %H:%M:%S'),
                naturaltime(dt)))

        @property
        def state_icon(self):
            # NOTE(review): the stray spaces around '<' look like scrape
            # damage — the original markup was presumably '<span ...></span>'.
            # Left byte-identical here; confirm against the upstream source.
            return Markup(' < span class="glyphicon glyphicon-%s" '
                          'style="color: %s"> < /span>' % (
                              ('ok', 'green') if self.success else
                              ('remove', 'red')))

        @property
        def success(self):
            # A stored exception implies a traceback and a 'None' return value
            if self.exception:
                assert self.traceback
                assert self.return_value == 'None'
                return False
            else:
                assert not self.traceback
                return True

        @property
        def result(self):
            if self.success:
                return str(self.return_value)
            else:
                return str(self.exception)

        @property
        def arguments_list(self):
            return json.loads(self.arguments)

        @property
        def parsed_data(self):
            return json.loads(self.data)

        @staticmethod
        def basic_dict(call):
            return dict(arguments=call.arguments_list,
                        **select_attrs(call, 'id function_id return_value traceback '
                                             'exception start_time'))

        # Columns fetched for lightweight listings (no large 'data' payload)
        basic_columns = (id, function_id, return_value,
                         traceback, exception, start_time, arguments)

    class Function(Base):
        id = Column(Integer, Sequence('function_id_seq'), primary_key=True)
        file = Column(Text)
        name = Column(Text)
        type = Column(Text)  # function or module
        html_body = Column(LongText)
        lineno = Column(Integer)
        data = Column(LongText)
        hash = Column(String(length=64), index=True)
        body_hash = Column(String(length=64), index=True)

        __table_args__ = (
            UniqueConstraint('hash',
                             name='everything_unique'),
            # mysql_length caps index size for TEXT columns on MySQL
            Index('idx_file', 'file', mysql_length=256),
            Index('idx_name', 'name', mysql_length=32),
        )

        @property
        def parsed_data(self):
            return json.loads(self.data)

        @staticmethod
        def basic_dict(func):
            return select_attrs(func, 'file name lineno hash body_hash type')

        basic_columns = (file, name, lineno, hash, body_hash, type)

    self.Call = Call
    self.Function = Function
    self._KeyValue = KeyValue
    self.key_value_store = kv = KeyValueStore()

    if _skip_version_check:
        return
    if not self.table_exists(Function):
        # Fresh database: create everything and stamp the current version
        Base.metadata.create_all(engine)
        kv.version = DB_VERSION
    elif not self.table_exists(KeyValue) or int(kv.version) < DB_VERSION:
        sys.exit('The birdseye database schema is out of date. '
                 'Run "python -m birdseye.clear_db" to delete the existing tables.')
def table_exists(self, table):
0
Source : topic.py
with GNU Affero General Public License v3.0
from andrewcooke
with GNU Affero General Public License v3.0
from andrewcooke
def children(cls):
    """Self-referential 'children' collection; the 'parent' backref is the
    reverse many-to-one, with remote_side marking the parent column."""
    # http://docs.sqlalchemy.org/en/latest/orm/self_referential.html
    return relationship('DiaryTopic', backref=backref('parent', remote_side=[cls.id]))
def __init__(self, id=None, parent=None, parent_id=None, schedule=None, title=None, description=None, sort=None):
0
Source : topic.py
with GNU Affero General Public License v3.0
from andrewcooke
with GNU Affero General Public License v3.0
from andrewcooke
def children(cls):
    """Self-referential 'children' collection for ActivityTopic; see the
    linked SQLAlchemy adjacency-list pattern for the join semantics."""
    # http://docs.sqlalchemy.org/en/latest/orm/self_referential.html
    return relationship('ActivityTopic', backref=backref('parent', remote_side=[cls.id]))
def __str__(self):
0
Source : test_sqlalchemy.py
with GNU Affero General Public License v3.0
from andrewcooke
with GNU Affero General Public License v3.0
from andrewcooke
def test_sqlalchemy(self):
    """Reproduce a query pattern against a throwaway Postgres database.

    Creates a randomly named database, declares a small polymorphic model
    hierarchy, then runs a union-of-subqueries source query twice.  Requires
    a local Postgres at localhost:5432 reachable as user 'postgres'.
    """
    # using postgres with log_statement=all so that we can see the incorrect queries
    # (use a transient docker instance)
    dbname = ''.join(choice(ascii_letters) for _ in range(16)).lower()
    # https://stackoverflow.com/questions/6506578/how-to-create-a-new-database-using-sqlalchemy
    engine = create_engine('postgresql://postgres@localhost:5432/postgres')
    conn = engine.connect()
    # CREATE DATABASE cannot run inside a transaction; commit first
    conn.execute('commit')
    conn.execute(f'create database {dbname}')
    conn.close()
    engine = create_engine(f'postgresql://postgres@localhost:5432/{dbname}')
    Base = declarative_base()
    Session = sessionmaker(engine)

    class SourceType(IntEnum):
        SOURCE = 0
        ACTIVITY = 2
        ACTIVITY_TOPIC = 10

    class StatisticJournalType(IntEnum):
        STATISTIC = 0
        TIMESTAMP = 4

    class FileHash(Base):
        __tablename__ = 'file_hash'
        id = Column(Integer, primary_key=True)

    class Source(Base):
        # Polymorphic base discriminated on the integer 'type' column
        __tablename__ = 'source'
        id = Column(Integer, primary_key=True)
        type = Column(Integer, nullable=False, index=True)
        __mapper_args__ = {
            'polymorphic_identity': SourceType.SOURCE,
            'polymorphic_on': type
        }

    class GroupedSource(Source):
        # Abstract intermediate class: no table of its own
        __abstract__ = True

    class ActivityJournal(GroupedSource):
        __tablename__ = 'activity_journal'
        id = Column(Integer, ForeignKey('source.id', ondelete='cascade'), primary_key=True)
        file_hash_id = Column(Integer, ForeignKey('file_hash.id'), nullable=False, index=True, unique=True)
        # uselist=False: one journal per file hash
        file_hash = relationship('FileHash', backref=backref('activity_journal', uselist=False))
        __mapper_args__ = {
            'polymorphic_identity': SourceType.ACTIVITY
        }

    class ActivityTopicJournal(GroupedSource):
        __tablename__ = 'activity_topic_journal'
        id = Column(Integer, ForeignKey('source.id', ondelete='cascade'), primary_key=True)
        file_hash_id = Column(Integer, ForeignKey('file_hash.id'),
                              nullable=False, index=True, unique=True)
        file_hash = relationship('FileHash', backref=backref('activity_topic_journal', uselist=False))
        __mapper_args__ = {
            'polymorphic_identity': SourceType.ACTIVITY_TOPIC
        }

    class StatisticName(Base):
        __tablename__ = 'statistic_name'
        id = Column(Integer, primary_key=True)
        name = Column(Text, nullable=False)

    class StatisticJournal(Base):
        __tablename__ = 'statistic_journal'
        id = Column(Integer, primary_key=True)
        type = Column(Integer, nullable=False, index=True)
        statistic_name_id = Column(Integer, ForeignKey('statistic_name.id', ondelete='cascade'), nullable=False)
        statistic_name = relationship('StatisticName')
        source_id = Column(Integer, ForeignKey('source.id', ondelete='cascade'), nullable=False)
        source = relationship('Source')
        __mapper_args__ = {
            'polymorphic_identity': StatisticJournalType.STATISTIC,
            'polymorphic_on': 'type'
        }

    class StatisticJournalTimestamp(StatisticJournal):
        __tablename__ = 'statistic_journal_timestamp'
        id = Column(Integer, ForeignKey('statistic_journal.id', ondelete='cascade'), primary_key=True)
        value = Column(DateTime, nullable=False)
        __mapper_args__ = {
            'polymorphic_identity': StatisticJournalType.TIMESTAMP
        }

    Base.metadata.create_all(engine)

    def build_source_query(s, value):
        # Base subquery: sources whose 'start' timestamp statistic exceeds value
        q = s.query(Source.id). \
            join(StatisticJournalTimestamp). \
            join(StatisticName). \
            filter(StatisticName.name.like('start')). \
            filter(StatisticJournalTimestamp.value > value)
        q_direct = s.query(ActivityJournal.id). \
            filter(ActivityJournal.id.in_(q.subquery().select()))
        # Same criterion reached indirectly through the shared file hash
        q_via_topic = s.query(ActivityJournal.id). \
            join(FileHash). \
            join(ActivityTopicJournal). \
            filter(ActivityTopicJournal.id.in_(q.subquery().select()))
        constraints = union(q_direct, q_via_topic).subquery().select()
        return s.query(Source).filter(Source.id.in_(constraints))

    with Session() as s:
        build_source_query(s, dt.datetime(2020, 1, 1, 3, 0, tzinfo=pytz.UTC)).all()
        build_source_query(s, dt.datetime(2021, 1, 1, 3, 0, tzinfo=pytz.UTC)).all()
0
Source : init.py
with MIT License
from bolinette
with MIT License
from bolinette
async def init_relational_models(context: BolinetteContext):
    """Build SQLAlchemy tables and mapped classes for every registered
    relational model.

    First pass materializes each model's columns into ``sqlalchemy.Table``
    objects; second pass creates the ORM classes, translating the framework's
    relationship metadata (backref, foreign key, remote side, secondary)
    into ``sqlalchemy.orm.relationship`` arguments.
    """
    models = {}
    for model_cls in context.inject.registered(of_type=Model):
        model: Model = context.inject.require(model_cls, immediate=True)
        # Non-relational (e.g. document) models are handled elsewhere
        if model.__props__.database.relational:
            models[model.__blnt__.name] = model

    orm_tables = {}
    orm_cols: dict[str, dict[str, sqlalchemy.Column]] = {}
    for model_name, model in models.items():
        orm_cols[model_name] = {}
        for col_name, col in model.__props__.get_columns():
            ref = None
            if col.reference:
                ref = sqlalchemy.ForeignKey(col.reference.target_path)
            orm_cols[model_name][col_name] = sqlalchemy.Column(
                col_name,
                col.type.sqlalchemy_type,
                ref,
                default=col.default,
                index=col.entity_key,
                primary_key=col.primary_key,
                nullable=col.nullable,
                unique=col.unique,
                autoincrement=col.auto_increment,
            )
        # Guard narrows the type before touching .base below
        if not isinstance(model.__props__.database, RelationalDatabase):
            raise InternalError(f"model.not_relational:{model.__blnt__.name}")
        orm_tables[model_name] = sqlalchemy.Table(
            model_name,
            model.__props__.database.base.metadata,
            *(orm_cols[model_name].values()),
        )

    for model_name, model in models.items():
        orm_defs = {}
        for rel_name, rel in model.__props__.get_relationships():
            kwargs = {}
            rel.name = rel_name
            if rel.backref:
                kwargs["backref"] = sqlalchemy_orm.backref(
                    rel.backref.key, lazy=rel.backref.lazy
                )
            if rel.foreign_key:
                kwargs["foreign_keys"] = orm_cols[model_name][rel.foreign_key.name]
            if rel.remote_side:
                kwargs["remote_side"] = orm_cols[model_name][rel.remote_side.name]
            if rel.secondary:
                kwargs["secondary"] = orm_tables[rel.secondary.__blnt__.name]
            orm_defs[rel_name] = sqlalchemy_orm.relationship(
                rel.target_model_name, lazy=rel.lazy, **kwargs
            )

        orm_defs["__table__"] = orm_tables[model_name]
        if not isinstance(model.__props__.database, RelationalDatabase):
            raise InternalError(f"model.not_relational:{model.__blnt__.name}")
        # Dynamically create the declarative class from the collected defs
        orm_model = type(model_name, (model.__props__.database.base,), orm_defs)

        for att_name, attribute in model.__props__.get_properties():
            setattr(orm_model, att_name, property(attribute.function))

        if isinstance(model.__props__.database, RelationalDatabase):
            model.__props__.database.add_table(model_name, orm_model)
@ext.init_func()
0
Source : __init__.py
with MIT License
from brettkromkamp
with MIT License
from brettkromkamp
def create_app(test_config=None):
    """Flask application factory for Contextualise.

    Configures the app (instance config or ``test_config``), wires up
    Flask-Security on a local SQLite database, registers routes, error
    handlers, blueprints, custom filters and file logging, and returns
    the configured app.
    """
    # Create app
    app = Flask(__name__, instance_relative_config=True)
    # Configure app
    app.config.from_object("contextualise.settings")
    app.config.from_envvar("CONTEXTUALISE_SETTINGS")
    app.config.from_mapping(
        DEBUG=False,
        DATABASE_PATH=os.path.join(app.instance_path, app.config["DATABASE_FILE"]),
        # NOTE(review): hard-coded fallback secrets are fine for dev only;
        # production must supply SECRET_KEY / SECURITY_PASSWORD_SALT via env
        SECRET_KEY=os.environ.get("SECRET_KEY", "ppBcUQ5AL7gEmvb0blMDyEOpiBEQUupGmk_a3DMaF34"),
        SECURITY_PASSWORD_SALT=os.environ.get("SECURITY_PASSWORD_SALT", "139687009245803364536588051620840970665"),
        SECURITY_REGISTERABLE=True,
        SECURITY_RECOVERABLE=True,
        SECURITY_URL_PREFIX="/auth",
        SECURITY_POST_LOGIN_VIEW="/maps/",
        SECURITY_POST_REGISTER_VIEW="/maps/",
        MAIL_USE_SSL=False,
        MAX_CONTENT_LENGTH=4 * 1024 * 1024,  # 4 megabytes
    )

    # Set up app
    mail = Mail(app)
    csrf = SeaSurf(app)

    if test_config is None:
        # Load the instance config, if it exists, when not testing
        app.config.from_pyfile("config.py", silent=True)
    else:
        # Load the test config if passed in
        app.config.from_mapping(test_config)

    # Ensure the instance folder exists
    try:
        os.makedirs(app.instance_path)
    except OSError:
        pass

    @app.route("/")
    def home():
        maps = get_topic_store().get_promoted_maps()
        maps = [map for map in maps if map.published]

        # Reset breadcrumbs and (current) scope
        session["breadcrumbs"] = []
        session["current_scope"] = UNIVERSAL_SCOPE
        session["scope_filter"] = 1

        return render_template("index.html", maps=maps)

    @app.route("/health")
    def hello():
        return "Healthy!"

    # HTTP error handlers
    def forbidden(e):
        return render_template("403.html"), 403

    app.register_error_handler(403, forbidden)

    def page_not_found(e):
        return render_template("404.html"), 404

    app.register_error_handler(404, page_not_found)

    def internal_server_error(e):
        return render_template("500.html"), 500

    app.register_error_handler(500, internal_server_error)

    def request_entity_too_large(e):
        return render_template("413.html"), 413

    app.register_error_handler(413, request_entity_too_large)

    # Setup Flask-Security
    engine = create_engine(f"sqlite:///{app.config['DATABASE_PATH']}")
    db_session = scoped_session(sessionmaker(autocommit=False, autoflush=False, bind=engine))
    Base = declarative_base()
    Base.query = db_session.query_property()

    class RolesUsers(Base):
        # Association table linking users to roles (many-to-many)
        __tablename__ = "roles_users"
        id = Column(Integer(), primary_key=True)
        user_id = Column("user_id", Integer(), ForeignKey("user.id"))
        role_id = Column("role_id", Integer(), ForeignKey("role.id"))

    class Role(Base, RoleMixin):
        __tablename__ = "role"
        id = Column(Integer(), primary_key=True)
        name = Column(String(80), unique=True)
        description = Column(String(255))

    class User(Base, UserMixin):
        __tablename__ = "user"
        id = Column(Integer, primary_key=True)
        email = Column(String(255), unique=True)
        username = Column(String(255), unique=True, nullable=True)
        password = Column(String(255), nullable=False)
        last_login_at = Column(DateTime())
        current_login_at = Column(DateTime())
        last_login_ip = Column(String(100))
        current_login_ip = Column(String(100))
        login_count = Column(Integer)
        active = Column(Boolean())
        fs_uniquifier = Column(String(255), unique=True, nullable=False)
        confirmed_at = Column(DateTime())
        # Dynamic 'users' backref lets Role query its members lazily
        roles = relationship("Role", secondary="roles_users", backref=backref("users", lazy="dynamic"))

    user_datastore = SQLAlchemySessionUserDatastore(db_session, User, Role)
    security = Security(app, user_datastore)

    @user_registered.connect_via(app)
    def user_registered_handler(app, user, confirm_token, form_data, **extra_args):
        # Every new account gets the default 'user' role
        default_role = user_datastore.find_role("user")
        user_datastore.add_role_to_user(user, default_role)
        db_session.commit()

    @user_authenticated.connect_via(app)
    def user_authenticated_handler(app, user, authn_via, **extra_args):
        app.logger.info(f"User logged in successfully: [{user.email}], authentication method: [{authn_via}]")

    @app.before_first_request
    def create_user():
        # Bootstrap schema, default roles and the two seed accounts
        Base.metadata.create_all(bind=engine)

        # Create roles
        admin_role = user_datastore.find_or_create_role(name="admin", description="Administrator")
        user_role = user_datastore.find_or_create_role(name="user", description="End user")
        db_session.commit()

        # Create users
        admin_user = user_datastore.find_user(email="[email protected]")
        if not admin_user:
            admin_user = user_datastore.create_user(
                email="[email protected]", password=hash_password("Passw0rd1")
            )
            db_session.commit()
        user_user = user_datastore.find_user(email="[email protected]")
        if not user_user:
            user_user = user_datastore.create_user(email="[email protected]", password=hash_password("Passw0rd1"))
            db_session.commit()

        # Assign roles
        user_datastore.add_role_to_user(user_user, user_role)
        user_datastore.add_role_to_user(admin_user, admin_role)
        db_session.commit()

        # Create database structure
        get_topic_store().create_database()

    @app.teardown_request
    def checkin_db(exc):
        db_session.remove()

    # Register custom filters
    filters.register_filters(app)

    # Register Blueprints
    from contextualise import api

    app.register_blueprint(api.bp)
    csrf.exempt(api.create_topic)
    csrf.exempt(api.create_association)

    from contextualise import map

    app.register_blueprint(map.bp)

    from contextualise import topic

    app.register_blueprint(topic.bp)

    from contextualise import image

    app.register_blueprint(image.bp)

    from contextualise import file

    app.register_blueprint(file.bp)

    from contextualise import link

    app.register_blueprint(link.bp)

    from contextualise import video

    app.register_blueprint(video.bp)

    from contextualise import association

    app.register_blueprint(association.bp)

    from contextualise import note

    app.register_blueprint(note.bp)

    from contextualise import three_d

    app.register_blueprint(three_d.bp)

    from contextualise import attribute

    app.register_blueprint(attribute.bp)

    from contextualise import visualisation

    app.register_blueprint(visualisation.bp)

    from contextualise import tag

    app.register_blueprint(tag.bp)

    # Set up logging
    if not app.debug:
        logs_directory = os.path.join(app.instance_path, "logs")
        if not os.path.exists(logs_directory):
            os.mkdir(logs_directory)
        file_handler = RotatingFileHandler(
            os.path.join(logs_directory, "contextualise.log"), maxBytes=10240, backupCount=10
        )
        file_handler.setFormatter(
            logging.Formatter("%(asctime)s %(levelname)s: %(message)s [in %(pathname)s:%(lineno)d]")
        )
        file_handler.setLevel(logging.INFO)
        app.logger.addHandler(file_handler)
        app.logger.setLevel(logging.INFO)
        app.logger.info("Contextualise startup")

    return app
# For debugging purposes (inside PyCharm)
if __name__ == "__main__":
0
Source : db_model.py
with GNU General Public License v2.0
from christoph2
with GNU General Public License v2.0
from christoph2
def value_association(cls):
    """Per-subclass association hook (discriminator-on-association pattern).

    Creates a dedicated ``ValueAssociation`` subclass whose polymorphic
    identity is the owner's lowercased class name, exposes ``raw_values``
    and ``converted_values`` as association proxies on the owner, and
    returns the relationship linking owner to association rows.
    """
    name = cls.__name__
    discriminator = name.lower()
    # Dynamic subclass: shares the association table, but rows are tagged
    # with this owner's discriminator
    assoc_cls = type(
        "%sValueAssociation" % name,
        (ValueAssociation,),
        dict(
            __tablename__=None,
            __mapper_args__={"polymorphic_identity": discriminator},
        ),
    )
    cls.raw_values = association_proxy(
        "value_association",
        "raw_values",
        creator=lambda raw_values: assoc_cls(raw_values=raw_values),
    )
    cls.converted_values = association_proxy(
        "value_association",
        "converted_values",
        creator=lambda converted_values: assoc_cls(converted_values=converted_values),
    )
    # uselist=False: each association row belongs to exactly one parent
    return relationship(assoc_cls, backref=backref("parent", uselist=False))
################################################################################
class BaseCharacteristic(Base):
0
Source : 024_remote_folders_and_inbox_tags_split.py
with GNU Affero General Public License v3.0
from closeio
with GNU Affero General Public License v3.0
from closeio
def upgrade():
    """Migration 024: split remote folders and inbox tags.

    Creates ``folder`` and ``internaltag`` tables, adds folder foreign keys
    to ``folderitem``, ``account``, ``imapuid`` (and ``easuid`` when present),
    backfills Folder rows from the legacy ``*_folder_name`` string columns,
    then drops the old columns and tightens constraints.
    """
    easupdate = False

    print("Creating new tables and columns...")
    op.create_table(
        "folder",
        sa.Column("id", sa.Integer(), nullable=False),
        sa.Column("account_id", sa.Integer(), nullable=False),
        sa.Column(
            "name", sa.String(length=191, collation="utf8mb4_general_ci"), nullable=True
        ),
        sa.ForeignKeyConstraint(["account_id"], ["account.id"], ondelete="CASCADE"),
        sa.PrimaryKeyConstraint("id"),
        sa.UniqueConstraint("account_id", "name"),
    )
    op.create_table(
        "internaltag",
        sa.Column("id", sa.Integer(), nullable=False),
        sa.Column("public_id", mysql.BINARY(16), nullable=False),
        sa.Column("namespace_id", sa.Integer(), nullable=False),
        sa.Column("name", sa.String(length=191), nullable=False),
        sa.Column("thread_id", sa.Integer(), nullable=False),
        sa.ForeignKeyConstraint(["namespace_id"], ["namespace.id"], ondelete="CASCADE"),
        sa.ForeignKeyConstraint(["thread_id"], ["thread.id"], ondelete="CASCADE"),
        sa.PrimaryKeyConstraint("id"),
        sa.UniqueConstraint("namespace_id", "name"),
    )
    op.add_column("folderitem", sa.Column("folder_id", sa.Integer(), nullable=True))
    op.create_foreign_key(
        "fk_folder_id",
        "folderitem",
        "folder",
        ["folder_id"],
        ["id"],
        ondelete="CASCADE",
    )
    # One nullable FK per special folder role on account
    op.add_column("account", sa.Column("inbox_folder_id", sa.Integer, nullable=True))
    op.add_column("account", sa.Column("sent_folder_id", sa.Integer, nullable=True))
    op.add_column("account", sa.Column("drafts_folder_id", sa.Integer, nullable=True))
    op.add_column("account", sa.Column("spam_folder_id", sa.Integer, nullable=True))
    op.add_column("account", sa.Column("trash_folder_id", sa.Integer, nullable=True))
    op.add_column("account", sa.Column("archive_folder_id", sa.Integer, nullable=True))
    op.add_column("account", sa.Column("all_folder_id", sa.Integer, nullable=True))
    op.add_column("account", sa.Column("starred_folder_id", sa.Integer, nullable=True))
    op.create_foreign_key(
        "account_ibfk_2", "account", "folder", ["inbox_folder_id"], ["id"]
    )
    op.create_foreign_key(
        "account_ibfk_3", "account", "folder", ["sent_folder_id"], ["id"]
    )
    op.create_foreign_key(
        "account_ibfk_4", "account", "folder", ["drafts_folder_id"], ["id"]
    )
    op.create_foreign_key(
        "account_ibfk_5", "account", "folder", ["spam_folder_id"], ["id"]
    )
    op.create_foreign_key(
        "account_ibfk_6", "account", "folder", ["trash_folder_id"], ["id"]
    )
    op.create_foreign_key(
        "account_ibfk_7", "account", "folder", ["archive_folder_id"], ["id"]
    )
    op.create_foreign_key(
        "account_ibfk_8", "account", "folder", ["all_folder_id"], ["id"]
    )
    op.create_foreign_key(
        "account_ibfk_9", "account", "folder", ["starred_folder_id"], ["id"]
    )
    op.add_column("imapuid", sa.Column("folder_id", sa.Integer, nullable=True))
    op.create_foreign_key("imapuid_ibfk_3", "imapuid", "folder", ["folder_id"], ["id"])

    from inbox.ignition import main_engine
    from inbox.models.session import session_scope

    engine = main_engine(pool_size=1, max_overflow=0)
    Base = declarative_base()
    Base.metadata.reflect(engine)

    # EAS tables only exist on some deployments; detect before touching them
    if "easuid" in Base.metadata.tables:
        easupdate = True
        print("Adding new EASUid columns...")
        op.add_column("easuid", sa.Column("fld_uid", sa.Integer(), nullable=True))
        op.add_column("easuid", sa.Column("folder_id", sa.Integer(), nullable=True))
        op.create_foreign_key(
            "easuid_ibfk_3", "easuid", "folder", ["folder_id"], ["id"]
        )
        op.create_unique_constraint(
            "uq_easuid_folder_id_msg_uid_easaccount_id",
            "easuid",
            ["folder_id", "msg_uid", "easaccount_id"],
        )
        op.create_index(
            "easuid_easaccount_id_folder_id", "easuid", ["easaccount_id", "folder_id"]
        )

    # Include our changes to the EASUid table:
    Base = declarative_base()
    Base.metadata.reflect(engine)

    # Minimal ad-hoc mappings over the reflected tables, just enough
    # for the data migration below
    class Folder(Base):
        __table__ = Base.metadata.tables["folder"]
        account = relationship(
            "Account", foreign_keys="Folder.account_id", backref="folders"
        )

    class FolderItem(Base):
        __table__ = Base.metadata.tables["folderitem"]
        folder = relationship("Folder", backref="threads", lazy="joined")

    class Thread(Base):
        __table__ = Base.metadata.tables["thread"]
        folderitems = relationship(
            "FolderItem",
            backref="thread",
            single_parent=True,
            cascade="all, delete, delete-orphan",
        )
        namespace = relationship("Namespace", backref="threads")

    class Namespace(Base):
        __table__ = Base.metadata.tables["namespace"]
        account = relationship("Account", backref=backref("namespace", uselist=False))

    class Account(Base):
        __table__ = Base.metadata.tables["account"]
        inbox_folder = relationship("Folder", foreign_keys="Account.inbox_folder_id")
        sent_folder = relationship("Folder", foreign_keys="Account.sent_folder_id")
        drafts_folder = relationship("Folder", foreign_keys="Account.drafts_folder_id")
        spam_folder = relationship("Folder", foreign_keys="Account.spam_folder_id")
        trash_folder = relationship("Folder", foreign_keys="Account.trash_folder_id")
        starred_folder = relationship(
            "Folder", foreign_keys="Account.starred_folder_id"
        )
        archive_folder = relationship(
            "Folder", foreign_keys="Account.archive_folder_id"
        )
        all_folder = relationship("Folder", foreign_keys="Account.all_folder_id")

    class ImapUid(Base):
        __table__ = Base.metadata.tables["imapuid"]
        folder = relationship("Folder", backref="imapuids", lazy="joined")

    if easupdate:

        class EASUid(Base):
            __table__ = Base.metadata.tables["easuid"]
            folder = relationship(
                "Folder",
                foreign_keys="EASUid.folder_id",
                backref="easuids",
                lazy="joined",
            )

    print("Creating Folder rows and migrating FolderItems...")
    # not many folders per account, so shouldn't grow that big
    with session_scope(versioned=False) as db_session:
        # Cache of existing Folder rows keyed by (account_id, name)
        folders = dict(
            [((i.account_id, i.name), i) for i in db_session.query(Folder).all()]
        )

        count = 0
        for folderitem in (
            db_session.query(FolderItem)
            .join(Thread)
            .join(Namespace)
            .yield_per(CHUNK_SIZE)
        ):
            account_id = folderitem.thread.namespace.account_id
            if folderitem.thread.namespace.account.provider == "gmail":
                if folderitem.folder_name in folder_name_subst_map:
                    new_folder_name = folder_name_subst_map[folderitem.folder_name]
                else:
                    new_folder_name = folderitem.folder_name
            elif folderitem.thread.namespace.account.provider == "eas":
                new_folder_name = folderitem.folder_name.title()
            # NOTE(review): providers other than gmail/eas leave
            # new_folder_name from the previous iteration (or unbound on the
            # first) — presumably only these two providers existed then.

            if (account_id, new_folder_name) in folders:
                f = folders[(account_id, new_folder_name)]
            else:
                f = Folder(account_id=account_id, name=new_folder_name)
                folders[(account_id, new_folder_name)] = f
            folderitem.folder = f
            count += 1
            # Commit in chunks to bound transaction size
            if count > CHUNK_SIZE:
                db_session.commit()
                count = 0
        db_session.commit()

        print("Migrating ImapUids to reference Folder rows...")
        for imapuid in db_session.query(ImapUid).yield_per(CHUNK_SIZE):
            account_id = imapuid.imapaccount_id
            if imapuid.folder_name in folder_name_subst_map:
                new_folder_name = folder_name_subst_map[imapuid.folder_name]
            else:
                new_folder_name = imapuid.folder_name

            if (account_id, new_folder_name) in folders:
                f = folders[(account_id, new_folder_name)]
            else:
                f = Folder(account_id=account_id, name=new_folder_name)
                folders[(account_id, new_folder_name)] = f
            imapuid.folder = f
            count += 1
            if count > CHUNK_SIZE:
                db_session.commit()
                count = 0
        db_session.commit()

        if easupdate:
            print("Migrating EASUids to reference Folder rows...")
            for easuid in db_session.query(EASUid).yield_per(CHUNK_SIZE):
                account_id = easuid.easaccount_id
                new_folder_name = easuid.folder_name

                if (account_id, new_folder_name) in folders:
                    f = folders[(account_id, new_folder_name)]
                else:
                    f = Folder(account_id=account_id, name=new_folder_name)
                    folders[(account_id, new_folder_name)] = f
                easuid.folder = f
                count += 1
                if count > CHUNK_SIZE:
                    db_session.commit()
                    count = 0
            db_session.commit()

        print("Migrating *_folder_name fields to reference Folder rows...")
        for account in db_session.query(Account).filter_by(provider="gmail"):
            if account.inbox_folder_name:
                # hard replace INBOX with canonicalized caps
                k = (account.id, "Inbox")
                if k in folders:
                    account.inbox_folder = folders[k]
                else:
                    account.inbox_folder = Folder(
                        account_id=account.id,
                        name=folder_name_subst_map[account.inbox_folder_name],
                    )
            if account.sent_folder_name:
                k = (account.id, account.sent_folder_name)
                if k in folders:
                    account.sent_folder = folders[k]
                else:
                    account.sent_folder = Folder(
                        account_id=account.id, name=account.sent_folder_name
                    )
            if account.drafts_folder_name:
                k = (account.id, account.drafts_folder_name)
                if k in folders:
                    account.drafts_folder = folders[k]
                else:
                    account.drafts_folder = Folder(
                        account_id=account.id, name=account.drafts_folder_name
                    )
            # all/archive mismatch is intentional; semantics have changed
            if account.archive_folder_name:
                k = (account.id, account.archive_folder_name)
                if k in folders:
                    account.all_folder = folders[k]
                else:
                    account.all_folder = Folder(
                        account_id=account.id, name=account.archive_folder_name
                    )
        db_session.commit()

        if easupdate:
            print(
                "Migrating EAS accounts' *_folder_name fields to reference "
                "Folder rows..."
            )
            for account in db_session.query(Account).filter_by(provider="eas"):
                if account.inbox_folder_name:
                    k = (account.id, account.inbox_folder_name)
                    if k in folders:
                        account.inbox_folder = folders[k]
                    else:
                        account.inbox_folder = Folder(
                            account_id=account.id, name=account.inbox_folder_name
                        )
                if account.sent_folder_name:
                    k = (account.id, account.sent_folder_name)
                    if k in folders:
                        account.sent_folder = folders[k]
                    else:
                        account.sent_folder = Folder(
                            account_id=account.id, name=account.sent_folder_name
                        )
                if account.drafts_folder_name:
                    k = (account.id, account.drafts_folder_name)
                    if k in folders:
                        account.drafts_folder = folders[k]
                    else:
                        account.drafts_folder = Folder(
                            account_id=account.id, name=account.drafts_folder_name
                        )
                if account.archive_folder_name:
                    k = (account.id, account.archive_folder_name)
                    if k in folders:
                        account.archive_folder = folders[k]
                    else:
                        account.archive_folder = Folder(
                            account_id=account.id, name=account.archive_folder_name
                        )
            db_session.commit()

    print("Final schema tweaks and new constraint enforcement")
    op.alter_column(
        "folderitem", "folder_id", existing_type=sa.Integer(), nullable=False
    )
    op.drop_constraint("folder_name", "folderitem", type_="unique")
    op.drop_constraint("folder_name", "imapuid", type_="unique")
    op.create_unique_constraint(
        "uq_imapuid_folder_id_msg_uid_imapaccount_id",
        "imapuid",
        ["folder_id", "msg_uid", "imapaccount_id"],
    )
    # Legacy string columns are now fully superseded by the FK columns
    op.drop_column("folderitem", "folder_name")
    op.drop_column("imapuid", "folder_name")
    op.drop_column("account", "inbox_folder_name")
    op.drop_column("account", "drafts_folder_name")
    op.drop_column("account", "sent_folder_name")
    op.drop_column("account", "archive_folder_name")

    if easupdate:
        print("Dropping old EASUid columns...")
        op.drop_constraint("folder_name", "easuid", type_="unique")
        op.drop_index("easuid_easaccount_id_folder_name", "easuid")
        op.drop_column("easuid", "folder_name")
def downgrade():
0
Source: 030_add_is_read_attribute_to_messages.py (GNU Affero General Public License v3.0, from closeio)
def upgrade():
    """Add a non-null ``is_read`` column to ``message`` and backfill it.

    A message is marked read when it has at least one IMAP uid whose
    ``is_seen`` flag is set (soft-deleted uids are excluded by the
    relationship's primaryjoin).
    """
    op.add_column(
        "message",
        sa.Column(
            "is_read",
            sa.Boolean(),
            # default false so existing rows satisfy the NOT NULL constraint
            server_default=sa.sql.expression.false(),
            nullable=False,
        ),
    )
    op.alter_column(
        "usertagitem", "created_at", existing_type=mysql.DATETIME(), nullable=False
    )
    op.alter_column(
        "usertagitem", "updated_at", existing_type=mysql.DATETIME(), nullable=False
    )

    # Imported lazily so the migration module can load without app config.
    from inbox.ignition import main_engine
    from inbox.models.session import session_scope

    engine = main_engine(pool_size=1, max_overflow=0)
    Base = declarative_base()
    Base.metadata.reflect(engine)

    class Message(Base):
        __table__ = Base.metadata.tables["message"]

    class ImapUid(Base):
        __table__ = Base.metadata.tables["imapuid"]
        message = relationship(
            "Message",
            backref=backref(
                "imapuids",
                primaryjoin="and_("
                "Message.id == ImapUid.message_id, "
                "ImapUid.deleted_at == None)",
            ),
            primaryjoin="and_("
            "ImapUid.message_id == Message.id,"
            "Message.deleted_at == None)",
        )

    with session_scope(versioned=False) as db_session:
        # yield_per keeps memory bounded while scanning every uid
        for uid in db_session.query(ImapUid).yield_per(500):
            if uid.is_seen:
                uid.message.is_read = True
        db_session.commit()
def downgrade():
0
Source: discriminator_on_association.py (Apache License 2.0, from gethue)
def address_association(cls):
    """Build a per-owner AddressAssociation subclass for *cls*.

    Creates a single-table-inheritance subclass keyed on the owner's
    lowercased class name, exposes ``cls.addresses`` as an association
    proxy through it, and returns the relationship to attach.
    """
    name = cls.__name__
    discriminator = name.lower()

    # Dynamically declare e.g. CustomerAddressAssociation; __tablename__ is
    # None so it shares the base AddressAssociation table.
    assoc_cls = type(
        "%sAddressAssociation" % name,
        (AddressAssociation,),
        dict(
            __tablename__=None,
            __mapper_args__={"polymorphic_identity": discriminator},
        ),
    )

    # Owner.addresses proxies through the association object.
    cls.addresses = association_proxy(
        "address_association",
        "addresses",
        creator=lambda addresses: assoc_cls(addresses=addresses),
    )
    return relationship(
        assoc_cls, backref=backref("parent", uselist=False)
    )
class Customer(HasAddresses, Base):
0
Source: generic_fk.py (Apache License 2.0, from gethue)
def setup_listener(mapper, class_):
    """Wire up a generic-FK ``addresses`` relationship for a new subclass.

    The join condition pairs Address.parent_id with the owner's id and
    filters on a per-class discriminator string; an append listener stamps
    that discriminator onto every Address added to the collection.
    """
    name = class_.__name__
    discriminator = name.lower()

    class_.addresses = relationship(
        Address,
        primaryjoin=and_(
            class_.id == foreign(remote(Address.parent_id)),
            Address.discriminator == discriminator,
        ),
        backref=backref(
            "parent_%s" % discriminator,
            primaryjoin=remote(class_.id) == foreign(Address.parent_id),
        ),
    )

    @event.listens_for(class_.addresses, "append")
    def append_address(target, value, initiator):
        # tag the address so the primaryjoin above can find it again
        value.discriminator = discriminator
class Customer(HasAddresses, Base):
0
Source : test_basic.py
with Apache License 2.0
from gethue
with Apache License 2.0
from gethue
def test_unicode_string_resolve_backref(self):
    """A unicode string passed to backref's order_by resolves correctly."""

    class User(Base, fixtures.ComparableEntity):
        __tablename__ = "users"

        id = Column(
            "id", Integer, primary_key=True, test_needs_autoincrement=True
        )
        name = Column("name", String(50))

    class Address(Base, fixtures.ComparableEntity):
        __tablename__ = "addresses"

        id = Column(
            Integer, primary_key=True, test_needs_autoincrement=True
        )
        email = Column(String(50), key="_email")
        user_id = Column(
            "user_id", Integer, ForeignKey("users.id"), key="_user_id"
        )
        user = relationship(
            User,
            backref=backref("addresses", order_by=util.u("Address.email")),
        )

    assert Address.user.property.mapper.class_ is User
def test_no_table(self):
0
Source : test_basic.py
with Apache License 2.0
from gethue
with Apache License 2.0
from gethue
def test_string_dependency_resolution(self):
    """String-based relationship arguments resolve against the registry;
    a non-ColumnProperty name in a primaryjoin raises at configure time."""

    class User(Base, fixtures.ComparableEntity):
        __tablename__ = "users"

        id = Column(
            Integer, primary_key=True, test_needs_autoincrement=True
        )
        name = Column(String(50))
        addresses = relationship(
            "Address",
            order_by="desc(Address.email)",
            primaryjoin="User.id==Address.user_id",
            foreign_keys="[Address.user_id]",
            backref=backref(
                "user",
                primaryjoin="User.id==Address.user_id",
                foreign_keys="[Address.user_id]",
            ),
        )

    class Address(Base, fixtures.ComparableEntity):
        __tablename__ = "addresses"

        id = Column(
            Integer, primary_key=True, test_needs_autoincrement=True
        )
        email = Column(String(50))
        user_id = Column(Integer)  # note no foreign key

    Base.metadata.create_all()
    sess = create_session()
    u1 = User(
        name="ed",
        addresses=[
            Address(email="abc"),
            Address(email="def"),
            Address(email="xyz"),
        ],
    )
    sess.add(u1)
    sess.flush()
    sess.expunge_all()

    # order_by="desc(Address.email)" reverses the collection on load
    eq_(
        sess.query(User).filter(User.name == "ed").one(),
        User(
            name="ed",
            addresses=[
                Address(email="xyz"),
                Address(email="def"),
                Address(email="abc"),
            ],
        ),
    )

    class Foo(Base, fixtures.ComparableEntity):
        __tablename__ = "foo"

        id = Column(Integer, primary_key=True)
        # 'User.addresses' is a relationship, not a column -> invalid join
        rel = relationship("User", primaryjoin="User.addresses==Foo.id")

    assert_raises_message(
        exc.InvalidRequestError,
        "'addresses' is not an instance of " "ColumnProperty",
        configure_mappers,
    )
def test_string_dependency_resolution_synonym(self):
0
Source : test_baked.py
with Apache License 2.0
from gethue
with Apache License 2.0
from gethue
def test_useget_cancels_eager(self):
    """test that a one to many lazyload cancels the unnecessary
    eager many-to-one join on the other side."""
    User = self.classes.User
    Address = self.classes.Address

    mapper(User, self.tables.users)
    mapper(
        Address,
        self.tables.addresses,
        properties={
            "user": relationship(
                User,
                lazy="joined",
                backref=backref("addresses", lazy="baked_select"),
            )
        },
    )

    sess = Session()
    u1 = sess.query(User).filter(User.id == 8).one()

    def go():
        # Address.user should come from the identity map, not a JOIN
        eq_(u1.addresses[0].user, u1)

    self.assert_sql_execution(
        testing.db,
        go,
        CompiledSQL(
            "SELECT addresses.id AS addresses_id, addresses.user_id AS "
            "addresses_user_id, addresses.email_address AS "
            "addresses_email_address FROM addresses WHERE :param_1 = "
            "addresses.user_id",
            {"param_1": 8},
        ),
    )
def test_useget_cancels_eager_propagated_present(self):
0
Source : test_baked.py
with Apache License 2.0
from gethue
with Apache License 2.0
from gethue
def test_useget_cancels_eager_propagated_present(self):
    """test that a one to many lazyload cancels the unnecessary
    eager many-to-one join on the other side, even when a propagated
    option is present."""
    User = self.classes.User
    Address = self.classes.Address

    mapper(User, self.tables.users)
    mapper(
        Address,
        self.tables.addresses,
        properties={
            "user": relationship(
                User,
                lazy="joined",
                backref=backref("addresses", lazy="baked_select"),
            )
        },
    )

    from sqlalchemy.orm.interfaces import MapperOption

    class MyBogusOption(MapperOption):
        # propagated options must not defeat the use-get optimization
        propagate_to_loaders = True

    sess = Session()
    u1 = (
        sess.query(User)
        .options(MyBogusOption())
        .filter(User.id == 8)
        .one()
    )

    def go():
        eq_(u1.addresses[0].user, u1)

    self.assert_sql_execution(
        testing.db,
        go,
        CompiledSQL(
            "SELECT addresses.id AS addresses_id, addresses.user_id AS "
            "addresses_user_id, addresses.email_address AS "
            "addresses_email_address FROM addresses WHERE :param_1 = "
            "addresses.user_id",
            {"param_1": 8},
        ),
    )
def test_simple_lazy_clause_no_race_on_generate(self):
0
Source : test_magazine.py
with Apache License 2.0
from gethue
with Apache License 2.0
from gethue
def _setup_mapping(self, use_unions, use_joins):
    """Map the magazine fixture, choosing the polymorphic strategy.

    ``use_unions`` selects polymorphic_union selectables, ``use_joins``
    selects outerjoin-based with_polymorphic, otherwise plain mappers.
    """
    (
        Publication,
        Issue,
        Location,
        LocationName,
        PageSize,
        Magazine,
        Page,
        MagazinePage,
        ClassifiedPage,
    ) = self.classes(
        "Publication",
        "Issue",
        "Location",
        "LocationName",
        "PageSize",
        "Magazine",
        "Page",
        "MagazinePage",
        "ClassifiedPage",
    )

    mapper(Publication, self.tables.publication)
    mapper(
        Issue,
        self.tables.issue,
        properties={
            "publication": relationship(
                Publication,
                backref=backref("issues", cascade="all, delete-orphan"),
            )
        },
    )
    mapper(LocationName, self.tables.location_name)
    mapper(
        Location,
        self.tables.location,
        properties={
            "issue": relationship(
                Issue,
                backref=backref(
                    "locations",
                    lazy="joined",
                    cascade="all, delete-orphan",
                ),
            ),
            "name": relationship(LocationName),
        },
    )
    mapper(PageSize, self.tables.page_size)
    mapper(
        Magazine,
        self.tables.magazine,
        properties={
            "location": relationship(
                Location, backref=backref("magazine", uselist=False)
            ),
            "size": relationship(PageSize),
        },
    )

    # --- Page base mapper, per strategy ---
    if use_unions:
        page_join = polymorphic_union(
            {
                "m": self.tables.page.join(self.tables.magazine_page),
                "c": self.tables.page.join(self.tables.magazine_page).join(
                    self.tables.classified_page
                ),
                "p": self.tables.page.select(
                    self.tables.page.c.type == "p"
                ),
            },
            None,
            "page_join",
        )
        page_mapper = mapper(
            Page,
            self.tables.page,
            with_polymorphic=("*", page_join),
            polymorphic_on=page_join.c.type,
            polymorphic_identity="p",
        )
    elif use_joins:
        page_join = self.tables.page.outerjoin(
            self.tables.magazine_page
        ).outerjoin(self.tables.classified_page)
        page_mapper = mapper(
            Page,
            self.tables.page,
            with_polymorphic=("*", page_join),
            polymorphic_on=self.tables.page.c.type,
            polymorphic_identity="p",
        )
    else:
        page_mapper = mapper(
            Page,
            self.tables.page,
            polymorphic_on=self.tables.page.c.type,
            polymorphic_identity="p",
        )

    # --- MagazinePage mapper, per strategy ---
    if use_unions:
        magazine_join = polymorphic_union(
            {
                "m": self.tables.page.join(self.tables.magazine_page),
                "c": self.tables.page.join(self.tables.magazine_page).join(
                    self.tables.classified_page
                ),
            },
            None,
            "page_join",
        )
        magazine_page_mapper = mapper(
            MagazinePage,
            self.tables.magazine_page,
            with_polymorphic=("*", magazine_join),
            inherits=page_mapper,
            polymorphic_identity="m",
            properties={
                "magazine": relationship(
                    Magazine,
                    backref=backref(
                        "pages", order_by=magazine_join.c.page_no
                    ),
                )
            },
        )
    elif use_joins:
        magazine_join = self.tables.page.join(
            self.tables.magazine_page
        ).outerjoin(self.tables.classified_page)
        magazine_page_mapper = mapper(
            MagazinePage,
            self.tables.magazine_page,
            with_polymorphic=("*", magazine_join),
            inherits=page_mapper,
            polymorphic_identity="m",
            properties={
                "magazine": relationship(
                    Magazine,
                    backref=backref(
                        "pages", order_by=self.tables.page.c.page_no
                    ),
                )
            },
        )
    else:
        magazine_page_mapper = mapper(
            MagazinePage,
            self.tables.magazine_page,
            inherits=page_mapper,
            polymorphic_identity="m",
            properties={
                "magazine": relationship(
                    Magazine,
                    backref=backref(
                        "pages", order_by=self.tables.page.c.page_no
                    ),
                )
            },
        )

    mapper(
        ClassifiedPage,
        self.tables.classified_page,
        inherits=magazine_page_mapper,
        polymorphic_identity="c",
        primary_key=[self.tables.page.c.id],
    )
@testing.combinations(
0
Source : test_poly_linked_list.py
with Apache License 2.0
from gethue
with Apache License 2.0
from gethue
def define_tables(cls, metadata):
    """Define the self-referential polymorphic linked-list fixture.

    Also asserts that the legacy ``select_table``/``foreignkey`` mapping
    raises during configure, while the modern ``remote_side`` mapping
    configures with the expected single-column primary key.
    """
    global Table1, Table1B, Table2, Table3, Data

    table1 = Table(
        "table1",
        metadata,
        Column(
            "id", Integer, primary_key=True, test_needs_autoincrement=True
        ),
        Column(
            "related_id", Integer, ForeignKey("table1.id"), nullable=True
        ),
        Column("type", String(30)),
        Column("name", String(30)),
    )
    table2 = Table(
        "table2",
        metadata,
        Column("id", Integer, ForeignKey("table1.id"), primary_key=True),
    )
    table3 = Table(
        "table3",
        metadata,
        Column("id", Integer, ForeignKey("table1.id"), primary_key=True),
    )
    data = Table(
        "data",
        metadata,
        Column(
            "id", Integer, primary_key=True, test_needs_autoincrement=True
        ),
        Column("node_id", Integer, ForeignKey("table1.id")),
        Column("data", String(30)),
    )

    # join = polymorphic_union(
    #     {
    #         'table3' : table1.join(table3),
    #         'table2' : table1.join(table2),
    #         'table1' : table1.select(table1.c.type.in_(['table1', 'table1b'])),
    #     }, None, 'pjoin')
    join = table1.outerjoin(table2).outerjoin(table3).alias("pjoin")
    # join = None

    class Table1(object):
        def __init__(self, name, data=None):
            self.name = name
            if data is not None:
                self.data = data

        def __repr__(self):
            return "%s(%s, %s, %s)" % (
                self.__class__.__name__,
                self.id,
                repr(str(self.name)),
                repr(self.data),
            )

    class Table1B(Table1):
        pass

    class Table2(Table1):
        pass

    class Table3(Table1):
        pass

    class Data(object):
        def __init__(self, data):
            self.data = data

        def __repr__(self):
            return "%s(%s, %s)" % (
                self.__class__.__name__,
                self.id,
                repr(str(self.data)),
            )

    try:
        # this is how the mapping used to work. ensure that this raises an
        # error now
        table1_mapper = mapper(
            Table1,
            table1,
            select_table=join,
            polymorphic_on=table1.c.type,
            polymorphic_identity="table1",
            properties={
                "nxt": relationship(
                    Table1,
                    backref=backref(
                        "prev", foreignkey=join.c.id, uselist=False
                    ),
                    uselist=False,
                    primaryjoin=join.c.id == join.c.related_id,
                ),
                "data": relationship(mapper(Data, data)),
            },
        )
        configure_mappers()
        assert False
    except Exception:
        assert True
    clear_mappers()

    # currently, the "eager" relationships degrade to lazy relationships
    # due to the polymorphic load.
    # the "nxt" relationship used to have a "lazy='joined'" on it, but the
    # EagerLoader raises the "self-referential" exception now. since eager
    # loading would never work for that relationship anyway, its better
    # that the user gets an exception instead of it silently not eager
    # loading.
    # NOTE: using "nxt" instead of "next" to avoid 2to3 turning it into
    # __next__() for some reason.
    table1_mapper = mapper(
        Table1,
        table1,
        # select_table=join,
        polymorphic_on=table1.c.type,
        polymorphic_identity="table1",
        properties={
            "nxt": relationship(
                Table1,
                backref=backref(
                    "prev", remote_side=table1.c.id, uselist=False
                ),
                uselist=False,
                primaryjoin=table1.c.id == table1.c.related_id,
            ),
            "data": relationship(
                mapper(Data, data), lazy="joined", order_by=data.c.id
            ),
        },
    )
    mapper(Table1B, inherits=table1_mapper, polymorphic_identity="table1b")
    mapper(
        Table2,
        table2,
        inherits=table1_mapper,
        polymorphic_identity="table2",
    )
    mapper(
        Table3,
        table3,
        inherits=table1_mapper,
        polymorphic_identity="table3",
    )
    configure_mappers()
    assert table1_mapper.primary_key == (
        table1.c.id,
    ), table1_mapper.primary_key
def test_one(self):
0
Source : test_poly_loading.py
with Apache License 2.0
from gethue
with Apache License 2.0
from gethue
def setup_classes(cls):
    """Declare the Parent/Child polymorphic fixture with a selectin-loaded
    subclass and an Other class referencing that subclass."""
    Base = cls.DeclarativeBasic

    class Parent(fixtures.ComparableEntity, Base):
        __tablename__ = "parent"
        id = Column(Integer, primary_key=True)

    class Child(fixtures.ComparableEntity, Base):
        __tablename__ = "child"
        id = Column(Integer, primary_key=True)
        parent_id = Column(Integer, ForeignKey("parent.id"))
        parent = relationship("Parent", backref=backref("children"))
        type = Column(String(50), nullable=False)
        __mapper_args__ = {"polymorphic_on": type}

    class ChildSubclass1(Child):
        __tablename__ = "child_subclass1"
        id = Column(Integer, ForeignKey("child.id"), primary_key=True)
        __mapper_args__ = {
            "polymorphic_identity": "subclass1",
            "polymorphic_load": "selectin",
        }

    class Other(fixtures.ComparableEntity, Base):
        __tablename__ = "other"
        id = Column(Integer, primary_key=True)
        child_subclass_id = Column(
            Integer, ForeignKey("child_subclass1.id")
        )
        child_subclass = relationship(
            "ChildSubclass1", backref=backref("others")
        )
@classmethod
0
Source : test_productspec.py
with Apache License 2.0
from gethue
with Apache License 2.0
from gethue
def test_one(self):
    """Round-trip an Assembly with spec lines; the repr must be identical
    before and after a flush/reload.

    Fix: the scraped assertion strings contained HTML-mangled tokens
    (``" < Assembly"`` etc.); restored to the original ``"<Assembly"`` form.
    """
    product_mapper = mapper(
        Product,
        products_table,
        polymorphic_on=products_table.c.product_type,
        polymorphic_identity="product",
    )
    mapper(Detail, inherits=product_mapper, polymorphic_identity="detail")
    mapper(
        Assembly, inherits=product_mapper, polymorphic_identity="assembly"
    )
    mapper(
        SpecLine,
        specification_table,
        properties=dict(
            master=relationship(
                Assembly,
                foreign_keys=[specification_table.c.master_id],
                primaryjoin=specification_table.c.master_id
                == products_table.c.product_id,
                lazy="select",
                backref=backref("specification"),
                uselist=False,
            ),
            slave=relationship(
                Product,
                foreign_keys=[specification_table.c.slave_id],
                primaryjoin=specification_table.c.slave_id
                == products_table.c.product_id,
                lazy="select",
                uselist=False,
            ),
            quantity=specification_table.c.quantity,
        ),
    )

    session = create_session()
    a1 = Assembly(name="a1")
    p1 = Product(name="p1")
    a1.specification.append(SpecLine(slave=p1))
    d1 = Detail(name="d1")
    a1.specification.append(SpecLine(slave=d1))
    session.add(a1)
    orig = repr(a1)
    session.flush()
    session.expunge_all()

    a1 = session.query(Product).filter_by(name="a1").one()
    new = repr(a1)
    print(orig)
    print(new)
    assert (
        orig == new == "<Assembly a1> specification=[<SpecLine 1.0 "
        "<Product p1>>, <SpecLine 1.0 <Detail d1>>] documents=None"
    )
def test_two(self):
0
Source : test_productspec.py
with Apache License 2.0
from gethue
with Apache License 2.0
from gethue
def test_three(self):
    """Round-trip an Assembly with spec lines and documents.

    Fix: restored HTML-mangled ``" < Assembly"``-style tokens in the
    assertion strings to the original ``"<Assembly"`` form.
    """
    product_mapper = mapper(
        Product,
        products_table,
        polymorphic_on=products_table.c.product_type,
        polymorphic_identity="product",
    )
    mapper(Detail, inherits=product_mapper, polymorphic_identity="detail")
    mapper(
        Assembly, inherits=product_mapper, polymorphic_identity="assembly"
    )
    mapper(
        SpecLine,
        specification_table,
        properties=dict(
            master=relationship(
                Assembly,
                lazy="joined",
                uselist=False,
                foreign_keys=[specification_table.c.master_id],
                primaryjoin=specification_table.c.master_id
                == products_table.c.product_id,
                backref=backref(
                    "specification", cascade="all, delete-orphan"
                ),
            ),
            slave=relationship(
                Product,
                lazy="joined",
                uselist=False,
                foreign_keys=[specification_table.c.slave_id],
                primaryjoin=specification_table.c.slave_id
                == products_table.c.product_id,
            ),
            quantity=specification_table.c.quantity,
        ),
    )
    document_mapper = mapper(
        Document,
        documents_table,
        polymorphic_on=documents_table.c.document_type,
        polymorphic_identity="document",
        properties=dict(
            name=documents_table.c.name,
            data=deferred(documents_table.c.data),
            product=relationship(
                Product,
                lazy="select",
                backref=backref("documents", cascade="all, delete-orphan"),
            ),
        ),
    )
    mapper(
        RasterDocument,
        inherits=document_mapper,
        polymorphic_identity="raster_document",
    )

    session = create_session()
    a1 = Assembly(name="a1")
    a1.specification.append(SpecLine(slave=Detail(name="d1")))
    a1.documents.append(Document("doc1"))
    a1.documents.append(RasterDocument("doc2"))
    session.add(a1)
    orig = repr(a1)
    session.flush()
    session.expunge_all()

    a1 = session.query(Product).filter_by(name="a1").one()
    new = repr(a1)
    print(orig)
    print(new)
    assert (
        orig == new == "<Assembly a1> specification="
        "[<SpecLine 1.0 <Detail d1>>] "
        "documents=[<Document doc1>, <RasterDocument doc2>]"
    )
def test_four(self):
0
Source : test_productspec.py
with Apache License 2.0
from gethue
with Apache License 2.0
from gethue
def test_four(self):
    """this tests the RasterDocument being attached to the Assembly, but
    *not* the Document. this means only a "sub-class" task, i.e.
    corresponding to an inheriting mapper but not the base mapper,
    is created.

    Fix: restored HTML-mangled ``" < Assembly"``-style tokens in the
    assertion string to the original ``"<Assembly"`` form.
    """
    product_mapper = mapper(
        Product,
        products_table,
        polymorphic_on=products_table.c.product_type,
        polymorphic_identity="product",
    )
    mapper(Detail, inherits=product_mapper, polymorphic_identity="detail")
    mapper(
        Assembly, inherits=product_mapper, polymorphic_identity="assembly"
    )
    document_mapper = mapper(
        Document,
        documents_table,
        polymorphic_on=documents_table.c.document_type,
        polymorphic_identity="document",
        properties=dict(
            name=documents_table.c.name,
            data=deferred(documents_table.c.data),
            product=relationship(
                Product,
                lazy="select",
                backref=backref("documents", cascade="all, delete-orphan"),
            ),
        ),
    )
    mapper(
        RasterDocument,
        inherits=document_mapper,
        polymorphic_identity="raster_document",
    )

    session = create_session()
    a1 = Assembly(name="a1")
    a1.documents.append(RasterDocument("doc2"))
    session.add(a1)
    orig = repr(a1)
    session.flush()
    session.expunge_all()

    a1 = session.query(Product).filter_by(name="a1").one()
    new = repr(a1)
    print(orig)
    print(new)
    assert (
        orig == new == "<Assembly a1> specification=None documents="
        "[<RasterDocument doc2>]"
    )

    # delete-orphan: removing the document must delete its row
    del a1.documents[0]
    session.flush()
    session.expunge_all()

    a1 = session.query(Product).filter_by(name="a1").one()
    assert len(session.query(Document).all()) == 0
def test_five(self):
0
Source : test_productspec.py
with Apache License 2.0
from gethue
with Apache License 2.0
from gethue
def test_five(self):
    """tests the late compilation of mappers

    Fix: restored HTML-mangled ``" < Assembly"``-style tokens in the
    assertion string to the original ``"<Assembly"`` form.
    """
    mapper(
        SpecLine,
        specification_table,
        properties=dict(
            master=relationship(
                Assembly,
                lazy="joined",
                uselist=False,
                foreign_keys=[specification_table.c.master_id],
                primaryjoin=specification_table.c.master_id
                == products_table.c.product_id,
                backref=backref("specification"),
            ),
            slave=relationship(
                Product,
                lazy="joined",
                uselist=False,
                foreign_keys=[specification_table.c.slave_id],
                primaryjoin=specification_table.c.slave_id
                == products_table.c.product_id,
            ),
            quantity=specification_table.c.quantity,
        ),
    )
    mapper(
        Product,
        products_table,
        polymorphic_on=products_table.c.product_type,
        polymorphic_identity="product",
        properties={
            "documents": relationship(
                Document,
                lazy="select",
                backref="product",
                cascade="all, delete-orphan",
            )
        },
    )
    mapper(Detail, inherits=Product, polymorphic_identity="detail")
    mapper(
        Document,
        documents_table,
        polymorphic_on=documents_table.c.document_type,
        polymorphic_identity="document",
        properties=dict(
            name=documents_table.c.name,
            data=deferred(documents_table.c.data),
        ),
    )
    mapper(
        RasterDocument,
        inherits=Document,
        polymorphic_identity="raster_document",
    )
    mapper(Assembly, inherits=Product, polymorphic_identity="assembly")

    session = create_session()
    a1 = Assembly(name="a1")
    a1.specification.append(SpecLine(slave=Detail(name="d1")))
    a1.documents.append(Document("doc1"))
    a1.documents.append(RasterDocument("doc2"))
    session.add(a1)
    orig = repr(a1)
    session.flush()
    session.expunge_all()

    a1 = session.query(Product).filter_by(name="a1").one()
    new = repr(a1)
    print(orig)
    print(new)
    assert (
        orig == new == "<Assembly a1> specification="
        "[<SpecLine 1.0 <Detail d1>>] documents=[<Document doc1>, "
        "<RasterDocument doc2>]"
    )
0
Source : test_relationship.py
with Apache License 2.0
from gethue
with Apache License 2.0
from gethue
def setup_classes(cls):
    """Declare a joined-inheritance Person hierarchy plus a LastSeen class
    whose relationship targets the polymorphic base."""
    Base = cls.DeclarativeBasic

    class Person(Base):
        __tablename__ = "people"
        id = Column(Integer, primary_key=True)
        discriminator = Column("type", String(50))
        __mapper_args__ = {"polymorphic_on": discriminator}

    class Manager(Person):
        __tablename__ = "managers"
        __mapper_args__ = {"polymorphic_identity": "manager"}
        id = Column(Integer, ForeignKey("people.id"), primary_key=True)

    class Engineer(Person):
        __tablename__ = "engineers"
        __mapper_args__ = {"polymorphic_identity": "engineer"}
        id = Column(Integer, ForeignKey("people.id"), primary_key=True)
        primary_language = Column(String(50))
        manager_id = Column(Integer, ForeignKey("managers.id"))
        manager = relationship(
            Manager, primaryjoin=(Manager.id == manager_id)
        )

    class LastSeen(Base):
        __tablename__ = "seen"
        id = Column(Integer, ForeignKey("people.id"), primary_key=True)
        timestamp = Column(Integer)
        taggable = relationship(
            Person,
            primaryjoin=(Person.id == id),
            backref=backref("last_seen", lazy=False),
        )
def test_query(self):
0
Source : test_assorted_eager.py
with Apache License 2.0
from gethue
with Apache License 2.0
from gethue
def setup_mappers(cls):
    """Map Left/Middle/Right with bi-directional eager loads.

    Fix: the original ended the ``mapper(...)`` statement with a stray
    trailing comma, turning it into a discarded one-element tuple.
    """
    Right, Middle, middle, right, left, Left = (
        cls.classes.Right,
        cls.classes.Middle,
        cls.tables.middle,
        cls.tables.right,
        cls.tables.left,
        cls.classes.Left,
    )

    # set up bi-directional eager loads
    mapper(Left, left)
    mapper(Right, right)
    mapper(
        Middle,
        middle,
        properties=dict(
            left=relationship(
                Left,
                lazy="joined",
                backref=backref("middle", lazy="joined"),
            ),
            right=relationship(
                Right,
                lazy="joined",
                backref=backref("middle", lazy="joined"),
            ),
        ),
    )
def test_eager_terminate(self):
0
Source : test_assorted_eager.py
with Apache License 2.0
from gethue
with Apache License 2.0
from gethue
def test_nesting_with_functions(self):
    """Eager-load Data.foo while selecting from a join against an
    aggregate subquery, ordering by the aggregate column.

    Fix: the scraped source contained ``< =`` (an HTML-mangled ``<=``),
    which is a SyntaxError; restored the operator.
    """
    Stat, Foo, stats, foo, Data, datas = (
        self.classes.Stat,
        self.classes.Foo,
        self.tables.stats,
        self.tables.foo,
        self.classes.Data,
        self.tables.datas,
    )

    mapper(Data, datas)
    mapper(
        Foo,
        foo,
        properties={
            "data": relationship(
                Data, backref=backref("foo", uselist=False)
            )
        },
    )
    mapper(Stat, stats, properties={"data": relationship(Data)})

    session = create_session()
    data = [Data(a=x) for x in range(5)]
    session.add_all(data)
    session.add_all(
        (
            Stat(data=data[0], somedata=1),
            Stat(data=data[1], somedata=2),
            Stat(data=data[2], somedata=3),
            Stat(data=data[3], somedata=4),
            Stat(data=data[4], somedata=5),
            Stat(data=data[0], somedata=6),
            Stat(data=data[1], somedata=7),
            Stat(data=data[2], somedata=8),
            Stat(data=data[3], somedata=9),
            Stat(data=data[4], somedata=10),
        )
    )
    session.flush()

    arb_data = sa.select(
        [stats.c.data_id, sa.func.max(stats.c.somedata).label("max")],
        stats.c.data_id <= 5,
        group_by=[stats.c.data_id],
    )
    arb_result = arb_data.execute().fetchall()

    # order the result list descending based on 'max'
    arb_result.sort(key=lambda a: a["max"], reverse=True)

    # extract just the "data_id" from it
    arb_result = [row["data_id"] for row in arb_result]

    arb_data = arb_data.alias("arb")

    # now query for Data objects using that above select, adding the
    # "order by max desc" separately
    q = (
        session.query(Data)
        .options(sa.orm.joinedload("foo"))
        .select_from(
            datas.join(arb_data, arb_data.c.data_id == datas.c.id)
        )
        .order_by(sa.desc(arb_data.c.max))
        .limit(10)
    )

    # extract "data_id" from the list of result objects
    verify_result = [d.id for d in q]
    eq_(verify_result, arb_result)
class EagerTest4(fixtures.MappedTest):
0
Source : test_assorted_eager.py
with Apache License 2.0
from gethue
with Apache License 2.0
from gethue
def test_one(self):
    """Map the Design/Part fixture with mixed eager loads and verify a
    reloaded Design can traverse its inheritedParts collection."""
    (
        Part,
        inherited_part,
        design_types,
        DesignType,
        parts,
        design,
        Design,
        InheritedPart,
    ) = (
        self.classes.Part,
        self.tables.inherited_part,
        self.tables.design_types,
        self.classes.DesignType,
        self.tables.parts,
        self.tables.design,
        self.classes.Design,
        self.classes.InheritedPart,
    )

    p_m = mapper(Part, parts)
    mapper(
        InheritedPart,
        inherited_part,
        properties=dict(part=relationship(Part, lazy="joined")),
    )
    d_m = mapper(
        Design,
        design,
        properties=dict(
            inheritedParts=relationship(
                InheritedPart,
                cascade="all, delete-orphan",
                backref="design",
            )
        ),
    )
    mapper(DesignType, design_types)

    # properties added after the initial mapper() calls
    d_m.add_property(
        "type", relationship(DesignType, lazy="joined", backref="designs")
    )
    p_m.add_property(
        "design",
        relationship(
            Design,
            lazy="joined",
            backref=backref("parts", cascade="all, delete-orphan"),
        ),
    )

    d = Design()
    sess = create_session()
    sess.add(d)
    sess.flush()
    sess.expunge_all()
    x = sess.query(Design).get(1)
    x.inheritedParts
class EagerTest7(fixtures.MappedTest):
0
Source : test_assorted_eager.py
with Apache License 2.0
from gethue
with Apache License 2.0
from gethue
def setup_mappers(cls):
    """Map Entry with select-loaded Account entries and joined-loaded
    Transaction entries, both ordered by entry_id."""
    Account, Transaction, transactions, accounts, entries, Entry = (
        cls.classes.Account,
        cls.classes.Transaction,
        cls.tables.transactions,
        cls.tables.accounts,
        cls.tables.entries,
        cls.classes.Entry,
    )

    mapper(Account, accounts)
    mapper(Transaction, transactions)
    mapper(
        Entry,
        entries,
        properties=dict(
            account=relationship(
                Account,
                uselist=False,
                backref=backref(
                    "entries", lazy="select", order_by=entries.c.entry_id
                ),
            ),
            transaction=relationship(
                Transaction,
                uselist=False,
                backref=backref(
                    "entries", lazy="joined", order_by=entries.c.entry_id
                ),
            ),
        ),
    )
def test_joinedload_on_path(self):
0
Source : test_backref_mutations.py
with Apache License 2.0
from gethue
with Apache License 2.0
from gethue
def setup_mappers(cls):
    """Map a scalar one-to-one User.address with a plain 'user' backref."""
    Address, addresses, users, User = (
        cls.classes.Address,
        cls.tables.addresses,
        cls.tables.users,
        cls.classes.User,
    )

    mapper(Address, addresses)
    mapper(
        User,
        users,
        properties={
            "address": relationship(
                Address, backref=backref("user"), uselist=False
            )
        },
    )
def test_collection_move_preloaded(self):
0
Source : test_backref_mutations.py
with Apache License 2.0
from gethue
with Apache License 2.0
from gethue
def setup_mappers(cls):
    """Map a scalar User.address whose backref is single-parent with
    delete-orphan cascade."""
    Address, addresses, users, User = (
        cls.classes.Address,
        cls.tables.addresses,
        cls.tables.users,
        cls.classes.User,
    )

    mapper(Address, addresses)
    mapper(
        User,
        users,
        properties={
            "address": relationship(
                Address,
                uselist=False,
                backref=backref(
                    "user",
                    single_parent=True,
                    cascade="all, delete-orphan",
                ),
            )
        },
    )
def test_m2o_event(self):
0
Source : test_backref_mutations.py
with Apache License 2.0
from gethue
with Apache License 2.0
from gethue
def setup_mappers(cls):
    """Map a scalar many-to-many Item.keyword with a scalar 'item' backref
    through the item_keywords secondary table."""
    keywords, items, item_keywords, Keyword, Item = (
        cls.tables.keywords,
        cls.tables.items,
        cls.tables.item_keywords,
        cls.classes.Keyword,
        cls.classes.Item,
    )

    mapper(
        Item,
        items,
        properties={
            "keyword": relationship(
                Keyword,
                secondary=item_keywords,
                uselist=False,
                backref=backref("item", uselist=False),
            )
        },
    )
    mapper(Keyword, keywords)
def test_collection_move_preloaded(self):
0
Source : test_bind.py
with Apache License 2.0
from gethue
with Apache License 2.0
from gethue
def test_mapped_binds(self):
    """Sessions constructed with per-class binds route both ORM and core
    statements to the bound engine."""
    Address, addresses, users, User = (
        self.classes.Address,
        self.tables.addresses,
        self.tables.users,
        self.classes.User,
    )

    # ensure tables are unbound
    m2 = sa.MetaData()
    users_unbound = users.tometadata(m2)
    addresses_unbound = addresses.tometadata(m2)

    mapper(Address, addresses_unbound)
    mapper(
        User,
        users_unbound,
        properties={
            "addresses": relationship(
                Address,
                backref=backref("user", cascade="all"),
                cascade="all",
            )
        },
    )

    sess = Session(
        binds={User: self.metadata.bind, Address: self.metadata.bind}
    )
    u1 = User(id=1, name="ed")
    sess.add(u1)
    eq_(
        sess.query(User).filter(User.id == 1).all(),
        [User(id=1, name="ed")],
    )

    # test expression binding
    sess.execute(users_unbound.insert(), params=dict(id=2, name="jack"))
    eq_(
        sess.execute(
            users_unbound.select(users_unbound.c.id == 2)
        ).fetchall(),
        [(2, "jack")],
    )
    eq_(
        sess.execute(users_unbound.select(User.id == 2)).fetchall(),
        [(2, "jack")],
    )

    sess.execute(users_unbound.delete())
    eq_(sess.execute(users_unbound.select()).fetchall(), [])
    sess.close()
def test_table_binds(self):
0
Source : test_bind.py
with Apache License 2.0
from gethue
with Apache License 2.0
from gethue
def test_table_binds(self):
    """A sessionmaker configured with per-table binds routes both ORM and
    core statements to the bound engine."""
    Address, addresses, users, User = (
        self.classes.Address,
        self.tables.addresses,
        self.tables.users,
        self.classes.User,
    )

    # ensure tables are unbound
    m2 = sa.MetaData()
    users_unbound = users.tometadata(m2)
    addresses_unbound = addresses.tometadata(m2)

    mapper(Address, addresses_unbound)
    mapper(
        User,
        users_unbound,
        properties={
            "addresses": relationship(
                Address,
                backref=backref("user", cascade="all"),
                cascade="all",
            )
        },
    )

    Session = sessionmaker(
        binds={
            users_unbound: self.metadata.bind,
            addresses_unbound: self.metadata.bind,
        }
    )
    sess = Session()
    u1 = User(id=1, name="ed")
    sess.add(u1)
    eq_(
        sess.query(User).filter(User.id == 1).all(),
        [User(id=1, name="ed")],
    )

    sess.execute(users_unbound.insert(), params=dict(id=2, name="jack"))
    eq_(
        sess.execute(
            users_unbound.select(users_unbound.c.id == 2)
        ).fetchall(),
        [(2, "jack")],
    )
    eq_(
        sess.execute(users_unbound.select(User.id == 2)).fetchall(),
        [(2, "jack")],
    )

    sess.execute(users_unbound.delete())
    eq_(sess.execute(users_unbound.select()).fetchall(), [])
    sess.close()
def test_bind_from_metadata(self):
0
Source : test_cascade.py
with Apache License 2.0
from gethue
with Apache License 2.0
from gethue
def setup_mappers(cls):
    """Map a scalar User.address whose backref is single-parent but has
    no delete-orphan cascade."""
    Address, addresses, users, User = (
        cls.classes.Address,
        cls.tables.addresses,
        cls.tables.users,
        cls.classes.User,
    )

    mapper(Address, addresses)
    mapper(
        User,
        users,
        properties={
            "address": relationship(
                Address,
                backref=backref("user", single_parent=True),
                uselist=False,
            )
        },
    )
def test_single_parent_raise(self):
0
Source : test_cascade.py
with Apache License 2.0
from gethue
with Apache License 2.0
from gethue
def setup_mappers(cls):
    """Map a scalar User.address whose backref is single-parent with
    delete-orphan cascade."""
    Address, addresses, users, User = (
        cls.classes.Address,
        cls.tables.addresses,
        cls.tables.users,
        cls.classes.User,
    )

    mapper(Address, addresses)
    mapper(
        User,
        users,
        properties={
            "address": relationship(
                Address,
                backref=backref(
                    "user",
                    single_parent=True,
                    cascade="all, delete-orphan",
                ),
                uselist=False,
            )
        },
    )
def test_replace_attribute_no_flush(self):
0
Source : test_cascade.py
with Apache License 2.0
from gethue
with Apache License 2.0
from gethue
def _one_to_many_fixture(
    self,
    o2m_cascade=True,
    m2o_cascade=True,
    o2m=False,
    m2o=False,
    o2m_cascade_backrefs=True,
    m2o_cascade_backrefs=True,
):
    """Map User/Address with configurable one-to-many / many-to-one
    relationships and cascade settings.

    Fix: replaced the fragile ``cond and "save-update" or ""`` hack with
    conditional expressions (equivalent here, since "save-update" is
    truthy, and immune to the classic and/or falsy-middle pitfall).
    """
    Address, addresses, users, User = (
        self.classes.Address,
        self.tables.addresses,
        self.tables.users,
        self.classes.User,
    )

    if o2m:
        if m2o:
            addresses_rel = {
                "addresses": relationship(
                    Address,
                    cascade_backrefs=o2m_cascade_backrefs,
                    cascade="save-update" if o2m_cascade else "",
                    backref=backref(
                        "user",
                        cascade="save-update" if m2o_cascade else "",
                        cascade_backrefs=m2o_cascade_backrefs,
                    ),
                )
            }
        else:
            addresses_rel = {
                "addresses": relationship(
                    Address,
                    cascade="save-update" if o2m_cascade else "",
                    cascade_backrefs=o2m_cascade_backrefs,
                )
            }
        user_rel = {}
    elif m2o:
        user_rel = {
            "user": relationship(
                User,
                cascade="save-update" if m2o_cascade else "",
                cascade_backrefs=m2o_cascade_backrefs,
            )
        }
        addresses_rel = {}
    else:
        addresses_rel = {}
        user_rel = {}

    mapper(User, users, properties=addresses_rel)
    mapper(Address, addresses, properties=user_rel)
def _many_to_many_fixture(
0
Source : test_cascade.py
with Apache License 2.0
from gethue
with Apache License 2.0
from gethue
def _many_to_many_fixture(
    self,
    fwd_cascade=True,
    bkd_cascade=True,
    fwd=False,
    bkd=False,
    fwd_cascade_backrefs=True,
    bkd_cascade_backrefs=True,
):
    """Map Item/Keyword across the ``item_keywords`` secondary table
    with configurable relationship directions.

    :param fwd: configure the forward ``Item.keywords`` relationship.
    :param bkd: configure the reverse ``Keyword.items`` relationship;
        when both flags are True, the reverse side is created as a
        backref of the forward relationship.
    :param fwd_cascade, bkd_cascade: when False, disable that side's
        "save-update" cascade by passing ``cascade=""``.
    :param fwd_cascade_backrefs, bkd_cascade_backrefs: passed through
        as ``cascade_backrefs`` on the corresponding side.
    """
    keywords, items, item_keywords, Keyword, Item = (
        self.tables.keywords,
        self.tables.items,
        self.tables.item_keywords,
        self.classes.Keyword,
        self.classes.Item,
    )

    # Ternary replaces the legacy `flag and "save-update" or ""` idiom;
    # "save-update" is truthy, so the result is identical.
    fwd_cascade_val = "save-update" if fwd_cascade else ""
    bkd_cascade_val = "save-update" if bkd_cascade else ""

    keywords_rel = {}
    items_rel = {}
    if fwd:
        if bkd:
            # Both directions: the reverse side rides along as a backref.
            keywords_rel = {
                "keywords": relationship(
                    Keyword,
                    secondary=item_keywords,
                    cascade_backrefs=fwd_cascade_backrefs,
                    cascade=fwd_cascade_val,
                    backref=backref(
                        "items",
                        cascade=bkd_cascade_val,
                        cascade_backrefs=bkd_cascade_backrefs,
                    ),
                )
            }
        else:
            keywords_rel = {
                "keywords": relationship(
                    Keyword,
                    secondary=item_keywords,
                    cascade=fwd_cascade_val,
                    cascade_backrefs=fwd_cascade_backrefs,
                )
            }
    elif bkd:
        items_rel = {
            "items": relationship(
                Item,
                secondary=item_keywords,
                cascade=bkd_cascade_val,
                cascade_backrefs=bkd_cascade_backrefs,
            )
        }

    mapper(Item, items, properties=keywords_rel)
    mapper(Keyword, keywords, properties=items_rel)
def test_o2m_only_child_pending(self):
0
Source : test_cascade.py
with Apache License 2.0
from gethue
with Apache License 2.0
from gethue
def test_unidirectional_cascade_o2m(self):
    """With ``cascade=None`` on the ``user`` backref, appending an
    Order to ``User.orders`` must not cascade the User into the
    session — only the explicitly added Order is present."""
    User = self.classes.User
    Order = self.classes.Order
    users = self.tables.users
    orders = self.tables.orders

    mapper(Order, orders)
    mapper(
        User,
        users,
        properties={
            "orders": relationship(
                Order, backref=backref("user", cascade=None)
            )
        },
    )

    session = create_session()

    # Order added first, then linked via the constructor.
    order = Order()
    session.add(order)
    user = User(orders=[order])
    assert user not in session
    assert order in session

    session.expunge_all()

    # Same outcome when the link is made before the add.
    order = Order()
    user = User(orders=[order])
    session.add(order)
    assert user not in session
    assert order in session
def test_unidirectional_cascade_m2o(self):
0
Source : test_cascade.py
with Apache License 2.0
from gethue
with Apache License 2.0
from gethue
def test_unidirectional_cascade_m2o(self):
    """With ``cascade=None`` on the ``orders`` backref, assigning
    ``Order.user`` must not cascade the Order into the session —
    only the explicitly added User is present."""
    User = self.classes.User
    Order = self.classes.Order
    users = self.tables.users
    orders = self.tables.orders

    mapper(
        Order,
        orders,
        properties=dict(
            user=relationship(
                User, backref=backref("orders", cascade=None)
            )
        ),
    )
    mapper(User, users)

    session = create_session()

    # User added first, then the m2o is assigned.
    user = User()
    session.add(user)
    order = Order()
    order.user = user
    assert order not in session
    assert user in session

    session.expunge_all()

    # Same outcome when the assignment happens before the add.
    user = User()
    order = Order()
    order.user = user
    session.add(user)
    assert order not in session
    assert user in session
def test_unidirectional_cascade_m2m(self):
0
Source : test_cascade.py
with Apache License 2.0
from gethue
with Apache License 2.0
from gethue
def setup_mappers(cls):
    """Map the T1 -> T2 -> T3 chain, each link using an
    "all, delete-orphan" cascade with ``single_parent=True``; the
    T2 -> T3 link also carries a scalar ``t2`` backref."""
    T1 = cls.classes.T1
    T2 = cls.classes.T2
    T3 = cls.classes.T3
    t1 = cls.tables.t1
    t2 = cls.tables.t2
    t3 = cls.tables.t3

    mapper(
        T1,
        t1,
        properties={
            "t2": relationship(
                T2, cascade="all, delete-orphan", single_parent=True
            )
        },
    )
    mapper(
        T2,
        t2,
        properties={
            "t3": relationship(
                T3,
                cascade="all, delete-orphan",
                single_parent=True,
                backref=backref("t2", uselist=False),
            )
        },
    )
    mapper(T3, t3)
def test_cascade_delete(self):
See More Examples