sqlalchemy.sql.select.where

Here are examples of the Python API `sqlalchemy.sql.select.where`, taken from open source projects. By voting up you can indicate which examples are most useful and appropriate.

68 Examples

3 Source : gene_db.py
with GNU General Public License v3.0
from ailabstw

def get_gene_chrom(conn, gene_id):
    """Return the chromosome the gene identified by ``gene_id`` lies on.

    Returns None when no gene row matches ``gene_id``.
    """
    stmt = select([gene]).where(gene.c.id == gene_id)
    first_row = next(conn.execute(stmt), None)
    if not first_row:
        return None
    return Gene(*first_row).chrom


def get_gene_tx(conn, gene_id):

3 Source : gene_db.py
with GNU General Public License v3.0
from ailabstw

def get_gene_tx(conn, gene_id):
    """Return every transcript belonging to the given gene."""
    stmt = select([transcript]).where(transcript.c.gene_id == gene_id)
    return [Transcript(*row) for row in conn.execute(stmt)]


def get_refseq_tx(conn, refseq):

3 Source : gene_db.py
with GNU General Public License v3.0
from ailabstw

def get_refseq_tx(conn, refseq):
    """Return every transcript whose name matches the RefSeq accession."""
    stmt = select([transcript]).where(transcript.c.name == refseq)
    return [Transcript(*row) for row in conn.execute(stmt)]


def get_rsid_chromvar(conn, name):

3 Source : gene_db.py
with GNU General Public License v3.0
from ailabstw

def get_rsid_chromvar(conn, name):
    """Return all chromosome variants for an rsID string such as 'rs123'.

    The first two characters (the 'rs' prefix) are stripped; when the
    remainder is not purely numeric an empty list is returned.
    """
    numeric_part = name[2:]
    if not numeric_part.isdigit():
        return []
    stmt = select([rsid]).where(rsid.c.name == int(numeric_part))
    return [RSID(*row) for row in conn.execute(stmt)]

3 Source : models.py
with MIT License
from AngelLiang

    def update_changed(cls, mapper, connection, target):
        """Stamp the PeriodicTaskChanged singleton row (id == 1) with "now".

        :param mapper: the Mapper which is the target of this event
        :param connection: the Connection being used
        :param target: the mapped instance being persisted
        """
        # BUG FIX: ``connection.execute(...)`` returns a result proxy, which
        # is always truthy, so the original ``if not s:`` never took the
        # insert branch.  Fetch the row itself to decide insert vs. update.
        row = connection.execute(select([PeriodicTaskChanged]).
                                 where(PeriodicTaskChanged.id == 1).limit(1)
                                 ).fetchone()
        if row is None:
            connection.execute(insert(PeriodicTaskChanged),
                               last_update=dt.datetime.now())
        else:
            connection.execute(update(PeriodicTaskChanged).
                               where(PeriodicTaskChanged.id == 1).
                               values(last_update=dt.datetime.now()))

    @classmethod

3 Source : queries.py
with MIT License
from biobdeveloper

async def insert_telegram_user(conn: SAConn, **user_data):
    """Insert a TelegramUser row and return its id.

    Returns None (implicitly) when the insert or lookup fails; the error is
    logged at debug level.
    """
    try:
        await conn.execute(insert(TelegramUser).values(**user_data))
        lookup = select([TelegramUser.id]).where(
            TelegramUser.id == user_data["id"]
        )
        cursor = await conn.execute(lookup)
        row = await cursor.fetchone()
        return row[0]
    except Exception as ex:
        log.debug(ex)


async def select_random_funny_photo(conn: SAConn):

3 Source : queries.py
with MIT License
from biobdeveloper

async def select_random_funny_photo(conn: SAConn):
    """Pick one approved FunnyPhoto at random; None when there are none."""
    try:
        stmt = select([FunnyPhoto]).where(FunnyPhoto.approved == True).as_scalar()
        cursor = await conn.execute(stmt)
        if cursor.rowcount == 0:
            return
        rows = await cursor.fetchall()
        return random.choice(rows)
    except Exception as ex:
        log.debug(ex)


async def insert_photo(conn: SAConn, filename, user_id, caption=None):

3 Source : queries.py
with MIT License
from biobdeveloper

async def select_telegram_user(conn: SAConn, user_id):
    """Fetch the TelegramUser row with the given id; None on failure."""
    try:
        stmt = select([TelegramUser]).where(TelegramUser.id == user_id)
        cursor = await conn.execute(stmt)
        return await cursor.fetchone()
    except Exception as ex:
        log.debug(ex)


async def update_telegram_user(conn, user_id, **values):

3 Source : config_sql.py
with GNU General Public License v3.0
from ddc

    def get_program_version(self):
        """Read the stored program version from the config row with id 1."""
        from sqlalchemy.sql import select
        stmt = select(self.table.columns.program_version).where(
            self.table.columns.id == 1)
        return self.database.select(stmt)


    def set_default_configs(self):

3 Source : db_ops.py
with GNU General Public License v3.0
from deepdivesec

    def get_recent_hash(self, r_id):
        """Get the hash of the most recent commit for repo ``r_id``.

        Returns the latest commit hash from the repo_info table, or the
        sentinel string 'no commit in db' when the query fails.
        """
        cnxn, enxn = self.create_conn()
        tbl = self.get_table('repo_info', enxn)
        stmt = select([tbl.c.repo_latest_commit]).where(tbl.c.repo_id == r_id)
        try:
            res = cnxn.execute(stmt)
        except Exception:
            # BUG FIX: the original bare ``except`` assigned a plain string
            # to ``res`` and then iterated and ``close()``d it, which would
            # raise AttributeError.  Return the sentinel directly instead.
            return 'no commit in db'
        result = ''
        # keep the last row's value, matching the original loop behaviour
        for row in res:
            result = row[0]
        res.close()
        return result

    def update_repo_info(self, in_list, commit_hash):

3 Source : util.py
with GNU General Public License v3.0
from dismantl

def get_detail_loc(case_number):
    """Return the detail_loc column stored for the given case number."""
    stmt = select([Case.detail_loc]).where(Case.case_number == case_number)
    with db_session() as db:
        detail_loc = db.execute(stmt).scalar()
    return detail_loc

@contextmanager

3 Source : users.py
with MIT License
from iit-technology-ambit

    def getTagPriority(self, tag):
        """Return this user's priority for ``tag``, or None when no
        user/tag junction row exists."""
        stmt = select([userTagJunction]).where(and_(
            userTagJunction.c.user_id == self.id,
            userTagJunction.c.keyword_id == tag.id))
        rows = list(db.session.execute(stmt))
        try:
            return rows[0]["priority"]
        except IndexError:
            return None

    def savePost(self, post):

3 Source : sso.py
with GNU General Public License v2.0
from makedeb

def is_account_suspended(conn, user_id):
    """Return True when the user exists and its Suspended flag is set."""
    stmt = select([Users.c.Suspended]).where(Users.c.ID == user_id)
    row = conn.execute(stmt).fetchone()
    if row is None:
        return False
    return bool(row[0])


def open_session(request, conn, user_id):

3 Source : forgot_password.py
with MIT License
from russellromney

def forgot_submit(submit, email):
    """Handle the forgot-password form: email a reset key when the user exists.

    Returns a (alert, redirect-target) pair for the UI; ``no_update`` keeps
    the current page.
    """
    # get first name
    table = user_table()
    statement = select([table.c.first]).\
                where(table.c.email==email)
    # BUG FIX: the original leaked the connection on the early-return path
    # (close() was only reached on the success path); the context manager
    # closes it on every path.
    with engine.connect() as conn:
        resp = list(conn.execute(statement))
    if len(resp) == 0:
        return failure_alert, no_update
    firstname = resp[0].first

    # if it does, send password reset and save info
    if send_password_key(email, firstname, engine):
        return success_alert, '/change'
    else:
        return failure_alert, no_update

3 Source : auth.py
with MIT License
from russellromney

def validate_password_key(email, key, engine):
    """Return True when ``key`` is a valid, unexpired reset key for ``email``.

    Valid means: the user exists, exactly one password_change row matches the
    (email, key) pair, and that row's timestamp is less than one day old.
    """
    # email exists
    if not user_exists(email, engine):
        return False

    # there is an entry matching key and email
    table = password_change_table()
    statement = select([table.c.email, table.c.password_key, table.c.timestamp]).where(
        and_(table.c.email == email, table.c.password_key == key)
    )
    with engine.connect() as conn:
        resp = list(conn.execute(statement))
        if len(resp) == 1:
            # key is only accepted while less than one day old
            if (resp[0].timestamp - (datetime.now() - timedelta(1))).days < 1:
                return True
        return False
    # NOTE: the original had an unreachable trailing ``return True`` after
    # the ``with`` block (dead code); it has been removed.

3 Source : test_lambdas.py
with MIT License
from sqlalchemy

    def test_select_whereclause(self):
        """A lambda WHERE criterion compiles identically across calls."""
        tbl = table("t1", column("q"), column("p"))

        # closure names x/y drive the generated parameter names :x_1 / :y_1
        x = 10
        y = 5

        def build():
            return select(tbl).where(lambda: and_(tbl.c.q == x, tbl.c.p == y))

        expected = "SELECT t1.q, t1.p FROM t1 WHERE t1.q = :x_1 AND t1.p = :y_1"
        # compile twice to exercise the lambda statement cache
        self.assert_compile(build(), expected)
        self.assert_compile(build(), expected)

    def test_global_tracking(self):

3 Source : test_lambdas.py
with MIT License
from sqlalchemy

    def test_boolean_constants(self):
        """A boolean closure value becomes a tracked bound parameter."""
        tbl = table("t1", column("q"), column("p"))

        def build():
            # the closure name ``xy`` drives the parameter name :xy_1
            xy = True
            return select(tbl).where(lambda: tbl.c.q == xy)

        self.assert_compile(
            build(), "SELECT t1.q, t1.p FROM t1 WHERE t1.q = :xy_1"
        )

    def test_execute_boolean(self, boolean_table_fixture, connection):

3 Source : test_lambdas.py
with MIT License
from sqlalchemy

    def test_detect_embedded_callables_two(self):
        """A callable invoked inside the lambda contributes its SQL result."""
        tbl = table("t1", column("q"), column("y"))

        def build():
            def foo():
                return tbl.c.y

            # foo() is called, so the criterion compares two columns
            return select(tbl).where(lambda: tbl.c.q == foo())

        self.assert_compile(
            build(), "SELECT t1.q, t1.y FROM t1 WHERE t1.q = t1.y"
        )

    def test_detect_embedded_callables_three(self):

3 Source : test_lambdas.py
with MIT License
from sqlalchemy

    def test_detect_embedded_callables_three(self):
        # getattr() inside the lambda resolves to the real column
        t1 = table("t1", column("q"), column("y"))

        def go():
            def foo():
                # NOTE(review): deliberately has no return statement --
                # presumably only present so a local callable exists in
                # scope during lambda analysis; confirm against the
                # sibling "callables_two" test which does return t1.c.y
                t1.c.y

            stmt = select(t1).where(lambda: t1.c.q == getattr(t1.c, "y"))
            return stmt

        self.assert_compile(
            go(), "SELECT t1.q, t1.y FROM t1 WHERE t1.q = t1.y"
        )

    def test_detect_embedded_callables_four(self):

3 Source : test_lambdas.py
with MIT License
from sqlalchemy

    def test_offline_cache_key_no_paramtrack(self):
        # With global_track_bound_values=False, the bindparam("q") inside
        # the lambda is not captured as a tracked bound value.
        def go():
            stmt = lambdas.lambda_stmt(
                lambda: select(column("x")).where(
                    column("y") == bindparam("q")
                ),
                global_track_bound_values=False,
            )

            return stmt

        s1 = go()

        # The offline cache-key string therefore takes "q" from the
        # explicitly supplied parameter dict ({"q": 5}).
        eq_(
            s1._generate_cache_key().to_offline_string({}, s1, {"q": 5}),
            "('SELECT x \\nWHERE y = :q', (5,))",
        )

    def test_offline_cache_key_paramtrack(self):

3 Source : test_lambdas.py
with MIT License
from sqlalchemy

    def test_offline_cache_key_paramtrack(self):
        # Here the closure variable ``param`` IS tracked as a bound value,
        # so its auto-generated key must be recovered from the statement.
        def go(param):
            stmt = lambdas.lambda_stmt(
                lambda: select(column("x")).where(column("y") == param),
            )

            return stmt

        s1 = go(5)

        # internal API: fetch the generated parameter key for ``param``
        param_key = s1._resolved._where_criteria[0].right.key
        eq_(
            s1._generate_cache_key().to_offline_string(
                {}, s1, {param_key: 10}
            ),
            "('SELECT x \\nWHERE y = :param_1', (10,))",
        )

    def test_stmt_lambda_w_list_of_opts(self):

3 Source : test_lambdas.py
with MIT License
from sqlalchemy

    def test_coercion_where_clause(self):
        """A bare integer is rejected as a WHERE criterion."""
        stmt = select(column("q"))
        assert_raises_message(
            exc.ArgumentError,
            "SQL expression for WHERE/HAVING role expected, got 5",
            stmt.where,
            5,
        )

    def test_propagate_attrs_full_stmt(self):

3 Source : test_lambdas.py
with MIT License
from sqlalchemy

    def test_in_parameters_one(self):
        """IN over a literal list renders as an expandable parameter."""
        stmt = select(1).where(column("q").in_(["a", "b", "c"]))
        self.assert_compile(stmt, "SELECT 1 WHERE q IN (__[POSTCOMPILE_q_1])")

        # with postcompile rendering the parameter expands to three binds
        self.assert_compile(
            stmt,
            "SELECT 1 WHERE q IN (:q_1_1, :q_1_2, :q_1_3)",
            render_postcompile=True,
            checkparams={"q_1_1": "a", "q_1_2": "b", "q_1_3": "c"},
        )

    def test_in_parameters_two(self):

3 Source : test_lambdas.py
with MIT License
from sqlalchemy

    def test_in_parameters_two(self):
        """Same as parameters_one, but with the criterion inside a lambda."""
        stmt = select(1).where(lambda: column("q").in_(["a", "b", "c"]))
        self.assert_compile(stmt, "SELECT 1 WHERE q IN (__[POSTCOMPILE_q_1])")
        self.assert_compile(
            stmt,
            "SELECT 1 WHERE q IN (:q_1_1, :q_1_2, :q_1_3)",
            render_postcompile=True,
            checkparams={"q_1_1": "a", "q_1_2": "b", "q_1_3": "c"},
        )

    def test_in_parameters_three(self):

3 Source : test_lambdas.py
with MIT License
from sqlalchemy

    def test_in_parameters_three(self):
        """The whole statement built through lambda_stmt still expands IN."""
        stmt = lambdas.lambda_stmt(
            lambda: select(1).where(column("q").in_(["a", "b", "c"]))
        )
        self.assert_compile(stmt, "SELECT 1 WHERE q IN (__[POSTCOMPILE_q_1])")
        self.assert_compile(
            stmt,
            "SELECT 1 WHERE q IN (:q_1_1, :q_1_2, :q_1_3)",
            render_postcompile=True,
            checkparams={"q_1_1": "a", "q_1_2": "b", "q_1_3": "c"},
        )

    def test_in_parameters_four(self):

3 Source : test_lambdas.py
with MIT License
from sqlalchemy

    def test_in_parameters_four(self):
        """A closure list is tracked as a named expandable parameter."""
        # the closure name ``names`` drives the parameter name :names_1
        def build(names):
            return lambdas.lambda_stmt(
                lambda: select(1).where(column("q").in_(names))
            )

        stmt = build(["a", "b", "c"])
        self.assert_compile(
            stmt, "SELECT 1 WHERE q IN (__[POSTCOMPILE_names_1])"
        )
        self.assert_compile(
            stmt,
            "SELECT 1 WHERE q IN (:names_1_1, :names_1_2, :names_1_3)",
            render_postcompile=True,
            checkparams={"names_1_1": "a", "names_1_2": "b", "names_1_3": "c"},
        )

    def test_in_parameters_five(self):

3 Source : test_lambdas.py
with MIT License
from sqlalchemy

    def test_in_columnelement(self):
        """Issue #5768: IN over ColumnElement values inside a lambda."""

        def build():
            values = [literal("a"), literal("b")]
            return select(1).where(lambda: column("q").in_(values))

        self.assert_compile(build(), "SELECT 1 WHERE q IN (:param_1, :param_2)")

        self.assert_compile(
            build(),
            "SELECT 1 WHERE q IN (:param_1, :param_2)",
            render_postcompile=True,
            checkparams={"param_1": "a", "param_2": "b"},
        )

    def test_select_columns_clause(self):

3 Source : test_lambdas.py
with MIT License
from sqlalchemy

    def test_insert_statement(self, user_address_fixture):
        # lambda_stmt composed with += produces a working INSERT
        users, addresses = user_address_fixture

        def ins(id_, name):
            # start from a lambda statement and chain .values() onto it
            stmt = lambda_stmt(lambda: users.insert())
            stmt += lambda s: s.values(id=id_, name=name)
            return stmt

        with testing.db.begin() as conn:
            conn.execute(ins(12, "foo"))

            # round-trip: the inserted row comes back via a plain select
            eq_(
                conn.execute(select(users).where(users.c.id == 12)).first(),
                (12, "foo"),
            )

    def test_update_statement(self, user_address_fixture):

3 Source : scheduler.py
with GNU General Public License v2.0
from StykMartin

    def is_expired(cls):  # pylint: disable=E0213
        """Hybrid expression: true for jobs past their retention period."""
        # We *could* rely on the caller to join Job.retention_tag and then use
        # RetentionTag.expire_in_days directly here, instead of a having this
        # correlated subquery. We have used that approach elsewhere in other
        # hybrids. It produces faster queries too. *BUT* if we did that here,
        # and then the caller accidentally forgot to do the join, this clause
        # would silently result in EVERY JOB being expired which would be
        # a huge disaster.
        expire_in_days_subquery = select([RetentionTag.expire_in_days]) \
            .where(RetentionTag.jobs).correlate(Job).label('expire_in_days')
        # expired = not deleted yet, has a positive retention window, and
        # completed longer ago than that window
        return and_(
            Job.deleted == None,
            expire_in_days_subquery > 0,
            Job.completed_n_days_ago(expire_in_days_subquery)
        )

    @classmethod

0 Source : db_ops.py
with GNU General Public License v3.0
from deepdivesec

    def display_repos(self, post_dict):
        """Query and display repo info based on filtering items in web app.

        Empty filter fields in ``post_dict`` are widened to the SQL wildcard
        '%' so they match everything.  Returns the executed result proxy and
        its row count.
        """
        # display list of repos, based on some constraint (time)
        cnxn, enxn = self.create_conn()
        r_info = self.get_table('repo_info', enxn)
        if post_dict:
            if post_dict['r_user'] == '':
                post_dict['r_user'] = '%'
            if post_dict['r_name'] == '':
                # BUG FIX: the original used '==' here (a discarded
                # comparison) instead of '=', so an empty repo-name filter
                # was never widened to the wildcard.
                post_dict['r_name'] = '%'
            if post_dict['r_cloned'] == 'Any':
                post_dict['r_cloned'] = '%'
            if post_dict['r_desc'] == '':
                post_dict['r_desc'] = '%'
            if post_dict['r_checked'] == '':
                post_dict['r_checked'] = '%'

            r_u = post_dict['r_user']
            r_n = post_dict['r_name']
            r_c = post_dict['r_cloned']
            r_d = post_dict['r_desc']
            r_chk = post_dict['r_checked']

            # wildcard "cloned" filter uses LIKE; a concrete value uses
            # equality so e.g. 'cloned' does not also match 'not_cloned'
            if r_c == '%':
                stmt = select(
                    [r_info.c.repo_id, r_info.c.repo_owner_id, r_info.c.repo_user,
                     r_info.c.repo_name, r_info.c.repo_size, r_info.c.repo_cloned,
                     r_info.c.repo_description, r_info.c.repo_last_checked,
                     r_info.c.repo_latest_commit]).where(
                    and_(
                        r_info.c.repo_user.like(f'%{r_u}%'),
                        r_info.c.repo_name.like(f'%{r_n}%'),
                        r_info.c.repo_cloned.like(f'%{r_c}%'),
                        r_info.c.repo_description.like(f'%{r_d}%'),
                        # r_info.c.repo_last_checked.like(f'%{r_chk}')
                    )
                ).order_by(desc(r_info.c.repo_last_checked)).limit(
                    post_dict['num_res'])
            else:
                stmt = select(
                    [r_info.c.repo_id, r_info.c.repo_owner_id, r_info.c.repo_user,
                     r_info.c.repo_name, r_info.c.repo_size, r_info.c.repo_cloned,
                     r_info.c.repo_description, r_info.c.repo_last_checked,
                     r_info.c.repo_latest_commit]).where(
                    and_(
                        r_info.c.repo_user.like(f'%{r_u}%'),
                        r_info.c.repo_name.like(f'%{r_n}%'),
                        r_info.c.repo_cloned == r_c,
                        r_info.c.repo_description.like(f'%{r_d}%'),
                        # r_info.c.repo_last_checked.like(f'%{r_chk}')
                    )
                ).order_by(desc(r_info.c.repo_last_checked)).limit(
                    post_dict['num_res'])

        if not post_dict:
            # no filters supplied: default to the 100 most recently checked
            def_results = {'num_res': 100}
            post_dict.update(def_results)
            stmt = select(
                [r_info.c.repo_id, r_info.c.repo_owner_id, r_info.c.repo_user,
                 r_info.c.repo_name, r_info.c.repo_size, r_info.c.repo_cloned,
                 r_info.c.repo_description, r_info.c.repo_last_checked,
                 r_info.c.repo_latest_commit]).order_by(desc(r_info.c.repo_last_checked)).limit(post_dict['num_res'])

        res = cnxn.execute(stmt)
        nr = res.rowcount

        return res, nr

    def get_count(self, table):

0 Source : db_ops.py
with GNU General Public License v3.0
from deepdivesec

    def display_repos_api(self, post_dict={}):
        """API Query to get repo info.

        Returns (result proxy, page number, row count).  NOTE(review):
        ``post_dict={}`` is a mutable default argument; it is not mutated in
        this method, but consider ``post_dict=None`` — confirm with callers.
        """
        # display list of repos, based on some constraint (time)
        cnxn, enxn = self.create_conn()
        r_info = self.get_table('repo_info', enxn)
        if post_dict:
            # handle filters and bring back appropriate results
            # Post Dict Example:
            # {"num_res":33, "page":2, "repo_user": "johndoe", "repo_name": "secretrepo",
            # "repo_cloned": ["cloned", "not_cloned"], "repo_desc": "config"}
            page = post_dict['page']
            # pages are 100 rows wide; NOTE(review): ``offset`` is left
            # undefined when page < 1 — confirm the API validates ``page``
            if page > 1:
                offset = (page - 1) * 100
            elif page == 1:
                offset = 0

            # concrete "cloned" value uses equality; wildcard uses LIKE
            if post_dict["repo_cloned"] != '%':
                stmt = select(
                    [r_info.c.repo_id, r_info.c.repo_owner_id, r_info.c.repo_user,
                     r_info.c.repo_name, r_info.c.repo_full_name, r_info.c.repo_updated_ts,
                     r_info.c.repo_size, r_info.c.repo_cloned,
                     r_info.c.repo_description, r_info.c.repo_last_checked,
                     r_info.c.repo_latest_commit]).where(
                    and_(
                        r_info.c.repo_user.like(f'%{post_dict["repo_user"]}%'),
                        r_info.c.repo_name.like(f'%{post_dict["repo_name"]}%'),
                        r_info.c.repo_cloned == post_dict["repo_cloned"],
                        r_info.c.repo_description.like(f'%{post_dict["repo_desc"]}%'),
                        # r_info.c.repo_last_checked.like(f'%{r_chk}')
                    )
                ).order_by(desc(r_info.c.repo_last_checked)).limit(
                    post_dict['num_res']).offset(offset)
            else:
                stmt = select(
                    [r_info.c.repo_id, r_info.c.repo_owner_id, r_info.c.repo_user,
                     r_info.c.repo_name, r_info.c.repo_full_name, r_info.c.repo_updated_ts,
                     r_info.c.repo_size, r_info.c.repo_cloned,
                     r_info.c.repo_description, r_info.c.repo_last_checked,
                     r_info.c.repo_latest_commit]).where(
                    and_(
                        r_info.c.repo_user.like(f'%{post_dict["repo_user"]}%'),
                        r_info.c.repo_name.like(f'%{post_dict["repo_name"]}%'),
                        r_info.c.repo_cloned.like(f'%{post_dict["repo_cloned"]}%'),
                        r_info.c.repo_description.like(f'%{post_dict["repo_desc"]}%'),
                        # r_info.c.repo_last_checked.like(f'%{r_chk}')
                    )
                ).order_by(desc(r_info.c.repo_last_checked)).limit(
                    post_dict['num_res']).offset(offset)

        if not post_dict:
            # no filters: first page of the 100 most recently checked repos
            page = 1
            stmt = select(
                [r_info.c.repo_id, r_info.c.repo_owner_id, r_info.c.repo_user,
                 r_info.c.repo_name, r_info.c.repo_full_name, r_info.c.repo_updated_ts,
                 r_info.c.repo_size, r_info.c.repo_cloned,
                 r_info.c.repo_description, r_info.c.repo_last_checked,
                 r_info.c.repo_latest_commit]).order_by(desc(r_info.c.repo_last_checked)).limit(100)

        res = cnxn.execute(stmt)
        nq = res.rowcount #get the count of results for search
        return res, page, nq

    def display_match_results(self, num_results, post_dict={}):

0 Source : db_ops.py
with GNU General Public License v3.0
from deepdivesec

    def display_results_api(self, post_dict={}):
        """API Query to get results info.

        Returns (result proxy, page number, total match count).
        NOTE(review): ``post_dict={}`` is a mutable default argument; it is
        not mutated here, but ``post_dict=None`` would be safer.
        """
        cnxn, enxn = self.create_conn()
        r_res = self.get_table('repo_search_results', enxn)
        if post_dict:
            print(post_dict)
            # Post Dict Example:
            # {"num_res":33, "page": 2, "match_type": "Password", "match_string": "secret",
            # "match_location": "config", "match_line": "jdbc", "match_update_type": ["+","-"],
            # "match_author": "John Doe", "match_message": "config"}
            page = post_dict['page']
            # pages are 100 rows wide; NOTE(review): ``offset`` is left
            # undefined when page < 1 — confirm the API validates ``page``
            if page > 1:
                offset = (page - 1) * 100
            elif page == 1:
                offset = 0
            # count_stmt is the same filtered query WITHOUT limit/offset;
            # the chained assignment to res_stmt here is immediately
            # overwritten below, so only count_stmt keeps this statement
            count_stmt = res_stmt = select(
                [r_res.c.match_master_id, r_res.c.match_repo_id, r_res.c.match_inserted, r_res.c.match_type,
                 r_res.c.match_string, r_res.c.match_location, r_res.c.match_line, r_res.c.match_item_entropy,
                 r_res.c.match_line_hash, r_res.c.match_update_type, r_res.c.match_commit_hash,
                 r_res.c.match_commit_author,
                 r_res.c.match_commit_time, r_res.c.match_commit_message]).where(
                and_(
                    r_res.c.match_type.like(f'%{post_dict["match_type"]}%'),
                    r_res.c.match_string.like(f'%{post_dict["match_string"]}%'),
                    r_res.c.match_location.like(f'%{post_dict["match_location"]}%'),
                    r_res.c.match_line.like(f'%{post_dict["match_line"]}%'),
                    r_res.c.match_update_type.like(f'%{post_dict["match_update_type"]}%'),
                    r_res.c.match_commit_author.like(f'%{post_dict["match_author"]}%'),
                    r_res.c.match_commit_message.like(f'%{post_dict["match_message"]}%')
                )
            ).order_by(desc(r_res.c.match_master_id))

            # the paged query: same filters plus limit/offset
            res_stmt = select(
                [r_res.c.match_master_id, r_res.c.match_repo_id, r_res.c.match_inserted, r_res.c.match_type,
                 r_res.c.match_string, r_res.c.match_location, r_res.c.match_line, r_res.c.match_item_entropy,
                 r_res.c.match_line_hash, r_res.c.match_update_type, r_res.c.match_commit_hash,
                 r_res.c.match_commit_author,
                 r_res.c.match_commit_time, r_res.c.match_commit_message]).where(
                and_(
                    r_res.c.match_type.like(f'%{post_dict["match_type"]}%'),
                    r_res.c.match_string.like(f'%{post_dict["match_string"]}%'),
                    r_res.c.match_location.like(f'%{post_dict["match_location"]}%'),
                    r_res.c.match_line.like(f'%{post_dict["match_line"]}%'),
                    r_res.c.match_update_type.like(f'%{post_dict["match_update_type"]}%'),
                    r_res.c.match_commit_author.like(f'%{post_dict["match_author"]}%'),
                    r_res.c.match_commit_message.like(f'%{post_dict["match_message"]}%')
                )
            ).order_by(desc(r_res.c.match_master_id)).limit(
                post_dict['num_res']).offset(offset)
            cnt = cnxn.execute(count_stmt)
            nq = cnt.rowcount  # get the count of results for search

        if not post_dict:
            # no filters: newest 100 matches, count of the whole table
            page = 1
            res_stmt = select(
                [r_res.c.match_master_id, r_res.c.match_repo_id, r_res.c.match_inserted, r_res.c.match_type,
                 r_res.c.match_string, r_res.c.match_location, r_res.c.match_line, r_res.c.match_item_entropy,
                 r_res.c.match_line_hash, r_res.c.match_update_type, r_res.c.match_commit_hash, r_res.c.match_commit_author,
                 r_res.c.match_commit_time, r_res.c.match_commit_message]).order_by(desc(r_res.c.match_master_id)).limit(100)
            nq = self.get_count(r_res)

        res = cnxn.execute(res_stmt)
        return res, page, nq


    def truncate(self, in_line, max_len):

0 Source : util.py
with GNU General Public License v3.0
from dismantl

def delete_latest_scrape(db, case_number):
    """Delete the most recent scrape version of a case.

    Removes the newest S3 object version and its ScrapeVersion row, then
    rewinds Case.last_scrape to the previous version's timestamp (or None
    when no versions remain).
    """
    # versions appear newest-first here; NOTE(review): raises IndexError
    # when the case has no versions at all — confirm callers guarantee one
    versions = [_ for _, in db.query(ScrapeVersion.s3_version_id)\
        .filter(ScrapeVersion.case_number == case_number)]
    last_version_id = versions[0]
    # delete the S3 object version first, then its DB row
    last_version_obj = config.s3.ObjectVersion(
        config.CASE_DETAILS_BUCKET,
        case_number,
        last_version_id
    )
    last_version_obj.delete()
    db.execute(
        ScrapeVersion.__table__.delete()\
            .where(
                and_(
                    ScrapeVersion.case_number == case_number,
                    ScrapeVersion.s3_version_id == last_version_id
                )
            )
    )
    if len(versions) > 1:
        # set last_scrape to timestamp of previous version
        db.execute(
            Case.__table__.update()\
                .where(Case.case_number == case_number)\
                .values(
                    last_scrape = select([Scrape.timestamp])\
                        .where(
                            and_(
                                Scrape.case_number == case_number,
                                Scrape.s3_version_id == versions[1]
                            )
                        ).as_scalar()
                )
        )
    elif len(versions) == 1:
        # that was the only version: no scrape remains for this case
        db.execute(
            Case.__table__.update()\
                .where(Case.case_number == case_number)\
                .values(last_scrape=None)
        )

def has_scrape(case_number):

0 Source : test_cte.py
with Apache License 2.0
from gethue

    def test_recursive(self):
        # classic recursive-CTE parts explosion: seed select UNIONed with a
        # select that joins back to the CTE's own alias
        parts = table(
            "parts", column("part"), column("sub_part"), column("quantity")
        )

        # seed: rows for "our part", marked recursive
        included_parts = (
            select([parts.c.sub_part, parts.c.part, parts.c.quantity])
            .where(parts.c.part == "our part")
            .cte(recursive=True)
        )

        incl_alias = included_parts.alias()
        parts_alias = parts.alias()
        # recursive step: parts whose parent is already in the CTE
        included_parts = included_parts.union(
            select(
                [
                    parts_alias.c.sub_part,
                    parts_alias.c.part,
                    parts_alias.c.quantity,
                ]
            ).where(parts_alias.c.part == incl_alias.c.sub_part)
        )

        # outer query: aggregate quantities per sub_part over the CTE
        s = (
            select(
                [
                    included_parts.c.sub_part,
                    func.sum(included_parts.c.quantity).label(
                        "total_quantity"
                    ),
                ]
            )
            .select_from(
                included_parts.join(
                    parts, included_parts.c.part == parts.c.part
                )
            )
            .group_by(included_parts.c.sub_part)
        )
        self.assert_compile(
            s,
            "WITH RECURSIVE anon_1(sub_part, part, quantity) "
            "AS (SELECT parts.sub_part AS sub_part, parts.part "
            "AS part, parts.quantity AS quantity FROM parts "
            "WHERE parts.part = :part_1 UNION "
            "SELECT parts_1.sub_part AS sub_part, "
            "parts_1.part AS part, parts_1.quantity "
            "AS quantity FROM parts AS parts_1, anon_1 AS anon_2 "
            "WHERE parts_1.part = anon_2.sub_part) "
            "SELECT anon_1.sub_part, "
            "sum(anon_1.quantity) AS total_quantity FROM anon_1 "
            "JOIN parts ON anon_1.part = parts.part "
            "GROUP BY anon_1.sub_part",
        )

        # quick check that the "WITH RECURSIVE" varies per
        # dialect (mssql omits the RECURSIVE keyword)
        self.assert_compile(
            s,
            "WITH anon_1(sub_part, part, quantity) "
            "AS (SELECT parts.sub_part AS sub_part, parts.part "
            "AS part, parts.quantity AS quantity FROM parts "
            "WHERE parts.part = :part_1 UNION "
            "SELECT parts_1.sub_part AS sub_part, "
            "parts_1.part AS part, parts_1.quantity "
            "AS quantity FROM parts AS parts_1, anon_1 AS anon_2 "
            "WHERE parts_1.part = anon_2.sub_part) "
            "SELECT anon_1.sub_part, "
            "sum(anon_1.quantity) AS total_quantity FROM anon_1 "
            "JOIN parts ON anon_1.part = parts.part "
            "GROUP BY anon_1.sub_part",
            dialect=mssql.dialect(),
        )

    def test_recursive_inner_cte_unioned_to_alias(self):

0 Source : test_cte.py
with Apache License 2.0
from gethue

    def test_recursive_inner_cte_unioned_to_alias(self):
        # like test_recursive, but the union is built from a NAMED alias
        # ("incl") of the CTE, so the rendered CTE takes that name
        parts = table(
            "parts", column("part"), column("sub_part"), column("quantity")
        )

        included_parts = (
            select([parts.c.sub_part, parts.c.part, parts.c.quantity])
            .where(parts.c.part == "our part")
            .cte(recursive=True)
        )

        incl_alias = included_parts.alias("incl")
        parts_alias = parts.alias()
        # union on the alias rather than the CTE itself
        included_parts = incl_alias.union(
            select(
                [
                    parts_alias.c.sub_part,
                    parts_alias.c.part,
                    parts_alias.c.quantity,
                ]
            ).where(parts_alias.c.part == incl_alias.c.sub_part)
        )

        s = (
            select(
                [
                    included_parts.c.sub_part,
                    func.sum(included_parts.c.quantity).label(
                        "total_quantity"
                    ),
                ]
            )
            .select_from(
                included_parts.join(
                    parts, included_parts.c.part == parts.c.part
                )
            )
            .group_by(included_parts.c.sub_part)
        )
        # expected SQL refers to the CTE by its alias name "incl"
        self.assert_compile(
            s,
            "WITH RECURSIVE incl(sub_part, part, quantity) "
            "AS (SELECT parts.sub_part AS sub_part, parts.part "
            "AS part, parts.quantity AS quantity FROM parts "
            "WHERE parts.part = :part_1 UNION "
            "SELECT parts_1.sub_part AS sub_part, "
            "parts_1.part AS part, parts_1.quantity "
            "AS quantity FROM parts AS parts_1, incl "
            "WHERE parts_1.part = incl.sub_part) "
            "SELECT incl.sub_part, "
            "sum(incl.quantity) AS total_quantity FROM incl "
            "JOIN parts ON incl.part = parts.part "
            "GROUP BY incl.sub_part",
        )

    def test_recursive_union_no_alias_one(self):

0 Source : test_cte.py
with Apache License 2.0
from gethue

    def test_upsert_from_select(self):
        """Compile an "upsert": an UPDATE ... RETURNING wrapped in a CTE,
        consumed by an INSERT ... SELECT of literals that only fires when
        the UPDATE matched nothing (NOT EXISTS guard on the CTE)."""
        orders = table(
            "orders",
            column("region"),
            column("amount"),
            column("product"),
            column("quantity"),
        )

        # the UPDATE half of the upsert; RETURNING makes the CTE selectable
        upsert = (
            orders.update()
            .where(orders.c.region == "Region1")
            .values(amount=1.0, product="Product1", quantity=1)
            .returning(*(orders.c._all_columns))
            .cte("upsert")
        )

        # the INSERT half: a literal row, guarded so it is only inserted
        # when the UPDATE returned no rows
        insert = orders.insert().from_select(
            orders.c.keys(),
            select(
                [
                    literal("Region1"),
                    literal(1.0),
                    literal("Product1"),
                    literal(1),
                ]
            ).where(~exists(upsert.select())),
        )

        self.assert_compile(
            insert,
            "WITH upsert AS (UPDATE orders SET amount=:amount, "
            "product=:product, quantity=:quantity "
            "WHERE orders.region = :region_1 "
            "RETURNING orders.region, orders.amount, "
            "orders.product, orders.quantity) "
            "INSERT INTO orders (region, amount, product, quantity) "
            "SELECT :param_1 AS anon_1, :param_2 AS anon_2, "
            ":param_3 AS anon_3, :param_4 AS anon_4 WHERE NOT (EXISTS "
            "(SELECT upsert.region, upsert.amount, upsert.product, "
            "upsert.quantity FROM upsert))",
        )

        # despite the leading UPDATE CTE, the statement compiles as an INSERT
        eq_(insert.compile().isinsert, True)

    def test_anon_update_cte(self):

0 Source : test_cte.py
with Apache License 2.0
from gethue

    def test_pg_example_three(self):
        """Compile a recursive part-explosion CTE feeding a DELETE with
        RETURNING, modeled on the PostgreSQL documentation's WITH examples."""

        parts = table("parts", column("part"), column("sub_part"))

        # anchor term of the recursive CTE
        included_parts = (
            select([parts.c.sub_part, parts.c.part])
            .where(parts.c.part == "our part")
            .cte("included_parts", recursive=True)
        )

        pr = included_parts.alias("pr")
        p = parts.alias("p")
        # recursive term: walk down the sub-part hierarchy
        included_parts = included_parts.union_all(
            select([p.c.sub_part, p.c.part]).where(p.c.part == pr.c.sub_part)
        )
        stmt = (
            parts.delete()
            .where(parts.c.part.in_(select([included_parts.c.part])))
            .returning(parts.c.part)
        )

        # the outer RETURNING is a bonus over what PG's docs have
        self.assert_compile(
            stmt,
            "WITH RECURSIVE included_parts(sub_part, part) AS "
            "(SELECT parts.sub_part AS sub_part, parts.part AS part "
            "FROM parts "
            "WHERE parts.part = :part_1 "
            "UNION ALL SELECT p.sub_part AS sub_part, p.part AS part "
            "FROM parts AS p, included_parts AS pr "
            "WHERE p.part = pr.sub_part) "
            "DELETE FROM parts WHERE parts.part IN "
            "(SELECT included_parts.part FROM included_parts) "
            "RETURNING parts.part",
        )

    def test_insert_in_the_cte(self):

0 Source : test_defaults.py
with Apache License 2.0
from gethue

    def test_a_in_b(self, a, b, connection):
        """Round-trip a CTE built from statement kind ``a`` (delete /
        insert / update / select) through a consuming statement of kind
        ``b`` (select / insert / delete / update).

        ``a`` and ``b`` are presumably supplied by a combinations decorator
        outside this view -- TODO confirm.  Every pairing is expected to
        yield the row (1, 1), with table ``q`` left in ``expected`` state.
        """
        q = self.tables.q
        p = self.tables.p

        conn = connection
        # build the CTE "c" and record the (x, y) state q should end in
        if a == "delete":
            conn.execute(q.insert().values(y=10, z=1))
            cte = q.delete().where(q.c.z == 1).returning(q.c.z).cte("c")
            expected = None
        elif a == "insert":
            cte = q.insert().values(z=1, y=10).returning(q.c.z).cte("c")
            expected = (2, 10)
        elif a == "update":
            conn.execute(q.insert().values(x=5, y=10, z=1))
            cte = (
                q.update()
                .where(q.c.z == 1)
                .values(x=7)
                .returning(q.c.z)
                .cte("c")
            )
            expected = (7, 5)
        elif a == "select":
            conn.execute(q.insert().values(x=5, y=10, z=1))
            cte = sa.select([q.c.z]).cte("c")
            expected = (5, 10)

        # build the consuming statement that references the CTE
        if b == "select":
            conn.execute(p.insert().values(s=1))
            stmt = select([p.c.s, cte.c.z]).where(p.c.s == cte.c.z)
        elif b == "insert":
            sel = select([1, cte.c.z])
            stmt = (
                p.insert().from_select(["s", "t"], sel).returning(p.c.s, p.c.t)
            )
        elif b == "delete":
            stmt = p.insert().values(s=1, t=cte.c.z).returning(p.c.s, cte.c.z)
        elif b == "update":
            conn.execute(p.insert().values(s=1))
            stmt = (
                p.update()
                .values(t=5)
                .where(p.c.s == cte.c.z)
                .returning(p.c.u, cte.c.z)
            )
        eq_(list(conn.execute(stmt)), [(1, 1)])

        # verify the CTE's side effects (or lack thereof) on q
        eq_(conn.execute(select([q.c.x, q.c.y])).first(), expected)


class PKDefaultTest(fixtures.TablesTest):

0 Source : test_lateral.py
with Apache License 2.0
from gethue

    def test_plain_join(self):
        """Compile a LATERAL subquery joined to a table, covering three
        cases: the bare fragment, implicit correlation once the join is
        placed in an enclosing SELECT, and explicit .correlate()."""
        table1 = self.tables.people
        table2 = self.tables.books
        subq = select([table2.c.book_id]).where(
            table2.c.book_owner_id == table1.c.people_id
        )

        # FROM books, people?  isn't this wrong?  No!  Because
        # this is only a fragment, books isn't in any other FROM clause
        self.assert_compile(
            join(table1, lateral(subq, name="alias"), true()),
            "people JOIN LATERAL (SELECT books.book_id AS book_id "
            "FROM books, people WHERE books.book_owner_id = people.people_id) "
            "AS alias ON true",
        )

        # put it in correct context, implicit correlation works fine
        self.assert_compile(
            select([table1]).select_from(
                join(table1, lateral(subq, name="alias"), true())
            ),
            "SELECT people.people_id, people.age, people.name "
            "FROM people JOIN LATERAL (SELECT books.book_id AS book_id "
            "FROM books WHERE books.book_owner_id = people.people_id) "
            "AS alias ON true",
        )

        # explicit correlation
        subq = subq.correlate(table1)
        self.assert_compile(
            select([table1]).select_from(
                join(table1, lateral(subq, name="alias"), true())
            ),
            "SELECT people.people_id, people.age, people.name "
            "FROM people JOIN LATERAL (SELECT books.book_id AS book_id "
            "FROM books WHERE books.book_owner_id = people.people_id) "
            "AS alias ON true",
        )

    def test_join_lateral_w_select_subquery(self):

0 Source : sso.py
with GNU General Public License v2.0
from makedeb

async def authenticate(
    request: Request, redirect: str = None, conn=Depends(aurweb.db.connect)
):
    """
    Receive an OpenID Connect ID token, validate it, then process it to create
    a new AUR session.

    :param request: incoming HTTP request; its client IP is checked against
        the ban list and the session cookies are set on the response
    :param redirect: optional post-login destination, honored only when it
        is an AUR-internal URL (checked via is_aur_url)
    :param conn: database connection injected by FastAPI's Depends
    """
    # Refuse banned client IPs before doing any OAuth work.
    if is_ip_banned(conn, request.client.host):
        _ = get_translator_for_request(request)
        raise HTTPException(
            status_code=HTTPStatus.FORBIDDEN,
            detail=_(
                "The login form is currently disabled for your IP address, "
                "probably due to sustained spam attacks. Sorry for the "
                "inconvenience."
            ),
        )

    try:
        token = await oauth.sso.authorize_access_token(request)
        user = await oauth.sso.parse_id_token(request, token)
    except OAuthError:
        # Here, most OAuth errors should be caused by forged or expired tokens.
        # Let’s give attackers as little information as possible.
        _ = get_translator_for_request(request)
        raise HTTPException(
            status_code=HTTPStatus.BAD_REQUEST,
            detail=_("Bad OAuth token. Please retry logging in from the start."),
        )

    sub = user.get("sub")  # this is the SSO account ID in JWT terminology
    if not sub:
        _ = get_translator_for_request(request)
        raise HTTPException(
            status_code=HTTPStatus.BAD_REQUEST,
            detail=_("JWT is missing its `sub` field."),
        )

    # Map the SSO subject to local AUR account(s); exactly one is expected.
    aur_accounts = conn.execute(
        select([Users.c.ID]).where(Users.c.SSOAccountID == sub)
    ).fetchall()
    if not aur_accounts:
        return "Sorry, we don’t seem to know you Sir " + sub
    elif len(aur_accounts) == 1:
        sid = open_session(request, conn, aur_accounts[0][Users.c.ID])
        response = RedirectResponse(
            redirect if redirect and is_aur_url(redirect) else "/"
        )
        # Only mark cookies Secure when plain-HTTP logins are disabled.
        secure_cookies = aurweb.config.getboolean("options", "disable_http_login")
        response.set_cookie(
            key="AURSID", value=sid, httponly=True, secure=secure_cookies
        )
        if "id_token" in token:
            # We save the id_token for the SSO logout. It’s not too important
            # though, so if we can’t find it, we can live without it.
            response.set_cookie(
                key="SSO_ID_TOKEN",
                value=token["id_token"],
                path="/sso/",
                httponly=True,
                secure=secure_cookies,
            )
        return util.add_samesite_fields(response, "strict")
    else:
        # We’ve got a severe integrity violation.
        raise Exception("Multiple accounts found for SSO account " + sub)


@router.get("/sso/logout")

0 Source : gi.py
with MIT License
from opennem

def facility_matcher(records: List[AEMOGIRecord]) -> None:
    """Match AEMO GI records to facilities by DUID and sync their status.

    For every record that carries a DUID, look up the Facility with that
    code; on a hit, copy the record's status_id onto the facility.  All
    updates are committed in one transaction at the end.
    """
    with SessionLocal() as session:
        for record in records:
            # records without a DUID cannot be matched at all
            if not record.duid:
                continue

            match_row = session.execute(
                select(Facility).where(Facility.code == record.duid)
            ).one_or_none()

            if not match_row:
                logger.info(f"MISS: {record.duid} {record.name}")
                continue

            facility: Facility = match_row[0]
            logger.info(
                f"HIT {record.duid} {record.name} - currently {facility.status_id} change to => {record.status_id}"
            )

            # carry the GI status over onto the stored facility
            facility.status_id = record.status_id
            session.add(facility)

        session.commit()


# debug entrypoint
if __name__ == "__main__":

0 Source : auth.py
with MIT License
from russellromney

def send_password_key(email, firstname, engine):
    """Email a 6-digit password-reset key to *email* via Mailjet.

    Steps:
      1. ensure the email belongs to an existing user
      2. generate a random 6-digit key
      3. send the key by email through the Mailjet API
      4. store the key in the password_change table
      5. scramble the user's current password to a random string

    :param email: address of the account requesting a reset
    :param firstname: name used in the greeting of the email body
    :param engine: SQLAlchemy engine used for all database access
    :return: True if every step succeeded, False if any step failed
    """

    # make sure email exists
    if not user_exists(email, engine):
        return False

    # generate password key
    key = "".join(random.choice("1234567890") for _ in range(6))

    # look up the user's first name for the email headers
    table = user_table()
    statement = select([table.c.first]).where(table.c.email == email)
    with engine.connect() as conn:
        resp = list(conn.execute(statement))
        if not resp:
            return False
        first = resp[0].first

    # send password key via email
    try:
        mailjet = Client(auth=(MAILJET_API_KEY, MAILJET_API_SECRET), version="v3.1")
        data = {
            "Messages": [
                {
                    "From": {"Email": FROM_EMAIL, "Name": "My App"},
                    "To": [{"Email": email, "Name": first}],
                    "Subject": "Greetings from Mailjet.",
                    "TextPart": "My App password reset code",
                    # NOTE(review): markup restored from a garbled copy of
                    # what was evidently "<p>...</p>" HTML -- confirm against
                    # the upstream template.  Also note the body greets with
                    # the *firstname* argument while the To-header uses the
                    # DB-sourced *first*; presumably intentional -- verify.
                    "HTMLPart": "<p>Dear {},</p><p>Your My App password reset code is: <strong>{}</strong></p>".format(
                        firstname, key
                    ),
                    "CustomID": "AppGettingStartedTest",
                }
            ]
        }
        result = mailjet.send.create(data=data)
        # BUG FIX: status_code is an int; the original compared it against
        # the string "200", which never matches, so the warning was printed
        # on every successful send.
        if result.status_code != 200:
            print("status not 200")
    except Exception:
        # BUG FIX: traceback.print_exc() takes no exception argument (its
        # first parameter is a frame limit); the original passed the
        # exception object.
        traceback.print_exc()
        return False

    # store that key in the password_change table
    table = password_change_table()
    values = dict(email=email, password_key=key, timestamp=datetime.now())
    statement = table.insert().values(**values)
    try:
        with engine.connect() as conn:
            conn.execute(statement)
    except Exception:
        return False

    # scramble the current password to a random string so the old
    # credential cannot be used while the reset is pending
    random_password = "".join(random.choice("1234567890") for _ in range(15))
    res = change_password(email, random_password, engine)
    # report whether the final step (and hence the whole flow) succeeded
    return bool(res)


def validate_password_key(email, key, engine):

0 Source : test_cte.py
with MIT License
from sqlalchemy

    def test_recursive(self):
        """Compile the canonical recursive-CTE part explosion using the
        2.0-style positional select(); the CTE and its self-alias get
        anonymous names (anon_1, anon_2).  A second compile against the
        MSSQL dialect checks that the RECURSIVE keyword is per-dialect."""
        parts = table(
            "parts", column("part"), column("sub_part"), column("quantity")
        )

        # anchor (non-recursive) term of the recursive CTE
        included_parts = (
            select(parts.c.sub_part, parts.c.part, parts.c.quantity)
            .where(parts.c.part == "our part")
            .cte(recursive=True)
        )

        incl_alias = included_parts.alias()
        parts_alias = parts.alias()
        # recursive term unioned onto the CTE, referencing its own alias
        included_parts = included_parts.union(
            select(
                parts_alias.c.sub_part,
                parts_alias.c.part,
                parts_alias.c.quantity,
            ).where(parts_alias.c.part == incl_alias.c.sub_part)
        )

        # outer statement: aggregate over the CTE joined back to "parts"
        s = (
            select(
                included_parts.c.sub_part,
                func.sum(included_parts.c.quantity).label("total_quantity"),
            )
            .select_from(
                included_parts.join(
                    parts, included_parts.c.part == parts.c.part
                )
            )
            .group_by(included_parts.c.sub_part)
        )
        self.assert_compile(
            s,
            "WITH RECURSIVE anon_1(sub_part, part, quantity) "
            "AS (SELECT parts.sub_part AS sub_part, parts.part "
            "AS part, parts.quantity AS quantity FROM parts "
            "WHERE parts.part = :part_1 UNION "
            "SELECT parts_1.sub_part AS sub_part, "
            "parts_1.part AS part, parts_1.quantity "
            "AS quantity FROM parts AS parts_1, anon_1 AS anon_2 "
            "WHERE parts_1.part = anon_2.sub_part) "
            "SELECT anon_1.sub_part, "
            "sum(anon_1.quantity) AS total_quantity FROM anon_1 "
            "JOIN parts ON anon_1.part = parts.part "
            "GROUP BY anon_1.sub_part",
        )

        # quick check that the "WITH RECURSIVE" varies per
        # dialect
        self.assert_compile(
            s,
            "WITH anon_1(sub_part, part, quantity) "
            "AS (SELECT parts.sub_part AS sub_part, parts.part "
            "AS part, parts.quantity AS quantity FROM parts "
            "WHERE parts.part = :part_1 UNION "
            "SELECT parts_1.sub_part AS sub_part, "
            "parts_1.part AS part, parts_1.quantity "
            "AS quantity FROM parts AS parts_1, anon_1 AS anon_2 "
            "WHERE parts_1.part = anon_2.sub_part) "
            "SELECT anon_1.sub_part, "
            "sum(anon_1.quantity) AS total_quantity FROM anon_1 "
            "JOIN parts ON anon_1.part = parts.part "
            "GROUP BY anon_1.sub_part",
            dialect=mssql.dialect(),
        )

    def test_recursive_w_anon_labels(self):

0 Source : test_cte.py
with MIT License
from sqlalchemy

    def test_recursive_w_anon_labels(self):
        """Like the plain recursive test, but the anchor columns use
        .label(None) so they render as anonymous labels (sub_part_1,
        part_1); the CTE's columns are then addressed positionally via
        c[0] / c[1]."""
        parts = table(
            "parts", column("part"), column("sub_part"), column("quantity")
        )

        # anchor term; label(None) requests anonymized column labels
        included_parts = (
            select(
                parts.c.sub_part.label(None),
                parts.c.part.label(None),
                parts.c.quantity,
            )
            .where(parts.c.part == "our part")
            .cte(recursive=True)
        )

        incl_alias = included_parts.alias()
        parts_alias = parts.alias()
        # recursive term; the anonymously-labeled column is reached by index
        included_parts = included_parts.union(
            select(
                parts_alias.c.sub_part,
                parts_alias.c.part,
                parts_alias.c.quantity,
            ).where(parts_alias.c.part == incl_alias.c[0])
        )

        # outer statement, again using positional column access on the CTE
        s = (
            select(
                included_parts.c[0],
                func.sum(included_parts.c.quantity).label("total_quantity"),
            )
            .select_from(
                included_parts.join(parts, included_parts.c[1] == parts.c.part)
            )
            .group_by(included_parts.c[0])
        )
        self.assert_compile(
            s,
            "WITH RECURSIVE anon_1(sub_part_1, part_1, quantity) "
            "AS (SELECT parts.sub_part AS sub_part_1, parts.part "
            "AS part_1, parts.quantity AS quantity FROM parts "
            "WHERE parts.part = :part_2 UNION "
            "SELECT parts_1.sub_part AS sub_part, "
            "parts_1.part AS part, parts_1.quantity "
            "AS quantity FROM parts AS parts_1, anon_1 AS anon_2 "
            "WHERE parts_1.part = anon_2.sub_part_1) "
            "SELECT anon_1.sub_part_1, "
            "sum(anon_1.quantity) AS total_quantity FROM anon_1 "
            "JOIN parts ON anon_1.part_1 = parts.part "
            "GROUP BY anon_1.sub_part_1",
        )

    def test_recursive_inner_cte_unioned_to_alias(self):

0 Source : test_cte.py
with MIT License
from sqlalchemy

    def test_recursive_inner_cte_unioned_to_alias(self):
        """Compile a recursive CTE whose recursive term unions against a
        named alias ("incl") of the CTE rather than the CTE object itself,
        so the rendered WITH clause uses the alias name throughout
        (2.0-style positional select())."""
        parts = table(
            "parts", column("part"), column("sub_part"), column("quantity")
        )

        # anchor (non-recursive) term of the recursive CTE
        included_parts = (
            select(parts.c.sub_part, parts.c.part, parts.c.quantity)
            .where(parts.c.part == "our part")
            .cte(recursive=True)
        )

        incl_alias = included_parts.alias("incl")
        parts_alias = parts.alias()
        # union the recursive term onto the *alias*; as the expected SQL
        # below shows, the CTE is then rendered under the name "incl"
        included_parts = incl_alias.union(
            select(
                parts_alias.c.sub_part,
                parts_alias.c.part,
                parts_alias.c.quantity,
            ).where(parts_alias.c.part == incl_alias.c.sub_part)
        )

        # outer statement: aggregate over the CTE joined back to "parts"
        s = (
            select(
                included_parts.c.sub_part,
                func.sum(included_parts.c.quantity).label("total_quantity"),
            )
            .select_from(
                included_parts.join(
                    parts, included_parts.c.part == parts.c.part
                )
            )
            .group_by(included_parts.c.sub_part)
        )
        self.assert_compile(
            s,
            "WITH RECURSIVE incl(sub_part, part, quantity) "
            "AS (SELECT parts.sub_part AS sub_part, parts.part "
            "AS part, parts.quantity AS quantity FROM parts "
            "WHERE parts.part = :part_1 UNION "
            "SELECT parts_1.sub_part AS sub_part, "
            "parts_1.part AS part, parts_1.quantity "
            "AS quantity FROM parts AS parts_1, incl "
            "WHERE parts_1.part = incl.sub_part) "
            "SELECT incl.sub_part, "
            "sum(incl.quantity) AS total_quantity FROM incl "
            "JOIN parts ON incl.part = parts.part "
            "GROUP BY incl.sub_part",
        )

    def test_recursive_union_no_alias_one(self):

0 Source : test_cte.py
with MIT License
from sqlalchemy

    def test_upsert_from_select(self):
        """Compile an "upsert": an UPDATE ... RETURNING wrapped in a CTE,
        consumed by an INSERT ... SELECT of literals guarded by NOT EXISTS
        (2.0-style positional select()); also pins every bind parameter
        value via checkparams."""
        orders = table(
            "orders",
            column("region"),
            column("amount"),
            column("product"),
            column("quantity"),
        )

        # the UPDATE half of the upsert; RETURNING makes the CTE selectable
        upsert = (
            orders.update()
            .where(orders.c.region == "Region1")
            .values(amount=1.0, product="Product1", quantity=1)
            .returning(*(orders.c._all_columns))
            .cte("upsert")
        )

        # the INSERT half: a literal row, inserted only when the UPDATE
        # returned no rows
        insert = orders.insert().from_select(
            orders.c.keys(),
            select(
                literal("Region1"),
                literal(1.0),
                literal("Product1"),
                literal(1),
            ).where(~exists(upsert.select())),
        )

        self.assert_compile(
            insert,
            "WITH upsert AS (UPDATE orders SET amount=:param_5, "
            "product=:param_6, quantity=:param_7 "
            "WHERE orders.region = :region_1 "
            "RETURNING orders.region, orders.amount, "
            "orders.product, orders.quantity) "
            "INSERT INTO orders (region, amount, product, quantity) "
            "SELECT :param_1 AS anon_1, :param_2 AS anon_2, "
            ":param_3 AS anon_3, :param_4 AS anon_4 WHERE NOT (EXISTS "
            "(SELECT upsert.region, upsert.amount, upsert.product, "
            "upsert.quantity FROM upsert))",
            checkparams={
                "param_1": "Region1",
                "param_2": 1.0,
                "param_3": "Product1",
                "param_4": 1,
                "param_5": 1.0,
                "param_6": "Product1",
                "param_7": 1,
                "region_1": "Region1",
            },
        )

        # despite the leading UPDATE CTE, the statement compiles as an INSERT
        eq_(insert.compile().isinsert, True)

    @testing.combinations(

0 Source : test_cte.py
with MIT License
from sqlalchemy

    def test_pg_example_three(self):
        """Compile a recursive part-explosion CTE feeding a DELETE with
        RETURNING, modeled on the PostgreSQL documentation's WITH examples
        (2.0-style positional select())."""

        parts = table("parts", column("part"), column("sub_part"))

        # anchor term of the recursive CTE
        included_parts = (
            select(parts.c.sub_part, parts.c.part)
            .where(parts.c.part == "our part")
            .cte("included_parts", recursive=True)
        )

        pr = included_parts.alias("pr")
        p = parts.alias("p")
        # recursive term: walk down the sub-part hierarchy
        included_parts = included_parts.union_all(
            select(p.c.sub_part, p.c.part).where(p.c.part == pr.c.sub_part)
        )
        stmt = (
            parts.delete()
            .where(parts.c.part.in_(select(included_parts.c.part)))
            .returning(parts.c.part)
        )

        # the outer RETURNING is a bonus over what PG's docs have
        self.assert_compile(
            stmt,
            "WITH RECURSIVE included_parts(sub_part, part) AS "
            "(SELECT parts.sub_part AS sub_part, parts.part AS part "
            "FROM parts "
            "WHERE parts.part = :part_1 "
            "UNION ALL SELECT p.sub_part AS sub_part, p.part AS part "
            "FROM parts AS p, included_parts AS pr "
            "WHERE p.part = pr.sub_part) "
            "DELETE FROM parts WHERE parts.part IN "
            "(SELECT included_parts.part FROM included_parts) "
            "RETURNING parts.part",
        )

    def test_insert_in_the_cte(self):

0 Source : test_cte.py
with MIT License
from sqlalchemy

    def test_correlated_cte_in_lateral_w_add_cte(self, reverse_direction):
        """this is the original use case that led to #7759

        A correlated CTE plus a CTE that depends on it are explicitly
        nested inside a subquery via add_cte(..., nest_here=True), then
        used as a LATERAL join target.  ``reverse_direction`` (presumably
        a combinations parameter -- not visible here) flips the order the
        CTEs are handed to add_cte; the single expected string shows the
        rendered SQL is the same either way.
        """
        contracts = table("contracts", column("id"))

        invoices = table("invoices", column("id"), column("contract_id"))

        contracts_alias = contracts.alias()
        # cte1 correlates to the outer "contracts" table
        cte1 = (
            select(contracts_alias)
            .where(contracts_alias.c.id == contracts.c.id)
            .correlate(contracts)
            .cte(name="cte1")
        )
        # cte2 consumes cte1 via a join
        cte2 = (
            select(invoices)
            .join(cte1, invoices.c.contract_id == cte1.c.id)
            .cte(name="cte2")
        )

        if reverse_direction:
            subq = select(cte1, cte2).add_cte(cte2, cte1, nest_here=True)
        else:
            subq = select(cte1, cte2).add_cte(cte1, cte2, nest_here=True)
        stmt = select(contracts).outerjoin(subq.lateral(), true())

        self.assert_compile(
            stmt,
            "SELECT contracts.id FROM contracts LEFT OUTER JOIN LATERAL "
            "(WITH cte1 AS (SELECT contracts_1.id AS id "
            "FROM contracts AS contracts_1 "
            "WHERE contracts_1.id = contracts.id), "
            "cte2 AS (SELECT invoices.id AS id, "
            "invoices.contract_id AS contract_id FROM invoices "
            "JOIN cte1 ON invoices.contract_id = cte1.id) "
            "SELECT cte1.id AS id, cte2.id AS id_1, "
            "cte2.contract_id AS contract_id "
            "FROM cte1, cte2) AS anon_1 ON true",
        )

0 Source : test_defaults.py
with MIT License
from sqlalchemy

    def test_a_in_b(self, a, b, connection):
        """Round-trip a CTE built from statement kind ``a`` (delete /
        insert / update / select) through a consuming statement of kind
        ``b`` (select / insert / delete / update), using 2.0-style
        positional select().

        ``a`` and ``b`` are presumably supplied by a combinations decorator
        outside this view -- TODO confirm.  Every pairing is expected to
        yield the row (1, 1), with table ``q`` left in ``expected`` state.
        """
        q = self.tables.q
        p = self.tables.p

        conn = connection
        # build the CTE "c" and record the (x, y) state q should end in
        if a == "delete":
            conn.execute(q.insert().values(y=10, z=1))
            cte = q.delete().where(q.c.z == 1).returning(q.c.z).cte("c")
            expected = None
        elif a == "insert":
            cte = q.insert().values(z=1, y=10).returning(q.c.z).cte("c")
            expected = (2, 10)
        elif a == "update":
            conn.execute(q.insert().values(x=5, y=10, z=1))
            cte = (
                q.update()
                .where(q.c.z == 1)
                .values(x=7)
                .returning(q.c.z)
                .cte("c")
            )
            expected = (7, 5)
        elif a == "select":
            conn.execute(q.insert().values(x=5, y=10, z=1))
            cte = sa.select(q.c.z).cte("c")
            expected = (5, 10)

        # build the consuming statement that references the CTE
        if b == "select":
            conn.execute(p.insert().values(s=1))
            stmt = select(p.c.s, cte.c.z).where(p.c.s == cte.c.z)
        elif b == "insert":
            sel = select(1, cte.c.z)
            stmt = (
                p.insert().from_select(["s", "t"], sel).returning(p.c.s, p.c.t)
            )
        elif b == "delete":
            stmt = p.insert().values(s=1, t=cte.c.z).returning(p.c.s, cte.c.z)
        elif b == "update":
            conn.execute(p.insert().values(s=1))
            stmt = (
                p.update()
                .values(t=5)
                .where(p.c.s == cte.c.z)
                .returning(p.c.u, cte.c.z)
            )
        eq_(list(conn.execute(stmt)), [(1, 1)])

        # verify the CTE's side effects (or lack thereof) on q
        eq_(conn.execute(select(q.c.x, q.c.y)).first(), expected)


class PKDefaultTest(fixtures.TestBase):

0 Source : test_lambdas.py
with MIT License
from sqlalchemy

    def test_global_tracking(self):
        """Lambda statements track *global* variable values as bind
        parameters: after global_y changes, the same lambda-built select
        compiles to identical SQL but picks up the new bound value."""
        t1 = table("t1", column("q"), column("p"))

        global global_x, global_y

        global_x = 10
        global_y = 17

        def go():
            # the lambda criteria closes over module-level globals
            return select(t1).where(
                lambda: and_(t1.c.q == global_x, t1.c.p == global_y)
            )

        self.assert_compile(
            go(),
            "SELECT t1.q, t1.p FROM t1 WHERE t1.q = :global_x_1 "
            "AND t1.p = :global_y_1",
            checkparams={"global_x_1": 10, "global_y_1": 17},
        )

        # mutate one global: same SQL string, updated parameter value
        global_y = 9

        self.assert_compile(
            go(),
            "SELECT t1.q, t1.p FROM t1 WHERE t1.q = :global_x_1 "
            "AND t1.p = :global_y_1",
            checkparams={"global_x_1": 10, "global_y_1": 9},
        )

    def test_boolean_constants(self):

See More Examples