sqlalchemy.func.max

Here are examples of the Python API sqlalchemy.func.max taken from open source projects.
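
For orientation, here is a minimal, self-contained sketch of the pattern the examples share. The User model and the in-memory SQLite engine are hypothetical and not taken from any project below: func.max(col) renders as SQL MAX(col), and .scalar() unwraps the single-row, single-column result (None for an empty table).

from sqlalchemy import Column, Integer, String, create_engine, func
from sqlalchemy.ext.declarative import declarative_base
from sqlalchemy.orm import sessionmaker

Base = declarative_base()

class User(Base):
    __tablename__ = 'users'
    id = Column(Integer, primary_key=True)
    name = Column(String)

engine = create_engine('sqlite://')
Base.metadata.create_all(engine)
session = sessionmaker(bind=engine)()
session.add_all([User(name='a'), User(name='b')])
session.commit()

# MAX(users.id) as a Python scalar; would be None if the table were empty.
max_id = session.query(func.max(User.id)).scalar()
assert max_id == 2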

35 Examples

Example 1

Project: tvb-framework Source File: workflow_dao.py
    def get_max_burst_id(self):
        """
        Return the maximum of the currently stored burst IDs to be used as the new burst name.
        This is not a thread-safe value, but we use it just for a label.
        """
        try:
            max_id = self.session.query(func.max(model.BurstConfiguration.id)).one()
            if max_id[0] is None:
                return 0
            return max_id[0]
        except SQLAlchemyError as excep:
            self.logger.exception(excep)
        return 0

Example 2

Project: sqlalchemy Source File: test_generative.py
    @testing.uses_deprecated('Call to deprecated function apply_max')
    def test_aggregate(self):
        foo, Foo = self.tables.foo, self.classes.Foo

        sess = create_session()
        query = sess.query(Foo)
        assert query.count() == 100
        assert sess.query(func.min(foo.c.bar)).filter(foo.c.bar<30).one() == (0,)

        assert sess.query(func.max(foo.c.bar)).filter(foo.c.bar<30).one() == (29,)
        assert next(query.filter(foo.c.bar<30).values(sa.func.max(foo.c.bar)))[0] == 29
        assert next(query.filter(foo.c.bar<30).values(sa.func.max(foo.c.bar)))[0] == 29

Example 3

Project: autonomie Source File: invoice.py
def get_next_official_number(year=None):
    """
    Return the next available official number

    :param int year: The year we'd like to query a number for
    """
    next_ = 1
    if year is None:
        year = datetime.date.today().year

    query = DBSESSION().query(func.max(Task.official_number))
    query = query.filter(extract('year', Task.date) == year)
    last = query.first()[0]
    if last:
        next_ = last + 1

    return next_

Example 4

Project: dokomoforms Source File: column_properties.py
def answer_max(survey_node: AnswerableSurveyNode):
    """Get the maximum answer."""
    return _answer_stat(
        survey_node,
        {'integer', 'decimal', 'date', 'time', 'timestamp'},
        sa.func.max,
    )

Example 5

Project: card_scan Source File: run_scan.py
def captures_to_db(captures, box_name):
	#given an iterable of captures and a box name,
	#save all the captured images to the database
	starting_index = session.query(func.max(InvCard.box_index))\
			.filter(InvCard.box==box_name).first()[0]
	if starting_index is None:
		starting_index = 0

	for i, img in enumerate(captures):
		as_png = cv.EncodeImage(".png", img).tostring()

		InvCard(
				box = box_name,
				box_index = starting_index + i,
				scan_png = as_png,
				recognition_status = "scanned",
				inventory_status = "present")

	session.commit()

Example 6

Project: python-sync-db Source File: dialects.py
def max_local(sa_class, session):
    """
    Returns the maximum primary key used for the given table.
    """
    engine = session.bind
    dialect = engine.name
    table_name = class_mapper(sa_class).mapped_table.name
    # default, strictly incorrect query
    found = session.query(func.max(getattr(sa_class, get_pk(sa_class)))).scalar()
    if dialect == 'sqlite':
        cursor = engine.execute("SELECT seq FROM sqlite_sequence WHERE name = ?",
                                table_name)
        result = cursor.fetchone()[0]
        cursor.close()
        return max(result, found)
    return found
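
A note on the example above: MAX(id) only sees rows currently in the table, while SQLite's sqlite_sequence table (maintained for tables declared with AUTOINCREMENT) records the highest id ever allocated, even after deletes; the function therefore checks both and returns the larger. A hedged usage sketch, with hypothetical names:

# Hypothetical usage: User is some mapped class, session an open Session.
highest_ever_id = max_local(User, session)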

Example 7

Project: sqlalchemy Source File: test_deprecations.py
    def test_apply_max(self):
        """Query.apply_max(col)

        max = session.query(Address).apply_max(Address.bounces)

        """

        Address = self.classes.Address

        session = create_session()

        # 0.5.0
        maxes = list(session.query(Address).values(func.max(Address.bounces)))
        max = maxes[0][0]
        assert max == 10

        max = session.query(func.max(Address.bounces)).one()[0]
        assert max == 10

Example 8

Project: data-act-broker-backend Source File: fsrs.py
Function: next_id
    @classmethod
    def nextId(cls, sess):
        """We'll often want to load "new" data -- anything with a later id
        than the awards we have. Return that max id"""
        current = sess.query(func.max(cls.id)).one()[0] or -1
        return current + 1
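
The ".one()[0] or -1" idiom above maps an empty table (SQL NULL) to -1 in Python; the same fallback can be pushed into SQL with func.coalesce, which also avoids "or" swallowing a legitimate id of 0. A minimal sketch under the same assumptions (a mapped class with an integer primary key), not taken from the project:

from sqlalchemy import func

def next_id(session, model_cls):
    # COALESCE(MAX(id), -1) + 1 yields 0 for an empty table, and never
    # misreads an existing id of 0 the way "max_id or -1" would.
    return session.query(func.coalesce(func.max(model_cls.id), -1)).scalar() + 1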

Example 9

Project: sqlalchemy-continuum Source File: relationship_builder.py
    def many_to_one_subquery(self, obj):
        tx_column = option(obj, 'transaction_column_name')
        reflector = VersionExpressionReflector(obj, self.property)

        return getattr(self.remote_cls, tx_column) == (
            sa.select(
                [sa.func.max(getattr(self.remote_cls, tx_column))]
            ).where(
                sa.and_(
                    getattr(self.remote_cls, tx_column) <=
                    getattr(obj, tx_column),
                    reflector(self.property.primaryjoin)
                )
            )
        )

Example 10

Project: festival Source File: subsonic.py
def get_filter_by_type(atype, fromYear=None, toYear=None, genre=None):
    if atype == 'newest':
        return lambda query: query.add_columns(func.max(Album.last_updated).label('last_updated')).group_by(Album.id)
    elif atype == 'starred':
        return lambda query: query.filter(False)
    elif atype == 'byYear':
        return lambda query: query.filter(Album.year.between(fromYear, toYear))
    elif atype == 'genre':
        return lambda query: query.filter(TrackInfo.genre_id == genre)
    return None

Example 11

Project: fuel-ostf Source File: controllers.py
    @expose('json')
    def get_last(self, cluster_id):
        test_run_ids = request.session.query(func.max(models.TestRun.id)) \
            .group_by(models.TestRun.test_set_id)\
            .filter_by(cluster_id=cluster_id)

        test_runs = request.session.query(models.TestRun)\
            .options(joinedload('tests'))\
            .filter(models.TestRun.id.in_(test_run_ids))

        return [item.frontend for item in test_runs]

Example 12

Project: bitex Source File: models.py
    @staticmethod
    def get_last_trade_id(session):
        res = session.query(func.max(Trade.id)).one()
        return res[0]

Example 13

Project: autonomie Source File: __init__.py
    def having_last_this_year(self, attr):
        return self.filter_this_year(func.max(attr))

Example 14

Project: autonomie Source File: __init__.py
    def having_last_this_month(self, attr):
        return self.filter_this_month(func.max(attr))

Example 15

Project: autonomie Source File: __init__.py
    def having_last_previous_year(self, attr):
        return self.filter_previous_year(func.max(attr))

Example 16

Project: iktomi Source File: __init__.py
    def test_query_func(self):
        try:
            self.db.query(func.max(db1.SameName.id)).all()
        except UnboundExecutionError as exc:
            self.fail('Unexpected exception: {}'.format(exc))

Example 17

Project: Live-Blog Source File: blog_media.py
    def _nextRank(self, entryId, blogId, typeId):
        maxRank, = self.session().query(func.max(BlogMediaMapped.Rank))\
            .filter(BlogMediaMapped.Id != entryId)\
            .filter(BlogMediaMapped.Blog == blogId)\
            .filter(BlogMediaMapped.typeId == typeId).one()
        if not maxRank: maxRank = 0
        return maxRank + 1

Example 18

Project: tvb-framework Source File: datatype_dao.py
    def get_values_of_datatype(self, project_id, datatype_class, filters=None, page_size=50):
        """
        Retrieve a list of dataTypes matching a filter inside a project.
        :returns: (results, total_count); at most page_size rows are returned, to avoid endless load times for a page
        """
        result = []
        count = 0

        if not issubclass(datatype_class, model.Base):
            self.logger.warning("Trying to filter not DB class:" + str(datatype_class))
            return result, count

        try:
            #Prepare generic query:
            query = self.session.query(datatype_class.id,
                                       func.max(datatype_class.type),
                                       func.max(datatype_class.gid),
                                       func.max(datatype_class.subject),
                                       func.max(model.Operation.completion_date),
                                       func.max(model.Operation.user_group),
                                       func.max(text('"OPERATION_GROUPS_1".name')),
                                       func.max(model.DataType.user_tag_1)
                        ).join((model.Operation, datatype_class.fk_from_operation == model.Operation.id)
                        ).outerjoin(model.Links
                        ).outerjoin((model.OperationGroup, model.Operation.fk_operation_group ==
                                     model.OperationGroup.id), aliased=True
                        ).filter(model.DataType.invalid == False
                        ).filter(or_(model.Operation.fk_launched_in == project_id,
                                     model.Links.fk_to_project == project_id))
            if filters:
                filter_str = filters.get_sql_filter_equivalent(datatype_to_check='datatype_class')
                if filter_str is not None:
                    query = query.filter(eval(filter_str))

            #Retrieve the results
            query = query.group_by(datatype_class.id).order_by(desc(datatype_class.id))

            result = query.limit(max(page_size, 0)).all()
            count = query.count()
        except Exception as excep:
            self.logger.exception(excep)

        return result, count

Example 19

Project: card_scan Source File: compress_boxes.py
def compress_smallest_box():
	last_box = session.query(func.max(sqlalchemy.cast(InvCard.box, sqlalchemy.Integer))).first()[0]
	box_capacity = list(metadata.bind.execute("select box,60 - count(*) as c from inv_cards where box not null and cast (box as int) > 0 group by box having c>0 order by c desc;"))
	if len(box_capacity) <= 0:
		raise Exception("there are no boxes in inventory to compress")
	remove_box = box_capacity[0][0]
	box_capacity = box_capacity[1:]

	cards_in_remove_box = InvCard.query.filter_by(box=str(remove_box)).order_by(InvCard.box_index.desc()).all()

	move_orders = fit_boxes(box_capacity, len(cards_in_remove_box))
	i=0

	print "cuem****** move %d cards from box %s **********" % (60-box_capacity[0][1], remove_box)
	print "\tall boxes: %s" % sorted([int(x) for x in [remove_box] + [b for b,o in move_orders]])
	for box, count in move_orders:
		max_index = session.query(func.max(InvCard.box_index)).filter_by(box=box).one()[0]
		print "======= moving %d cards to box %s ======" % (count, box)
		for card in cards_in_remove_box[i:count+i]:
			print u"move %s to %s/%d" % (card, box, max_index)
			max_index += 1
			card.box = box
			card.box_index = max_index
		i+=count
	
	if remove_box != last_box:
		cards_in_last_box = InvCard.query.filter_by(box=str(last_box)).order_by(InvCard.box_index).all()
		print "********** finally, move all %d cards from %s to %s **********" % (len(cards_in_last_box),last_box, remove_box)
		for card in cards_in_last_box:
			card.box = remove_box
	input()  # pause so the move list can be reviewed before committing
	session.commit()

Example 20

Project: card_scan Source File: website.py
@app.route("/reinsert_cards", methods=["POST","GET"])
def reinsert_cards():
	if request.method == 'POST':
		#post. reinsert the given rowids
		now = datetime.now()
		reason = request.form["reason"]
		if not reason:
			raise Exception("reason required")

		#get the cards
		rids=[]
		for key, val in request.form.items():
			if key.startswith("reinsert_"):
				rids.append(int(key.split("_")[1]))
		cards = InvCard.query.filter(InvCard.rowid.in_(rids)).order_by('name').all()

		#make sure we can insert them
		if any(card.inventory_status != "temporarily_out" for card in cards):
			raise Exception("card is not temporarily out")

		box_capacity = list(metadata.bind.execute("select box,60 - count(*) as c from inv_cards where box not null and cast( box as int) != 0 group by box having c>0 order by c desc;"))
		
		#fill in each box with count cards
		i=0
		fill_orders = fit_boxes(box_capacity, len(cards))
		fill_orders = sorted(fill_orders, key=lambda order: int(order[0]))  # sort by box number

		for box, count in fill_orders:
			max_index = session.query(func.max(InvCard.box_index)).filter_by(box=box).one()[0]
			for card in cards[i:count+i]:
				max_index += 1
				card.box = box
				card.box_index = max_index
				card.inventory_status = 'present'
				InvLog(card=card,date=now,direction='added',reason=reason)
			i+=count

		session.commit()

		#we're done. render the list.
		return render_template("results.html", cards=cards)

	else:
		#get the temporary_out cards to reinsert
		#it will be a list of ((date, reason), (cardlist)) tuples
		cards = InvCard.query.filter_by(inventory_status = "temporarily_out")
		the_key = lambda c: (c.most_recent_log().date, c.most_recent_log().reason)
		outstanding_cards = groupby(sorted(cards,key=the_key),the_key)
		outstanding_cards = [(key, sorted(val, key=attrgetter('name'))) for key, val in outstanding_cards]
		return render_template("outstanding_cards.html",outstanding_cards=outstanding_cards)

Example 21

Project: card_scan Source File: website.py
@app.route("/move_cards", methods=["POST"])
def move_cards():
	if request.method == 'POST':
		results = []

		now = datetime.now()
		new_box = request.form["new_box"]
		new_box_index = session.query(func.max(InvCard.box_index)).filter_by(box=new_box).one()[0]

		try:

			for key, val in request.form.items():
				match = re.match(r'move_(?P<num>\d+)', key)
				if not match: #if this isn't move_id
					continue
				if not val: #if the browser passed us an unchecked checkbox
					continue

				rid = int(match.group('num'))

				card = InvCard.query.filter_by(rowid=rid).one()
				if card.inventory_status != "present":
					raise Exception("can't move non-present card")

				results.append({
					'rowid': card.rowid,
					'set_name': card.set_name,
					'name': card.name,
					'box': card.box,
					'box_index': card.box_index})

				new_box_index += 1
				card.box = new_box
				card.box_index = new_box_index
			results = sorted(results, key = lambda r: (r['box'],r['box_index']))
			return render_template("results.html", cards=results)
		except Exception as e:
			session.rollback()
			raise e

Example 22

Project: sqlalchemy Source File: test_update_delete.py
    @testing.fails_on('mysql', 'FIXME: unknown')
    def test_delete_invalid_evaluation(self):
        User = self.classes.User

        sess = Session()

        john, jack, jill, jane = sess.query(User).order_by(User.id).all()

        assert_raises(exc.InvalidRequestError,
                      sess.query(User).
                      filter(
                          User.name == select([func.max(User.name)])).delete,
                      synchronize_session='evaluate'
                      )

        sess.query(User).filter(User.name == select([func.max(User.name)])).\
            delete(synchronize_session='fetch')

        assert john not in sess

        eq_(sess.query(User).order_by(User.id).all(), [jack, jill, jane])

Example 23

Project: faf Source File: symbol_transfer.py
def process_symbol(build_id, path, offset, problem_type, create_symbol_auth_key):
    db_ssource = (db.session.query(SymbolSource)
                            .filter(SymbolSource.build_id == build_id)
                            .filter(SymbolSource.path == path)
                            .filter(SymbolSource.offset == offset)
                            .first())
    if db_ssource is None:
        if (create_symbol_auth_key
            and symbol_transfer_auth_key
            and create_symbol_auth_key == symbol_transfer_auth_key
            and problem_type in ("kerneloops", "core")):

            # We need to attach our symbols to a dummy report in order to set
            # their type
            h = sha1()
            h.update("symbol_transfer_dummy")
            h.update(problem_type)
            dummy_report_hash = h.hexdigest()
            # The thread all our frames and symbols are going to be attached to
            db_thread = (db.session.query(ReportBtThread)
                                   .join(ReportBacktrace)
                                   .join(Report)
                                   .join(ReportHash)
                                   .filter(ReportHash.hash == dummy_report_hash)
                                   .first())
            if db_thread is None:
                # Need to potentially create the whole chain of objects
                db_report = (db.session.query(Report)
                                       .join(ReportHash)
                                       .filter(ReportHash.hash == dummy_report_hash)
                                       .first())
                if db_report is None:
                    db_report = Report()
                    db_report.type = problem_type
                    db_report.first_occurence = datetime.datetime.fromtimestamp(0)
                    db_report.last_occurence = db_report.first_occurence
                    db_report.count = 0
                    # Random component
                    db_report.component = db.session.query(OpSysComponent).first()
                    db.session.add(db_report)

                    db_report_hash = ReportHash()
                    db_report_hash.hash = dummy_report_hash
                    db_report_hash.report = db_report
                    db.session.add(db_report_hash)

                db_rbt = (db.session.query(ReportBacktrace)
                                    .filter(ReportBacktrace.report == db_report)
                                    .first())
                if db_rbt is None:
                    db_rbt = ReportBacktrace()
                    db_rbt.report = db_report
                    db_rbt.quality = -1000
                    db.session.add(db_rbt)

                db_thread = ReportBtThread()
                db_thread.backtrace = db_rbt
                # This prevents this dummy thread from being clustered
                db_thread.crashthread = False
                db.session.add(db_thread)

            db_ssource = SymbolSource()
            db_ssource.build_id = build_id
            db_ssource.path = path
            db_ssource.offset = offset
            db.session.add(db_ssource)

            max_order = (db.session.query(func.max(ReportBtFrame.order))
                                   .filter(ReportBtFrame.thread == db_thread)
                                   .scalar() or 0)
            db_frame = ReportBtFrame()
            db_frame.thread = db_thread
            db_frame.symbolsource = db_ssource
            db_frame.order = max_order + 1
            db.session.add(db_frame)

            db.session.commit()

            return {"error": "SymbolSource not found but created. Please wait."}, 202

        else:
            return {"error": "SymbolSource not found"}, 404

    if db_ssource.line_number is None:
        return {"error": "SymbolSource not yet retraced. Please wait."}, 404

    return {
        "Symbol": {
            "name": db_ssource.symbol.name,
            "nice_name": db_ssource.symbol.nice_name,
            "normalized_path": db_ssource.symbol.normalized_path,
        },
        "SymbolSource": {
            "build_id": db_ssource.build_id,
            "path": db_ssource.path,
            "offset": db_ssource.offset,
            "func_offset": db_ssource.func_offset,
            "hash": db_ssource.hash,
            "source_path": db_ssource.source_path,
            "line_number": db_ssource.line_number,
        }
    }, 200

Example 24

Project: autonomie Source File: __init__.py
    def having_last_dr(self, attr):
        return self.filter_dr(func.max(attr))

Example 25

Project: autonomie Source File: __init__.py
    def having_last_previous_month(self, attr):
        return self.filter_previous_month(func.max(attr))

Example 26

Project: autonomie Source File: competence.py
def competence_radar_chart_datas(context, request):
    """
    Return the data used to show a radar / spider chart of a user's
    competences
    context : CompetenceGrid
    """
    datas = []
    legend = []

    deadlines = CompetenceDeadline.query()
    deadlines = deadlines.filter(
        CompetenceDeadline.order <= context.deadline.order
    )
    for deadline in deadlines:
        grid = get_competence_grid(request, context.contractor_id, deadline.id)
        datas.append(grid.__radar_datas__())
        legend.append(u"Profil {0}".format(deadline.label))

    datas.append(CompetenceOption.__radar_datas__(context.deadline_id))
    legend.append(u"Profil de référence")

    config = {}
    config['levels'] = CompetenceScale.query().count()
    max_value = request.dbsession.query(
        func.max(CompetenceScale.value)
    ).all()[0][0]

    config['maxValue'] = max_value

    return {'datas': datas, 'legend': legend, "config": config}

Example 27

Project: catsnap Source File: tag.py
    @classmethod
    def get_image_data(cls, tag_names):
        from catsnap.table.image import Image
        from catsnap.table.image_tag import ImageTag
        session = Client().session()
        image_data = session.query(func.max(Image.filename),
                                   Image.image_id,
                                   func.max(Image.title),
                                   func.array_agg(Tag.name)).\
                join(ImageTag).\
                filter(ImageTag.image_id == Image.image_id).\
                filter(ImageTag.tag_id == Tag.tag_id).\
                filter(Image.image_id.in_(
                    session.query(ImageTag.image_id).\
                            join(Tag).\
                            filter(ImageTag.tag_id == Tag.tag_id).\
                            filter(Tag.name.in_(tag_names)))).\
                group_by(Image.image_id).\
                order_by(Image.filename)

        for image_struct in image_data:
            caption = Image.make_caption(title=image_struct[2],
                filename=image_struct[0],
                get_tags=lambda: image_struct[3])
            yield (image_struct[0], image_struct[1], caption)

Example 28

Project: GeoHealthCheck Source File: models.py
Function: last_run
    @property
    def last_run(self):
        return self.runs.having(func.max(Run.checked_datetime)).group_by(
            Run.checked_datetime).order_by(
                Run.checked_datetime.desc()).first()

Example 29

Project: lrrbot Source File: history.py
def build_headdata(fromkey, tokey, section, user, time):
	history = server.db.metadata.tables["history"]
	with server.db.engine.begin() as conn:
		prevkey = conn.execute(sqlalchemy.select([sqlalchemy.func.max(history.c.id)])
			.where((history.c.id < fromkey) & (history.c.section == section))).first()
		nextkey = conn.execute(sqlalchemy.select([sqlalchemy.func.min(history.c.id)])
			.where((history.c.id > fromkey) & (history.c.section == section))).first()

	if prevkey is not None:
		prevkey = prevkey[0]

	if nextkey is not None:
		nextkey = nextkey[0]

	return {
		"page": section,
		"user": user,
		"time": time,
		"fromkey": fromkey,
		"tokey": tokey,
		"prevkey": prevkey,
		"nextkey": nextkey,
		"isdiff": fromkey != tokey,
	}

Example 30

Project: osprey Source File: execute_worker.py
def run_single_trial(estimator, params, trial_id, scoring, X, y, cv,
                     sessionbuilder):

    status = None

    try:
        score = fit_and_score_estimator(
            estimator, params, cv=cv, scoring=scoring, X=X, y=y, verbose=1)
        with sessionbuilder() as session:
            trial = session.query(Trial).get(trial_id)
            trial.mean_test_score = score['mean_test_score']
            trial.mean_train_score = score['mean_train_score']
            trial.test_scores = score['test_scores']
            trial.train_scores = score['train_scores']
            trial.n_test_samples = score['n_test_samples']
            trial.n_train_samples = score['n_train_samples']

            trial.status = 'SUCCEEDED'
            best_so_far = session.query(
                func.max(Trial.mean_test_score)).first()
            print('~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~')
            print('Success! Model score = %f' % trial.mean_test_score)
            print('(best score so far   = %f)' %
                  max(trial.mean_test_score, best_so_far[0]))
            print('~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~')
            trial.completed = datetime.now()
            trial.elapsed = trial.completed - trial.started
            session.commit()
            status = trial.status

    except Exception:
        buf = cStringIO()
        traceback.print_exc(file=buf)

        with sessionbuilder() as session:
            trial = session.query(Trial).get(trial_id)
            trial.traceback = buf.getvalue()
            trial.status = 'FAILED'
            print('-'*78, file=sys.stderr)
            print('Exception encountered while fitting model')
            print('-'*78, file=sys.stderr)
            traceback.print_exc(file=sys.stderr)
            print('-'*78, file=sys.stderr)
            session.commit()
            status = trial.status

    except (KeyboardInterrupt, SystemExit):
        with sessionbuilder() as session:
            trial = session.query(Trial).get(trial_id)
            trial.status = 'FAILED'
            session.commit()
            sys.exit(1)

    return status

Example 31

Project: sync-engine Source File: 166_migrate_body_format.py
def upgrade():
    from inbox.ignition import main_engine
    from inbox.models.session import session_scope
    from inbox.security.blobstorage import encode_blob
    engine = main_engine(pool_size=1, max_overflow=0)
    Base = declarative_base()
    Base.metadata.reflect(engine)

    class Message(Base):
        __table__ = Base.metadata.tables['message']

    with session_scope(versioned=False) as db_session:
        max_id, = db_session.query(sa.func.max(Message.id)).one()
        if max_id is None:
            max_id = 0
        for i in range(0, max_id, CHUNK_SIZE):
            messages = db_session.query(Message). \
                filter(Message.id > i, Message.id <= i + CHUNK_SIZE). \
                options(load_only('_compacted_body', 'sanitized_body'))
            for message in messages:
                if message._compacted_body is None:
                    message._compacted_body = encode_blob(
                        message.sanitized_body.encode('utf-8'))
            db_session.commit()

Example 32

Project: sync-engine Source File: 170_update_easuid_schema_2.py
def upgrade():
    from sqlalchemy.ext.declarative import declarative_base
    from inbox.models.session import session_scope
    from inbox.ignition import main_engine
    engine = main_engine(pool_size=1, max_overflow=0)
    if not engine.has_table('easuid'):
        return
    Base = declarative_base()
    Base.metadata.reflect(engine)

    class EASUid(Base):
        __table__ = Base.metadata.tables['easuid']

    class EASFolderSyncStatus(Base):
        __table__ = Base.metadata.tables['easfoldersyncstatus']

    with session_scope(versioned=False) as db_session:
        max_easuid = db_session.query(sa.func.max(EASUid.id)).scalar()
        if max_easuid is None:
            return
        while True:
            results = db_session.query(EASUid, EASFolderSyncStatus). \
                join(EASFolderSyncStatus, sa.and_(
                    EASUid.fld_uid == EASFolderSyncStatus.eas_folder_id,
                    EASUid.device_id == EASFolderSyncStatus.device_id,
                    EASUid.easaccount_id == EASFolderSyncStatus.account_id,
                    EASUid.easfoldersyncstatus_id.is_(None))). \
                limit(1000).all()
            if not results:
                return
            for easuid, easfoldersyncstatus in results:
                easuid.easfoldersyncstatus_id = easfoldersyncstatus.id
            db_session.commit()

Example 33

Project: blazar Source File: utils.py
def drop_old_duplicate_entries_from_table(migrate_engine, table_name,
                                          use_soft_delete, *uc_column_names):
    """Drop all old rows having the same values for columns in uc_columns.

    This method drops (or marks as `deleted`, if use_soft_delete is True) old
    duplicate rows from the table named `table_name`.

    :param migrate_engine:  Sqlalchemy engine
    :param table_name:      Table with duplicates
    :param use_soft_delete: If True - values will be marked as `deleted`,
                            if False - values will be removed from table
    :param uc_column_names: Unique constraint columns
    """
    meta = MetaData()
    meta.bind = migrate_engine

    table = Table(table_name, meta, autoload=True)
    columns_for_group_by = [table.c[name] for name in uc_column_names]

    columns_for_select = [func.max(table.c.id)]
    columns_for_select.extend(columns_for_group_by)

    duplicated_rows_select = select(columns_for_select,
                                    group_by=columns_for_group_by,
                                    having=func.count(table.c.id) > 1)

    for row in migrate_engine.execute(duplicated_rows_select):
        # NOTE(boris-42): Do not remove row that has the biggest ID.
        delete_condition = table.c.id != row[0]
        is_none = None  # workaround for pyflakes
        delete_condition &= table.c.deleted_at == is_none
        for name in uc_column_names:
            delete_condition &= table.c[name] == row[name]

        rows_to_delete_select = select([table.c.id]).where(delete_condition)
        for row in migrate_engine.execute(rows_to_delete_select).fetchall():
            LOG.info(_LI("Deleting duplicated row with id: %(id)s from table: "
                         "%(table)s") % dict(id=row[0], table=table_name))

        if use_soft_delete:
            delete_statement = table.update().\
                where(delete_condition).\
                values({
                    'deleted': literal_column('id'),
                    'updated_at': literal_column('updated_at'),
                    'deleted_at': timeutils.utcnow()
                })
        else:
            delete_statement = table.delete().where(delete_condition)
        migrate_engine.execute(delete_statement)

Example 34

Project: oslo.db Source File: utils.py
def drop_old_duplicate_entries_from_table(migrate_engine, table_name,
                                          use_soft_delete, *uc_column_names):
    """Drop all old rows having the same values for columns in uc_columns.

    This method drops (or marks as `deleted`, if use_soft_delete is True) old
    duplicate rows from the table named `table_name`.

    :param migrate_engine:  Sqlalchemy engine
    :param table_name:      Table with duplicates
    :param use_soft_delete: If True - values will be marked as `deleted`,
                            if False - values will be removed from table
    :param uc_column_names: Unique constraint columns
    """
    meta = MetaData()
    meta.bind = migrate_engine

    table = Table(table_name, meta, autoload=True)
    columns_for_group_by = [table.c[name] for name in uc_column_names]

    columns_for_select = [func.max(table.c.id)]
    columns_for_select.extend(columns_for_group_by)

    duplicated_rows_select = sqlalchemy.sql.select(
        columns_for_select, group_by=columns_for_group_by,
        having=func.count(table.c.id) > 1)

    for row in migrate_engine.execute(duplicated_rows_select).fetchall():
        # NOTE(boris-42): Do not remove row that has the biggest ID.
        delete_condition = table.c.id != row[0]
        is_none = None  # workaround for pyflakes
        delete_condition &= table.c.deleted_at == is_none
        for name in uc_column_names:
            delete_condition &= table.c[name] == row[name]

        rows_to_delete_select = sqlalchemy.sql.select(
            [table.c.id]).where(delete_condition)
        for row in migrate_engine.execute(rows_to_delete_select).fetchall():
            LOG.info(_LI("Deleting duplicated row with id: %(id)s from table: "
                         "%(table)s"), dict(id=row[0], table=table_name))

        if use_soft_delete:
            delete_statement = table.update().\
                where(delete_condition).\
                values({
                    'deleted': literal_column('id'),
                    'updated_at': literal_column('updated_at'),
                    'deleted_at': timeutils.utcnow()
                })
        else:
            delete_statement = table.delete().where(delete_condition)
        migrate_engine.execute(delete_statement)

Example 35

Project: rack Source File: utils.py
def drop_old_duplicate_entries_from_table(migrate_engine, table_name,
                                          use_soft_delete, *uc_column_names):
    """This method is used to drop all old rows that have the same values for
    columns in uc_columns.
    """
    meta = MetaData()
    meta.bind = migrate_engine

    table = Table(table_name, meta, autoload=True)
    columns_for_group_by = [table.c[name] for name in uc_column_names]

    columns_for_select = [func.max(table.c.id)]
    columns_for_select.extend(list(columns_for_group_by))

    duplicated_rows_select = select(columns_for_select,
                                    group_by=columns_for_group_by,
                                    having=func.count(table.c.id) > 1)

    for row in migrate_engine.execute(duplicated_rows_select):
        # NOTE(boris-42): Do not remove row that has the biggest ID.
        delete_condition = table.c.id != row[0]
        for name in uc_column_names:
            delete_condition &= table.c[name] == row[name]

        rows_to_delete_select = select([table.c.id]).where(delete_condition)
        for row in migrate_engine.execute(rows_to_delete_select).fetchall():
            LOG.info(_("Deleted duplicated row with id: %(id)s from table: "
                       "%(table)s") % dict(id=row[0], table=table_name))

        if use_soft_delete:
            delete_statement = table.update().\
                where(delete_condition).\
                values({
                    'deleted': literal_column('id'),
                    'updated_at': literal_column('updated_at'),
                    'deleted_at': timeutils.utcnow()
                })
        else:
            delete_statement = table.delete().where(delete_condition)
        migrate_engine.execute(delete_statement)