sqlalchemy.orm.load_only

Here are examples of the Python API sqlalchemy.orm.load_only, collected from open source projects.

47 Examples
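
Before the collected examples, here is a minimal, self-contained sketch of the basic pattern: load_only() limits a query to the named columns (the primary key is always loaded as well) and defers the remaining columns until they are accessed. The User model, its columns, and the in-memory SQLite engine below are assumptions made purely for illustration; the string-based column names follow the older 1.x-style API used throughout the examples on this page.

from sqlalchemy import Column, Integer, String, create_engine
from sqlalchemy.ext.declarative import declarative_base
from sqlalchemy.orm import Session, load_only

Base = declarative_base()


class User(Base):
    # hypothetical model used only for this sketch
    __tablename__ = 'users'
    id = Column(Integer, primary_key=True)
    name = Column(String(50))
    email = Column(String(120))
    bio = Column(String(2000))


engine = create_engine('sqlite://')
Base.metadata.create_all(engine)
session = Session(engine)

# Only users.id (the primary key) and users.name appear in the SELECT;
# accessing user.email or user.bio later emits a separate query.
users = session.query(User).options(load_only('name')).all()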

Example 1

Project: sqlalchemy Source File: test_deferred.py
    def test_load_only_no_pk_rt(self):
        orders, Order = self.tables.orders, self.classes.Order

        mapper(Order, orders)

        sess = create_session()
        q = sess.query(Order).order_by(Order.id).\
                options(load_only("isopen", "description"))
        eq_(q.first(), Order(id=1))

Example 2

Project: sqlalchemy Source File: test_deferred.py
    def test_load_only_w_deferred(self):
        orders, Order = self.tables.orders, self.classes.Order

        mapper(Order, orders, properties={
                "description": deferred(orders.c.description)
            })

        sess = create_session()
        q = sess.query(Order).options(
                    load_only("isopen", "description"),
                    undefer("user_id")
                )
        self.assert_compile(q,
            "SELECT orders.description AS orders_description, "
            "orders.id AS orders_id, "
            "orders.user_id AS orders_user_id, "
            "orders.isopen AS orders_isopen FROM orders")

Example 3

Project: sqlalchemy Source File: test_deferred.py
    def test_load_only_subclass(self):
        s = Session()
        q = s.query(Manager).order_by(Manager.person_id).\
            options(load_only("status", "manager_name"))
        self.assert_compile(
            q,
            "SELECT managers.person_id AS managers_person_id, "
            "people.person_id AS people_person_id, "
            "people.type AS people_type, "
            "managers.status AS managers_status, "
            "managers.manager_name AS managers_manager_name "
            "FROM people JOIN managers "
            "ON people.person_id = managers.person_id "
            "ORDER BY managers.person_id"
        )

Example 4

Project: sqlalchemy Source File: test_deferred.py
    def test_load_only_subclass_and_superclass(self):
        s = Session()
        q = s.query(Boss).order_by(Person.person_id).\
            options(load_only("status", "manager_name"))
        self.assert_compile(
            q,
            "SELECT managers.person_id AS managers_person_id, "
            "people.person_id AS people_person_id, "
            "people.type AS people_type, "
            "managers.status AS managers_status, "
            "managers.manager_name AS managers_manager_name "
            "FROM people JOIN managers "
            "ON people.person_id = managers.person_id JOIN boss "
            "ON managers.person_id = boss.boss_id ORDER BY people.person_id"
        )

Example 5

Project: sqlalchemy Source File: test_deferred.py
    def test_load_only_alias_subclass(self):
        s = Session()
        m1 = aliased(Manager, flat=True)
        q = s.query(m1).order_by(m1.person_id).\
            options(load_only("status", "manager_name"))
        self.assert_compile(
            q,
            "SELECT managers_1.person_id AS managers_1_person_id, "
            "people_1.person_id AS people_1_person_id, "
            "people_1.type AS people_1_type, "
            "managers_1.status AS managers_1_status, "
            "managers_1.manager_name AS managers_1_manager_name "
            "FROM people AS people_1 JOIN managers AS "
            "managers_1 ON people_1.person_id = managers_1.person_id "
            "ORDER BY managers_1.person_id"
        )

Example 6

Project: autonomie Source File: 3_3_0_fonctionnalites_9_2b6ac7b172d3.py
def add_company_index(session, logger):
    logger.warn("Adding company_index to Task")
    from autonomie.models.company import Company
    from autonomie.models.task import Task
    for datas in session.query(Company.id):
        query = Task.query()
        query = query.options(
            sa.orm.load_only('id', 'company_id', "company_index", 'type_')
        )
        query = query.filter(Task.company_id == datas[0])
        for type_ in ('estimation', 'invoice', 'cancelinvoice'):
            index = 1
            typequery = query.filter(Task.type_ == type_)
            for task in typequery:
                task.company_index = index
                index += 1
                session.merge(task)

Example 7

Project: autonomie Source File: company.py
    @classmethod
    def get_customer_codes_and_names(cls, company):
        """
        Return a query for code and names of customers related to company
        :param company: the company we're working on
        :returns: an orm query loading Customer instances with only the columns
        we want
        :rtype: A Sqlalchemy query object
        """
        from autonomie.models.customer import Customer
        query = DBSESSION().query(Customer)
        query = query.options(load_only('code', 'name'))
        query = query.filter(Customer.code != None)
        query = query.filter(Customer.company_id == company.id)
        return query.order_by(Customer.code)

Example 8

Project: autonomie Source File: company.py
    @classmethod
    def get_project_codes_and_names(cls, company):
        """
        Return a query for code and names of projects related to company

        :param company: the company we're working on
        :returns: an orm query loading Project instances with only the columns
        we want
        :rtype: A Sqlalchemy query object
        """
        from autonomie.models.project import Project
        query = DBSESSION().query(Project)
        query = query.options(load_only('code', 'name'))
        query = query.filter(Project.code != None)
        query = query.filter(Project.company_id == company.id)
        return query.order_by(Project.code)

Example 9

Project: lastuser Source File: client.py
Function: all
    @classmethod
    def all(cls, users):
        """
        Return all AuthToken for the specified users.
        """
        if isinstance(users, QueryBaseClass):
            count = users.count()
            if count == 1:
                return cls.query.filter_by(user=users.first()).all()
            elif count > 1:
                return cls.query.filter(AuthToken.user_id.in_(users.options(load_only('id')))).all()
        else:
            count = len(users)
            if count == 1:
                return cls.query.filter_by(user=users[0]).all()
            elif count > 1:
                return cls.query.filter(AuthToken.user_id.in_([u.id for u in users])).all()

        return []

Example 10

Project: indico Source File: display.py
    @classproperty
    @classmethod
    def _category_query_options(cls):
        children_strategy = subqueryload('children')
        children_strategy.load_only('id', 'parent_id', 'title', 'protection_mode')
        children_strategy.subqueryload('acl_entries')
        children_strategy.undefer('deep_children_count')
        children_strategy.undefer('deep_events_count')
        children_strategy.undefer('has_events')
        return (children_strategy,
                load_only('id', 'parent_id', 'title', 'protection_mode'),
                subqueryload('acl_entries'),
                undefer('deep_children_count'),
                undefer('deep_events_count'),
                undefer('has_events'),
                undefer('chain'))

Example 11

Project: indico Source File: display.py
Function: category_query_options
    @classproperty
    @classmethod
    def _category_query_options(cls):
        children_strategy = joinedload('children')
        children_strategy.load_only('id')
        children_strategy.undefer('deep_events_count')
        return children_strategy, load_only('id', 'parent_id', 'protection_mode')

Example 12

Project: indico Source File: display.py
    def _process(self):
        author_contribs = (Contribution.query.with_parent(self.event_new)
                           .join(ContributionPersonLink)
                           .options(joinedload('event_new'))
                           .options(load_only('id', 'title'))
                           .filter(ContributionPersonLink.id == self.author.id,
                                   ContributionPersonLink.author_type != AuthorType.none)
                           .all())
        return WPEventDisplay.render_template('person_display.html', self._conf,
                                              author=self.author, contribs=author_contribs)

Example 13

Project: indico Source File: util.py
def get_events_registered(user, from_dt=None, to_dt=None):
    """Gets the IDs of events where the user is registered.

    :param user: A `User`
    :param from_dt: The earliest event start time to look for
    :param to_dt: The latest event start time to look for
    :return: A set of event ids
    """
    query = (user.registrations
             .options(load_only('event_id'))
             .options(joinedload(Registration.registration_form).load_only('event_id'))
             .join(Registration.registration_form)
             .join(RegistrationForm.event_new)
             .filter(Registration.is_active, ~RegistrationForm.is_deleted, ~Event.is_deleted,
                     Event.starts_between(from_dt, to_dt)))
    return {registration.event_id for registration in query}

Example 14

Project: indico Source File: util.py
def get_events_with_submitted_surveys(user, from_dt=None, to_dt=None):
    """Gets the IDs of events where the user submitted a survey.

    :param user: A `User`
    :param from_dt: The earliest event start time to look for
    :param to_dt: The latest event start time to look for
    :return: A set of event ids
    """
    # Survey submissions are not stored in links anymore, so we need to get them directly
    query = (user.survey_submissions
             .options(load_only('survey_id'))
             .options(joinedload(SurveySubmission.survey).load_only('event_id'))
             .join(Survey)
             .join(Event)
             .filter(~Survey.is_deleted, ~Event.is_deleted, Event.starts_between(from_dt, to_dt)))
    return {submission.survey.event_id for submission in query}

Example 15

Project: indico Source File: util.py
def get_events_managed_by(user, from_dt=None, to_dt=None):
    """Gets the IDs of events where the user has management privs.

    :param user: A `User`
    :param from_dt: The earliest event start time to look for
    :param to_dt: The latest event start time to look for
    :return: A set of event ids
    """
    query = (user.in_event_acls
             .join(Event)
             .options(noload('user'), noload('local_group'), load_only('event_id'))
             .filter(~Event.is_deleted, Event.starts_between(from_dt, to_dt))
             .filter(EventPrincipal.has_management_role('ANY')))
    return {principal.event_id for principal in query}

Example 16

Project: indico Source File: util.py
def get_events_created_by(user, from_dt=None, to_dt=None):
    """Gets the IDs of events created by the user

    :param user: A `User`
    :param from_dt: The earliest event start time to look for
    :param to_dt: The latest event start time to look for
    :return: A set of event ids
    """
    query = user.created_events.filter(~Event.is_deleted, Event.starts_between(from_dt, to_dt)).options(load_only('id'))
    return {event.id for event in query}

Example 17

Project: indico Source File: util.py
def get_events_with_linked_event_persons(user, from_dt=None, to_dt=None):
    """Returns a list of all events for which the user is an EventPerson

    :param user: A `User`
    :param from_dt: The earliest event start time to look for
    :param to_dt: The latest event start time to look for
    """
    query = (user.event_persons
             .options(load_only('event_id'))
             .options(noload('*'))
             .join(Event, Event.id == EventPerson.event_id)
             .filter(EventPerson.event_links.any())
             .filter(~Event.is_deleted, Event.starts_between(from_dt, to_dt)))
    return {ep.event_id for ep in query}

Example 18

Project: indico Source File: event_series.py
    def migrate_event_series(self):
        self.print_step("Migrating event series")
        all_series = self.get_event_series()
        all_series_ids = set(chain.from_iterable(all_series))
        events = {e.id: e for e in Event.find(Event.id.in_(all_series_ids)).options(load_only('id', 'series_id'))}
        for series in committing_iterator(verbose_iterator(all_series, len(all_series), lambda x: 0, lambda x: '')):
            series &= events.viewkeys()
            if len(series) < 2:
                self.print_warning('Skipping single-event series: {}'.format(sorted(series)))
                continue
            es = EventSeries(show_sequence_in_title=False)
            for id_ in series:
                events[id_].series = es
            if not self.quiet:
                self.print_success(repr(series))
        AttachmentFolder.find(AttachmentFolder.title.op('~')('^part\d+$')).update({AttachmentFolder.is_deleted: True},
                                                                                  synchronize_session=False)
        db.session.commit()

Example 19

Project: sync-engine Source File: generic.py
    def update_folder_sync_status(self, cb):
        # Loads the folder sync status and invokes the provided callback to
        # modify it. Commits any changes and updates `self.state` to ensure
        # they are never out of sync.
        with session_scope(self.namespace_id) as db_session:
            try:
                state = ImapFolderSyncStatus.state
                saved_folder_status = db_session.query(ImapFolderSyncStatus)\
                    .filter_by(account_id=self.account_id, folder_id=self.folder_id)\
                    .options(load_only(state)).one()
            except NoResultFound:
                saved_folder_status = ImapFolderSyncStatus(
                    account_id=self.account_id, folder_id=self.folder_id)
                db_session.add(saved_folder_status)

            cb(saved_folder_status)
            db_session.commit()

            self.state = saved_folder_status.state

Example 20

Project: sync-engine Source File: message.py
    @classmethod
    def api_loading_options(cls, expand=False):
        columns = ['public_id', 'is_draft', 'from_addr', 'to_addr', 'cc_addr',
                   'bcc_addr', 'is_read', 'is_starred', 'received_date',
                   'is_sent', 'subject', 'snippet', 'version', 'from_addr',
                   'to_addr', 'cc_addr', 'bcc_addr', 'reply_to',
                   '_compacted_body', 'thread_id', 'namespace_id']
        if expand:
            columns += ['message_id_header', 'in_reply_to', 'references']
        return (
            load_only(*columns),
            subqueryload('parts').joinedload('block'),
            subqueryload('thread').load_only('public_id', 'discriminator'),
            subqueryload('events').load_only('public_id', 'discriminator'),
            subqueryload('messagecategories').joinedload('category')
        )

Example 21

Project: auacm Source File: views.py
@app.route('/problems/<shortname>/info.pdf', methods=['GET'])
def get_problem_info(shortname):
    """Serve the PDF description of a problem"""
    pid = (database.session.query(Problem)
           .options(load_only('pid', 'shortname'))
           .filter(Problem.shortname == shortname)
           .first().pid)
    return serve_info_pdf(str(pid))

Example 22

Project: sqlalchemy Source File: test_deferred.py
    def test_load_only_no_pk(self):
        orders, Order = self.tables.orders, self.classes.Order

        mapper(Order, orders)

        sess = create_session()
        q = sess.query(Order).options(load_only("isopen", "description"))
        self.assert_compile(q,
            "SELECT orders.id AS orders_id, "
            "orders.description AS orders_description, "
            "orders.isopen AS orders_isopen FROM orders")

Example 23

Project: auacm Source File: views.py
@app.route("/api/submit", methods=["POST"])
@login_required
def submit():
    """
    Retrieves the submission information from the request, creates a submission,
    then begins the submissions execution. The response is simply a submission
    identifier of the new submission.

    :return: serves a 200 request code and an empty object if it is a good
            request, 403 if the filetype is unsupported, 400 if the required
            fields are missing.
    """

    uploaded_file = request.files['file']
    if not uploaded_file:
        return serve_error('file must be uploaded', response_code=400)
    if not judge.allowed_filetype(uploaded_file.filename):
        return serve_error('filename not allowed', response_code=403)
    if not request.form['pid']:
        return serve_error('the field \'pid\' must be specified',
            response_code=400)

    # Obtain the time limit for the problem
    time_limit = session.query(ProblemData).\
            options(load_only("pid", "time_limit")).\
            filter(ProblemData.pid==request.form['pid']).\
            first().time_limit

    ext = uploaded_file.filename.rsplit('.')[1].lower()
    if 'python' in request.form:
        ext = request.form['python']

    attempt = models.Submission(
        username=current_user.username,
        pid=request.form['pid'],
        submit_time=int(time.time()),
        auto_id=0,
        file_type=ext,
        result='start')

    attempt.commit_to_session()

    submission_path = os.path.join(app.config['DATA_FOLDER'],
                                   'submits', str(attempt.job))
    os.mkdir(submission_path)
    uploaded_file.save(os.path.join(submission_path, uploaded_file.filename))

    def update_status(status, test_number):
        """Updates the status of the submission and notifies the clients that
        the submission has a new status.
        """
        attempt.update_status(status)
        Flasknado.emit('status', {
            'submissionId': attempt.job,
            'problemId': attempt.pid,
            'username': attempt.username,
            'submitTime': attempt.submit_time,
            'testNum': test_number,
            'status': judge.EVENT_STATUS[status]
        })

    judge.Judge(attempt.pid, submission_path, uploaded_file, time_limit,
            update_status).run_threaded()

    return serve_response({
        'submissionId': attempt.job
    })

Example 24

Project: lux Source File: models.py
Function: query
    def _query(self):
        if self.fields:
            fields = self.model.db_columns(self.fields)
            self.sql_query = self.sql_query.options(load_only(*fields))
        return self.sql_query

Example 25

Project: torext Source File: sql.py
Function: load_only
    def load_only(self, *args, **kwargs):
        return self.options(load_only(*args, **kwargs))

Example 26

Project: sqlalchemy Source File: test_deferred.py
    def test_load_only_path_specific(self):
        User = self.classes.User
        Address = self.classes.Address
        Order = self.classes.Order

        users = self.tables.users
        addresses = self.tables.addresses
        orders = self.tables.orders

        mapper(User, users, properties=util.OrderedDict([
                ("addresses", relationship(Address, lazy="joined")),
                ("orders", relationship(Order, lazy="joined"))
            ]))

        mapper(Address, addresses)
        mapper(Order, orders)

        sess = create_session()

        q = sess.query(User).options(
                load_only("name").defaultload("addresses").load_only("id", "email_address"),
                defaultload("orders").load_only("id")
            )

        # hmmmm joinedload seems to be forcing users.id into here...
        self.assert_compile(
            q,
            "SELECT users.id AS users_id, users.name AS users_name, "
            "addresses_1.id AS addresses_1_id, "
            "addresses_1.email_address AS addresses_1_email_address, "
            "orders_1.id AS orders_1_id FROM users "
            "LEFT OUTER JOIN addresses AS addresses_1 "
            "ON users.id = addresses_1.user_id "
            "LEFT OUTER JOIN orders AS orders_1 ON users.id = orders_1.user_id"
        )

Example 27

Project: autonomie Source File: 3_3_0_fonctionnalites_9_2b6ac7b172d3.py
def add_internal_number(session, logger):
    logger.warn("Adding internal_number to Task")
    NUMBER_TMPLS = {
        'estimation': u"{s.project.code}_{s.customer.code}_D{s.project_index}\
_{s.date:%m%y}",
        'invoice': u"{s.project.code}_{s.customer.code}_F{s.project_index}\
_{s.date:%m%y}",
        'cancelinvoice': u"{s.project.code}_{s.customer.code}_A{s.project_index}\
_{s.date:%m%y}"
    }
    from autonomie.models.task import Task
    from autonomie.models.customer import Customer
    from autonomie.models.project import Project
    from autonomie.models.project import Phase

    from sqlalchemy.orm import joinedload
    from sqlalchemy.orm import load_only

    query = Task.query().options(
        load_only("project_index", "company_index", "date", "phase_id", 'type_')
    )

    query = query.filter(
        Task.type_.in_(['invoice', 'estimation', 'cancelinvoice'])
    )
    query = query.options(joinedload(Task.customer).load_only(Customer.code))
    query = query.options(joinedload(Task.project).load_only(Project.code))

    for task in query:
        tmpl = NUMBER_TMPLS[task.type_]
        if Phase.get(task.phase_id) is None:
            session.delete(task)
        else:
            task.internal_number = tmpl.format(s=task).upper()
            session.merge(task)

Example 28

Project: autonomie Source File: commercial.py
    def turnovers(self):
        """
            Return the realised turnovers
        """
        result = dict(year_total=0)
        for month in range(1, 13):

            invoices = self.request.context.get_invoices(valid=True).options(
                load_only('ht')
            )

            date_condition = and_(
                extract('year', Invoice.date) == self.year,
                extract('month', Invoice.date) == month,
                Invoice.financial_year == self.year,
            )
            if month != 12:
                invoices = invoices.filter(date_condition)
            else:
                # for december, we also like to have invoices edited in january
                # and reported to the previous accounting year
                reported_condition = and_(
                    Invoice.financial_year == self.year,
                    extract('year', Invoice.date) != self.year,
                )
                invoices = invoices.filter(
                    or_(date_condition, reported_condition)
                )

            invoice_sum = sum([invoice.ht for invoice in invoices])

            cinvoices = self.request.context.get_cancelinvoices(valid=True).options(
                load_only('ht')
            )

            date_condition = and_(
                extract('year', CancelInvoice.date) == self.year,
                extract('month', CancelInvoice.date) == month,
                CancelInvoice.financial_year == self.year,
            )
            if month != 12:
                cinvoices = cinvoices.filter(date_condition)
            else:
                reported_condition = and_(
                    CancelInvoice.financial_year == self.year,
                    extract('year', CancelInvoice.date) != self.year,
                )
                cinvoices = cinvoices.filter(
                    or_(date_condition, reported_condition)
                )

            cinvoice_sum = sum([cinvoice.ht for cinvoice in cinvoices])

            result[month] = invoice_sum + cinvoice_sum
            result['year_total'] += result[month]
        return result

Example 29

Project: hasjob Source File: jobpost.py
Function: fetch
    @classmethod
    def fetch(cls, hashid):
        """Returns a SQLAlchemy query object for JobPost"""
        return cls.query.filter_by(hashid=hashid).options(load_only("id", "headline", "headlineb", "hashid", "datetime", "status", "email_domain", "review_comments", "company_url"))

Example 30

Project: indico Source File: generate_processing_page.py
Function: main
def main():
    html_tag_regex = '<[a-zA-Z]+.*>'
    contributions = (Contribution.query
                     .filter(Contribution.description.op('~')(html_tag_regex))
                     .options(load_only('id', 'description'))
                     .all())
    subcontributions = (SubContribution.query
                        .filter(SubContribution.description.op('~')(html_tag_regex))
                        .options(load_only('id', 'description'))
                        .all())
    categories = (Category.query
                  .filter(Category.description.op('~')(html_tag_regex))
                  .options(load_only('id', 'description'))
                  .all())

    def as_dict(objs):
        return {x.id: x.description for x in objs}

    def format_table(model):
        return model.__table__.fullname

    object_descriptions = {
        format_table(Contribution): as_dict(contributions),
        format_table(SubContribution): as_dict(subcontributions),
        format_table(Category): as_dict(categories)
    }

    env = Environment(loader=FileSystemLoader(os.path.dirname(__file__)))

    template = env.get_template('fix_descriptions_template.html')
    print template.render(object_descriptions=htmlsafe_dumps(object_descriptions))

Example 31

Project: indico Source File: output.py
    def _generate_category_path(self, event, out):
        path = [unicode(c.id) for c in event.category.chain_query.options(load_only('id'))]
        out.openTag("datafield", [["tag", "650"], ["ind1", " "], ["ind2", "7"]])
        out.writeTag("subfield", ":".join(path), [["code", "a"]])
        out.closeTag("datafield")

Example 32

Project: indico Source File: display.py
Function: process
    def _process(self):
        if not request.is_xhr:
            return WPCategory.render_template('display/calendar.html', self.category,
                                              start_dt=request.args.get('start_dt'))
        tz = self.category.display_tzinfo
        start = tz.localize(dateutil.parser.parse(request.args['start'])).astimezone(utc)
        end = tz.localize(dateutil.parser.parse(request.args['end'])).astimezone(utc)
        query = (Event.query
                 .filter(Event.starts_between(start, end),
                         Event.is_visible_in(self.category),
                         ~Event.is_deleted)
                 .options(load_only('id', 'title', 'start_dt', 'end_dt', 'category_id')))
        events = self._get_event_data(query)
        ongoing_events = (Event.query
                          .filter(Event.is_visible_in(self.category),
                                  Event.start_dt < start,
                                  Event.end_dt > end)
                          .options(load_only('id', 'title', 'start_dt', 'end_dt', 'timezone'))
                          .order_by(Event.title)
                          .all())
        return jsonify_data(flash=False, events=events, ongoing_event_count=len(ongoing_events),
                            ongoing_events_html=self._render_ongoing_events(ongoing_events))

Example 33

Project: indico Source File: management.py
    def _process(self):
        page = request.args.get('page', '1')
        order_columns = {'start_dt': Event.start_dt, 'title': db.func.lower(Event.title)}
        direction = 'desc' if request.args.get('desc', '1') == '1' else 'asc'
        order_column = order_columns[request.args.get('order', 'start_dt')]
        query = (Event.query.with_parent(self.category)
                 .options(joinedload('series'), undefer_group('series'),
                          load_only('id', 'category_id', 'created_dt',  'end_dt', 'protection_mode',  'start_dt',
                                    'title', 'type_', 'series_pos', 'series_count'))
                 .order_by(getattr(order_column, direction)())
                 .order_by(Event.id))
        if page == 'all':
            events = query.paginate(show_all=True)
        else:
            events = query.paginate(page=int(page))
        return WPCategoryManagement.render_template('management/content.html', self.category, 'content',
                                                    subcategories=self.category.children,
                                                    events=events, page=page,
                                                    order_column=request.args.get('order', 'start_dt'),
                                                    direction=direction)

Example 34

Project: indico Source File: serialize.py
def serialize_category_ical(category, user, event_filter):
    """Export the events in a category to iCal

    :param category: The category to export
    :param user: The user who needs to be able to access the events
    :param event_filter: A SQLalchemy criterion to restrict which
                         events will be returned.  Usually something
                         involving the start/end date of the event.
    """
    own_room_strategy = joinedload('own_room')
    own_room_strategy.load_only('building', 'floor', 'number', 'name')
    own_room_strategy.lazyload('owner')
    own_venue_strategy = joinedload('own_venue').load_only('name')
    query = (Event.query
             .filter(Event.category_chain_overlaps(category.id),
                     ~Event.is_deleted,
                     event_filter)
             .options(load_only('id', 'category_id', 'start_dt', 'end_dt', 'title', 'description', 'own_venue_name',
                                'own_room_name', 'protection_mode', 'access_key'),
                      subqueryload('acl_entries'),
                      joinedload('person_links'),
                      own_room_strategy,
                      own_venue_strategy)
             .order_by(Event.start_dt))
    events = [e for e in query if e.can_access(user)]
    cal = ical.Calendar()
    cal.add('version', '2.0')
    cal.add('prodid', '-//CERN//INDICO//EN')

    now = now_utc(False)
    for event in events:
        url = url_for('event.conferenceDisplay', confId=event.id, _external=True)
        location = ('{} ({})'.format(event.room_name, event.venue_name)
                    if event.venue_name and event.room_name
                    else (event.venue_name or event.room_name))
        cal_event = ical.Event()
        cal_event.add('uid', u'indico-event-{}@cern.ch'.format(event.id))
        cal_event.add('dtstamp', now)
        cal_event.add('dtstart', event.start_dt)
        cal_event.add('dtend', event.end_dt)
        cal_event.add('url', url)
        cal_event.add('summary', event.title)
        cal_event.add('location', location)
        description = []
        if event.person_links:
            speakers = [u'{} ({})'.format(x.full_name, x.affiliation) if x.affiliation else x.full_name
                        for x in event.person_links]
            description.append(u'Speakers: {}'.format(u', '.join(speakers)))

        if event.description:
            desc_text = unicode(event.description) or u'<p/>'  # get rid of RichMarkup
            try:
                description.append(unicode(html.fromstring(desc_text).text_content()))
            except ParserError:
                # this happens e.g. if desc_text contains only a html comment
                pass
        description.append(url)
        cal_event.add('description', u'\n'.join(description))
        cal.add_component(cal_event)
    return BytesIO(cal.to_ical())

Example 35

Project: indico Source File: serialize.py
def serialize_category_atom(category, url, user, event_filter):
    """Export the events in a category to Atom

    :param category: The category to export
    :param url: The URL of the feed
    :param user: The user who needs to be able to access the events
    :param event_filter: A SQLalchemy criterion to restrict which
                         events will be returned.  Usually something
                         involving the start/end date of the event.
    """
    query = (Event.query
             .filter(Event.category_chain_overlaps(category.id),
                     ~Event.is_deleted,
                     event_filter)
             .options(load_only('id', 'category_id', 'start_dt', 'title', 'description', 'protection_mode',
                                'access_key'),
                      subqueryload('acl_entries'))
             .order_by(Event.start_dt))
    events = [e for e in query if e.can_access(user)]

    feed = AtomFeed(feed_url=url, title='Indico Feed [{}]'.format(category.title))
    for event in events:
        feed.add(title=event.title,
                 summary=unicode(event.description),  # get rid of RichMarkup
                 url=url_for('event.conferenceDisplay', confId=event.id, _external=True),
                 updated=event.start_dt)
    return BytesIO(feed.to_string().encode('utf-8'))

Example 36

Project: indico Source File: util.py
@memoize_redis(3600)
@materialize_iterable()
def get_upcoming_events():
    """Get the global list of upcoming events"""
    from indico.modules.events import Event
    data = upcoming_events_settings.get_all()
    if not data['max_entries'] or not data['entries']:
        return
    tz = timezone(Config.getInstance().getDefaultTimezone())
    now = now_utc(False).astimezone(tz)
    base_query = (Event.query
                  .filter(Event.effective_protection_mode == ProtectionMode.public,
                          ~Event.is_deleted,
                          Event.end_dt.astimezone(tz) > now)
                  .options(load_only('id', 'title', 'start_dt', 'end_dt')))
    queries = []
    cols = {'category': Event.category_id,
            'event': Event.id}
    for entry in data['entries']:
        delta = timedelta(days=entry['days'])
        query = (base_query
                 .filter(cols[entry['type']] == entry['id'])
                 .filter(db.cast(Event.start_dt.astimezone(tz), db.Date) > (now - delta).date())
                 .with_entities(Event, db.literal(entry['weight']).label('weight')))
        queries.append(query)

    query = (queries[0].union(*queries[1:])
             .order_by(db.desc('weight'), Event.start_dt, Event.title)
             .limit(data['max_entries']))
    for row in query:
        event = row[0]
        # we cache the result of the function and is_deleted is used in the repr
        # and having a broken repr on the cached objects would be ugly
        set_committed_value(event, 'is_deleted', False)
        yield event

Example 37

Project: indico Source File: util.py
def get_events_with_linked_contributions(user, from_dt=None, to_dt=None):
    """Returns a dict with keys representing event_id and the values containing
    data about the user rights for contributions within the event

    :param user: A `User`
    :param from_dt: The earliest event start time to look for
    :param to_dt: The latest event start time to look for
    """
    def add_acl_data():
        query = (user.in_contribution_acls
                 .options(load_only('contribution_id', 'roles', 'full_access', 'read_access'))
                 .options(noload('*'))
                 .options(contains_eager(ContributionPrincipal.contribution).load_only('event_id'))
                 .join(Contribution)
                 .join(Event, Event.id == Contribution.event_id)
                 .filter(~Contribution.is_deleted, ~Event.is_deleted, Event.starts_between(from_dt, to_dt)))
        for principal in query:
            roles = data[principal.contribution.event_id]
            if 'submit' in principal.roles:
                roles.add('contribution_submission')
            if principal.full_access:
                roles.add('contribution_manager')
            if principal.read_access:
                roles.add('contribution_access')

    def add_contrib_data():
        has_contrib = (EventPerson.contribution_links.any(
            ContributionPersonLink.contribution.has(~Contribution.is_deleted)))
        has_subcontrib = EventPerson.subcontribution_links.any(
            SubContributionPersonLink.subcontribution.has(db.and_(
                ~SubContribution.is_deleted,
                SubContribution.contribution.has(~Contribution.is_deleted))))
        query = (Event.query
                 .options(load_only('id'))
                 .options(noload('*'))
                 .filter(~Event.is_deleted,
                         Event.starts_between(from_dt, to_dt),
                         Event.persons.any((EventPerson.user_id == user.id) & (has_contrib | has_subcontrib))))
        for event in query:
            data[event.id].add('contributor')

    data = defaultdict(set)
    add_acl_data()
    add_contrib_data()
    return data

Example 38

Project: indico Source File: util.py
def get_events_with_linked_sessions(user, from_dt=None, to_dt=None):
    """Returns a dict with keys representing event_id and the values containing
    data about the user rights for sessions within the event

    :param user: A `User`
    :param from_dt: The earliest event start time to look for
    :param to_dt: The latest event start time to look for
    """
    query = (user.in_session_acls
             .options(load_only('session_id', 'roles', 'full_access', 'read_access'))
             .options(noload('*'))
             .options(contains_eager(SessionPrincipal.session).load_only('event_id'))
             .join(Session)
             .join(Event, Event.id == Session.event_id)
             .filter(~Session.is_deleted, ~Event.is_deleted, Event.starts_between(from_dt, to_dt)))
    data = defaultdict(set)
    for principal in query:
        roles = data[principal.session.event_id]
        if 'coordinate' in principal.roles:
            roles.add('session_coordinator')
        if 'submit' in principal.roles:
            roles.add('session_submission')
        if principal.full_access:
            roles.add('session_manager')
        if principal.read_access:
            roles.add('session_access')
    return data

Example 39

Project: indico Source File: controllers.py
    def _process(self):
        self.user.settings.set('suggest_categories', True)
        tz = timezone(DisplayTZ().getDisplayTZ())
        hours, minutes = timedelta_split(tz.utcoffset(datetime.now()))[:2]
        categories = get_related_categories(self.user)
        categories_events = []
        if categories:
            category_ids = {c['categ'].id for c in categories.itervalues()}
            today = now_utc(False).astimezone(session.tzinfo).date()
            query = (Event.query
                     .filter(~Event.is_deleted,
                             Event.category_chain_overlaps(category_ids),
                             Event.start_dt.astimezone(session.tzinfo) >= today)
                     .options(joinedload('category').load_only('id', 'title'),
                              joinedload('series'),
                              subqueryload('acl_entries'),
                              load_only('id', 'category_id', 'start_dt', 'end_dt', 'title', 'access_key',
                                        'protection_mode', 'series_id', 'series_pos', 'series_count'))
                     .order_by(Event.start_dt, Event.id))
            categories_events = get_n_matching(query, 10, lambda x: x.can_access(self.user))
        from_dt = now_utc(False) - relativedelta(weeks=1, hour=0, minute=0, second=0)
        linked_events = [(event, {'management': bool(roles & self.management_roles),
                                  'reviewing': bool(roles & self.reviewer_roles),
                                  'attendance': bool(roles & self.attendance_roles)})
                         for event, roles in get_linked_events(self.user, from_dt, None, 10).iteritems()]
        return WPUserDashboard.render_template('dashboard.html', 'dashboard',
                                               timezone=unicode(tz),
                                               offset='{:+03d}:{:02d}'.format(hours, minutes), user=self.user,
                                               categories=categories,
                                               categories_events=categories_events,
                                               suggested_categories=get_suggested_categories(self.user),
                                               linked_events=linked_events)

Example 40

Project: indico Source File: util.py
def get_linked_events(user, from_dt, to_dt, limit=None):
    """Get the linked events and the user's roles in them"""
    from indico.modules.events.contributions.util import get_events_with_linked_contributions
    from indico.modules.events.registration.util import get_events_registered
    from indico.modules.events.sessions.util import get_events_with_linked_sessions
    from indico.modules.events.surveys.util import get_events_with_submitted_surveys
    from indico.modules.events.util import (get_events_managed_by, get_events_created_by,
                                            get_events_with_linked_event_persons)

    links = avatar_links.get_links(user, from_dt, to_dt) if redis_client else OrderedDict()
    for event_id in get_events_registered(user, from_dt, to_dt):
        links.setdefault(str(event_id), set()).add('registration_registrant')
    for event_id in get_events_with_submitted_surveys(user, from_dt, to_dt):
        links.setdefault(str(event_id), set()).add('survey_submitter')
    for event_id in get_events_managed_by(user, from_dt, to_dt):
        links.setdefault(str(event_id), set()).add('conference_manager')
    for event_id in get_events_created_by(user, from_dt, to_dt):
        links.setdefault(str(event_id), set()).add('conference_creator')
    for event_id, principal_roles in get_events_with_linked_sessions(user, from_dt, to_dt).iteritems():
        links.setdefault(str(event_id), set()).update(principal_roles)
    for event_id, principal_roles in get_events_with_linked_contributions(user, from_dt, to_dt).iteritems():
        links.setdefault(str(event_id), set()).update(principal_roles)
    for event_id in get_events_with_linked_event_persons(user, from_dt, to_dt):
        links.setdefault(str(event_id), set()).add('conference_chair')

    query = (Event.query
             .filter(~Event.is_deleted,
                     Event.id.in_(map(int, links)))
             .options(joinedload('series'),
                      load_only('id', 'category_id', 'title', 'start_dt', 'end_dt',
                                'series_id', 'series_pos', 'series_count'))
             .order_by(Event.start_dt, Event.id))
    if limit is not None:
        query = query.limit(limit)
    return OrderedDict((event, links[str(event.id)]) for event in query)

Example 41

Project: indico Source File: suggestions.py
def _query_categ_events(categ, start_dt, end_dt):
    return (Event.query
            .with_parent(categ)
            .filter(Event.happens_between(start_dt, end_dt))
            .options(load_only('id', 'start_dt', 'end_dt')))

Example 42

Project: sync-engine Source File: gmail.py
    def resync_uids_impl(self):
        with session_scope(self.namespace_id) as db_session:
            imap_folder_info_entry = db_session.query(ImapFolderInfo)\
                .options(load_only('uidvalidity', 'highestmodseq'))\
                .filter_by(account_id=self.account_id,
                           folder_id=self.folder_id)\
                .one()
            with self.conn_pool.get() as crispin_client:
                crispin_client.select_folder(self.folder_name,
                                             lambda *args: True)
                uidvalidity = crispin_client.selected_uidvalidity
                if uidvalidity <= imap_folder_info_entry.uidvalidity:
                    # if the remote UIDVALIDITY is less than or equal to -
                    # from my (siro) understanding it should not be less than -
                    # the local UIDVALIDITY log a debug message and exit right
                    # away
                    log.debug('UIDVALIDITY unchanged')
                    return
                msg_uids = crispin_client.all_uids()
                mapping = {g_msgid: msg_uid for msg_uid, g_msgid in
                           crispin_client.g_msgids(msg_uids).iteritems()}
            imap_uid_entries = db_session.query(ImapUid)\
                .options(load_only('msg_uid'),
                         joinedload('message').load_only('g_msgid'))\
                .filter_by(account_id=self.account_id,
                           folder_id=self.folder_id)

            chunk_size = 1000
            for entry in imap_uid_entries.yield_per(chunk_size):
                if entry.message.g_msgid in mapping:
                    log.debug('X-GM-MSGID {} from UID {} to UID {}'.format(
                        entry.message.g_msgid,
                        entry.msg_uid,
                        mapping[entry.message.g_msgid]))
                    entry.msg_uid = mapping[entry.message.g_msgid]
                else:
                    db_session.delete(entry)
            log.debug('UIDVALIDITY from {} to {}'.format(
                imap_folder_info_entry.uidvalidity, uidvalidity))
            imap_folder_info_entry.uidvalidity = uidvalidity
            imap_folder_info_entry.highestmodseq = None
            db_session.commit()

Example 43

Project: sync-engine Source File: gc.py
    def _run_impl(self):
        self.log.info('Starting LabelRenameHandler',
                      label_name=self.label_name)

        self.semaphore.acquire(blocking=True)

        try:
            with connection_pool(self.account_id).get() as crispin_client:
                folder_names = []
                with session_scope(self.account_id) as db_session:
                    folders = db_session.query(Folder).filter(
                        Folder.account_id == self.account_id)

                    folder_names = [folder.name for folder in folders]
                    db_session.expunge_all()

                for folder_name in folder_names:
                    crispin_client.select_folder(folder_name, uidvalidity_cb)

                    found_uids = crispin_client.search_uids(['X-GM-LABELS',
                                                             utf7_encode(self.label_name)])

                    for chnk in chunk(found_uids, 200):
                        flags = crispin_client.flags(chnk)

                        self.log.info('Running metadata update for folder',
                                      folder_name=folder_name)
                        with session_scope(self.account_id) as db_session:
                            fld = db_session.query(Folder).options(load_only("id"))\
                                .filter(Folder.account_id == self.account_id,
                                        Folder.name == folder_name).one()

                            common.update_metadata(self.account_id, fld.id,
                                                   fld.canonical_name, flags,
                                                   db_session)
                            db_session.commit()
        finally:
            self.semaphore.release()

Example 44

Project: sync-engine Source File: threading.py
def fetch_corresponding_thread(db_session, namespace_id, message):
    """fetch a thread matching the corresponding message. Returns None if
       there's no matching thread."""
    # FIXME: for performance reasons, we make the assumption that a reply
    # to a message always has a similar subject. This is only
    # right 95% of the time.
    clean_subject = cleanup_subject(message.subject)
    threads = db_session.query(Thread). \
        filter(Thread.namespace_id == namespace_id,
               Thread._cleaned_subject == clean_subject). \
        order_by(desc(Thread.id)). \
        options(load_only('id', 'discriminator'),
                joinedload(Thread.messages).load_only(
                    'from_addr', 'to_addr', 'bcc_addr', 'cc_addr'))

    for thread in threads:
        for match in thread.messages:
            # A lot of people BCC some address when sending mass
            # emails so ignore BCC.
            match_bcc = match.bcc_addr if match.bcc_addr else []
            message_bcc = message.bcc_addr if message.bcc_addr else []

            match_emails = [t[1] for t in match.participants
                            if t not in match_bcc]
            message_emails = [t[1] for t in message.participants
                              if t not in message_bcc]

            # A conversation takes place between two or more persons.
            # Are there more than two participants in common in this
            # thread? If yes, it's probably a related thread.
            match_participants_set = set(match_emails)
            message_participants_set = set(message_emails)

            if len(match_participants_set & message_participants_set) >= 2:
                # No need to loop through the rest of the messages
                # in the thread
                if len(thread.messages) >= MAX_THREAD_LENGTH:
                    break
                else:
                    return match.thread

            # handle the case where someone is self-sending an email.
            if not message.from_addr or not message.to_addr:
                return

            match_from = [t[1] for t in match.from_addr]
            match_to = [t[1] for t in match.to_addr]
            message_from = [t[1] for t in message.from_addr]
            message_to = [t[1] for t in message.to_addr]

            if (len(message_to) == 1 and message_from == message_to and
                    match_from == match_to and message_to == match_from):
                # Check that we're not over max thread length in this case
                # No need to loop through the rest of the messages
                # in the thread.
                if len(thread.messages) >= MAX_THREAD_LENGTH:
                    break
                else:
                    return match.thread

    return

Example 45

Project: sync-engine Source File: 058_enforce_length_limit_of_255_on_message_.py
def upgrade():
    from inbox.models.session import session_scope
    from inbox.ignition import main_engine
    engine = main_engine(pool_size=1, max_overflow=0)

    from sqlalchemy.ext.declarative import declarative_base
    Base = declarative_base()
    Base.metadata.reflect(engine)

    class Message(Base):
        __table__ = Base.metadata.tables['message']

    class Thread(Base):
        __table__ = Base.metadata.tables['thread']

    with session_scope(versioned=False) \
            as db_session:
        count = 0
        for msg in db_session.query(Message).options(
                sa.orm.load_only('subject')).yield_per(500):
            truncate_subject(msg)
            count += 1
            if count > 500:
                db_session.commit()
                count = 0
        db_session.commit()

        for thread in db_session.query(Thread).options(
                sa.orm.load_only('subject')).yield_per(500):
            truncate_subject(thread)
            count += 1
            if count > 500:
                db_session.commit()
                count = 0
        db_session.commit()

    op.alter_column('message', 'subject',
                    type_=sa.String(255), existing_nullable=True)
    op.alter_column('thread', 'subject',
                    type_=sa.String(255), existing_nullable=True)

Example 46

Project: sync-engine Source File: 166_migrate_body_format.py
def upgrade():
    from inbox.ignition import main_engine
    from inbox.models.session import session_scope
    from inbox.security.blobstorage import encode_blob
    engine = main_engine(pool_size=1, max_overflow=0)
    Base = declarative_base()
    Base.metadata.reflect(engine)

    class Message(Base):
        __table__ = Base.metadata.tables['message']

    with session_scope(versioned=False) as db_session:
        max_id, = db_session.query(sa.func.max(Message.id)).one()
        if max_id is None:
            max_id = 0
        for i in range(0, max_id, CHUNK_SIZE):
            messages = db_session.query(Message). \
                filter(Message.id > i, Message.id <= i + CHUNK_SIZE). \
                options(load_only('_compacted_body', 'sanitized_body'))
            for message in messages:
                if message._compacted_body is None:
                    message._compacted_body = encode_blob(
                        message.sanitized_body.encode('utf-8'))
            db_session.commit()

Example 47

Project: sync-engine Source File: 187_migrate_data_for_folders_overhaul.py
def migrate_messages(account_id):
    from inbox.models.session import session_scope
    from inbox.models import Message, Namespace
    from inbox.ignition import main_engine

    engine = main_engine(pool_size=1, max_overflow=0)

    with session_scope(versioned=False) as db_session:
        namespace = db_session.query(Namespace).filter_by(
            account_id=account_id).one()
        offset = 0
        while True:
            if engine.has_table('easuid'):
                additional_options = [subqueryload(Message.easuids)]
            else:
                additional_options = []

            messages = db_session.query(Message). \
                filter(Message.namespace_id == namespace.id). \
                options(load_only(Message.id, Message.is_read,
                                  Message.is_starred, Message.is_draft),
                        joinedload(Message.namespace).load_only('id'),
                        subqueryload(Message.imapuids),
                        subqueryload(Message.messagecategories),
                        *additional_options). \
                with_hint(Message, 'USE INDEX (ix_message_namespace_id)'). \
                order_by(asc(Message.id)).limit(1000).offset(offset).all()
            if not messages:
                return
            for message in messages:
                try:
                    message.update_metadata(message.is_draft)
                except IndexError:
                    # Can happen for messages without a folder.
                    pass
                log.info('Updated message', namespace_id=namespace.id,
                         message_id=message.id)
            db_session.commit()
            offset += 1000