django.db.models.Exists

Here are examples of the Python API django.db.models.Exists collected from open source projects. Each snippet is quoted from the file and project named in its header.

58 Examples
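
Exists wraps a queryset in a SQL EXISTS subquery; it is usually paired with OuterRef so the subquery is correlated with the row being evaluated by the outer query. A minimal sketch of the pattern most of the examples below follow, assuming hypothetical Book and Review models (not taken from any of the projects listed):

from django.db.models import Exists, OuterRef

# Correlated subquery: reviews belonging to the outer Book row.
reviews = Review.objects.filter(book=OuterRef('pk'), approved=True)

# Annotate each book with a boolean column and filter on it.
books = Book.objects.annotate(has_review=Exists(reviews)).filter(has_review=True)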

3 Source : admin.py
with Apache License 2.0
from aropan

    def queryset(self, request, queryset):
        if self.value() in ['yes', 'no']:
            logins = Login.objects.filter(team=OuterRef('pk'), stage=OuterRef('status'))
            queryset = queryset.annotate(has_login=Exists(logins))
            queryset = queryset.filter(has_login=self.value() == 'yes')
        return queryset


@admin_register(Team)
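
The snippet above uses the annotate-then-filter form. On Django 3.0 and later an Exists expression can also be passed directly to filter() or exclude() and negated with ~, a form several of the examples further down rely on. A reduced sketch reusing the Team and Login names from the snippet above, with the stage condition dropped for brevity:

logins = Login.objects.filter(team=OuterRef('pk'))
teams_with_login = Team.objects.filter(Exists(logins))      # WHERE EXISTS (...)
teams_without_login = Team.objects.filter(~Exists(logins))  # WHERE NOT EXISTS (...)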

3 Source : update_auto_rating.py
with Apache License 2.0
from aropan

    def handle(self, *args, **options):

        qs = AutoRating.objects.filter(deadline__gt=timezone.now())
        qs = qs.select_related('party')
        qs = qs.prefetch_related('party__rating_set')
        for auto_rating in tqdm.tqdm(qs, desc='update auto rating'):
            party = auto_rating.party
            contests = Contest.objects.filter(**auto_rating.info['filter'])

            party_contests = party.rating_set.filter(contest_id=OuterRef('pk'))
            contests = contests.annotate(in_party=Exists(party_contests)).filter(in_party=False)

            for contest in contests:
                rating = Rating(party=party, contest=contest)
                rating.save()

3 Source : models.py
with GNU Affero General Public License v3.0
from betagouv

    def has_approval(self):
        """
        Annotate values with a boolean `_has_approval` attribute which can be
        filtered, e.g.:
        EligibilityDiagnosis.objects.has_approval().filter(_has_approval=True)
        """
        has_approval = Approval.objects.filter(user=OuterRef("job_seeker")).valid()
        return self.annotate(_has_approval=Exists(has_approval))


class EligibilityDiagnosisManager(models.Manager):

3 Source : models.py
with GNU Affero General Public License v3.0
from betagouv

    def active_lookup(self):
        # Prefer a sub query to a join for performance reasons.
        # See `self.with_count_recent_received_job_apps`.
        has_active_convention = Exists(SiaeConvention.objects.filter(id=OuterRef("convention_id"), is_active=True))
        return (
            # GEIQ, EA, EATT, ACIPHC... have no convention logic and thus are always active.
            # `~` means NOT, similarly to dataframes.
            ~Q(kind__in=Siae.ASP_MANAGED_KINDS)
            # Staff created siaes are always active until eventually
            # converted to ASP source siaes by import_siae script.
            # Such siaes are created by our staff when ASP data is lacking
            # the most recent data about them.
            | Q(source=Siae.SOURCE_STAFF_CREATED)
            # ASP source siaes and user created siaes are active if and only
            # if they have an active convention.
            | has_active_convention
        )

    def with_has_convention_in_grace_period(self):

3 Source : models.py
with GNU Affero General Public License v3.0
from betagouv

    def with_has_convention_in_grace_period(self):
        now = timezone.now()
        grace_period = timezone.timedelta(days=SiaeConvention.DEACTIVATION_GRACE_PERIOD_IN_DAYS)
        # Prefer a sub query to a join for performance reasons.
        # See `self.with_count_recent_received_job_apps`.
        has_convention_in_grace_period = Exists(
            SiaeConvention.objects.filter(id=OuterRef("convention_id"), deactivated_at__gte=now - grace_period)
        )
        return self.annotate(has_convention_in_grace_period=has_convention_in_grace_period)

    def active(self):

3 Source : models.py
with GNU Affero General Public License v3.0
from betagouv

    def with_has_active_members(self):
        # Prefer a sub query to a join for performance reasons.
        # See `self.with_count_recent_received_job_apps`.
        return self.annotate(
            has_active_members=Exists(SiaeMembership.objects.filter(siae=OuterRef("pk"), is_active=True))
        )


class Siae(AddressMixin, OrganizationAbstract):

3 Source : admin.py
with GNU Affero General Public License v3.0
from betagouv

    def get_queryset(self, request):
        """
        Exclude superusers. The purpose is to prevent staff users
        from changing the password of a superuser.
        """
        qs = super().get_queryset(request)
        if not request.user.is_superuser:
            qs = qs.exclude(is_superuser=True)
        if request.resolver_match.view_name.endswith("changelist"):
            has_verified_email = EmailAddress.objects.filter(email=OuterRef("email"), verified=True)
            is_peamu = SocialAccount.objects.filter(user_id=OuterRef("pk"), provider="peamu")
            qs = qs.annotate(_has_verified_email=Exists(has_verified_email))
            qs = qs.annotate(_is_peamu=Exists(is_peamu))
        return qs

    def get_readonly_fields(self, request, obj=None):

3 Source : views.py
with MIT License
from csev

    def get(self, request) :
        if not request.user.is_authenticated:
            thing_list = Thing.objects.all()
        else:
            thing_list = Thing.objects.annotate(
                FAV_USER_ID=Exists(Fav.objects.filter(user=self.request.user,thing_id=OuterRef('id')))
                ).all()
        ctx = {'thing_list' : thing_list}
        return render(request, self.template_name, ctx)

# https://stackoverflow.com/questions/2314920/django-show-log-orm-sql-calls-from-python-shell
# pip install django-extensions
# ./manage.py shell_plus --print-sql

# Below this line, we see raw sql...   With great power comes great responsibility
# https://docs.djangoproject.com/en/3.0/topics/db/sql/

# A List view using raw SQL - super efficient
class RawSQLListView(OwnerListView):

3 Source : test_qs_combinators.py
with Apache License 2.0
from gethue

    def test_union_with_values_list_on_annotated_and_unannotated(self):
        ReservedName.objects.create(name='rn1', order=1)
        qs1 = Number.objects.annotate(
            has_reserved_name=Exists(ReservedName.objects.filter(order=OuterRef('num')))
        ).filter(has_reserved_name=True)
        qs2 = Number.objects.filter(num=9)
        self.assertCountEqual(qs1.union(qs2).values_list('num', flat=True), [1, 9])

    def test_count_union(self):

3 Source : next_task.py
with Apache License 2.0
from heartexlabs

def _try_ground_truth(tasks, project, user):
    """Returns task from ground truth set"""
    ground_truth = Annotation.objects.filter(task=OuterRef('pk'), ground_truth=True)
    not_solved_tasks_with_ground_truths = tasks.annotate(has_ground_truths=Exists(ground_truth)).filter(
        has_ground_truths=True
    )
    if not_solved_tasks_with_ground_truths.exists():
        if project.sampling == project.SEQUENCE:
            return _get_first_unlocked(not_solved_tasks_with_ground_truths, user)
        return _get_random_unlocked(not_solved_tasks_with_ground_truths, user)


def _try_tasks_with_overlap(tasks):

3 Source : syncphonenumberlookups.py
with GNU General Public License v3.0
from JustFixNYC

def find_users_without_lookups():
    """
    Return a QuerySet of users that we don't have phone number lookups for.
    """

    lookups = PhoneNumberLookup.objects.filter(phone_number=OuterRef("phone_number"))
    return JustfixUser.objects.annotate(is_phone_number_looked_up=Exists(lookups)).exclude(
        is_phone_number_looked_up=True
    )


class Command(BaseCommand):

3 Source : dataloaders.py
with GNU General Public License v3.0
from Saleor-Multi-Vendor

    def batch_load(self, keys):
        orders = Order.objects.filter(channel=OuterRef("pk"))
        channels = Channel.objects.annotate(has_orders=Exists(orders)).in_bulk(keys)
        return [channels.get(channel_id) for channel_id in keys]


class ShippingZonesByChannelIdLoader(DataLoader):

3 Source : filters.py
with GNU General Public License v3.0
from Saleor-Multi-Vendor

def filter_gift_cards_by_products(qs, product_ids):
    products = product_models.Product.objects.filter(pk__in=product_ids)
    return qs.filter(Exists(products.filter(pk=OuterRef("product_id"))))


def filter_used_by(qs, _, value):
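
Note the direction of the correlation in the gift card filters in this file (above and just below): the subquery is the related table restricted by pk__in, and its pk is matched against a foreign key column of the outer row (product_id, used_by_id), so the whole filter compiles to a correlated EXISTS rather than a join. Condensed to its essential shape, with qs standing for the gift card queryset:

products = Product.objects.filter(pk__in=product_ids)
# EXISTS correlated on the outer row's foreign key column, not on its pk.
gift_cards = qs.filter(Exists(products.filter(pk=OuterRef('product_id'))))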

3 Source : filters.py
with GNU General Public License v3.0
from Saleor-Multi-Vendor

def filter_gift_cards_by_used_by_user(qs, user_pks):
    users = account_models.User.objects.filter(pk__in=user_pks)
    return qs.filter(Exists(users.filter(pk=OuterRef("used_by_id"))))


def filter_tags_list(qs, _, value):

3 Source : middleware.py
with GNU General Public License v3.0
from Saleor-Multi-Vendor

def get_app(auth_token) -> Optional[App]:
    tokens = AppToken.objects.filter(auth_token=auth_token).values("pk")
    return App.objects.filter(
        Exists(tokens.filter(app_id=OuterRef("pk"))), is_active=True
    ).first()


def app_middleware(next, root, info, **kwargs):

3 Source : filters.py
with GNU General Public License v3.0
from Saleor-Multi-Vendor

def filter_products_by_categories(qs, category_ids):
    categories = Category.objects.filter(pk__in=category_ids)
    categories = Category.tree.get_queryset_descendants(
        categories, include_self=True
    ).values("pk")
    return qs.filter(Exists(categories.filter(pk=OuterRef("category_id"))))


def filter_products_by_collections(qs, collection_pks):

3 Source : filters.py
with GNU General Public License v3.0
from Saleor-Multi-Vendor

def filter_products_by_collections(qs, collection_pks):
    collection_products = CollectionProduct.objects.filter(
        collection_id__in=collection_pks
    ).values("product_id")
    return qs.filter(Exists(collection_products.filter(product_id=OuterRef("pk"))))


def filter_products_by_stock_availability(qs, stock_availability, channel_slug):

3 Source : filters.py
with GNU General Public License v3.0
from Saleor-Multi-Vendor

def filter_has_preordered_variants(qs, _, value):
    variants = (
        ProductVariant.objects.filter(is_preorder=True)
        .filter(
            Q(preorder_end_date__isnull=True) | Q(preorder_end_date__gt=timezone.now())
        )
        .values("product_id")
    )
    if value:
        return qs.filter(Exists(variants.filter(product_id=OuterRef("pk"))))
    else:
        return qs.filter(~Exists(variants.filter(product_id=OuterRef("pk"))))


def filter_collections(qs, _, value):

3 Source : filters.py
with GNU General Public License v3.0
from Saleor-Multi-Vendor

def filter_gift_card(qs, _, value):
    product_types = ProductType.objects.filter(kind=ProductTypeKind.GIFT_CARD)
    lookup = Exists(product_types.filter(id=OuterRef("product_type_id")))
    return qs.filter(lookup) if value is True else qs.exclude(lookup)


def filter_product_type_configurable(qs, _, value):

3 Source : resolvers.py
with GNU General Public License v3.0
from Saleor-Multi-Vendor

def resolve_products(info, requestor, channel_slug=None, **_kwargs) -> ChannelQsContext:
    qs = models.Product.objects.visible_to_user(requestor, channel_slug)
    if not has_one_of_permissions(requestor, ALL_PRODUCTS_PERMISSIONS):
        channels = Channel.objects.filter(slug=str(channel_slug))
        product_channel_listings = models.ProductChannelListing.objects.filter(
            Exists(channels.filter(pk=OuterRef("channel_id"))),
            visible_in_listings=True,
        )
        qs = qs.filter(
            Exists(product_channel_listings.filter(product_id=OuterRef("pk")))
        )
    return ChannelQsContext(qs=qs, channel_slug=channel_slug)


@traced_resolver

3 Source : models.py
with GNU General Public License v3.0
from Saleor-Multi-Vendor

    def published(self, channel_slug: str):
        today = datetime.date.today()
        channels = Channel.objects.filter(
            slug=str(channel_slug), is_active=True
        ).values("id")
        channel_listings = ProductChannelListing.objects.filter(
            Q(publication_date__lte=today) | Q(publication_date__isnull=True),
            Exists(channels.filter(pk=OuterRef("channel_id"))),
            is_published=True,
        ).values("id")
        return self.filter(Exists(channel_listings.filter(product_id=OuterRef("pk"))))

    def not_published(self, channel_slug: str):

3 Source : models.py
with GNU General Public License v3.0
from Saleor-Multi-Vendor

    def published_with_variants(self, channel_slug: str):
        published = self.published(channel_slug)
        channels = Channel.objects.filter(
            slug=str(channel_slug), is_active=True
        ).values("id")
        variant_channel_listings = ProductVariantChannelListing.objects.filter(
            Exists(channels.filter(pk=OuterRef("channel_id"))),
            price_amount__isnull=False,
        ).values("id")
        variants = ProductVariant.objects.filter(
            Exists(variant_channel_listings.filter(variant_id=OuterRef("pk")))
        )
        return published.filter(Exists(variants.filter(product_id=OuterRef("pk"))))

    def visible_to_user(self, requestor: Union["User", "App"], channel_slug: str):

3 Source : models.py
with GNU General Public License v3.0
from Saleor-Multi-Vendor

    def visible_to_user(self, requestor: Union["User", "App"], channel_slug: str):
        if has_one_of_permissions(requestor, ALL_PRODUCTS_PERMISSIONS):
            if channel_slug:
                channels = Channel.objects.filter(slug=str(channel_slug)).values("id")
                channel_listings = ProductChannelListing.objects.filter(
                    Exists(channels.filter(pk=OuterRef("channel_id")))
                ).values("id")
                return self.filter(
                    Exists(channel_listings.filter(product_id=OuterRef("pk")))
                )
            return self.all()
        return self.published_with_variants(channel_slug)

    def annotate_publication_info(self, channel_slug: str):

3 Source : django-django-forms-models.py
with Apache License 2.0
from SMAT-Lab

def apply_limit_choices_to_to_formfield(formfield):
    """Apply limit_choices_to to the formfield's queryset if needed."""
    from django.db.models import Exists, OuterRef, Q
    if hasattr(formfield, 'queryset') and hasattr(formfield, 'get_limit_choices_to'):
        limit_choices_to = formfield.get_limit_choices_to()
        if limit_choices_to:
            complex_filter = limit_choices_to
            if not isinstance(complex_filter, Q):
                complex_filter = Q(**limit_choices_to)
            complex_filter &= Q(pk=OuterRef('pk'))
            # Use Exists() to avoid potential duplicates.
            formfield.queryset = formfield.queryset.filter(
                Exists(formfield.queryset.model._base_manager.filter(complex_filter)),
            )


def fields_for_model(model, fields=None, exclude=None, widgets=None,
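
The comment "Use Exists() to avoid potential duplicates" deserves a word: filtering across a to-many relation with a plain lookup joins the related table and can emit the outer row once per matching related row, whereas an EXISTS subquery yields each outer row at most once, so no distinct() is needed. A contrived comparison with illustrative Author and Book models (not part of the Django source above):

# Join-based filter over a to-many relation: one Author row per matching Book, so duplicates are possible.
Author.objects.filter(book__title__icontains='django')

# Exists-based filter: each Author appears at most once, no distinct() needed.
matching_books = Book.objects.filter(author=OuterRef('pk'), title__icontains='django')
Author.objects.filter(Exists(matching_books))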

3 Source : views.py
with MIT License
from techlib

    def without_interest_definition(self, request, organization_pk):
        org_filter = organization_filter_from_org_id(organization_pk, request.user)
        import_batch_query = ImportBatch.objects.filter(platform_id=OuterRef('pk'))
        qs = Platform.objects.filter(**org_filter, interest_reports__isnull=True).annotate(
            has_data=Exists(import_batch_query)
        )
        return Response(DetailedPlatformSerializer(qs, many=True).data)

    @action(methods=['GET'], url_path='title-count', detail=False)

0 Source : backends.py
with MIT License
from Air-999

    def with_perm(self, perm, is_active=True, include_superusers=True, obj=None):
        """
        Return users that have permission "perm". By default, filter out
        inactive users and include superusers.
        """
        if isinstance(perm, str):
            try:
                app_label, codename = perm.split('.')
            except ValueError:
                raise ValueError(
                    'Permission name should be in the form '
                    'app_label.permission_codename.'
                )
        elif not isinstance(perm, Permission):
            raise TypeError(
                'The `perm` argument must be a string or a permission instance.'
            )

        UserModel = get_user_model()
        if obj is not None:
            return UserModel._default_manager.none()

        permission_q = Q(group__user=OuterRef('pk')) | Q(user=OuterRef('pk'))
        if isinstance(perm, Permission):
            permission_q &= Q(pk=perm.pk)
        else:
            permission_q &= Q(codename=codename, content_type__app_label=app_label)

        user_q = Exists(Permission.objects.filter(permission_q))
        if include_superusers:
            user_q |= Q(is_superuser=True)
        if is_active is not None:
            user_q &= Q(is_active=is_active)

        return UserModel._default_manager.filter(user_q)

    def get_user(self, user_id):
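
Because Exists is a conditional expression, it can be combined with Q objects using &, | and ~, which is exactly what with_perm() does above when it ORs in is_superuser and ANDs in is_active. Trimmed to that composition:

permission_q = Q(group__user=OuterRef('pk')) | Q(user=OuterRef('pk'))
condition = Exists(Permission.objects.filter(permission_q))
condition |= Q(is_superuser=True)   # OR in another condition
condition &= Q(is_active=True)      # AND a further restriction
users = get_user_model()._default_manager.filter(condition)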

0 Source : mailing_password.py
with Apache License 2.0
from aropan

    def handle(self, *args, **options):
        conf_file = options['conf_file']
        if os.path.exists(conf_file):
            with open(options['conf_file'], 'r') as fo:
                conf = yaml.safe_load(fo)
        else:
            conf = {}

        event = Event.objects.filter(name__regex=options['event']).order_by('-created').first()
        self.stdout.write(f'event = {event}')
        status = getattr(TeamStatus, options['status'].upper())

        event = Event.objects.order_by('-created').first()
        logins_subquery = Login.objects.filter(team=OuterRef('pk'))
        teams = Team.objects \
            .filter(modified__lt=timezone.now() - timedelta(minutes=15), event=event, status=status) \
            .annotate(has_login=Exists(logins_subquery)) \
            .filter(has_login=False)

        cache = {}
        done = 0
        if not options['dryrun']:
            for t in tqdm.tqdm(teams):
                created, login = t.attach_login(cache=cache)
                if created:
                    done += 1
        self.stdout.write(f'Successfully attach login: {done} of {teams.count()}')

        logins = Login.objects.filter(is_sent=False, team__event=event)

        n_attempet = 3
        failed = 0
        done = 0
        time_wait_on_success = 2
        time_wait_on_failed = 10
        if not options['dryrun']:
            with event.email_backend() as connection:
                for login in tqdm.tqdm(logins):
                    for i in range(n_attempet):
                        if login.send_email(connection=connection):
                            done += 1
                            time.sleep(time_wait_on_success)
                            break
                        time.sleep(time_wait_on_failed)
                    else:
                        failed += 1
        self.stdout.write(f'Successfully mailing: {done} of {logins.count()} ({failed} fails)')

        if options['ya_contest_id']:
            logins = Login.objects.filter(is_sent=True, team__event=event)
            filter_time = conf.get('change_names_last_update_time')
            if not options['dryrun']:
                conf['change_names_last_update_time'] = timezone.now()
            if filter_time:
                filter_time = arrow.get(filter_time).format()
                logins = logins.filter(
                    Q(modified__gte=filter_time) |
                    Q(team__modified__gte=filter_time) |
                    Q(team__participants__modified__gte=filter_time) |
                    Q(team__participants__organization__modified__gte=filter_time)
                ).distinct('pk')
            names = []
            for login in logins:
                names.append({'login': login.username, 'name': login.team.title})
            if names and not options['dryrun']:
                result = change_names(contest_id=options['ya_contest_id'], names=names)
            else:
                result = ''
            self.stdout.write(f'Change names for {len(names)} team(s) = {result}')

        with open(options['conf_file'], 'w') as fo:
            yaml.dump(conf, fo, indent=2)

0 Source : parse_statistic.py
with Apache License 2.0
from aropan

    def handle(self, *args, **options):
        self.stdout.write(str(options))
        args = AttrDict(options)

        if args.resources:
            if len(args.resources) == 1:
                contests = Contest.objects.filter(resource__module__resource__host__iregex=args.resources[0])
            else:
                resources = [Resource.objects.get(host__iregex=r) for r in args.resources]
                contests = Contest.objects.filter(resource__module__resource__host__in=resources)
        else:
            has_module = Module.objects.filter(resource_id=OuterRef('resource__pk'))
            contests = Contest.objects.annotate(has_module=Exists(has_module)).filter(has_module=True)

        if args.only_new:
            has_statistics = Statistics.objects.filter(contest_id=OuterRef('pk'))
            contests = contests.annotate(has_statistics=Exists(has_statistics)).filter(has_statistics=False)

        if args.year:
            contests = contests.filter(start_time__year=args.year)

        if args.stage:
            contests = contests.filter(stage__isnull=False)

        if args.division:
            contests = contests.filter(info__problems__division__isnull=False)

        self.parse_statistic(
            contests=contests,
            previous_days=args.days,
            limit=args.limit,
            with_check=not args.no_check_timing,
            stop_on_error=args.stop_on_error,
            random_order=args.random_order,
            no_update_results=args.no_update_results,
            freshness_days=args.freshness_days,
            title_regex=args.event,
            users=args.users,
            with_stats=not args.no_stats,
            update_without_new_rating=args.update_without_new_rating,
            force_problems=args.force_problems,
        )

0 Source : views.py
with Apache License 2.0
from aropan

def standings_list(request, template='standings_list.html', extra_context=None):
    contests = Contest.objects \
        .select_related('timing') \
        .select_related('resource') \
        .annotate(has_module=Exists(Module.objects.filter(resource=OuterRef('resource_id')))) \
        .filter(Q(n_statistics__gt=0) | Q(end_time__lte=timezone.now())) \
        .order_by('-end_time', 'pk')

    all_standings = False
    if request.user.is_authenticated:
        all_standings = request.user.coder.settings.get('all_standings')

    switch = request.GET.get('switch')
    if bool(all_standings) == bool(switch) and switch != 'all' or switch == 'parsed':
        contests = contests.filter(Q(invisible=False) | Q(stage__isnull=False))
        contests = contests.filter(n_statistics__gt=0, has_module=True)
        if request.user.is_authenticated:
            contests = contests.filter(request.user.coder.get_contest_filter(['list']))

    search = request.GET.get('search')
    if search is not None:
        contests = contests.filter(get_iregex_filter(
            search,
            'title', 'host', 'resource__host',
            mapping={
                'name': {'fields': ['title__iregex']},
                'slug': {'fields': ['slug']},
                'resource': {'fields': ['host', 'resource__host'], 'suff': '__iregex'},
                'writer': {'fields': ['info__writers__contains']},
                'coder': {'fields': ['statistics__account__coders__username']},
                'account': {'fields': ['statistics__account__key', 'statistics__account__name'], 'suff': '__iregex'},
                'stage': {'fields': ['stage'], 'suff': '__isnull', 'func': lambda v: False},
                'medal': {'fields': ['info__standings__medals'], 'suff': '__isnull', 'func': lambda v: False},
                'year': {'fields': ['start_time__year', 'end_time__year']},
            },
            logger=request.logger,
        ))

    resources = [r for r in request.GET.getlist('resource') if r]
    if resources:
        contests = contests.filter(resource_id__in=resources)
        resources = list(Resource.objects.filter(pk__in=resources))

    if request.user.is_authenticated:
        contests = contests.prefetch_related(Prefetch(
            'statistics_set',
            to_attr='stats',
            queryset=Statistics.objects.filter(account__coders=request.user.coder),
        ))

    active_stage_query = Q(stage__isnull=False, end_time__gt=timezone.now())
    context = {
        'stages': contests.filter(active_stage_query),
        'contests': contests.exclude(active_stage_query),
        'timezone': get_timezone(request),
        'timeformat': get_timeformat(request),
        'all_standings': all_standings,
        'switch': switch,
        'params': {
            'resources': resources,
        },
    }

    if extra_context is not None:
        context.update(extra_context)

    return render(request, template, context)


def _standings_highlight(statistics, options):

0 Source : models.py
with GNU Affero General Public License v3.0
from betagouv

    def with_has_suspended_approval(self):
        has_suspended_approval = Suspension.objects.filter(approval=OuterRef("approval")).in_progress()
        return self.annotate(has_suspended_approval=Exists(has_suspended_approval))

    def with_last_change(self):

0 Source : models.py
with GNU Affero General Public License v3.0
from betagouv

    def eligible_as_employee_record(self, siae):
        """
        List job applications that will have to be transferred to ASP
        via the employee record app.

        These job applications must:
        - be definitely accepted
        - have no one-to-one relationship with an employee record
        - have been created after production date

        An eligible job application *may* or *may not* have an employee record object linked
        to it.

        For instance, when creating a new employee record from an eligible job application
        and NOT finishing the entire creation process.
        (employee record object creation occurs halfway through the "tunnel")
        """

        # Exclude existing employee records with same approval and asp_id
        # Rule: you can only create *one* employee record for a given asp_id / approval pair
        subquery = Subquery(
            self.exclude(to_siae=siae).filter(
                employee_record__asp_id=siae.asp_id,
                employee_record__approval_number=OuterRef("approval__number"),
            )
        )

        return (
            # Job application without approval are out of scope
            self.exclude(approval=None)
            # Prevent employee records creation (batch import for example).
            .filter(create_employee_record=True)
            # See `subquery` above : exclude possible ASP duplicates
            .exclude(Exists(subquery))
            # Only ACCEPTED job applications can be transformed into employee records
            .accepted()
            # Accept only job applications without linked or processed employee record
            .filter(Q(employee_record__status="NEW") | Q(employee_record__isnull=True))
            .filter(
                # Only for current SIAE
                to_siae=siae,
                # Hiring must start after production date:
                hiring_start_at__gte=settings.EMPLOYEE_RECORD_FEATURE_AVAILABILITY_DATE,
            )
            .select_related("job_seeker", "approval")
            .order_by("-hiring_start_at")
        )


class JobApplication(xwf_models.WorkflowEnabled, models.Model):

0 Source : move_siae_data.py
with GNU Affero General Public License v3.0
from betagouv

def move_siae_data(from_id, to_id, dry_run=False, only_job_applications=False):
    if from_id == to_id:
        logger.error("Unable to use the same siae as source and destination (ID %s)", from_id)
        return

    from_siae_qs = siaes_models.Siae.objects.filter(pk=from_id)
    try:
        from_siae = from_siae_qs.get()
    except siaes_models.Siae.DoesNotExist:
        logger.error("Unable to find the siae ID %s", from_id)
        return

    to_siae_qs = siaes_models.Siae.objects.filter(pk=to_id)
    try:
        to_siae = to_siae_qs.get()
    except siaes_models.Siae.DoesNotExist:
        logger.error("Unable to find the siae ID %s", to_id)
        return

    if from_siae.kind != to_siae.kind:
        logger.error("Both siaes should have the same kind but they don't")
        return

    # Intermediate variable for better readability
    move_all_data = not only_job_applications

    logger.info(
        "MOVE %s OF siae.id=%s - %s %s - %s",
        "DATA" if move_all_data else "JOB APPLICATIONS",
        from_siae.pk,
        from_siae.kind,
        from_siae.siret,
        from_siae.display_name,
    )

    job_applications_sent = job_applications_models.JobApplication.objects.filter(sender_siae_id=from_id)
    logger.info("| Job applications sent: %s", job_applications_sent.count())

    job_applications_received = job_applications_models.JobApplication.objects.filter(to_siae_id=from_id)
    logger.info("| Job applications received: %s", job_applications_received.count())

    if move_all_data:
        # Move job descriptions not already present in the destination siae; related job applications will be attached
        # to the job description present in the destination siae
        appellation_subquery = Subquery(
            siaes_models.SiaeJobDescription.objects.filter(siae_id=to_id, appellation_id=OuterRef("appellation_id"))
        )
        job_descriptions = siaes_models.SiaeJobDescription.objects.filter(siae_id=from_id).exclude(
            Exists(appellation_subquery)
        )
        logger.info("| Job descriptions: %s", job_descriptions.count())

        # Move users not already present in siae destination
        members = siaes_models.SiaeMembership.objects.filter(siae_id=from_id).exclude(
            user__in=users_models.User.objects.filter(siaemembership__siae_id=to_id)
        )
        logger.info("| Members: %s", members.count())

        diagnoses = eligibility_models.EligibilityDiagnosis.objects.filter(author_siae_id=from_id)
        logger.info("| Diagnoses: %s", diagnoses.count())

        prolongations = approvals_models.Prolongation.objects.filter(declared_by_siae_id=from_id)
        logger.info("| Prolongations: %s", prolongations.count())

        suspensions = approvals_models.Suspension.objects.filter(siae_id=from_id)
        logger.info("| Suspensions: %s", suspensions.count())

        # Don't move invitations for existing members
        # The goal is to keep information about the original information
        invitations = invitations_models.SiaeStaffInvitation.objects.filter(siae_id=from_id).exclude(
            email__in=users_models.User.objects.filter(siaemembership__siae_id=to_id).values_list("email", flat=True)
        )
        logger.info("| Invitations: %s", invitations.count())

    logger.info(
        "INTO siae.id=%s - %s %s - %s",
        to_siae.pk,
        to_siae.kind,
        to_siae.siret,
        to_siae.display_name,
    )

    dest_siae_job_applications_sent = job_applications_models.JobApplication.objects.filter(sender_siae_id=to_id)
    logger.info("| Job applications sent: %s", dest_siae_job_applications_sent.count())

    dest_siae_job_applications_received = job_applications_models.JobApplication.objects.filter(to_siae_id=to_id)
    logger.info("| Job applications received: %s", dest_siae_job_applications_received.count())

    if move_all_data:
        logger.info(f"| Brand '{to_siae.brand}' will be updated with '{from_siae.display_name}'")
        logger.info(f"| Description \n{to_siae.description}\nwill be updated with\n{from_siae.description}")
        logger.info(f"| Phone '{to_siae.phone}' will be updated with '{from_siae.phone}'")
        logger.info(f"| Coords '{to_siae.coords}' will be updated with '{from_siae.coords}'")
        logger.info(f"| Geoscore '{to_siae.geocoding_score}' will be updated with '{from_siae.geocoding_score}'")

    if dry_run:
        logger.info("Nothing to do in dry run mode.")
        return

    with transaction.atomic():

        # If we move the job applications without moving the job descriptions as well, we need to unlink them,
        # as job applications will be attached to siae B but job descriptions will stay attached to siae A.
        if only_job_applications:
            for job_application in job_applications_sent:
                job_application.selected_jobs.clear()
            for job_application in job_applications_received:
                job_application.selected_jobs.clear()

        # If we move job_descriptions, we have to take care of existing job_descriptions linked to siae B (destination),
        # because we can't have 2 job_applications with the same Appellation for one siae. Job applications linked to
        # this kind of job_description have to be unlinked before being transferred. Job_descriptions can be different
        # enough to be irrelevant.
        if move_all_data:
            # find Appellation linked to job_description siae B
            to_siae_appellation_id = siaes_models.SiaeJobDescription.objects.filter(siae_id=to_id).values_list(
                "appellation_id", flat=True
            )

            # find job_applications in siae A, linked with job_description which Appellation is found in siae B
            job_applications_to_clear = job_applications_models.JobApplication.objects.filter(
                to_siae_id=from_id,
                selected_jobs__in=siaes_models.SiaeJobDescription.objects.filter(
                    siae_id=from_id, appellation_id__in=to_siae_appellation_id
                ),
            )

            # clean job_applications to let them be transfered in siae B
            for job_application in job_applications_to_clear:
                job_application.selected_jobs.clear()

        job_applications_sent.update(sender_siae_id=to_id)
        job_applications_received.update(to_siae_id=to_id)

        if move_all_data:
            # do not move duplicated job_descriptions
            job_descriptions.exclude(appellation_id__in=to_siae_appellation_id).update(siae_id=to_id)
            members.update(siae_id=to_id)
            diagnoses.update(author_siae_id=to_id)
            prolongations.update(declared_by_siae_id=to_id)
            suspensions.update(siae_id=to_id)
            invitations.update(siae_id=to_id)
            to_siae_qs.update(
                brand=from_siae.display_name,
                description=from_siae.description,
                phone=from_siae.phone,
                coords=from_siae.coords,
                geocoding_score=from_siae.geocoding_score,
            )
            from_siae_qs.update(
                block_job_applications=True,
                job_applications_blocked_at=timezone.now(),
                # Make sure the old siae no longer appears in results
                coords=None,
                geocoding_score=None,
            )

    logger.info("MOVE %s OF siae.id=%s FINISHED", "DATA" if move_all_data else "JOB APPLICATIONS", from_siae.pk)
    orig_job_applications_sent = job_applications_models.JobApplication.objects.filter(sender_siae_id=from_id)
    logger.info("| Job applications sent: %s", orig_job_applications_sent.count())

    orig_job_applications_received = job_applications_models.JobApplication.objects.filter(to_siae_id=from_id)
    logger.info("| Job applications received: %s", orig_job_applications_received.count())

    logger.info("INTO siae.id=%s", to_siae.pk)

    dest_siae_job_applications_sent = job_applications_models.JobApplication.objects.filter(sender_siae_id=to_id)
    logger.info("| Job applications sent: %s", dest_siae_job_applications_sent.count())

    dest_siae_job_applications_received = job_applications_models.JobApplication.objects.filter(to_siae_id=to_id)
    logger.info("| Job applications received: %s", dest_siae_job_applications_received.count())


class Command(BaseCommand):

0 Source : models.py
with GNU Affero General Public License v3.0
from betagouv

    def with_job_app_score(self):
        """
        Employer search results boost SIAEs which did not receive enough job applications
        compared to their total number of job descriptions.
        To do so, the following score is computed:
        ** (total of recent job applications) / (total of active job descriptions) **
        """
        # Transform integer into a float to avoid any weird side effect.
        # See self.with_count_recent_received_job_apps()
        count_recent_received_job_apps = Cast("count_recent_received_job_apps", output_field=models.FloatField())

        # Check if a job description exists before computing the score.
        has_active_job_desc = Exists(SiaeJobDescription.objects.filter(siae=OuterRef("pk"), is_active=True))

        # Transform integer into a float to avoid any weird side effect.
        # See self.with_count_active_job_descriptions
        count_active_job_descriptions = Cast("count_active_job_descriptions", output_field=models.FloatField())

        # Score computing.
        get_score = Cast(
            count_recent_received_job_apps / count_active_job_descriptions, output_field=models.FloatField()
        )

        return (
            self.with_count_recent_received_job_apps()
            .with_count_active_job_descriptions()
            .annotate(
                job_app_score=Case(
                    When(has_active_job_desc, then=get_score),
                    default=None,
                )
            )
        )

    def with_has_active_members(self):

0 Source : query.py
with MIT License
from blb-ventures

def filter_for_user(
    qs: QuerySet,
    user: UserType,
    perms: TypeOrIterable[str],
    *,
    any_perm: bool = True,
    with_groups: bool = True,
    with_superuser: bool = False,
):
    if with_superuser and user.is_active and user.is_superuser:
        return qs

    if user.is_anonymous:
        return qs.none()

    if isinstance(perms, str):
        perms = [perms]

    model = qs.model
    if model._meta.concrete_model:
        model = model._meta.concrete_model

    # We don't want to query the database here because this might not be async safe
    # Try to retrieve the ContentType from cache. If it is not there, we will
    # query it through the queryset
    ctype: Optional[ContentType] = None
    try:
        meta = model._meta
        ctype = ContentType.objects._get_from_cache(meta)  # type:ignore
    except KeyError:  # pragma:nocover
        # If we are not running async, retrieve it
        if not is_async():
            ctype = ContentType.objects.get_for_model(model)

    app_labels = set()
    perms_list = []
    for p in perms:
        parts = p.split(".")
        if len(parts) > 1:
            app_labels.add(parts[0])
        perms_list.append(parts[-1])

    if len(app_labels) == 1 and ctype is not None:
        app_label = app_labels.pop()
        if app_label != ctype.app_label:  # pragma:nocover
            raise ValueError(
                f"Given perms must have same app label ({app_label!r} != {ctype.app_label!r})"
            )
    elif len(app_labels) > 1:  # pragma:nocover
        raise ValueError(f"Cannot mix app_labels ({app_labels!r})")

    # Small optimization if the user's permissions are cached
    if hasattr(user, "_perm_cache"):  # pragma:nocover
        f = any if any_perm else all
        user_perms: Set[str] = {
            p.codename for p in user._perm_cache  # type:ignore
        }
        if f(p in user_perms for p in perms_list):
            return qs

    q = Q(
        Exists(
            _filter(
                user.user_permissions,
                perms_list,
                model=model,
                ctype=ctype,
            )
        )
    )
    if with_groups:
        q |= Q(
            Exists(
                _filter(
                    user.groups,
                    perms_list,
                    lookup="permissions",
                    model=model,
                    ctype=ctype,
                )
            )
        )

    if has_guardian:
        perm_models = get_object_permission_models(qs.model)

        user_model = perm_models.user
        user_qs = _filter(
            user_model.objects.filter(user=user),
            perms_list,
            lookup="permission",
            model=model,
            ctype=ctype,
        )
        if user_model.objects.is_generic():
            user_qs = user_qs.filter(content_type=F("permission__content_type"))
        else:
            user_qs = user_qs.annotate(object_pk=F("content_object"))

        obj_qs = user_qs.values_list("object_pk", flat=True).distinct()

        if with_groups:
            group_model = perm_models.group
            groups_field = get_user_model()._meta.get_field("groups")
            group_qs = _filter(
                group_model.objects.filter(
                    **{
                        f"group__{groups_field.related_query_name()}": user,  # type:ignore
                    },
                ),
                perms_list,
                lookup="permission",
                model=model,
                ctype=ctype,
            )
            if group_model.objects.is_generic():
                group_qs = group_qs.filter(content_type=F("permission__content_type"))
            else:
                group_qs = group_qs.annotate(object_pk=F("content_object"))

            obj_qs = obj_qs.union(group_qs.values_list("object_pk", flat=True).distinct())

        q |= Q(pk__in=obj_qs)

    return qs.filter(q)

0 Source : views.py
with MIT License
from cmu-lib

def article_archive(request, article_id):
    """
    : article_id = an int representing the pk of the article requested
    Displays a list of previous version of an article
    """
    # get current article
    article = get_object_or_404(Article, pk=article_id)

    # determine if current article is archived.
    is_article_archived = Archive.objects.filter(issue__articles=article).exists()

    # ensure current article is either an update or the parent of another article
    if hasattr(article, 'version'):
        base_article = article.version.base_article
    else:
        base_article = article

    # Create a subquery to check if articles have any archived editions at all
    archives_subquery = Archive.objects.filter(issue__articles=OuterRef('pk'))

    # get queryset of all articles with same base_article (including original
    # base article) and compute a boolean field is_archived
    versions = Article.objects.filter(
            (
                Q(version__base_article=base_article) | Q(pk=base_article.pk)
            ),
             stage=STAGE_PUBLISHED
        ).order_by('-date_published').annotate(is_archived=Exists(archives_subquery))

    context = {
                'base_article': base_article,
                'orig_article_archived': is_article_archived,
                'orig_article': article,
                'versions': versions,
                'journal': request.journal
                }

    template = "archive_plugin/article_version_list.html"
    return render(request, template, context)


@author_user_required

0 Source : admin_data_downloads.py
with GNU General Public License v3.0
from JustFixNYC

def execute_evictionfree_users_query(user):
    return (
        JustfixUser.objects.values(
            "id",
            "date_joined",
            "first_name",
            "last_name",
            "email",
            "phone_number",
            "locale",
            "onboarding_info__address",
            "onboarding_info__state",
            "onboarding_info__zipcode",
            "onboarding_info__apt_number",
            "onboarding_info__can_rtc_sms",
            "onboarding_info__can_hj4a_sms",
            "hardship_declaration_details__index_number",
            "hardship_declaration_details__has_financial_hardship",
            "hardship_declaration_details__has_health_risk",
            borough_if_inside_nyc=F("onboarding_info__borough"),
            city_if_outside_nyc=F("onboarding_info__non_nyc_city"),
            hardship_declaration_mailed_at=F("submitted_hardship_declaration__mailed_at"),
            hardship_declaration_emailed_at=F("submitted_hardship_declaration__emailed_at"),
            bbl=F("onboarding_info__pad_bbl"),
            is_nycha_bbl=Exists(
                NychaProperty.objects.filter(pad_bbl=OuterRef("onboarding_info__pad_bbl"))
            ),
        )
        .filter(
            onboarding_info__agreed_to_evictionfree_terms=True,
        )
        .order_by("id")
    )


DATA_DOWNLOADS = [
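
The query above also shows that Exists can be supplied as a named expression to values() (supported since Django 2.1), yielding a boolean key in every returned dict. Reduced to the relevant part:

users = JustfixUser.objects.values(
    'id',
    'phone_number',
    is_nycha_bbl=Exists(
        NychaProperty.objects.filter(pad_bbl=OuterRef('onboarding_info__pad_bbl'))
    ),
)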

0 Source : models.py
with MIT License
from rossm6

    def cash_book_in_and_out_report(self, current_cb_period):
        """
        Used in the dashboard

        Get the ins and outs for each of five consecutive periods
        where the current_cb_period is the middle period.
        """
        earlier_periods = Period.objects.filter(fy_and_period__lt=current_cb_period.fy_and_period).values(
            'fy_and_period').order_by("-fy_and_period")
        later_periods = Period.objects.filter(fy_and_period__gt=current_cb_period.fy_and_period).values(
            'fy_and_period').order_by("fy_and_period")
        return (
            self
            .annotate(
                monies_in=Case(
                    When(value__gt=0, then=F('value')),
                    default=Value('0')
                )
            )
            .annotate(
                monies_out=Case(
                    When(value__lt=0, then=F('value') * -1),
                    default=0
                )
            )
            .values('period__fy_and_period')
            .annotate(
                total_monies_in=Sum('monies_in')
            )
            .annotate(
                total_monies_out=Sum('monies_out')
            )
            .filter(
                period__fy_and_period__gte=(
                    Period.objects.annotate(
                        earliest_period=(
                            Case(
                                When(
                                    Exists(earlier_periods),
                                    then=earlier_periods[:1]
                                ),
                                default=Value("000000")
                            )
                        )
                    ).values('earliest_period')[:1]
                )
            )
            .filter(
                period__fy_and_period__lte=(
                    Period.objects.annotate(
                        latest_period=(
                            Case(
                                When(
                                    Exists(later_periods),
                                    then=later_periods[:1]
                                ),
                                default=Value("999999")
                            )
                        )
                    ).values('latest_period')[:1]
                )
            )
        )


class CashBookTransaction(MultiLedgerTransactions):
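
Exists is also accepted as the condition of a When() clause (Django 3.0+), which is how the period-range query above falls back to '000000' or '999999' when no earlier or later period exists. A reduced sketch of that shape, keeping the Period model and current_cb_period from the snippet:

earlier_periods = Period.objects.filter(
    fy_and_period__lt=current_cb_period.fy_and_period
).values('fy_and_period').order_by('-fy_and_period')

Period.objects.annotate(
    earliest_period=Case(
        When(Exists(earlier_periods), then=earlier_periods[:1]),  # the sliced queryset acts as a scalar subquery
        default=Value('000000'),
    )
)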

0 Source : forms.py
with MIT License
from rossm6

    def __init__(self, *args, **kwargs):
        super().__init__(*args, **kwargs)
        """
        You cannot post into a period in a FY which has been finalised.
        """
        t = NominalTransaction.objects.filter(module="NL").filter(type="nbf").values(
            "period__fy_and_period").order_by("-period__fy_and_period")
        w = When(Exists(t), then=t[:1])
        q = (
            Period.objects.filter(
                fy_and_period__gte=(
                    Period.objects.annotate(
                        earliest_period=(
                            Case(w, default=Value("000000"))
                        )
                    ).values('earliest_period')[:1]
                )
            )
        )
        for field in self.fields:
            q = q.all()
            self.fields[field].queryset = q
        self.helper = FormHelper()
        self.helper.layout = Layout(
            HTML(
                "  <  h1 class='font-weight-bold h5'>Module Settings < /h1>",
            ),
            Div(
                Div(
                    LabelAndFieldAndErrors(
                        'cash_book_period', css_class="w-100"),
                    css_class="my-1 col-12"
                ),
                Div(
                    LabelAndFieldAndErrors(
                        'nominals_period', css_class="w-100"),
                    css_class="my-1 col-12"
                ),
                Div(
                    LabelAndFieldAndErrors(
                        'purchases_period', css_class="w-100"),
                    css_class="my-1 col-12"
                ),
                Div(
                    LabelAndFieldAndErrors('sales_period', css_class="w-100"),
                    css_class="my-1 col-12"
                ),
                css_class="row"
            ),
            Submit("save", "Save", css_class="mt-3")
        )

0 Source : filters.py
with GNU General Public License v3.0
from Saleor-Multi-Vendor

def filter_user_search(qs, _, value):
    if value:
        UserAddress = User.addresses.through
        addresses = Address.objects.filter(
            Q(first_name__ilike=value)
            | Q(last_name__ilike=value)
            | Q(city__ilike=value)
            | Q(country__ilike=value)
            | Q(phone=value)
        ).values("id")
        user_addresses = UserAddress.objects.filter(
            Exists(addresses.filter(pk=OuterRef("address_id")))
        ).values("user_id")
        qs = qs.filter(
            Q(email__ilike=value)
            | Q(first_name__ilike=value)
            | Q(last_name__ilike=value)
            | Q(Exists(user_addresses.filter(user_id=OuterRef("pk"))))
        )
    return qs


def filter_search(qs, _, value):

0 Source : filters.py
with GNU General Public License v3.0
from Saleor-Multi-Vendor

def _clean_product_attributes_range_filter_input(filter_value, queries):
    attributes = Attribute.objects.filter(input_type=AttributeInputType.NUMERIC)
    values = (
        AttributeValue.objects.filter(
            Exists(attributes.filter(pk=OuterRef("attribute_id")))
        )
        .annotate(numeric_value=Cast("name", FloatField()))
        .select_related("attribute")
    )

    attributes_map: Dict[str, int] = {}
    values_map: Dict[str, Dict[str, int]] = defaultdict(dict)
    for value_data in values.values_list(
        "attribute_id", "attribute__slug", "pk", "numeric_value"
    ):
        attr_pk, attr_slug, pk, numeric_value = value_data
        attributes_map[attr_slug] = attr_pk
        values_map[attr_slug][numeric_value] = pk

    for attr_name, val_range in filter_value:
        if attr_name not in attributes_map:
            raise ValueError("Unknown numeric attribute name: %r" % (attr_name,))
        gte, lte = val_range.get("gte", 0), val_range.get("lte", math.inf)
        attr_pk = attributes_map[attr_name]
        attr_values = values_map[attr_name]
        matching_values = [
            value for value in attr_values.keys() if gte <= value and lte >= value
        ]
        attr_val_pks = [attr_values[value] for value in matching_values]
        queries[attr_pk] += attr_val_pks


def _clean_product_attributes_date_time_range_filter_input(

0 Source : filters.py
with GNU General Public License v3.0
from Saleor-Multi-Vendor

def filter_products_by_attributes_values(qs, queries: T_PRODUCT_FILTER_QUERIES):
    filters = []
    for values in queries.values():
        assigned_product_attribute_values = (
            AssignedProductAttributeValue.objects.filter(value_id__in=values)
        )
        assigned_product_attributes = AssignedProductAttribute.objects.filter(
            Exists(
                assigned_product_attribute_values.filter(assignment_id=OuterRef("pk"))
            )
        )
        product_attribute_filter = Q(
            Exists(assigned_product_attributes.filter(product_id=OuterRef("pk")))
        )

        assigned_variant_attribute_values = (
            AssignedVariantAttributeValue.objects.filter(value_id__in=values)
        )
        assigned_variant_attributes = AssignedVariantAttribute.objects.filter(
            Exists(
                assigned_variant_attribute_values.filter(assignment_id=OuterRef("pk"))
            )
        )
        product_variants = ProductVariant.objects.filter(
            Exists(assigned_variant_attributes.filter(variant_id=OuterRef("pk")))
        )
        variant_attribute_filter = Q(
            Exists(product_variants.filter(product_id=OuterRef("pk")))
        )

        filters.append(product_attribute_filter | variant_attribute_filter)

    return qs.filter(*filters)


def filter_products_by_attributes(

0 Source : filters.py
with GNU General Public License v3.0
from Saleor-Multi-Vendor

def filter_products_by_variant_price(qs, channel_slug, price_lte=None, price_gte=None):
    channels = Channel.objects.filter(slug=channel_slug).values("pk")
    product_variant_channel_listings = ProductVariantChannelListing.objects.filter(
        Exists(channels.filter(pk=OuterRef("channel_id")))
    )
    if price_lte:
        product_variant_channel_listings = product_variant_channel_listings.filter(
            Q(price_amount__lte=price_lte) | Q(price_amount__isnull=True)
        )
    if price_gte:
        product_variant_channel_listings = product_variant_channel_listings.filter(
            Q(price_amount__gte=price_gte) | Q(price_amount__isnull=True)
        )
    product_variant_channel_listings = product_variant_channel_listings.values(
        "variant_id"
    )
    variants = ProductVariant.objects.filter(
        Exists(product_variant_channel_listings.filter(variant_id=OuterRef("pk")))
    ).values("product_id")
    return qs.filter(Exists(variants.filter(product_id=OuterRef("pk"))))


def filter_products_by_minimal_price(

0 Source : filters.py
with GNU General Public License v3.0
from Saleor-Multi-Vendor

def filter_products_by_minimal_price(
    qs, channel_slug, minimal_price_lte=None, minimal_price_gte=None
):
    channels = Channel.objects.filter(slug=channel_slug).values("pk")
    product_channel_listings = ProductChannelListing.objects.filter(
        Exists(channels.filter(pk=OuterRef("channel_id")))
    )
    if minimal_price_lte:
        product_channel_listings = product_channel_listings.filter(
            discounted_price_amount__lte=minimal_price_lte,
            discounted_price_amount__isnull=False,
        )
    if minimal_price_gte:
        product_channel_listings = product_channel_listings.filter(
            discounted_price_amount__gte=minimal_price_gte,
            discounted_price_amount__isnull=False,
        )
    product_channel_listings = product_channel_listings.values("product_id")
    return qs.filter(Exists(product_channel_listings.filter(product_id=OuterRef("pk"))))


def filter_products_by_categories(qs, category_ids):

0 Source : filters.py
with GNU General Public License v3.0
from Saleor-Multi-Vendor

def filter_products_by_stock_availability(qs, stock_availability, channel_slug):
    allocations = (
        Allocation.objects.values("stock_id")
        .filter(quantity_allocated__gt=0, stock_id=OuterRef("pk"))
        .values_list(Sum("quantity_allocated"))
    )
    allocated_subquery = Subquery(queryset=allocations, output_field=IntegerField())

    stocks = list(
        Stock.objects.for_channel(channel_slug)
        .filter(quantity__gt=Coalesce(allocated_subquery, 0))
        .values_list("product_variant_id", flat=True)
    )

    variants = ProductVariant.objects.filter(pk__in=stocks).values("product_id")

    if stock_availability == StockAvailability.IN_STOCK:
        qs = qs.filter(Exists(variants.filter(product_id=OuterRef("pk"))))
    if stock_availability == StockAvailability.OUT_OF_STOCK:
        qs = qs.filter(~Exists(variants.filter(product_id=OuterRef("pk"))))
    return qs


def _filter_attributes(qs, _, value):

0 Source : filters.py
with GNU General Public License v3.0
from Saleor-Multi-Vendor

def _filter_products_is_published(qs, _, value, channel_slug):
    channel = Channel.objects.filter(slug=channel_slug).values("pk")
    product_channel_listings = ProductChannelListing.objects.filter(
        Exists(channel.filter(pk=OuterRef("channel_id"))), is_published=value
    ).values("product_id")

    # Filter out product for which there is no variant with price
    variant_channel_listings = ProductVariantChannelListing.objects.filter(
        Exists(channel.filter(pk=OuterRef("channel_id"))),
        price_amount__isnull=False,
    ).values("id")
    variants = ProductVariant.objects.filter(
        Exists(variant_channel_listings.filter(variant_id=OuterRef("pk")))
    ).values("product_id")

    return qs.filter(
        Exists(product_channel_listings.filter(product_id=OuterRef("pk"))),
        Exists(variants.filter(product_id=OuterRef("pk"))),
    )


def _filter_variant_price(qs, _, value, channel_slug):
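
_filter_products_is_published passes two Exists clauses as positional arguments to filter(), which ANDs them: a product must have a published channel listing and at least one priced variant. The same pattern, again with the placeholder models:

from django.db.models import Exists, OuterRef

has_positive_review = Exists(
    Review.objects.filter(book_id=OuterRef("pk"), rating__gte=4)
)
has_negative_review = Exists(
    Review.objects.filter(book_id=OuterRef("pk"), rating__lte=2)
)

# Positional arguments to filter() are combined with AND.
polarising_books = Book.objects.filter(has_positive_review, has_negative_review)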

0 Source : filters.py
with GNU General Public License v3.0
from Saleor-Multi-Vendor

def product_search(qs, phrase):
    """Return matching products for storefront views.

    Name and description are matched using the search vector.

    Args:
        qs (ProductsQueryset): the data set to search
        phrase (str): the phrase to search for

    """
    query = SearchQuery(phrase, config="english")
    vector = F("search_vector")
    ft_in_description_or_name = Q(search_vector=query)

    variants = ProductVariant.objects.filter(sku=phrase).values("id")
    ft_by_sku = Q(Exists(variants.filter(product_id=OuterRef("pk"))))

    return (
        qs.annotate(rank=SearchRank(vector, query))
        .filter((ft_in_description_or_name | ft_by_sku))
        .order_by("-rank", "id")
    )


def filter_search(qs, _, value):
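
product_search wraps the SKU Exists in a Q object so it can be OR'ed with the full-text condition. A hedged sketch of the same OR pattern with the placeholder models (wrapping a bare expression in Q() like this relies on the Django 3.x series that Saleor targets):

from django.db.models import Exists, OuterRef, Q

matching_reviews = Review.objects.filter(
    book_id=OuterRef("pk"), text__icontains="gripping"
)
# Match either on the book's own title or on the text of any of its reviews.
books = Book.objects.filter(
    Q(title__icontains="gripping") | Q(Exists(matching_reviews))
)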

0 Source : resolvers.py
with GNU General Public License v3.0
from Saleor-Multi-Vendor

def resolve_report_product_sales(period, channel_slug) -> ChannelQsContext:
    qs = models.ProductVariant.objects.all()

    # filter by period
    qs = filter_by_period(qs, period, "order_lines__order__created")

    # annotate quantity
    qs = qs.annotate(quantity_ordered=Sum("order_lines__quantity"))

    # filter by channel and order status
    channels = Channel.objects.filter(slug=channel_slug).values("pk")
    exclude_status = [OrderStatus.DRAFT, OrderStatus.CANCELED]
    orders = Order.objects.exclude(status__in=exclude_status).filter(
        Exists(channels.filter(pk=OuterRef("channel_id")).values("pk"))
    )
    qs = qs.filter(
        Exists(orders.filter(pk=OuterRef("order_lines__order_id"))),
        quantity_ordered__isnull=False,
    )

    # order by quantity ordered
    qs = qs.order_by("-quantity_ordered")

    return ChannelQsContext(qs=qs, channel_slug=channel_slug)

0 Source : dataloaders.py
with GNU General Public License v3.0
from Saleor-Multi-Vendor

    def batch_load_quantities_by_country(
        self,
        country_code: Optional[CountryCode],
        channel_slug: Optional[str],
        variant_ids: Iterable[int],
    ) -> Iterable[Tuple[int, int]]:
        # get stocks only for warehouses assigned to the shipping zones
        # that are available in the given channel
        stocks = Stock.objects.filter(product_variant_id__in=variant_ids)
        WarehouseShippingZone = Warehouse.shipping_zones.through  # type: ignore
        warehouse_shipping_zones = WarehouseShippingZone.objects.all()
        additional_warehouse_filter = False
        if country_code or channel_slug:
            additional_warehouse_filter = True
            if country_code:
                shipping_zones = ShippingZone.objects.filter(
                    countries__contains=country_code
                ).values("pk")
                warehouse_shipping_zones = warehouse_shipping_zones.filter(
                    Exists(shipping_zones.filter(pk=OuterRef("shippingzone_id")))
                )
            if channel_slug:
                ShippingZoneChannel = Channel.shipping_zones.through  # type: ignore
                channels = Channel.objects.filter(slug=channel_slug).values("pk")
                shipping_zone_channels = ShippingZoneChannel.objects.filter(
                    Exists(channels.filter(pk=OuterRef("channel_id")))
                ).values("shippingzone_id")
                warehouse_shipping_zones = warehouse_shipping_zones.filter(
                    Exists(
                        shipping_zone_channels.filter(
                            shippingzone_id=OuterRef("shippingzone_id")
                        )
                    )
                )
        warehouse_shipping_zones_map = defaultdict(list)
        for warehouse_shipping_zone in warehouse_shipping_zones:
            warehouse_shipping_zones_map[warehouse_shipping_zone.warehouse_id].append(
                warehouse_shipping_zone.shippingzone_id
            )
        if additional_warehouse_filter:
            stocks = stocks.filter(warehouse_id__in=warehouse_shipping_zones_map.keys())
        stocks = stocks.annotate_available_quantity()

        # A single country code (or a missing country code) can return results from
        # multiple shipping zones. We want to combine all quantities within a single
        # zone and then find out which zone contains the highest total.
        quantity_by_shipping_zone_by_product_variant: DefaultDict[
            int, DefaultDict[int, int]
        ] = defaultdict(lambda: defaultdict(int))
        for stock in stocks:
            quantity = max(0, stock.available_quantity)
            variant_id = stock.product_variant_id
            warehouse_id = stock.warehouse_id
            shipping_zone_ids = warehouse_shipping_zones_map[warehouse_id]
            for shipping_zone_id in shipping_zone_ids:
                quantity_by_shipping_zone_by_product_variant[variant_id][
                    shipping_zone_id
                ] += quantity

        quantity_map: DefaultDict[int, int] = defaultdict(int)
        for (
            variant_id,
            quantity_by_shipping_zone,
        ) in quantity_by_shipping_zone_by_product_variant.items():
            quantity_values = quantity_by_shipping_zone.values()
            if country_code:
                # When the country code is known, return the sum of quantities
                # from all shipping zones supporting the given country.
                quantity_map[variant_id] = sum(quantity_values)
            else:
                # When the country code is unknown, return the highest known quantity.
                quantity_map[variant_id] = max(quantity_values)

        # Return the quantities after capping them at the maximum quantity allowed in
        # checkout. This prevents users from tracking the store's precise stock levels.
        return [
            (
                variant_id,
                min(quantity_map[variant_id], settings.MAX_CHECKOUT_LINE_QUANTITY),
            )
            for variant_id in variant_ids
        ]


class StocksWithAvailableQuantityByProductVariantIdCountryCodeAndChannelLoader(

0 Source : models.py
with GNU General Public License v3.0
from Saleor-Multi-Vendor

    def for_channel(self, channel_slug: str):
        ShippingZoneChannel = Channel.shipping_zones.through  # type: ignore
        WarehouseShippingZone = ShippingZone.warehouses.through  # type: ignore
        channels = Channel.objects.filter(slug=channel_slug).values("pk")
        shipping_zone_channels = ShippingZoneChannel.objects.filter(
            Exists(channels.filter(pk=OuterRef("channel_id")))
        ).values("shippingzone_id")
        warehouse_shipping_zones = WarehouseShippingZone.objects.filter(
            Exists(
                shipping_zone_channels.filter(
                    shippingzone_id=OuterRef("shippingzone_id")
                )
            )
        ).values("warehouse_id")
        return self.select_related("product_variant").filter(
            Exists(
                warehouse_shipping_zones.filter(warehouse_id=OuterRef("warehouse_id"))
            )
        )

    def for_country_and_channel(self, country_code: str, channel_slug):
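
Both the warehouse dataloader and this for_channel manager reach for the auto-generated many-to-many through models (Channel.shipping_zones.through, ShippingZone.warehouses.through) and correlate Exists subqueries on their *_id columns. A minimal sketch of that through-table pattern with hypothetical Article/Tag models:

from django.db import models
from django.db.models import Exists, OuterRef


class Tag(models.Model):  # hypothetical models, for illustration only
    slug = models.SlugField(unique=True)


class Article(models.Model):
    tags = models.ManyToManyField(Tag)


# The implicit through table exposes article_id and tag_id columns.
ArticleTag = Article.tags.through

featured_tags = Tag.objects.filter(slug="featured").values("pk")
tagged = ArticleTag.objects.filter(
    Exists(featured_tags.filter(pk=OuterRef("tag_id")))
).values("article_id")
articles = Article.objects.filter(Exists(tagged.filter(article_id=OuterRef("pk"))))

A plain Article.objects.filter(tags__slug="featured") join would return the same rows here; the subquery form sidesteps the DISTINCT that a join across a many-to-many can require and keeps each hop reusable as its own values() queryset, which is what the Saleor code does at larger scale.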

0 Source : cleanup.py
with MIT License
from techlib

def clean_obsolete_platform_title_links(pretend=False):
    """
    Doing this in a single query is possible, but it takes a very long time, so
    we iterate over (platform, organization) pairs instead.
    :return: Counter with the number of removed links under the 'removed' key.
    """
    stats = Counter()
    for platform_id, organization_id in PlatformTitle.objects.values_list(
        'platform_id', 'organization_id'
    ).distinct():
        accesslog_query = AccessLog.objects.filter(
            organization=organization_id,
            platform=platform_id,
            target=OuterRef('title'),
            date=OuterRef('date'),
        )
        qs = (
            PlatformTitle.objects.filter(organization_id=organization_id, platform_id=platform_id)
            .annotate(valid=Exists(accesslog_query))
            .exclude(valid=True)
        )
        if pretend:
            count = qs.count()
            stats['removed'] += count
        else:
            count, details = qs.delete()
            stats['removed'] += count
        logger.debug('%5d %5d %6d', platform_id, organization_id, count)
    return stats
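
The annotate(valid=Exists(...)).exclude(valid=True) pair above keeps only the PlatformTitle rows with no matching AccessLog. On Django 3.0 or newer the same selection can be expressed by filtering on the negated expression directly; the annotate form remains useful on older versions, or when the boolean is also wanted in the results. A possible equivalent, assuming Django 3.0+:

qs = PlatformTitle.objects.filter(
    organization_id=organization_id, platform_id=platform_id
).filter(~Exists(accesslog_query))

Because Exists never yields NULL, exclude(valid=True) and filter(~Exists(...)) select exactly the same rows here.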
