Here are examples of the Python API `django.utils.six.moves.reduce`, taken from open-source projects. By voting up, you can indicate which examples are most useful and appropriate.
14 Examples
3
Example 1
Project: django-user-accounts Source File: models.py
@classmethod
def exists(cls, code=None, email=None):
    """Return True if a signup code matching ``code`` or ``email`` exists.

    Either argument may be omitted (or falsy); the supplied conditions are
    OR-ed together. With neither supplied there is nothing to match, so
    False is returned without hitting the database.
    """
    checks = []
    if code:
        checks.append(Q(code=code))
    if email:
        # Bug fix: this previously appended Q(email=code), so an
        # email-only lookup compared the email column against the
        # (None) code argument and could never match.
        checks.append(Q(email=email))
    if not checks:
        return False
    return cls._default_manager.filter(six.moves.reduce(operator.or_, checks)).exists()
3
Example 2
def process_filters(self, filters, queryset, view):
    """AND together a query object for every word in every filter value.

    Returns ``filters`` unchanged when it is falsy. Otherwise every
    (field, query) pair in ``filters.children`` is split on spaces, each
    word is cleaned through the queryset's query, and all truthy query
    objects built by ``view.query_object`` are combined with ``and_``.
    """
    if not filters:
        return filters
    pieces = [
        view.query_object(**{field_name: queryset.query.clean(word.strip())})
        for field_name, raw_query in filters.children
        for word in raw_query.split(" ")
    ]
    truthy_pieces = [piece for piece in pieces if piece]
    return six.moves.reduce(operator.and_, truthy_pieces)
3
Example 3
Project: django-advanced-filters Source File: forms.py
def generate_query(self):
    """ Reduces multiple queries into a single usable query """
    current = Q()
    or_groups = []
    for form in self._non_deleted_forms:
        # Forms without cleaned_data failed validation; skip them.
        if not hasattr(form, 'cleaned_data'):
            continue
        if form.cleaned_data['field'] == "_OR":
            # An "_OR" row closes the current AND-group and starts a new one.
            or_groups.append(current)
            current = Q()
        else:
            current = current & form.make_query()
    if not or_groups:
        return current
    if current:  # add last query for OR if any
        or_groups.append(current)
    return reduce(operator.or_, or_groups)
0
Example 4
def handle_merge(self, loader, conflicts):
    """
    Handles merging together conflicted migrations interactively,
    if it's safe; otherwise, advises on how to fix it.

    ``conflicts`` maps an app label to the names of its conflicting leaf
    migrations. For each app, the branches' common ancestor is found, the
    divergent operations are shown, and (after confirmation) an empty
    "<NNNN>_merge" migration depending on every branch is written out.
    """
    if self.interactive:
        questioner = InteractiveMigrationQuestioner()
    else:
        # Non-interactive runs answer "yes" to the merge prompt.
        questioner = MigrationQuestioner(defaults={'ask_merge': True})
    for app_label, migration_names in conflicts.items():
        # Grab out the migrations in question, and work out their
        # common ancestor.
        merge_migrations = []
        for migration_name in migration_names:
            migration = loader.get_migration(app_label, migration_name)
            migration.ancestry = loader.graph.forwards_plan((app_label, migration_name))
            merge_migrations.append(migration)
        common_ancestor = None
        # Walk the ancestries in lockstep; the last level on which every
        # branch agrees is the common ancestor.
        for level in zip(*[m.ancestry for m in merge_migrations]):
            if reduce(operator.eq, level):
                # NOTE(review): reduce(operator.eq, ...) only tests "all
                # equal" for exactly two branches; with three or more it
                # folds a bool back into the next comparison — confirm
                # against upstream before relying on >2-way merges.
                common_ancestor = level[0]
            else:
                break
        if common_ancestor is None:
            raise ValueError("Could not find common ancestor of %s" % migration_names)
        # Now work out the operations along each divergent branch
        for migration in merge_migrations:
            migration.branch = migration.ancestry[
                (migration.ancestry.index(common_ancestor) + 1):
            ]
            migration.merged_operations = []
            for node_app, node_name in migration.branch:
                migration.merged_operations.extend(
                    loader.get_migration(node_app, node_name).operations
                )
        # In future, this could use some of the Optimizer code
        # (can_optimize_through) to automatically see if they're
        # mergeable. For now, we always just prompt the user.
        if self.verbosity > 0:
            self.stdout.write(self.style.MIGRATE_HEADING("Merging %s" % app_label))
            for migration in merge_migrations:
                self.stdout.write(self.style.MIGRATE_LABEL(" Branch %s" % migration.name))
                for operation in migration.merged_operations:
                    self.stdout.write(" - %s\n" % operation.describe())
        if questioner.ask_merge(app_label):
            # If they still want to merge it, then write out an empty
            # file depending on the migrations needing merging.
            numbers = [
                MigrationAutodetector.parse_number(migration.name)
                for migration in merge_migrations
            ]
            try:
                biggest_number = max([x for x in numbers if x is not None])
            except ValueError:
                # No branch name carried a numeric prefix at all.
                biggest_number = 1
            subclass = type("Migration", (Migration, ), {
                "dependencies": [(app_label, migration.name) for migration in merge_migrations],
            })
            new_migration = subclass("%04i_merge" % (biggest_number + 1), app_label)
            writer = MigrationWriter(new_migration)
            with open(writer.path, "wb") as fh:
                fh.write(writer.as_string())
            if self.verbosity > 0:
                self.stdout.write("\nCreated new merge migration %s" % writer.path)
0
Example 5
def _alter_field(self, model, old_field, new_field, old_type, new_type, old_db_params, new_db_params, strict=False):
    """Actually perform a "physical" (non-ManyToMany) field update.

    Drops constraints/indexes the new field no longer needs, renames the
    column if required, applies type/default/null alterations (combined
    into one ALTER when the backend supports it), then recreates
    constraints/indexes on the other side. When ``strict`` is true, a
    ValueError is raised if the live schema does not match ``old_field``
    (wrong number of matching constraints found).
    """
    # Drop any FK constraints, we'll remake them later
    fks_dropped = set()
    if old_field.rel and old_field.db_constraint:
        fk_names = self._constraint_names(model, [old_field.column], foreign_key=True)
        if strict and len(fk_names) != 1:
            raise ValueError("Found wrong number (%s) of foreign key constraints for %s.%s" % (
                len(fk_names),
                model._meta.db_table,
                old_field.column,
            ))
        for fk_name in fk_names:
            # Recorded only as a truthy marker; checked when deciding
            # whether to recreate the FK near the end of this method.
            fks_dropped.add((old_field.column,))
            self.execute(self._delete_constraint_sql(self.sql_delete_fk, model, fk_name))
    # Has unique been removed?
    if old_field.unique and (not new_field.unique or (not old_field.primary_key and new_field.primary_key)):
        # Find the unique constraint for this field
        constraint_names = self._constraint_names(model, [old_field.column], unique=True)
        if strict and len(constraint_names) != 1:
            raise ValueError("Found wrong number (%s) of unique constraints for %s.%s" % (
                len(constraint_names),
                model._meta.db_table,
                old_field.column,
            ))
        for constraint_name in constraint_names:
            self.execute(self._delete_constraint_sql(self.sql_delete_unique, model, constraint_name))
    # Drop incoming FK constraints if we're a primary key and things are going
    # to change.
    if old_field.primary_key and new_field.primary_key and old_type != new_type:
        for rel in new_field.model._meta.get_all_related_objects():
            rel_fk_names = self._constraint_names(rel.model, [rel.field.column], foreign_key=True)
            for fk_name in rel_fk_names:
                self.execute(self._delete_constraint_sql(self.sql_delete_fk, rel.model, fk_name))
    # Removed an index? (no strict check, as multiple indexes are possible)
    if (old_field.db_index and not new_field.db_index and
            not old_field.unique and not
            (not new_field.unique and old_field.unique)):
        # Find the index for this field
        index_names = self._constraint_names(model, [old_field.column], index=True)
        for index_name in index_names:
            self.execute(self._delete_constraint_sql(self.sql_delete_index, model, index_name))
    # Change check constraints?
    if old_db_params['check'] != new_db_params['check'] and old_db_params['check']:
        constraint_names = self._constraint_names(model, [old_field.column], check=True)
        if strict and len(constraint_names) != 1:
            raise ValueError("Found wrong number (%s) of check constraints for %s.%s" % (
                len(constraint_names),
                model._meta.db_table,
                old_field.column,
            ))
        for constraint_name in constraint_names:
            self.execute(self._delete_constraint_sql(self.sql_delete_check, model, constraint_name))
    # Have they renamed the column?
    if old_field.column != new_field.column:
        self.execute(self.sql_rename_column % {
            "table": self.quote_name(model._meta.db_table),
            "old_column": self.quote_name(old_field.column),
            "new_column": self.quote_name(new_field.column),
            "type": new_type,
        })
    # Next, start accumulating actions to do
    actions = []
    null_actions = []
    post_actions = []
    # Type change?
    if old_type != new_type:
        fragment, other_actions = self._alter_column_type_sql(model._meta.db_table, new_field.column, new_type)
        actions.append(fragment)
        post_actions.extend(other_actions)
    # When changing a column NULL constraint to NOT NULL with a given
    # default value, we need to perform 4 steps:
    # 1. Add a default for new incoming writes
    # 2. Update existing NULL rows with new default
    # 3. Replace NULL constraint with NOT NULL
    # 4. Drop the default again.
    # Default change?
    old_default = self.effective_default(old_field)
    new_default = self.effective_default(new_field)
    needs_database_default = (
        old_default != new_default and
        new_default is not None and
        not self.skip_default(new_field)
    )
    if needs_database_default:
        if self.connection.features.requires_literal_defaults:
            # Some databases can't take defaults as a parameter (oracle)
            # If this is the case, the individual schema backend should
            # implement prepare_default
            actions.append((
                self.sql_alter_column_default % {
                    "column": self.quote_name(new_field.column),
                    "default": self.prepare_default(new_default),
                },
                [],
            ))
        else:
            actions.append((
                self.sql_alter_column_default % {
                    "column": self.quote_name(new_field.column),
                    "default": "%s",
                },
                [new_default],
            ))
    # Nullability change?
    if old_field.null != new_field.null:
        if new_field.null:
            null_actions.append((
                self.sql_alter_column_null % {
                    "column": self.quote_name(new_field.column),
                    "type": new_type,
                },
                [],
            ))
        else:
            null_actions.append((
                self.sql_alter_column_not_null % {
                    "column": self.quote_name(new_field.column),
                    "type": new_type,
                },
                [],
            ))
    # Only if we have a default and there is a change from NULL to NOT NULL
    four_way_default_alteration = (
        new_field.has_default() and
        (old_field.null and not new_field.null)
    )
    if actions or null_actions:
        if not four_way_default_alteration:
            # If we don't have to do a 4-way default alteration we can
            # directly run a (NOT) NULL alteration
            actions = actions + null_actions
        # Combine actions together if we can (e.g. postgres)
        if self.connection.features.supports_combined_alters and actions:
            sql, params = tuple(zip(*actions))
            actions = [(", ".join(sql), reduce(operator.add, params))]
        # Apply those actions
        for sql, params in actions:
            self.execute(
                self.sql_alter_column % {
                    "table": self.quote_name(model._meta.db_table),
                    "changes": sql,
                },
                params,
            )
        if four_way_default_alteration:
            # Update existing rows with default value
            self.execute(
                self.sql_update_with_default % {
                    "table": self.quote_name(model._meta.db_table),
                    "column": self.quote_name(new_field.column),
                    "default": "%s",
                },
                [new_default],
            )
            # Since we didn't run a NOT NULL change before we need to do it
            # now
            for sql, params in null_actions:
                self.execute(
                    self.sql_alter_column % {
                        "table": self.quote_name(model._meta.db_table),
                        "changes": sql,
                    },
                    params,
                )
    if post_actions:
        for sql, params in post_actions:
            self.execute(sql, params)
    # Added a unique?
    if not old_field.unique and new_field.unique:
        self.execute(self._create_unique_sql(model, [new_field.column]))
    # Added an index?
    if (not old_field.db_index and new_field.db_index and
            not new_field.unique and not
            (not old_field.unique and new_field.unique)):
        self.execute(self._create_index_sql(model, [new_field], suffix="_uniq"))
    # Type alteration on primary key? Then we need to alter the column
    # referring to us.
    rels_to_update = []
    if old_field.primary_key and new_field.primary_key and old_type != new_type:
        rels_to_update.extend(new_field.model._meta.get_all_related_objects())
    # Changed to become primary key?
    # Note that we don't detect unsetting of a PK, as we assume another field
    # will always come along and replace it.
    if not old_field.primary_key and new_field.primary_key:
        # First, drop the old PK
        constraint_names = self._constraint_names(model, primary_key=True)
        if strict and len(constraint_names) != 1:
            raise ValueError("Found wrong number (%s) of PK constraints for %s" % (
                len(constraint_names),
                model._meta.db_table,
            ))
        for constraint_name in constraint_names:
            self.execute(self._delete_constraint_sql(self.sql_delete_pk, model, constraint_name))
        # Make the new one
        self.execute(
            self.sql_create_pk % {
                "table": self.quote_name(model._meta.db_table),
                "name": self.quote_name(self._create_index_name(model, [new_field.column], suffix="_pk")),
                "columns": self.quote_name(new_field.column),
            }
        )
        # Update all referencing columns
        rels_to_update.extend(new_field.model._meta.get_all_related_objects())
    # Handle our type alters on the other end of rels from the PK stuff above
    for rel in rels_to_update:
        rel_db_params = rel.field.db_parameters(connection=self.connection)
        rel_type = rel_db_params['type']
        self.execute(
            self.sql_alter_column % {
                "table": self.quote_name(rel.model._meta.db_table),
                "changes": self.sql_alter_column_type % {
                    "column": self.quote_name(rel.field.column),
                    "type": rel_type,
                }
            }
        )
    # Does it have a foreign key?
    if new_field.rel and \
            (fks_dropped or (old_field.rel and not old_field.db_constraint)) and \
            new_field.db_constraint:
        self.execute(self._create_fk_sql(model, new_field, "_fk_%(to_table)s_%(to_column)s"))
    # Rebuild FKs that pointed to us if we previously had to drop them
    if old_field.primary_key and new_field.primary_key and old_type != new_type:
        for rel in new_field.model._meta.get_all_related_objects():
            self.execute(self._create_fk_sql(rel.model, rel.field, "_fk"))
    # Does it have check constraints we need to add?
    if old_db_params['check'] != new_db_params['check'] and new_db_params['check']:
        self.execute(
            self.sql_create_check % {
                "table": self.quote_name(model._meta.db_table),
                "name": self.quote_name(self._create_index_name(model, [new_field.column], suffix="_check")),
                "column": self.quote_name(new_field.column),
                "check": new_db_params['check'],
            }
        )
    # Drop the default if we need to
    # (Django usually does not use in-database defaults)
    if needs_database_default:
        sql = self.sql_alter_column % {
            "table": self.quote_name(model._meta.db_table),
            "changes": self.sql_alter_column_no_default % {
                "column": self.quote_name(new_field.column),
            }
        }
        self.execute(sql)
    # Reset connection if required
    if self.connection.features.connection_persists_old_columns:
        self.connection.close()
0
Example 6
def render_data(self, state, review_request):
    """Return the rendered contents of the column.

    Renders each target group's name followed by a single space; the
    trailing separator matches the previous reduce-based implementation.
    Returns '' when there are no target groups.
    """
    groups = review_request.target_groups.all()
    # str.join builds the result in one pass; the previous
    # reduce(lambda a, d: a + d.name + ' ', ...) created a new string per
    # element, which is quadratic in the number of groups.
    return ''.join(group.name + ' ' for group in groups)
0
Example 7
def render_data(self, state, review_request):
    """Return the rendered contents of the column.

    Renders each target reviewer's username followed by a single space;
    the trailing separator matches the previous reduce-based
    implementation. Returns '' when there are no target people.
    """
    people = review_request.target_people.all()
    # str.join builds the result in one pass; the previous
    # reduce(lambda a, d: a + d.username + ' ', ...) created a new string
    # per element, which is quadratic in the number of people.
    return ''.join(person.username + ' ' for person in people)
0
Example 8
def alter_field(self, model, old_field, new_field, strict=False):
    """
    Allows a field's type, uniqueness, nullability, default, column,
    constraints etc. to be modified.
    Requires a copy of the old field as well so we can only perform
    changes that are required.
    If strict is true, raises errors if the old column does not match old_field precisely.

    Dispatches to _alter_many_to_many when both sides are implicit M2Ms;
    otherwise drops obsolete constraints, renames the column, applies
    type/default/null alterations (combined where supported), and
    recreates constraints on the new column.
    """
    # Ensure this field is even column-based
    old_db_params = old_field.db_parameters(connection=self.connection)
    old_type = old_db_params['type']
    new_db_params = new_field.db_parameters(connection=self.connection)
    new_type = new_db_params['type']
    # A None type means "no column" — both None with auto-created through
    # models is an implicit M2M-to-M2M change.
    if old_type is None and new_type is None \
            and (old_field.rel.through and new_field.rel.through and old_field.rel.through._meta.auto_created
            and new_field.rel.through._meta.auto_created):
        return self._alter_many_to_many(model, old_field, new_field, strict)
    elif old_type is None or new_type is None:
        raise ValueError(
            "Cannot alter field %s into %s - they are not compatible types "
            "(probably means only one is an M2M with implicit through model)" % (
                old_field,
                new_field,
            )
        )
    # Has unique been removed?
    if old_field.unique and (not new_field.unique or (not old_field.primary_key and new_field.primary_key)):
        # Find the unique constraint for this field
        constraint_names = self._constraint_names(model, [old_field.column], unique=True)
        if strict and len(constraint_names) != 1:
            raise ValueError("Found wrong number (%s) of unique constraints for %s.%s" % (
                len(constraint_names),
                model._meta.db_table,
                old_field.column,
            ))
        for constraint_name in constraint_names:
            self.execute(*self._delete_db_constraint_sql(model, constraint_name, constraint_type='unique'))
    # Removed an index?
    if old_field.db_index and not new_field.db_index and not old_field.unique \
            and not (not new_field.unique and old_field.unique):
        # Find the index for this field
        index_names = self._constraint_names(model, [old_field.column], index=True)
        if strict and len(index_names) != 1:
            raise ValueError("Found wrong number (%s) of indexes for %s.%s" % (
                len(index_names),
                model._meta.db_table,
                old_field.column,
            ))
        for index_name in index_names:
            self.execute(*self._delete_db_constraint_sql(model, index_name, constraint_type='index'))
    # Drop any FK constraints, we'll remake them later
    if old_field.rel:
        fk_names = self._constraint_names(model, [old_field.column], foreign_key=True)
        if strict and len(fk_names) != 1:
            raise ValueError("Found wrong number (%s) of foreign key constraints for %s.%s" % (
                len(fk_names),
                model._meta.db_table,
                old_field.column,
            ))
        for fk_name in fk_names:
            self.execute(*self._delete_db_constraint_sql(model, fk_name, constraint_type='fk'))
    # Drop incoming FK constraints if we're a primary key and things are going
    # to change.
    if old_field.primary_key and new_field.primary_key and old_type != new_type:
        for rel in new_field.model._meta.get_all_related_objects():
            rel_fk_names = self._constraint_names(rel.model, [rel.field.column], foreign_key=True)
            for fk_name in rel_fk_names:
                # NOTE(review): drops against `model`, but the constraint
                # name came from `rel.model` — looks like it should be
                # rel.model; confirm against upstream before changing.
                self.execute(*self._delete_db_constraint_sql(model, fk_name, constraint_type='fk'))
    # Change check constraints?
    if old_db_params['check'] != new_db_params['check'] and old_db_params['check']:
        constraint_names = self._constraint_names(model, [old_field.column], check=True)
        if strict and len(constraint_names) != 1:
            raise ValueError("Found wrong number (%s) of check constraints for %s.%s" % (
                len(constraint_names),
                model._meta.db_table,
                old_field.column,
            ))
        for constraint_name in constraint_names:
            self.execute(*self._delete_db_constraint_sql(model, constraint_name, constraint_type='check'))
    # Have they renamed the column?
    if old_field.column != new_field.column:
        self.rename_db_column(model, old_field.column, new_field.column, new_type)
    # Next, start accumulating actions to do
    actions = []
    post_actions = []
    # Type change?
    if old_type != new_type:
        type_actions = self._alter_db_column_sql(model, new_field.column, 'type',
            values={
                'type': new_type,
                'old_type': old_type,
            },
            fragment=True,
        )
        actions.extend(type_actions[0])
        post_actions.extend(type_actions[1])
    # Default change?
    old_default = self.effective_default(old_field)
    new_default = self.effective_default(new_field)
    if old_default != new_default:
        if new_default is None:
            default_actions = self._alter_db_column_sql(model, new_field.column, 'no_default',
                fragment=True)
        else:
            default_sql, default_params = self.prepare_default(new_default)
            default_actions = self._alter_db_column_sql(model, new_field.column, 'default',
                values={'default': default_sql}, fragment=True, params=default_params)
        actions.extend(default_actions[0])
        post_actions.extend(default_actions[1])
    # Nullability change?
    if old_field.null != new_field.null:
        alteration = 'null' if new_field.null else 'not_null'
        null_actions = self._alter_db_column_sql(model, new_field.column, alteration,
            values={'type': new_type}, fragment=True)
        actions.extend(null_actions[0])
        post_actions.extend(null_actions[1])
    if actions:
        # Combine actions together if we can (e.g. postgres)
        if self.connection.features.supports_combined_alters:
            sql, params = tuple(zip(*actions))
            actions = [(", ".join(sql), reduce(operator.add, params))]
        # Apply those actions
        for sql, params in actions:
            if sql:
                self.execute(
                    self.sql_alter_column % {
                        "table": self.quote_name(model._meta.db_table),
                        "changes": sql,
                    },
                    params,
                )
    if post_actions:
        for sql, params in post_actions:
            if sql:
                self.execute(sql, params)
    # Added a unique?
    if not old_field.unique and new_field.unique:
        self.execute(*self._create_db_constraint_sql(model, new_field.column, 'unique'))
    # Added an index?
    if not old_field.db_index and new_field.db_index and not new_field.unique \
            and not (not old_field.unique and new_field.unique):
        # NOTE(review): this creates a 'unique' constraint for an index
        # addition — presumably should be 'index'; verify upstream.
        self.execute(*self._create_db_constraint_sql(model, new_field.column, 'unique'))
    # Type alteration on primary key? Then we need to alter the column
    # referring to us.
    rels_to_update = []
    if old_field.primary_key and new_field.primary_key and old_type != new_type:
        rels_to_update.extend(new_field.model._meta.get_all_related_objects())
    # Changed to become primary key?
    # Note that we don't detect unsetting of a PK, as we assume another field
    # will always come along and replace it.
    if not old_field.primary_key and new_field.primary_key:
        # First, drop the old PK
        constraint_names = self._constraint_names(model, primary_key=True)
        if strict and len(constraint_names) != 1:
            raise ValueError("Found wrong number (%s) of PK constraints for %s" % (
                len(constraint_names),
                model._meta.db_table,
            ))
        for constraint_name in constraint_names:
            self.execute(*self._delete_db_constraint_sql(model, constraint_name, constraint_type='pk'))
        # Make the new one
        self.execute(*self._create_db_constraint_sql(model, new_field.column, 'pk'))
        # Update all referencing columns
        rels_to_update.extend(new_field.model._meta.get_all_related_objects())
    # Handle our type alters on the other end of rels from the PK stuff above
    for rel in rels_to_update:
        # rel_db_params = rel.field.db_parameters(connection=self.connection)
        # rel_type = rel_db_params['type']
        # NOTE(review): type_actions is computed but never executed here —
        # the related columns' type alter appears to be a no-op; confirm
        # whether the result should be passed to self.execute().
        type_actions = self._alter_db_column_sql(rel.model, rel.field.column, 'type',
            values={
                'type': new_type,
                'old_type': old_type,
            },
        )
    # Does it have a foreign key?
    if new_field.rel:
        self.execute(*self._create_db_constraint_sql(model, new_field.column, 'fk', values={
            "to_table": self.quote_name(new_field.rel.to._meta.db_table),
            "to_column": self.quote_name(new_field.rel.get_related_field().column),
        }))
    # Rebuild FKs that pointed to us if we previously had to drop them
    if old_field.primary_key and new_field.primary_key and old_type != new_type:
        for rel in new_field.model._meta.get_all_related_objects():
            # NOTE(review): recreates the FK on `model`/`new_field.column`
            # rather than on rel.model/rel.field.column — confirm intent.
            self.execute(*self._create_db_constraint_sql(model, new_field.column, 'fk', values={
                "to_table": self.quote_name(model._meta.db_table),
                "to_column": self.quote_name(new_field.column),
            }))
    # Does it have check constraints we need to add?
    if old_db_params['check'] != new_db_params['check'] and new_db_params['check']:
        self.execute(*self._create_db_constraint_sql(model, new_field.column, 'check', values={
            'check': new_db_params['check'],
        }))
    # Reset connection if required
    if self.connection.features.connection_persists_old_columns:
        self.connection.close()
0
Example 9
@log_query
def search(self, query_string, **kwargs):
    """Run a naive ORM-backed search across all indexed models.

    '*' matches everything; any other query is split into terms and each
    model's text-ish fields are OR-ed with __icontains lookups. Returns a
    dict with 'results' (SearchResult objects) and 'hits' (total count).
    """
    hits = 0
    results = []
    result_class = SearchResult
    models = connections[self.connection_alias].get_unified_index().get_indexed_models()
    # Callers may override the result class and/or restrict the models.
    if kwargs.get('result_class'):
        result_class = kwargs['result_class']
    if kwargs.get('models'):
        models = kwargs['models']
    if query_string:
        for model in models:
            if query_string == '*':
                qs = model.objects.all()
            else:
                for term in query_string.split():
                    queries = []
                    for field in model._meta.fields:
                        # Skip relation fields and non-text columns.
                        if hasattr(field, 'related'):
                            continue
                        if not field.get_internal_type() in ('TextField', 'CharField', 'SlugField'):
                            continue
                        queries.append(Q(**{'%s__icontains' % field.name: term}))
                    # NOTE(review): qs is reassigned each iteration, so for
                    # multi-term queries only the LAST term's filter
                    # survives; also reduce() raises TypeError if the model
                    # has no text fields — confirm both against upstream.
                    qs = model.objects.filter(six.moves.reduce(lambda x, y: x | y, queries))
            hits += len(qs)
            for match in qs:
                # Stale scores from a previous pass must not leak into the
                # SearchResult kwargs.
                match.__dict__.pop('score', None)
                app_label, model_name = get_model_ct_tuple(match)
                result = result_class(app_label, model_name, match.pk, 0, **match.__dict__)
                # For efficiency.
                result._model = match.__class__
                result._object = match
                results.append(result)
    return {
        'results': results,
        'hits': hits,
    }
0
Example 10
def autocomplete(self, **kwargs):
    """
    A shortcut method to perform an autocomplete search.
    Must be run against fields that are either ``NgramField`` or
    ``EdgeNgramField``.
    """
    clone = self._clone()
    fragments = []
    for field_name, raw_query in kwargs.items():
        for word in raw_query.split(' '):
            cleaned = clone.query.clean(word.strip())
            if not cleaned:
                continue
            fragments.append(SQ(**{field_name: cleaned}))
    combined = six.moves.reduce(operator.__and__, fragments)
    return clone.filter(combined)
0
Example 11
def get_queryset(self):
    """
    Constructs an '__contains' or '__icontains' filter across all of the
    fields listed in ``SEARCH_FIELDS``.
    """
    qs = super(SearchForm, self).get_queryset()
    # Do Searching
    q = self.cleaned_data.get('q', '').strip()
    if not q:
        return qs
    suffix = '__contains' if self.CASE_SENSITIVE else '__icontains'
    clauses = [Q(**{field + suffix: q}) for field in self.SEARCH_FIELDS]
    if len(clauses) > 1:
        qs = qs.filter(reduce(lambda x, y: x | y, clauses))
    elif len(clauses) == 1:
        qs = qs.filter(clauses[0])
    return qs
0
Example 12
@property
def media(self):
    """Combined Media object of every sub-form (CSS/JS assets summed)."""
    # NOTE(review): reduce() with no initial value raises TypeError when
    # self.forms is empty — presumably callers guarantee at least one form.
    return reduce(add, (form.media for form in self.forms.values()))
0
Example 13
Project: wagtail-modeltranslation Source File: manager.py
def append_lookup_keys(model, fields):
    """Return the union of the lookup-key sets produced for each field."""
    keys = set()
    for field in fields:
        keys = keys.union(append_lookup_key(model, field))
    return keys
0
Example 14
def build_query(self, **filters):
    """
    Creates a single SQ filter from querystring parameters that correspond to the SearchIndex fields
    that have been "registered" in `view.fields`.
    Default behavior is to `OR` terms for the same parameters, and `AND` between parameters. Any
    querystring parameters that are not registered in `view.fields` will be ignored.
    :param dict[str, list[str]] filters: is an expanded QueryDict or a mapping of keys to a list of
    parameters.
    :returns: a (filters, exclusions) pair; each element is either a
        combined query term or an empty list when nothing applied.
    """
    applicable_filters = []
    applicable_exclusions = []
    for param, value in filters.items():
        # Skip if the parameter is not listed in the serializer's `fields`
        # or if it's in the `exclude` list.
        excluding_term = False
        param_parts = param.split("__")
        base_param = param_parts[0]  # only test against field without lookup
        negation_keyword = constants.DRF_HAYSTACK_NEGATION_KEYWORD
        if len(param_parts) > 1 and param_parts[1] == negation_keyword:
            excluding_term = True
            param = param.replace("__%s" % negation_keyword, "")  # haystack wouldn't understand our negation
        if self.view.serializer_class:
            if self.view.serializer_class.Meta.field_aliases:
                old_base = base_param
                base_param = self.view.serializer_class.Meta.field_aliases.get(base_param, base_param)
                # NOTE(review): str.replace swaps the first occurrence of
                # old_base anywhere in param, not just the leading field
                # name — could corrupt lookups containing that substring.
                param = param.replace(old_base, base_param)  # need to replace the alias
            fields = self.view.serializer_class.Meta.fields
            exclude = self.view.serializer_class.Meta.exclude
            search_fields = self.view.serializer_class.Meta.search_fields
            if ((fields or search_fields) and base_param not in chain(fields, search_fields)) or base_param in exclude or not value:
                continue
        field_queries = []
        for token in self.tokenize(value, self.view.lookup_sep):
            field_queries.append(self.view.query_object((param, token)))
        # Drop falsy query objects before combining.
        field_queries = [fq for fq in field_queries if fq]
        if len(field_queries) > 0:
            # Values for the same parameter are OR-ed together.
            term = six.moves.reduce(operator.or_, field_queries)
            if excluding_term:
                applicable_exclusions.append(term)
            else:
                applicable_filters.append(term)
    # Distinct parameters are combined with the view's default operator
    # (AND by default); empty list is returned when nothing applied.
    applicable_filters = six.moves.reduce(
        self.default_operator, filter(lambda x: x, applicable_filters)) if applicable_filters else []
    applicable_exclusions = six.moves.reduce(
        self.default_operator, filter(lambda x: x, applicable_exclusions)) if applicable_exclusions else []
    return applicable_filters, applicable_exclusions