Here are the examples of the Python API django.db.models.Min taken from open source projects. By voting up you can indicate which examples are most useful and appropriate.
73 Examples
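Before the individual project snippets, here is a minimal sketch of the two places Min normally shows up: as a terminal aggregate() call, which returns a plain dict keyed '<field>__min' unless you supply an alias, and as an annotate() expression that attaches a per-row value you can then filter or order by. The Book and Publisher models below are hypothetical and exist only to keep the sketch self-contained.

from django.db import models
from django.db.models import Min

class Publisher(models.Model):
    # Hypothetical model, for illustration only.
    name = models.CharField(max_length=200)

class Book(models.Model):
    # Hypothetical model, for illustration only.
    title = models.CharField(max_length=200)
    price = models.DecimalField(max_digits=6, decimal_places=2)
    pubdate = models.DateField()
    publisher = models.ForeignKey(Publisher, on_delete=models.CASCADE)

# Terminal aggregate: evaluates immediately and returns a dict.
Book.objects.aggregate(Min('price'))    # e.g. {'price__min': Decimal('9.99')}

# Per-row annotation: each publisher gets the earliest pubdate among its books,
# and the annotation can be used in filter() and order_by().
Publisher.objects.annotate(earliest_book=Min('book__pubdate')).order_by('earliest_book')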
3
Source : signal.py
with Mozilla Public License 2.0
from Amsterdam
def after_filter(self, queryset, name, value):
    """
    Filters a Parent Signal on the created date of the first child Signal
    """
    return queryset.annotate(
        min_child_created_at=Min('children__created_at')
    ).filter(
        min_child_created_at__gte=value
    )
def filter_queryset(self, queryset):
3
Source : tests.py
with Apache License 2.0
from gethue
def test_aggregate_multi_join(self):
    vals = Store.objects.aggregate(Max("books__authors__age"))
    self.assertEqual(len(vals), 1)
    self.assertEqual(vals["books__authors__age__max"], 57)
    vals = Author.objects.aggregate(Min("book__publisher__num_awards"))
    self.assertEqual(len(vals), 1)
    self.assertEqual(vals["book__publisher__num_awards__min"], 1)
def test_aggregate_alias(self):
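The test above shows that the lookup passed to Min or Max may span several relations; the default result key is just that full lookup path plus the aggregate suffix, and an alias replaces it. A short sketch reusing the Store and Author models from the test suite above:

from django.db.models import Max, Min

# The joins follow the lookup path; the aggregate runs over every reachable row.
Store.objects.aggregate(Max("books__authors__age"))
# -> {'books__authors__age__max': ...}

Author.objects.aggregate(fewest_awards=Min("book__publisher__num_awards"))
# -> {'fewest_awards': ...}  (the alias replaces the long default key)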
3
Source : tests.py
with Apache License 2.0
from gethue
def test_annotate_with_aggregation_in_value(self):
    self.assertQuerysetEqual(
        CaseTestModel.objects.values(*self.non_lob_fields).annotate(
            min=Min('fk_rel__integer'),
            max=Max('fk_rel__integer'),
        ).annotate(
            test=Case(
                When(integer=2, then='min'),
                When(integer=3, then='max'),
            ),
        ).order_by('pk'),
        [(1, None, 1, 1), (2, 2, 2, 3), (3, 4, 3, 4), (2, 2, 2, 3), (3, 4, 3, 4), (3, 4, 3, 4), (4, None, 5, 5)],
        transform=itemgetter('integer', 'test', 'min', 'max')
    )
def test_annotate_with_aggregation_in_condition(self):
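The Case/When tests above and below rely on annotate() making 'min' and 'max' available by name to later expressions, either as the then= target or wrapped in F(). A condensed sketch of the pattern, with a hypothetical Item model whose related parts rows carry a quantity field:

from django.db.models import Case, CharField, F, Max, Min, Value, When

qs = (
    Item.objects
    .annotate(min=Min('parts__quantity'), max=Max('parts__quantity'))
    .annotate(
        test=Case(
            # 'min' and 'max' refer to the annotations added one step earlier.
            When(quantity=F('min'), then=Value('min')),
            When(quantity=F('max'), then=Value('max')),
            output_field=CharField(),
        )
    )
)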
3
Source : tests.py
with Apache License 2.0
from gethue
def test_annotate_with_aggregation_in_condition(self):
    self.assertQuerysetEqual(
        CaseTestModel.objects.values(*self.non_lob_fields).annotate(
            min=Min('fk_rel__integer'),
            max=Max('fk_rel__integer'),
        ).annotate(
            test=Case(
                When(integer2=F('min'), then=Value('min')),
                When(integer2=F('max'), then=Value('max')),
                output_field=models.CharField(),
            ),
        ).order_by('pk'),
        [(1, 1, 'min'), (2, 3, 'max'), (3, 4, 'max'), (2, 2, 'min'), (3, 4, 'max'), (3, 3, 'min'), (4, 5, 'min')],
        transform=itemgetter('integer', 'integer2', 'test')
    )
def test_annotate_with_aggregation_in_predicate(self):
3
Source : tests.py
with Apache License 2.0
from gethue
def test_filter_with_aggregation_in_value(self):
    self.assertQuerysetEqual(
        CaseTestModel.objects.values(*self.non_lob_fields).annotate(
            min=Min('fk_rel__integer'),
            max=Max('fk_rel__integer'),
        ).filter(
            integer2=Case(
                When(integer=2, then='min'),
                When(integer=3, then='max'),
            ),
        ).order_by('pk'),
        [(3, 4, 3, 4), (2, 2, 2, 3), (3, 4, 3, 4)],
        transform=itemgetter('integer', 'integer2', 'min', 'max')
    )
def test_filter_with_aggregation_in_condition(self):
3
Source : tests.py
with Apache License 2.0
from gethue
def test_filter_with_aggregation_in_condition(self):
    self.assertQuerysetEqual(
        CaseTestModel.objects.values(*self.non_lob_fields).annotate(
            min=Min('fk_rel__integer'),
            max=Max('fk_rel__integer'),
        ).filter(
            integer=Case(
                When(integer2=F('min'), then=2),
                When(integer2=F('max'), then=3),
            ),
        ).order_by('pk'),
        [(3, 4, 3, 4), (2, 2, 2, 3), (3, 4, 3, 4)],
        transform=itemgetter('integer', 'integer2', 'min', 'max')
    )
def test_filter_with_aggregation_in_predicate(self):
3
Source : test_regress.py
with Apache License 2.0
from gethue
def test_unicode_date(self):
    "Testing dates are converted properly, even on SpatiaLite. See #16408."
    founded = datetime(1857, 5, 23)
    PennsylvaniaCity.objects.create(name='Mansfield', county='Tioga', point='POINT(-77.071445 41.823881)',
                                    founded=founded)
    self.assertEqual(founded, PennsylvaniaCity.objects.datetimes('founded', 'day')[0])
    self.assertEqual(founded, PennsylvaniaCity.objects.aggregate(Min('founded'))['founded__min'])
def test_empty_count(self):
3
Source : tests.py
with Apache License 2.0
from gethue
def test_query_aggregation(self):
    # Only min and max make sense for datetimes.
    Event.objects.create(dt=datetime.datetime(2011, 9, 1, 23, 20, 20))
    Event.objects.create(dt=datetime.datetime(2011, 9, 1, 13, 20, 30))
    Event.objects.create(dt=datetime.datetime(2011, 9, 1, 3, 20, 40))
    result = Event.objects.all().aggregate(Min('dt'), Max('dt'))
    self.assertEqual(result, {
        'dt__min': datetime.datetime(2011, 9, 1, 3, 20, 40),
        'dt__max': datetime.datetime(2011, 9, 1, 23, 20, 20),
    })
def test_query_annotation(self):
3
Source : tests.py
with Apache License 2.0
from gethue
def test_query_aggregation(self):
    # Only min and max make sense for datetimes.
    Event.objects.create(dt=datetime.datetime(2011, 9, 1, 23, 20, 20, tzinfo=EAT))
    Event.objects.create(dt=datetime.datetime(2011, 9, 1, 13, 20, 30, tzinfo=EAT))
    Event.objects.create(dt=datetime.datetime(2011, 9, 1, 3, 20, 40, tzinfo=EAT))
    result = Event.objects.all().aggregate(Min('dt'), Max('dt'))
    self.assertEqual(result, {
        'dt__min': datetime.datetime(2011, 9, 1, 3, 20, 40, tzinfo=EAT),
        'dt__max': datetime.datetime(2011, 9, 1, 23, 20, 20, tzinfo=EAT),
    })
def test_query_annotation(self):
3
Source : update_texting_history.py
with GNU General Public License v3.0
from JustFixNYC
def get_min_max_date_sent(
    queryset,
) -> Tuple[Optional[datetime.datetime], Optional[datetime.datetime]]:
    result = queryset.aggregate(Min("date_sent"), Max("date_sent"))
    return (result["date_sent__min"], result["date_sent__max"])
def stop_when_older_than(msgs: MessageIterator, when: datetime.datetime) -> MessageIterator:
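A slightly more general form of the helper above: aggregate() always includes the '<field>__min' and '<field>__max' keys, and both values are None for an empty queryset, so no separate emptiness check is needed. This is a sketch; the default field name just mirrors the snippet.

import datetime
from typing import Optional, Tuple

from django.db.models import Max, Min, QuerySet

def min_max_of(qs: QuerySet, field: str = "date_sent") -> Tuple[Optional[datetime.datetime], Optional[datetime.datetime]]:
    """Return (earliest, latest) values of `field`, or (None, None) if qs is empty."""
    result = qs.aggregate(Min(field), Max(field))
    return result[f"{field}__min"], result[f"{field}__max"]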
3
Source : test_regress.py
with Apache License 2.0
from lumanjiao
def test_unicode_date(self):
    "Testing dates are converted properly, even on SpatiaLite. See #16408."
    founded = datetime(1857, 5, 23)
    mansfield = PennsylvaniaCity.objects.create(name='Mansfield', county='Tioga', point='POINT(-77.071445 41.823881)',
                                                founded=founded)
    self.assertEqual(founded, PennsylvaniaCity.objects.datetimes('founded', 'day')[0])
    self.assertEqual(founded, PennsylvaniaCity.objects.aggregate(Min('founded'))['founded__min'])
def test_empty_count(self):
3
Source : models.py
with BSD 3-Clause "New" or "Revised" License
from mitodl
def start_end_dates(self):
    """
    Start date: earliest course run start date
    End date: latest course run end date
    """
    course_ids = self.program.courses.all().values_list("id", flat=True)
    dates = CourseRunCertificate.objects.filter(
        user_id=self.user_id, course_run__course_id__in=course_ids
    ).aggregate(
        start_date=models.Min("course_run__start_date"),
        end_date=models.Max("course_run__end_date"),
    )
    return dates["start_date"], dates["end_date"]
def __str__(self):
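The property above names its aggregates, which avoids the auto-generated course_run__start_date__min style keys. Reduced to the bare call shape (treat this as a sketch; CourseRunCertificate and the field paths are taken from the snippet):

from django.db import models

dates = CourseRunCertificate.objects.aggregate(
    start_date=models.Min("course_run__start_date"),
    end_date=models.Max("course_run__end_date"),
)
# The keyword aliases become the dict keys; both values are None when nothing matches.
start_date, end_date = dates["start_date"], dates["end_date"]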
3
Source : run_queries.py
with MIT License
from paul-wolf
def q_books_avg_min_max_queryset(**kwargs):
    a = []
    qs = Book.objects.aggregate(Avg("price"), Max("price"), Min("price"))
    for rec in qs:
        a.append(rec)
@timeit
3
Source : sorters.py
with GNU General Public License v3.0
from Saleor-Multi-Vendor
def qs_with_value(queryset: QuerySet, channel_slug: str) -> QuerySet:
    return queryset.annotate(
        value=Min(
            "channel_listings__discount_value",
            filter=Q(channel_listings__channel__slug=str(channel_slug)),
        )
    )
class SaleSortingInput(ChannelSortInputObjectType):
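All of the Saleor sorter helpers in this group pass filter=Q(...) to Min (supported since Django 2.0), which restricts the rows fed into the aggregate without removing anything from the outer queryset. A sketch of the idea with a hypothetical products queryset and listing field names:

from django.db.models import Min, Q, QuerySet

def with_channel_price(qs: QuerySet, channel_slug: str) -> QuerySet:
    # Only listings in the requested channel contribute to the Min; products
    # without a matching listing keep value=None instead of being dropped.
    return qs.annotate(
        value=Min(
            "channel_listings__price_amount",
            filter=Q(channel_listings__channel__slug=channel_slug),
        )
    )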
3
Source : sorters.py
with GNU General Public License v3.0
from Saleor-Multi-Vendor
def qs_with_minimum_spent_amount(queryset: QuerySet, channel_slug: str) -> QuerySet:
    return queryset.annotate(
        min_spent_amount=Min(
            "channel_listings__min_spent_amount",
            filter=Q(channel_listings__channel__slug=str(channel_slug)),
        )
    )
@staticmethod
3
Source : sorters.py
with GNU General Public License v3.0
from Saleor-Multi-Vendor
def qs_with_value(queryset: QuerySet, channel_slug: str) -> QuerySet:
    return queryset.annotate(
        discount_value=Min(
            "channel_listings__discount_value",
            filter=Q(channel_listings__channel__slug=str(channel_slug)),
        )
    )
class VoucherSortingInput(ChannelSortInputObjectType):
3
Source : sorters.py
with GNU General Public License v3.0
from Saleor-Multi-Vendor
def qs_with_price(queryset: QuerySet, channel_slug: str) -> QuerySet:
    return queryset.annotate(
        min_variants_price_amount=Min(
            "variants__channel_listings__price_amount",
            filter=Q(variants__channel_listings__channel__slug=str(channel_slug))
            & Q(variants__channel_listings__price_amount__isnull=False),
        )
    )
@staticmethod
3
Source : sorters.py
with GNU General Public License v3.0
from Saleor-Multi-Vendor
def qs_with_minimal_price(queryset: QuerySet, channel_slug: str) -> QuerySet:
    return queryset.annotate(
        discounted_price_amount=Min(
            "channel_listings__discounted_price_amount",
            filter=Q(channel_listings__channel__slug=str(channel_slug)),
        )
    )
@staticmethod
3
Source : filters.py
with GNU Affero General Public License v3.0
from svthalia
def lookups(self, request, model_admin):
    objects_end = model_admin.model.objects.aggregate(Max("end"))
    objects_start = model_admin.model.objects.aggregate(Min("start"))
    if objects_end["end__max"] and objects_start["start__min"]:
        year_end = datetime_to_lectureyear(objects_end["end__max"])
        year_start = datetime_to_lectureyear(objects_start["start__min"])
        return [
            (year, f"{year}-{year + 1}")
            for year in range(year_end, year_start - 1, -1)
        ]
    return []
def queryset(self, request, queryset):
3
Source : views.py
with MIT License
from techlib
def interest(self, request, pk):
    org_filter = organization_filter_from_org_id(pk, request.user)
    date_filter = date_filter_from_params(request.GET)
    interest_rt = ReportType.objects.get(short_name='interest', source__isnull=True)
    accesslog_filter_params = {'report_type': interest_rt, **org_filter, **date_filter}
    replace_report_type_with_materialized(accesslog_filter_params)
    data = AccessLog.objects.filter(**accesslog_filter_params).aggregate(
        interest_sum=Sum('value'), min_date=Min('date'), max_date=Max('date')
    )
    if data['max_date']:
        # the date might be None and then we do not want to do the math ;)
        data['max_date'] = month_end(data['max_date'])
        data['days'] = (data['max_date'] - data['min_date']).days + 1
    else:
        data['days'] = 0
    return Response(data)
@action(detail=True, url_path='title-interest-histogram')
3
Source : models.py
with GNU General Public License v3.0
from thinkingmachines
def match_dataset(self, export=False):
    result = match_dataset.apply_async((self.id,), expires=360)
    result.get()
    if export:
        self.items.filter(
            id__in=(
                i['id'] for i in
                self.items.values('dataset_index').annotate(id=Min('id'))
            )).update(chosen=True)
        self.export()
def save_choices(self, match_choices):
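The export branch above uses a common deduplication idiom: group with values(), take Min('id') per group, then act only on those representative rows. A standalone sketch with a hypothetical Record model that has a group_key field:

from django.db.models import Min

# Keep one representative row per group_key: the row with the smallest id.
keep_ids = (
    Record.objects
    .values('group_key')                 # GROUP BY group_key
    .annotate(min_id=Min('id'))          # smallest id in each group
    .values_list('min_id', flat=True)
)
Record.objects.filter(id__in=keep_ids).update(chosen=True)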
3
Source : views.py
with GNU General Public License v3.0
from urankajtazaj
def student_detail(request, pk):
    student = Student.objects.get(pk=pk)
    success = Grade.objects.filter(student=student.user).order_by('-grade')
    details = Grade.objects.filter(student=student.user, grade__gt=4).aggregate(Avg('grade'), Max('grade'), Min('grade'))
    if request.user.is_authenticated:
        return render(
            request, 'student_profile.html', {'student': student, 'success': success, 'details': details},
        )
    else:
        return redirect('login')
# ########################################################
def course_detail(request, pk):
3
Source : group.py
with GNU Affero General Public License v3.0
from wechange-eg
def get_or_infer_from_date(self):
    """ Gets the (conference) group's `from_date` or if not set,
    infers it from the starting time of the earliest conference event """
    if self.from_date:
        return self.from_date
    from cosinnus_event.models import ConferenceEvent  # noqa
    queryset = ConferenceEvent.objects.filter(room__group=self)
    if queryset.count() > 0:
        return queryset.aggregate(Min('from_date'))['from_date__min']
    return None
@property
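Because Min over an empty queryset simply comes back as None, the count() guard in the property above can also be folded into a single aggregate followed by a None check, saving one query. A hedged alternative sketch using the same ConferenceEvent model:

from django.db.models import Min

def infer_from_date(group):
    if group.from_date:
        return group.from_date
    earliest = ConferenceEvent.objects.filter(room__group=group).aggregate(
        Min('from_date')
    )['from_date__min']
    return earliest  # None when the group has no conference events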
3
Source : serializers.py
with GNU Affero General Public License v3.0
from wechange-eg
def get_dates(self, obj):
    queryset = ConferenceEvent.objects.filter(room__group=obj)
    if queryset.count() > 0:
        queryset = queryset.aggregate(Min('from_date'), Max('to_date'))
        from_date = queryset['from_date__min'].date() if queryset['from_date__min'] else now()
        to_date = queryset['to_date__max'].date() if queryset['to_date__max'] else from_date
    else:
        from_date, to_date = now(), now()
    return [from_date + timedelta(days=i) for i in range((to_date - from_date).days + 1)]
def get_avatar(self, obj):
0
Source : admin_list.py
with GNU General Public License v3.0
from Aghoreshwar
def date_hierarchy(cl):
"""
Display the date hierarchy for date drill-down functionality.
"""
if cl.date_hierarchy:
field_name = cl.date_hierarchy
year_field = '%s__year' % field_name
month_field = '%s__month' % field_name
day_field = '%s__day' % field_name
field_generic = '%s__' % field_name
year_lookup = cl.params.get(year_field)
month_lookup = cl.params.get(month_field)
day_lookup = cl.params.get(day_field)
def link(filters):
return cl.get_query_string(filters, [field_generic])
if not (year_lookup or month_lookup or day_lookup):
# select appropriate start level
date_range = cl.queryset.aggregate(first=models.Min(field_name),
last=models.Max(field_name))
if date_range['first'] and date_range['last']:
if date_range['first'].year == date_range['last'].year:
year_lookup = date_range['first'].year
if date_range['first'].month == date_range['last'].month:
month_lookup = date_range['first'].month
if year_lookup and month_lookup and day_lookup:
day = datetime.date(int(year_lookup), int(month_lookup), int(day_lookup))
return {
'show': True,
'back': {
'link': link({year_field: year_lookup, month_field: month_lookup}),
'title': capfirst(formats.date_format(day, 'YEAR_MONTH_FORMAT'))
},
'choices': [{'title': capfirst(formats.date_format(day, 'MONTH_DAY_FORMAT'))}]
}
elif year_lookup and month_lookup:
days = cl.queryset.filter(**{year_field: year_lookup, month_field: month_lookup})
days = getattr(days, 'dates')(field_name, 'day')
return {
'show': True,
'back': {
'link': link({year_field: year_lookup}),
'title': str(year_lookup)
},
'choices': [{
'link': link({year_field: year_lookup, month_field: month_lookup, day_field: day.day}),
'title': capfirst(formats.date_format(day, 'MONTH_DAY_FORMAT'))
} for day in days]
}
elif year_lookup:
months = cl.queryset.filter(**{year_field: year_lookup})
months = getattr(months, 'dates')(field_name, 'month')
return {
'show': True,
'back': {
'link': link({}),
'title': _('All dates')
},
'choices': [{
'link': link({year_field: year_lookup, month_field: month.month}),
'title': capfirst(formats.date_format(month, 'YEAR_MONTH_FORMAT'))
} for month in months]
}
else:
years = getattr(cl.queryset, 'dates')(field_name, 'year')
return {
'show': True,
'choices': [{
'link': link({year_field: str(year.year)}),
'title': str(year.year),
} for year in years]
}
@register.inclusion_tag('admin/search_form.html')
0
Source : signals.py
with Mozilla Public License 2.0
from Amsterdam
def geography(self, request):
"""
Returns a GeoJSON of all Signal's that are in an "Open" state and in a publicly available category.
Additional filtering can be done by adding query parameters.
"""
qs = self.get_queryset()
if request.query_params.get('group_by', '').lower() == 'category':
# Group by category and return the oldest signal created_at date
qs = qs.values('category_assignment__category_id').annotate(created_at=Min('created_at'))
queryset = self.filter_queryset(
qs.annotate(
# Transform the output of the query to GeoJSON in the database.
# This is much faster than using a DRF Serializer.
feature=JSONObject(
type=Value('Feature', output_field=CharField()),
geometry=AsGeoJSON('location__geometrie'),
properties=JSONObject(
category=JSONObject(
# Return the category public_name. If the public_name is empty, return the category name
name=Coalesce('category_assignment__category__public_name',
'category_assignment__category__name'),
),
# Creation date of the Signal
created_at='created_at',
),
)
)
).exclude(
# Only signals that are in an "Open" state
status__state__in=[AFGEHANDELD, AFGEHANDELD_EXTERN, GEANNULEERD, VERZOEK_TOT_HEROPENEN],
# Only Signal's that are in categories that are publicly accessible
category_assignment__category__is_public_accessible=False,
)
# Paginate our queryset and turn it into a GeoJSON feature collection:
headers = []
feature_collection = {'type': 'FeatureCollection', 'features': []}
paginator = LinkHeaderPaginationForQuerysets(page_query_param='geopage', page_size=SIGNALS_API_GEO_PAGINATE_BY)
page_qs = paginator.paginate_queryset(queryset, self.request, view=self)
if page_qs is not None:
features = page_qs.aggregate(features=JSONAgg('feature'))
feature_collection.update(features)
headers = paginator.get_pagination_headers()
return Response(feature_collection, headers=headers)
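The group_by branch near the top of the view collapses the queryset to one row per category, keeping only the oldest created_at. Stripped of the GeoJSON plumbing, the grouping itself looks like this (names as in the snippet; a sketch rather than the project's exact query):

from django.db.models import Min

oldest_per_category = (
    Signal.objects
    .values('category_assignment__category_id')   # GROUP BY category
    .annotate(created_at=Min('created_at'))       # oldest signal per category
)
# Each element is a dict: {'category_assignment__category_id': ..., 'created_at': ...}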
0
Source : views.py
with Apache License 2.0
from aropan
def resource(request, host, template='resource.html', extra_context=None):
now = timezone.now()
resource = get_object_or_404(Resource, host=host)
if request.user.is_authenticated:
coder = request.user.coder
coder_account = coder.account_set.filter(resource=resource, rating__isnull=False).first()
coder_account_ids = set(coder.account_set.filter(resource=resource).values_list('id', flat=True))
show_coder_account_rating = True
else:
coder = None
coder_account = None
coder_account_ids = set()
show_coder_account_rating = False
params = {}
contests = resource.contest_set.annotate(has_statistics=Exists('statistics'))
accounts = Account.objects.filter(resource=resource)
has_country = accounts.filter(country__isnull=False).exists()
countries = request.GET.getlist('country')
countries = set([c for c in countries if c])
if countries:
params['countries'] = countries
accounts = accounts.filter(country__in=countries)
period = request.GET.get('period', 'all')
params['period'] = period
deltas_period = {
'month': timedelta(days=30 * 1),
'quarter': timedelta(days=30 * 3),
'half': timedelta(days=30 * 6),
'year': timedelta(days=30 * 12),
'all': None,
}
periods = list(deltas_period.keys())
delta_period = deltas_period.get(period, None)
if delta_period:
accounts = accounts.filter(last_activity__gte=now - delta_period)
default_variables = resource.info.get('default_variables', {})
for field, operator in (
('min_rating', 'rating__gte'),
('max_rating', 'rating__lte'),
('min_n_participations', 'n_contests__gte'),
('max_n_participations', 'n_contests__lte'),
):
value = request.GET.get(field, default_variables.get(field))
if value:
params[field] = value
if field in params:
accounts = accounts.filter(**{operator: params[field]})
countries = accounts \
.filter(country__isnull=False) \
.values('country') \
.annotate(count=Count('country')) \
.order_by('-count', 'country')
n_x_axis = resource.info.get('ratings', {}).get('chartjs', {}).get('n_x_axis')
coloring_field = resource.info.get('ratings', {}).get('chartjs', {}).get('coloring_field')
width = 50
min_rating = params.get('min_rating')
max_rating = params.get('max_rating')
if n_x_axis or min_rating and max_rating and int(max_rating) - int(min_rating) <= 100:
width = 1
rating_field = 'rating50' if width == 50 else 'rating'
ratings = accounts.filter(**{f'{rating_field}__isnull': False})
if n_x_axis:
rs = ratings.aggregate(max_rating=Max(rating_field), min_rating=Min(rating_field))
if rs['max_rating'] is not None:
width = max((rs['max_rating'] - rs['min_rating']) // n_x_axis, 1)
else:
width = 1
ratings = ratings.annotate(ratingw=F(rating_field) / width)
rating_field = 'ratingw'
annotations = {'count': Count(rating_field)}
if coloring_field:
ratings = ratings.annotate(rank=Cast(KeyTextTransform(coloring_field, 'info'), IntegerField()))
annotations['coloring_field'] = Avg('rank')
ratings = ratings \
.values(rating_field) \
.annotate(**annotations) \
.order_by(rating_field)
ratings = list(ratings)
labels = []
data = []
if ratings and resource.ratings:
idx = 0
for rating in ratings:
low = rating[rating_field] * width
high = low + width - 1
val = rating.get('coloring_field', low)
if val is None:
continue
while val > resource.ratings[idx]['high']:
idx += 1
while idx and val <= resource.ratings[idx - 1]['high']:
idx -= 1
data.append({
'title': f'{low}..{high}',
'rating': low,
'count': rating['count'],
'info': resource.ratings[idx],
})
min_rating = ratings[0][rating_field]
max_rating = ratings[-1][rating_field]
labels = list(range(min_rating * width, max_rating * width + 1, width))
context = {
'resource': resource,
'coder': coder,
'coder_accounts_ids': coder_account_ids,
'accounts': resource.account_set.filter(coders__isnull=False).prefetch_related('coders').order_by('-modified'),
'countries': countries,
'rating': {
'labels': labels,
'data': data,
'account': coder_account if show_coder_account_rating else None,
'width': width,
},
'contests': {
'past': {
'contests': contests.filter(end_time__lt=now).order_by('-end_time'),
'field': 'end_time',
},
'coming': {
'contests': contests.filter(start_time__gt=now).order_by('start_time'),
'field': 'start_time',
},
'running': {
'contests': contests.filter(start_time__lt=now, end_time__gt=now).order_by('end_time'),
'field': 'time_left',
},
},
'contest_key': None,
'has_country': has_country,
'periods': periods,
'params': params,
'first_per_page': 10,
'per_page': 50,
'last_activities': accounts.filter(last_activity__isnull=False).order_by('-last_activity', 'id'),
'top': accounts.filter(rating__isnull=False).order_by('-rating', 'id'),
'most_participated': accounts.order_by('-n_contests', 'id'),
'most_writer': accounts.filter(n_writers__gt=0).order_by('-n_writers', 'id'),
'problems': resource.problem_set.filter(url__isnull=False).order_by('-time', 'contest_id', 'index'),
}
if extra_context is not None:
context.update(extra_context)
return render(request, template, context)
@permission_required('clist.view_resources_dump_data')
0
Source : pdf_utils.py
with Apache License 2.0
from digitalfabrik
def generate_pdf(region, language_slug, pages):
"""
Function for handling a pdf export request for pages.
The pages were either selected by cms user or by API request (see :func:`~integreat_cms.api.v3.pdf_export`)
For more information on xhtml2pdf, see :doc:`xhtml2pdf:index`
:param region: region which requested the pdf document
:type region: ~integreat_cms.cms.models.regions.region.Region
:param language_slug: bcp47 slug of the current language
:type language_slug: str
:param pages: at least on page to render as PDF document
:type pages: ~treebeard.ns_tree.NS_NodeQuerySet
:return: PDF document wrapped in a HtmlResponse
:rtype: ~django.http.HttpResponse
"""
# first all necessary data for hashing are collected, starting at region slug
# region last_updated field taking into account, to keep track of maybe edited region icons
pdf_key_list = [region.slug, region.last_updated]
for page in pages:
# add translation id and last_updated to hash key list if they exist
page_translation = page.get_public_translation(language_slug)
if page_translation:
# if translation for this language exists
pdf_key_list.append(page_translation.id)
pdf_key_list.append(page_translation.last_updated)
else:
# if the page has no translation for this language
pages = pages.exclude(id=page.id)
# finally combine all list entries to a single hash key
pdf_key_string = "_".join(map(str, pdf_key_list))
# compute the hash value based on the hash key
pdf_hash = hashlib.sha256(bytes(pdf_key_string, "utf-8")).hexdigest()
cache = caches["pdf"]
cached_response = cache.get(pdf_hash, "has_expired")
if cached_response != "has_expired":
# if django cache already contains a response object
return cached_response
amount_pages = pages.count()
if amount_pages == 0:
return HttpResponse(
_("No valid pages selected for PDF generation."), status=400
)
if amount_pages == 1:
# If pdf contains only one page, take its title as filename
title = pages.first().get_public_translation(language_slug).title
else:
# If pdf contains multiple pages, check the minimum level
min_level = pages.aggregate(Min("depth")).get("depth__min")
# Query all pages with this minimum level
min_level_pages = pages.filter(depth=min_level)
if min_level_pages.count() == 1:
# If there's exactly one page with the minimum level, take its title
title = min_level_pages.first().get_public_translation(language_slug).title
else:
# In any other case, take the region name
title = region.name
language = Language.objects.get(slug=language_slug)
filename = f"Integreat - {language.translated_name} - {title}.pdf"
# Convert queryset to annotated list which can be rendered better
annotated_pages = Page.get_annotated_list_qs(pages)
context = {
"right_to_left": language.text_direction == text_directions.RIGHT_TO_LEFT,
"region": region,
"annotated_pages": annotated_pages,
"language": language,
"amount_pages": amount_pages,
"prevent_italics": ["ar", "fa"],
"request": HttpRequest(),
}
response = HttpResponse(content_type="application/pdf")
response["Content-Disposition"] = f'filename="{filename}"'
html = get_template("pages/page_pdf.html").render(context)
pisa_status = pisa.CreatePDF(
html, dest=response, link_callback=link_callback, encoding="UTF-8"
)
# pylint: disable=no-member
if pisa_status.err:
logger.error(
"The following PDF could not be rendered: %r, %r, %r",
region,
language,
pages,
)
return HttpResponse(
_("The PDF could not be successfully generated."), status=500
)
cache.set(pdf_hash, response, 60 * 60 * 24)
return response
# pylint: disable=unused-argument
def link_callback(uri, rel):
0
Source : models.py
with MIT License
from dtcooper
def random_queryset_pick(queryset):
    model_cls = queryset.model
    id_range = model_cls.objects.aggregate(min=models.Min("id"), max=models.Max("id"))
    min_id, max_id = id_range["min"], id_range["max"]
    if min_id is None or max_id is None:
        logger.warning(f"AutoDJ couldn't generate a random {model_cls._meta.verbose_name}, no assets exist")
        return None
    # We've got our query set, we're ready to pick our track
    # Generate chunk size * number of tries to get a set potential random IDs
    random_ids = random.sample(
        range(min_id, max_id + 1),
        min(RANDOM_CHUNK_TRIES * RANDOM_CHUNK_SIZE, max_id + 1 - min_id),
    )
    # Try for assets in the potential random ID set in chunks.
    for i in range(0, len(random_ids), RANDOM_CHUNK_SIZE):
        random_ids_chunk = random_ids[i : i + RANDOM_CHUNK_SIZE]
        # Preserve random ordering in query and take the first one that exists
        random_order = models.Case(*[models.When(id=id, then=pos) for pos, id in enumerate(random_ids_chunk)])
        pick = queryset.filter(id__in=random_ids_chunk).order_by(random_order).first()
        if pick:
            return pick
    logger.warning(f"AutoDJ couldn't generate a random {model_cls._meta.verbose_name}")
    return None
def normalize_title_field(value):
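random_queryset_pick above first asks the database for the id bounds and only then samples candidate ids in Python. The first step in isolation, with AudioAsset standing in for queryset.model (a hypothetical name):

from django.db import models

bounds = AudioAsset.objects.aggregate(min=models.Min("id"), max=models.Max("id"))
min_id, max_id = bounds["min"], bounds["max"]
# Both are None when the table is empty; otherwise every existing id lies in
# [min_id, max_id], though deleted rows leave gaps, hence the chunked retries above.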
0
Source : invalidate.py
with MIT License
from g0v
def handle(self, *args, **options):
from_date = options['from_date']
to_date = options['to_date']
static_qs = HouseTS.objects.filter(
created__gte=from_date,
created__lte=to_date,
**self.could_be_house
).values(
'vendor_house_id',
*self.should_be_static_fields
).annotate(
count=Count('id'),
).order_by(
'vendor_house_id'
)
static_houses = {}
total_houses = 0
total_invalid_houses = 0
for house in static_qs:
house_id = house['vendor_house_id']
# print(' {} x {} - {}'.format(house_id, house['count'], house['building_type']))
if house['vendor_house_id'] in static_houses:
static_houses[house_id].append(house['count'])
total_invalid_houses += 1
else:
static_houses[house_id] = [house['count']]
total_houses += 1
for house_id in static_houses:
if len(static_houses[house_id]) > 1:
print('[STATIC] House {} changed {} ({}) times!!'.format(house_id, len(static_houses[house_id]), static_houses[house_id]))
print('[STATIC] Invald house: {}/{}'.format(total_invalid_houses, total_houses))
# min should be bigger than max/2
annotates = {}
for field in self.should_be_small_diff_fields:
annotates['max_{}'.format(field)] = Max(field)
annotates['min_{}'.format(field)] = Min(field)
small_diff_qs = HouseTS.objects.filter(
created__gte=from_date,
created__lte=to_date,
**self.could_be_house
).values(
'vendor_house_id',
).annotate(
count=Count('id'),
**annotates,
).order_by(
'vendor_house_id'
)
total_houses = 0
total_invalid_houses = 0
for house in small_diff_qs:
is_invalid = False
total_houses += 1
for field in self.should_be_small_diff_fields:
max_value = house['max_{}'.format(field)]
min_value = house['min_{}'.format(field)]
if max_value is not None and min_value is not None:
if max_value / 2 > min_value and min_value >= 0:
is_invalid = True
print('[SMALL] House {} field {} change too much, from {} to {}'.format(
house['vendor_house_id'], field, min_value, max_value
))
if is_invalid:
total_invalid_houses += 1
print('[SMALL] Invald house: {}/{}'.format(total_invalid_houses, total_houses))
0
Source : tests.py
with Apache License 2.0
from gethue
def test_even_more_aggregate(self):
publishers = Publisher.objects.annotate(
earliest_book=Min("book__pubdate"),
).exclude(earliest_book=None).order_by("earliest_book").values(
'earliest_book',
'num_awards',
'id',
'name',
)
self.assertEqual(
list(publishers), [
{
'earliest_book': datetime.date(1991, 10, 15),
'num_awards': 9,
'id': self.p4.id,
'name': 'Morgan Kaufmann'
},
{
'earliest_book': datetime.date(1995, 1, 15),
'num_awards': 7,
'id': self.p3.id,
'name': 'Prentice Hall'
},
{
'earliest_book': datetime.date(2007, 12, 6),
'num_awards': 3,
'id': self.p1.id,
'name': 'Apress'
},
{
'earliest_book': datetime.date(2008, 3, 3),
'num_awards': 1,
'id': self.p2.id,
'name': 'Sams'
}
]
)
vals = Store.objects.aggregate(Max("friday_night_closing"), Min("original_opening"))
self.assertEqual(
vals,
{
"friday_night_closing__max": datetime.time(23, 59, 59),
"original_opening__min": datetime.datetime(1945, 4, 25, 16, 24, 14),
}
)
def test_annotate_values_list(self):
0
Source : tests.py
with Apache License 2.0
from gethue
def test_query_annotation(self):
# Only min and max make sense for datetimes.
morning = Session.objects.create(name='morning')
afternoon = Session.objects.create(name='afternoon')
SessionEvent.objects.create(dt=datetime.datetime(2011, 9, 1, 23, 20, 20), session=afternoon)
SessionEvent.objects.create(dt=datetime.datetime(2011, 9, 1, 13, 20, 30), session=afternoon)
SessionEvent.objects.create(dt=datetime.datetime(2011, 9, 1, 3, 20, 40), session=morning)
morning_min_dt = datetime.datetime(2011, 9, 1, 3, 20, 40)
afternoon_min_dt = datetime.datetime(2011, 9, 1, 13, 20, 30)
self.assertQuerysetEqual(
Session.objects.annotate(dt=Min('events__dt')).order_by('dt'),
[morning_min_dt, afternoon_min_dt],
transform=lambda d: d.dt)
self.assertQuerysetEqual(
Session.objects.annotate(dt=Min('events__dt')).filter(dt__lt=afternoon_min_dt),
[morning_min_dt],
transform=lambda d: d.dt)
self.assertQuerysetEqual(
Session.objects.annotate(dt=Min('events__dt')).filter(dt__gte=afternoon_min_dt),
[afternoon_min_dt],
transform=lambda d: d.dt)
def test_query_datetimes(self):
0
Source : tests.py
with Apache License 2.0
from gethue
def test_query_annotation(self):
# Only min and max make sense for datetimes.
morning = Session.objects.create(name='morning')
afternoon = Session.objects.create(name='afternoon')
SessionEvent.objects.create(dt=datetime.datetime(2011, 9, 1, 23, 20, 20, tzinfo=EAT), session=afternoon)
SessionEvent.objects.create(dt=datetime.datetime(2011, 9, 1, 13, 20, 30, tzinfo=EAT), session=afternoon)
SessionEvent.objects.create(dt=datetime.datetime(2011, 9, 1, 3, 20, 40, tzinfo=EAT), session=morning)
morning_min_dt = datetime.datetime(2011, 9, 1, 3, 20, 40, tzinfo=EAT)
afternoon_min_dt = datetime.datetime(2011, 9, 1, 13, 20, 30, tzinfo=EAT)
self.assertQuerysetEqual(
Session.objects.annotate(dt=Min('events__dt')).order_by('dt'),
[morning_min_dt, afternoon_min_dt],
transform=lambda d: d.dt)
self.assertQuerysetEqual(
Session.objects.annotate(dt=Min('events__dt')).filter(dt__lt=afternoon_min_dt),
[morning_min_dt],
transform=lambda d: d.dt)
self.assertQuerysetEqual(
Session.objects.annotate(dt=Min('events__dt')).filter(dt__gte=afternoon_min_dt),
[afternoon_min_dt],
transform=lambda d: d.dt)
@skipUnlessDBFeature('has_zoneinfo_database')
0
Source : transaction_scan_service.py
with MIT License
from gnosis
def get_block_numbers_for_search(
self, safe_addresses: List[str]
) -> Optional[Tuple[int, int]]:
"""
:param safe_addresses:
:return: Minimum common `from_block_number` and `to_block_number` for search of relevant `tx hashes`
"""
block_process_limit = self.block_process_limit
confirmations = self.confirmations
current_block_number = self.ethereum_client.current_block_number
safe_tx_status_queryset = SafeTxStatus.objects.filter(
safe_id__in=safe_addresses
)
common_minimum_block_number = safe_tx_status_queryset.aggregate(
**{self.database_field: Min(self.database_field)}
)[self.database_field]
if common_minimum_block_number is None: # Empty queryset
return
from_block_number = common_minimum_block_number + 1
if (current_block_number - common_minimum_block_number) < confirmations:
return # We don't want problems with reorgs
if block_process_limit:
to_block_number = min(
common_minimum_block_number + block_process_limit,
current_block_number - confirmations,
)
else:
to_block_number = current_block_number - confirmations
return from_block_number, to_block_number
def process_addresses(
0
Source : ethereum_indexer.py
with MIT License
from gnosis
def get_minimum_block_number(
self, addresses: Optional[Sequence[str]] = None
) -> Optional[int]:
logger.debug(
"%s: Getting minimum-block-number for %d addresses",
self.__class__.__name__,
len(addresses) if addresses else 0,
)
queryset = (
self.database_queryset.filter(address__in=addresses)
if addresses
else self.database_queryset
)
minimum_block_number = queryset.aggregate(
**{self.database_field: Min(self.database_field)}
)[self.database_field]
logger.debug(
"%s: Got minimum-block-number=%s",
self.__class__.__name__,
minimum_block_number,
)
return minimum_block_number
def get_almost_updated_addresses(
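Both gnosis snippets above build the aggregate keyword dynamically because the column to scan (self.database_field) is configurable; the trick is plain dict unpacking. A sketch with a hypothetical field name:

from django.db.models import Min

field = "erc20_block_number"   # hypothetical value of self.database_field

# Equivalent to .aggregate(erc20_block_number=Min("erc20_block_number"))
minimum = SafeTxStatus.objects.aggregate(**{field: Min(field)})[field]
# None when the queryset is empty, which the callers treat as "nothing to do".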
0
Source : admin_list.py
with Apache License 2.0
from i13-msrg
def date_hierarchy(cl):
"""
Display the date hierarchy for date drill-down functionality.
"""
if cl.date_hierarchy:
field_name = cl.date_hierarchy
year_field = '%s__year' % field_name
month_field = '%s__month' % field_name
day_field = '%s__day' % field_name
field_generic = '%s__' % field_name
year_lookup = cl.params.get(year_field)
month_lookup = cl.params.get(month_field)
day_lookup = cl.params.get(day_field)
def link(filters):
return cl.get_query_string(filters, [field_generic])
if not (year_lookup or month_lookup or day_lookup):
# select appropriate start level
date_range = cl.queryset.aggregate(first=models.Min(field_name),
last=models.Max(field_name))
if date_range['first'] and date_range['last']:
if date_range['first'].year == date_range['last'].year:
year_lookup = date_range['first'].year
if date_range['first'].month == date_range['last'].month:
month_lookup = date_range['first'].month
if year_lookup and month_lookup and day_lookup:
day = datetime.date(int(year_lookup), int(month_lookup), int(day_lookup))
return {
'show': True,
'back': {
'link': link({year_field: year_lookup, month_field: month_lookup}),
'title': capfirst(formats.date_format(day, 'YEAR_MONTH_FORMAT'))
},
'choices': [{'title': capfirst(formats.date_format(day, 'MONTH_DAY_FORMAT'))}]
}
elif year_lookup and month_lookup:
days = getattr(cl.queryset, 'dates')(field_name, 'day')
return {
'show': True,
'back': {
'link': link({year_field: year_lookup}),
'title': str(year_lookup)
},
'choices': [{
'link': link({year_field: year_lookup, month_field: month_lookup, day_field: day.day}),
'title': capfirst(formats.date_format(day, 'MONTH_DAY_FORMAT'))
} for day in days]
}
elif year_lookup:
months = getattr(cl.queryset, 'dates')(field_name, 'month')
return {
'show': True,
'back': {
'link': link({}),
'title': _('All dates')
},
'choices': [{
'link': link({year_field: year_lookup, month_field: month.month}),
'title': capfirst(formats.date_format(month, 'YEAR_MONTH_FORMAT'))
} for month in months]
}
else:
years = getattr(cl.queryset, 'dates')(field_name, 'year')
return {
'show': True,
'choices': [{
'link': link({year_field: str(year.year)}),
'title': str(year.year),
} for year in years]
}
@register.tag(name='date_hierarchy')
0
Source : purge_old_personal_api_key_events.py
with BSD 3-Clause "New" or "Revised" License
from ietf-tools
def handle(self, *args, **options):
keep_days = options['keep_days']
dry_run = options['dry_run']
def _format_count(count, unit='day'):
return '{} {}{}'.format(count, unit, ('' if count == 1 else 's'))
if keep_days < 0:
raise CommandError('Negative keep_days not allowed ({} was specified)'.format(keep_days))
if dry_run:
self.stdout.write('Dry run requested, records will not be deleted\n')
self.stdout.write('Finding events older than {}\n'.format(_format_count(keep_days)))
self.stdout.flush()
now = datetime.now()
old_events = PersonApiKeyEvent.objects.filter(
time__lt=now - timedelta(days=keep_days)
)
stats = old_events.aggregate(Min('time'), Max('time'))
old_count = old_events.count()
if old_count == 0:
self.stdout.write('No events older than {} found\n'.format(_format_count(keep_days)))
return
oldest_date = stats['time__min']
oldest_ago = now - oldest_date
newest_date = stats['time__max']
newest_ago = now - newest_date
action_fmt = 'Would delete {}\n' if dry_run else 'Deleting {}\n'
self.stdout.write(action_fmt.format(_format_count(old_count, 'event')))
self.stdout.write(' Oldest at {} ({} ago)\n'.format(oldest_date, _format_count(oldest_ago.days)))
self.stdout.write(' Most recent at {} ({} ago)\n'.format(newest_date, _format_count(newest_ago.days)))
self.stdout.flush()
if not dry_run:
old_events.delete()
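The management command above aggregates over an already-filtered queryset, so the reported oldest and newest timestamps describe only the rows about to be deleted. The core of that, as a sketch:

from datetime import datetime, timedelta

from django.db.models import Max, Min

cutoff = datetime.now() - timedelta(days=14)   # keep_days=14 is a hypothetical value
old_events = PersonApiKeyEvent.objects.filter(time__lt=cutoff)
stats = old_events.aggregate(Min('time'), Max('time'))
# {'time__min': ..., 'time__max': ...}; both None when nothing is old enough.
if stats['time__min'] is not None:
    print(f"Deleting events from {stats['time__min']} to {stats['time__max']}")
    old_events.delete()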
0
Source : admin.py
with MIT License
from lukasvinclav
def choices(self, changelist):
total = self.q.all().count()
min_value = self.q.all().aggregate(
min=Min(self.parameter_name)
).get('min', 0)
if total > 1:
max_value = self.q.all().aggregate(
max=Max(self.parameter_name)
).get('max', 0)
else:
max_value = None
if isinstance(self.field, (FloatField, DecimalField)):
decimals = self.MAX_DECIMALS
step = self.STEP if self.STEP else self._get_min_step(self.MAX_DECIMALS)
else:
decimals = 0
step = self.STEP if self.STEP else 1
return ({
'decimals': decimals,
'step': step,
'parameter_name': self.parameter_name,
'request': self.request,
'min': min_value,
'max': max_value,
'value_from': self.used_parameters.get(self.parameter_name + '_from', min_value),
'value_to': self.used_parameters.get(self.parameter_name + '_to', max_value),
'form': SliderNumericForm(name=self.parameter_name, data={
self.parameter_name + '_from': self.used_parameters.get(self.parameter_name + '_from', min_value),
self.parameter_name + '_to': self.used_parameters.get(self.parameter_name + '_to', max_value),
})
}, )
def _get_min_step(self, precision):
0
Source : tests.py
with Apache License 2.0
from lumanjiao
def test_even_more_aggregate(self):
publishers = Publisher.objects.annotate(earliest_book=Min("book__pubdate")).exclude(earliest_book=None).order_by("earliest_book").values()
self.assertEqual(
list(publishers), [
{
'earliest_book': datetime.date(1991, 10, 15),
'num_awards': 9,
'id': 4,
'name': 'Morgan Kaufmann'
},
{
'earliest_book': datetime.date(1995, 1, 15),
'num_awards': 7,
'id': 3,
'name': 'Prentice Hall'
},
{
'earliest_book': datetime.date(2007, 12, 6),
'num_awards': 3,
'id': 1,
'name': 'Apress'
},
{
'earliest_book': datetime.date(2008, 3, 3),
'num_awards': 1,
'id': 2,
'name': 'Sams'
}
]
)
vals = Store.objects.aggregate(Max("friday_night_closing"), Min("original_opening"))
self.assertEqual(
vals,
{
"friday_night_closing__max": datetime.time(23, 59, 59),
"original_opening__min": datetime.datetime(1945, 4, 25, 16, 24, 14),
}
)
def test_annotate_values_list(self):
0
Source : tests.py
with Apache License 2.0
from lumanjiao
def test_query_annotation(self):
# Only min and max make sense for datetimes.
morning = Session.objects.create(name='morning')
afternoon = Session.objects.create(name='afternoon')
SessionEvent.objects.create(dt=datetime.datetime(2011, 9, 1, 23, 20, 20), session=afternoon)
SessionEvent.objects.create(dt=datetime.datetime(2011, 9, 1, 13, 20, 30), session=afternoon)
SessionEvent.objects.create(dt=datetime.datetime(2011, 9, 1, 3, 20, 40), session=morning)
morning_min_dt = datetime.datetime(2011, 9, 1, 3, 20, 40)
afternoon_min_dt = datetime.datetime(2011, 9, 1, 13, 20, 30)
self.assertQuerysetEqual(
Session.objects.annotate(dt=Min('events__dt')).order_by('dt'),
[morning_min_dt, afternoon_min_dt],
transform=lambda d: d.dt)
self.assertQuerysetEqual(
Session.objects.annotate(dt=Min('events__dt')).filter(dt__lt=afternoon_min_dt),
[morning_min_dt],
transform=lambda d: d.dt)
self.assertQuerysetEqual(
Session.objects.annotate(dt=Min('events__dt')).filter(dt__gte=afternoon_min_dt),
[afternoon_min_dt],
transform=lambda d: d.dt)
def test_query_datetimes(self):
0
Source : tests.py
with Apache License 2.0
from lumanjiao
def test_query_annotation(self):
# Only min and max make sense for datetimes.
morning = Session.objects.create(name='morning')
afternoon = Session.objects.create(name='afternoon')
SessionEvent.objects.create(dt=datetime.datetime(2011, 9, 1, 23, 20, 20, tzinfo=EAT), session=afternoon)
SessionEvent.objects.create(dt=datetime.datetime(2011, 9, 1, 13, 20, 30, tzinfo=EAT), session=afternoon)
SessionEvent.objects.create(dt=datetime.datetime(2011, 9, 1, 3, 20, 40, tzinfo=EAT), session=morning)
morning_min_dt = datetime.datetime(2011, 9, 1, 3, 20, 40, tzinfo=EAT)
afternoon_min_dt = datetime.datetime(2011, 9, 1, 13, 20, 30, tzinfo=EAT)
self.assertQuerysetEqual(
Session.objects.annotate(dt=Min('events__dt')).order_by('dt'),
[morning_min_dt, afternoon_min_dt],
transform=lambda d: d.dt)
self.assertQuerysetEqual(
Session.objects.annotate(dt=Min('events__dt')).filter(dt__lt=afternoon_min_dt),
[morning_min_dt],
transform=lambda d: d.dt)
self.assertQuerysetEqual(
Session.objects.annotate(dt=Min('events__dt')).filter(dt__gte=afternoon_min_dt),
[afternoon_min_dt],
transform=lambda d: d.dt)
@skipUnlessDBFeature('has_zoneinfo_database')
0
Source : admin_list.py
with MIT License
from MekAkUActOR
def date_hierarchy(cl):
"""
Display the date hierarchy for date drill-down functionality.
"""
if cl.date_hierarchy:
field_name = cl.date_hierarchy
year_field = '%s__year' % field_name
month_field = '%s__month' % field_name
day_field = '%s__day' % field_name
field_generic = '%s__' % field_name
year_lookup = cl.params.get(year_field)
month_lookup = cl.params.get(month_field)
day_lookup = cl.params.get(day_field)
def link(filters):
return cl.get_query_string(filters, [field_generic])
if not (year_lookup or month_lookup or day_lookup):
# select appropriate start level
date_range = cl.queryset.aggregate(first=models.Min(field_name),
last=models.Max(field_name))
if date_range['first'] and date_range['last']:
if date_range['first'].year == date_range['last'].year:
year_lookup = date_range['first'].year
if date_range['first'].month == date_range['last'].month:
month_lookup = date_range['first'].month
if year_lookup and month_lookup and day_lookup:
day = datetime.date(int(year_lookup), int(month_lookup), int(day_lookup))
return {
'show': True,
'back': {
'link': link({year_field: year_lookup, month_field: month_lookup}),
'title': capfirst(formats.date_format(day, 'YEAR_MONTH_FORMAT'))
},
'choices': [{'title': capfirst(formats.date_format(day, 'MONTH_DAY_FORMAT'))}]
}
elif year_lookup and month_lookup:
days = getattr(cl.queryset, 'dates')(field_name, 'day')
return {
'show': True,
'back': {
'link': link({year_field: year_lookup}),
'title': str(year_lookup)
},
'choices': [{
'link': link({year_field: year_lookup, month_field: month_lookup, day_field: day.day}),
'title': capfirst(formats.date_format(day, 'MONTH_DAY_FORMAT'))
} for day in days]
}
elif year_lookup:
months = getattr(cl.queryset, 'dates')(field_name, 'month')
return {
'show': True,
'back': {
'link': link({}),
'title': _('All dates')
},
'choices': [{
'link': link({year_field: year_lookup, month_field: month.month}),
'title': capfirst(formats.date_format(month, 'YEAR_MONTH_FORMAT'))
} for month in months]
}
else:
years = getattr(cl.queryset, 'dates')(field_name, 'year')
return {
'show': True,
'back': None,
'choices': [{
'link': link({year_field: str(year.year)}),
'title': str(year.year),
} for year in years]
}
@register.tag(name='date_hierarchy')
0
Source : ator_serializer.py
with GNU Affero General Public License v3.0
from parlametria
def get_queryset(self):
"""
Retorna a atividade parlamentar por interesse e tema de um parlamentar.
"""
leggo_id_autor = self.kwargs["id_autor"]
tema_arg = self.request.query_params.get("tema")
interesse_arg = self.request.query_params.get("interesse")
destaques_arg = self.request.query_params.get('destaque')
if interesse_arg is None:
interesse_arg = "leggo"
interesses = get_filtered_interesses(interesse_arg, tema_arg)
atores = Atores.objects
if destaques_arg == 'true':
destaques = get_filtered_destaques(destaques_arg)
atores = (
atores.filter(id_leggo__in=destaques)
)
atores = (
atores.filter(id_leggo__in=interesses.values("id_leggo"))
.filter(tipo_acao__in=['Proposição', 'Recurso'])
.select_related("entidade")
.values(
"id_autor_parlametria"
)
.annotate(
total_documentos=Sum("num_documentos"),
peso_documentos=Sum("peso_total_documentos"),
)
.prefetch_related(Prefetch("interesse", queryset=interesses))
)
min_max = atores.aggregate(
max_peso_documentos=Max("peso_documentos"),
min_peso_documentos=Min("peso_documentos"))
ator = atores.filter(id_autor_parlametria=leggo_id_autor).annotate(
max_peso_documentos=Value(min_max["max_peso_documentos"], FloatField()),
min_peso_documentos=Value(min_max["min_peso_documentos"], FloatField())
)
return ator
class AtuacaoParlamentarSerializer(serializers.Serializer):
0
Source : test_aws_report_db_accessor.py
with Apache License 2.0
from project-koku
def test_populate_line_item_daily_table(self):
"""Test that the daily table is populated."""
ce_table_name = AWS_CUR_TABLE_MAP["cost_entry"]
daily_table_name = AWS_CUR_TABLE_MAP["line_item_daily"]
ce_table = getattr(self.accessor.report_schema, ce_table_name)
daily_table = getattr(self.accessor.report_schema, daily_table_name)
with schema_context(self.schema):
bills = self.accessor.get_cost_entry_bills_query_by_provider(self.aws_provider.uuid)
bill_ids = [str(bill.id) for bill in bills.all()]
ce_entry = ce_table.objects.all().aggregate(Min("interval_start"), Max("interval_start"))
start_date = ce_entry["interval_start__min"]
end_date = ce_entry["interval_start__max"]
start_date = start_date.replace(hour=0, minute=0, second=0, microsecond=0)
end_date = end_date.replace(hour=0, minute=0, second=0, microsecond=0)
query = self.accessor._get_db_obj_query(daily_table_name)
query.delete()
initial_count = query.count()
self.accessor.populate_line_item_daily_table(start_date, end_date, bill_ids)
with schema_context(self.schema):
self.assertNotEqual(query.count(), initial_count)
daily_entry = daily_table.objects.all().aggregate(Min("usage_start"), Max("usage_start"))
result_start_date = daily_entry["usage_start__min"]
result_end_date = daily_entry["usage_start__max"]
self.assertEqual(result_start_date, start_date.date())
self.assertEqual(result_end_date, end_date.date())
entry = query.first()
summary_columns = [
"cost_entry_product_id",
"cost_entry_pricing_id",
"line_item_type",
"usage_account_id",
"usage_start",
"usage_end",
"product_code",
"usage_type",
"operation",
"resource_id",
"usage_amount",
"currency_code",
"unblended_rate",
"unblended_cost",
"blended_rate",
"blended_cost",
"public_on_demand_cost",
"public_on_demand_rate",
"tags",
]
for column in summary_columns:
self.assertIsNotNone(getattr(entry, column))
self.assertNotEqual(getattr(entry, "tags"), {})
def test_populate_line_item_daily_table_no_bill_ids(self):
0
Source : test_aws_report_db_accessor.py
with Apache License 2.0
from project-koku
def test_populate_line_item_daily_table_no_bill_ids(self):
"""Test that the daily table is populated."""
ce_table_name = AWS_CUR_TABLE_MAP["cost_entry"]
daily_table_name = AWS_CUR_TABLE_MAP["line_item_daily"]
ce_table = getattr(self.accessor.report_schema, ce_table_name)
daily_table = getattr(self.accessor.report_schema, daily_table_name)
bill_ids = None
with schema_context(self.schema):
ce_entry = ce_table.objects.all().aggregate(Min("interval_start"), Max("interval_start"))
start_date = ce_entry["interval_start__min"]
end_date = ce_entry["interval_start__max"]
start_date = start_date.replace(hour=0, minute=0, second=0, microsecond=0)
end_date = end_date.replace(hour=0, minute=0, second=0, microsecond=0)
query = self.accessor._get_db_obj_query(daily_table_name)
self.accessor.populate_line_item_daily_table(start_date, end_date, bill_ids)
with schema_context(self.schema):
daily_entry = daily_table.objects.all().aggregate(Min("usage_start"), Max("usage_start"))
result_start_date = daily_entry["usage_start__min"]
result_end_date = daily_entry["usage_start__max"]
self.assertEqual(result_start_date, start_date.date())
self.assertEqual(result_end_date, end_date.date())
entry = query.first()
summary_columns = [
"cost_entry_product_id",
"cost_entry_pricing_id",
"line_item_type",
"usage_account_id",
"usage_start",
"usage_end",
"product_code",
"usage_type",
"operation",
"resource_id",
"usage_amount",
"currency_code",
"unblended_rate",
"unblended_cost",
"blended_rate",
"blended_cost",
"public_on_demand_cost",
"public_on_demand_rate",
"tags",
]
for column in summary_columns:
self.assertIsNotNone(getattr(entry, column))
self.assertNotEqual(getattr(entry, "tags"), {})
def test_populate_line_item_daily_summary_table(self):
0
Source : test_aws_report_db_accessor.py
with Apache License 2.0
from project-koku
def test_populate_line_item_daily_summary_table(self):
"""Test that the daily summary table is populated."""
ce_table_name = AWS_CUR_TABLE_MAP["cost_entry"]
summary_table_name = AWS_CUR_TABLE_MAP["line_item_daily_summary"]
ce_table = getattr(self.accessor.report_schema, ce_table_name)
summary_table = getattr(self.accessor.report_schema, summary_table_name)
bills = self.accessor.get_cost_entry_bills_query_by_provider(self.aws_provider.uuid)
with schema_context(self.schema):
bill_ids = [str(bill.id) for bill in bills.all()]
table_name = AWS_CUR_TABLE_MAP["line_item"]
tag_query = self.accessor._get_db_obj_query(table_name)
possible_keys = []
possible_values = []
with schema_context(self.schema):
for item in tag_query:
possible_keys += list(item.tags.keys())
possible_values += list(item.tags.values())
ce_entry = ce_table.objects.all().aggregate(Min("interval_start"), Max("interval_start"))
start_date = ce_entry["interval_start__min"]
end_date = ce_entry["interval_start__max"]
start_date = start_date.replace(hour=0, minute=0, second=0, microsecond=0)
end_date = end_date.replace(hour=0, minute=0, second=0, microsecond=0)
query = self.accessor._get_db_obj_query(summary_table_name)
with schema_context(self.schema):
summary_table.objects.all().delete()
initial_count = query.count()
self.accessor.populate_line_item_daily_summary_table(start_date, end_date, bill_ids)
with schema_context(self.schema):
self.assertNotEqual(query.count(), initial_count)
summary_entry = summary_table.objects.all().aggregate(Min("usage_start"), Max("usage_start"))
result_start_date = summary_entry["usage_start__min"]
result_end_date = summary_entry["usage_start__max"]
self.assertEqual(result_start_date, start_date.date())
self.assertEqual(result_end_date, end_date.date())
entry = query.first()
summary_columns = [
"usage_start",
"usage_end",
"usage_account_id",
"product_code",
"product_family",
"region",
"unit",
"resource_count",
"usage_amount",
"currency_code",
"unblended_rate",
"unblended_cost",
"blended_rate",
"blended_cost",
"public_on_demand_cost",
"public_on_demand_rate",
"tags",
]
for column in summary_columns:
self.assertIsNotNone(getattr(entry, column))
found_keys = []
found_values = []
for item in query.all():
found_keys += list(item.tags.keys())
found_values += list(item.tags.values())
self.assertEqual(set(sorted(possible_keys)), set(sorted(found_keys)))
self.assertEqual(set(sorted(possible_values)), set(sorted(found_values)))
def test_populate_line_item_daily_summary_table_no_bill_ids(self):
0
Source : test_aws_report_db_accessor.py
with Apache License 2.0
from project-koku
def test_populate_line_item_daily_summary_table_no_bill_ids(self):
"""Test that the daily summary table is populated."""
ce_table_name = AWS_CUR_TABLE_MAP["cost_entry"]
summary_table_name = AWS_CUR_TABLE_MAP["line_item_daily_summary"]
ce_table = getattr(self.accessor.report_schema, ce_table_name)
summary_table = getattr(self.accessor.report_schema, summary_table_name)
bill_ids = None
table_name = AWS_CUR_TABLE_MAP["line_item"]
tag_query = self.accessor._get_db_obj_query(table_name)
possible_keys = []
possible_values = []
with schema_context(self.schema):
for item in tag_query:
possible_keys += list(item.tags.keys())
possible_values += list(item.tags.values())
ce_entry = ce_table.objects.all().aggregate(Min("interval_start"), Max("interval_start"))
start_date = ce_entry["interval_start__min"]
end_date = ce_entry["interval_start__max"]
start_date = start_date.replace(hour=0, minute=0, second=0, microsecond=0)
end_date = end_date.replace(hour=0, minute=0, second=0, microsecond=0)
query = self.accessor._get_db_obj_query(summary_table_name)
with schema_context(self.schema):
summary_table.objects.all().delete()
initial_count = query.count()
self.accessor.populate_line_item_daily_summary_table(start_date, end_date, bill_ids)
with schema_context(self.schema):
self.assertNotEqual(query.count(), initial_count)
summary_entry = summary_table.objects.all().aggregate(Min("usage_start"), Max("usage_start"))
result_start_date = summary_entry["usage_start__min"]
result_end_date = summary_entry["usage_start__max"]
self.assertEqual(result_start_date, start_date.date())
self.assertEqual(result_end_date, end_date.date())
entry = query.first()
summary_columns = [
"usage_start",
"usage_end",
"usage_account_id",
"product_code",
"product_family",
"region",
"unit",
"resource_count",
"usage_amount",
"currency_code",
"unblended_rate",
"unblended_cost",
"blended_rate",
"blended_cost",
"public_on_demand_cost",
"public_on_demand_rate",
"tags",
]
for column in summary_columns:
self.assertIsNotNone(getattr(entry, column))
found_keys = []
found_values = []
for item in query.all():
found_keys += list(item.tags.keys())
found_values += list(item.tags.values())
self.assertEqual(set(sorted(possible_keys)), set(sorted(found_keys)))
self.assertEqual(set(sorted(possible_values)), set(sorted(found_values)))
def test_populate_awstags_summary_table(self):
0
Source : test_aws_report_db_accessor.py
with Apache License 2.0
from project-koku
def test_populate_ocp_on_aws_cost_daily_summary(self):
    """Test that the OCP on AWS cost summary table is populated."""
    summary_table_name = AWS_CUR_TABLE_MAP["ocp_on_aws_daily_summary"]
    project_summary_table_name = AWS_CUR_TABLE_MAP["ocp_on_aws_project_daily_summary"]
    bill_ids = []
    markup_value = Decimal(0.1)
    summary_table = get_model(summary_table_name)
    project_table = get_model(project_summary_table_name)

    today = DateAccessor().today_with_timezone("UTC")
    last_month = today - relativedelta.relativedelta(months=1)
    resource_id = "i-12345"

    with schema_context(self.schema):
        for cost_entry_date in (today, last_month):
            bill = self.creator.create_cost_entry_bill(
                provider_uuid=self.aws_provider.uuid, bill_date=cost_entry_date
            )
            bill_ids.append(str(bill.id))
            cost_entry = self.creator.create_cost_entry(bill, cost_entry_date)
            product = self.creator.create_cost_entry_product("Compute Instance")
            pricing = self.creator.create_cost_entry_pricing()
            reservation = self.creator.create_cost_entry_reservation()
            self.creator.create_cost_entry_line_item(
                bill, cost_entry, product, pricing, reservation, resource_id=resource_id
            )

    self.accessor.populate_line_item_daily_table(last_month, today, bill_ids)

    li_table_name = AWS_CUR_TABLE_MAP["line_item"]
    with schema_context(self.schema):
        li_table = get_model(li_table_name)
        sum_aws_cost = li_table.objects.all().aggregate(Sum("unblended_cost"))["unblended_cost__sum"]

    with OCPReportDBAccessor(self.schema) as ocp_accessor:
        cluster_id = "testcluster"
        with ProviderDBAccessor(provider_uuid=self.ocp_test_provider_uuid) as provider_access:
            provider_uuid = provider_access.get_provider().uuid
        for cost_entry_date in (today, last_month):
            period = self.creator.create_ocp_report_period(
                provider_uuid, period_date=cost_entry_date, cluster_id=cluster_id
            )
            report = self.creator.create_ocp_report(period, cost_entry_date)
            self.creator.create_ocp_usage_line_item(period, report, resource_id=resource_id)
            self.creator.create_ocp_node_label_line_item(period, report)

        ocp_report_table_name = OCP_REPORT_TABLE_MAP["report"]
        with schema_context(self.schema):
            report_table = getattr(ocp_accessor.report_schema, ocp_report_table_name)
            report_entry = report_table.objects.all().aggregate(Min("interval_start"), Max("interval_start"))
            start_date = report_entry["interval_start__min"]
            end_date = report_entry["interval_start__max"]

        start_date = start_date.replace(hour=0, minute=0, second=0, microsecond=0)
        end_date = end_date.replace(hour=0, minute=0, second=0, microsecond=0)

        ocp_accessor.populate_node_label_line_item_daily_table(start_date, end_date, cluster_id)
        ocp_accessor.populate_line_item_daily_table(start_date, end_date, cluster_id)
        ocp_accessor.populate_line_item_daily_summary_table(start_date, end_date, cluster_id, provider_uuid)

    with schema_context(self.schema):
        query = self.accessor._get_db_obj_query(summary_table_name)
        initial_count = query.count()

    self.accessor.populate_ocp_on_aws_cost_daily_summary(last_month, today, cluster_id, bill_ids, markup_value)

    with schema_context(self.schema):
        self.assertNotEqual(query.count(), initial_count)

        sum_cost = summary_table.objects.filter(cluster_id=cluster_id).aggregate(Sum("unblended_cost"))[
            "unblended_cost__sum"
        ]
        sum_project_cost = project_table.objects.filter(cluster_id=cluster_id).aggregate(Sum("unblended_cost"))[
            "unblended_cost__sum"
        ]
        sum_pod_cost = project_table.objects.filter(cluster_id=cluster_id).aggregate(Sum("pod_cost"))[
            "pod_cost__sum"
        ]
        sum_markup_cost = summary_table.objects.filter(cluster_id=cluster_id).aggregate(Sum("markup_cost"))[
            "markup_cost__sum"
        ]
        sum_markup_cost_project = project_table.objects.filter(cluster_id=cluster_id).aggregate(
            Sum("markup_cost")
        )["markup_cost__sum"]
        sum_project_markup_cost_project = project_table.objects.filter(cluster_id=cluster_id).aggregate(
            Sum("project_markup_cost")
        )["project_markup_cost__sum"]

        self.assertEqual(sum_cost, sum_project_cost)
        self.assertLessEqual(sum_cost, sum_aws_cost)
        self.assertAlmostEqual(sum_markup_cost, sum_cost * markup_value, 9)
        self.assertAlmostEqual(sum_markup_cost_project, sum_cost * markup_value, 9)
        self.assertAlmostEqual(sum_project_markup_cost_project, sum_pod_cost * markup_value, 9)
def test_bills_for_provider_uuid(self):
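The OCP-on-AWS example above repeats the Min/Max window pattern on the report table and then checks several Sum-based cost columns. When a test needs more than one aggregate, they can be requested in a single query; naming them avoids the auto-generated "interval_start__min"-style keys. A short sketch, assuming nothing beyond a queryset with interval_start and unblended_cost fields:

# Sketch only: report_qs is any queryset exposing interval_start and unblended_cost.
from django.db.models import Max, Min, Sum

def report_stats(report_qs):
    # One round trip returns all three aggregates under the chosen keys.
    return report_qs.aggregate(
        first_seen=Min("interval_start"),
        last_seen=Max("interval_start"),
        total_cost=Sum("unblended_cost"),
    )

# report_stats(qs) -> {"first_seen": ..., "last_seen": ..., "total_cost": ...}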
0
Source : test_aws_report_db_accessor.py
with Apache License 2.0
from project-koku
def test_populate_markup_cost(self):
    """Test that the daily summary table is populated."""
    summary_table_name = AWS_CUR_TABLE_MAP["line_item_daily_summary"]
    summary_table = getattr(self.accessor.report_schema, summary_table_name)

    bills = self.accessor.get_cost_entry_bills_query_by_provider(self.aws_provider.uuid)
    with schema_context(self.schema):
        bill_ids = [str(bill.id) for bill in bills.all()]

        summary_entry = summary_table.objects.all().aggregate(Min("usage_start"), Max("usage_start"))
        start_date = summary_entry["usage_start__min"]
        end_date = summary_entry["usage_start__max"]

    query = self.accessor._get_db_obj_query(summary_table_name)
    with schema_context(self.schema):
        expected_markup = query.filter(cost_entry_bill__in=bill_ids).aggregate(
            markup=Sum(F("unblended_cost") * decimal.Decimal(0.1))
        )
        expected_markup = expected_markup.get("markup")

    self.accessor.populate_markup_cost(
        self.aws_provider.uuid, decimal.Decimal(0.1), start_date, end_date, bill_ids
    )
    with schema_context(self.schema):
        query = (
            self.accessor._get_db_obj_query(summary_table_name)
            .filter(cost_entry_bill__in=bill_ids)
            .aggregate(Sum("markup_cost"))
        )
        actual_markup = query.get("markup_cost__sum")

    self.assertAlmostEqual(actual_markup, expected_markup, 6)
@patch("masu.database.aws_report_db_accessor.AWSReportDBAccessor._execute_presto_raw_sql_query")
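test_populate_markup_cost above pairs the usage_start Min/Max bounds with Sum over an F() expression to precompute the expected markup before the accessor writes it. A condensed sketch of that calculation, assuming only a hypothetical summary queryset with the same fields:

# Sketch only: summary_qs is any queryset exposing usage_start and unblended_cost.
import decimal
from django.db.models import F, Max, Min, Sum

def expected_markup(summary_qs, rate=decimal.Decimal("0.1")):
    bounds = summary_qs.aggregate(Min("usage_start"), Max("usage_start"))
    # Sum over an F() expression multiplies per row before totalling.
    markup = summary_qs.aggregate(markup=Sum(F("unblended_cost") * rate))["markup"]
    return bounds["usage_start__min"], bounds["usage_start__max"], markup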
0
Source : test_azure_report_db_accessor.py
with Apache License 2.0
from project-koku
def test_populate_line_item_daily_summary_table(self):
    """Test that the daily summary table is populated."""
    summary_table_name = AZURE_REPORT_TABLE_MAP["line_item_daily_summary"]
    summary_table = getattr(self.accessor.report_schema, summary_table_name)

    bills = self.accessor.get_cost_entry_bills_query_by_provider(self.azure_provider_uuid)
    with schema_context(self.schema):
        bill_ids = [str(bill.id) for bill in bills.all()]

    table_name = AZURE_REPORT_TABLE_MAP["line_item"]
    line_item_table = getattr(self.accessor.report_schema, table_name)
    tag_query = self.accessor._get_db_obj_query(table_name)
    possible_keys = []
    possible_values = []
    with schema_context(self.schema):
        for item in tag_query:
            possible_keys += list(item.tags.keys())
            possible_values += list(item.tags.values())

        li_entry = line_item_table.objects.all().aggregate(Min("usage_date"), Max("usage_date"))
        start_date = li_entry["usage_date__min"]
        end_date = li_entry["usage_date__max"]

    start_date = start_date.date() if isinstance(start_date, datetime.datetime) else start_date
    end_date = end_date.date() if isinstance(end_date, datetime.datetime) else end_date

    query = self.accessor._get_db_obj_query(summary_table_name)
    with schema_context(self.schema):
        query.delete()
        initial_count = query.count()

    self.accessor.populate_line_item_daily_summary_table(start_date, end_date, bill_ids)

    with schema_context(self.schema):
        self.assertNotEqual(query.count(), initial_count)

        summary_entry = summary_table.objects.all().aggregate(Min("usage_start"), Max("usage_start"))
        result_start_date = summary_entry["usage_start__min"]
        result_end_date = summary_entry["usage_start__max"]
        self.assertEqual(result_start_date, start_date)
        self.assertEqual(result_end_date, end_date)

        entry = query.order_by("-uuid")
        summary_columns = [
            "usage_start",
            "usage_quantity",
            "pretax_cost",
            "cost_entry_bill_id",
            "meter_id",
            "tags",
        ]
        for column in summary_columns:
            self.assertIsNotNone(getattr(entry.first(), column))

        found_keys = []
        found_values = []
        for item in query.all():
            found_keys += list(item.tags.keys())
            found_values += list(item.tags.values())

        self.assertEqual(set(sorted(possible_keys)), set(sorted(found_keys)))
        self.assertEqual(set(sorted(possible_values)), set(sorted(found_values)))
def test_get_cost_entry_bills_by_date(self):
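The Azure variant above aggregates Min/Max over usage_date, which may come back as either a date or a datetime depending on the column type, so it normalises the bounds before comparing them to the summary table. A minimal sketch of that guard, assuming a hypothetical line-item queryset:

# Sketch only: line_item_qs is any queryset exposing a usage_date field.
import datetime
from django.db.models import Max, Min

def usage_date_bounds(line_item_qs):
    bounds = line_item_qs.aggregate(Min("usage_date"), Max("usage_date"))
    start, end = bounds["usage_date__min"], bounds["usage_date__max"]
    # Min/Max keep the field's type, so datetimes are truncated to dates here.
    if isinstance(start, datetime.datetime):
        start, end = start.date(), end.date()
    return start, end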