django.db.models.Sum

Here are examples of the Python API django.db.models.Sum collected from open source projects.

170 Examples
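
For orientation before the project examples, here is a minimal sketch of the two usual ways Sum is used: aggregate() collapses the whole queryset into a single total, while annotate() attaches a per-group total to each row of a values() grouping. The OrderItem model and its order and quantity fields are hypothetical, used only for illustration.

from django.db.models import Sum
from myapp.models import OrderItem  # hypothetical model with `order` and `quantity` fields

# One grand total for the whole queryset; the result dict uses the default
# '<field>__sum' key and holds None when the queryset is empty.
total = OrderItem.objects.aggregate(Sum('quantity'))['quantity__sum'] or 0

# One total per order: values() defines the GROUP BY, annotate() adds the Sum.
per_order = (
    OrderItem.objects
    .values('order_id')
    .annotate(total_quantity=Sum('quantity'))
    .order_by('order_id')
)

Either form can also name the result explicitly, e.g. aggregate(total=Sum('quantity')), which replaces the default 'quantity__sum' key.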

Example 151

Project: airmozilla Source File: dashboard.py
@staff_required
@json_view
def dashboard_data(request):
    context = {}
    now = timezone.now()
    today = now.replace(hour=0, minute=0, second=0, microsecond=0)
    tomorrow = today + datetime.timedelta(days=1)
    yesterday = today - datetime.timedelta(days=1)
    this_week = today - datetime.timedelta(days=today.weekday())
    next_week = this_week + datetime.timedelta(days=7)
    last_week = this_week - datetime.timedelta(days=7)
    this_month = today.replace(day=1)
    next_month = this_month
    while next_month.month == this_month.month:
        next_month += datetime.timedelta(days=1)
    last_month = (this_month - datetime.timedelta(days=1)).replace(day=1)
    this_year = this_month.replace(month=1)
    next_year = this_year.replace(year=this_year.year + 1)
    last_year = this_year.replace(year=this_year.year - 1)
    context['groups'] = []

    def make_filter(key, gte=None, lt=None):
        filter = {}
        if gte is not None:
            filter['%s__gte' % key] = gte
        if lt is not None:
            filter['%s__lt' % key] = lt
        return filter

    def get_counts(qs, key):
        counts = {}

        counts['today'] = qs.filter(
            **make_filter(key, gte=today, lt=tomorrow)
        ).count()
        counts['yesterday'] = qs.filter(
            **make_filter(key, gte=yesterday, lt=today)).count()

        counts['this_week'] = qs.filter(
            **make_filter(key, gte=this_week, lt=next_week)).count()
        counts['last_week'] = qs.filter(
            **make_filter(key, gte=last_week, lt=this_week)).count()

        counts['this_month'] = qs.filter(
            **make_filter(key, gte=this_month, lt=next_month)).count()
        counts['last_month'] = qs.filter(
            **make_filter(key, gte=last_month, lt=this_month)).count()

        counts['this_year'] = qs.filter(
            **make_filter(key, gte=this_year, lt=next_year)).count()
        counts['last_year'] = qs.filter(
            **make_filter(key, gte=last_year, lt=this_year)).count()

        counts['ever'] = qs.count()
        return counts

    # Events
    events = Event.objects.exclude(status=Event.STATUS_REMOVED)
    counts = get_counts(events, 'start_time')
    context['groups'].append({
        'name': 'New Events',
        'counts': counts
    })

    # Suggested Events
    counts = get_counts(SuggestedEvent.objects.all(), 'created')
    context['groups'].append({
        'name': 'Requested Events',
        'counts': counts
    })

    # Users
    counts = get_counts(User.objects.all(), 'date_joined')
    context['groups'].append({
        'name': 'New Users',
        'counts': counts
    })

    # Comments
    counts = get_counts(Comment.objects.all(), 'created')
    context['groups'].append({
        'name': 'Comments',
        'counts': counts
    })

    # Event revisions
    counts = get_counts(EventRevision.objects.all(), 'created')
    context['groups'].append({
        'name': 'Event Revisions',
        'counts': counts
    })

    # Pictures
    counts = get_counts(Picture.objects.all(), 'created')
    context['groups'].append({
        'name': 'Pictures',
        'counts': counts
    })

    # Chapters
    counts = get_counts(Chapter.objects.all(), 'created')
    context['groups'].append({
        'name': 'Chapters',
        'counts': counts
    })

    # Starred events
    counts = get_counts(StarredEvent.objects.all(), 'created')
    context['groups'].append({
        'name': 'Starred events',
        'counts': counts
    })

    def get_duration_totals(qs, key='start_time'):

        counts = {}

        def sum(elements):
            seconds = elements.aggregate(Sum('duration'))['duration__sum']
            seconds = seconds or 0  # in case it's None
            minutes = seconds / 60
            hours = minutes / 60
            if hours > 1:
                return "%dh" % hours
            elif minutes > 1:
                return "%dm" % minutes
            return "%ds" % seconds

        counts['today'] = sum(qs.filter(
            **make_filter(key, gte=today)))
        counts['yesterday'] = sum(qs.filter(
            **make_filter(key, gte=yesterday, lt=today)))

        counts['this_week'] = sum(qs.filter(
            **make_filter(key, gte=this_week)))
        counts['last_week'] = sum(qs.filter(
            **make_filter(key, gte=last_week, lt=this_week)))

        counts['this_month'] = sum(qs.filter(
            **make_filter(key, gte=this_month)))
        counts['last_month'] = sum(qs.filter(
            **make_filter(key, gte=last_month, lt=this_month)))

        counts['this_year'] = sum(qs.filter(
            **make_filter(key, gte=this_year)))
        counts['last_year'] = sum(qs.filter(
            **make_filter(key, gte=last_year, lt=this_year)))

        counts['ever'] = sum(qs)
        return counts

    def get_size_totals(qs, key='created'):

        counts = {}

        def sum(elements):
            bytes = elements.aggregate(Sum('size'))['size__sum']
            return filesizeformat(bytes)

        counts['today'] = sum(qs.filter(
            **make_filter(key, gte=today)))
        counts['yesterday'] = sum(qs.filter(
            **make_filter(key, gte=yesterday, lt=today)))

        counts['this_week'] = sum(qs.filter(
            **make_filter(key, gte=this_week)))
        counts['last_week'] = sum(qs.filter(
            **make_filter(key, gte=last_week, lt=this_week)))

        counts['this_month'] = sum(qs.filter(
            **make_filter(key, gte=this_month)))
        counts['last_month'] = sum(qs.filter(
            **make_filter(key, gte=last_month, lt=this_month)))

        counts['this_year'] = sum(qs.filter(
            **make_filter(key, gte=this_year)))
        counts['last_year'] = sum(qs.filter(
            **make_filter(key, gte=last_year, lt=this_year)))

        counts['ever'] = sum(qs)
        return counts

    # Exceptional
    counts = get_duration_totals(Event.objects.exclude(duration__isnull=True))
    context['groups'].append({
        'name': 'Total Event Durations',
        'counts': counts
    })

    counts = get_size_totals(Upload.objects.all())
    context['groups'].append({
        'name': 'Uploads',
        'counts': counts,
        'small': True
    })

    return context
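
The nested sum() helper above has to guard against aggregate() returning None on an empty queryset. As a hedged alternative, assuming Django 1.8+ where django.db.models.functions.Coalesce is available, the default can be pushed into the database so no Python-side fallback is needed:

from django.db.models import Sum, Value
from django.db.models.functions import Coalesce

# Same total-duration aggregate as above, but the database returns 0
# instead of NULL when no events match the filter.
seconds = Event.objects.exclude(duration__isnull=True).aggregate(
    total=Coalesce(Sum('duration'), Value(0))
)['total']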

Example 152

Project: zamboni Source File: views.py
def _app_purchases_and_refunds(addon):
    purchases = {}
    now = datetime.now()
    base_qs = (Contribution.objects.values('currency')
                           .annotate(total=Count('id'),
                                     amount=Sum('amount'))
                           .filter(addon=addon)
                           .exclude(type__in=[mkt.CONTRIB_REFUND,
                                              mkt.CONTRIB_CHARGEBACK,
                                              mkt.CONTRIB_PENDING]))
    for typ, start_date in (('last_24_hours', now - timedelta(hours=24)),
                            ('last_7_days', now - timedelta(days=7)),
                            ('alltime', None),):
        qs = base_qs.all()
        if start_date:
            qs = qs.filter(created__gte=start_date)
        sums = list(qs)
        purchases[typ] = {'total': sum(s['total'] for s in sums),
                          'amounts': [numbers.format_currency(s['amount'],
                                                              s['currency'])
                                      for s in sums if s['currency']]}
    refunds = {}
    rejected_q = Q(status=mkt.REFUND_DECLINED) | Q(status=mkt.REFUND_FAILED)
    qs = Refund.objects.filter(contribution__addon=addon)

    refunds['requested'] = qs.exclude(rejected_q).count()
    percent = 0.0
    total = purchases['alltime']['total']
    if total:
        percent = (refunds['requested'] / float(total)) * 100.0
    refunds['percent_of_purchases'] = '%.1f%%' % percent
    refunds['auto-approved'] = (qs.filter(status=mkt.REFUND_APPROVED_INSTANT)
                                .count())
    refunds['approved'] = qs.filter(status=mkt.REFUND_APPROVED).count()
    refunds['rejected'] = qs.filter(rejected_q).count()

    return purchases, refunds
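
base_qs relies on the values('currency') / annotate(...) pattern: values() defines the GROUP BY, so iterating the queryset yields one dict per currency carrying its Count and Sum. A minimal sketch of just that grouping, reusing the aliases from the example (the Contribution import path is an assumption):

from django.db.models import Count, Sum
from mkt.models import Contribution  # hypothetical import path

# One dict per currency, e.g. {'currency': 'USD', 'total': 42, 'amount': Decimal('123.45')}
totals_by_currency = (
    Contribution.objects
    .values('currency')                              # GROUP BY currency
    .annotate(total=Count('id'), amount=Sum('amount'))
)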

Example 153

Project: pyfreebilling Source File: dailystats.py
    def handle(self, *args, **options):
        for var in args:
            try:

                # date filter
                # today = datetime.datetime(2013, 06, 14, 00, 00, 00)
                current_tz = pytz.utc
                if var == "lastday":
                    dt = datetime.datetime.now()
                    today = datetime.datetime(
                        dt.year, dt.month, dt.day, 00, 00, 00).replace(tzinfo=current_tz)
                    yesterday = today - datetime.timedelta(days=1)
                elif var == "past":
                    today = datetime.datetime(
                        2014, 8, 28, 00, 00, 00).replace(tzinfo=current_tz)
                    yesterday = today - datetime.timedelta(days=1)
                elif var == "custom":
                    for fd in first:
                        try:
                            today = datetime.datetime(
                                fd[0], fd[1], fd[2], fd[3], fd[4], fd[5]).replace(tzinfo=current_tz)
                        except:
                            return
                    for ld in last:
                        try:
                            yesterday = datetime.datetime(
                                ld[0], ld[1], ld[2], ld[3], ld[4], ld[5]).replace(tzinfo=current_tz)
                        except:
                            return
                else:
                    return
# Query construction
                qs = CDR.objects.all().filter(
                    start_stamp__gte=yesterday).filter(start_stamp__lt=today)
                # exclude(sell_destination='did')
                qs_uuid_unique = qs.order_by('-start_stamp')
# Customer filter - take unique uuid with late start_stamp
# DimCustomerHangupCause
                qss_hangup_unique_customer = qs_uuid_unique.values('customer', 'sell_destination', 'hangup_cause').annotate(
                    total_calls=Count('uuid', distinct=True)).order_by('customer', 'sell_destination')
# DimCustomerSipHangupCause
                qss_siphangup_unique_customer = qs_uuid_unique.values('customer', 'sell_destination', 'sip_hangup_cause').annotate(
                    total_calls=Count('uuid', distinct=True)).order_by('customer', 'sell_destination')
# Provider filter - take unique uuid with late start_stamp
# DimProviderHangupCause
                qss_hangup_unique_provider = qs_uuid_unique.values('lcr_carrier_id', 'cost_destination', 'hangup_cause').exclude(
                    lcr_carrier_id__isnull=True).annotate(total_calls=Count('uuid', distinct=True)).order_by('lcr_carrier_id', 'cost_destination')
# DimProviderSipHangupCause
                qss_siphangup_unique_provider = qs_uuid_unique.values('lcr_carrier_id', 'cost_destination', 'sip_hangup_cause').exclude(
                    lcr_carrier_id__isnull=True).annotate(total_calls=Count('uuid', distinct=True)).order_by('lcr_carrier_id', 'cost_destination')
# Stats on successful calls
                qss2 = qs.filter(effective_duration__gt="0")
# Customers
# DimCustomerDestination
                qss_total_customer = qs.extra(select={'destination': 'sell_destination'}).values('customer', 'destination').annotate(total_calls=Count('uuid', distinct=True), total_duration=Sum(
                    'effective_duration'), max_duration=Max('effective_duration'), total_sell=Sum('total_sell'), total_cost=Sum('total_cost')).order_by('customer', 'sell_destination')
    # Get total calls
                qss_success_customer = qss2.extra(select={'destination': 'sell_destination'}).values('customer', 'destination').annotate(
                    min_duration=Min('effective_duration'), success_calls=Count('id'), avg_duration=Avg('effective_duration')).order_by('customer', 'sell_destination')
                for key, val in enumerate(qss_total_customer):
                    for k, v in enumerate(qss_success_customer):
                        if v['customer'] == val['customer'] and v['destination'] == val['destination']:
                            val['min_duration'] = v['min_duration']
                            val['success_calls'] = v['success_calls']
                            val['avg_duration'] = v['avg_duration']
                for key, val in enumerate(qss_total_customer):
                    if 'min_duration' not in val:
                        val['min_duration'] = 0
                        val['success_calls'] = 0
                        val['avg_duration'] = 0
# Providers
# DimProviderDestination
                qss_total_provider = qs.exclude(lcr_carrier_id__isnull=True).extra(select={'provider': 'lcr_carrier_id_id', 'destination': 'cost_destination'}).values('provider', 'destination').annotate(
                    total_calls=Count('uuid', distinct=True), total_duration=Sum('effective_duration'), max_duration=Max('effective_duration'), total_sell=Sum('total_sell'), total_cost=Sum('total_cost')).order_by('lcr_carrier_id', 'cost_destination')
    # Get total calls
                qss_success_provider = qss2.exclude(lcr_carrier_id__isnull=True).extra(select={'provider': 'lcr_carrier_id_id', 'destination': 'cost_destination'}).values(
                    'provider', 'destination').annotate(avg_duration=Avg('effective_duration'), min_duration=Min('effective_duration'), success_calls=Count('id')).order_by('lcr_carrier_id', 'cost_destination')
                for key, val in enumerate(qss_total_provider):
                    for k, v in enumerate(qss_success_provider):
                        if v['provider'] == val['provider'] and v['destination'] == val['destination']:
                            val['min_duration'] = v['min_duration']
                            val['success_calls'] = v['success_calls']
                            val['avg_duration'] = v['avg_duration']
                for key, val in enumerate(qss_total_provider):
                    if 'min_duration' not in val:
                        val['min_duration'] = 0
                        val['success_calls'] = 0
                        val['avg_duration'] = 0
                # print(qss_total_provider)
                # get or set dim date
                try:
                    workingdate = DimDate.objects.get(date=yesterday)
                except DimDate.DoesNotExist:
                    workingdate = DimDate(
                        date=yesterday,
                        day=yesterday.day,
                        day_of_week=yesterday.isoweekday(),
                        hour=yesterday.hour,
                        month=yesterday.month,
                        quarter=" ",
                        year=yesterday.year
                    )
                    workingdate.save()
                current_date = DimDate.objects.get(date=yesterday).id

# DimCustomerHangupCause
    # check if entry
                try:
                    DimCustomerHangupcause.objects.filter(
                        date_id=current_date).delete()
                except:
                    pass
                for item in qss_hangup_unique_customer:

                    data_dchc = DimCustomerHangupcause(
                        customer_id=item["customer"],
                        destination=item["sell_destination"],
                        hangupcause=item["hangup_cause"],
                        date_id=current_date,
                        total_calls=int(item["total_calls"])
                    )
                    data_dchc.save()

# DimCustomerSipHangupCause
    # check if entry
                try:
                    DimCustomerSipHangupcause.objects.filter(
                        date_id=current_date).delete()
                except:
                    pass
                for item in qss_siphangup_unique_customer:

                    data_dcshc = DimCustomerSipHangupcause(
                        customer_id=item["customer"],
                        destination=item["sell_destination"],
                        sip_hangupcause=item["sip_hangup_cause"],
                        date_id=current_date,
                        total_calls=int(item["total_calls"])
                    )
                    data_dcshc.save()

# DimProviderHangupCause
    # check if entry
                try:
                    DimProviderHangupcause.objects.filter(
                        date_id=current_date).delete()
                except:
                    pass
                for item in qss_hangup_unique_provider:

                    data_dphc = DimProviderHangupcause(
                        provider_id=item["lcr_carrier_id"],
                        destination=item["cost_destination"],
                        hangupcause=item["hangup_cause"],
                        date_id=current_date,
                        total_calls=int(item["total_calls"])
                    )
                    data_dphc.save()

# DimProviderSipHangupCause
    # check if entry
                try:
                    DimProviderSipHangupcause.objects.filter(
                        date_id=current_date).delete()
                except:
                    pass
                for item in qss_siphangup_unique_provider:

                    data_dpshc = DimProviderSipHangupcause(
                        provider_id=item["lcr_carrier_id"],
                        destination=item["cost_destination"],
                        sip_hangupcause=item["sip_hangup_cause"],
                        date_id=current_date,
                        total_calls=int(item["total_calls"])
                    )
                    data_dpshc.save()

# DimCustomerDestination
    # check if entry
                try:
                    DimCustomerDestination.objects.filter(
                        date_id=current_date).delete()
                except:
                    pass
                for item in qss_total_customer:

                    data_dcd = DimCustomerDestination(
                        customer_id=item["customer"],
                        destination=item["destination"],
                        date_id=current_date,
                        total_calls=int(item["total_calls"]),
                        total_duration=item["total_duration"],
                        max_duration=int(math.ceil(item["max_duration"])),
                        total_sell=item["total_sell"],
                        total_cost=item["total_cost"],
                        success_calls=int(item["success_calls"]),
                        avg_duration=int(math.ceil(item["avg_duration"])),
                        min_duration=int(math.ceil(item["min_duration"]))
                    )
                    data_dcd.save()

# DimProviderDestination
    # check if entry
                try:
                    DimProviderDestination.objects.filter(
                        date_id=current_date).delete()
                except:
                    pass
                for item in qss_total_provider:

                    data_dpd = DimProviderDestination(
                        provider_id=item["provider"],
                        destination=item["destination"],
                        date_id=current_date,
                        total_calls=int(item["total_calls"]),
                        success_calls=int(item["success_calls"]),
                        total_duration=item["total_duration"],
                        avg_duration=int(math.ceil(item["avg_duration"])),
                        max_duration=int(math.ceil(item["max_duration"])),
                        min_duration=int(math.ceil(item["min_duration"])),
                        total_sell=item["total_sell"],
                        total_cost=item["total_cost"]
                    )
                    data_dpd.save()

                # pprint(connection.queries)
            except CDR.DoesNotExist:
                raise CommandError('stats does not exist')

            self.stdout.write('Successfully stats ')

Example 154

Project: pyfreebilling Source File: views.py
@staff_member_required
def global_stats_view(request, vue):
    # set start_date and end_date
    # default yesterday stats
    if vue == 'customer' or vue == 'dest_customer':
        qs = DimCustomerDestination.objects.all()
    if vue == 'provider' or vue == 'dest_provider':
        qs = DimProviderDestination.objects.all()

    current_tz = pytz.utc
    dt = datetime.datetime.now()
    end_date = datetime.date(dt.year, dt.month, dt.day)
    # end_date = datetime.date(2014, 8, 28)
    start_date = end_date - datetime.timedelta(days=1)
    qs_orderby = '-total_sell'

    # Get the q GET parameter
    # date from and to and check value
    start_d = {'y': [], 'm': [], 'd': [], 'status': True}
    end_d = {'y': [], 'm': [], 'd': [], 'status': True}
    li = ['y', 'm', 'd']
    for i in li:
        start_d[str(i)] = request.GET.get("from_" + str(i))
        if start_d[str(i)] and start_d[str(i)].isnumeric():
            start_d[str(i)] = int(start_d[str(i)])
        else:
            start_d['status'] = False
        end_d[str(i)] = request.GET.get("to_" + str(i))
        if end_d[str(i)] and end_d[str(i)].isnumeric():
            end_d[str(i)] = int(end_d[str(i)])
        else:
            end_d['status'] = False
    # dest num
    dest_num = request.GET.get("dest_num")
    company = request.GET.get("company")
    if start_d['status']:
        start_date = datetime.datetime(
            start_d['y'], start_d['m'], start_d['d'], 00, 00)
    if end_d['status']:
        end_date = datetime.datetime(
            end_d['y'], end_d['m'], end_d['d'], 00, 00)
    if start_date and end_date:
        qs = qs.filter(date__date__range=(start_date, end_date))

    if dest_num:
        qs = qs.filter(destination__startswith=dest_num)

    if company:
        if vue == 'customer' or vue == 'dest_customer':
            qs = qs.filter(customer__name__contains=company)
        if vue == 'provider' or vue == 'dest_provider':
            qs = qs.filter(provider__name__contains=company)

    if vue == 'customer':
        qs1 = qs.values('customer__name', 'customer__cb_currency__code')
    if vue == 'dest_customer' or vue == 'dest_provider':
        qs1 = qs.values('destination')
    if vue == 'provider':
        qs1 = qs.values('provider__name', 'provider__cb_currency__code')
    stats_table = qs1.\
        annotate(total_sell=Sum('total_sell')).\
        annotate(success_calls=Sum('success_calls')).\
        annotate(total_calls=Sum('total_calls')).\
        annotate(total_cost=Sum('total_cost')).\
        annotate(total_duration=Sum('total_duration')).\
        annotate(max_duration=Max('max_duration')).\
        annotate(min_duration=Min('min_duration')).\
        annotate(avg_duration=Min('avg_duration')).\
        order_by('-total_sell')
    total_table = qs.aggregate(
        total_sell=Sum('total_sell'),
        success_calls=Sum('success_calls'),
        total_calls=Sum('total_calls'),
        total_cost=Sum('total_cost'),
        total_duration=Sum('total_duration'),
        max_duration=Max('max_duration'),
        min_duration=Min('min_duration'),
        avg_duration=Min('avg_duration'))

    if vue == 'customer':
        table = TopCustTable(stats_table)
    if vue == 'dest_customer':
        table = TopDestCustTable(stats_table)
    if vue == 'provider':
        table = TopProvTable(stats_table)
    if vue == 'dest_provider':
        table = TopDestProvTable(stats_table)
    RequestConfig(request, paginate={"per_page": 100}).configure(table)
    #import pdb; pdb.set_trace()
    return render_to_response('admin/customers_stats.html', locals(),
                              context_instance=RequestContext(request))

Example 155

Project: pyfreebilling Source File: views.py
    @classmethod
    def get_stats_revenue(cls):
        data = []
        data1 = {'key': [], 'values': [], 'color': '#2ca02c'}
        data2 = {'key': [], 'values': []}
        data3 = {'key': [], 'area': 'true', 'values': [], 'color': '#ff7f0e'}
        data4 = {'key': [], 'area': 'true', 'values': [], 'color': '#7777ff'}

        values_sell = []
        values_cost = []
        values_duration = []
        margin = []
        values_margin = []

        qs = CDR.objects.filter(effective_duration__gt="0")
        qs_d = DimCustomerDestination.objects.all()
        # qs_h = DimCustomerHangupcause.objects.all()
        qss_sell = qsstats.QuerySetStats(qs, 'start_stamp',
                                         aggregate=Sum('total_sell'))
        qss_cost = qsstats.QuerySetStats(qs, 'start_stamp',
                                         aggregate=Sum('total_cost'))
        qss_sum_duration = qsstats.QuerySetStats(qs, 'start_stamp',
                                                 aggregate=Sum('effective_duration'))
        today = datetime.date.today() - datetime.timedelta(days=0)
        firstday = today - datetime.timedelta(days=90)
        # stats_sell = qss_sell.time_series(seven_days_ago, today)
        # stats_cost = qss_sell.time_series(seven_days_ago, today)
        # stats_duration = qss_sum_duration.time_series(seven_days_ago, today)

        ts_total_calls = time_series(
            qs_d, 'date__date', [firstday, today], func=Sum('total_calls'))
        ts_success_calls = time_series(
            qs_d, 'date__date', [firstday, today], func=Sum('success_calls'))
        stats_duration = time_series(
            qs_d, 'date__date', [firstday, today], func=Sum('total_duration'))
        stats_sell = time_series(
            qs_d, 'date__date', [firstday, today], func=Sum('total_sell'))
        stats_cost = time_series(
            qs_d, 'date__date', [firstday, today], func=Sum('total_cost'))

        for i in range(len(stats_sell)):
            values_sell.append(
                [int(time.mktime(stats_sell[i][0].timetuple()) * 1000),
                    round_value(stats_sell[i][1])])

        data1['key'].append("Revenue")
        data1['values'] = values_sell
        data.append(data1)

        for i in range(len(stats_sell)):
            temp_data = [
                int(time.mktime(stats_sell[i][0].timetuple()) * 1000),
                # round_value(stats_sell[i][1])
                # round_value(stats_cost[i][1]),
                int(round_value(stats_duration[i][1]))
            ]
            values_duration.append(temp_data)

        data2['values'] = values_duration
        # data2['bar'].append('true')
        data2['key'].append("Duration")
        # data.append(data2)

        for i in range(len(stats_sell)):
            values_cost.append(
                [int(time.mktime(stats_cost[i][0].timetuple()) * 1000),
                    round_value(stats_cost[i][1])])

        data3['key'].append("Cost")
        data3['values'] = values_cost
        data.append(data3)

        for i in range(len(stats_sell)):
            if stats_sell[i][1]:
                if stats_cost[i][1]:
                    margin.append(stats_sell[i][1] - stats_cost[i][1])
                else:
                    margin.append(stats_sell[i][1])
            else:
                if stats_cost[i][1]:
                    margin.append(0 - stats_cost[i][1])
                else:
                    margin.append(0)
            values_margin.append(
                [int(time.mktime(stats_cost[i][0].timetuple()) * 1000),
                    round_value(margin[i])])

        data4['key'].append("Margin")
        data4['values'] = values_margin
        data.append(data4)

        return data

Example 156

Project: newfies-dialer Source File: admin.py
    def voip_daily_report(self, request):
        opts = VoIPCall._meta
        kwargs = {}
        if request.method == 'POST':
            form = AdminVoipSearchForm(request.POST)
            kwargs = voipcall_record_common_fun(request)
        else:
            kwargs = voipcall_record_common_fun(request)
            tday = datetime.today()
            form = AdminVoipSearchForm(initial={"from_date": tday.strftime("%Y-%m-%d"),
                                                "to_date": tday.strftime("%Y-%m-%d")})
            if len(kwargs) == 0:
                kwargs['starting_date__gte'] = datetime(tday.year, tday.month, tday.day,
                                                        0, 0, 0, 0).replace(tzinfo=utc)

        select_data = {"starting_date": "SUBSTR(CAST(starting_date as CHAR(30)),1,10)"}
        # Get Total Records from VoIPCall Report table for Daily Call Report
        total_data = VoIPCall.objects.extra(select=select_data).values('starting_date').filter(**kwargs)\
            .annotate(Count('starting_date'))\
            .annotate(Sum('duration'))\
            .annotate(Avg('duration'))\
            .order_by('-starting_date')

        # The following code counts total VoIP calls and duration
        if total_data:
            max_duration = max([x['duration__sum'] for x in total_data])
            total_duration = sum([x['duration__sum'] for x in total_data])
            total_calls = sum([x['starting_date__count'] for x in total_data])
            total_avg_duration = (sum([x['duration__avg'] for x in total_data])) / total_calls
        else:
            max_duration = 0
            total_duration = 0
            total_calls = 0
            total_avg_duration = 0

        ctx = RequestContext(request, {
            'form': form,
            'total_data': total_data,
            'total_duration': total_duration,
            'total_calls': total_calls,
            'total_avg_duration': total_avg_duration,
            'max_duration': max_duration,
            'opts': opts,
            'model_name': opts.object_name.lower(),
            'app_label': APP_LABEL,
            'title': _('call aggregate report'),
        })
        return render_to_response('admin/dialer_cdr/voipcall/voip_report.html', context_instance=ctx)
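
The SUBSTR expression in select_data truncates starting_date to a calendar day before grouping. On Django 1.10+ the same daily report can be written without extra() using TruncDate; a sketch under that assumption, keeping kwargs and the VoIPCall model from the example:

from django.db.models import Avg, Count, Sum
from django.db.models.functions import TruncDate

# Hypothetical equivalent of total_data above: one row per calendar day,
# with call count, total duration and average duration.
daily_report = (
    VoIPCall.objects
    .filter(**kwargs)
    .annotate(day=TruncDate('starting_date'))
    .values('day')
    .annotate(calls=Count('id'),
              total_duration=Sum('duration'),
              avg_duration=Avg('duration'))
    .order_by('-day')
)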

Example 157

Project: newfies-dialer Source File: views.py
@permission_required('dialer_campaign.view_dashboard', login_url='/')
@login_required
def customer_dashboard(request, on_index=None):
    """Customer dashboard gives the following information

        * Total Campaigns contacts
        * Amount of contact reached today
        * Disposition of calls via pie chart
        * Call records & Duration of calls are shown on graph by days/hours

    **Attributes**:

        * ``template`` - frontend/dashboard.html
        * ``form`` - DashboardForm
    """
    logging.debug('Start Dashboard')
    # All campaign for logged in User
    campaign_id_list = Campaign.objects.values_list('id', flat=True).filter(user=request.user).order_by('id')

    # Count of active contacts belonging to the phonebook(s) associated with
    # all campaigns
    pb_active_contact_count = Contact.objects\
        .filter(phonebook__campaign__in=campaign_id_list, status=CONTACT_STATUS.ACTIVE).count()

    form = DashboardForm(request.user, request.POST or None)
    logging.debug('Got Campaign list')

    total_record = dict()
    total_duration_sum = 0
    total_billsec_sum = 0
    total_call_count = 0
    total_answered = 0
    total_not_answered = 0
    total_busy = 0
    total_cancel = 0
    total_congestion = 0
    total_failed = 0
    search_type = SEARCH_TYPE.D_Last_24_hours  # default Last 24 hours
    selected_campaign = ''

    if campaign_id_list:
        selected_campaign = campaign_id_list[0]  # default campaign id

    # selected_campaign should not be empty
    if selected_campaign:
        if form.is_valid():
            selected_campaign = request.POST['campaign']
            search_type = request.POST['search_type']

        end_date = datetime.utcnow().replace(tzinfo=utc)
        start_date = calculate_date(search_type)

        # date_length is used to do group by starting_date
        if int(search_type) >= SEARCH_TYPE.B_Last_7_days:  # all options except 30 days
            date_length = 13
            if int(search_type) == SEARCH_TYPE.C_Yesterday:  # yesterday
                tday = datetime.utcnow().replace(tzinfo=utc)
                start_date = datetime(tday.year, tday.month, tday.day, 0, 0, 0, 0)\
                    .replace(tzinfo=utc) - relativedelta(days=1)
                end_date = datetime(tday.year, tday.month, tday.day, 23, 59, 59, 999999)\
                    .replace(tzinfo=utc) - relativedelta(days=1)
            if int(search_type) >= SEARCH_TYPE.E_Last_12_hours:
                date_length = 16
        else:
            date_length = 10  # Last 30 days option

        select_data = {
            "starting_date": "SUBSTR(CAST(starting_date as CHAR(30)),1,%s)" % str(date_length)
        }

        # This calls list is used by pie chart
        calls = VoIPCall.objects\
            .filter(callrequest__campaign=selected_campaign,
                    duration__isnull=False,
                    user=request.user,
                    starting_date__range=(start_date, end_date))\
            .extra(select=select_data)\
            .values('starting_date', 'disposition')\
            .annotate(Count('starting_date'))\
            .order_by('starting_date')

        logging.debug('Aggregate VoIPCall')

        for i in calls:
            total_call_count += i['starting_date__count']
            if i['disposition'] == CALL_DISPOSITION.ANSWER or i['disposition'] == 'NORMAL_CLEARING':
                total_answered += i['starting_date__count']
            elif i['disposition'] == CALL_DISPOSITION.BUSY or i['disposition'] == 'USER_BUSY':
                total_busy += i['starting_date__count']
            elif i['disposition'] == CALL_DISPOSITION.NOANSWER or i['disposition'] == 'NO_ANSWER':
                total_not_answered += i['starting_date__count']
            elif i['disposition'] == CALL_DISPOSITION.CANCEL or i['disposition'] == 'ORIGINATOR_CANCEL':
                total_cancel += i['starting_date__count']
            elif i['disposition'] == CALL_DISPOSITION.CONGESTION or i['disposition'] == 'NORMAL_CIRCUIT_CONGESTION':
                total_congestion += i['starting_date__count']
            else:
                # VOIP CALL FAILED
                total_failed += i['starting_date__count']

        # The following calls list omits disposition and is grouped by call date
        calls = VoIPCall.objects\
            .filter(callrequest__campaign=selected_campaign,
                    duration__isnull=False,
                    user=request.user,
                    starting_date__range=(start_date, end_date))\
            .extra(select=select_data)\
            .values('starting_date')\
            .annotate(Sum('duration'))\
            .annotate(Sum('billsec'))\
            .annotate(Avg('duration'))\
            .annotate(Count('starting_date'))\
            .order_by('starting_date')

        logging.debug('Aggregate VoIPCall (2)')

        mintime = start_date
        maxtime = end_date
        calls_dict = {}
        calls_dict_with_min = {}

        for call in calls:
            total_duration_sum += call['duration__sum']
            total_billsec_sum += call['billsec__sum']
            if int(search_type) >= SEARCH_TYPE.B_Last_7_days:
                ctime = datetime(int(call['starting_date'][0:4]),
                                 int(call['starting_date'][5:7]),
                                 int(call['starting_date'][8:10]),
                                 int(call['starting_date'][11:13]),
                                 0,
                                 0,
                                 0).replace(tzinfo=utc)
                if int(search_type) >= SEARCH_TYPE.E_Last_12_hours:
                    ctime = datetime(int(call['starting_date'][0:4]),
                                     int(call['starting_date'][5:7]),
                                     int(call['starting_date'][8:10]),
                                     int(call['starting_date'][11:13]),
                                     int(call['starting_date'][14:16]),
                                     0,
                                     0).replace(tzinfo=utc)
            else:
                ctime = datetime(int(call['starting_date'][0:4]),
                                 int(call['starting_date'][5:7]),
                                 int(call['starting_date'][8:10]),
                                 0,
                                 0,
                                 0,
                                 0).replace(tzinfo=utc)
            if ctime > maxtime:
                maxtime = ctime
            elif ctime < mintime:
                mintime = ctime

            # all options except 30 days
            if int(search_type) >= SEARCH_TYPE.B_Last_7_days:
                calls_dict[int(ctime.strftime("%Y%m%d%H"))] =\
                    {
                        'call_count': call['starting_date__count'],
                        'duration_sum': call['duration__sum'],
                        'duration_avg': float(call['duration__avg']),
                }

                calls_dict_with_min[int(ctime.strftime("%Y%m%d%H%M"))] =\
                    {
                        'call_count': call['starting_date__count'],
                        'duration_sum': call['duration__sum'],
                        'duration_avg': float(call['duration__avg']),
                }
            else:
                # Last 30 days option
                calls_dict[int(ctime.strftime("%Y%m%d"))] =\
                    {
                        'call_count': call['starting_date__count'],
                        'duration_sum': call['duration__sum'],
                        'duration_avg': float(call['duration__avg']),
                }

        logging.debug('After Call Loops')

        dateList = date_range(mintime, maxtime, q=search_type)

        for date in dateList:
            inttime = int(date.strftime("%Y%m%d"))

            # last 7 days | yesterday | last 24 hrs
            if int(search_type) == SEARCH_TYPE.B_Last_7_days \
                or int(search_type) == SEARCH_TYPE.C_Yesterday \
                    or int(search_type) == SEARCH_TYPE.D_Last_24_hours:

                for option in range(0, 24):
                    day_time = int(str(inttime) + str(option).zfill(2))

                    graph_day = datetime(int(date.strftime("%Y")),
                                         int(date.strftime("%m")),
                                         int(date.strftime("%d")),
                                         int(str(option).zfill(2))).replace(tzinfo=utc)

                    dt = int(1000 * time.mktime(graph_day.timetuple()))
                    total_record[dt] = {
                        'call_count': 0,
                        'duration_sum': 0,
                        'duration_avg': 0.0,
                    }

                    if day_time in calls_dict.keys():
                        total_record[dt]['call_count'] += calls_dict[day_time]['call_count']
                        total_record[dt]['duration_sum'] += calls_dict[day_time]['duration_sum']
                        total_record[dt]['duration_avg'] += float(calls_dict[day_time]['duration_avg'])

            # last 12 hrs | last 6 hrs | last 1 hr
            elif (int(search_type) == SEARCH_TYPE.E_Last_12_hours
                  or int(search_type) == SEARCH_TYPE.F_Last_6_hours
                  or int(search_type) == SEARCH_TYPE.G_Last_hour):

                for hour in range(0, 24):
                    for minute in range(0, 60):
                        hr_time = int(str(inttime) + str(hour).zfill(2) + str(minute).zfill(2))

                        graph_day = datetime(int(date.strftime("%Y")),
                                             int(date.strftime("%m")),
                                             int(date.strftime("%d")),
                                             int(str(hour).zfill(2)),
                                             int(str(minute).zfill(2))).replace(tzinfo=utc)

                        dt = int(1000 * time.mktime(graph_day.timetuple()))
                        total_record[dt] = {
                            'call_count': 0,
                            'duration_sum': 0,
                            'duration_avg': 0.0,
                        }

                        if hr_time in calls_dict_with_min.keys():
                            total_record[dt]['call_count'] += calls_dict_with_min[hr_time]['call_count']
                            total_record[dt]['duration_sum'] += calls_dict_with_min[hr_time]['duration_sum']
                            total_record[dt]['duration_avg'] += float(calls_dict_with_min[hr_time]['duration_avg'])
            else:
                # Default: Last 30 days option
                graph_day = datetime(int(date.strftime("%Y")),
                                     int(date.strftime("%m")),
                                     int(date.strftime("%d"))).replace(tzinfo=utc)
                dt = int(1000 * time.mktime(graph_day.timetuple()))
                total_record[dt] = {
                    'call_count': 0,
                    'duration_sum': 0,
                    'duration_avg': 0,
                }
                if inttime in calls_dict.keys():
                    total_record[dt]['call_count'] += calls_dict[inttime]['call_count']
                    total_record[dt]['duration_sum'] += calls_dict[inttime]['duration_sum']
                    total_record[dt]['duration_avg'] += float(calls_dict[inttime]['duration_avg'])

    logging.debug('After dateList Loops')

    # sorting on date col
    total_record = total_record.items()
    total_record = sorted(total_record, key=lambda k: k[0])

    # lineplusbarwithfocuschart
    final_charttype = "linePlusBarChart"
    xdata = []
    ydata = []
    ydata2 = []
    for i in total_record:
        xdata.append(i[0])
        ydata.append(i[1]['call_count'])
        ydata2.append(i[1]['duration_sum'])

    tooltip_date = "%d %b %y %H:%M %p"
    kwargs1 = {}
    kwargs1['bar'] = True
    extra_serie1 = {"tooltip": {"y_start": "", "y_end": " calls"}, "date_format": tooltip_date}
    extra_serie2 = {"tooltip": {"y_start": "", "y_end": " sec"}, "date_format": tooltip_date}

    final_chartdata = {
        'x': xdata,
        'name1': 'Calls', 'y1': ydata, 'extra1': extra_serie1, 'kwargs1': kwargs1,
        'name2': 'Duration', 'y2': ydata2, 'extra2': extra_serie2,
    }

    # Contacts which are successfully called for running campaign
    reached_contact = 0
    if campaign_id_list:
        tday = datetime.utcnow().replace(tzinfo=utc)
        start_date = datetime(tday.year, tday.month, tday.day, 0, 0, 0, 0).replace(tzinfo=utc)
        end_date = datetime(tday.year, tday.month, tday.day, 23, 59, 59, 999999).replace(tzinfo=utc)
        reached_contact = Subscriber.objects\
            .filter(campaign_id__in=campaign_id_list,  # status=5,
                    updated_date__range=(start_date, end_date))\
            .count()

    # PieChart
    hangup_analytic_charttype = "pieChart"
    xdata = []
    ydata = []
    hangup_analytic_chartdata = {'x': xdata, 'y1': ydata}
    if total_call_count != 0:
        for i in CALL_DISPOSITION:
            xdata.append(i[0])

        # Y-axis order depend upon CALL_DISPOSITION
        # 'ANSWER', 'BUSY', 'CANCEL', 'CONGESTION', 'FAILED', 'NOANSWER'
        ydata = [percentage(total_answered, total_call_count),
                 percentage(total_busy, total_call_count),
                 percentage(total_cancel, total_call_count),
                 percentage(total_congestion, total_call_count),
                 percentage(total_failed, total_call_count),
                 percentage(total_not_answered, total_call_count)]

        color_list = [
            COLOR_DISPOSITION['ANSWER'],
            COLOR_DISPOSITION['BUSY'],
            COLOR_DISPOSITION['CANCEL'],
            COLOR_DISPOSITION['CONGESTION'],
            COLOR_DISPOSITION['FAILED'],
            COLOR_DISPOSITION['NOANSWER'],
        ]

        extra_serie = {"tooltip": {"y_start": "", "y_end": " %"},
                       "color_list": color_list}
        hangup_analytic_chartdata = {'x': xdata, 'y1': ydata, 'extra1': extra_serie}

    data = {
        'form': form,
        'campaign_phonebook_active_contact_count': pb_active_contact_count,
        'reached_contact': reached_contact,
        'total_duration_sum': total_duration_sum,
        'total_billsec_sum': total_billsec_sum,
        'total_call_count': total_call_count,
        'total_answered': total_answered,
        'total_not_answered': total_not_answered,
        'total_busy': total_busy,
        'total_cancel': total_cancel,
        'total_congestion': total_congestion,
        'total_failed': total_failed,
        'answered_color': COLOR_DISPOSITION['ANSWER'],
        'busy_color': COLOR_DISPOSITION['BUSY'],
        'not_answered_color': COLOR_DISPOSITION['NOANSWER'],
        'cancel_color': COLOR_DISPOSITION['CANCEL'],
        'congestion_color': COLOR_DISPOSITION['CONGESTION'],
        'failed_color': COLOR_DISPOSITION['FAILED'],
        'CALL_DISPOSITION': CALL_DISPOSITION,
        'hangup_analytic_chartdata': hangup_analytic_chartdata,
        'hangup_analytic_charttype': hangup_analytic_charttype,
        'hangup_chartcontainer': 'piechart_container',
        'hangup_extra': {
            'x_is_date': False,
            'x_axis_format': '',
            'tag_script_js': True,
            'jquery_on_ready': True,
        },
        'final_chartdata': final_chartdata,
        'final_charttype': final_charttype,
        'final_chartcontainer': 'lineplusbarwithfocuschart_container',
        'final_extra': {
            'x_is_date': True,
            'x_axis_format': '%d %b %Y',
            'tag_script_js': True,
            'jquery_on_ready': True,
            'resize': True,
            'focus_enable': True,
        }
    }
    if on_index == 'yes':
        return data
    return render_to_response('frontend/dashboard.html', data, context_instance=RequestContext(request))
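
Because the annotations above are passed positionally (annotate(Sum('duration')), annotate(Sum('billsec'))), Django exposes them under the default '<field>__sum' keys that the loops read, e.g. call['duration__sum']. Keyword aliases give the same numbers under chosen names; a minimal sketch of the grouped query with aliases (filters shortened relative to the example, request.user as in the view above):

from django.db.models import Sum

# Rows now carry call['duration_sum'] and call['billsec_sum'] instead of the
# default '__sum' keys; grouping is still defined by values('starting_date').
calls = (
    VoIPCall.objects
    .filter(user=request.user, duration__isnull=False)
    .values('starting_date')
    .annotate(duration_sum=Sum('duration'), billsec_sum=Sum('billsec'))
    .order_by('starting_date')
)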

Example 158

Project: weblate Source File: translation.py
    def update_stats(self):
        """Updates translation statistics."""
        # Grab stats
        stats = self.unit_set.aggregate(
            Sum('num_words'),
            Count('id'),
            fuzzy__sum=do_boolean_sum('fuzzy'),
            translated__sum=do_boolean_sum('translated'),
            has_failing_check__sum=do_boolean_sum('has_failing_check'),
            has_suggestion__sum=do_boolean_sum('has_suggestion'),
            has_comment__sum=do_boolean_sum('has_comment'),
        )

        # Check if we have any units
        if stats['num_words__sum'] is None:
            self.total_words = 0
            self.total = 0
            self.fuzzy = 0
            self.translated = 0
            self.failing_checks = 0
            self.have_suggestion = 0
            self.have_comment = 0
        else:
            self.total_words = stats['num_words__sum']
            self.total = stats['id__count']
            self.fuzzy = int(stats['fuzzy__sum'])
            self.translated = int(stats['translated__sum'])
            self.failing_checks = int(stats['has_failing_check__sum'])
            self.have_suggestion = int(stats['has_suggestion__sum'])
            self.have_comment = int(stats['has_comment__sum'])

        # Count translated words
        self.translated_words = self.unit_set.filter(
            translated=True
        ).aggregate(
            Sum('num_words')
        )['num_words__sum']
        # Nothing matches filter
        if self.translated_words is None:
            self.translated_words = 0

        # Count fuzzy words
        self.fuzzy_words = self.unit_set.filter(
            fuzzy=True
        ).aggregate(
            Sum('num_words')
        )['num_words__sum']
        # Nothing matches filter
        if self.fuzzy_words is None:
            self.fuzzy_words = 0

        # Count words with failing checks
        self.failing_checks_words = self.unit_set.filter(
            has_failing_check=True
        ).aggregate(
            Sum('num_words')
        )['num_words__sum']
        # Nothing matches filter
        if self.failing_checks_words is None:
            self.failing_checks_words = 0

        # Store hash will save object
        self.store_hash()
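
do_boolean_sum is a weblate helper for summing boolean flags. On Django 2.0+ the same statistics can be gathered with the filter argument that aggregate functions accept; a sketch assuming that Django version and the same unit_set relation:

from django.db.models import Count, Q, Sum

# `translation` stands for a Translation instance, mirroring `self` above.
stats = translation.unit_set.aggregate(
    total_words=Sum('num_words'),
    total_units=Count('id'),
    fuzzy_units=Count('id', filter=Q(fuzzy=True)),
    translated_units=Count('id', filter=Q(translated=True)),
    failing_check_units=Count('id', filter=Q(has_failing_check=True)),
)

# The per-category word counts further down collapse the same way,
# saving one filtered query per category:
translated_words = translation.unit_set.aggregate(
    n=Sum('num_words', filter=Q(translated=True))
)['n'] or 0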

Example 159

Project: weblate Source File: stats.py
def get_per_language_stats(project):
    """Calculates per language stats for project"""
    result = []

    # List languages
    languages = Translation.objects.filter(
        subproject__project=project
    ).values_list(
        'language',
        flat=True
    ).distinct()

    # Calculates total strings in project
    total = 0
    for component in project.subproject_set.all():
        try:
            total += component.translation_set.all()[0].total
        except IndexError:
            pass

    # Translated strings in language
    for language in Language.objects.filter(pk__in=languages):
        translated = Translation.objects.filter(
            language=language,
            subproject__project=project
        ).aggregate(
            Sum('translated'),
        )['translated__sum']

        # Insert sort
        pos = None
        for i in range(len(result)):
            if translated >= result[i][1]:
                pos = i
                break
        value = (language, translated, total)
        if pos is not None:
            result.insert(pos, value)
        else:
            result.append(value)

    return result

Example 160

Project: nnmware Source File: booking_tags.py
@register.simple_tag(takes_context=True)
def price_variants(context, room, rate):
    btype = context.get('btype')
    from_date, to_date, date_period, delta, guests = dates_guests_from_context(context)
    settlement = SettlementVariant.objects.filter(room=room, settlement__gte=guests,
        placeprice__date__range=date_period, placeprice__amount__gt=0).annotate(valid_s=Count('pk')).\
        filter(valid_s__gte=delta).order_by('settlement').values_list('pk', flat=True).distinct()[0]
    result = PlacePrice.objects.filter(settlement__room=room, settlement__pk=settlement,
                                       date__range=date_period).aggregate(Sum('amount'))['amount__sum']
    answer = convert_to_client_currency(result, rate)
    total_cost = answer
    discount = room.simple_discount
    prices = []
    ub, gb, nr = dict(), dict(), dict()
    variants = []
    if discount.ub:
        if 0 < discount.ub_discount < 100:
            ub['price'] = (answer * (100 - discount.ub_discount)) / 100
            ub['discount'] = discount.ub_discount
        else:
            ub['price'] = answer
            ub['discount'] = None
        ub['average'] = ub['price'] / delta
        ub['variant'] = 'ub'
        if btype == 'ub':
            total_cost = ub['price']
    if discount.gb:
        gb['days'] = from_date - timedelta(days=discount.gb_days)
        if 0 < discount.gb_discount < 100:
            gb['price'] = (answer * (100 - discount.gb_discount)) / 100
            gb['discount'] = discount.gb_discount
        else:
            gb['price'] = answer
            gb['discount'] = None
        if 0 < discount.gb_penalty <= 100:
            room_answer = PlacePrice.objects.get(settlement__room=room, settlement__pk=settlement, date=from_date)
            room_answer = convert_to_client_currency(room_answer.amount, rate)
            if gb['discount'] is not None:
                gb['penalty'] = (room_answer * (100 - gb['discount']) * discount.gb_penalty) / 10000
            else:
                gb['penalty'] = (room_answer * discount.gb_penalty) / 100
        else:
            gb['penalty'] = None
        gb['average'] = gb['price'] / delta
        gb['variant'] = 'gb'
        if btype == 'gb':
            total_cost = gb['price']
    if discount.nr:
        if 0 < discount.nr_discount < 100:
            nr['price'] = (answer * (100 - discount.nr_discount)) / 100
            nr['discount'] = discount.nr_discount
        else:
            nr['price'] = answer
            nr['discount'] = None
        if btype == 'nr':
            total_cost = nr['price']
        nr['average'] = nr['price'] / delta
        nr['variant'] = 'nr'
    if btype == 'ub' and bool(ub):
        variants.append(ub)
        if bool(gb):
            variants.append(gb)
        if bool(nr):
            variants.append(nr)
    elif btype == 'gb' and bool(gb):
        variants.append(gb)
        if bool(ub):
            variants.append(ub)
        if bool(nr):
            variants.append(nr)
    elif btype == 'nr' and bool(nr):
        variants.append(nr)
        if bool(ub):
            variants.append(ub)
        if bool(gb):
            variants.append(gb)
    else:
        if bool(ub):
            variants.append(ub)
        if bool(gb):
            variants.append(gb)
        if bool(nr):
            variants.append(nr)
    prices.append(variants)
    prices.append(answer / delta)
    prices.append(total_cost)
    prices.append(delta)
    prices.append(len(variants))
    return prices

Example 161

Project: Open-Knesset Source File: views.py
    def get_context_data(self, **kwargs):
        context = super(PartyListView, self).get_context_data(**kwargs)
        qs = context['object_list']

        info = self.kwargs['stat_type']

        context['coalition'] = qs.filter(is_coalition=True)
        context['opposition'] = qs.filter(is_coalition=False)

        context['friend_pages'] = self.pages
        context['stat_type'] = info

        if info == 'seats':
            context['coalition']  =  context['coalition'].annotate(extra=Sum('number_of_seats')).order_by('-extra')
            context['opposition'] = context['opposition'].annotate(extra=Sum('number_of_seats')).order_by('-extra')
            context['norm_factor'] = 1
            context['baseline'] = 0

        if info == 'votes-per-seat':
            m = 0
            for p in chain(context['coalition'], context['opposition']):
                p.extra = p.voting_statistics.votes_per_seat()
                if p.extra > m:
                    m = p.extra
            context['norm_factor'] = m / 20
            context['baseline'] = 0

        if info == 'discipline':
            m = 100
            for p in context['coalition']:
                p.extra = p.voting_statistics.discipline()
                if p.extra < m:
                    m = p.extra
            for p in context['opposition']:
                p.extra = p.voting_statistics.discipline()
                if p.extra < m:
                    m = p.extra
            context['norm_factor'] = (100.0-m)/15
            context['baseline'] = m - 2

        if info=='coalition-discipline':
            m = 100
            for p in context['coalition']:
                p.extra = p.voting_statistics.coalition_discipline()
                if p.extra < m:
                    m = p.extra
            for p in context['opposition']:
                p.extra = p.voting_statistics.coalition_discipline()
                if p.extra < m:
                    m = p.extra
            context['norm_factor'] = (100.0-m)/15
            context['baseline'] = m - 2

        if info == 'residence-centrality':
            m = 10
            for p in context['coalition']:
                rc = [member.residence_centrality for member in p.members.all() if member.residence_centrality]
                if rc:
                    p.extra = round(float(sum(rc))/len(rc),1)
                else:
                    p.extra = 0
                if p.extra < m:
                    m = p.extra
            for p in context['opposition']:
                rc = [member.residence_centrality for member in p.members.all() if member.residence_centrality]
                if rc:
                    p.extra = round(float(sum(rc))/len(rc),1)
                else:
                    p.extra = 0
                if p.extra < m:
                    m = p.extra
            context['norm_factor'] = (10.0-m)/15
            context['baseline'] = m-1

        if info == 'residence-economy':
            m = 10
            for p in context['coalition']:
                rc = [member.residence_economy for member in p.members.all() if member.residence_economy]
                if rc:
                    p.extra = round(float(sum(rc))/len(rc),1)
                else:
                    p.extra = 0
                if p.extra < m:
                    m = p.extra
            for p in context['opposition']:
                rc = [member.residence_economy for member in p.members.all() if member.residence_economy]
                if rc:
                    p.extra = round(float(sum(rc))/len(rc),1)
                else:
                    p.extra = 0
                if p.extra < m:
                    m = p.extra
            context['norm_factor'] = (10.0-m)/15
            context['baseline'] = m-1

        if info == 'bills-proposed':
            m = 9999
            d = Knesset.objects.current_knesset().start_date
            for p in chain(context['coalition'], context['opposition']):
                p.extra = round(float(
                    len(set(Bill.objects.filter(
                        proposers__current_party=p,
                        proposals__date__gt=d).values_list('id', flat=True))
                        )) / p.number_of_seats, 1)
                if p.extra < m:
                    m = p.extra
            context['norm_factor'] = m / 2
            context['baseline'] = 0

        if info == 'bills-pre':
            m = 9999
            d = Knesset.objects.current_knesset().start_date
            for p in chain(context['coalition'], context['opposition']):
                p.extra = round(float(
                    len(set(Bill.objects.filter(
                        Q(stage='2') | Q(stage='3') | Q(stage='4') |
                        Q(stage='5') | Q(stage='6'),
                        proposers__current_party=p,
                        proposals__date__gt=d).values_list('id', flat=True))
                        )) / p.number_of_seats, 1)
                if p.extra < m:
                    m = p.extra
            context['norm_factor'] = m / 2
            context['baseline'] = 0

        if info == 'bills-first':
            m = 9999
            d = Knesset.objects.current_knesset().start_date
            for p in chain(context['coalition'], context['opposition']):
                p.extra = round(float(
                    len(set(Bill.objects.filter(
                        Q(stage='4') | Q(stage='5') | Q(stage='6'),
                        proposers__current_party=p,
                        proposals__date__gt=d).values_list('id', flat=True))
                        )) / p.number_of_seats, 1)
                if p.extra < m:
                    m = p.extra
            context['norm_factor'] = m / 2
            context['baseline'] = 0

        if info == 'bills-approved':
            m = 9999
            d = Knesset.objects.current_knesset().start_date
            for p in chain(context['coalition'], context['opposition']):
                p.extra = round(float(
                    len(set(Bill.objects.filter(
                        proposers__current_party=p,
                        proposals__date__gt=d,
                        stage='6').values_list('id', flat=True))
                        )) / p.number_of_seats, 1)
                if p.extra < m:
                    m = p.extra
            context['norm_factor'] = m / 2
            context['baseline'] = 0

        if info == 'presence':
            m = 9999
            for p in chain(context['coalition'], context['opposition']):
                awp = [member.average_weekly_presence() for member in
                       p.members.all()]
                awp = [a for a in awp if a]
                if awp:
                    p.extra = round(float(sum(awp)) / len(awp), 1)
                else:
                    p.extra = 0
                if p.extra < m:
                    m = p.extra
            context['norm_factor'] = m / 2
            context['baseline'] = 0

        if info == 'committees':
            m = 9999
            for p in chain(context['coalition'], context['opposition']):
                cmpm = [member.committee_meetings_per_month() for member in
                        p.members.all()]
                cmpm = [c for c in cmpm if c]
                if cmpm:
                    p.extra = round(float(sum(cmpm)) / len(cmpm), 1)
                else:
                    p.extra = 0
                if p.extra < m:
                    m = p.extra
            context['norm_factor'] = m / 2
            context['baseline'] = 0

        context['title'] = _('Parties by %s') % dict(self.pages)[info]
        # prepare data for graphs. We'll be doing loops instead of list
        # comprehensions, to prevent multiple runs on the dataset (ticks, etc)
        ticks = []
        coalition_data = []
        opposition_data = []

        label = '%s<br><a href="%s">%s</a>'
        count = 0  # Make sure we have some value, otherwise things like tests may fail

        for count, party in enumerate(context['coalition'], 1):
            coalition_data.append((count, party.extra))
            ticks.append((count + 0.5, label % (party.extra, party.get_absolute_url(), party.name)))

        for opp_count, party in enumerate(context['opposition'], count + 1):
            opposition_data.append((opp_count, party.extra))
            ticks.append((opp_count + 0.5, label % (party.extra, party.get_absolute_url(), party.name)))

        graph_data = {
            'data': [
                {'label': _('Coalition'), 'data': coalition_data},
                {'label': _('Opposition'), 'data': opposition_data},
            ],
            'ticks': ticks
        }
        context['graph'] = json.dumps(graph_data, cls=PromiseAwareJSONEncoder)
        return context
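
The 'seats' branch is the only one that leans on the ORM for its metric; every other statistic is computed in Python and attached as p.extra. A minimal sketch of that annotate-and-rank pattern, assuming a Party model with number_of_seats and is_coalition fields as the view implies:

from django.db.models import Sum

# Rank each bloc by its seat count.  Annotating Sum over the party's own
# number_of_seats field exposes it under the common 'extra' attribute that
# the other branches of the view also set.
coalition = (Party.objects.filter(is_coalition=True)
             .annotate(extra=Sum('number_of_seats'))
             .order_by('-extra'))

# A bloc-wide total -- not what the view stores, but the same aggregate:
total_seats = coalition.aggregate(seats=Sum('number_of_seats'))['seats']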

Example 162

Project: olhoneles Source File: consolidate.py
    def handle(self, *args, **options):
        institutions = []
        if "almg" in args:
            institutions.append(Institution.objects.get(siglum='ALMG'))

        if "algo" in args:
            institutions.append(Institution.objects.get(siglum='ALGO'))

        if "senado" in args:
            institutions.append(Institution.objects.get(siglum='Senado'))

        if "cmbh" in args:
            institutions.append(Institution.objects.get(siglum='CMBH'))

        if "cmsp" in args:
            institutions.append(Institution.objects.get(siglum='CMSP'))

        if "cdep" in args:
            institutions.append(Institution.objects.get(siglum='CDF'))

        for institution in institutions:
            print u'Consolidating data for %s' % (institution.name)

            # Per nature
            PerNature.objects.filter(institution=institution).delete()
            PerNatureByYear.objects.filter(institution=institution).delete()
            PerNatureByMonth.objects.filter(institution=institution).delete()

            data = Expense.objects.all()
            data = filter_for_institution(data, institution)

            date_ranges = get_date_ranges_from_data(institution, data)

            data = data.values('nature__id')
            data = data.annotate(expensed=Sum('expensed')).order_by('-expensed')

            years = [d.year for d in Expense.objects.dates('date', 'year')]
            years = ensure_years_in_range(date_ranges, years)

            per_natures_to_create = list()
            per_natures_by_year_to_create = list()
            per_natures_by_month_to_create = list()

            for item in data:
                # Totals
                nature = ExpenseNature.objects.get(id=item['nature__id'])
                p = PerNature(institution=institution,
                              date_start=date_ranges['cdf'],
                              date_end=date_ranges['cdt'],
                              nature=nature,
                              expensed=item['expensed'])
                per_natures_to_create.append(p)

                # Totals for Legislature
                for legislature in institution.legislature_set.all():
                    pargs = (institution.siglum, nature.name, legislature.date_start.year, legislature.date_end.year)
                    print u'[%s] Consolidating nature %s totals for legislature %d-%d…' % pargs
                    legislature_data = Expense.objects.filter(nature=nature)
                    legislature_data = legislature_data.filter(mandate__legislature=legislature)

                    legislature_ranges = get_date_ranges_from_data(institution, legislature_data)

                    legislature_data = legislature_data.values('nature__id')
                    legislature_data = legislature_data.annotate(expensed=Sum('expensed')).order_by('-expensed')

                    if legislature_data:
                        legislature_data = legislature_data[0]
                    else:
                        legislature_data = dict(expensed='0.')

                    p = PerNature(institution=institution,
                                  legislature=legislature,
                                  date_start=legislature_ranges['cdf'],
                                  date_end=legislature_ranges['cdt'],
                                  nature=nature,
                                  expensed=legislature_data['expensed'])
                    per_natures_to_create.append(p)

                # By Year
                for year in years:
                    print u'[%s] Consolidating nature %s totals for year %d…' % (institution.siglum, nature.name, year)
                    year_data = Expense.objects.filter(nature=nature)
                    year_data = year_data.filter(date__year=year)
                    year_data = filter_for_institution(year_data, institution)

                    # By Month
                    last_date = year_data and year_data.order_by('-date')[0].date or date.today()
                    for month in range(1, 13):
                        print u'[%s] Consolidating nature %s totals for %d-%d…' % (institution.siglum, nature.name, year, month)
                        month_date = date(year, month, 1)

                        if month_date >= last_date:
                            break

                        mdata = year_data.filter(date__month=month)
                        mdata = mdata.values('nature__id')
                        mdata = mdata.annotate(expensed=Sum('expensed')).order_by('-expensed')

                        if mdata:
                            mdata = mdata[0]
                        else:
                            mdata = dict(expensed='0.')

                        p = PerNatureByMonth(institution=institution,
                                             date=month_date,
                                             nature=nature,
                                             expensed=float(mdata['expensed']))
                        per_natures_by_month_to_create.append(p)

                    year_data = year_data.values('nature__id')
                    year_data = year_data.annotate(expensed=Sum("expensed"))

                    if year_data:
                        year_data = year_data[0]
                    else:
                        year_data = dict(expensed='0.')

                    p = PerNatureByYear(institution=institution,
                                        year=year,
                                        nature=nature,
                                        expensed=float(year_data['expensed']))
                    per_natures_by_year_to_create.append(p)

            PerNature.objects.bulk_create(per_natures_to_create)
            PerNatureByMonth.objects.bulk_create(per_natures_by_month_to_create)
            PerNatureByYear.objects.bulk_create(per_natures_by_year_to_create)

            # Legislator
            PerLegislator.objects.filter(institution=institution).delete()

            data = Expense.objects.all()
            data = filter_for_institution(data, institution)

            date_ranges = get_date_ranges_from_data(institution, data)

            data = data.values('mandate__legislator__id')
            data = data.annotate(expensed=Sum('expensed'))

            per_legislators_to_create = list()
            for item in data:
                legislator = Legislator.objects.get(id=int(item['mandate__legislator__id']))

                # Totals for Legislature
                for legislature in institution.legislature_set.all():
                    pargs = (institution.siglum, legislator.name, legislature.date_start.year, legislature.date_end.year)
                    print u'[%s] Consolidating legislator %s totals for legislature %d-%d…' % pargs

                    legislature_data = Expense.objects.filter(mandate__legislature=legislature)
                    legislature_data = legislature_data.filter(mandate__legislator=legislator)

                    legislature_ranges = get_date_ranges_from_data(institution, legislature_data)

                    legislature_data = legislature_data.values('mandate__legislator__id')
                    legislature_data = legislature_data.annotate(expensed=Sum('expensed')).order_by('-expensed')

                    if legislature_data:
                        legislature_data = legislature_data[0]
                    else:
                        legislature_data = dict(expensed='0.')

                    p = PerLegislator(institution=institution,
                                      legislature=legislature,
                                      date_start=date_ranges['cdf'],
                                      date_end=date_ranges['cdt'],
                                      legislator=legislator,
                                      expensed=legislature_data['expensed'])
                    per_legislators_to_create.append(p)

                print u'[%s] Consolidating totals for legislator %s…' % (institution.siglum, legislator.name)
                p = PerLegislator(institution=institution,
                                  date_start=date_ranges['cdf'],
                                  date_end=date_ranges['cdt'],
                                  legislator=legislator,
                                  expensed=item['expensed'])
                per_legislators_to_create.append(p)
            PerLegislator.objects.bulk_create(per_legislators_to_create)

        if 'agnostic' in args:
            # Institution-agnostic consolidations - biggest suppliers
            print u'Consolidating institution-agnostic totals…'

            BiggestSupplierForYear.objects.all().delete()
            years = [d.year for d in Expense.objects.dates('date', 'year')]
            for year in years:
                print u'Consolidating supplier totals for year %d…' % year
                data = Expense.objects.filter(date__year=year)
                data = data.values('supplier__id')
                data = data.annotate(expensed=Sum('expensed')).order_by('-expensed')

                biggest_suppliers_for_year_to_add = list()
                for item in data:
                    supplier = Supplier.objects.get(id=item['supplier__id'])

                    b = BiggestSupplierForYear(supplier=supplier,
                                               year=year,
                                               expensed=item['expensed'])
                    biggest_suppliers_for_year_to_add.append(b)
                BiggestSupplierForYear.objects.bulk_create(biggest_suppliers_for_year_to_add)
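
The core query shape this command repeats is values() followed by annotate(Sum(...)), which becomes a GROUP BY on the selected columns. A minimal sketch of that grouping, assuming the same Expense model with nature and expensed fields used above:

from django.db.models import Sum

# One row per expense nature, carrying the summed amount, biggest totals
# first -- the same GROUP BY shape the command builds before materialising
# the PerNature rows.
totals_by_nature = (Expense.objects
                    .values('nature__id')
                    .annotate(expensed=Sum('expensed'))
                    .order_by('-expensed'))
# Each element is a dict, e.g. {'nature__id': 3, 'expensed': Decimal('1234.56')}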

Example 163

Project: olhoneles Source File: views.py
def get_supplier_detail_data(request, supplier_id, filter_spec=''):
    institution, _ = parse_filter(filter_spec)
    data, date_ranges = get_basic_objects_for_model(filter_spec)

    supplier = Supplier.objects.get(pk=supplier_id)
    data = data.filter(supplier=supplier)

    # Data prepared for displaying the per-party graph
    graph_data = data.values('mandate__party__logo', 'mandate__party__siglum', 'mandate__party__name')
    graph_data = graph_data.annotate(expensed=Sum('expensed'))
    graph_data = postprocess_party_data(institution, graph_data)

    top_buyers = data.values('mandate__legislator__id',
                             'mandate__legislator__name',
                             'mandate__legislature__institution__siglum',
                             'mandate__party__siglum')
    top_buyers = top_buyers.annotate(expensed=Sum('expensed')).order_by('-expensed')
    top_buyers = top_buyers[:15]

    total_expensed = data.values('supplier__name')
    total_expensed = total_expensed.annotate(total_expensed=Sum('expensed'))
    if len(total_expensed) > 0:
        total_expensed = total_expensed[0]['total_expensed']
    else:
        total_expensed = 0

    data = data.values('nature__name',
                       'mandate__legislator__name', 'mandate__party__siglum',
                       'number', 'date', 'expensed').order_by('-date')

    paginator = Paginator(data, 10)
    page = request.GET.get('page')
    try:
        data = paginator.page(page)
    except PageNotAnInteger:
        data = paginator.page(1)
    except EmptyPage:
        data = paginator.page(paginator.num_pages)

    c = {'supplier': supplier,
         'data': data,
         'graph_data': graph_data,
         'top_buyers': top_buyers,
         'total_expensed': total_expensed,
         'colors': generate_colors(len(data), 0.93, 0.8)}

    c.update(date_ranges)
    return c
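
total_expensed above is obtained by grouping on supplier__name and indexing the first row. Since the queryset is already filtered to a single supplier, a plain aggregate() gives the same number; a sketch of the shorter form, applied before data is narrowed to the display columns:

from django.db.models import Sum

# Equivalent to the values('supplier__name')/annotate()/[0] dance: the
# queryset holds one supplier's expenses, so one aggregate row is enough.
total_expensed = data.aggregate(total=Sum('expensed'))['total'] or 0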

Example 164

Project: linz2osm Source File: models.py
    def features_complete(self):
        return self.workslice_set.filter(state__in=('complete',)).aggregate(Sum('feature_count'))['feature_count__sum'] or 0
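
The trailing "or 0" is needed because aggregate() returns None for the sum when the filtered queryset is empty. The fallback can also live inside the query with Coalesce; a sketch, assuming feature_count is an integer field and with Workslice standing in for the model behind the method's workslice_set relation:

from django.db.models import Sum, Value
from django.db.models.functions import Coalesce

# An empty queryset makes aggregate() return {'feature_count__sum': None},
# which is what the "or 0" above guards against.  Coalesce moves that
# fallback into the query itself.
completed = (Workslice.objects.filter(state='complete')
             .aggregate(total=Coalesce(Sum('feature_count'), Value(0)))['total'])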

Example 165

Project: nodeconductor Source File: models.py
    @classmethod
    def get_sum_of_quotas_as_dict(cls, scopes, quota_names=None, fields=['usage', 'limit']):
        """
        Return dictionary with sum of all scopes' quotas.

        Dictionary format:
        {
            'quota_name1': 'sum of limits of quotas named quota_name1',
            'quota_name1_usage': 'sum of usages of quotas named quota_name1',
            ...
        }
        All `scopes` have to be instances of the same model.
        The `fields` keyword argument controls which quota fields (usage, limit) are
        summed into the result.
        """
        if not scopes:
            return {}

        if quota_names is None:
            quota_names = cls.get_quotas_names()

        scope_models = set([scope._meta.model for scope in scopes])
        if len(scope_models) > 1:
            raise exceptions.QuotaError('All scopes have to be instances of the same model')

        filter_kwargs = {
            'content_type': ct_models.ContentType.objects.get_for_model(scopes[0]),
            'object_id__in': [scope.id for scope in scopes],
            'name__in': quota_names
        }

        result = {}
        if 'usage' in fields:
            items = Quota.objects.filter(**filter_kwargs)\
                         .values('name').annotate(usage=Sum('usage'))
            for item in items:
                result[item['name'] + '_usage'] = item['usage']

        if 'limit' in fields:
            unlimited_quotas = Quota.objects.filter(limit=-1, **filter_kwargs)
            unlimited_quotas = list(unlimited_quotas.values_list('name', flat=True))
            for quota_name in unlimited_quotas:
                result[quota_name] = -1

            items = Quota.objects\
                         .filter(**filter_kwargs)\
                         .exclude(name__in=unlimited_quotas)\
                         .values('name')\
                         .annotate(limit=Sum('limit'))
            for item in items:
                result[item['name']] = item['limit']

        return result
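
A hypothetical call site: Project and customer are stand-ins here, since the cls.get_quotas_names() call only tells us the method lives on a quota-aware scope model and that all scopes must share one model class.

# Sum the 'ram' and 'storage' quotas across a set of scopes; a quota name
# that is unlimited (limit=-1) on any scope comes back as -1 rather than a sum.
projects = list(Project.objects.filter(customer=customer))
totals = Project.get_sum_of_quotas_as_dict(
    projects, quota_names=['ram', 'storage'], fields=['usage', 'limit'])
# e.g. {'ram': 10240, 'ram_usage': 4096, 'storage': -1, 'storage_usage': 512}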

Example 166

Project: otm-core Source File: benefits.py
    def _benefits_for_feature_qs(self, feature_qs, instance):
        from stormwater.models import PolygonalMapFeature
        feature_count = feature_qs.count()
        feature_qs = feature_qs.filter(drainage_area__isnull=False)
        poly_qs = PolygonalMapFeature.objects.filter(id__in=feature_qs)
        config = self.MapFeatureClass.get_config(instance)
        diversion_rate = config['diversion_rate']
        should_compute = (instance.annual_rainfall_inches is not None and
                          diversion_rate is not None and
                          config['should_show_eco'])
        if should_compute:
            total_drainage_area = feature_qs.aggregate(
                total_drainage_area=Sum('drainage_area')).get(
                'total_drainage_area')
            if total_drainage_area is None:
                should_compute = False
        if should_compute:
            annual_rainfall_ft = instance.annual_rainfall_inches * \
                FEET_PER_INCH
            # annual stormwater diverted =
            #     annual rainfall x (total feature area +
            #     (total drainage area x fraction stormwater diverted))
            total_drainage_area *= FEET_SQ_PER_METER_SQ
            feature_areas = \
                self.MapFeatureClass.feature_qs_areas(poly_qs)
            total_area = sum(feature_areas) * FEET_SQ_PER_METER_SQ
            runoff_reduced = annual_rainfall_ft * (
                total_area + total_drainage_area * diversion_rate)
            runoff_reduced *= GALLONS_PER_CUBIC_FT
            stats = self._format_stats(instance, runoff_reduced)
            features_used = poly_qs.count()
            basis = self._get_basis(features_used,
                                    feature_count - features_used)
        else:
            stats = {}
            basis = self._get_basis(0, feature_count)
        return stats, basis

Example 167

Project: otm-legacy Source File: update_aggregates.py
    def update_aggregates(self, base_model, ag_model):
        """
        Updates the aggregate tables.  TODO:  don't just overwrite old summaries
        example: update_aggregates(Neighborhood, AggregateNeighborhood)
        """
        start_time = time.time()
        ns = base_model.objects.all()
        field_names = [x.name for x in models.ResourceSummaryModel._meta.fields 
            if not x.name == 'id']
        for n in ns:
            print n
            agg =  ag_model.objects.filter(location=n)
            if agg:
                agg = agg[0]
            else:
                agg = ag_model(location=n)
            trees = models.Tree.objects.filter(plot__geometry__within=n.geometry)
            plots = models.Plot.objects.filter(geometry__within=n.geometry)
            agg.total_trees = trees.count()
            agg.total_plots = plots.count()
            #agg.distinct_species = len(trees.values("species").annotate(Count("id")).order_by("species"))
            #TODO figure out how to summarize diff stratum stuff
            trees = trees.exclude( Q(dbh=None) | Q(dbh=0.0) ).exclude(species=None)
            for f in field_names:
                if agg.total_trees == 0:
                    s = 0.0
                else:
                    fn = 'treeresource__' + f   
                    s = trees.aggregate(Sum(fn))[fn + '__sum'] or 0.0
                print agg,f,s
                setattr(agg,f,s)
            agg.save()
        end_time = time.time()
        print "done in ", float(end_time) - float(start_time), "seconds"

Example 168

Project: ganeti_webmgr Source File: models.py
Function: used_resources
    def used_resources(self, cluster=None, only_running=True):
        """
        Return dictionary of total resources used by VMs that this ClusterUser
        has perms to.
        @param cluster  if set, get only VMs from specified cluster
        @param only_running  if set, get only running VMs
        """
        # XXX - order_by must be cleared or it breaks annotation grouping since
        #       the default order_by field is also added to the group_by clause
        base = self.virtual_machines.all().order_by()

        # XXX - use a custom aggregate for ram and vcpu count when filtering by
        # running.  this allows us to execute a single query.
        #
        # XXX - quotes must be used in this order.  postgresql quirk
        if only_running:
            sum_ram = SumIf('ram', condition="status='running'")
            sum_vcpus = SumIf('virtual_cpus', condition="status='running'")
        else:
            sum_ram = Sum('ram')
            sum_vcpus = Sum('virtual_cpus')

        base = base.exclude(ram=-1, disk_size=-1, virtual_cpus=-1)

        if cluster:
            base = base.filter(cluster=cluster)
            result = base.aggregate(ram=sum_ram, disk=Sum('disk_size'),
                                    virtual_cpus=sum_vcpus)

            # repack with zeros instead of Nones
            if result['disk'] is None:
                result['disk'] = 0
            if result['ram'] is None:
                result['ram'] = 0
            if result['virtual_cpus'] is None:
                result['virtual_cpus'] = 0
            return result

        else:
            base = base.values('cluster').annotate(uram=sum_ram,
                                                   udisk=Sum('disk_size'),
                                                   uvirtual_cpus=sum_vcpus)

            # repack as dictionary
            result = {}
            for used in base:
                # repack with zeros instead of Nones, change index names
                used["ram"] = used.pop("uram") or 0
                used["disk"] = used.pop("udisk") or 0
                used["virtual_cpus"] = used.pop("uvirtual_cpus") or 0
                result[used.pop('cluster')] = used

            return result
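
SumIf is a project-specific aggregate from before Django had built-in conditional aggregation. On Django 2.0 and later, the only_running branch of used_resources could be sketched with Sum's filter argument instead; an illustration, not the project's code:

from django.db.models import Q, Sum

# Only rows whose status is 'running' contribute to the ram and vcpu sums;
# disk_size is summed unconditionally.  This replaces the SumIf pair above.
running = Q(status='running')
base = self.virtual_machines.all().order_by()
base = base.exclude(ram=-1, disk_size=-1, virtual_cpus=-1)
usage = base.values('cluster').annotate(
    uram=Sum('ram', filter=running),
    udisk=Sum('disk_size'),
    uvirtual_cpus=Sum('virtual_cpus', filter=running))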

Example 169

Project: pytrader Source File: views.py
def get_balance_chart(bs, denom, symbol, start_time):

    dep_amount_fieldname = 'deposited_amount_usd' if denom != 'btc_balance' else 'deposited_amount_btc'

    ds = PivotDataPool(
        series=[
            {'options': {
                'source': bs.filter(created_on__gte=start_time).order_by('-created_on').all(),
                'categories': 'date_str'
            },
                'terms': {
                    'total_value': Sum(denom), 'total_invested': Sum(dep_amount_fieldname),
                }}])

    pivcht = PivotChart(
        datasource=ds,
        series_options=[
            {'options': {
                'type': 'line',
                'xAxis': 0,
                'yAxis': 0},
                'terms': ['total_value', 'total_invested']}],
        chart_options={
            'title': {
                'text': 'Balance over time ('+denom+') '},
            'xAxis': {
                'title': {
                    'text': 'Time'}},
            'terms': ['total_value', 'total_invested']
        }
    )
    return pivcht

Example 170

Project: pytrader Source File: views.py
def get_trade_chart(bs, denom, symbol, start_time):

    if settings.MAKE_TRADES:
        trades = Trade.objects.exclude(created_on_str="").filter(
            symbol=symbol, created_on__gte=start_time).filter(status__in=['fill', 'open', 'error']).order_by('id')
    else:
        trades = Trade.objects.exclude(created_on_str="").filter(
            symbol=symbol, created_on__gte=start_time).order_by('id')

    ds = PivotDataPool(
        series=[
            {'options': {
                'source': trades,
                'categories': 'created_on_str',
                'legend_by': 'status'},
                'terms': {
                    'total_value': Sum('net_amount')}}])

    pivcht = PivotChart(
        datasource=ds,
        series_options=[
            {'options': {
                'type': 'column',
                'stacking': True,
                'xAxis': 0,
                'yAxis': 0},
                'terms': ['total_value']}],
        chart_options={
            'title': {
                'text': 'Trades over time '},
            'xAxis': {
                'title': {
                    'text': 'Time'}},
            'terms': ['total_value']
        }
    )

    return pivcht