django.db.models.Sum

Here are examples of the Python API django.db.models.Sum taken from open source projects. By voting up, you can indicate which examples are most useful and appropriate.
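
Before the examples, a quick orientation: Sum appears in two ways throughout this page. aggregate() collapses a whole queryset into a single value, while values() + annotate() computes one sum per group. A minimal sketch of both, assuming a hypothetical Order model with amount and customer fields:

from django.db.models import Sum
from myapp.models import Order  # hypothetical model, for illustration only

# aggregate() returns a dict; an unnamed aggregate gets the automatic key '<field>__sum'.
total = Order.objects.aggregate(Sum('amount'))['amount__sum']

# values() + annotate() groups by the named fields and attaches a per-group sum.
per_customer = Order.objects.values('customer').annotate(total=Sum('amount'))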

170 Examples

Example 101

Project: django-q Source File: monitor.py
def info(broker=None):
    if not broker:
        broker = get_broker()
    term = Terminal()
    broker.ping()
    stat = Stat.get_all(broker=broker)
    # general stats
    clusters = len(stat)
    workers = 0
    reincarnations = 0
    for cluster in stat:
        workers += len(cluster.workers)
        reincarnations += cluster.reincarnations
    # calculate tasks pm and avg exec time
    tasks_per = 0
    per = _('day')
    exec_time = 0
    last_tasks = models.Success.objects.filter(stopped__gte=timezone.now() - timedelta(hours=24))
    tasks_per_day = last_tasks.count()
    if tasks_per_day > 0:
        # average execution time over the last 24 hours
        if connection.vendor != 'sqlite':
            exec_time = last_tasks.aggregate(time_taken=Sum(F('stopped') - F('started')))
            exec_time = exec_time['time_taken'].total_seconds() / tasks_per_day
        else:
            # can't sum timedeltas on sqlite
            for t in last_tasks:
                exec_time += t.time_taken()
            exec_time = exec_time / tasks_per_day
        # tasks per second/minute/hour/day in the last 24 hours
        if tasks_per_day > 24 * 60 * 60:
            tasks_per = tasks_per_day / (24 * 60 * 60)
            per = _('second')
        elif tasks_per_day > 24 * 60:
            tasks_per = tasks_per_day / (24 * 60)
            per = _('minute')
        elif tasks_per_day > 24:
            tasks_per = tasks_per_day / 24
            per = _('hour')
        else:
            tasks_per = tasks_per_day
    # print to terminal
    print(term.clear_eos())
    col_width = int(term.width / 6)
    print(term.black_on_green(
        term.center(
            _('-- {} {} on {}  --').format(Conf.PREFIX.capitalize(), '.'.join(str(v) for v in VERSION),
                                           broker.info()))))
    print(term.cyan(_('Clusters')) +
          term.move_x(1 * col_width) +
          term.white(str(clusters)) +
          term.move_x(2 * col_width) +
          term.cyan(_('Workers')) +
          term.move_x(3 * col_width) +
          term.white(str(workers)) +
          term.move_x(4 * col_width) +
          term.cyan(_('Restarts')) +
          term.move_x(5 * col_width) +
          term.white(str(reincarnations))
          )
    print(term.cyan(_('Queued')) +
          term.move_x(1 * col_width) +
          term.white(str(broker.queue_size())) +
          term.move_x(2 * col_width) +
          term.cyan(_('Successes')) +
          term.move_x(3 * col_width) +
          term.white(str(models.Success.objects.count())) +
          term.move_x(4 * col_width) +
          term.cyan(_('Failures')) +
          term.move_x(5 * col_width) +
          term.white(str(models.Failure.objects.count()))
          )
    print(term.cyan(_('Schedules')) +
          term.move_x(1 * col_width) +
          term.white(str(models.Schedule.objects.count())) +
          term.move_x(2 * col_width) +
          term.cyan(_('Tasks/{}'.format(per))) +
          term.move_x(3 * col_width) +
          term.white('{0:.2f}'.format(tasks_per)) +
          term.move_x(4 * col_width) +
          term.cyan(_('Avg time')) +
          term.move_x(5 * col_width) +
          term.white('{0:.4f}'.format(exec_time))
          )
    return True
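
The pattern worth noting in this example is summing a value computed from two datetime columns with an F expression. A minimal sketch of just that aggregation, assuming the same Success model (newer Django versions may require an explicit output_field=DurationField, and sqlite cannot sum intervals at all, hence the Python fallback above):

from django.db.models import F, Sum

# On capable backends this returns {'time_taken': datetime.timedelta(...)}.
result = Success.objects.aggregate(time_taken=Sum(F('stopped') - F('started')))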

Example 102

Project: plata Source File: product.py
def product_xls():
    """
    Create a list of all product variations, including stock and aggregated
    stock transactions (by type)
    """

    from plata.product.stock.models import Period
    StockTransaction = plata.stock_model()

    xls = XLSDocument()
    xls.add_sheet(capfirst(_('products')))

    _transactions = StockTransaction.objects.filter(
        period=Period.objects.current(),
        ).order_by().values('product', 'type').annotate(Sum('change'))

    transactions = defaultdict(dict)
    for t in _transactions:
        transactions[t['product']][t['type']] = t['change__sum']

    titles = [
        capfirst(_('product')),
        _('SKU'),
        capfirst(_('stock')),
    ]
    titles.extend(
        '%s' % row[1] for row in StockTransaction.TYPE_CHOICES)

    data = []

    for product in plata.product_model().objects.all().select_related():
        row = [
            product,
            getattr(product, 'sku', ''),
            getattr(product, 'items_in_stock', -1),
            ]
        row.extend(
            transactions[product.id].get(key, '')
            for key, name in StockTransaction.TYPE_CHOICES)
        data.append(row)

    xls.table(titles, data)
    return xls
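
The lookup t['change__sum'] above works because an unnamed aggregate is given an automatic '<field>__sum' alias. The same grouping with an explicit alias, as a sketch against the StockTransaction model assumed above:

from django.db.models import Sum

rows = (StockTransaction.objects
        .order_by()                      # clear any default ordering so the grouping is exact
        .values('product', 'type')       # GROUP BY product, type
        .annotate(total=Sum('change')))  # each row: {'product': ..., 'type': ..., 'total': ...}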

Example 103

Project: database-as-a-service Source File: tasks.py
@app.task(bind=True)
@only_one(key="db_infra_notification_key", timeout=20)
def databaseinfra_notification(self, user=None):
    worker_name = get_worker_name()
    task_history = TaskHistory.register(
        request=self.request, user=user, worker_name=worker_name)
    threshold_infra_notification = Configuration.get_by_name_as_int(
        "threshold_infra_notification", default=0)
    if threshold_infra_notification <= 0:
        LOG.warning("database infra notification is disabled")
        return

    # Sum capacity per databaseinfra, grouped by plan, environment and engine
    infras = DatabaseInfra.objects.values(
        'plan__name', 'environment__name', 'engine__engine_type__name',
        'plan__provider'
    ).annotate(capacity=Sum('capacity'))
    for infra in infras:
        try:
            database = infra.databases.get()
        except Database.MultipleObjectsReturned:
            pass
        else:
            if database.is_in_quarantine:
                continue
            if not database.subscribe_to_email_events:
                continue

        used = DatabaseInfra.objects.filter(
            plan__name=infra['plan__name'], environment__name=infra['environment__name'],
            engine__engine_type__name=infra['engine__engine_type__name']
        ).aggregate(used=Count('databases'))
        # calculate the percentage

        percent = int(used['used'] * 100 / infra['capacity'])
        if percent >= threshold_infra_notification and infra['plan__provider'] != Plan.CLOUDSTACK:
            LOG.info('Plan %s in environment %s with %s%% occupied' % (
                infra['plan__name'], infra['environment__name'], percent))
            LOG.info("Sending database infra notification...")
            context = {}
            context['plan'] = infra['plan__name']
            context['environment'] = infra['environment__name']
            context['used'] = used['used']
            context['capacity'] = infra['capacity']
            context['percent'] = percent
            email_notifications.databaseinfra_ending(context=context)

        task_history.update_status_for(
            TaskHistory.STATUS_SUCCESS,
            details='Databaseinfra Notification successfully sent to dbaas admins!'
        )
    return

Example 104

Project: django-bitcoin Source File: tasks.py
@task()
def check_integrity():
    from django_bitcoin.models import Wallet, BitcoinAddress, WalletTransaction, DepositTransaction
    from django_bitcoin.utils import bitcoind
    from django.db.models import Avg, Max, Min, Sum
    from decimal import Decimal

    import sys
    from cStringIO import StringIO
    backup = sys.stdout
    sys.stdout = StringIO()

    bitcoinaddress_sum = BitcoinAddress.objects.filter(active=True)\
        .aggregate(Sum('least_received_confirmed'))['least_received_confirmed__sum'] or Decimal(0)
    print "Total received, sum", bitcoinaddress_sum
    transaction_wallets_sum = WalletTransaction.objects.filter(from_wallet__id__gt=0, to_wallet__id__gt=0)\
        .aggregate(Sum('amount'))['amount__sum'] or Decimal(0)
    print "Total transactions, sum", transaction_wallets_sum
    transaction_out_sum = WalletTransaction.objects.filter(from_wallet__id__gt=0)\
        .exclude(to_bitcoinaddress="")\
        .aggregate(Sum('amount'))['amount__sum'] or Decimal(0)
    print "Total outgoing, sum", transaction_out_sum
    # for x in WalletTransaction.objects.filter(from_wallet__id__gt=0, to_wallet__isnull=True, to_bitcoinaddress=""):
    #   print x.amount, x.created_at
    fee_sum = WalletTransaction.objects.filter(from_wallet__id__gt=0, to_wallet__isnull=True, to_bitcoinaddress="")\
        .aggregate(Sum('amount'))['amount__sum'] or Decimal(0)
    print "Fees, sum", fee_sum
    print "DB balance", (bitcoinaddress_sum - transaction_out_sum - fee_sum)
    print "----"
    bitcoind_balance = bitcoind.bitcoind_api.getbalance()
    print "Bitcoind balance", bitcoind_balance
    print "----"
    print "Wallet quick check"
    total_sum = Decimal(0)
    for w in Wallet.objects.filter(last_balance__lt=0):
        if w.total_balance()<0:
            bal = w.total_balance()
            # print w.id, bal
            total_sum += bal
    print "Negatives:", Wallet.objects.filter(last_balance__lt=0).count(), "Amount:", total_sum
    print "Migration check"
    tot_received = WalletTransaction.objects.filter(from_wallet=None).aggregate(Sum('amount'))['amount__sum'] or Decimal(0)
    tot_received_bitcoinaddress = BitcoinAddress.objects.filter(migrated_to_transactions=True)\
        .aggregate(Sum('least_received_confirmed'))['least_received_confirmed__sum'] or Decimal(0)
    tot_received_unmigrated = BitcoinAddress.objects.filter(migrated_to_transactions=False)\
        .aggregate(Sum('least_received_confirmed'))['least_received_confirmed__sum'] or Decimal(0)
    if tot_received != tot_received_bitcoinaddress:
        print "wrong total receive amount! "+str(tot_received)+", "+str(tot_received_bitcoinaddress)
    print "Total " + str(tot_received) + " BTC deposits migrated, unmigrated " + str(tot_received_unmigrated) + " BTC"
    print "Migration check #2"
    dts = DepositTransaction.objects.filter(address__migrated_to_transactions=False).exclude(transaction=None)
    if dts.count() > 0:
        print "Illegal transaction!", dts
    if WalletTransaction.objects.filter(from_wallet=None, deposit_address=None).count() > 0:
        print "Illegal deposit transactions!"
    print "Wallet check"
    for w in Wallet.objects.filter(last_balance__gt=0):
        lb = w.last_balance
        tb_sql = w.total_balance_sql()
        tb = w.total_balance()
        if lb != tb or w.last_balance != tb or tb != tb_sql:
            print "Wallet balance error!", w.id, lb, tb_sql, tb
            print w.sent_transactions.all().count()
            print w.received_transactions.all().count()
            print w.sent_transactions.all().aggregate(Max('created_at'))['created_at__max']
            print w.received_transactions.all().aggregate(Max('created_at'))['created_at__max']
            # Wallet.objects.filter(id=w.id).update(last_balance=w.total_balance_sql())
    # print w.created_at, w.sent_transactions.all(), w.received_transactions.all()
        # if random.random() < 0.001:
        #     sleep(1)
    print "Address check"
    for ba in BitcoinAddress.objects.filter(least_received_confirmed__gt=0, migrated_to_transactions=True):
        dts = DepositTransaction.objects.filter(address=ba, wallet=ba.wallet)
        s = dts.aggregate(Sum('amount'))['amount__sum'] or Decimal(0)
        if s != ba.least_received:
            print "DepositTransaction error", ba.address, ba.least_received, s
            print "BitcoinAddress check"
    for ba in BitcoinAddress.objects.filter(migrated_to_transactions=True):
        dts = ba.deposittransaction_set.filter(address=ba, confirmations__gte=settings.BITCOIN_MINIMUM_CONFIRMATIONS)
        deposit_sum = dts.aggregate(Sum('amount'))['amount__sum'] or Decimal(0)
        wt_sum = WalletTransaction.objects.filter(deposit_address=ba).aggregate(Sum('amount'))['amount__sum'] or Decimal(0)
        if wt_sum != deposit_sum or ba.least_received_confirmed != deposit_sum:
            print "Bitcoinaddress integrity error!", ba.address, deposit_sum, wt_sum, ba.least_received_confirmed
        # if random.random() < 0.001:
        #     sleep(1)

    integrity_test_output = sys.stdout.getvalue() # release output
    # ####

    sys.stdout.close()  # close the stream
    sys.stdout = backup # restore original stdout
    mail_admins("Integrity check", integrity_test_output)
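
This example repeatedly guards against empty querysets with "or Decimal(0)", because aggregate() returns None when there are no rows. On Django 1.8+ the same guard can be pushed into SQL with Coalesce; a sketch using the WalletTransaction model from the example:

from decimal import Decimal
from django.db.models import Sum, Value
from django.db.models.functions import Coalesce

total = WalletTransaction.objects.aggregate(
    total=Coalesce(Sum('amount'), Value(Decimal(0))))['total']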

Example 105

Project: sfm-ui Source File: notifications.py
Function: create_context
def _create_context(user, collection_set_cache):
    today = datetime.date.today()
    yesterday = today + datetime.timedelta(days=-1)
    prev_day = today + datetime.timedelta(days=-2)
    # Greater than this date
    last_7_start = yesterday + datetime.timedelta(days=-7)
    prev_7_start = yesterday + datetime.timedelta(days=-14)
    last_30_start = yesterday + datetime.timedelta(days=-30)
    prev_30_start = yesterday + datetime.timedelta(days=-60)
    # Less than or equal to this date
    last_7_end = yesterday
    prev_7_end = last_7_start
    last_30_end = yesterday
    prev_30_end = last_30_start
    c = {
        "url": _create_url(reverse('home'))
    }
    # Ordered list of collection sets
    collection_sets = OrderedDict()
    for collection_set in CollectionSet.objects.filter(group__in=user.groups.all()).order_by('name'):
        # Using a cache to avoid regenerating the data repeatedly.
        if collection_set in collection_set_cache:
            collections = collection_set_cache[collection_set]
        else:
            collections = OrderedDict()
            for collection in Collection.objects.filter(collection_set=collection_set).order_by('name'):
                collection_info = {
                    "url": _create_url(reverse('collection_detail', args=(collection.id,)))
                }
                if collection.is_active:
                    collection_info['next_run_time'] = next_run_time(collection.id)
                    stats = {}
                    # Yesterday
                    _add_stats(stats, 'yesterday', HarvestStat.objects.filter(harvest__collection=collection,
                                                                              harvest_date=yesterday).values(
                        'item').annotate(count=Sum('count')))
                    # Prev day
                    _add_stats(stats, 'prev_day', HarvestStat.objects.filter(harvest__collection=collection,
                                                                             harvest_date=prev_day).values(
                        'item').annotate(count=Sum('count')))
                    # Last 7
                    _add_stats(stats, 'last_7', HarvestStat.objects.filter(harvest__collection=collection,
                                                                           harvest_date__gt=last_7_start,
                                                                           harvest_date__lte=last_7_end).values(
                        'item').annotate(count=Sum('count')))
                    # Prev 7
                    _add_stats(stats, 'prev_7', HarvestStat.objects.filter(harvest__collection=collection,
                                                                           harvest_date__gt=prev_7_start,
                                                                           harvest_date__lte=prev_7_end).values(
                        'item').annotate(count=Sum('count')))
                    # Last 30
                    _add_stats(stats, 'last_30', HarvestStat.objects.filter(harvest__collection=collection,
                                                                            harvest_date__gt=last_30_start,
                                                                            harvest_date__lte=last_30_end).values(
                        'item').annotate(count=Sum('count')))
                    # Prev 30
                    _add_stats(stats, 'prev_30', HarvestStat.objects.filter(harvest__collection=collection,
                                                                            harvest_date__gt=prev_30_start,
                                                                            harvest_date__lte=prev_30_end).values(
                        'item').annotate(count=Sum('count')))
                    collection_info['stats'] = stats
                collections[collection] = collection_info

            collection_set_cache[collection_set] = collections
        collection_sets[collection_set] = {
            "collections": collections,
            "url": _create_url(reverse('collection_set_detail', args=(collection_set.id,)))

        }
    c['collection_sets'] = collection_sets
    return c
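
The five statistics queries above differ only in their date filters. A small helper, sketched here under the same assumed HarvestStat model, would remove the repetition:

from django.db.models import Sum

def _item_counts(collection, **date_filters):
    # One grouped Sum per item, parameterised by the date window.
    return (HarvestStat.objects
            .filter(harvest__collection=collection, **date_filters)
            .values('item')
            .annotate(count=Sum('count')))

# e.g. _add_stats(stats, 'last_7', _item_counts(collection,
#                 harvest_date__gt=last_7_start, harvest_date__lte=last_7_end))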

Example 106

Project: CommunityCellularManager Source File: staff.py
    def get(self, request):
        """"Handles GET requests."""
        user_profile = models.UserProfile.objects.get(user=request.user)
        if not user_profile.user.is_staff:
            return response.Response('', status=status.HTTP_404_NOT_FOUND)
        # Build up the context and initial form data.
        initial_form_data = {}
        context = {
            'networks': get_objects_for_user(request.user, 'view_network', klass=models.Network),
            'user_profile': user_profile,
        }
        network_pk = request.GET.get('network', None)
        if network_pk:
            # If a network has been selected, populate the form and the table.
            initial_form_data['network'] = network_pk
            network = models.Network.objects.get(pk=network_pk)
            context['network'] = network
            operator = models.UserProfile.objects.get(network=network)
            context['operator'] = operator
            # There were errors with the CC recharge system that occurred
            # multiple times before Feb 3, 2015.  So for analysis purposes,
            # we'll ignore data before that time.
            feb3_2015 = datetime.datetime(year=2015, month=2, day=3,
                                          tzinfo=pytz.utc)
            context['start_of_analysis'] = feb3_2015
            network_creation_date = operator.user.date_joined
            context['network_creation_date'] = network_creation_date
            if network_creation_date < feb3_2015:
                days_of_operation = (datetime.datetime.now(pytz.utc) -
                                     feb3_2015).days
            else:
                days_of_operation = (datetime.datetime.now(pytz.utc) -
                                     network_creation_date).days
            context['days_of_operation'] = days_of_operation
            # Calculate operator revenue (the sum of UsageEvent.change for
            # certain UsageEvent.kinds).  These change values are all negative
            # so we multiply by negative one to fix that.
            kinds = ['local_call', 'local_sms', 'outside_call', 'outside_sms',
                     'incoming_call', 'incoming_sms', 'local_recv_call',
                     'local_recv_sms']
            events = models.UsageEvent.objects.filter(
                network=network, kind__in=kinds, date__gte=feb3_2015).only(
                    'change')
            if events:
                credit = events.aggregate(Sum('change'))['change__sum']
                # Convert revenue to USD.
                conversion_to_usd = {
                    'USD': 1 / (100 * 1000.),
                    'IDR': 1 / 13789.50,
                }
                multiplier = conversion_to_usd[network.subscriber_currency]
                revenue = -1 * credit * multiplier
            else:
                revenue = 0
            context['revenue'] = revenue
            # Calculate operator costs (payments to Endaga).
            ledger = models.Ledger.objects.get(userp=operator)
            transactions = models.Transaction.objects.filter(
                ledger=ledger, kind='credit', reason='Automatic Recharge',
                created__gte=feb3_2015).only('amount')
            if transactions:
                costs = (transactions.aggregate(Sum('amount'))['amount__sum'] /
                         (100 * 1000.))
            else:
                costs = 0
            context['costs'] = costs
            # Determine the net profit.
            profit = revenue - costs
            context['profit'] = profit
            if days_of_operation != 0:
                context['profit_per_day'] = profit / float(days_of_operation)
            else:
                context['profit_per_day'] = None

        # Attach the network selection form with any specified initial data.
        select_network_form = SelectNetworkForm(initial=initial_form_data)
        select_network_form.helper.form_action = (
            '/dashboard/staff/network-earnings')
        context['select_network_form'] = select_network_form
        # Render the template.
        earnings_template = template.loader.get_template(
            'dashboard/staff/network-earnings.html')
        html = earnings_template.render(context, request)
        return http.HttpResponse(html)

Example 107

Project: evething Source File: trade.py
@login_required
def trade_timeframe(request, year=None, month=None, period=None, slug=None):
    """Trade overview for a variety of timeframe types"""
    # Initialise data
    data = {
        'total_buys': 0,
        'total_sells': 0,
        'total_balance': 0,
        'total_projected_average': 0,
        'total_projected_market': 0,
    }

    # Get a QuerySet of transactions by this user
    characters = list(Character.objects.filter(apikeys__user=request.user.id).values_list('id', flat=True))
    corporations = Corporation.get_ids_with_access(request.user, APIKey.CORP_WALLET_TRANSACTIONS_MASK)
    wallets = list(CorpWallet.objects.filter(corporation__in=corporations).values_list('account_id', flat=True))

    transactions = Transaction.objects.filter(
        Q(character__in=characters) |
        Q(corp_wallet__in=wallets)
    )

    # Year/Month
    if year and month:
        year = int(year)
        month = int(month)
        transactions = transactions.filter(date__range=_month_range(year, month))
        data['timeframe'] = '%s %s' % (MONTHS[month], year)
        data['urlpart'] = '%s-%02d' % (year, month)
    # Timeframe slug
    elif slug:
        camp = get_object_or_404(Campaign, slug=slug)
        transactions = camp.get_transactions_filter(transactions)
        data['timeframe'] = '%s (%s -> %s)' % (camp.title, camp.start_date, camp.end_date)
        data['urlpart'] = slug
    # All
    elif period:
        data['timeframe'] = 'all time'
        data['urlpart'] = 'all'

    # Fetch the aggregate transaction data
    data_set = transactions.values('buy_transaction', 'item').annotate(
        sum_quantity=Sum('quantity'),
        min_price=Min('price'),
        max_price=Max('price'),
        sum_total=Sum('total_price'),
    )

    t_map = {}
    # { buy_transaction, item, sum_quantity, min_price, max_price, sum_total }
    for row in data_set.iterator():
        item_id = int(row['item'])

        if item_id not in t_map:
            t_map[item_id] = {}

        if row['buy_transaction']:
            t_map[item_id]['buy_quantity'] = row['sum_quantity']
            t_map[item_id]['buy_minimum'] = row['min_price']
            t_map[item_id]['buy_maximum'] = row['max_price']
            t_map[item_id]['buy_total'] = row['sum_total']
            t_map[item_id]['buy_average'] = row['sum_total'] / row['sum_quantity']
        else:
            t_map[item_id]['sell_quantity'] = row['sum_quantity']
            t_map[item_id]['sell_minimum'] = row['min_price']
            t_map[item_id]['sell_maximum'] = row['max_price']
            t_map[item_id]['sell_total'] = row['sum_total']
            t_map[item_id]['sell_average'] = row['sum_total'] / row['sum_quantity']

    # fetch the items
    item_map = Item.objects.select_related().in_bulk(t_map.keys())

    # import time
    # start = time.time()

    data['items'] = []
    for item in item_map.values():
        t = t_map[item.id]
        item.t = t

        # Add missing data
        for k in ('buy_average', 'sell_average', 'buy_quantity', 'sell_quantity', 'buy_minimum', 'sell_minimum',
                  'buy_maximum', 'sell_maximum', 'buy_total', 'sell_total'):
            if k not in t:
                t[k] = 0

        if t['buy_average'] and t['sell_average']:
            t['average_profit'] = (t['sell_average'] - t['buy_average']).quantize(TWO_PLACES)
            t['average_profit_per'] = '%.1f' % (t['average_profit'] / t['buy_average'] * 100)

        t['diff'] = t['buy_quantity'] - t['sell_quantity']

        t['balance'] = t['sell_total'] - t['buy_total']

        # Projected balance
        if t['diff'] > 0:
            t['projected_average'] = (t['balance'] + (t['diff'] * t['sell_average'])).quantize(TWO_PLACES)
            t['projected_market'] = (t['balance'] + (t['diff'] * item.sell_price)).quantize(TWO_PLACES)
            t['outstanding'] = ((t['projected_average'] - t['balance']) * -1).quantize(TWO_PLACES)
            if t['outstanding'] == 0:
                t['outstanding'] = ((t['projected_market'] - t['balance']) * -1).quantize(TWO_PLACES)
        else:
            t['projected_average'] = t['balance']
            t['projected_market'] = t['balance']

        data['items'].append(item)

        # Update totals
        data['total_buys'] += t['buy_total']
        data['total_sells'] += t['sell_total']
        data['total_projected_average'] += t['projected_average']
        data['total_projected_market'] += t['projected_market']

    # Render template
    return render_page(
        'thing/trade_timeframe.html',
        data,
        request,
    )

Example 108

Project: django-oscar-accounts Source File: views.py
    def form_valid(self, form):
        # Take the cutoff as the first second of the following day, which we
        # convert to a datetime instance in UTC
        threshold_date = form.cleaned_data['date'] + datetime.timedelta(days=1)
        threshold_datetime = datetime.datetime.combine(
            threshold_date, datetime.time(tzinfo=timezone.utc))

        # Get data
        rows = []
        totals = {'total': D('0.00'),
                  'num_accounts': 0}
        for acc_type_name in names.DEFERRED_INCOME_ACCOUNT_TYPES:
            acc_type = AccountType.objects.get(name=acc_type_name)
            data = {
                'name': acc_type_name,
                'total': D('0.00'),
                'num_accounts': 0,
                'num_expiring_within_30': 0,
                'num_expiring_within_60': 0,
                'num_expiring_within_90': 0,
                'num_expiring_outside_90': 0,
                'num_open_ended': 0,
                'total_expiring_within_30': D('0.00'),
                'total_expiring_within_60': D('0.00'),
                'total_expiring_within_90': D('0.00'),
                'total_expiring_outside_90': D('0.00'),
                'total_open_ended': D('0.00'),
            }
            for account in acc_type.accounts.all():
                data['num_accounts'] += 1
                total = account.transactions.filter(
                    date_created__lt=threshold_datetime).aggregate(
                        total=Sum('amount'))['total']
                if total is None:
                    total = D('0.00')
                data['total'] += total
                days_remaining = account.days_remaining(threshold_datetime)
                if days_remaining is None:
                    data['num_open_ended'] += 1
                    data['total_open_ended'] += total
                else:
                    if days_remaining <= 30:
                        data['num_expiring_within_30'] += 1
                        data['total_expiring_within_30'] += total
                    elif days_remaining <= 60:
                        data['num_expiring_within_60'] += 1
                        data['total_expiring_within_60'] += total
                    elif days_remaining <= 90:
                        data['num_expiring_within_90'] += 1
                        data['total_expiring_within_90'] += total
                    else:
                        data['num_expiring_outside_90'] += 1
                        data['total_expiring_outside_90'] += total

            totals['total'] += data['total']
            totals['num_accounts'] += data['num_accounts']
            rows.append(data)
        ctx = self.get_context_data(form=form)
        ctx['rows'] = rows
        ctx['totals'] = totals
        ctx['report_date'] = form.cleaned_data['date']
        return self.render_to_response(ctx)

Example 109

Project: moolah Source File: models.py
Function: total
    def total(self):
        return self.aggregate(models.Sum('amount_per_day'))[
            'amount_per_day__sum']
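
One caveat with this queryset method: aggregate() yields None rather than 0 for an empty queryset, so callers of total() must be prepared for that. A None-safe variant, as a sketch:

    def total(self):
        # Empty querysets aggregate to None, not 0, so fall back explicitly.
        return self.aggregate(models.Sum('amount_per_day'))[
            'amount_per_day__sum'] or 0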

Example 110

Project: fumblerooski Source File: views.py
def player_detail(request, team, season, player):
    cy = get_object_or_404(CollegeYear, college__slug=team, season=season)
    p = Player.objects.get(team=cy, season=cy.season, slug=player)
    starts = PlayerGame.objects.filter(player=p, game__season=season, starter=True).count()
    ps = PlayerScoring.objects.filter(player=p, game__season=season).select_related().order_by('-college_game.date')
    pret = PlayerReturn.objects.filter(player=p, game__season=season).select_related().order_by('-college_game.date')
    pf = PlayerFumble.objects.filter(player=p, game__season=season).select_related().order_by('-college_game.date')
    pr = PlayerRush.objects.filter(player=p, game__season=season).select_related().order_by('-college_game.date')
    if pr:
        rush_totals = pr.aggregate(Sum('net'),Sum('gain'),Sum('loss'),Sum('rushes'),Sum('td'))
        rush_tot_avg = float(rush_totals['net__sum'])/float(rush_totals['rushes__sum'])
    else:
        rush_totals = {'rushes__sum': None, 'gain__sum': None, 'loss__sum': None, 'td__sum': None, 'net__sum': None}
        rush_tot_avg = None
    pp = PlayerPass.objects.filter(player=p, game__season=season).select_related().order_by('-college_game.date')
    if pp:
        pass_totals = pp.aggregate(Sum('td'), Sum('yards'), Sum('attempts'), Sum('completions'), Sum('interceptions'), Avg('pass_efficiency'))
        comp_pct = float(pass_totals['completions__sum'])/float(pass_totals['attempts__sum'])*100
    else:
        pass_totals = {'interceptions__sum': None, 'td__sum':None, 'attempts__sum': None, 'completions__sum': None, 'yards__sum': None, 'pass_efficiency__avg': None}
        comp_pct = None
    prec = PlayerReceiving.objects.filter(player=p, game__season=season).select_related().order_by('-college_game.date')
    if prec:
        rec_totals = prec.aggregate(Sum('receptions'), Sum('yards'), Sum('td'))
        rec_tot_avg = float(rec_totals['yards__sum'])/float(rec_totals['receptions__sum'])
    else:
        rec_totals = {'receptions__sum': None, 'yards__sum': None, 'td__sum': None}
        rec_tot_avg = None
    pt = PlayerTackle.objects.filter(player=p, game__season=season).select_related().order_by('-college_game.date')
    ptfl = PlayerTacklesLoss.objects.filter(player=p, game__season=season).select_related().order_by('-college_game.date')
    ppd = PlayerPassDefense.objects.filter(player=p, game__season=season).select_related().order_by('-college_game.date')
    other_seasons = Player.objects.filter(team__college=cy.college, slug=p.slug).exclude(season=season).order_by('-season')
    return render_to_response('college/player_detail.html', {'team': cy.college, 'year': season, 'cy': cy, 'player': p, 'starts': starts, 'other_seasons': other_seasons, 'scoring': ps, 'returns': pret, 'fumbles': pf, 
        'rushing': pr, 'passing':pp, 'receiving': prec, 'tackles':pt, 'tacklesloss': ptfl, 'passdefense':ppd, 
        'pass_tot_int':pass_totals['interceptions__sum'], 'pass_tot_td':pass_totals['td__sum'], 'pass_tot_attempts': pass_totals['attempts__sum'], 'pass_tot_comps': pass_totals['completions__sum'], 
        'pass_tot_yards': pass_totals['yards__sum'], 'pass_tot_eff': pass_totals['pass_efficiency__avg'], 'rush_tot_rushes': rush_totals['rushes__sum'], 'rush_tot_gains': rush_totals['gain__sum'],
        'rush_tot_loss': rush_totals['loss__sum'], 'rush_tot_td': rush_totals['td__sum'], 'rush_tot_net': rush_totals['net__sum'], 'rush_tot_avg': rush_tot_avg, 'comp_pct':comp_pct, 
        'rec_tot_receptions': rec_totals['receptions__sum'], 'rec_tot_yards': rec_totals['yards__sum'], 'rec_tot_td': rec_totals['td__sum'], 'rec_tot_avg': rec_tot_avg})

Example 111

Project: ANALYSE Source File: psychoanalyze.py
def generate_plots_for_problem(problem):

    pmdset = PsychometricData.objects.using(db).filter(
        studentmodule__module_state_key=BlockUsageLocator.from_string(problem)
    )
    nstudents = pmdset.count()
    msg = ""
    plots = []

    if nstudents < 2:
        msg += "%s nstudents=%d --> skipping, too few" % (problem, nstudents)
        return msg, plots

    max_grade = pmdset[0].studentmodule.max_grade

    agdat = pmdset.aggregate(Sum('attempts'), Max('attempts'))
    max_attempts = agdat['attempts__max']
    total_attempts = agdat['attempts__sum']  # not used yet

    msg += "max attempts = %d" % max_attempts

    xdat = range(1, max_attempts + 1)
    dataset = {'xdat': xdat}

    # compute grade statistics
    grades = [pmd.studentmodule.grade for pmd in pmdset]
    gsv = StatVar()
    for g in grades:
        gsv += g
    msg += "<br><p><font color='blue'>Grade distribution: %s</font></p>" % gsv

    # generate grade histogram
    ghist = []

    axisopts = """{
        xaxes: [{
            axisLabel: 'Grade'
        }],
        yaxes: [{
            position: 'left',
            axisLabel: 'Count'
         }]
         }"""

    if gsv.max > max_grade:
        msg += "<br/><p><font color='red'>Something is wrong: max_grade=%s, but max(grades)=%s</font></p>" % (max_grade, gsv.max)
        max_grade = gsv.max

    if max_grade > 1:
        ghist = make_histogram(grades, np.linspace(0, max_grade, max_grade + 1))
        ghist_json = json.dumps(ghist.items())

        plot = {'title': "Grade histogram for %s" % problem,
                'id': 'histogram',
                'info': '',
                'data': "var dhist = %s;\n" % ghist_json,
                'cmd': '[ {data: dhist, bars: { show: true, align: "center" }} ], %s' % axisopts,
                }
        plots.append(plot)
    else:
        msg += "<br/>Not generating histogram: max_grade=%s" % max_grade

    # histogram of time differences between checks
    # Warning: this is inefficient - doesn't scale to large numbers of students
    dtset = []  # time differences in minutes
    dtsv = StatVar()
    for pmd in pmdset:
        try:
            checktimes = eval(pmd.checktimes)  # parse the logged list of attempt timestamps
        except Exception:
            continue
        if len(checktimes) < 2:
            continue
        ct0 = checktimes[0]
        for ct in checktimes[1:]:
            dt = (ct - ct0).total_seconds() / 60.0
            if dt < 20:  # ignore if dt too long
                dtset.append(dt)
                dtsv += dt
            ct0 = ct
    if dtsv.cnt > 2:
        msg += "<br/><p><font color='brown'>Time differences between checks: %s</font></p>" % dtsv
        bins = np.linspace(0, 1.5 * dtsv.sdv(), 30)
        dbar = bins[1] - bins[0]
        thist = make_histogram(dtset, bins)
        thist_json = json.dumps(sorted(thist.items(), key=lambda(x): x[0]))

        axisopts = """{ xaxes: [{ axisLabel: 'Time (min)'}], yaxes: [{position: 'left',axisLabel: 'Count'}]}"""

        plot = {'title': "Histogram of time differences between checks",
                'id': 'thistogram',
                'info': '',
                'data': "var thist = %s;\n" % thist_json,
                'cmd': '[ {data: thist, bars: { show: true, align: "center", barWidth:%f }} ], %s' % (dbar, axisopts),
                }
        plots.append(plot)

    # one IRT plot curve for each grade received (TODO: this assumes integer grades)
    for grade in range(1, int(max_grade) + 1):
        yset = {}
        gset = pmdset.filter(studentmodule__grade=grade)
        ngset = gset.count()
        if ngset == 0:
            continue
        ydat = []
        ylast = 0
        for x in xdat:
            y = gset.filter(attempts=x).count() / ngset
            ydat.append(y + ylast)
            ylast = y + ylast
        yset['ydat'] = ydat

        if len(ydat) > 3:  # try to fit to logistic function if enough data points
            try:
                cfp = curve_fit(func_2pl, xdat, ydat, [1.0, max_attempts / 2.0])
                yset['fitparam'] = cfp
                yset['fitpts'] = func_2pl(np.array(xdat), *cfp[0])
                yset['fiterr'] = [yd - yf for (yd, yf) in zip(ydat, yset['fitpts'])]
                fitx = np.linspace(xdat[0], xdat[-1], 100)
                yset['fitx'] = fitx
                yset['fity'] = func_2pl(np.array(fitx), *cfp[0])
            except Exception as err:
                log.debug('Error in psychoanalyze curve fitting: %s' % err)

        dataset['grade_%d' % grade] = yset

    axisopts = """{
        xaxes: [{
            axisLabel: 'Number of Attempts'
        }],
        yaxes: [{
            max:1.0,
            position: 'left',
            axisLabel: 'Probability of correctness'
         }]
         }"""

    # generate points for flot plot
    for grade in range(1, int(max_grade) + 1):
        jsdata = ""
        jsplots = []
        gkey = 'grade_%d' % grade
        if gkey in dataset:
            yset = dataset[gkey]
            jsdata += "var d%d = %s;\n" % (grade, json.dumps(zip(xdat, yset['ydat'])))
            jsplots.append('{ data: d%d, lines: { show: false }, points: { show: true}, color: "red" }' % grade)
            if 'fitpts' in yset:
                jsdata += 'var fit = %s;\n' % (json.dumps(zip(yset['fitx'], yset['fity'])))
                jsplots.append('{ data: fit,  lines: { show: true }, color: "blue" }')
                (a, b) = yset['fitparam'][0]
                irtinfo = "(2PL: D=1.7, a=%6.3f, b=%6.3f)" % (a, b)
            else:
                irtinfo = ""

            plots.append({'title': 'IRT Plot for grade=%s %s' % (grade, irtinfo),
                          'id': "irt%s" % grade,
                          'info': '',
                          'data': jsdata,
                          'cmd': '[%s], %s' % (','.join(jsplots), axisopts),
                          })

    #log.debug('plots = %s' % plots)
    return msg, plots

Example 112

Project: eyebrowse-server Source File: views.py
@render_to('stats/profile_viz.html')
def profile_viz(request, username=None):

    if request.GET.get("date") is None or request.GET.get("date") == "null":
        return redirect_to(request,
                           "/users/%s/visualizations?date=last week&query=%s" %
                           (username, request.GET.get("query", "")))

    if request.user.is_authenticated():
        user = get_object_or_404(User, username=request.user.username)
        userprof = UserProfile.objects.get(user=user)
        confirmed = userprof.confirmed
        if not confirmed:
            return redirect('/consent')
    else:
        user = None
        userprof = None

    username, follows, profile_user, empty_search_msg, nav_bar = _profile_info(
        user, username)

    get_dict, query, date, sort, filter = _get_query(request)
    logger.info(get_dict)
    logger.info(date)

    get_dict["orderBy"] = "end_time"
    get_dict["direction"] = "hl"
    get_dict["filter"] = ""
    get_dict["page"] = request.GET.get("page", 1)
    get_dict["username"] = profile_user.username
    get_dict["sort"] = "time"

    hist, history_stream = live_stream_query_manager(get_dict, profile_user)

    # stats
    tot_time, item_count = profile_stat_gen(profile_user)

    fav_data = FavData.objects.get(user=profile_user)

    num_history = EyeHistory.objects.filter(user=profile_user).count()

    is_online = online_user(user=profile_user)

    following_count = profile_user.profile.follows.count()
    follower_count = UserProfile.objects.filter(
        follows=profile_user.profile).count()

    today = datetime.now() - timedelta(hours=24)
    day_count = hist.filter(start_time__gt=today
                            ).values('url', 'title'
                                     ).annotate(num_urls=Sum('total_time')
                                                ).order_by('-num_urls')[:3]
    day_domains = hist.filter(
        start_time__gt=today
    ).values('domain'
             ).annotate(num_domains=Sum('total_time')
                        ).order_by('-num_domains')[:5]

    day_chart = {}
    for domain in day_domains:
        day_chart[domain['domain']] = domain['num_domains']

    last_week = today - timedelta(days=7)
    week_count = hist.filter(start_time__gt=last_week).values(
        'url', 'title'
    ).annotate(num_urls=Sum('total_time')
               ).order_by('-num_urls')[:3]
    week_domains = hist.filter(
        start_time__gt=last_week
    ).values('domain'
             ).annotate(num_domains=Sum('total_time')
                        ).order_by('-num_domains')[:5]

    week_chart = {}
    for domain in week_domains:
        week_chart[domain['domain']] = domain['num_domains']

    template_dict = {
        'visualization': True,
        'username': profile_user.username,
        'following_count': following_count,
        'follower_count': follower_count,
        "profile_user": profile_user,
        "history_stream": history_stream,
        "empty_search_msg": empty_search_msg,
        "follows": str(follows),
        "is_online": is_online,
        "num_history": num_history,
        "tot_time": tot_time,
        "item_count": item_count,
        "fav_data": fav_data,
        "query": query,
        "date": date,
        'day_articles': day_count,
        'week_articles': week_count,
        'day_chart': json.dumps(day_chart),
        'week_chart': json.dumps(week_chart),
    }

    return _template_values(
        request,
        page_title="profile history",
        navbar=nav_bar,
        sub_navbar="subnav_data",
        **template_dict)

Example 113

Project: element43 Source File: views.py
def quicklook_tab_systems(request, region_id=10000002, type_id=34):

    """
    Generates the content for the systems tab
    """

    buy_orders = Orders.active.select_related('stastation__id',
                                              'stastation__name',
                                              'mapregion__id',
                                              'mapregion__name',
                                              'mapsolarsystem__security_level').filter(invtype=type_id,
                                                                                       is_bid=True,
                                                                                       mapregion_id=region_id).order_by('-price')
    sell_orders = Orders.active.select_related('stastation__id',
                                               'stastation__name',
                                               'mapregion__id',
                                               'mapregion__name',
                                               'mapsolarsystem__security_level').filter(invtype=type_id,
                                                                                        is_bid=False,
                                                                                        mapregion_id=region_id).order_by('price')

    orders = []
    orders += buy_orders
    orders += sell_orders

    systems = []
    for order in orders:
        if order.mapsolarsystem_id not in systems:
            systems.append(order.mapsolarsystem_id)

    # Gather system-based data for this type
    system_data = []
    for system in systems:
        temp_data = []

        system_ask_prices = np.array([order.price for order in sell_orders if order.mapsolarsystem_id == system])
        system_bid_prices = np.array([order.price for order in buy_orders if order.mapsolarsystem_id == system])

        # Order of array entries: Name, Bid/Ask(Low, High, Average, Median, Standard Deviation, Lots, Volume)
        temp_data.append(MapSolarSystem.objects.get(id=system).name)

        if len(system_ask_prices) > 0:
                # Ask values calculated via numpy
                temp_data.append(np.min(system_ask_prices))
                temp_data.append(np.max(system_ask_prices))
                temp_data.append(round(np.average(system_ask_prices), 2))
                temp_data.append(np.median(system_ask_prices))
                temp_data.append(round(np.std(system_ask_prices), 2))
                temp_data.append(len(system_ask_prices))
                temp_data.append(Orders.active.filter(mapsolarsystem_id=system,
                                                      invtype=type_id,
                                                      is_bid=False).aggregate(Sum('volume_remaining'))['volume_remaining__sum'])
        else:
                # Else there are no orders in this system -> add a bunch of 0s
                temp_data.extend([0, 0, 0, 0, 0, 0, 0])

        if len(system_bid_prices) > 0:
                # Bid values calculated via numpy
                temp_data.append(np.min(system_bid_prices))
                temp_data.append(np.max(system_bid_prices))
                temp_data.append(round(np.average(system_bid_prices), 2))
                temp_data.append(np.median(system_bid_prices))
                temp_data.append(round(np.std(system_bid_prices), 2))
                temp_data.append(len(system_bid_prices))
                temp_data.append(Orders.active.filter(mapsolarsystem_id=system,
                                                      invtype=type_id,
                                                      is_bid=True).aggregate(Sum('volume_remaining'))['volume_remaining__sum'])
        else:
                # Else there are no orders in this system -> add a bunch of 0s
                temp_data.extend([0, 0, 0, 0, 0, 0, 0])

        # Append temp_data to system_data
        system_data.append(temp_data)

    # Sort alphabetically by system name (once, after the loop)
    system_data = sorted(system_data, key=lambda system: system[0])

    # Use all orders for the quicklook and add system_data to the context.
    # We shouldn't need to limit the number of orders displayed here, as they are all in the same region.
    rcontext = RequestContext(request, {'systems': system_data})

    return render_to_response('_quicklook_tab_systems.haml', rcontext)

Example 114

Project: karaage Source File: tasks.py
def _gen_trend_graph(start, end, machine_category,
                     force_overwrite=False):
    """ Total trend graph for machine category. """
    filename = graphs.get_trend_graph_filename(start, end, machine_category)
    csv_filename = os.path.join(GRAPH_ROOT, filename + '.csv')
    png_filename = os.path.join(GRAPH_ROOT, filename + '.png')

    _check_directory_exists(csv_filename)
    _check_directory_exists(png_filename)

    if not settings.GRAPH_DEBUG or force_overwrite:
        if os.path.exists(csv_filename):
            if os.path.exists(png_filename):
                return

    query = CPUJob.objects.filter(
        machine__category=machine_category,
        date__range=(start, end)
    )
    query = query.values('date').annotate(Sum('cpu_usage'))
    query = query.order_by('date')

    t_start = start
    t_end = end

    start_str = start.strftime('%Y-%m-%d')
    end_str = end.strftime('%Y-%m-%d')

    fig, ax = plt.subplots(figsize=(6, 4))
    ax.set_xlim(start, end)
    ax.set_title('%s - %s' % (start_str, end_str))
    ax.set_ylabel("CPU Time (hours)")
    ax.set_xlabel("Date")

    locator = mdates.AutoDateLocator()
    ax.xaxis.set_major_locator(locator)
    ax.xaxis.set_major_formatter(mdates.AutoDateFormatter(locator))
    ax.xaxis.set_minor_locator(mdates.DayLocator())

    data = {}
    x_data = []
    y_data = []

    with open(csv_filename, 'wb') as csv_file:
        csv_writer = csv.writer(csv_file)
        for row in query.iterator():
            csv_writer.writerow([
                row['date'], row['cpu_usage__sum'] / 3600.00
            ])

            date = row['date']

            data[date] = row['cpu_usage__sum']

    start = t_start
    end = t_end
    while start <= end:
        total = 0
        if start in data:
            total = data[start]
        x_data.append(start)
        y_data.append(total / 3600.00)
        start = start + datetime.timedelta(days=1)

    del data

    ax.plot(x_data, y_data)

    del x_data
    del y_data

    fig.autofmt_xdate()
    plt.tight_layout()
    plt.savefig(png_filename)
    plt.close()

Example 115

Project: django-oscar Source File: views.py
    def get_hourly_report(self, hours=24, segments=10):
        """
        Get a report of order revenue split into hourly chunks. A report is
        generated for the last *hours* (default=24) from the current time.
        The report provides ``max_revenue`` of the hourly order revenue sum,
        ``y-range`` as the labeling for the y-axis in a template and
        ``order_total_hourly``, a list of properties for hourly chunks.
        *segments* defines the number of labeling segments used for the y-axis
        when generating the y-axis labels (default=10).
        """
        # Get datetime for 24 hours ago
        time_now = now().replace(minute=0, second=0)
        start_time = time_now - timedelta(hours=hours - 1)

        orders_last_day = Order.objects.filter(date_placed__gt=start_time)

        order_total_hourly = []
        for hour in range(0, hours, 2):
            end_time = start_time + timedelta(hours=2)
            hourly_orders = orders_last_day.filter(date_placed__gt=start_time,
                                                   date_placed__lt=end_time)
            total = hourly_orders.aggregate(
                Sum('total_incl_tax')
            )['total_incl_tax__sum'] or D('0.0')
            order_total_hourly.append({
                'end_time': end_time,
                'total_incl_tax': total
            })
            start_time = end_time

        max_value = max([x['total_incl_tax'] for x in order_total_hourly])
        divisor = 1
        while divisor < max_value / 50:
            divisor *= 10
        max_value = (max_value / divisor).quantize(D('1'), rounding=ROUND_UP)
        max_value *= divisor
        if max_value:
            segment_size = (max_value) / D('100.0')
            for item in order_total_hourly:
                item['percentage'] = int(item['total_incl_tax'] / segment_size)

            y_range = []
            y_axis_steps = max_value / D(str(segments))
            for idx in reversed(range(segments + 1)):
                y_range.append(idx * y_axis_steps)
        else:
            y_range = []
            for item in order_total_hourly:
                item['percentage'] = 0

        ctx = {
            'order_total_hourly': order_total_hourly,
            'max_revenue': max_value,
            'y_range': y_range,
        }
        return ctx

Example 116

Project: kolibri Source File: importcontent.py
Function: transfer
    def _transfer(self, method, channel_id, path=None):

        with using_content_database(channel_id):
            files = File.objects.all()
            total_bytes_to_transfer = files.aggregate(Sum('file_size'))['file_size__sum']

            with self.start_progress(total=total_bytes_to_transfer) as overall_progress_update:

                for f in files:

                    filename = f.get_filename()
                    dest = paths.get_content_storage_file_path(filename)

                    # if the file already exists, add its size to our overall progress, and skip
                    if os.path.isfile(dest) and os.path.getsize(dest) == f.file_size:
                        overall_progress_update(f.file_size)
                        continue

                    # determine where we're downloading/copying from, and create appropriate transfer object
                    if method == DOWNLOAD_METHOD:
                        url = paths.get_content_storage_file_url(filename)
                        filetransfer = transfer.FileDownload(url, dest)
                    elif method == COPY_METHOD:
                        srcpath = paths.get_content_storage_file_path(filename, datafolder=path)
                        filetransfer = transfer.FileCopy(srcpath, dest)

                    with filetransfer:

                        with self.start_progress(total=filetransfer.total_size) as file_dl_progress_update:

                            for chunk in filetransfer:
                                length = len(chunk)
                                overall_progress_update(length)
                                file_dl_progress_update(length)

Example 117

Project: courtlistener Source File: views.py
def get_homepage_stats():
    """Get any stats that are displayed on the homepage and return them as a
    dict
    """
    ten_days_ago = make_aware(datetime.today() - timedelta(days=10), utc)
    alerts_in_last_ten = Stat.objects.filter(
        name__contains='alerts.sent',
        date_logged__gte=ten_days_ago
    ).aggregate(Sum('count'))['count__sum']
    queries_in_last_ten = Stat.objects.filter(
        name='search.results',
        date_logged__gte=ten_days_ago
    ).aggregate(Sum('count'))['count__sum']
    bulk_in_last_ten = Stat.objects.filter(
        name__contains='bulk_data',
        date_logged__gte=ten_days_ago
    ).aggregate(Sum('count'))['count__sum']
    r = redis.StrictRedis(
        host=settings.REDIS_HOST,
        port=settings.REDIS_PORT,
        db=settings.REDIS_DATABASES['STATS'],
    )
    last_ten_days = ['api:v3.d:%s.count' %
                     (date.today() - timedelta(days=x)).isoformat()
                     for x in range(0, 10)]
    api_in_last_ten = sum(
        [int(result) for result in
         r.mget(*last_ten_days) if result is not None]
    )
    users_in_last_ten = User.objects.filter(
        date_joined__gte=ten_days_ago
    ).count()
    opinions_in_last_ten = Opinion.objects.filter(
        date_created__gte=ten_days_ago
    ).count()
    oral_arguments_in_last_ten = Audio.objects.filter(
        date_created__gte=ten_days_ago
    ).count()
    days_of_oa = naturalduration(
        Audio.objects.aggregate(
            Sum('duration')
        )['duration__sum'],
        as_dict=True,
    )['d']
    viz_in_last_ten = SCOTUSMap.objects.filter(
        date_published__gte=ten_days_ago,
        published=True,
    ).count()
    visualizations = SCOTUSMap.objects.filter(
        published=True,
        deleted=False,
    ).annotate(
        Count('clusters'),
    ).filter(
        # Ensures that we only show good stuff on homepage
        clusters__count__gt=10,
    ).order_by(
        '-date_published',
        '-date_modified',
        '-date_created',
    )[:1]
    return {
        'alerts_in_last_ten': alerts_in_last_ten,
        'queries_in_last_ten': queries_in_last_ten,
        'opinions_in_last_ten': opinions_in_last_ten,
        'oral_arguments_in_last_ten': oral_arguments_in_last_ten,
        'bulk_in_last_ten': bulk_in_last_ten,
        'api_in_last_ten': api_in_last_ten,
        'users_in_last_ten': users_in_last_ten,
        'days_of_oa': days_of_oa,
        'viz_in_last_ten': viz_in_last_ten,
        'visualizations': visualizations,
        'private': False,  # VERY IMPORTANT!
    }

Example 118

Project: evething Source File: wallet_journal.py
@login_required
def wallet_journal(request):
    """Wallet journal"""
    # Get profile
    profile = request.user.profile

    characters = Character.objects.filter(
        apikeys__user=request.user,
        apikeys__valid=True,
        apikeys__key_type__in=[APIKey.ACCOUNT_TYPE, APIKey.CHARACTER_TYPE]
    ).distinct()
    character_ids = [c.id for c in characters]

    corporation_ids = Corporation.get_ids_with_access(request.user, APIKey.CORP_WALLET_JOURNAL_MASK)
    corporations = Corporation.objects.filter(pk__in=corporation_ids)

    # Parse filters and apply magic
    filters, journal_ids, days = _journal_queryset(request, character_ids, corporation_ids)

    # Calculate a total value
    total_amount = journal_ids.aggregate(t=Sum('amount'))['t']

    # Get only the ids; joining the rest is unnecessary at this point
    journal_ids = journal_ids.values_list('pk', flat=True)

    # Create a new paginator
    paginator = Paginator(journal_ids, profile.entries_per_page)

    # If page request is out of range, deliver last page of results
    try:
        paginated = paginator.page(request.GET.get('page'))
    except PageNotAnInteger:
        # Page is not an integer, use first page
        paginated = paginator.page(1)
    except EmptyPage:
        # Page is out of range, deliver last page
        paginated = paginator.page(paginator.num_pages)

    # Actually execute the query to avoid a nested subquery
    paginated_ids = list(paginated.object_list.all())
    entries = JournalEntry.objects.filter(pk__in=paginated_ids).select_related('character', 'corp_wallet__corporation')

    # Do page number things
    hp = paginated.has_previous()
    hn = paginated.has_next()
    prev = []
    next = []

    if hp:
        # prev and next, use 1 of each
        if hn:
            prev.append(paginated.previous_page_number())
            next.append(paginated.next_page_number())
        # no next, add up to 2 previous links
        else:
            for i in range(paginated.number - 1, 0, -1)[:2]:
                prev.insert(0, i)
    else:
        # no prev, add up to 2 next links
        for i in range(paginated.number + 1, paginator.num_pages)[:2]:
            next.append(i)

    # Do some stuff with entries
    item_ids = set()
    owner_ids = set()
    reftype_ids = set()
    station_ids = set()

    for entry in entries:
        owner_ids.add(entry.owner1_id)
        owner_ids.add(entry.owner2_id)
        reftype_ids.add(entry.ref_type_id)

        # Insurance
        if entry.ref_type_id == 19:
            item_ids.add(int(entry.arg_name))
        # Clone Transfer
        elif entry.ref_type_id == 52:
            station_ids.add(int(entry.arg_id))
        # Bounty Prizes
        elif entry.ref_type_id == 85:
            for thing in entry.reason.split(','):
                thing = thing.strip()
                if ':' in thing:
                    item_ids.add(int(thing.split(':')[0]))

    char_map = Character.objects.in_bulk(owner_ids)
    corp_map = Corporation.objects.in_bulk(owner_ids)
    alliance_map = Alliance.objects.in_bulk(owner_ids)
    item_map = Item.objects.in_bulk(item_ids)
    rt_map = RefType.objects.in_bulk(reftype_ids)
    station_map = Station.objects.in_bulk(station_ids)

    for entry in entries:
        # Owner 1
        if entry.owner1_id in character_ids:
            entry.z_owner1_mine = True

        entry.z_owner1_char = char_map.get(entry.owner1_id)
        entry.z_owner1_corp = corp_map.get(entry.owner1_id)
        entry.z_owner1_alliance = alliance_map.get(entry.owner1_id)

        # Owner 2
        if entry.owner2_id in character_ids:
            entry.z_owner2_mine = True

        entry.z_owner2_char = char_map.get(entry.owner2_id)
        entry.z_owner2_corp = corp_map.get(entry.owner2_id)
        entry.z_owner2_alliance = alliance_map.get(entry.owner2_id)

        # RefType
        entry.z_reftype = rt_map.get(entry.ref_type_id)

        # Inheritance
        if entry.ref_type_id == 9:
            entry.z_description = entry.reason
        # Player Donation/Corporation Account Withdrawal
        elif entry.ref_type_id in (10, 37) and entry.reason != '':
            entry.z_description = '"%s"' % (entry.get_unescaped_reason()[5:].strip())
        # Insurance, arg_name is the item_id of the ship that exploded
        elif entry.ref_type_id == 19:
            if entry.amount >= 0:
                item = item_map.get(int(entry.arg_name))
                if item:
                    entry.z_description = 'Insurance payment for loss of a %s' % item.name
            else:
                entry.z_description = 'Insurance purchased (RefID: %s)' % (entry.arg_name[1:])
        # Clone Transfer, arg_name is the name of the station you're going to
        elif entry.ref_type_id == 52:
            station = station_map.get(entry.arg_id)
            if station:
                entry.z_description = 'Clone transfer to %s' % (station.short_name)
        # Bounty Prizes
        elif entry.ref_type_id == 85:
            killed = []

            for thing in entry.reason.split(','):
                thing = thing.strip()
                if ':' in thing:
                    item_id, count = thing.split(':')
                    item = item_map.get(int(item_id))
                    if item:
                        killed.append((item.name, '%sx %s' % (count, item.name)))
                elif thing == '...':
                    killed.append(('ZZZ', '... (list truncated)'))

            # Sort killed
            killed = [k[1] for k in sorted(killed)]

            entry.z_description = 'Bounty prizes for killing pirates in %s' % (entry.arg_name.strip())
            entry.z_hover = '||'.join(killed)

        # Filter links
        entry.z_reftype_filter = build_filter(filters, 'reftype', 'eq', entry.ref_type_id)
        entry.z_owner1_filter = build_filter(filters, 'owners', 'eq', entry.z_owner1_char or entry.z_owner1_corp or entry.z_owner1_alliance)
        entry.z_owner2_filter = build_filter(filters, 'owners', 'eq', entry.z_owner2_char or entry.z_owner2_corp or entry.z_owner2_alliance)

    # Render template
    return render_page(
        'thing/wallet_journal.html',
        {
            'json_data': _json_data(characters, corporations, filters),
            'total_amount': total_amount,
            'days': days,
            'entries': entries,
            'paginated': paginated,
            'next': next,
            'prev': prev,
            'ignoreself': 'ignoreself' in request.GET,
            'group_by': {},
        },
        request,
        character_ids,
        corporation_ids,
    )
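
Note the aggregate alias above: aggregate(t=Sum('amount')) names the result key 't' instead of the default 'amount__sum', which keeps the lookup short. A sketch of the same idiom with the JournalEntry model:

from django.db.models import Sum

total_amount = JournalEntry.objects.aggregate(t=Sum('amount'))['t']  # None if no rows match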

Example 119

Project: commcare-hq Source File: tasks.py
@periodic_task(run_every=crontab(minute=0, hour=9), queue='background_queue', acks_late=True)
def send_overdue_reminders(today=None):
    from corehq.apps.domain.views import DomainSubscriptionView
    from corehq.apps.domain.views import DomainBillingStatementsView

    today = today or datetime.date.today()
    invoices = Invoice.objects.filter(is_hidden=False,
                                      subscription__service_type=SubscriptionType.PRODUCT,
                                      date_paid__isnull=True,
                                      date_due__lt=today)\
        .exclude(subscription__plan_version__plan__edition=SoftwarePlanEdition.ENTERPRISE)\
        .order_by('date_due')\
        .select_related('subscription__subscriber')

    domains = set()
    for invoice in invoices:
        if invoice.get_domain() not in domains:
            domains.add(invoice.get_domain())
            total = Invoice.objects.filter(is_hidden=False,
                                           subscription__subscriber__domain=invoice.get_domain())\
                .aggregate(Sum('balance'))['balance__sum']
            if total >= 100:
                domain = Domain.get_by_name(invoice.get_domain())
                current_subscription = Subscription.get_subscribed_plan_by_domain(domain)[1]
                if not current_subscription.skip_auto_downgrade:
                    days_ago = (today - invoice.date_due).days
                    context = {
                        'domain': invoice.get_domain(),
                        'total': total,
                        'subscription_url': absolute_reverse(DomainSubscriptionView.urlname,
                                                             args=[invoice.get_domain()]),
                        'statements_url': absolute_reverse(DomainBillingStatementsView.urlname,
                                                           args=[invoice.get_domain()]),
                        'date_60': invoice.date_due + datetime.timedelta(days=60),
                        'contact_email': settings.INVOICING_CONTACT_EMAIL
                    }
                    if days_ago == 61:
                        _downgrade_domain(current_subscription)
                        _send_downgrade_notice(invoice, context)
                    elif days_ago == 58:
                        _send_downgrade_warning(invoice, context)
                    elif days_ago == 30:
                        _send_overdue_notice(invoice, context)
                    elif days_ago == 1:
                        _create_overdue_notification(invoice, context)

Example 120

Project: bennedetto Source File: models.py
    def total_expense(self):
        expr = models.Sum(self.total_by)
        key = '{}__sum'.format(self.total_by)
        return self.filter(amount__lt=0).aggregate(expr)[key] or 0
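
Because Sum() takes the field name as a string, the field being totalled can be chosen at runtime, as total_by is here, and the result key derived the same way. A sketch with a hypothetical Transaction model and field name:

from django.db.models import Sum

field = 'amount'  # hypothetical field name, chosen at runtime
total = Transaction.objects.aggregate(Sum(field))['{}__sum'.format(field)] or 0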

Example 121

Project: synnefo Source File: stats.py
Function: get_public_stats
def get_public_stats():
    # VirtualMachines
    vm_objects = VirtualMachine.objects
    servers = vm_objects.values("deleted", "operstate")\
                        .annotate(count=Count("id"),
                                  cpu=Sum("flavor__cpu"),
                                  ram=Sum("flavor__ram"),
                                  disk=Sum("flavor__disk"))
    zero_stats = {"count": 0, "cpu": 0, "ram": 0, "disk": 0}
    server_stats = {}
    for state in VirtualMachine.RSAPI_STATE_FROM_OPER_STATE.values():
        server_stats[state] = copy(zero_stats)

    for stats in servers:
        deleted = stats.get("deleted")
        operstate = stats.get("operstate")
        state = VirtualMachine.RSAPI_STATE_FROM_OPER_STATE.get(operstate)
        if deleted:
            for key in zero_stats.keys():
                server_stats["DELETED"][key] += (stats.get(key, 0) or 0)
        elif state:
            for key in zero_stats.keys():
                server_stats[state][key] += (stats.get(key, 0) or 0)

    # Networks
    net_objects = Network.objects
    networks = net_objects.values("deleted", "state")\
                          .annotate(count=Count("id"))
    zero_stats = {"count": 0}
    network_stats = {}
    for state in Network.RSAPI_STATE_FROM_OPER_STATE.values():
        network_stats[state] = copy(zero_stats)

    for stats in networks:
        deleted = stats.get("deleted")
        state = stats.get("state")
        state = Network.RSAPI_STATE_FROM_OPER_STATE.get(state)
        if deleted:
            for key in zero_stats.keys():
                network_stats["DELETED"][key] += stats.get(key, 0)
        elif state:
            for key in zero_stats.keys():
                network_stats[state][key] += stats.get(key, 0)

    statistics = {"servers": server_stats,
                  "networks": network_stats}
    return statistics
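
The values(...).annotate(...) chain above is the ORM's GROUP BY: each distinct combination of the values() fields becomes one result row, with Count() and Sum() computed per group. A minimal sketch of the same grouping:

from django.db.models import Count, Sum

# One dict per distinct operstate, e.g. {'operstate': 'STARTED', 'count': 3, 'cpu': 12}
rows = VirtualMachine.objects.values('operstate').annotate(
    count=Count('id'),
    cpu=Sum('flavor__cpu'),
)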

Example 122

Project: synnefo Source File: util.py
def get_db_holdings(user=None, project=None, for_users=True):
    """Get per user or per project holdings from Cyclades DB."""

    if for_users is False and user is not None:
        raise ValueError(
            "Computing per project holdings; setting a user is meaningless.")
    holdings = QuotaDict()

    vms = VirtualMachine.objects.filter(deleted=False)
    networks = Network.objects.filter(deleted=False)
    floating_ips = IPAddress.objects.filter(deleted=False, floating_ip=True)
    volumes = Volume.objects.filter(deleted=False)

    if for_users and user is not None:
        vms = vms.filter(userid=user)
        networks = networks.filter(userid=user)
        floating_ips = floating_ips.filter(userid=user)
        volumes = volumes.filter(userid=user)

    if project is not None:
        vms = vms.filter(project=project)
        networks = networks.filter(project=project)
        floating_ips = floating_ips.filter(project=project)
        volumes = volumes.filter(project=project)

    values = ["project"]
    if for_users:
        values.append("userid")

    vm_resources = vms.values(*values)\
        .annotate(num=Count("id"),
                  total_ram=Sum("flavor__ram"),
                  total_cpu=Sum("flavor__cpu"))
    for vm_res in vm_resources.iterator():
        project = vm_res['project']
        res = {"cyclades.vm": vm_res["num"],
               "cyclades.total_cpu": vm_res["total_cpu"],
               "cyclades.total_ram": vm_res["total_ram"] * MiB}
        pholdings = holdings[vm_res['userid']] if for_users else holdings
        pholdings[project] = res

    vm_active_resources = vms.values(*values)\
        .filter(Q(operstate="STARTED") | Q(operstate="BUILD") |
                Q(operstate="ERROR"))\
        .annotate(ram=Sum("flavor__ram"),
                  cpu=Sum("flavor__cpu"))

    for vm_res in vm_active_resources.iterator():
        project = vm_res['project']
        pholdings = holdings[vm_res['userid']] if for_users else holdings
        pholdings[project]["cyclades.cpu"] = vm_res["cpu"]
        pholdings[project]["cyclades.ram"] = vm_res["ram"] * MiB

    # Get disk resource
    disk_resources = volumes.values(*values).annotate(Sum("size"))
    for disk_res in disk_resources.iterator():
        project = disk_res['project']
        pholdings = (holdings[disk_res['userid']]
                     if for_users else holdings)
        pholdings[project]["cyclades.disk"] = disk_res["size__sum"] * GiB

    # Get resources related with networks
    net_resources = networks.values(*values)\
                            .annotate(num=Count("id"))

    for net_res in net_resources.iterator():
        project = net_res['project']
        if project is None:
            continue
        pholdings = holdings[net_res['userid']] if for_users else holdings
        pholdings[project]["cyclades.network.private"] = net_res["num"]

    floating_ips_resources = floating_ips.values(*values)\
                                         .annotate(num=Count("id"))

    for floating_ip_res in floating_ips_resources.iterator():
        project = floating_ip_res["project"]
        pholdings = (holdings[floating_ip_res["userid"]]
                     if for_users else holdings)
        pholdings[project]["cyclades.floating_ip"] = \
            floating_ip_res["num"]

    return holdings

Example 123

Project: django Source File: tests.py
    def test_values_annotation_with_expression(self):
        # ensure the F() is promoted to the group by clause
        qs = Author.objects.values('name').annotate(another_age=Sum('age') + F('age'))
        a = qs.get(name="Adrian Holovaty")
        self.assertEqual(a['another_age'], 68)

        qs = qs.annotate(friend_count=Count('friends'))
        a = qs.get(name="Adrian Holovaty")
        self.assertEqual(a['friend_count'], 2)

        qs = qs.annotate(combined_age=Sum('age') + F('friends__age')).filter(
            name="Adrian Holovaty").order_by('-combined_age')
        self.assertEqual(
            list(qs), [
                {
                    "name": 'Adrian Holovaty',
                    "another_age": 68,
                    "friend_count": 1,
                    "combined_age": 69
                },
                {
                    "name": 'Adrian Holovaty',
                    "another_age": 68,
                    "friend_count": 1,
                    "combined_age": 63
                }
            ]
        )

        vals = qs.values('name', 'combined_age')
        self.assertEqual(
            list(vals), [
                {
                    "name": 'Adrian Holovaty',
                    "combined_age": 69
                },
                {
                    "name": 'Adrian Holovaty',
                    "combined_age": 63
                }
            ]
        )
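
As the test demonstrates, aggregates compose with F() expressions, and Django promotes the referenced field into the GROUP BY clause so the combined expression is valid SQL. A sketch of the core idiom:

from django.db.models import F, Sum

qs = Author.objects.values('name').annotate(another_age=Sum('age') + F('age'))
row = qs.first()  # e.g. {'name': 'Adrian Holovaty', 'another_age': 68}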

Example 124

Project: timtec Source File: models.py
    @property
    def count_votes(self):
        return self.votes.aggregate(models.Sum('value'))['value__sum'] or 0

Example 125

Project: element43 Source File: station.py
def import_system(request, station_id=60003760, system_id=30000142):

    """
    Generates a list like http://goonmetrics.com/importing/
    Pattern: System -> Station
    """

    # Get system, station and markup
    system = MapSolarSystem.objects.get(id=system_id)
    station = StaStation.objects.get(id=station_id)

    # get the path to destination, assume trying for highsec route
    path = find_path(system_id, station.solar_system_id)
    numjumps = len(path) - 1 # don't count the start system

    # Mapping: (invTypeID, invTypeName, foreign_ask, local_bid, markup, invTypeID)
    markup = import_markup(station_id, 0, system_id, 0)

    # Get last week for history query
    last_week = pytz.utc.localize(
        datetime.datetime.utcnow() - datetime.timedelta(days=7))
    data = []

    for point in markup:
        # Add new values to the dict and, if there's a weekly volume, append it to the list
        new_values = {
            # Get local weekly volume for that item
            'weekly_volume': OrderHistory.objects.filter(mapregion_id=station.region.id,
                                                                 invtype_id=point['id'],
                                                                 date__gte=last_week)
            .aggregate(Sum("quantity"))['quantity__sum'],

            # Get filtered local bid qty
            'bid_qty_filtered': Orders.active.filter(stastation_id=station_id,
                                                      invtype_id=point['id'], is_bid=True,
                                                      minimum_volume=1,
                                                      price__gte=(point['local_bid'] - (point['local_bid'] * 0.01)))
            .aggregate(Sum("volume_remaining"))['volume_remaining__sum'],

            # Get filtered ask qty of the other system
            'ask_qty_filtered': Orders.active.filter(mapsolarsystem_id=system_id,
                                                      invtype_id=point['id'], is_bid=False,
                                                      minimum_volume=1,
                                                      price__lte=(point['foreign_ask'] + (point['foreign_ask'] * 0.01)))
            .aggregate(Sum("volume_remaining"))['volume_remaining__sum']}
        point.update(new_values)

        # Calculate potential profit ((local_bid - foreign_ask) * weekly_volume)
        if point['weekly_volume'] is not None:
            point['potential_profit'] = ((point['local_bid'] - point['foreign_ask']) * point['weekly_volume'])
            data.append(point)

    data.sort(key=itemgetter('potential_profit'), reverse=True)

    rcontext = RequestContext(request, {'system': system, 'markup': data,
                                        'path': path, 'jumps': numjumps})

    return render_to_response('station/_import_system.haml', rcontext)

Example 126

Project: public-contracts Source File: models.py
    def compute_data(self):
        """
        Computes the data of this entity from the existing relations.
        """
        logger.info('computing data of entity %d', self.base_id)

        # if data does not exist, we create it.
        try:
            self.data
        except EntityData.DoesNotExist:
            self.data = EntityData(entity=self)

        c_set = self.contract_set.aggregate(Sum('price'), Max('signing_date'))
        c_made = self.contracts_made.aggregate(Sum('price'), Max('signing_date'))

        def max_dates(*dates):
            """
            Returns the max of all non-None dates and None if all dates are None.
            """
            dates = list(dates)
            none = datetime.date(1900, 1, 1)

            for index, date in enumerate(dates):
                if date is None:
                    dates[index] = none
            date = max(dates)
            if date == none:
                date = None
            return date

        # update the total earnings and total expenses.
        self.data.total_earned = c_set['price__sum'] or 0
        self.data.total_expended = c_made['price__sum'] or 0

        self.data.last_activity = max_dates(c_set['signing_date__max'],
                                            c_made['signing_date__max'])

        self.data.is_updated = True

        # finish
        self.data.save()

        # update list of contracts on cache
        self.get_contracts_ids(flush_cache=True)
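
Note that aggregate() accepts several aggregates at once, as with Sum('price') and Max('signing_date') above, computing them in a single query and returning one key per aggregate. A sketch with a hypothetical Contract model:

from django.db.models import Max, Sum

totals = Contract.objects.aggregate(Sum('price'), Max('signing_date'))
earned = totals['price__sum'] or 0
last_activity = totals['signing_date__max']  # None if there are no contracts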

Example 127

Project: eyebrowse-server Source File: views.py
@render_to('stats/profile_data.html')
def profile_data(request, username=None):

    if request.user.is_authenticated():
        user = get_object_or_404(User, username=request.user.username)
        userprof = UserProfile.objects.get(user=user)
        confirmed = userprof.confirmed
        if not confirmed:
            return redirect('/consent')
    else:
        user = None
        userprof = None

    """
        Own profile page
    """
    username, follows, profile_user, empty_search_msg, nav_bar = _profile_info(
        user, username)

    get_dict, query, date, sort, filter = _get_query(request)

    get_dict["orderBy"] = "end_time"
    get_dict["direction"] = "hl"
    get_dict["filter"] = ""
    get_dict["page"] = request.GET.get("page", 1)
    get_dict["username"] = profile_user.username
    get_dict["sort"] = "time"

    hist, history_stream = live_stream_query_manager(get_dict, request.user, empty_search_msg=empty_search_msg)

    # stats
    tot_time, item_count = profile_stat_gen(profile_user)

    fav_data = FavData.objects.get(user=profile_user)

    num_history = EyeHistory.objects.filter(user=profile_user).count()

    is_online = online_user(user=profile_user)

    following_count = profile_user.profile.follows.count()
    follower_count = UserProfile.objects.filter(
        follows=profile_user.profile).count()

    today = datetime.now() - timedelta(hours=24)

    day_count = hist.filter(
        start_time__gt=today
    ).values('url', 'title').annotate(
        num_urls=Sum('total_time')
    ).order_by('-num_urls')[:3]

    day_domains = hist.filter(
        start_time__gt=today
    ).values('domain').annotate(
        num_domains=Sum('total_time')
    ).order_by('-num_domains')[:5]

    day_chart = {}
    for domain in day_domains:
        day_chart[domain['domain']] = domain['num_domains']

    last_week = today - timedelta(days=7)

    week_count = hist.filter(
        start_time__gt=last_week
    ).values('url', 'title').annotate(
        num_urls=Sum('total_time')
    ).order_by('-num_urls')[:3]

    week_domains = hist.filter(
        start_time__gt=last_week
    ).values('domain').annotate(
        num_domains=Sum('total_time')
    ).order_by('-num_domains')[:5]

    week_chart = {}
    for domain in week_domains:
        week_chart[domain['domain']] = domain['num_domains']

    template_dict = {
        'username': profile_user.username,
        'following_count': following_count,
        'follower_count': follower_count,
        "profile_user": profile_user,
        "history_stream": history_stream,
        "empty_search_msg": empty_search_msg,
        "follows": str(follows),
        "is_online": is_online,
        "num_history": num_history,
        "tot_time": tot_time,
        "item_count": item_count,
        "fav_data": fav_data,
        "query": query,
        "date": date,
        'day_articles': day_count,
        'week_articles': week_count,
        'day_chart': json.dumps(day_chart),
        'week_chart': json.dumps(week_chart),
    }

    return _template_values(request,
                            page_title="profile history",
                            navbar=nav_bar,
                            sub_navbar="subnav_data",
                            **template_dict)

Example 128

Project: element43 Source File: views.py
def quicklook_tab_regions(request, type_id=34):

    """
    Generates the content for the regions tab
    """

    # Get the item type
    type_object = InvType.objects.get(id=type_id)

    # Fetch all buy/sell orders of that type from DB
    buy_orders = Orders.active.filter(invtype=type_id, is_bid=True, is_active=True).order_by('-price')
    sell_orders = Orders.active.filter(invtype=type_id, is_bid=False, is_active=True).order_by('price')

    # Make list with all orders
    orders = []
    orders += buy_orders
    orders += sell_orders

    # Get region IDs of regions with orders for this type
    regions = []
    for order in orders:
        if order.mapregion_id not in regions:
            regions.append(order.mapregion_id)

    # Gather region-based data for this type
    region_data = []
    for region in regions:
        # Temporary array for this region - will later be appended to region_data
        temp_data = []

        # Get all the prices of this region into numpy arrays for processing later on.
        # Asks come from the sell orders and bids from the buy orders.
        region_ask_prices = np.array([order.price for order in sell_orders if order.mapregion_id == region])
        region_bid_prices = np.array([order.price for order in buy_orders if order.mapregion_id == region])

        # Order of array entries: name, then per side (ask, bid): low, high, average, median, standard deviation, lots, volume, region id
        temp_data.append(MapRegion.objects.get(id=region).name)

        if len(region_ask_prices) > 0:
            # Ask values calculated via numpy
            temp_data.append(np.min(region_ask_prices))
            temp_data.append(np.max(region_ask_prices))
            temp_data.append(round(np.average(region_ask_prices), 2))
            temp_data.append(np.median(region_ask_prices))
            temp_data.append(round(np.std(region_ask_prices), 2))
            temp_data.append(len(region_ask_prices))
            temp_data.append(Orders.active.filter(mapregion_id=region,
                                                  invtype=type_id,
                                                  is_bid=False).aggregate(Sum('volume_remaining'))['volume_remaining__sum'])
            temp_data.append(region)
        else:
            # Else there are no orders in this region -> add a bunch of 0s
            temp_data.extend([0, 0, 0, 0, 0, 0, 0])
            temp_data.append(region)

        if len(region_bid_prices) > 0:
            # Bid values calculated via numpy
            temp_data.append(np.min(region_bid_prices))
            temp_data.append(np.max(region_bid_prices))
            temp_data.append(round(np.average(region_bid_prices), 2))
            temp_data.append(np.median(region_bid_prices))
            temp_data.append(round(np.std(region_bid_prices), 2))
            temp_data.append(len(region_bid_prices))
            temp_data.append(Orders.active.filter(mapregion_id=region,
                                                  invtype=type_id,
                                                  is_bid=True).aggregate(Sum('volume_remaining'))['volume_remaining__sum'])
            temp_data.append(region)
        else:
            # Else there are no orders in this region -> add a bunch of 0s
            temp_data.extend([0, 0, 0, 0, 0, 0, 0])
            temp_data.append(region)

        # Append temp_data to region_data
        region_data.append(temp_data)

    # Sort alphabetically by region name
    region_data = sorted(region_data, key=lambda region: region[0])

    rcontext = RequestContext(request, {'type': type_object,
                                        'regions': region_data})

    return render_to_response('_quicklook_tab_regions.haml', rcontext)
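
Calling aggregate() inside the per-region loop issues one query per region. The same volumes can usually be fetched in a single grouped query; a sketch of that alternative, assuming the Orders model and its active manager from above:

from django.db.models import Sum

# One row per region instead of one aggregate() query per region.
volumes = Orders.active.filter(invtype=type_id, is_bid=False).values(
    'mapregion_id'
).annotate(volume=Sum('volume_remaining'))
volume_by_region = {row['mapregion_id']: row['volume'] for row in volumes}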

Example 129

Project: django-bitcoin Source File: CheckDbIntegrity.py
    def handle_noargs(self, **options):
        # BitcoinAddress.objects.filter(active=True)
        bitcoinaddress_sum = BitcoinAddress.objects.filter(active=True)\
            .aggregate(Sum('least_received_confirmed'))['least_received_confirmed__sum'] or Decimal(0)
        print "Total received, sum", bitcoinaddress_sum
        transaction_wallets_sum = WalletTransaction.objects.filter(from_wallet__id__gt=0, to_wallet__id__gt=0)\
            .aggregate(Sum('amount'))['amount__sum'] or Decimal(0)
        print "Total transactions, sum", transaction_wallets_sum
        transaction_out_sum = WalletTransaction.objects.filter(from_wallet__id__gt=0)\
            .exclude(to_bitcoinaddress="")\
            .aggregate(Sum('amount'))['amount__sum'] or Decimal(0)
        print "Total outgoing, sum", transaction_out_sum
        # for x in WalletTransaction.objects.filter(from_wallet__id__gt=0, to_wallet__isnull=True, to_bitcoinaddress=""):
        # 	print x.amount, x.created_at
        fee_sum = WalletTransaction.objects.filter(from_wallet__id__gt=0, to_wallet__isnull=True, to_bitcoinaddress="")\
            .aggregate(Sum('amount'))['amount__sum'] or Decimal(0)
        print "Fees, sum", fee_sum
        print "DB balance", (bitcoinaddress_sum - transaction_out_sum - fee_sum)
        print "----"
        bitcoind_balance = bitcoind.bitcoind_api.getbalance()
        print "Bitcoind balance", bitcoind_balance
        print "----"
        print "Wallet quick check"
        total_sum = Decimal(0)
        for w in Wallet.objects.filter(last_balance__lt=0):
            bal = w.total_balance()
            if bal < 0:
                # print w.id, bal
                total_sum += bal
        print "Negatives:", Wallet.objects.filter(last_balance__lt=0).count(), "Amount:", total_sum
        print "Migration check"
        tot_received = WalletTransaction.objects.filter(from_wallet=None).aggregate(Sum('amount'))['amount__sum'] or Decimal(0)
        tot_received_bitcoinaddress = BitcoinAddress.objects.filter(migrated_to_transactions=True)\
            .aggregate(Sum('least_received_confirmed'))['least_received_confirmed__sum'] or Decimal(0)
        tot_received_unmigrated = BitcoinAddress.objects.filter(migrated_to_transactions=False)\
            .aggregate(Sum('least_received_confirmed'))['least_received_confirmed__sum'] or Decimal(0)
        if tot_received != tot_received_bitcoinaddress:
            raise Exception("wrong total receive amount! "+str(tot_received)+", "+str(tot_received_bitcoinaddress))
        print "Total " + str(tot_received) + " BTC deposits migrated, unmigrated " + str(tot_received_unmigrated) + " BTC"
        print "Migration check #2"
        dts = DepositTransaction.objects.filter(address__migrated_to_transactions=False).exclude(transaction=None)
        if dts.count() > 0:
            print "Illegal transaction!", dts
        if WalletTransaction.objects.filter(from_wallet=None, deposit_address=None).count() > 0:
            print "Illegal deposit transactions!"
        print "Wallet check"
        for w in Wallet.objects.filter(last_balance__gt=0):
            lb = w.last_balance
            tb_sql = w.total_balance_sql()
            tb = w.total_balance()
            if lb != tb or tb != tb_sql:
                print "Wallet balance error!", w.id, lb, tb_sql, tb
                print w.sent_transactions.all().count()
                print w.received_transactions.all().count()
                print w.sent_transactions.all().aggregate(Max('created_at'))['created_at__max']
                print w.received_transactions.all().aggregate(Max('created_at'))['created_at__max']
                # Wallet.objects.filter(id=w.id).update(last_balance=w.total_balance_sql())
        # print w.created_at, w.sent_transactions.all(), w.received_transactions.all()
            # if random.random() < 0.001:
            #     sleep(1)
        print "Address check"
        for ba in BitcoinAddress.objects.filter(least_received_confirmed__gt=0, migrated_to_transactions=True):
            dts = DepositTransaction.objects.filter(address=ba, wallet=ba.wallet)
            s = dts.aggregate(Sum('amount'))['amount__sum'] or Decimal(0)
            if s != ba.least_received:
                print "DepositTransaction error", ba.address, ba.least_received, s
        print "BitcoinAddress check"
        for ba in BitcoinAddress.objects.filter(migrated_to_transactions=True):
            dts = ba.deposittransaction_set.filter(address=ba, confirmations__gte=settings.BITCOIN_MINIMUM_CONFIRMATIONS)
            deposit_sum = dts.aggregate(Sum('amount'))['amount__sum'] or Decimal(0)
            wt_sum = WalletTransaction.objects.filter(deposit_address=ba).aggregate(Sum('amount'))['amount__sum'] or Decimal(0)
            if wt_sum != deposit_sum or ba.least_received_confirmed != deposit_sum:
                print "Bitcoinaddress integrity error!", ba.address, deposit_sum, wt_sum, ba.least_received_confirmed

Example 130

Project: django-bmf Source File: tasks.py
def _calc_account_balance(pk):
    logger.debug('Calc account balance for account #%s' % pk)
    account_cls = apps.get_model(settings.CONTRIB_ACCOUNT)
    transaction_cls = apps.get_model(settings.CONTRIB_TRANSACTIONITEM)
    account = account_cls.objects.get(pk=pk)
    pks = list(account_cls.objects.filter(parents=pk).values_list('pk', flat=True))
    pks += [pk]

    credit = transaction_cls.objects.filter(
        account_id__in=pks,
        draft=False,
        credit=True,
    ).aggregate(Sum('amount'))

    debit = transaction_cls.objects.filter(
        account_id__in=pks,
        draft=False,
        credit=False,
    ).aggregate(Sum('amount'))

    value_credit = credit['amount__sum'] or Decimal(0)
    value_debit = debit['amount__sum'] or Decimal(0)

    if account.credit_increase():
        account.balance = value_debit - value_credit
    else:
        account.balance = value_credit - value_debit

    account.save(update_parents=False)

    for obj in account.parents.all():
        _calc_account_balance(obj.pk)
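
The credit/debit split above costs two queries. On Django 2.0 and later the same numbers can be computed in one query with the filter argument to Sum(); a sketch reusing the names from the example (not available on the older versions this code targets):

from django.db.models import Q, Sum

totals = transaction_cls.objects.filter(account_id__in=pks, draft=False).aggregate(
    credit=Sum('amount', filter=Q(credit=True)),   # filter= requires Django 2.0+
    debit=Sum('amount', filter=Q(credit=False)),
)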

Example 131

Project: karaage Source File: tasks.py
def _gen_project_trend_graph(project,
                             start,
                             end,
                             machine_category,
                             force_overwrite=False):
    """Generates a bar graph for a project

    Keyword arguments:
    project -- Project
    start -- start date
    end -- end date
    machine_category -- MachineCategory object

    """
    filename = graphs.get_project_trend_graph_filename(
        project, start, end, machine_category)
    csv_filename = os.path.join(GRAPH_ROOT, filename + '.csv')
    png_filename = os.path.join(GRAPH_ROOT, filename + '.png')

    _check_directory_exists(csv_filename)
    _check_directory_exists(png_filename)

    # Skip regeneration only when neither debugging nor forcing an overwrite,
    # and both output files already exist.
    if not (settings.GRAPH_DEBUG or force_overwrite):
        if os.path.exists(csv_filename):
            if os.path.exists(png_filename):
                return

    query = CPUJob.objects.filter(
        project=project,
        machine__category=machine_category,
        date__range=(start, end)
    )
    query = query.values('account', 'account__username', 'date')
    query = query.annotate(Sum('cpu_usage')).order_by('account', 'date')

    t_start = start
    t_end = end

    start_str = start.strftime('%Y-%m-%d')
    end_str = end.strftime('%Y-%m-%d')

    fig, ax = plt.subplots(figsize=(6, 4))
    ax.set_xlim(start, end + datetime.timedelta(days=1))
    ax.set_title('%s   %s - %s' % (project.pid, start_str, end_str))
    ax.set_ylabel("CPU Time (hours)")
    ax.set_xlabel("Date")

    locator = mdates.AutoDateLocator()
    ax.xaxis.set_major_locator(locator)
    ax.xaxis.set_major_formatter(mdates.AutoDateFormatter(locator))
    ax.xaxis.set_minor_locator(mdates.DayLocator())

    data = {}
    x_data = {}
    y_data = {}

    with open(csv_filename, 'wb') as csv_file:
        csv_writer = csv.writer(csv_file)
        for row in query.iterator():
            csv_writer.writerow([
                row['account__username'],
                row['date'], row['cpu_usage__sum'] / 3600.00
            ])

            account = row['account']
            date = row['date']

            if account not in data:
                data[account] = {}
                x_data[account] = []
                y_data[account] = []

            data[account][date] = row['cpu_usage__sum']

    for account, dates in six.iteritems(data):
        start = t_start
        end = t_end
        while start <= end:
            total = 0
            if start in dates:
                total = dates[start]
            x_data[account].append(start)
            y_data[account].append(total / 3600.00)
            start = start + datetime.timedelta(days=1)

    del data

    totals = []
    start = t_start
    end = t_end
    while start <= end:
        totals.append(0)
        start = start + datetime.timedelta(days=1)

    count = 0
    for account in x_data.keys():
        ax.bar(
            x_data[account], y_data[account],
            bottom=totals,
            color=graphs.get_colour(count),
            edgecolor=graphs.get_colour(count),
            align='edge')
        count = count + 1

        i = 0
        start = t_start
        end = t_end
        while start <= end:
            totals[i] += y_data[account][i]
            i = i + 1
            start = start + datetime.timedelta(days=1)

    del x_data
    del y_data
    del totals

    fig.autofmt_xdate()
    plt.tight_layout()
    plt.savefig(png_filename)
    plt.close()
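
One pitfall worth noting with values(...).annotate(Sum(...)): fields from the model's default Meta ordering are added to the GROUP BY and can split groups unexpectedly. Calling order_by() explicitly, as the query above does, avoids this; an empty order_by() clears the default ordering entirely. A sketch:

from django.db.models import Sum

# order_by() with no arguments drops Meta.ordering, so grouping is by 'date' alone.
usage = CPUJob.objects.values('date').annotate(Sum('cpu_usage')).order_by()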

Example 132

Project: CommunityCellularManager Source File: staff.py
    def count_usage_events(self, traffic_type, tier):
        """Count UsageEvents of a specified type.

        For instance, specifying 'sms' with a tier whose directionality is
        'on_network_receive' will count all UEs of kind 'local_recv_sms'.
        'call' type events have their billsec summed (and converted to
        minutes) rather than simply counted.
        """
        network = tier.network
        directionality = tier.directionality
        events = models.UsageEvent.objects
        filters = Q(network=network)
        # We will only gather events after Jul 30, 2014 due to an issue with
        # UsageEvent generation in Papua.
        JUL30_2014 = datetime.datetime(month=7, day=30, year=2014,
                                       tzinfo=pytz.utc)
        filters = filters & Q(date__gte=JUL30_2014)
        # SMS-types.
        if traffic_type == 'sms':
            mapping = {
                'on_network_receive': 'local_recv_sms',
                'on_network_send': 'local_sms',
                'off_network_receive': 'incoming_sms',
                'off_network_send': 'outside_sms',
            }
            filters = filters & Q(kind=mapping[directionality])
            # Off-network send events need further filtering by Tier.
            if directionality == 'off_network_send':
                filters = (filters & Q(
                    destination__destination_group=tier.destination_group))
            return events.filter(filters).count()
        # Call-types.
        elif traffic_type == 'call':
            mapping = {
                'on_network_receive': 'local_recv_call',
                'on_network_send': 'local_call',
                'off_network_receive': 'incoming_call',
                'off_network_send': 'outside_call',
            }
            filters = filters & Q(kind=mapping[directionality])
            # Off-network send events need further filtering by Tier.
            if directionality == 'off_network_send':
                filters = (filters & Q(
                    destination__destination_group=tier.destination_group))
            seconds = events.filter(filters).aggregate(
                Sum('billsec'))['billsec__sum']
            if seconds:
                return seconds / 60.
            else:
                return 0

Example 133

Project: karaage Source File: tasks.py
def _gen_institute_trend_graph(institute,
                               start,
                               end,
                               machine_category,
                               force_overwrite=False):
    """ Institute trend graph for machine category. """
    filename = graphs.get_institute_trend_graph_filename(
        institute, start, end, machine_category)
    csv_filename = os.path.join(GRAPH_ROOT, filename + '.csv')
    png_filename = os.path.join(GRAPH_ROOT, filename + '.png')

    _check_directory_exists(csv_filename)
    _check_directory_exists(png_filename)

    # Skip regeneration only when neither debugging nor forcing an overwrite,
    # and both output files already exist.
    if not (settings.GRAPH_DEBUG or force_overwrite):
        if os.path.exists(csv_filename):
            if os.path.exists(png_filename):
                return

    query = CPUJob.objects.filter(
        project__institute=institute,
        machine__category=machine_category,
        date__range=(start, end)
    )
    query = query.values('date').annotate(Sum('cpu_usage'))
    query = query.order_by('date')

    t_start = start
    t_end = end

    start_str = start.strftime('%Y-%m-%d')
    end_str = end.strftime('%Y-%m-%d')

    fig, ax = plt.subplots(figsize=(6, 4))
    ax.set_xlim(start, end)
    ax.set_title('%s - %s' % (start_str, end_str))
    ax.set_ylabel("CPU Time (hours)")
    ax.set_xlabel("Date")

    locator = mdates.AutoDateLocator()
    ax.xaxis.set_major_locator(locator)
    ax.xaxis.set_major_formatter(mdates.AutoDateFormatter(locator))
    ax.xaxis.set_minor_locator(mdates.DayLocator())

    data = {}
    x_data = []
    y_data = []

    with open(csv_filename, 'wb') as csv_file:
        csv_writer = csv.writer(csv_file)
        for row in query.iterator():
            csv_writer.writerow([
                row['date'], row['cpu_usage__sum'] / 3600.00
            ])

            date = row['date']

            data[date] = row['cpu_usage__sum']

    start = t_start
    end = t_end
    while start <= end:
        total = 0
        if start in data:
            total = data[start]
        x_data.append(start)
        y_data.append(total / 3600.00)
        start = start + datetime.timedelta(days=1)

    del data

    ax.plot(x_data, y_data)

    del x_data
    del y_data

    fig.autofmt_xdate()
    plt.tight_layout()
    plt.savefig(png_filename)
    plt.close()

Example 134

Project: codesy Source File: model_tests.py
    def test_surplus_payout_from_two_bidders(self):
        self.bid3.set_offer(50)
        claim = mommy.make(Claim, user=self.user1, issue=self.issue)
        account = mommy.make(StripeAccount, user=self.user1)
        self.assertEqual(account, self.user1.account())
        api_request = claim.payout()
        if api_request:
            self.assertEqual(claim.status, 'Paid')

        payouts = claim.payouts.all()

        offers = Offer.objects.all()

        self.assertEqual(len(payouts), 2)
        self.assertEqual(len(offers), 4)

        offer_fees = OfferFee.objects.all()
        offer_credits = OfferCredit.objects.all()

        payout_fees = PayoutFee.objects.all()
        payout_credits = PayoutCredit.objects.all()

        self.assertEqual(len(offer_fees), 8)
        self.assertEqual(len(offer_credits), 2)
        self.assertEqual(len(payout_fees), 4)
        self.assertEqual(len(payout_credits), 2)

        sum_payout = payouts.aggregate(Sum('amount'))['amount__sum']

        self.assertEqual(sum_payout, Decimal('66.66'))

        sum_payout_credits = payout_credits.aggregate(
            Sum('amount'))['amount__sum']

        self.assertEqual(sum_payout_credits, Decimal('33.34'))

Example 135

Project: kolibri Source File: exportcontent.py
    def handle_async(self, *args, **options):
        channel_id = options["channel_id"]
        data_dir = os.path.realpath(options["destination"])
        logging.info("Exporting content for channel id {} to {}".format(channel_id, data_dir))

        with using_content_database(channel_id):
            files = File.objects.all()
            total_bytes_to_transfer = files.aggregate(Sum('file_size'))['file_size__sum']

            with self.start_progress(total=total_bytes_to_transfer) as overall_progress_update:

                for f in files:

                    filename = f.get_filename()

                    srcpath = paths.get_content_storage_file_path(filename)
                    dest = paths.get_content_storage_file_path(filename, datafolder=data_dir)

                    # if the file already exists, add its size to our overall progress, and skip
                    if os.path.isfile(dest) and os.path.getsize(dest) == f.file_size:
                        overall_progress_update(f.file_size)
                        continue

                    with transfer.FileCopy(srcpath, dest) as copy:

                        with self.start_progress(total=copy.total_size) as file_cp_progress_update:

                            for chunk in copy:
                                length = len(chunk)
                                overall_progress_update(length)
                                file_cp_progress_update(length)

Example 136

Project: wolnelektury Source File: models.py
Function: sum
    def sum(self):
        """ The money gathered. """
        return self.funding_payed().aggregate(s=models.Sum('amount'))['s'] or 0

Example 137

Project: evething Source File: home.py
@login_required
def home(request):
    """Home page"""
    tt = TimerThing('home')

    profile = request.user.profile

    tt.add_time('profile')

    # Make a set of characters to hide
    hide_characters = set(int(c) for c in profile.home_hide_characters.split(',') if c)

    # Initialise various data structures
    now = datetime.datetime.utcnow()
    total_balance = 0

    api_keys = set()
    training = set()
    chars = {}
    ship_item_ids = set()

    # Try retrieving characters from cache
    cache_key = 'home:characters:%d' % (request.user.id)
    characters = cache.get(cache_key)
    # Not cached, fetch from database and cache
    if characters is None:
        character_qs = Character.objects.filter(
            apikeys__user=request.user,
            apikeys__valid=True,
            apikeys__key_type__in=(APIKey.ACCOUNT_TYPE, APIKey.CHARACTER_TYPE),
        ).prefetch_related(
            'apikeys',
        ).select_related(
            'config',
            'details',
        ).distinct()

        # Django 1.5 workaround for the stupid change from a non-existent reverse
        # relation returning None to it raising self.related.model.DoesNotExist :(
        characters = []
        char_map = {}
        for c in character_qs:
            try:
                c.details is not None
            except Exception:
                pass
            else:
                characters.append(c)
                char_map[c.id] = c

        tt.add_time('c1')

        # Fetch skill data now WITHOUT Unpublished SP
        cskill_qs = CharacterSkill.objects.filter(
            character__in=char_map.keys(),
            skill__item__market_group__isnull=False,
        ).values(
            'character',
        ).annotate(
            total_sp=Sum('points'),
        )
        for cskill in cskill_qs:
            char_map[cskill['character']].total_sp = cskill['total_sp']

        cache.set(cache_key, characters, 300)

        tt.add_time('c2')

    for character in characters:
        char_keys = [ak for ak in character.apikeys.all() if ak.user_id == request.user.id]
        api_keys.update(char_keys)

        chars[character.id] = character
        character.z_apikey = char_keys[0]
        character.z_training = {}

        total_balance += character.details.wallet_balance
        if character.details.ship_item_id is not None:
            ship_item_ids.add(character.details.ship_item_id)

    tt.add_time('characters')

    # Retrieve ship information
    ship_map = Item.objects.in_bulk(ship_item_ids)
    tt.add_time('ship_items')

    # Do skill training check - this can't be in the model because it
    # scales like crap doing individual queries
    skill_qs = []

    queues = SkillQueue.objects.filter(character__in=chars, end_time__gte=now)
    queues = queues.select_related('skill__item')
    for sq in queues:
        char = chars[sq.character_id]
        duration = total_seconds(sq.end_time - now)

        if 'sq' not in char.z_training:
            char.z_training['sq'] = sq
            char.z_training['skill_duration'] = duration
            char.z_training['sp_per_hour'] = int(sq.skill.get_sp_per_minute(char) * 60)
            char.z_training['complete_per'] = sq.get_complete_percentage(now, char)
            training.add(char.z_apikey)

            skill_qs.append(Q(character=char, skill=sq.skill))

        char.z_training['queue_duration'] = duration

    tt.add_time('training')

    # Retrieve training skill information
    if skill_qs:
        for cs in CharacterSkill.objects.filter(reduce(operator.ior, skill_qs)):
            chars[cs.character_id].z_tskill = cs

    tt.add_time('training skills')

    # Do total skill point aggregation
    total_sp = 0
    for char in characters:
        char.z_total_sp = getattr(char, 'total_sp', 0)
        if 'sq' in char.z_training and hasattr(char, 'z_tskill'):
            char.z_total_sp += int(char.z_training['sq'].get_completed_sp(char.z_tskill, now, char))

        total_sp += char.z_total_sp

    tt.add_time('total_sp')

    # Try retrieving total asset value from cache
    cache_key = 'home:total_assets:%d' % (request.user.id)
    total_assets = cache.get(cache_key)
    # Not cached, fetch from database and cache
    if total_assets is None:
        total_assets = AssetSummary.objects.filter(
            character__in=chars.keys(),
            corporation_id=0,
        ).aggregate(
            t=Sum('total_value'),
        )['t']
        cache.set(cache_key, total_assets, 300)

    tt.add_time('total_assets')

    # Work out who is and isn't training
    not_training = api_keys - training

    # Do notifications
    for char_id, char in chars.items():
        char.z_notifications = []

        # Game time warnings
        if char.z_apikey.paid_until:
            timediff = total_seconds(char.z_apikey.paid_until - now)

            if timediff < 0:
                char.z_notifications.append({
                    'icon': 'clock-o',
                    'text': 'Expired',
                    'tooltip': 'Game time has expired!',
                    'span_class': 'low-game-time',
                })

            elif timediff < EXPIRE_WARNING:
                char.z_notifications.append({
                    'icon': 'clock-o',
                    'text': shortduration(timediff),
                    'tooltip': 'Remaining game time is low!',
                    'span_class': 'low-game-time',
                })

        # API key warnings
        if char.z_apikey.expires:
            timediff = total_seconds(char.z_apikey.expires - now)
            if timediff < EXPIRE_WARNING:
                char.z_notifications.append({
                    'icon': 'key',
                    'text': shortduration(timediff),
                    'tooltip': 'API key is close to expiring!',
                })

        # Empty skill queue
        if char.z_apikey in not_training:
            char.z_notifications.append({
                'icon': 'list-ol',
                'text': 'Empty!',
                'tooltip': 'Skill queue is empty!',
            })

        if char.z_training:
            # Room in skill queue
            if char.z_training['queue_duration'] < ONE_DAY:
                timediff = ONE_DAY - char.z_training['queue_duration']
                char.z_notifications.append({
                    'icon': 'list-ol',
                    'text': shortduration(timediff),
                    'tooltip': 'Skill queue is not full!',
                })

            # Missing implants
            skill = char.z_training['sq'].skill
            pri_attrs = Skill.ATTRIBUTE_MAP[skill.primary_attribute]
            sec_attrs = Skill.ATTRIBUTE_MAP[skill.secondary_attribute]
            pri_bonus = getattr(char.details, pri_attrs[1])
            sec_bonus = getattr(char.details, sec_attrs[1])

            t = []
            if pri_bonus == 0:
                t.append(skill.get_primary_attribute_display())
            if sec_bonus == 0:
                t.append(skill.get_secondary_attribute_display())

            if t:
                char.z_notifications.append({
                    'icon': 'lightbulb-o',
                    'text': ', '.join(t),
                    'tooltip': 'Missing stat implants for currently training skill!',
                })

        # Sort out well classes here ugh
        classes = []
        if char.z_apikey in not_training:
            if profile.home_highlight_backgrounds:
                classes.append('background-error')
            if profile.home_highlight_borders:
                classes.append('border-error')
        elif char.z_notifications:
            if profile.home_highlight_backgrounds:
                classes.append('background-warn')
            if profile.home_highlight_borders:
                classes.append('border-warn')
        else:
            if profile.home_highlight_backgrounds:
                classes.append('background-success')
            if profile.home_highlight_borders:
                classes.append('border-success')

        if classes:
            char.z_well_class = ' %s' % (' '.join(classes))
        else:
            char.z_well_class = ''

    tt.add_time('notifications')

    # Decorate/sort based on settings, ugh
    char_list = chars.values()
    if profile.home_sort_order == 'apiname':
        temp = [(c.z_apikey.group_name or 'ZZZ', c.z_apikey.name, c.name.lower(), c) for c in char_list]
    elif profile.home_sort_order == 'charname':
        temp = [(c.z_apikey.group_name or 'ZZZ', c.name.lower(), c) for c in char_list]
    elif profile.home_sort_order == 'corpname':
        temp = [(c.z_apikey.group_name or 'ZZZ', c.corporation.name.lower(), c.name.lower(), c) for c in char_list]
    elif profile.home_sort_order == 'totalsp':
        temp = [(c.z_apikey.group_name or 'ZZZ', getattr(c, 'z_total_sp', 0), c) for c in char_list]
    elif profile.home_sort_order == 'wallet':
        temp = [(c.z_apikey.group_name or 'ZZZ', c.details and c.details.wallet_balance, c.name.lower(), c) for c in char_list]
    else:
        # Fall back to character name ordering so temp is always defined
        temp = [(c.z_apikey.group_name or 'ZZZ', c.name.lower(), c) for c in char_list]

    temp.sort()
    if profile.home_sort_descending:
        temp.reverse()

    tt.add_time('sort')

    # Now group based on group_name
    bleh = OrderedDict()
    for temp_data in temp:
        bleh.setdefault(temp_data[0], []).append(temp_data[-1])

    char_lists = []
    for char_list in bleh.values():
        first = [char for char in char_list if char.z_training and char.id not in hide_characters]
        last = [char for char in char_list if not char.z_training and char.id not in hide_characters]
        char_lists.append(first + last)

    tt.add_time('group')

    # Try retrieving corporations from cache
    cache_key = 'home:corporations:%d' % (request.user.id)
    corporations = cache.get(cache_key)
    # Not cached, fetch from database and cache
    if corporations is None:
        corp_ids = Corporation.get_ids_with_access(request.user, APIKey.CORP_ACCOUNT_BALANCE_MASK)
        corp_map = OrderedDict()
        # WARNING: Theoretically we are exposing the wallet division name which may not be exposed
        # if you only have the BALANCE_MASK or some shit
        for corp_wallet in CorpWallet.objects.select_related().filter(corporation__in=corp_ids):
            if corp_wallet.corporation_id not in corp_map:
                corp_map[corp_wallet.corporation_id] = corp_wallet.corporation
                corp_map[corp_wallet.corporation_id].wallets = []

            corp_map[corp_wallet.corporation_id].wallets.append(corp_wallet)

        corporations = corp_map.values()
        cache.set(cache_key, corporations, 300)

    tt.add_time('corps')

    # Try retrieving total corp asset value from cache
    cache_key = 'home:corp_assets:%d' % (request.user.id)
    corp_assets = cache.get(cache_key)
    # Not cached, fetch from database and cache
    if corp_assets is None:
        corp_ids = Corporation.get_ids_with_access(request.user, APIKey.CORP_ASSET_LIST_MASK)

        corp_assets = AssetSummary.objects.filter(
            corporation_id__in=corp_ids,
        ).aggregate(
            t=Sum('total_value'),
        )['t']
        cache.set(cache_key, corp_assets, 300)

    tt.add_time('corp_assets')

    # Render template
    out = render_page(
        'thing/home.html',
        {
            'profile': profile,
            'not_training': not_training,
            'total_balance': total_balance,
            'total_sp': total_sp,
            'total_assets': total_assets,
            'corp_assets': corp_assets,
            'corporations': corporations,
            # 'characters': first + last,
            'characters': char_lists,
            'events': list(Event.objects.filter(user=request.user)[:10]),
            'ship_map': ship_map,
            # 'task_count': task_count,
        },
        request,
        chars.keys(),
        [c.id for c in corporations]
    )

    tt.add_time('template')
    if settings.DEBUG:
        tt.finished()

    return out

Example 138

Project: django-oscar Source File: views.py
    def get_stats(self):
        datetime_24hrs_ago = now() - timedelta(hours=24)

        orders = Order.objects.all()
        orders_last_day = orders.filter(date_placed__gt=datetime_24hrs_ago)

        open_alerts = StockAlert.objects.filter(status=StockAlert.OPEN)
        closed_alerts = StockAlert.objects.filter(status=StockAlert.CLOSED)

        total_lines_last_day = Line.objects.filter(
            order__in=orders_last_day).count()
        stats = {
            'total_orders_last_day': orders_last_day.count(),
            'total_lines_last_day': total_lines_last_day,

            'average_order_costs': orders_last_day.aggregate(
                Avg('total_incl_tax')
            )['total_incl_tax__avg'] or D('0.00'),

            'total_revenue_last_day': orders_last_day.aggregate(
                Sum('total_incl_tax')
            )['total_incl_tax__sum'] or D('0.00'),

            'hourly_report_dict': self.get_hourly_report(hours=24),
            'total_customers_last_day': User.objects.filter(
                date_joined__gt=datetime_24hrs_ago,
            ).count(),

            'total_open_baskets_last_day': self.get_open_baskets({
                'date_created__gt': datetime_24hrs_ago
            }).count(),

            'total_products': Product.objects.count(),
            'total_open_stock_alerts': open_alerts.count(),
            'total_closed_stock_alerts': closed_alerts.count(),

            'total_site_offers': self.get_active_site_offers().count(),
            'total_vouchers': self.get_active_vouchers().count(),
            'total_promotions': self.get_number_of_promotions(),

            'total_customers': User.objects.count(),
            'total_open_baskets': self.get_open_baskets().count(),
            'total_orders': orders.count(),
            'total_lines': Line.objects.count(),
            'total_revenue': orders.aggregate(
                Sum('total_incl_tax')
            )['total_incl_tax__sum'] or D('0.00'),

            'order_status_breakdown': orders.order_by(
                'status'
            ).values('status').annotate(freq=Count('id'))
        }
        return stats
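
The or D('0.00') fallbacks above handle the empty-queryset case in Python. On Django 1.8+ the same default can be pushed into SQL with Coalesce; a sketch using the same Order queryset, field names as in the example:

from decimal import Decimal

from django.db.models import DecimalField, Sum, Value
from django.db.models.functions import Coalesce

def revenue_last_day(orders_last_day):
    # COALESCE(SUM(total_incl_tax), 0.00) is computed by the database
    return orders_last_day.aggregate(
        total=Coalesce(
            Sum('total_incl_tax'),
            Value(Decimal('0.00'), output_field=DecimalField()),
        ),
    )['total']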

Example 139

Project: evething Source File: wallet_journal.py
@login_required
def wallet_journal_aggregate(request):
    characters = Character.objects.filter(apikeys__user=request.user.id)
    character_ids = [c.id for c in characters]

    corporation_ids = Corporation.get_ids_with_access(request.user, APIKey.CORP_WALLET_JOURNAL_MASK)
    corporations = Corporation.objects.filter(pk__in=corporation_ids)

    # Parse filters and apply magic
    filters, journal_ids, days = _journal_queryset(request, character_ids, corporation_ids)

    # Group by
    group_by = {
        'date': request.GET.get('group_by_date', 'year'),
        'owner1': request.GET.get('group_by_owner1'),
        'owner2': request.GET.get('group_by_owner2'),
        'reftype': request.GET.get('group_by_reftype'),
        'source': request.GET.get('group_by_source'),
    }

    # Build a horrifying ORM query
    if group_by['date'] == 'day':
        extras = {
            'year': 'EXTRACT(year FROM date)',
            'month': 'EXTRACT(month FROM date)',
            'day': 'EXTRACT(day FROM date)',
        }
        values = ['year', 'month', 'day']

    elif group_by['date'] == 'month':
        extras = {
            'year': 'EXTRACT(year FROM date)',
            'month': 'EXTRACT(month FROM date)',
        }
        values = ['year', 'month']

    else:
        # group_by['date'] = 'year'
        extras = {
            'year': 'EXTRACT(year FROM date)',
        }
        values = ['year']

    empty_colspan = 3
    for v in group_by.values():
        if v:
            empty_colspan += 1

    if group_by['owner1']:
        values.append('owner1_id')
    if group_by['owner2']:
        values.append('owner2_id')
    if group_by['reftype']:
        values.append('ref_type')
    if group_by['source']:
        values.append('character')
        values.append('corp_wallet')

    journal_ids = journal_ids.extra(
        select=extras,
    ).values(
        *values
    ).annotate(
        entries=Count('id'),
        total_amount=Sum('amount'),
    ).order_by()  # deliberately empty: clears default ordering so it doesn't pollute the GROUP BY

    # Aggregate!
    wja = WJAggregator(group_by)

    for entry in journal_ids:
        print(entry)
        wja.add_entry(entry)

    wja.finalise()

    # Render template
    return render_page(
        'thing/wallet_journal_aggregate.html',
        {
            'json_data': _json_data(characters, corporations, filters),
            'agg_data': wja.data,
            'group_by': group_by,
            'empty_colspan': empty_colspan,
        },
        request,
    )
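
The EXTRACT(...) strings in extra() above are raw SQL. On Django 1.10+ the same month grouping can be expressed portably with TruncMonth; a sketch assuming a journal queryset with date and amount fields:

from django.db.models import Count, Sum
from django.db.models.functions import TruncMonth

def monthly_totals(journal_qs):
    # values() before annotate() turns this into a GROUP BY month
    return journal_qs.annotate(
        month=TruncMonth('date'),
    ).values('month').annotate(
        entries=Count('id'),
        total_amount=Sum('amount'),
    ).order_by('month')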

Example 140

Project: sentry Source File: organization_index.py
    @attach_scenarios([list_your_organizations_scenario])
    def get(self, request):
        """
        List your Organizations
        ```````````````````````

        Return a list of organizations available to the authenticated
        session.  This is particularly useful for requests with an
        user bound context.  For API key based requests this will
        only return the organization that belongs to the key.

        :qparam bool member: restrict results to organizations which you have
                             membership

        :auth: required
        """
        member_only = request.GET.get('member') in ('1', 'true')

        queryset = Organization.objects.filter(
            status=OrganizationStatus.VISIBLE,
        )

        if request.auth and not request.user.is_authenticated():
            if hasattr(request.auth, 'project'):
                queryset = queryset.filter(
                    id=request.auth.project.organization_id
                )
            elif request.auth.organization is not None:
                queryset = queryset.filter(
                    id=request.auth.organization.id
                )
        elif member_only or not request.is_superuser():
            queryset = queryset.filter(
                id__in=OrganizationMember.objects.filter(
                    user=request.user,
                ).values('organization'),
            )

        query = request.GET.get('query')
        if query:
            tokens = tokenize_query(query)
            for key, value in six.iteritems(tokens):
                if key == 'query':
                    value = ' '.join(value)
                    queryset = queryset.filter(
                        Q(name__icontains=value) |
                        Q(slug__icontains=value) |
                        Q(members__email__iexact=value)
                    )
                elif key == 'slug':
                    queryset = queryset.filter(
                        in_iexact('slug', value)
                    )
                elif key == 'email':
                    queryset = queryset.filter(
                        in_iexact('members__email', value)
                    )
                elif key == 'platform':
                    queryset = queryset.filter(
                        project__in=ProjectPlatform.objects.filter(
                            platform__in=value,
                        ).values('project_id')
                    )
                elif key == 'id':
                    queryset = queryset.filter(id__in=value)

        sort_by = request.GET.get('sortBy')
        if sort_by == 'members':
            queryset = queryset.annotate(
                member_count=Count('member_set'),
            )
            order_by = '-member_count'
            paginator_cls = OffsetPaginator
        elif sort_by == 'projects':
            queryset = queryset.annotate(
                project_count=Count('project'),
            )
            order_by = '-project_count'
            paginator_cls = OffsetPaginator
        elif sort_by == 'events':
            queryset = queryset.annotate(
                event_count=Sum('stats__events_24h'),
            ).filter(
                stats__events_24h__isnull=False,
            )
            order_by = '-event_count'
            paginator_cls = OffsetPaginator
        else:
            order_by = '-date_added'
            paginator_cls = DateTimePaginator

        return self.paginate(
            request=request,
            queryset=queryset,
            order_by=order_by,
            on_results=lambda x: serialize(x, request.user),
            paginator_cls=paginator_cls,
        )
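
In the sortBy == 'events' branch above, organizations without stats rows would annotate to event_count=None, which is why the isnull filter drops them. An alternative that keeps them sortable is to coalesce the Sum to zero; a sketch under that assumption, using the same stats relation:

from django.db.models import Sum, Value
from django.db.models.functions import Coalesce

def orgs_by_events(queryset):
    # organizations with no stats rows sort as 0 instead of being filtered out
    return queryset.annotate(
        event_count=Coalesce(Sum('stats__events_24h'), Value(0)),
    ).order_by('-event_count')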

Example 141

Project: plata Source File: models.py
    def items_in_stock(self, product, update=False, exclude_order=None,
                       include_reservations=False):
        """
        Determine the items in stock for the given product variation,
        optionally updating the ``items_in_stock`` field in the database.

        If ``exclude_order`` is given, ``update`` is always switched off
        and transactions from the given order aren't taken into account.

        If ``include_reservations`` is ``True``, ``update`` is always
        switched off.
        """

        queryset = self.filter(
            period=Period.objects.current(),
            product=product)

        if exclude_order:
            update = False
            queryset = queryset.filter(
                Q(order__isnull=True) | ~Q(order=exclude_order))

        if include_reservations:
            update = False
            queryset = queryset.exclude(
                type=self.model.PAYMENT_PROCESS_RESERVATION,
                created__lt=timezone.now() - timedelta(seconds=15 * 60))
        else:
            queryset = queryset.exclude(
                type=self.model.PAYMENT_PROCESS_RESERVATION)

        count = queryset.aggregate(items=Sum('change')).get('items') or 0

        product_model = plata.product_model()

        if isinstance(product, product_model):
            product.items_in_stock = count

        if update:
            product_model._default_manager.filter(
                id=getattr(product, 'pk', product)
            ).update(items_in_stock=count)

        return count
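
The manager above treats stock as a ledger: every transaction stores a signed quantity change, and the current level is just the Sum over that ledger. The core of the idea, stripped of the reservation and exclusion logic, model names as in the example:

from django.db.models import Sum

def current_stock(transaction_qs, product):
    # sum the signed quantity changes; an empty ledger aggregates to None
    items = transaction_qs.filter(product=product).aggregate(
        items=Sum('change'),
    )['items']
    return items or 0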

Example 142

Project: hue Source File: tests.py
    def test_annotation_disjunction(self):
        qs = Book.objects.annotate(n_authors=Count("authors")).filter(
            Q(n_authors=2) | Q(name="Python Web Development with Django")
        )
        self.assertQuerysetEqual(
            qs, [
                "Artificial Intelligence: A Modern Approach",
                "Python Web Development with Django",
                "The Definitive Guide to Django: Web Development Done Right",
            ],
            attrgetter("name")
        )

        qs = Book.objects.annotate(n_authors=Count("authors")).filter(
            Q(name="The Definitive Guide to Django: Web Development Done Right") | (Q(name="Artificial Intelligence: A Modern Approach") & Q(n_authors=3))
        )
        self.assertQuerysetEqual(
            qs, [
                "The Definitive Guide to Django: Web Development Done Right",
            ],
            attrgetter("name")
        )

        qs = Publisher.objects.annotate(
            rating_sum=Sum("book__rating"),
            book_count=Count("book")
        ).filter(
            Q(rating_sum__gt=5.5) | Q(rating_sum__isnull=True)
        ).order_by('pk')
        self.assertQuerysetEqual(
            qs, [
                "Apress",
                "Prentice Hall",
                "Jonno's House of Books",
            ],
            attrgetter("name")
        )

        qs = Publisher.objects.annotate(
            rating_sum=Sum("book__rating"),
            book_count=Count("book")
        ).filter(
            Q(pk__lt=F("book_count")) | Q(rating_sum=None)
        ).order_by("pk")
        self.assertQuerysetEqual(
            qs, [
                "Apress",
                "Jonno's House of Books",
            ],
            attrgetter("name")
        )

Example 143

Project: saleor Source File: models.py
Function: count
    def count(self):
        lines = self.lines.all()
        return lines.aggregate(total_quantity=models.Sum('quantity'))
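
Note that this count() returns the whole aggregate dict, e.g. {'total_quantity': 3}, not a number, so callers have to unwrap it. A sketch of the usual unwrapping, with a fallback for an empty cart:

from django.db import models

def total_quantity(cart):
    # aggregate() returns {'total_quantity': None} for an empty cart
    result = cart.lines.aggregate(total_quantity=models.Sum('quantity'))
    return result['total_quantity'] or 0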

Example 144

Project: synnefo Source File: stats.py
def get_astakos_stats():
    stats = {"datetime": datetime.datetime.now().strftime("%c"),
             "providers": [],
             "users": {},
             "resources": {}}

    users = AstakosUser.objects.all()
    verified = users.filter(email_verified=True)
    active = users.filter(is_active=True)

    stats["users"]["all"] = {"total": users.count(),
                             "verified": verified.count(),
                             "active": active.count()}
    # Get all holdings from DB. Filter with 'source=None' in order to get
    # only the (base and user) projects, and not the user per project holdings
    holdings = Holding.objects.filter(source=None)\
                              .values("resource")\
                              .annotate(usage_sum=Sum("usage_max"),
                                        limit_sum=Sum("limit"))
    holdings = dict([(h["resource"], h) for h in holdings])

    resources_stats = {}
    for resource in Resource.objects.all():
        res_holdings = holdings.get(resource.name, {})
        resources_stats[resource.name] = {
            "used": res_holdings.get("usage_sum") or 0,
            "allocated": res_holdings.get("limit_sum") or 0,
            "unit": resource.unit,
            "description": resource.desc
        }
    stats["resources"]["all"] = resources_stats

    for provider in settings.IM_MODULES:
        # Add provider
        stats["providers"].append(provider)

        # Add stats about users
        users = AstakosUser.objects.filter(auth_providers__module=provider)
        verified = users.filter(email_verified=True)
        active = users.filter(is_active=True)
        exclusive = AstakosUser.objects.filter(email_verified=True,
                                               is_active=True)\
                               .annotate(num_providers=Count("auth_providers"))\
                               .filter(auth_providers__module=provider)\
                               .filter(num_providers=1)

        stats["users"][provider] = {"total": users.count(),
                                    "verified": verified.count(),
                                    "active": active.count(),
                                    "exclusive": exclusive.count()}

        # Add stats about resources
        users_uuids = exclusive.values_list("uuid", flat=True)
        # The 'holder' attribute contains user UUIDs prefixed with 'user:'
        users_uuids = ["user:" + uuid for uuid in users_uuids]
        resources_stats = {}
        for resource in Resource.objects.all():
            info = Holding.objects\
                          .filter(holder__in=users_uuids,
                                  resource=resource.name)\
                          .aggregate(usage_sum=Sum("usage_max"),
                                     limit_sum=Sum("limit"))
            resources_stats[resource.name] = {
                "used": info.get("usage_sum") or 0,
                "allocated": info.get("limit_sum") or 0,
                "unit": resource.unit,
                "description": resource.desc}
        stats["resources"][provider] = resources_stats

    return stats
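
The Holding query above is the key trick: grouping with values('resource') and annotating two Sums computes every per-resource total in a single query, instead of one aggregate per Resource. Distilled into a helper, field names as in the example:

from django.db.models import Sum

def holdings_by_resource(holding_qs):
    # one GROUP BY query yields both sums for every resource
    rows = holding_qs.values('resource').annotate(
        usage_sum=Sum('usage_max'),
        limit_sum=Sum('limit'),
    )
    return {row['resource']: row for row in rows}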

Example 145

Project: silk Source File: profiling.py
Function: get_objects
    def _get_objects(self, show=None, order_by=None, name=None, func_name=None, silk_request=None, filters=None):
        if not filters:
            filters = []
        if not show:
            show = self.default_show
        manager = Profile.objects
        if silk_request:
            query_set = manager.filter(request=silk_request)
        else:
            query_set = manager.all()
        if not order_by:
            order_by = self.defualt_order_by
        if order_by == 'Recent':
            query_set = query_set.order_by('-start_time')
        elif order_by == 'Name':
            query_set = query_set.order_by('-name')
        elif order_by == 'Function Name':
            query_set = query_set.order_by('-func_name')
        elif order_by == 'Num. Queries':
            query_set = query_set.annotate(num_queries=Count('queries')).order_by('-num_queries')
        elif order_by == 'Time':
            query_set = query_set.order_by('-time_taken')
        elif order_by == 'Time on queries':
            query_set = query_set.annotate(db_time=Sum('queries__time_taken')).order_by('-db_time')
        elif order_by:
            raise RuntimeError('Unknown order_by: "%s"' % order_by)
        if func_name:
            query_set = query_set.filter(func_name=func_name)
        if name:
            query_set = query_set.filter(name=name)
        for f in filters:
            query_set = f.contribute_to_query_set(query_set)
            query_set = query_set.filter(f)
        return list(query_set[:show])

Example 146

Project: hue Source File: tests.py
    def test_more_more_more(self):
        # Regression for #10199 - Aggregate calls clone the original query so
        # the original query can still be used
        books = Book.objects.all()
        books.aggregate(Avg("authors__age"))
        self.assertQuerysetEqual(
            books.all(), [
                'Artificial Intelligence: A Modern Approach',
                'Paradigms of Artificial Intelligence Programming: Case Studies in Common Lisp',
                'Practical Django Projects',
                'Python Web Development with Django',
                'Sams Teach Yourself Django in 24 Hours',
                'The Definitive Guide to Django: Web Development Done Right'
            ],
            lambda b: b.name
        )

        # Regression for #10248 - Annotations work with DateQuerySets
        qs = Book.objects.annotate(num_authors=Count('authors')).filter(num_authors=2).dates('pubdate', 'day')
        self.assertQuerysetEqual(
            qs, [
                datetime.date(1995, 1, 15),
                datetime.date(2007, 12, 6),
            ],
            lambda b: b
        )

        # Regression for #10290 - extra selects with parameters can be used for
        # grouping.
        qs = Book.objects.annotate(mean_auth_age=Avg('authors__age')).extra(select={'sheets' : '(pages + %s) / %s'}, select_params=[1, 2]).order_by('sheets').values('sheets')
        self.assertQuerysetEqual(
            qs, [
                150,
                175,
                224,
                264,
                473,
                566
            ],
            lambda b: int(b["sheets"])
        )

        # Regression for 10425 - annotations don't get in the way of a count()
        # clause
        self.assertEqual(
            Book.objects.values('publisher').annotate(Count('publisher')).count(),
            4
        )
        self.assertEqual(
            Book.objects.annotate(Count('publisher')).values('publisher').count(),
            6
        )

        # Note: intentionally no order_by(), that case needs tests, too.
        publishers = Publisher.objects.filter(id__in=[1, 2])
        self.assertEqual(
            sorted(p.name for p in publishers),
            [
                "Apress",
                "Sams"
            ]
        )

        publishers = publishers.annotate(n_books=Count("book"))
        sorted_publishers = sorted(publishers, key=lambda x: x.name)
        self.assertEqual(
            sorted_publishers[0].n_books,
            2
        )
        self.assertEqual(
            sorted_publishers[1].n_books,
            1
        )

        self.assertEqual(
            sorted(p.name for p in publishers),
            [
                "Apress",
                "Sams"
            ]
        )

        books = Book.objects.filter(publisher__in=publishers)
        self.assertQuerysetEqual(
            books, [
                "Practical Django Projects",
                "Sams Teach Yourself Django in 24 Hours",
                "The Definitive Guide to Django: Web Development Done Right",
            ],
            lambda b: b.name
        )
        self.assertEqual(
            sorted(p.name for p in publishers),
            [
                "Apress",
                "Sams"
            ]
        )

        # Regression for 10666 - inherited fields work with annotations and
        # aggregations
        self.assertEqual(
            HardbackBook.objects.aggregate(n_pages=Sum('book_ptr__pages')),
            {'n_pages': 2078}
        )

        self.assertEqual(
            HardbackBook.objects.aggregate(n_pages=Sum('pages')),
            {'n_pages': 2078},
        )

        qs = HardbackBook.objects.annotate(n_authors=Count('book_ptr__authors')).values('name', 'n_authors')
        self.assertQuerysetEqual(
            qs, [
                {'n_authors': 2, 'name': 'Artificial Intelligence: A Modern Approach'},
                {'n_authors': 1, 'name': 'Paradigms of Artificial Intelligence Programming: Case Studies in Common Lisp'}
            ],
            lambda h: h
        )

        qs = HardbackBook.objects.annotate(n_authors=Count('authors')).values('name', 'n_authors')
        self.assertQuerysetEqual(
            qs, [
                {'n_authors': 2, 'name': 'Artificial Intelligence: A Modern Approach'},
                {'n_authors': 1, 'name': 'Paradigms of Artificial Intelligence Programming: Case Studies in Common Lisp'}
            ],
            lambda h: h,
        )

        # Regression for #10766 - Shouldn't be able to reference an aggregate
        # fields in an aggregate() call.
        self.assertRaises(
            FieldError,
            lambda: Book.objects.annotate(mean_age=Avg('authors__age')).annotate(Avg('mean_age'))
        )

Example 147

Project: codesy Source File: models.py
    def payout(self):
        try:
            # get all authorized offers for this issue
            valid_offers = Offer.objects.filter(
                bid__issue=self.issue,
                charge_id__isnull=False,
                refund_id=u'',
            ).exclude(
                user=self.user
            )
            # check on a surplus
            # TODO: move this to the payments utils
            sum_offers = valid_offers.aggregate(Sum('amount'))['amount__sum']
            users_ask = self.ask
            offer_adjustment = 1
            if sum_offers > users_ask:
                # surplus is the amount of offers over ask
                surplus = sum_offers - users_ask
                # the claim bonus is the claimant's share of the surplus
                claim_bonus = (surplus / (valid_offers.count() + 1))
                # giveback is the amount to distribute among the offerers
                offer_giveback = surplus - claim_bonus
                # this is the percent of the original payout to be charged
                offer_adjustment = 1 - (offer_giveback / sum_offers)

            for offer in valid_offers:
                adjusted_offer_amount = (offer.amount * offer_adjustment)
                discount_amount = offer.amount - adjusted_offer_amount
                payments.refund(offer)
                # create final adjusted offer
                new_offer = Offer(
                    user=offer.user,
                    bid=offer.bid,
                    amount=adjusted_offer_amount,
                    discount=discount_amount
                )
                new_offer.save()
                # capture payment to this users account
                payout = Payout(
                    user=offer.user,
                    claim=self,
                    amount=adjusted_offer_amount,
                    discount=discount_amount
                )
                payout.save()

                payments.charge(offer, payout)
            return True
        except Exception as e:
            print('payout error: %s' % e)
            return False
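
The surplus arithmetic above is easier to follow with numbers. A worked example in plain Python, with hypothetical figures (an ask of 100 and three offers totalling 130):

sum_offers = 130.0
users_ask = 100.0

surplus = sum_offers - users_ask                       # 30.0 over the ask
claim_bonus = surplus / (3 + 1)                        # 7.5 kept by the claimant
offer_giveback = surplus - claim_bonus                 # 22.5 returned to offerers
offer_adjustment = 1 - (offer_giveback / sum_offers)   # ~0.827

# each offer is charged amount * offer_adjustment, so the adjusted
# offers total 107.5: the ask plus the claimant's share of the surplus
assert abs(sum_offers * offer_adjustment - 107.5) < 1e-9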

Example 148

Project: djangosnippets.org Source File: models.py
Function: create_manager
    def create_manager(self, instance, superclass):
        """
        Dynamically create a RelatedManager to handle the back side of the (G)FK
        """
        rel_model = self.rating_model
        rated_model = self.rated_model

        class RelatedManager(superclass):
            def get_queryset(self):
                qs = RatingsQuerySet(rel_model, rated_model=rated_model)
                return qs.filter(**(self.core_filters))

            def add(self, *objs):
                lookup_kwargs = rel_model.lookup_kwargs(instance)
                for obj in objs:
                    if not isinstance(obj, self.model):
                        raise TypeError("'%s' instance expected" % self.model._meta.object_name)
                    for (k, v) in lookup_kwargs.items():
                        setattr(obj, k, v)
                    obj.save()
            add.alters_data = True

            def create(self, **kwargs):
                kwargs.update(rel_model.lookup_kwargs(instance))
                return super(RelatedManager, self).create(**kwargs)
            create.alters_data = True

            def get_or_create(self, **kwargs):
                kwargs.update(rel_model.lookup_kwargs(instance))
                return super(RelatedManager, self).get_or_create(**kwargs)
            get_or_create.alters_data = True

            def remove(self, *objs):
                for obj in objs:
                    # Is obj actually part of this descriptor set?
                    if obj in self.all():
                        obj.delete()
                    else:
                        raise rel_model.DoesNotExist(
                            "%r is not related to %r." % (obj, instance))
            remove.alters_data = True

            def clear(self):
                self.all().delete()
            clear.alters_data = True

            def rate(self, user, score):
                rating, created = self.get_or_create(user=user)
                if created or score != rating.score:
                    rating.score = score
                    rating.save()
                return rating

            def unrate(self, user):
                return self.filter(user=user, **rel_model.lookup_kwargs(instance)).delete()

            def perform_aggregation(self, aggregator):
                score = self.all().aggregate(agg=aggregator('score'))
                return score['agg']

            def cumulative_score(self):
                # simply the sum of all scores, useful for +1/-1
                return self.perform_aggregation(models.Sum)

            def average_score(self):
                # the average of all the scores, useful for 1-5
                return self.perform_aggregation(models.Avg)

            def standard_deviation(self):
                # the standard deviation of all the scores, useful for 1-5
                return self.perform_aggregation(models.StdDev)

            def variance(self):
                # the variance of all the scores, useful for 1-5
                return self.perform_aggregation(models.Variance)

            def similar_items(self):
                return SimilarItem.objects.get_for_item(instance)

        manager = RelatedManager()
        manager.core_filters = rel_model.lookup_kwargs(instance)
        manager.model = rel_model

        return manager
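
The perform_aggregation hook above is why one manager can expose sum, average, standard deviation and variance: Django aggregate classes are ordinary values, so the specific aggregate is just a parameter. A minimal standalone sketch of the same idea:

from django.db import models

def score_aggregate(ratings_qs, aggregator=models.Sum):
    # aggregator is any aggregate class: models.Sum, models.Avg, models.StdDev, ...
    return ratings_qs.aggregate(agg=aggregator('score'))['agg']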

Example 149

Project: element43 Source File: station.py
def import_region(request, station_id=60003760, region_id=10000002):

    """
    Generates a list like http://goonmetrics.com/importing/
    Pattern: Region -> Station
    """

    # Get region, station and markup
    region = MapRegion.objects.get(id=region_id)
    station = StaStation.objects.get(id=station_id)
    markup = import_markup(station_id, region_id, 0, 0)

    # Get last week for history query
    last_week = pytz.utc.localize(
        datetime.datetime.utcnow() - datetime.timedelta(days=7))

    data = []

    for point in markup:
        # Add new values to the dict; if there's a weekly volume, append the point to the list
        new_values = {
            # Get local weekly volume for that item
            'weekly_volume': OrderHistory.objects.filter(mapregion_id=station.region.id,
                                                         invtype_id=point['id'],
                                                         date__gte=last_week)
            .aggregate(Sum("quantity"))['quantity__sum'],

            # Get filtered local bid qty
            'bid_qty_filtered': Orders.active.filter(stastation_id=station_id,
                                                      invtype_id=point['id'], is_bid=True,
                                                      minimum_volume=1,
                                                      price__gte=(point['local_bid'] - (point['local_bid'] * 0.01)))
            .aggregate(Sum("volume_remaining"))['volume_remaining__sum'],

            # Get filtered ask qty of the other region
            'ask_qty_filtered': Orders.active.filter(mapregion_id=region_id,
                                                      invtype_id=point['id'], is_bid=False,
                                                      minimum_volume=1,
                                                      price__lte=(point['foreign_ask'] + (point['foreign_ask'] * 0.01)))
            .aggregate(Sum("volume_remaining"))['volume_remaining__sum']}
        point.update(new_values)

        # Calculate potential profit ((local_bid - foreign_ask) * weekly_volume)
        if point['weekly_volume'] is not None:
            point['potential_profit'] = ((point['local_bid'] - point['foreign_ask']) * point['weekly_volume'])
            data.append(point)

    data.sort(key=itemgetter('potential_profit'), reverse=True)

    rcontext = RequestContext(request, {'region': region, 'markup': data})

    return render_to_response('station/_import_region.haml', rcontext)
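
The two filtered-quantity lookups above are the same aggregate with mirrored price bands: bids within 1% below their reference price, asks within 1% above. A hypothetical helper factoring that out, field names as in the example:

from django.db.models import Sum

def qty_within_band(orders_qs, price, is_bid, tolerance=0.01):
    # sum the remaining volume of orders priced within tolerance of price
    if is_bid:
        lookup = {'price__gte': price * (1 - tolerance)}
    else:
        lookup = {'price__lte': price * (1 + tolerance)}
    qs = orders_qs.filter(is_bid=is_bid, minimum_volume=1, **lookup)
    return qs.aggregate(Sum('volume_remaining'))['volume_remaining__sum'] or 0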

Example 150

Project: django-analytics Source File: page_overview.py
    def _build_report_data(self):
        page_visits = list(self.visit_queryset().values_list('pagevisit__pk', flat=True))
        if not page_visits:
            return []

        pages = models.PageVisit.objects.filter(
            pk__in=page_visits
        ).values('page__path').annotate(
            pageviews=Count('pk'),
            unique_pageviews=Count('visit__uuid', distinct=True),
            duration=Sum('duration')
        ).values(
            'page__path',
            'pageviews',
            'unique_pageviews',
            'duration'
        )

        if self.client:
            page_pattern_query = models.PagePattern.objects.filter(
                client=self.client
            )
        else:
            page_pattern_query = models.PagePattern.objects.filter(
                client__domain__web_property=self.web_property
            )
        page_patterns = {
            pattern.display_path: re.compile(pattern.pattern, re.IGNORECASE)
            for pattern in page_pattern_query
        }

        entrances = self.visit_queryset().values(
            'first_page__path'
        ).annotate(Count('uuid'))

        bounce_pks = list(
            self.visit_queryset().annotate(
                page_count=Count('pages')
            ).filter(page_count=1).values_list('pk', flat=True)
        )
        bounces = models.Page.objects.filter(
            pagevisit__visit__pk__in=bounce_pks
        ).values('path').annotate(
            bounce_count=Count('pagevisit__visit')
        ).values('path', 'bounce_count')

        exits = self.visit_queryset().exclude(last_page=None).values(
            'last_page__path'
        ).annotate(Count('uuid'))

        tmp_data = OrderedDict()
        totals = defaultdict(int)
        pageview_idx = self._build_report_headers().index('Pageviews')
        uniqueview_idx = self._build_report_headers().index('Unique Pageviews')
        duration_idx = self._build_report_headers().index('Avg. Time on Page')
        sub_durations = defaultdict(int)
        for page in pages:
            path = page['page__path']
            duration = 0 if page['duration'] is None else page['duration']
            for display_path, regex in page_patterns.items():
                if regex.match(path):
                    path = display_path
                    sub_durations[path] += duration
                    break
            if path in tmp_data:
                tmp_data[path][pageview_idx] += page['pageviews']
                tmp_data[path][uniqueview_idx] += page['unique_pageviews']
            else:
                tmp_data[path] = [
                    path,
                    page['pageviews'],
                    page['unique_pageviews'],
                    utils.average_duration(duration, page['pageviews']),
                    0,  # entrances
                    0,  # bounce rate
                    0,  # % exit
                ]
            totals['pageviews'] += page['pageviews']
            totals['unique_pageviews'] += page['unique_pageviews']
            totals['duration'] += duration

        for path, total_duration in sub_durations.items():
            tmp_data[path][duration_idx] = utils.average_duration(
                total_duration, tmp_data[path][pageview_idx]
            )

        entrance_idx = self._build_report_headers().index('Entrances')
        for entrance in entrances:
            path = entrance['first_page__path']
            for name, regex in page_patterns.items():
                if regex.match(path):
                    path = name
                    break
            tmp_data[path][entrance_idx] += entrance['uuid__count']
            totals['entrances'] += entrance['uuid__count']

        bounce_idx = self._build_report_headers().index('Bounce Rate')
        sub_bounces = defaultdict(int)
        for bounce in bounces:
            path = bounce['path']
            for name, regex in page_patterns.items():
                if regex.match(path):
                    path = name
                    sub_bounces[path] += bounce['bounce_count']
                    break
            tmp_data[path][bounce_idx] = utils.percentage(
                bounce['bounce_count'], tmp_data[path][pageview_idx]
            )
            totals['bounces'] += bounce['bounce_count']
        for path, total_bounce in sub_bounces.items():
            tmp_data[path][bounce_idx] = utils.percentage(
                total_bounce, tmp_data[path][pageview_idx]
            )

        exit_idx = self._build_report_headers().index('% Exit')
        sub_exits = defaultdict(int)
        for exit_page in exits:
            path = exit_page['last_page__path']
            for name, regex in page_patterns.items():
                if regex.match(path):
                    path = name
                    sub_exits[path] += exit_page['uuid__count']
                    break
            if path in tmp_data:
                tmp_data[path][exit_idx] = utils.percentage(
                    exit_page['uuid__count'], tmp_data[path][pageview_idx]
                )
                totals['exits'] += exit_page['uuid__count']
        for path, total_exits in sub_exits.items():
            tmp_data[path][exit_idx] = utils.percentage(
                total_exits, tmp_data[path][pageview_idx]
            )

        total_row = [
            "Totals",
            totals['pageviews'],
            totals['unique_pageviews'],
            utils.average_duration(totals['duration'], totals['pageviews']),
            totals['entrances'],
            utils.percentage(totals['bounces'], totals['pageviews']),
            utils.percentage(totals['exits'], totals['pageviews']),
        ]
        return list(tmp_data.values()) + [total_row]