django.utils.timezone.now

Here are the examples of the python api django.utils.timezone.now taken from open source projects. By voting up you can indicate which examples are most useful and appropriate.

200 Examples

Example 1

Project: logtacts
Source File: send_contact_reminders.py
View license
    def handle(self, *args, **options):
        """Send the two kinds of reminder email.

        1. For every profile opted into contact reminders, pick one random
           contact not reached in the last four weeks and email a reminder.
        2. For every profile opted into birthday reminders, email a list of
           contacts whose birthday field matches today's date.

        Errors while sending are logged and reported to Slack (outside of
        DEBUG) but never abort the run for the remaining profiles.
        """
        logger.debug("Starting contact reminder sending")
        last_month = timezone.now() - timedelta(weeks=4)
        profiles_opted_in = Profile.objects.filter(send_contact_reminders=True)
        for profile in profiles_opted_in:
            logger.debug("Starting compilation for {}".format(profile.user))
            # One random contact that is overdue (or never contacted) and
            # flagged as surfaceable. Use .first() rather than [0] so a
            # profile with no eligible contacts is skipped instead of
            # raising IndexError and aborting the whole command.
            contact = Contact.objects.get_contacts_for_user(
                profile.user
            ).filter(
                Q(last_contact__lte=last_month) | Q(last_contact=None),
                should_surface=True,
            ).order_by('?').first()
            if contact is None:
                logger.debug("No eligible contacts for {}".format(profile.user))
                continue
            subject = '[Contact Otter] Contact reminder'
            context = {
                'contact': contact,
                'domain': Site.objects.get_current().domain,
            }
            txt = get_template('email/contact_reminder.txt').render(context)
            html = get_template('email/contact_reminder.html').render(context)
            message = EmailMultiAlternatives(
                subject=subject,
                body=txt,
                from_email="ContactOtter <[email protected]>",
                to=[profile.user.email],
            )
            message.attach_alternative(html, "text/html")
            try:
                logger.debug("Trying to send message to {} about {}".format(
                    profile.user, contact
                ))
                message.send()
                logger.debug("Sent message to {} successfuly".format(profile.user))
            # except Exception (not a bare except) so SystemExit and
            # KeyboardInterrupt still propagate.
            except Exception:
                logger.exception('Problem sending reminder for %s' % (profile))
                try:
                    if not settings.DEBUG:
                        payload = {
                            'text': 'Error in contactotter reminder: {}'.format(profile)
                        }
                        requests.post(
                            settings.SLACK_WEBHOOK_URL,
                            data=json.dumps(payload),
                        )
                except Exception:
                    logger.exception("Error sending error to slack")

        profiles_opted_in = Profile.objects.filter(send_birthday_reminders=True)
        for profile in profiles_opted_in:
            # Match stored date-type fields labelled "birthday" (any common
            # casing) against today's date string.
            birthdays = ContactField.objects.filter(
                Q(label='Birthday') | Q(label='birthday') | Q(label='BIRTHDAY'),
                kind=contact_constants.FIELD_TYPE_DATE,
                value=timezone.now().strftime("%Y-%m-%d")
            )
            contacts = None
            if birthdays:
                contacts = [birthday.contact for birthday in birthdays]
            if contacts:
                context = {
                    'contacts': contacts,
                    'domain': Site.objects.get_current().domain,
                }
                subject = "[ContactOtter] Birthday reminder"
                txt = get_template('email/birthday_reminder.txt').render(context)
                html = get_template('email/birthday_reminder.html').render(context)
                message = EmailMultiAlternatives(
                    subject=subject,
                    body=txt,
                    from_email='ContactOtter <[email protected]>',
                    to=[profile.user.email],
                )
                message.attach_alternative(html, "text/html")
                try:
                    # Log the birthday contacts for THIS loop; the original
                    # referenced the stale `contact` variable from the
                    # reminder loop above (NameError if that loop was empty).
                    logger.debug("Trying to send message to {} about {}".format(
                        profile.user, contacts
                    ))
                    message.send()
                    logger.debug("Sent message to {} successfuly".format(profile.user))
                except Exception:
                    logger.exception('Problem sending reminder for %s' % (profile))
                    try:
                        if not settings.DEBUG:
                            payload = {
                                'text': 'Error in logtacts reminder: {}'.format(profile)
                            }
                            requests.post(
                                settings.SLACK_WEBHOOK_URL,
                                data=json.dumps(payload),
                            )
                    except Exception:
                        logger.exception("Error sending error to slack")

Example 2

Project: pinax-stripe
Source File: test_event.py
View license
    @patch("pinax.stripe.actions.customers.sync_customer")
    @patch("stripe.Event.retrieve")
    @patch("stripe.Customer.retrieve")
    def test_customer_subscription_deleted(self, CustomerMock, EventMock, SyncMock):
        """
        Tests to make sure downstream signal handlers do not see stale Subscription object properties
        after a customer.subscription.deleted event occurs.  While the delete method is called
        on the affected Subscription object's properties are still accessible (unless the
        Customer object for the event gets refreshed before sending the complimentary signal)
        """
        ev = EventMock()
        # Minimal mocked Stripe customer: only the attributes the customer
        # sync path reads are set here.
        cm = CustomerMock()
        cm.currency = "usd"
        cm.delinquent = False
        cm.default_source = ""
        cm.account_balance = 0
        kind = "customer.subscription.deleted"
        plan = self.plan

        # Local subscription record that the webhook payload below refers to.
        cs = Subscription(stripe_id="su_2ZDdGxJ3EQQc7Q", customer=self.customer, quantity=1, start=timezone.now(), plan=plan)
        cs.save()
        customer = Customer.objects.get(pk=self.customer.pk)

        # Stripe objects will not have this attribute so we must delete it from the mocked object
        del customer.stripe_customer.subscription
        # Sanity check: the subscription exists locally before the event fires.
        self.assertIsNotNone(customer.subscription_set.all()[0])

        # This is the expected format of a customer.subscription.delete message
        msg = {
            "id": "evt_2eRjeAlnH1XMe8",
            "created": 1380317537,
            "livemode": True,
            "type": kind,
            "data": {
                "object": {
                    "id": "su_2ZDdGxJ3EQQc7Q",
                    "plan": {
                        "interval": "month",
                        "name": "xxx",
                        "amount": 200,
                        "currency": "usd",
                        "id": plan.stripe_id,
                        "object": "plan",
                        "livemode": True,
                        "interval_count": 1,
                        "trial_period_days": None
                    },
                    "object": "subscription",
                    "start": 1379111889,
                    "status": "canceled",
                    "customer": self.customer.stripe_id,
                    "cancel_at_period_end": False,
                    "current_period_start": 1378738246,
                    "current_period_end": 1381330246,
                    "ended_at": 1380317537,
                    "trial_start": None,
                    "trial_end": None,
                    "canceled_at": 1380317537,
                    "quantity": 1,
                    "application_fee_percent": None
                }
            },
            "object": "event",
            "pending_webhooks": 1,
            "request": "iar_2eRjQZmn0i3G9M"
        }
        ev.to_dict.return_value = msg

        # Create a test event for the message
        test_event = Event.objects.create(
            stripe_id=msg["id"],
            kind=kind,
            livemode=msg["livemode"],
            webhook_message=msg,
            validated_message=msg,
            valid=True,
            customer=customer,
        )

        # Processing the event should re-sync the customer so downstream
        # signal handlers see fresh (not stale) subscription state.
        registry.get(test_event.kind)(test_event).process()
        self.assertTrue(SyncMock.called)

Example 3

Project: GAE-Bulk-Mailer
Source File: dates.py
View license
def _get_next_prev(generic_view, date, is_previous, period):
    """
    Return the next or previous valid date for a date-based generic view,
    so month/day navigation links never point at a 404.

    The behaviour depends on two view flags:

        * allow_empty and allow_future both true: return the naive
          next/previous interval, regardless of whether objects exist.

        * allow_empty true, allow_future false: return the naive result
          only if it is not in the future; otherwise None.

        * allow_empty false, allow_future true: return the next date that
          actually contains an object, even a future one; None if there
          are no such objects.

        * allow_empty false, allow_future false: return the next date that
          contains an object, unless that date is in the future or no such
          object exists — then None.
    """
    field = generic_view.get_date_field()
    allow_empty = generic_view.get_allow_empty()
    allow_future = generic_view.get_allow_future()

    # Period-specific helpers, e.g. _get_current_month / _get_next_month.
    current_of = getattr(generic_view, '_get_current_%s' % period)
    next_of = getattr(generic_view, '_get_next_%s' % period)

    # Bounds of the interval containing `date`.
    start, end = current_of(date), next_of(date)

    # With allow_empty the naive neighbouring interval is always valid,
    # subject only to the future check.
    if allow_empty:
        naive = current_of(start - datetime.timedelta(days=1)) if is_previous else end
        return naive if allow_future or naive <= timezone_today() else None

    # Otherwise we must query for an object on the far side of the interval.
    if is_previous:
        lookup = {'%s__lt' % field: generic_view._make_date_lookup_arg(start)}
        ordering = '-%s' % field
    else:
        lookup = {'%s__gte' % field: generic_view._make_date_lookup_arg(end)}
        ordering = field

    if not allow_future:
        # __lte matches allow_future's own implementation and cannot
        # conflict with the __lt key added above.
        reference = timezone.now() if generic_view.uses_datetime_field else timezone_today()
        lookup['%s__lte' % field] = reference

    qs = generic_view.get_queryset().filter(**lookup).order_by(ordering)

    # No matching object means there is no next/previous link at all.
    try:
        found = getattr(qs[0], field)
    except IndexError:
        return None

    # Datetimes are converted to dates in the current time zone.
    if generic_view.uses_datetime_field:
        if settings.USE_TZ:
            found = timezone.localtime(found)
        found = found.date()

    # Normalise to the first day of the period.
    return current_of(found)

Example 4

Project: djangae
Source File: backends.py
View license
    def authenticate(self, google_user=None):
        """
        Handles authentication of a user from the given credentials.
        Credentials must be a 'google_user' as returned by the App Engine
        Users API.

        Returns the matching (possibly newly created) User, or None when
        no credentials were supplied or auto-creation is disallowed.
        """
        if google_user is None:
            return None

        User = get_user_model()

        if not issubclass(User, GaeAbstractBaseUser):
            raise ImproperlyConfigured(
                "AppEngineUserAPIBackend requires AUTH_USER_MODEL to be a "
                " subclass of djangae.contrib.auth.base.GaeAbstractBaseUser."
            )

        user_id = google_user.user_id()
        email = BaseUserManager.normalize_email(google_user.email())  # Normalizes the domain only.

        try:
            # User exists and we can bail immediately. With one caveat.
            user = User.objects.get(username=user_id)
            # Ensure that the user has got both the `email` and `email_lower` fields populated
            if not user.email_lower:
                # The user existed before the introduction of the `email_lower` field. Update it.
                user.email = email
                # The save will also update the email_lower field
                user.save(update_fields=['email', 'email_lower'])
            return user
        except User.DoesNotExist:
            pass

        auto_create = should_create_unknown_user()
        user_is_admin = users.is_current_user_admin()

        try:
            # Users that existed before the introduction of the `email_lower` field will not have
            # that field, but will mostly likely have a lower case email address because we used to
            # lower case the email field
            existing_user = User.objects.get(Q(email_lower=email.lower()) | Q(email=email.lower()))
        except User.DoesNotExist:
            if not (auto_create or user_is_admin):
                # User doesn't exist and we aren't going to create one.
                return None

            existing_user = None

        # OK. We will grant access. We may need to update an existing user, or
        # create a new one, or both.

        # Those 3 scenarios are:
        # 1. A User object has been created for this user, but that they have not logged in yet.
        # In this case we fetch the User object by email, and then update it with the Google User ID
        # 2. A User object exists for this email address but belonging to a different Google account.
        # This generally only happens when the email address of a Google Apps account has been
        # signed up as a Google account and then the apps account itself has actually become a
        # Google account. This is possible but very unlikely.
        # 3. There is no User object relating to this user whatsoever.

        if existing_user:
            if existing_user.username is None:
                # Scenario 1: claim the pre-created account for this Google user.
                existing_user.username = user_id
                existing_user.email = email
                existing_user.last_login = timezone.now()
                existing_user.save()

                return existing_user
            else:
                # Scenario 2: free the email on the old account, then create a new user.
                with self.atomic(**self.atomic_kwargs):
                    existing_user = User.objects.get(pk=existing_user.pk)
                    existing_user.email = ""
                    existing_user.save()

                    return User.objects.create_user(user_id, email=email)
        else:
            # Scenario 3: create a new user, but account for another thread having created it
            # already in a race condition scenario. Our logic cannot be in a transaction,
            # so we have to just catch this.
            try:
                return User.objects.create_user(user_id, email=email)
            except IntegrityError:
                return User.objects.get(username=user_id)

Example 5

Project: molo
Source File: test_models.py
View license
    def test_topic_of_the_day(self):
        """Exercise Topic-of-the-Day validation on the article edit view.

        Posts the edit form three times: with no promote/demote dates,
        with a promote date in the past, and with a demote date before the
        promote date — each should be rejected with a validation message.
        """
        User.objects.create_superuser(
            username='testuser', password='password', email='[email protected]')
        self.client.login(username='testuser', password='password')

        # create a new article and go to it's edit page
        new_section = self.mk_section(
            self.section_index, title="New Section", slug="new-section")
        new_article = self.mk_article(new_section, title="New article",)
        response = self.client.get(
            reverse('wagtailadmin_pages:edit', args=(new_article.id,)))
        self.assertEqual(response.status_code, 200)

        # Marking article as Topic of the day with no promote date
        # or demote date raises error
        post_data = {
            "feature_as_topic_of_the_day": True,
            'title': 'this is a test article',
            'slug': 'this-is-a-test-article',
            'related_sections-INITIAL_FORMS': 0,
            'related_sections-MAX_NUM_FORMS': 1000,
            'related_sections-MIN_NUM_FORMS': 0,
            'related_sections-TOTAL_FORMS': 0,
            'body-count': 1,
            'body-0-value': 'Hello',
            'body-0-deleted': False,
            'body-0-order': 1,
            'body-0-type': 'paragraph',
            'metadata_tags': 'love, happiness',
            'action-publish': 'Publish'
        }
        self.client.post(
            reverse('wagtailadmin_pages:edit', args=(new_article.id,)),
            post_data
        )
        # NOTE(review): assertRaisesMessage called with only (exception,
        # message) returns a context manager that is never entered, so this
        # (and the two calls below) asserts nothing — the client.post above
        # will not propagate ValidationError either. Confirm intent; the
        # form errors on the response should probably be checked instead.
        self.assertRaisesMessage(
            ValidationError,
            "Please specify the date and time that you would like this "
            "article to appear as the Topic of the Day."
        )

        # Raises error if promote_date is in the past
        post_data.update({
            "promote_date": timezone.now() + timedelta(days=-1),
        })
        self.client.post(
            reverse('wagtailadmin_pages:edit', args=(new_article.id,)),
            post_data
        )
        self.assertRaisesMessage(
            ValidationError,
            "Please select the present date, or a future date."
        )

        # Raise error is demote date is before
        # promote date
        post_data.update({
            "promote_date": timezone.now(),
            "demote_date": timezone.now() + timedelta(days=-1)
        })
        self.client.post(
            reverse('wagtailadmin_pages:edit', args=(new_article.id,)),
            post_data
        )
        self.assertRaisesMessage(
            ValidationError,
            "The article cannot be demoted before it has been promoted."
        )

Example 6

Project: pretix
Source File: __init__.py
View license
    def get_cart(self, answers=False, queryset=None, payment_fee=None, payment_fee_tax_rate=None, downloads=False):
        """Build a summary dict of the current cart.

        :param answers: prefetch question/answer data and keep positions
            with questions as separate line items.
        :param queryset: optional positions queryset; falls back to
            ``self.positions`` when not given.
        :param payment_fee: override for the payment fee (computed from the
            total when None).
        :param payment_fee_tax_rate: override for the fee tax rate (event
            default when None).
        :param downloads: keep every position separate (one download each).
        :returns: dict with positions, raw queryset, totals, fee data,
            ``minutes_left`` and ``first_expiry``.
        """
        if queryset:
            prefetch = []
            if answers:
                prefetch.append('item__questions')
                prefetch.append('answers')

            cartpos = queryset.order_by(
                'item', 'variation'
            ).select_related(
                'item', 'variation'
            ).prefetch_related(
                *prefetch
            )
        else:
            cartpos = self.positions

        # Group items of the same variation
        # We do this by list manipulations instead of a GROUP BY query, as
        # Django is unable to join related models in a .values() query
        def keyfunc(pos):
            # A unique key (pos.id) keeps the position as its own line item;
            # a shared key lets identical variations collapse into one row.
            if downloads:
                return pos.id, 0, 0, 0, 0
            if answers and ((pos.item.admission and self.request.event.settings.attendee_names_asked)
                            or pos.item.questions.all()):
                return pos.id, 0, 0, 0, 0
            return 0, pos.item_id, pos.variation_id, pos.price, (pos.voucher_id or 0)

        positions = []
        for k, g in groupby(sorted(list(cartpos), key=keyfunc), key=keyfunc):
            g = list(g)
            group = g[0]
            group.count = len(g)
            group.total = group.count * group.price
            group.has_questions = answers and k[0] != ""
            if answers:
                group.cache_answers()
            positions.append(group)

        total = sum(p.total for p in positions)

        payment_fee = payment_fee if payment_fee is not None else self.get_payment_fee(total)
        payment_fee_tax_rate = payment_fee_tax_rate if payment_fee_tax_rate is not None else self.request.event.settings.tax_rate_default

        try:
            first_expiry = min(p.expires for p in positions) if positions else now()
            # BUG FIX: timedelta.seconds discards whole days, so a cart
            # expiring more than 24h from now wrapped around to a small
            # value. total_seconds() accounts for the full delta.
            minutes_left = int(max(first_expiry - now(), timedelta()).total_seconds()) // 60
        except AttributeError:
            # Positions without an `expires` attribute (e.g. order positions)
            # have no countdown.
            first_expiry = None
            minutes_left = None

        return {
            'positions': positions,
            'raw': cartpos,
            'total': total + payment_fee,
            'payment_fee': payment_fee,
            'payment_fee_tax_rate': payment_fee_tax_rate,
            'answers': answers,
            'minutes_left': minutes_left,
            'first_expiry': first_expiry
        }

Example 7

Project: PyClassLessons
Source File: dates.py
View license
def _get_next_prev(generic_view, date, is_previous, period):
    """
    Helper: Get the next or the previous valid date. The idea is to allow
    links on month/day views to never be 404s by never providing a date
    that'll be invalid for the given view.

    This is a bit complicated since it handles different intervals of time,
    hence the coupling to generic_view.

    However in essence the logic comes down to:

        * If allow_empty and allow_future are both true, this is easy: just
          return the naive result (just the next/previous day/week/month,
          regardless of object existence.)

        * If allow_empty is true, allow_future is false, and the naive result
          isn't in the future, then return it; otherwise return None.

        * If allow_empty is false and allow_future is true, return the next
          date *that contains a valid object*, even if it's in the future. If
          there are no next objects, return None.

        * If allow_empty is false and allow_future is false, return the next
          date that contains a valid object. If that date is in the future, or
          if there are no next objects, return None.

    """
    date_field = generic_view.get_date_field()
    allow_empty = generic_view.get_allow_empty()
    allow_future = generic_view.get_allow_future()

    # Period-specific helpers, e.g. _get_current_month / _get_next_month.
    get_current = getattr(generic_view, '_get_current_%s' % period)
    get_next = getattr(generic_view, '_get_next_%s' % period)

    # Bounds of the current interval
    start, end = get_current(date), get_next(date)

    # If allow_empty is True, the naive result will be valid
    if allow_empty:
        if is_previous:
            # One day before `start` lands inside the previous interval.
            result = get_current(start - datetime.timedelta(days=1))
        else:
            result = end

        if allow_future or result <= timezone_today():
            return result
        else:
            return None

    # Otherwise, we'll need to go to the database to look for an object
    # whose date_field is at least (greater than/less than) the given
    # naive result
    else:
        # Construct a lookup and an ordering depending on whether we're doing
        # a previous date or a next date lookup.
        if is_previous:
            lookup = {'%s__lt' % date_field: generic_view._make_date_lookup_arg(start)}
            ordering = '-%s' % date_field
        else:
            lookup = {'%s__gte' % date_field: generic_view._make_date_lookup_arg(end)}
            ordering = date_field

        # Filter out objects in the future if appropriate.
        if not allow_future:
            # Fortunately, to match the implementation of allow_future,
            # we need __lte, which doesn't conflict with __lt above.
            if generic_view.uses_datetime_field:
                now = timezone.now()
            else:
                now = timezone_today()
            lookup['%s__lte' % date_field] = now

        qs = generic_view.get_queryset().filter(**lookup).order_by(ordering)

        # Snag the first object from the queryset; if it doesn't exist that
        # means there's no next/previous link available.
        try:
            result = getattr(qs[0], date_field)
        except IndexError:
            return None

        # Convert datetimes to dates in the current time zone.
        if generic_view.uses_datetime_field:
            if settings.USE_TZ:
                result = timezone.localtime(result)
            result = result.date()

        # Return the first day of the period.
        return get_current(result)

Example 8

Project: pythondotorg
Source File: base.py
View license
    def setUp(self):
        """Create a fixture set of releases covering every visibility case:
        published Python 2/3 releases with files, plus draft, hidden and
        pre-release entries that download views must filter appropriately.
        """
        self.release_275_page = Page.objects.create(
            title='Python 2.7.5',
            path='download/releases/2.7.5',
            content='whatever',
            is_published=True,
        )
        # NOTE(review): subtracting timedelta(days=-1) ADDS a day, i.e. this
        # release_date is tomorrow, not yesterday — confirm whether a past
        # date (timedelta(days=1)) was intended.
        self.release_275 = Release.objects.create(
            version=Release.PYTHON2,
            name='Python 2.7.5',
            is_latest=True,
            is_published=True,
            release_page=self.release_275_page,
            release_date=timezone.now() - datetime.timedelta(days=-1)
        )
        self.release_275_windows_32bit = ReleaseFile.objects.create(
            os=self.windows,
            release=self.release_275,
            name='Windows x86 MSI Installer (2.7.5)',
            description='Windows binary -- does not include source',
            url='ftp/python/2.7.5/python-2.7.5.msi',
        )
        self.release_275_windows_64bit = ReleaseFile.objects.create(
            os=self.windows,
            release=self.release_275,
            name='Windows X86-64 MSI Installer (2.7.5)',
            description='Windows AMD64 / Intel 64 / X86-64 binary -- does not include source',
            url='ftp/python/2.7.5/python-2.7.5.amd64.msi'
        )

        self.release_275_osx = ReleaseFile.objects.create(
            os=self.osx,
            release=self.release_275,
            name='Mac OSX 64-bit/32-bit',
            description='Mac OS X 10.6 and later',
            url='ftp/python/2.7.5/python-2.7.5-macosx10.6.dmg',
        )

        self.release_275_linux = ReleaseFile.objects.create(
            os=self.linux,
            release=self.release_275,
            is_source=True,
            description='Gzipped source',
            url='ftp/python/2.7.5/Python-2.7.5.tgz',
        )

        # Unpublished draft: must never appear on download pages.
        self.draft_release = Release.objects.create(
            version=Release.PYTHON3,
            name='Python 9.7.2',
            is_published=False,
            release_page=self.release_275_page,
        )
        # Published but explicitly hidden from the download page.
        self.hidden_release = Release.objects.create(
            version=Release.PYTHON3,
            name='Python 0.0.0',
            is_published=True,
            show_on_download_page=False,
            release_page=self.release_275_page,
        )

        # Visible pre-release entry.
        self.pre_release = Release.objects.create(
            version=Release.PYTHON3,
            name='Python 3.9.90',
            is_published=True,
            pre_release=True,
            show_on_download_page=True,
            release_page=self.release_275_page,
        )

        # Latest published Python 3 release.
        self.python_3 = Release.objects.create(
            version=Release.PYTHON3,
            name='Python 3.10',
            is_latest=True,
            is_published=True,
            show_on_download_page=True,
            release_page=self.release_275_page
        )

Example 9

Project: Misago
Source File: createfakethreads.py
View license
    def handle(self, *args, **options):
        """Populate the forum with fake threads and replies for testing.

        Takes one optional positional argument: the number of threads to
        create (default 5). Each thread gets a random category, starter,
        moderation flags and a randomised number of replies; afterwards a
        small fraction of threads is pinned and category counters are
        re-synchronised.
        """
        try:
            fake_threads_to_create = int(args[0])
        except IndexError:
            # No count given: fall back to the default.
            fake_threads_to_create = 5
        except ValueError:
            self.stderr.write("\nOptional argument should be integer.")
            sys.exit(1)

        categories = list(Category.objects.all_categories())

        # Faker instance used for titles, messages and IPs.
        fake = Factory.create()

        User = get_user_model()
        total_users = User.objects.count()

        self.stdout.write('Creating fake threads...\n')

        message = '\nSuccessfully created %s fake threads in %s'

        created_threads = 0
        start_time = time.time()
        show_progress(self, created_threads, fake_threads_to_create)
        for i in range(fake_threads_to_create):
            # One transaction per thread so a failure doesn't lose prior work.
            with atomic():
                # NOTE(review): this local shadows the stdlib `datetime`
                # module name for the rest of the loop body.
                datetime = timezone.now()
                category = random.choice(categories)
                # Random user via unordered slice; assumes at least one user exists.
                user = User.objects.order_by('?')[:1][0]

                # ~10% of threads get each moderation flag.
                thread_is_unapproved = random.randint(0, 100) > 90
                thread_is_hidden = random.randint(0, 100) > 90
                thread_is_closed = random.randint(0, 100) > 90

                # Placeholder starter/poster names are corrected by
                # thread.synchronize() once posts exist.
                thread = Thread(
                    category=category,
                    started_on=datetime,
                    starter_name='-',
                    starter_slug='-',
                    last_post_on=datetime,
                    last_poster_name='-',
                    last_poster_slug='-',
                    replies=0,
                    is_unapproved=thread_is_unapproved,
                    is_hidden=thread_is_hidden,
                    is_closed=thread_is_closed
                )
                thread.set_title(fake.sentence())
                thread.save()

                fake_message = "\n\n".join(fake.paragraphs())
                post = Post.objects.create(
                    category=category,
                    thread=thread,
                    poster=user,
                    poster_name=user.username,
                    poster_ip=fake.ipv4(),
                    original=fake_message,
                    parsed=linebreaks_filter(fake_message),
                    posted_on=datetime,
                    updated_on=datetime
                )
                update_post_checksum(post)
                post.save(update_fields=['checksum'])

                thread.set_first_post(post)
                thread.set_last_post(post)
                thread.save()

                user.threads += 1
                user.posts += 1
                user.save()

                # Skewed reply-count distribution: a few huge threads,
                # many medium ones, the rest small.
                thread_type = random.randint(0, 100)
                if thread_type > 95:
                    thread_replies = random.randint(200, 2500)
                elif thread_type > 50:
                    thread_replies = random.randint(5, 30)
                else:
                    thread_replies = random.randint(0, 10)

                for x in range(thread_replies):
                    datetime = timezone.now()
                    user = User.objects.order_by('?')[:1][0]
                    fake_message = "\n\n".join(fake.paragraphs())

                    # ~3% unapproved; hidden only applies to approved posts.
                    is_unapproved = random.randint(0, 100) > 97
                    if not is_unapproved:
                        is_hidden = random.randint(0, 100) > 97
                    else:
                        is_hidden = False

                    post = Post.objects.create(
                        category=category,
                        thread=thread,
                        poster=user,
                        poster_name=user.username,
                        poster_ip=fake.ipv4(),
                        original=fake_message,
                        parsed=linebreaks_filter(fake_message),
                        is_hidden=is_hidden,
                        is_unapproved=is_unapproved,
                        posted_on=datetime,
                        updated_on=datetime
                    )
                    update_post_checksum(post)
                    post.save(update_fields=['checksum'])

                    user.posts += 1
                    user.save()

                # Fix up counters and poster names derived from the posts.
                thread.synchronize()
                thread.save()

                created_threads += 1
                show_progress(
                    self, created_threads, fake_threads_to_create, start_time)

        # Pin roughly 2.5% of the created threads (at least one).
        pinned_threads = random.randint(0, int(created_threads * 0.025)) or 1
        self.stdout.write('\nPinning %s threads...' % pinned_threads)
        for i in range(0, pinned_threads):
            thread = Thread.objects.order_by('?')[:1][0]
            if random.randint(0, 100) > 75:
                thread.weight = 2
            else:
                thread.weight = 1
            thread.save()

        for category in categories:
            category.synchronize()
            category.save()

        total_time = time.time() - start_time
        total_humanized = time.strftime('%H:%M:%S', time.gmtime(total_time))
        self.stdout.write(message % (created_threads, total_humanized))

Example 10

Project: Misago
Source File: test_thread_model.py
View license
    def test_synchronize(self):
        """synchronize() updates a thread's denormalized fields from its posts.

        Walks through a sequence of post states (normal, unapproved, hidden,
        event) and, after each change, checks that synchronize() refreshes
        last_post/last_poster data, the has_* flags and the reply count.
        Relies on fixtures: self.thread and self.category must already exist.
        """
        User = get_user_model()
        user = User.objects.create_user("Bob", "[email protected]", "Pass.123")

        # fresh thread: no replies counted yet
        self.assertEqual(self.thread.replies, 0)

        # NOTE(review): local name shadows the datetime module; dated in the
        # future so these posts sort after the thread's starter post
        datetime = timezone.now() + timedelta(5)
        post = Post.objects.create(
            category=self.category,
            thread=self.thread,
            poster=user,
            poster_name=user.username,
            poster_ip='127.0.0.1',
            original="Hello! I am test message!",
            parsed="<p>Hello! I am test message!</p>",
            checksum="nope",
            posted_on=datetime,
            updated_on=datetime
        )

        # first sync call, updates last post data on the thread
        self.thread.synchronize()

        self.assertEqual(self.thread.last_post, post)
        self.assertEqual(self.thread.last_post_on, post.posted_on)
        self.assertEqual(self.thread.last_poster, user)
        self.assertEqual(self.thread.last_poster_name, user.username)
        self.assertEqual(self.thread.last_poster_slug, user.slug)
        self.assertFalse(self.thread.has_reported_posts)
        self.assertFalse(self.thread.has_unapproved_posts)
        self.assertFalse(self.thread.has_hidden_posts)
        self.assertEqual(self.thread.replies, 1)

        # add unapproved post: sets the flag, but must not become last post
        # nor bump the reply count
        unapproved_post = Post.objects.create(
            category=self.category,
            thread=self.thread,
            poster=user,
            poster_name=user.username,
            poster_ip='127.0.0.1',
            original="Hello! I am test message!",
            parsed="<p>Hello! I am test message!</p>",
            checksum="nope",
            posted_on=datetime + timedelta(5),
            updated_on=datetime + timedelta(5),
            is_unapproved=True
        )

        self.thread.synchronize()
        self.assertEqual(self.thread.last_post, post)
        self.assertEqual(self.thread.last_post_on, post.posted_on)
        self.assertEqual(self.thread.last_poster, user)
        self.assertEqual(self.thread.last_poster_name, user.username)
        self.assertEqual(self.thread.last_poster_slug, user.slug)
        self.assertFalse(self.thread.has_reported_posts)
        self.assertTrue(self.thread.has_unapproved_posts)
        self.assertFalse(self.thread.has_hidden_posts)
        self.assertEqual(self.thread.replies, 1)

        # add hidden post: flagged, counted as a reply, and (unlike an
        # unapproved post) eligible to be the thread's last post
        hidden_post = Post.objects.create(
            category=self.category,
            thread=self.thread,
            poster=user,
            poster_name=user.username,
            poster_ip='127.0.0.1',
            original="Hello! I am test message!",
            parsed="<p>Hello! I am test message!</p>",
            checksum="nope",
            posted_on=datetime + timedelta(10),
            updated_on=datetime + timedelta(10),
            is_hidden=True
        )

        self.thread.synchronize()
        self.assertEqual(self.thread.last_post, hidden_post)
        self.assertEqual(self.thread.last_post_on, hidden_post.posted_on)
        self.assertEqual(self.thread.last_poster, user)
        self.assertEqual(self.thread.last_poster_name, user.username)
        self.assertEqual(self.thread.last_poster_slug, user.slug)
        self.assertFalse(self.thread.has_reported_posts)
        self.assertTrue(self.thread.has_unapproved_posts)
        self.assertTrue(self.thread.has_hidden_posts)
        self.assertEqual(self.thread.replies, 2)

        # unhide post
        hidden_post.is_hidden = False
        hidden_post.save()

        # last post stays the (now visible) post; the hidden flag clears
        self.thread.synchronize()
        self.assertEqual(self.thread.last_post, hidden_post)
        self.assertEqual(self.thread.last_post_on, hidden_post.posted_on)
        self.assertEqual(self.thread.last_poster, user)
        self.assertEqual(self.thread.last_poster_name, user.username)
        self.assertEqual(self.thread.last_poster_slug, user.slug)
        self.assertFalse(self.thread.has_reported_posts)
        self.assertTrue(self.thread.has_unapproved_posts)
        self.assertFalse(self.thread.has_hidden_posts)
        self.assertEqual(self.thread.replies, 2)

        # approve the moderated post
        unapproved_post.is_unapproved = False
        unapproved_post.save()

        # last post not changed, but flags and count did
        self.thread.synchronize()
        self.assertEqual(self.thread.last_post, hidden_post)
        self.assertEqual(self.thread.last_post_on, hidden_post.posted_on)
        self.assertEqual(self.thread.last_poster, user)
        self.assertEqual(self.thread.last_poster_name, user.username)
        self.assertEqual(self.thread.last_poster_slug, user.slug)
        self.assertFalse(self.thread.has_reported_posts)
        self.assertFalse(self.thread.has_unapproved_posts)
        self.assertFalse(self.thread.has_hidden_posts)
        self.assertEqual(self.thread.replies, 3)

        # add event post
        # NOTE(review): reuses the hidden_post name for the event post
        hidden_post = Post.objects.create(
            category=self.category,
            thread=self.thread,
            poster=user,
            poster_name=user.username,
            poster_ip='127.0.0.1',
            original="-",
            parsed="-",
            checksum="nope",
            posted_on=datetime + timedelta(10),
            updated_on=datetime + timedelta(10),
            is_event=True
        )

        # events become the last post but don't count towards reply count
        self.thread.synchronize()
        self.assertEqual(self.thread.last_post, hidden_post)
        self.assertEqual(self.thread.last_post_on, hidden_post.posted_on)
        self.assertEqual(self.thread.last_poster, user)
        self.assertEqual(self.thread.last_poster_name, user.username)
        self.assertEqual(self.thread.last_poster_slug, user.slug)
        self.assertFalse(self.thread.has_reported_posts)
        self.assertFalse(self.thread.has_unapproved_posts)
        self.assertFalse(self.thread.has_hidden_posts)
        self.assertEqual(self.thread.replies, 3)

Example 11

Project: rapidpro
Source File: models.py
View license
    @classmethod
    def trigger_flow_event(cls, webhook_url, flow, run, node_uuid, contact, event, action='POST', resthook=None):
        """Synchronously fire a flow webhook and persist its outcome.

        Builds a payload describing the contact, the run's collected values
        and visited steps, sends it to ``webhook_url`` with the given HTTP
        ``action`` (default POST), and always records both a ``WebHookEvent``
        and a ``WebHookResult`` — whether the call succeeded, failed, or was
        skipped (no url / SEND_WEBHOOKS disabled).

        Returns the created ``WebHookResult``.
        """
        org = flow.org
        api_user = get_api_user()
        json_time = datetime_to_str(timezone.now())

        # get the results for this contact
        results = flow.get_results(contact)
        values = []

        if results and results[0]:
            values = results[0]['values']
            # stringify datetimes/values in place so the payload is
            # JSON-serializable
            for value in values:
                value['time'] = datetime_to_str(value['time'])
                value['value'] = unicode(value['value'])

        # if the action is on the first node
        # we might not have an sms (or channel) yet
        channel = None
        text = None
        contact_urn = contact.get_urn()

        if event:
            text = event.text
            channel = event.channel
            contact_urn = event.contact_urn

        if channel:
            channel_id = channel.pk
        else:
            # -1 is the "no channel" sentinel sent in the payload
            channel_id = -1

        # serialize the run's step history, oldest first
        steps = []
        for step in run.steps.prefetch_related('messages', 'broadcasts').order_by('arrived_on'):
            steps.append(dict(type=step.step_type,
                              node=step.step_uuid,
                              arrived_on=datetime_to_str(step.arrived_on),
                              left_on=datetime_to_str(step.left_on),
                              text=step.get_text(),
                              value=step.rule_value))

        data = dict(channel=channel_id,
                    relayer=channel_id,
                    flow=flow.id,
                    flow_name=flow.name,
                    flow_base_language=flow.base_language,
                    run=run.id,
                    text=text,
                    step=unicode(node_uuid),
                    phone=contact.get_urn_display(org=org, scheme=TEL_SCHEME, formatted=False),
                    contact=contact.uuid,
                    urn=unicode(contact_urn),
                    values=json.dumps(values),
                    steps=json.dumps(steps),
                    time=json_time)

        # guard against callers passing action=None explicitly
        if not action:
            action = 'POST'

        # record the attempt up front; status is filled in below
        webhook_event = WebHookEvent.objects.create(org=org,
                                                    event=FLOW,
                                                    channel=channel,
                                                    data=json.dumps(data),
                                                    try_count=1,
                                                    action=action,
                                                    resthook=resthook,
                                                    created_by=api_user,
                                                    modified_by=api_user)

        status_code = -1
        message = "None"
        body = None

        # webhook events fire immediately since we need the results back
        try:
            # only send webhooks when we are configured to, otherwise fail
            if not settings.SEND_WEBHOOKS:
                raise Exception("!! Skipping WebHook send, SEND_WEBHOOKS set to False")

            # no url, bail!
            if not webhook_url:
                raise Exception("No webhook_url specified, skipping send")

            # some hosts deny generic user agents, use Temba as our user agent
            if action == 'GET':
                response = requests.get(webhook_url, headers=TEMBA_HEADERS, timeout=10)
            else:
                response = requests.post(webhook_url, data=data, headers=TEMBA_HEADERS, timeout=10)

            # NOTE(review): response.text is read twice here; the second read
            # could reuse response_text
            response_text = response.text
            body = response.text
            status_code = response.status_code

            if response.status_code == 200 or response.status_code == 201:
                try:
                    response_json = json.loads(response_text)

                    # only update if we got a valid JSON dictionary or list
                    if not isinstance(response_json, dict) and not isinstance(response_json, list):
                        raise ValueError("Response must be a JSON dictionary or list, ignoring response.")

                    run.update_fields(response_json)
                    message = "Webhook called successfully."
                except ValueError as e:
                    # non-JSON 200s are still a successful delivery
                    message = "Response must be a JSON dictionary, ignoring response."

                webhook_event.status = COMPLETE
            else:
                webhook_event.status = FAILED
                message = "Got non 200 response (%d) from webhook." % response.status_code
                raise Exception("Got non 200 response (%d) from webhook." % response.status_code)

        except Exception as e:
            # broad by design: any failure above (including the deliberate
            # skip/no-url raises) marks the event FAILED
            import traceback
            traceback.print_exc()

            webhook_event.status = FAILED
            message = "Error calling webhook: %s" % unicode(e)

        finally:
            # always persist the event and a result row, even on failure
            webhook_event.save()

            # make sure our message isn't too long
            if message:
                message = message[:255]

            result = WebHookResult.objects.create(event=webhook_event,
                                                  url=webhook_url,
                                                  status_code=status_code,
                                                  body=body,
                                                  message=message,
                                                  data=urlencode(data, doseq=True),
                                                  created_by=api_user,
                                                  modified_by=api_user)

            # if this is a test contact, add an entry to our action log
            if run.contact.is_test:
                from temba.flows.models import ActionLog
                log_txt = "Triggered <a href='%s' target='_log'>webhook event</a> - %d" % (reverse('api.log_read', args=[webhook_event.pk]), status_code)
                ActionLog.create(run, log_txt, safe=True)

        return result

Example 12

Project: rapidpro
Source File: test_models.py
View license
    def test_event_deliveries(self):
        """End-to-end test of SMS webhook event delivery and retry handling.

        Patches requests.Session.send and walks through: no webhook
        configured, events disabled, no active org users, malformed and valid
        JSON responses, 500 errors with retry scheduling, and custom webhook
        headers — asserting the WebHookEvent/WebHookResult state after each.
        """
        sms = self.create_msg(contact=self.joe, direction='I', status='H', text="I'm gonna pop some tags")

        with patch('requests.Session.send') as mock:
            now = timezone.now()
            mock.return_value = MockResponse(200, "Hello World")

            # trigger an event, shouldn't fire as we don't have a webhook
            WebHookEvent.trigger_sms_event(SMS_RECEIVED, sms, now)
            self.assertFalse(WebHookEvent.objects.all())

        self.setupChannel()

        with patch('requests.Session.send') as mock:
            # clear out which events we listen for, we still shouldn't be notified though we have a webhook
            self.channel.org.webhook_events = 0
            self.channel.org.save()

            now = timezone.now()
            mock.return_value = MockResponse(200, "Hello World")

            # trigger an event, shouldn't fire as this event type is disabled
            WebHookEvent.trigger_sms_event(SMS_RECEIVED, sms, now)
            self.assertFalse(WebHookEvent.objects.all())

        self.setupChannel()

        with patch('requests.Session.send') as mock:
            # remove all the org users
            self.org.administrators.clear()
            self.org.editors.clear()
            self.org.viewers.clear()

            mock.return_value = MockResponse(200, "Hello World")

            # trigger an event: with no active user the event fails without
            # any HTTP call being made
            WebHookEvent.trigger_sms_event(SMS_RECEIVED, sms, now)
            event = WebHookEvent.objects.get()

            self.assertEquals('F', event.status)
            self.assertEquals(0, event.try_count)
            self.assertFalse(event.next_attempt)

            result = WebHookResult.objects.get()
            self.assertStringContains("No active user", result.message)
            self.assertEquals(0, result.status_code)

            self.assertFalse(mock.called)

            # what if they send weird json back?
            WebHookEvent.objects.all().delete()
            WebHookResult.objects.all().delete()

        # add an admin back in
        self.org.administrators.add(self.admin)
        self.admin.set_org(self.org)

        with patch('requests.Session.send') as mock:
            mock.return_value = MockResponse(200, "Hello World")

            # trigger an event: non-JSON 200 body still counts as delivered
            WebHookEvent.trigger_sms_event(SMS_RECEIVED, sms, now)
            event = WebHookEvent.objects.get()

            self.assertEquals('C', event.status)
            self.assertEquals(1, event.try_count)
            self.assertFalse(event.next_attempt)

            result = WebHookResult.objects.get()
            self.assertStringContains("Event delivered successfully", result.message)
            self.assertStringContains("not JSON", result.message)
            self.assertEquals(200, result.status_code)

            self.assertTrue(mock.called)

            WebHookEvent.objects.all().delete()
            WebHookResult.objects.all().delete()

        with patch('requests.Session.send') as mock:
            # valid json, but not our format: delivered but ignored
            bad_json = '{ "thrift_shops": ["Goodwill", "Value Village"] }'
            mock.return_value = MockResponse(200, bad_json)

            WebHookEvent.trigger_sms_event(SMS_RECEIVED, sms, now)
            event = WebHookEvent.objects.get()

            self.assertEquals('C', event.status)
            self.assertEquals(1, event.try_count)
            self.assertFalse(event.next_attempt)

            self.assertTrue(mock.called)

            result = WebHookResult.objects.get()
            self.assertStringContains("Event delivered successfully", result.message)
            self.assertStringContains("ignoring", result.message)
            self.assertEquals(200, result.status_code)
            self.assertEquals(bad_json, result.body)

            WebHookEvent.objects.all().delete()
            WebHookResult.objects.all().delete()

        with patch('requests.Session.send') as mock:
            # a well-formed response triggers a reply broadcast
            mock.return_value = MockResponse(200, '{ "phone": "+250788123123", "text": "I am success" }')

            WebHookEvent.trigger_sms_event(SMS_RECEIVED, sms, now)
            event = WebHookEvent.objects.get()

            self.assertEquals('C', event.status)
            self.assertEquals(1, event.try_count)
            self.assertFalse(event.next_attempt)

            result = WebHookResult.objects.get()
            self.assertEquals(200, result.status_code)

            self.assertTrue(mock.called)

            broadcast = Broadcast.objects.get()
            contact = Contact.get_or_create(self.org, self.admin, name=None, urns=["tel:+250788123123"], channel=self.channel)
            self.assertTrue("I am success", broadcast.text)
            self.assertTrue(contact, broadcast.contacts.all())

            # verify the outgoing request payload field by field
            self.assertTrue(mock.called)
            args = mock.call_args_list[0][0]
            prepared_request = args[0]
            self.assertEquals(self.org.get_webhook_url(), prepared_request.url)

            data = parse_qs(prepared_request.body)
            self.assertEquals(self.joe.get_urn(TEL_SCHEME).path, data['phone'][0])
            self.assertEquals(unicode(self.joe.get_urn(TEL_SCHEME)), data['urn'][0])
            self.assertEquals(self.joe.uuid, data['contact'][0])
            self.assertEquals(sms.pk, int(data['sms'][0]))
            self.assertEquals(self.channel.pk, int(data['channel'][0]))
            self.assertEquals(SMS_RECEIVED, data['event'][0])
            self.assertEquals("I'm gonna pop some tags", data['text'][0])
            self.assertTrue('time' in data)

            WebHookEvent.objects.all().delete()
            WebHookResult.objects.all().delete()

        with patch('requests.Session.send') as mock:
            mock.return_value = MockResponse(500, "I am error")

            # a 500 schedules a retry roughly five minutes out
            next_attempt_earliest = timezone.now() + timedelta(minutes=4)
            next_attempt_latest = timezone.now() + timedelta(minutes=6)

            WebHookEvent.trigger_sms_event(SMS_RECEIVED, sms, now)
            event = WebHookEvent.objects.get()

            self.assertEquals('E', event.status)
            self.assertEquals(1, event.try_count)
            self.assertTrue(event.next_attempt)
            self.assertTrue(next_attempt_earliest < event.next_attempt and next_attempt_latest > event.next_attempt)

            result = WebHookResult.objects.get()
            self.assertStringContains("Error", result.message)
            self.assertEquals(500, result.status_code)
            self.assertEquals("I am error", result.body)

            # make sure things become failures after three retries
            event.try_count = 2
            event.deliver()
            event.save()

            self.assertTrue(mock.called)

            self.assertEquals('F', event.status)
            self.assertEquals(3, event.try_count)
            self.assertFalse(event.next_attempt)

            result = WebHookResult.objects.get()
            self.assertStringContains("Error", result.message)
            self.assertEquals(500, result.status_code)
            self.assertEquals("I am error", result.body)
            self.assertEquals("http://fake.com/webhook.php", result.url)
            self.assertTrue(result.data.find("pop+some+tags") > 0)

            # check out our api log: anonymous users get redirected to login
            response = self.client.get(reverse('api.log'))
            self.assertRedirect(response, reverse('users.user_login'))

            response = self.client.get(reverse('api.log_read', args=[event.pk]))
            self.assertRedirect(response, reverse('users.user_login'))

            WebHookEvent.objects.all().delete()
            WebHookResult.objects.all().delete()

        # add a webhook header to the org
        self.channel.org.webhook = u'{"url": "http://fake.com/webhook.php", "headers": {"X-My-Header": "foobar", "Authorization": "Authorization: Basic QWxhZGRpbjpvcGVuIHNlc2FtZQ=="}, "method": "POST"}'
        self.channel.org.save()

        # check that our webhook settings have saved
        self.assertEquals('http://fake.com/webhook.php', self.channel.org.get_webhook_url())
        self.assertDictEqual({'X-My-Header': 'foobar', 'Authorization': 'Authorization: Basic QWxhZGRpbjpvcGVuIHNlc2FtZQ=='}, self.channel.org.get_webhook_headers())

        with patch('requests.Session.send') as mock:
            mock.return_value = MockResponse(200, "Boom")
            WebHookEvent.trigger_sms_event(SMS_RECEIVED, sms, now)
            event = WebHookEvent.objects.get()

            result = WebHookResult.objects.get()
            # both headers should be in the json-encoded url string
            self.assertStringContains('X-My-Header: foobar', result.request)
            self.assertStringContains('Authorization: Authorization: Basic QWxhZGRpbjpvcGVuIHNlc2FtZQ==', result.request)
Example 13

Project: rapidpro
Source File: tests.py
View license
    def test_scheduling(self):
        """Exercises EventFire scheduling for campaign events.

        Verifies that fires are created/updated/removed as contacts' date
        fields change, that invalid dates clear fires, that inactive events
        and hidden fields drop their fires, and that past-due fires are
        triggered into flow runs by the campaign check task.
        """
        campaign = Campaign.create(self.org, self.admin, "Planting Reminders", self.farmers)

        self.assertEquals("Planting Reminders", unicode(campaign))

        # create a reminder for our first planting event
        planting_reminder = CampaignEvent.create_flow_event(self.org, self.admin, campaign, relative_to=self.planting_date,
                                                            offset=0, unit='D', flow=self.reminder_flow, delivery_hour=17)

        self.assertEquals("Planting Date == 0 -> Color Flow", unicode(planting_reminder))

        # schedule our reminders
        EventFire.update_campaign_events(campaign)

        # we shouldn't have any event fires created, since neither of our farmers have a planting date
        self.assertEquals(0, EventFire.objects.all().count())

        # ok, set a planting date on one of our contacts
        self.farmer1.set_field(self.user, 'planting_date', "05-10-2020 12:30:10")

        # update our campaign events
        EventFire.update_campaign_events(campaign)

        # should have one event now
        fire = EventFire.objects.get()
        self.assertEquals(5, fire.scheduled.day)
        self.assertEquals(10, fire.scheduled.month)
        self.assertEquals(2020, fire.scheduled.year)

        # account for timezone difference, our org is in UTC+2
        self.assertEquals(17 - 2, fire.scheduled.hour)

        self.assertEquals(self.farmer1, fire.contact)
        self.assertEquals(planting_reminder, fire.event)

        self.assertIsNone(fire.fired)

        # change the planting date; the fire should follow it
        self.farmer1.set_field(self.user, 'planting_date', "06-10-2020 12:30:10")

        EventFire.update_campaign_events_for_contact(campaign, self.farmer1)
        fire = EventFire.objects.get()
        self.assertEquals(6, fire.scheduled.day)
        self.assertEquals(10, fire.scheduled.month)
        self.assertEquals(2020, fire.scheduled.year)
        self.assertEquals(self.farmer1, fire.contact)
        self.assertEquals(planting_reminder, fire.event)

        # set it to something invalid: the fire should be removed
        self.farmer1.set_field(self.user, 'planting_date', "what?")
        EventFire.update_campaign_events_for_contact(campaign, self.farmer1)
        self.assertFalse(EventFire.objects.all())

        # now something valid again
        self.farmer1.set_field(self.user, 'planting_date', "07-10-2020 12:30:10")

        EventFire.update_campaign_events_for_contact(campaign, self.farmer1)
        fire = EventFire.objects.get()
        self.assertEquals(7, fire.scheduled.day)
        self.assertEquals(10, fire.scheduled.month)
        self.assertEquals(2020, fire.scheduled.year)
        self.assertEquals(self.farmer1, fire.contact)
        self.assertEquals(planting_reminder, fire.event)

        # create another reminder, offset one day after the planting date
        planting_reminder2 = CampaignEvent.create_flow_event(self.org, self.admin, campaign, relative_to=self.planting_date,
                                                             offset=1, unit='D', flow=self.reminder2_flow)

        self.assertEquals(1, planting_reminder2.abs_offset())

        # update the campaign
        EventFire.update_campaign_events(campaign)

        # should have two events now, ordered by date
        events = EventFire.objects.all()

        self.assertEquals(planting_reminder, events[0].event)
        self.assertEquals(7, events[0].scheduled.day)

        self.assertEquals(planting_reminder2, events[1].event)
        self.assertEquals(8, events[1].scheduled.day)

        # mark one of the events as inactive
        planting_reminder2.is_active = False
        planting_reminder2.save()

        # update the campaign
        EventFire.update_campaign_events(campaign)

        # back to only one event
        event = EventFire.objects.get()
        self.assertEquals(planting_reminder, event.event)
        self.assertEquals(7, event.scheduled.day)

        # update our date
        self.farmer1.set_field(self.user, 'planting_date', '09-10-2020 12:30')

        # should have updated
        event = EventFire.objects.get()
        self.assertEquals(planting_reminder, event.event)
        self.assertEquals(9, event.scheduled.day)

        # let's remove our contact field
        ContactField.hide_field(self.org, self.user, 'planting_date')

        # shouldn't have anything scheduled
        self.assertFalse(EventFire.objects.all())

        # add it back in
        ContactField.get_or_create(self.org, self.admin, 'planting_date', "planting Date")

        # should be back!
        event = EventFire.objects.get()
        self.assertEquals(planting_reminder, event.event)
        self.assertEquals(9, event.scheduled.day)

        # change our fire date to sometime in the past so it gets triggered
        event.scheduled = timezone.now() - timedelta(hours=1)
        event.save()

        # schedule our events to fire
        check_campaigns_task()

        # should have one flow run now
        run = FlowRun.objects.get()
        self.assertEquals(event.contact, run.contact)

Example 14

Project: rapidpro
Source File: tests.py
View license
    def test_trigger_schedule(self):
        """Tests creating and updating scheduled flow triggers via the views.

        Posts the schedule form with each start mode ('never', 'stop', 'now',
        'later') and repeat period, asserting the resulting Trigger/Schedule
        state, then repeats the same matrix against the update view.
        """
        self.login(self.admin)
        flow = self.create_flow()

        # recipients: a group of two contacts plus one extra contact
        chester = self.create_contact("Chester", "+250788987654")
        shinoda = self.create_contact("Shinoda", "+250234213455")
        linkin_park = self.create_group("Linkin Park", [chester, shinoda])
        stromae = self.create_contact("Stromae", "+250788645323")

        # NOTE(review): naive timetuple timestamps — assumes local time;
        # (variable name "tommorrow" is a pre-existing typo)
        now = timezone.now()
        now_stamp = time.mktime(now.timetuple())

        tommorrow = now + timedelta(days=1)
        tommorrow_stamp = time.mktime(tommorrow.timetuple())

        # missing flow: form errors, nothing created
        post_data = dict()
        post_data['omnibox'] = "g-%s,c-%s" % (linkin_park.uuid, stromae.uuid)
        post_data['repeat_period'] = 'D'
        post_data['start'] = 'later'
        post_data['start_datetime_value'] = "%d" % tommorrow_stamp

        response = self.client.post(reverse("triggers.trigger_schedule"), post_data)
        self.assertEquals(response.context['form'].errors.keys(), ['flow'])
        self.assertFalse(Trigger.objects.all())
        self.assertFalse(Schedule.objects.all())

        # survey flows should not be an option
        flow.flow_type = Flow.SURVEY
        flow.save()
        response = self.client.get(reverse("triggers.trigger_schedule"))
        self.assertEqual(0, response.context['form'].fields['flow'].queryset.all().count())

        # back to normal flow type
        flow.flow_type = Flow.FLOW
        flow.save()
        self.assertEqual(1, response.context['form'].fields['flow'].queryset.all().count())

        # start='never': trigger created with unscheduled status
        post_data = dict()
        post_data['flow'] = flow.pk
        post_data['omnibox'] = "g-%s,c-%s" % (linkin_park.uuid, stromae.uuid)
        post_data['start'] = 'never'
        post_data['repeat_period'] = 'O'

        response = self.client.post(reverse("triggers.trigger_schedule"), post_data)
        self.assertEquals(1, Trigger.objects.all().count())

        trigger = Trigger.objects.all().order_by('-pk')[0]
        self.assertTrue(trigger.schedule)
        self.assertEquals(trigger.schedule.status, 'U')
        self.assertEquals(trigger.groups.all()[0].pk, linkin_park.pk)
        self.assertEquals(trigger.contacts.all()[0].pk, stromae.pk)

        # start='stop': also unscheduled
        post_data = dict()
        post_data['flow'] = flow.pk
        post_data['omnibox'] = "g-%s,c-%s" % (linkin_park.uuid, stromae.uuid)
        post_data['start'] = 'stop'
        post_data['repeat_period'] = 'O'

        response = self.client.post(reverse("triggers.trigger_schedule"), post_data)
        self.assertEquals(2, Trigger.objects.all().count())

        trigger = Trigger.objects.all().order_by('-pk')[0]
        self.assertTrue(trigger.schedule)
        self.assertEquals(trigger.schedule.status, 'U')
        self.assertEquals(trigger.groups.all()[0].pk, linkin_park.pk)
        self.assertEquals(trigger.contacts.all()[0].pk, stromae.pk)

        # start='now', one-off: fires immediately so no next_fire remains
        post_data = dict()
        post_data['flow'] = flow.pk
        post_data['omnibox'] = "g-%s,c-%s" % (linkin_park.uuid, stromae.uuid)
        post_data['repeat_period'] = 'O'
        post_data['start'] = 'now'
        post_data['start_datetime_value'] = "%d" % now_stamp

        response = self.client.post(reverse("triggers.trigger_schedule"), post_data)
        self.assertEquals(3, Trigger.objects.all().count())

        trigger = Trigger.objects.all().order_by('-pk')[0]
        self.assertTrue(trigger.schedule)
        self.assertFalse(trigger.schedule.next_fire)
        self.assertEquals(trigger.schedule.repeat_period, 'O')
        self.assertEquals(trigger.schedule.repeat_days, 0)
        self.assertEquals(trigger.groups.all()[0].pk, linkin_park.pk)
        self.assertEquals(trigger.contacts.all()[0].pk, stromae.pk)

        # start='later' with daily repeat
        post_data = dict()
        post_data['flow'] = flow.pk
        post_data['omnibox'] = "g-%s,c-%s" % (linkin_park.uuid, stromae.uuid)
        post_data['repeat_period'] = 'D'
        post_data['start'] = 'later'
        post_data['start_datetime_value'] = "%d" % tommorrow_stamp

        response = self.client.post(reverse("triggers.trigger_schedule"), post_data)
        self.assertEquals(4, Trigger.objects.all().count())

        trigger = Trigger.objects.all().order_by('-pk')[0]
        self.assertTrue(trigger.schedule)
        self.assertEquals(trigger.schedule.repeat_period, 'D')
        self.assertEquals(trigger.groups.all()[0].pk, linkin_park.pk)
        self.assertEquals(trigger.contacts.all()[0].pk, stromae.pk)

        # now exercise the update view against the last trigger
        update_url = reverse('triggers.trigger_update', args=[trigger.pk])

        # update without a flow: form errors
        post_data = dict()
        post_data['omnibox'] = "g-%s,c-%s" % (linkin_park.uuid, stromae.uuid)
        post_data['repeat_period'] = 'O'
        post_data['start'] = 'now'
        post_data['start_datetime_value'] = "%d" % now_stamp

        response = self.client.post(update_url, post_data)
        self.assertEquals(response.context['form'].errors.keys(), ['flow'])

        # update to group-only recipients: contacts are cleared
        post_data = dict()
        post_data['flow'] = flow.pk
        post_data['omnibox'] = "g-%s" % linkin_park.uuid
        post_data['repeat_period'] = 'O'
        post_data['start'] = 'now'
        post_data['start_datetime_value'] = "%d" % now_stamp

        response = self.client.post(update_url, post_data)

        trigger = Trigger.objects.get(pk=trigger.pk)
        self.assertTrue(trigger.schedule)
        self.assertEquals(trigger.schedule.repeat_period, 'O')
        self.assertFalse(trigger.schedule.next_fire)
        self.assertEquals(trigger.groups.all()[0].pk, linkin_park.pk)
        self.assertFalse(trigger.contacts.all())

        # update back to 'never'
        post_data = dict()
        post_data['flow'] = flow.pk
        post_data['omnibox'] = "g-%s,c-%s" % (linkin_park.uuid, stromae.uuid)
        post_data['start'] = 'never'
        post_data['repeat_period'] = 'O'

        response = self.client.post(update_url, post_data)

        trigger = Trigger.objects.get(pk=trigger.pk)
        self.assertTrue(trigger.schedule)
        self.assertEquals(trigger.schedule.status, 'U')
        self.assertEquals(trigger.groups.all()[0].pk, linkin_park.pk)
        self.assertEquals(trigger.contacts.all()[0].pk, stromae.pk)

        # update to 'stop'
        post_data = dict()
        post_data['flow'] = flow.pk
        post_data['omnibox'] = "g-%s,c-%s" % (linkin_park.uuid, stromae.uuid)
        post_data['start'] = 'stop'
        post_data['repeat_period'] = 'O'

        response = self.client.post(update_url, post_data)

        trigger = Trigger.objects.get(pk=trigger.pk)
        self.assertTrue(trigger.schedule)
        self.assertEquals(trigger.schedule.status, 'U')
        self.assertEquals(trigger.groups.all()[0].pk, linkin_park.pk)
        self.assertEquals(trigger.contacts.all()[0].pk, stromae.pk)

        # update to daily repeat starting later
        post_data = dict()
        post_data['flow'] = flow.pk
        post_data['omnibox'] = "g-%s,c-%s" % (linkin_park.uuid, stromae.uuid)
        post_data['repeat_period'] = 'D'
        post_data['start'] = 'later'
        post_data['start_datetime_value'] = "%d" % tommorrow_stamp

        response = self.client.post(update_url, post_data)

        trigger = Trigger.objects.get(pk=trigger.pk)

        self.assertTrue(trigger.schedule)
        self.assertEquals(trigger.schedule.repeat_period, 'D')
        self.assertEquals(trigger.groups.all()[0].pk, linkin_park.pk)
        self.assertEquals(trigger.contacts.all()[0].pk, stromae.pk)

Example 15

Project: chain-api
Source File: resources.py
View license
    def serialize_list(self, embed, cache):
        """Serialize a "list" of SensorData resources.

        Rather than one resource per data point, the whole collection is
        rendered as a single resource holding a list of presence data
        items, windowed by an optional unix-timestamp range.
        """
        if not embed:
            return super(
                PresenceDataResource,
                self).serialize_list(
                embed,
                cache)

        def parse_bound(raw, message):
            # Convert a unix-epoch query parameter into a naive UTC datetime,
            # translating bad input into a client error.
            try:
                return datetime.utcfromtimestamp(float(raw))
            except ValueError:
                raise BadRequestException(message)

        now_time = timezone.now()

        # If the time filters aren't given, default to the most recent
        # timespan ending now; otherwise convert them from unix time.
        if 'timestamp__gte' in self._filters:
            window_start = parse_bound(
                self._filters['timestamp__gte'],
                "Invalid timestamp format for lower bound of date range.")
        else:
            window_start = now_time - self.default_timespan

        if 'timestamp__lt' in self._filters:
            window_end = parse_bound(
                self._filters['timestamp__lt'],
                "Invalid timestamp format for upper bound of date range.")
        else:
            window_end = now_time

        # Normalize the filters to datetimes for the queryset lookup below.
        self._filters['timestamp__gte'] = window_start
        self._filters['timestamp__lt'] = window_end

        items = []
        payload = {
            '_links': {
                'curies': CHAIN_CURIES,
                'createForm': {
                    'href': self.get_create_href(),
                    'title': 'Add Data'
                },
                'items': items
            },
            'dataType': 'presence'
        }

        # add_page_links works on (and returns) the same structure, so the
        # `items` alias stays live for the loop below.
        payload = self.add_page_links(payload, self.get_list_href(),
                                      window_start, window_end)

        for record in self._queryset.filter(
                **self._filters).order_by('timestamp'):
            resource = PresenceDataResource(
                obj=record,
                request=self._request)
            items.append({
                'href': resource.get_single_href(),
                'title': "%s %s %s at time %s" % (
                    record.person.last_name,
                    "at" if record.present else "left",
                    record.sensor.device,
                    record.timestamp.isoformat())})
        return payload

Example 16

Project: reviewboard
Source File: email.py
View license
def mail_review_request(review_request, from_user=None, changedesc=None,
                        close_type=None):
    """Send an e-mail representing the supplied review request.

    Args:
        review_request (reviewboard.reviews.models.ReviewRequest):
            The review request to send an e-mail about.

        from_user (django.contrib.auth.models.User):
            The user who triggered the e-mail (i.e., they published or closed
            the review request).

        changedesc (reviewboard.changedescs.models.ChangeDescription):
            An optional change description showing what has changed in the
            review request, possibly with explanatory text from the submitter.
            This is created when saving a draft on a public review request and
            will be ``None`` when publishing initially. This is used by the
            template to add contextual (updated) flags to inform people what
            has changed.

        close_type (unicode):
            How the review request was closed or ``None`` if it was published.
            If this is not ``None`` it must be one of
            :py:attr:`~reviewboard.reviews.models.ReviewRequest.SUBMITTED` or
            :py:attr:`~reviewboard.reviews.models.ReviewRequest.DISCARDED`.
    """
    # If the review request is not yet public or has been discarded, don't send
    # any mail. Relax the "discarded" rule when e-mails are sent on closing
    # review requests.
    if (not review_request.public or
        (not close_type and review_request.status == 'D')):
        return

    # Default the sender to the review request's submitter.
    if not from_user:
        from_user = review_request.submitter

    # Coerce the summary to unicode before it is formatted into the subject.
    summary = _ensure_unicode(review_request.summary)
    subject = "Review Request %d: %s" % (review_request.display_id,
                                         summary)
    reply_message_id = None

    if review_request.email_message_id:
        # Fancy quoted "replies"
        # An e-mail was already sent for this review request: thread this
        # one as a reply and include everyone who participated so far.
        subject = "Re: " + subject
        reply_message_id = review_request.email_message_id
        extra_recipients = review_request.participants
    else:
        extra_recipients = None

    extra_context = {}

    if close_type:
        # When closing, the latest public change description (created during
        # the close) describes the event; it supersedes any passed-in draft
        # change description.
        changedesc = review_request.changedescs.filter(public=True).latest()

    limit_recipients_to = None

    if changedesc:
        extra_context['change_text'] = changedesc.text
        extra_context['change_rich_text'] = changedesc.rich_text
        extra_context['changes'] = changedesc.fields_changed

        fields_changed = changedesc.fields_changed
        changed_field_names = set(fields_changed.keys())

        if (changed_field_names and
            changed_field_names.issubset(['target_people', 'target_groups'])):
            # If the only changes are to the target reviewers, try to send a
            # much more targeted e-mail (rather than sending it out to
            # everyone, only send it to new people).
            limit_recipients_to = set()

            if 'target_people' in changed_field_names:
                # item[2] carries the pk in each "added" change entry.
                user_pks = [
                    item[2]
                    for item in fields_changed['target_people']['added']
                ]

                limit_recipients_to.update(User.objects.filter(
                    pk__in=user_pks))

            if 'target_groups' in changed_field_names:
                group_pks = [
                    item[2]
                    for item in fields_changed['target_groups']['added']
                ]

                limit_recipients_to.update(Group.objects.filter(
                    pk__in=group_pks))

    to_field, cc_field = build_recipients(from_user, review_request,
                                          extra_recipients,
                                          limit_recipients_to)

    extra_filter_kwargs = {}

    # Pick the signal matching the event so extension hooks can filter the
    # recipient lists specifically for this kind of e-mail.
    if close_type:
        signal = review_request_closed
        extra_filter_kwargs['close_type'] = close_type
    else:
        signal = review_request_published

    to_field, cc_field = filter_email_recipients_from_hooks(
        to_field, cc_field, signal, review_request=review_request,
        user=from_user, **extra_filter_kwargs)

    # Record when the mail went out and its Message-ID so future e-mails
    # about this review request can be threaded as replies.
    review_request.time_emailed = timezone.now()
    review_request.email_message_id = \
        send_review_mail(from_user, review_request, subject,
                         reply_message_id, to_field, cc_field,
                         'notifications/review_request_email.txt',
                         'notifications/review_request_email.html',
                         extra_context)
    review_request.save()

Example 17

Project: reviewboard
Source File: review_request.py
View license
    def close(self, type, user=None, description=None, rich_text=False):
        """Closes the review request.

        Args:
            type (unicode):
                How the close occurs. This should be one of
                :py:attr:`SUBMITTED` or :py:attr:`DISCARDED`.

            user (django.contrib.auth.models.User):
                The user who is closing the review request.

            description (unicode):
                An optional description that indicates why the review request
                was closed.

            rich_text (bool):
                Indicates whether or not that the description is rich text.

        Raises:
            PermissionError:
                The user does not have permission to close the review
                request.

            AttributeError:
                ``type`` is not a valid close type.

            PublishError:
                The review request was submitted without ever having been
                made public.
        """
        if (user and not self.is_mutable_by(user) and
            not user.has_perm("reviews.can_change_status", self.local_site)):
            raise PermissionError

        if type not in [self.SUBMITTED, self.DISCARDED]:
            raise AttributeError("%s is not a valid close type" % type)

        # Notify listeners before any state is modified.
        review_request_closing.send(sender=self.__class__,
                                    user=user,
                                    review_request=self,
                                    type=type,
                                    description=description,
                                    rich_text=rich_text)

        draft = get_object_or_none(self.draft)

        if self.status != type:
            # The status is actually transitioning.
            if (draft is not None and
                not self.public and type == self.DISCARDED):
                # Copy over the draft information if this is a private discard.
                draft.copy_fields_to_request(self)

            # TODO: Use the user's default for rich_text.
            changedesc = ChangeDescription(public=True,
                                           text=description or "",
                                           rich_text=rich_text or False,
                                           user=user or self.submitter)

            # Record the old -> new status transition on the change
            # description before saving it.
            status_field = get_review_request_field('status')(self)
            status_field.record_change_entry(changedesc, self.status, type)
            changedesc.save()

            self.changedescs.add(changedesc)

            if type == self.SUBMITTED:
                if not self.public:
                    raise PublishError("The draft must be public first.")
            else:
                # Discarding clears any pending commit association.
                self.commit_id = None

            self.status = type
            self.save(update_counts=True)

            review_request_closed.send(sender=self.__class__, user=user,
                                       review_request=self,
                                       type=type,
                                       description=description,
                                       rich_text=rich_text)
        else:
            # Update submission description.
            # Same status as before: just refresh the latest public change
            # description's text and timestamp.
            changedesc = self.changedescs.filter(public=True).latest()
            changedesc.timestamp = timezone.now()
            changedesc.text = description or ""
            changedesc.rich_text = rich_text
            changedesc.save()

            # Needed to renew last-update.
            self.save()

        # Delete the associated draft review request.
        if draft is not None:
            draft.delete()

Example 18

Project: reviewboard
Source File: review_request_draft.py
View license
    def publish(self, review_request=None, user=None, trivial=False,
                send_notification=True):
        """Publishes this draft.

        This updates and returns the draft's ChangeDescription, which
        contains the changed fields. This is used by the e-mail template
        to tell people what's new and interesting.

        The draft's associated ReviewRequest object will be used if one isn't
        passed in.

        The keys that may be saved in ``fields_changed`` in the
        ChangeDescription are:

        *  ``submitter``
        *  ``summary``
        *  ``description``
        *  ``testing_done``
        *  ``bugs_closed``
        *  ``depends_on``
        *  ``branch``
        *  ``target_groups``
        *  ``target_people``
        *  ``screenshots``
        *  ``screenshot_captions``
        *  ``diff``
        *  Any custom field IDs

        Each field in 'fields_changed' represents a changed field. This will
        save fields in the standard formats as defined by the
        'ChangeDescription' documentation, with the exception of the
        'screenshot_captions' and 'diff' fields.

        For the 'screenshot_captions' field, the value will be a dictionary
        of screenshot ID/dict pairs with the following fields:

        * ``old``: The old value of the field
        * ``new``: The new value of the field

        For the ``diff`` field, there is only ever an ``added`` field,
        containing the ID of the new diffset.

        The ``send_notification`` parameter is intended for internal use only,
        and is there to prevent duplicate notifications when being called by
        ReviewRequest.publish.
        """
        if not review_request:
            review_request = self.review_request

        # Publishing against an already-public review request must produce a
        # change description to record the update.
        if not self.changedesc and review_request.public:
            self.changedesc = ChangeDescription()

        if not user:
            # Prefer the user recorded on the change description, falling
            # back to the review request's submitter.
            if self.changedesc:
                user = self.changedesc.get_user(self)
            else:
                user = review_request.submitter

        self.copy_fields_to_request(review_request)

        if self.diffset:
            # Move the draft's diffset into the review request's history.
            self.diffset.history = review_request.diffset_history
            self.diffset.save(update_fields=['history'])

        # If no changes were made, raise exception and do not save
        if self.changedesc and not self.changedesc.has_modified_fields():
            raise NotModifiedError()

        if self.changedesc:
            # Finalize the change description and attach it to the review
            # request as a public record of this publish.
            self.changedesc.user = user
            self.changedesc.timestamp = timezone.now()
            self.changedesc.public = True
            self.changedesc.save()
            review_request.changedescs.add(self.changedesc)

        review_request.description_rich_text = self.description_rich_text
        review_request.testing_done_rich_text = self.testing_done_rich_text
        review_request.rich_text = self.rich_text
        review_request.save()

        if send_notification:
            review_request_published.send(sender=review_request.__class__,
                                          user=user,
                                          review_request=review_request,
                                          trivial=trivial,
                                          changedesc=self.changedesc)

        return self.changedesc

Example 19

Project: orchestra
Source File: machine_tasks.py
View license
def execute(project_id, step_slug):
    """Execute the machine step ``step_slug`` for the given project.

    Claims the single machine task assignment (creating it if needed),
    runs the step's configured execution function, and records the result
    on the assignment, task, and iteration.

    Args:
        project_id (int):
            ID of the project whose machine task should be executed.
        step_slug (str):
            Slug of the machine step within the project's workflow version.

    Raises:
        MachineExecutionError:
            The step is a human step, the task is already complete, or
            another machine has already picked up the assignment.
    """
    project = Project.objects.get(id=project_id)
    step = Step.objects.get(slug=step_slug,
                            workflow_version=project.workflow_version)
    task = Task.objects.get(project=project,
                            step=step)

    # Run machine function
    if step.is_human:
        raise MachineExecutionError('Step worker type is not machine')

    if task.status == Task.Status.COMPLETE:
        raise MachineExecutionError('Task assignment already completed')

    # Machine tasks are only assigned to one worker/machine,
    # so they should only have one task assignment,
    # and should never be submitted for review.

    with transaction.atomic():
        # Uniqueness constraint on assignnment_counter and task prevents
        # concurrent creation of more than one assignment
        task_assignment, created = TaskAssignment.objects.get_or_create(
            assignment_counter=0,
            task=task,
            defaults={
                'status': TaskAssignment.Status.PROCESSING,
                'in_progress_task_data': {}})
        if created:
            task.status = Task.Status.PROCESSING
            task.save()

            Iteration.objects.create(
                assignment=task_assignment,
                start_datetime=task_assignment.start_datetime)
        else:
            # Task assignment already exists
            if task_assignment.status == TaskAssignment.Status.FAILED:
                # Pick up failed task for reprocessing
                task_assignment.status = TaskAssignment.Status.PROCESSING
                task_assignment.save()
            else:
                # Task assignment already processing
                raise MachineExecutionError(
                    'Task already picked up by another machine')

    prerequisites = previously_completed_task_data(task)

    # Resolve and invoke the step's configured execution function.
    function = locate(step.execution_function['path'])
    kwargs = step.execution_function.get('kwargs', {})
    try:
        project_data = project.project_data
        project_data['project_id'] = project_id
        task_data = function(project_data, prerequisites, **kwargs)
    except Exception:
        # Narrowed from a bare ``except:`` so KeyboardInterrupt/SystemExit
        # propagate. Any failure in the machine function is recorded on the
        # assignment so a later run can reprocess it.
        task_assignment.status = TaskAssignment.Status.FAILED
        logger.exception('Machine task has failed')
        task_assignment.save()
        return
    task_assignment.status = TaskAssignment.Status.SUBMITTED
    task_assignment.in_progress_task_data = task_data
    task_assignment.save()

    if task.project.status == Project.Status.ABORTED:
        # If a long-running task's project was aborted while running, we ensure
        # the aborted state on the task.
        task.status = Task.Status.ABORTED
        task.save()
    else:
        task.status = Task.Status.COMPLETE
        task.save()

        # Close out the iteration with the submitted data and spawn any
        # downstream tasks that this completion unblocks.
        iteration = get_latest_iteration(task_assignment)
        iteration.status = Iteration.Status.REQUESTED_REVIEW
        iteration.submitted_data = task_data
        iteration.end_datetime = timezone.now()
        iteration.save()

        create_subsequent_tasks(project)

Example 20

Project: orchestra
Source File: task_lifecycle.py
View license
@transaction.atomic
def submit_task(task_id, task_data, iteration_status, worker):
    """
    Returns a dict mapping task prerequisites onto their
    latest task assignment information.  The dict is of the form:
    {'previous-slug': {task_assignment_data}, ...}

    Args:
        task_id (int):
            The ID of the task to submit.
        task_data (str):
            A JSON blob of task data to submit.
        iteration_status (orchestra.models.Iteration.Status):
            The action taken upon task submission (i.e., REQUESTED_REVIEW
            or PROVIDED_REVIEW).
        worker (orchestra.models.Worker):
            The worker submitting the task.

    Returns:
        task (orchestra.models.Task):
            The modified task object.

    Raises:
        orchestra.core.errors.IllegalTaskSubmission:
            Submission prerequisites for the task are incomplete or the
            assignment is in a non-processing state.
        orchestra.core.errors.TaskAssignmentError:
            Worker belongs to more than one assignment for the given
            task.
        orchestra.core.errors.TaskStatusError:
            Task has already been completed.
    """
    # Capture a single timestamp so the ending iteration and any related
    # assignment updates below all agree on when the submission happened.
    submit_datetime = timezone.now()

    task = Task.objects.select_related('step', 'project').get(id=task_id)
    step = task.step
    if not _are_desired_steps_completed_on_project(step.submission_depends_on,
                                                   project=task.project):
        raise IllegalTaskSubmission('Submission prerequisites are not '
                                    'complete.')

    if task.status == Task.Status.COMPLETE:
        raise TaskStatusError('Task already completed')

    # Use select_for_update to prevent concurrency issues with save_task.
    # See https://github.com/b12io/orchestra/issues/2.
    assignments = (TaskAssignment.objects.select_for_update()
                                 .filter(worker=worker, task=task))

    # Worker can belong to only one assignment for a given task.
    if not assignments.count() == 1:
        raise TaskAssignmentError(
            'Task assignment with worker is in broken state.')

    assignment = assignments[0]

    if assignment.status != TaskAssignment.Status.PROCESSING:
        raise IllegalTaskSubmission('Worker is not allowed to submit')

    # Determine where the task moves next (review chain vs. completion).
    next_status = get_next_task_status(task, iteration_status)

    assignment.in_progress_task_data = task_data

    # Submit latest iteration
    latest_iteration = get_latest_iteration(assignment)
    latest_iteration.status = iteration_status
    latest_iteration.submitted_data = assignment.in_progress_task_data
    latest_iteration.end_datetime = submit_datetime
    latest_iteration.save()

    assignment.status = TaskAssignment.Status.SUBMITTED
    assignment.save()
    previous_status = task.status
    task.status = next_status
    task.save()

    if task.status == Task.Status.PENDING_REVIEW:
        # Check the assignment policy to try to assign a reviewer automatically
        task = _preassign_workers(task, AssignmentPolicyType.REVIEWER)
    elif task.status == Task.Status.REVIEWING:
        # Hand the submitted data up to the next (reviewing) assignment.
        update_related_assignment_status(task,
                                         assignment.assignment_counter + 1,
                                         assignment.in_progress_task_data,
                                         submit_datetime)
    elif task.status == Task.Status.POST_REVIEW_PROCESSING:
        # Send the review feedback back down to the previous assignment.
        update_related_assignment_status(task,
                                         assignment.assignment_counter - 1,
                                         assignment.in_progress_task_data,
                                         submit_datetime)
    elif task.status == Task.Status.COMPLETE:
        create_subsequent_tasks(task.project)

    notify_status_change(task, previous_status)
    return task

Example 21

Project: rocket-league-replays
Source File: views.py
View license
    def get_context_data(self, **kwargs):
        """Build the template context for a Steam profile page.

        Looks up ``kwargs['steam_id']``. If a site user is linked to it,
        their uploaded replays are included (filtered by privacy);
        otherwise Steam profile data is read from a local cache or fetched
        from the Steam Web API as best-effort. Replays the player appears
        in are always included.
        """
        context = super(SteamView, self).get_context_data(**kwargs)

        # Is this Steam ID associated with a user?
        try:
            social_obj = UserSocialAuth.objects.get(
                uid=kwargs['steam_id'],
            )
            context['steam_info'] = social_obj.extra_data['player']

            context['uploaded'] = social_obj.user.replay_set.all()

            # Limit to public games, or unlisted / private games uploaded by the user.
            if self.request.user.is_authenticated() and self.request.user == social_obj.user:
                context['uploaded'] = context['uploaded'].filter(
                    Q(privacy=PRIVACY_PUBLIC) | Q(user=self.request.user)
                )
            else:
                context['uploaded'] = context['uploaded'].filter(
                    privacy=PRIVACY_PUBLIC,
                )

            context['has_user'] = True
            context['social_obj'] = social_obj
        except UserSocialAuth.DoesNotExist:
            # Pull the profile data and pass it in.
            context['has_user'] = False
            context['steam_info'] = None

            # Do we have a cache object for this already?
            try:
                cache = SteamCache.objects.filter(
                    uid=kwargs['steam_id']
                )

                if cache.count() > 0:
                    # Clean up duplicate cache rows, keeping only the first.
                    for cache_item in cache[1:]:
                        cache_item.delete()

                    cache = cache[0]

                    # Have we updated this profile recently?
                    if 'last_updated' in cache.extra_data:
                        # Parse the last updated date.
                        last_date = parse_datetime(cache.extra_data['last_updated'])

                        # Use total_seconds(): the previous ``.seconds``
                        # attribute ignores whole days, so entries older
                        # than 24h could wrongly pass as fresh.
                        seconds_ago = (now() - last_date).total_seconds()

                        # 3600  seconds = 1 hour
                        # 21600 seconds = 6 hours
                        if seconds_ago < 21600:
                            context['steam_info'] = cache.extra_data['player']

            except SteamCache.DoesNotExist:
                # NOTE(review): filter() never raises DoesNotExist, so this
                # clause appears unreachable; kept for safety — confirm.
                pass

            try:
                if not context['steam_info']:
                    player = requests.get(USER_INFO, params={
                        'key': settings.SOCIAL_AUTH_STEAM_API_KEY,
                        'steamids': kwargs['steam_id'],
                    }).json()

                    if len(player['response']['players']) > 0:
                        context['steam_info'] = player['response']['players'][0]

                        # Store this data in a SteamCache object.
                        cache_obj, _ = SteamCache.objects.get_or_create(
                            uid=kwargs['steam_id']
                        )
                        cache_obj.extra_data = {
                            'player': context['steam_info'],
                            'last_updated': now().isoformat(),
                        }
                        cache_obj.save()
            except Exception:
                # Narrowed from a bare ``except:`` so system exits and
                # interrupts propagate. The Steam API lookup remains
                # deliberately best-effort: on failure steam_info stays
                # unset and the fallback dict below is used.
                pass

        context['appears_in'] = Replay.objects.filter(
            show_leaderboard=True,
            player__platform__in=['OnlinePlatform_Steam', '1'],
            player__online_id=kwargs['steam_id'],
        ).distinct()

        if self.request.user.is_authenticated():
            context['appears_in'] = context['appears_in'].filter(
                Q(privacy=PRIVACY_PUBLIC) | Q(user=self.request.user)
            )
        else:
            context['appears_in'] = context['appears_in'].filter(
                privacy=PRIVACY_PUBLIC,
            )

        # Fall back to a minimal dict so templates can always read steamid.
        if not context.get('steam_info', None):
            context['steam_info'] = {
                'steamid': kwargs['steam_id'],
            }

        return context

Example 22

Project: rocket-league-replays
Source File: views.py
View license
    def get_serializer_context(self):
        """Assemble stream-overlay statistics for the requested user.

        Reads the user's stream settings, windows their replays according to
        the ``limit_to`` setting, then tallies wins/losses and per-game
        averages for the player's own performances.
        """
        user = get_object_or_404(User, pk=self.kwargs['user_id'])
        context = user.profile.stream_settings
        context['user'] = user

        # Data
        context['games_played'] = user.replay_set.all()
        context['wins'] = 0
        context['losses'] = 0
        for stat_key in ('average_goals', 'average_assists', 'average_saves',
                         'average_shots', 'win_percentage',
                         'goal_assist_ratio'):
            context[stat_key] = 0

        # Per-game stat samples for the averages below.
        stats = {'goals': [], 'assists': [], 'saves': [], 'shots': []}

        # Window the replay queryset per the stream settings: either the
        # most recent N games or games within a recent time span.
        count_limits = {'3': 3, '5': 5, '10': 10, '20': 20}
        time_limits = {
            'hour': datetime.timedelta(hours=1),
            'today': datetime.timedelta(days=1),
            'week': datetime.timedelta(days=7),
        }
        limit = context['limit_to']
        if limit in count_limits:
            context['games_played'] = \
                context['games_played'][:count_limits[limit]]
        elif limit in time_limits:
            context['games_played'] = context['games_played'].filter(
                timestamp__gte=now() - time_limits[limit]
            )
        # 'all' (or any unrecognised value) leaves the queryset untouched.

        # What team was the user on?
        uid = user.social_auth.get(provider='steam').uid

        for replay in context['games_played']:
            # Which team was this user on?
            candidates = replay.player_set.filter(
                platform__in=['OnlinePlatform_Steam', '1'],
                online_id=uid,
            )

            if candidates.count() == 0:
                continue

            me = candidates[0]

            if me.team in (0, 1):
                own = replay.team_0_score if me.team == 0 \
                    else replay.team_1_score
                opposing = replay.team_1_score if me.team == 0 \
                    else replay.team_0_score
                if own > opposing:
                    context['wins'] += 1
                else:
                    context['losses'] += 1

            stats['goals'].append(me.goals)
            stats['assists'].append(me.assists)
            stats['saves'].append(me.saves)
            stats['shots'].append(me.shots)

        context['games_played'] = context['games_played'].count()

        # Averages; guarded so we never divide by zero.
        for stat_key, values in (('average_goals', stats['goals']),
                                 ('average_assists', stats['assists']),
                                 ('average_saves', stats['saves']),
                                 ('average_shots', stats['shots'])):
            if len(values) > 0:
                context[stat_key] = "{0:.2f}".format(sum(values) / len(values))

        if context['games_played'] > 0:
            context['win_percentage'] = "{0:.2f}".format(
                context['wins'] / context['games_played'] * 100)

        if sum(stats['assists']) > 0:
            context['goal_assist_ratio'] = "{0:.2f}".format(
                sum(stats['goals']) / sum(stats['assists']))
        else:
            context['goal_assist_ratio'] = sum(stats['goals'])

        return context

Example 23

Project: folivora
Source File: tasks.py
View license
@task(max_retries=4, iterations=0)
def sync_with_changelog():
    """Synchronize the local package index with the PyPI changelog.

    Only `new release`, `remove` and `create` are acted upon, since no
    further package metadata is stored locally.

    Actions PyPI may emit (according to its source code):
        new release                     - Creates a new Release
        remove                          - Removes a Package from the Shop
        rename from %(old)s             - Rename a package
        add %(pyversion)s %(filename)s  - Add a new file to a version
        remove file %(filename)s        - Remove a file
        docupdate                       - Notify for documentation update
        create                          - Create a new package
        update %(type)s                 - Update some detailed classifiers
    """
    # Timestamp taken *before* fetching, so changelog entries that arrive
    # while we process cannot fall into a gap on the next run.
    sync_started = timezone.now()

    state, _created = SyncState.objects.get_or_create(type=SyncState.CHANGELOG)

    # PyPI's changelog API expects seconds since the epoch.
    since_epoch = int(time.mktime(state.last_sync.timetuple()))

    shop = CheeseShop()

    try:
        changelog = shop.get_changelog(since_epoch, True)
    except socket.error as exc:
        if current.iterations == current.max_retries:
            # Out of retries: flag the sync as down so it can be reported.
            SyncState.objects.filter(type=SyncState.CHANGELOG) \
                             .update(state=SyncState.STATE_DOWN)
            logger.warning('No sync with PyPi, it\'s not reachable.')
            return
        current.iterations += 1
        current.retry(countdown=0, exc=exc)
    else:
        touched_projects = set()
        for pkg_name, version, stamp, action in changelog:
            if action == 'new release':
                try:
                    pkg = Package.objects.get(name=pkg_name)
                except Package.DoesNotExist:
                    pkg = Package.create_with_provider_url(pkg_name)

                released_at = timezone.make_aware(
                    datetime.datetime.fromtimestamp(stamp), pytz.UTC)
                already_known = PackageVersion.objects.filter(
                    package=pkg, version=version).exists()
                if not already_known:
                    new_version = PackageVersion(version=version,
                                                 release_date=released_at)
                    pkg.versions.add(new_version)
                    # Flag every project depending on this package with the
                    # freshly released version.
                    ProjectDependency.objects.filter(package=pkg) \
                                             .update(update=new_version)

                touched_projects.update(
                    Project.objects.filter(dependencies__package=pkg)
                                   .values_list('id', flat=True))

            elif action == 'remove':
                # Only clear versions and reset the pending update on every
                # project dependency to NULL. Keeping the dependency rows
                # themselves preserves ProjectDependency stability.
                try:
                    pkg = Package.objects.get(name=pkg_name)
                    ProjectDependency.objects.filter(package=pkg) \
                                             .update(update=None)

                    if version is None:
                        pkg.versions.all().delete()

                    log_affected_projects(pkg, action='remove_package',
                                          type='package', package=pkg)
                except Package.DoesNotExist:
                    pass

            elif action == 'create':
                if not Package.objects.filter(name=pkg_name).exists():
                    Package.create_with_provider_url(pkg_name)

        for project_id in touched_projects:
            sync_project.apply(args=(project_id,))

        SyncState.objects.filter(type=SyncState.CHANGELOG) \
                         .update(last_sync=sync_started,
                                 state=SyncState.STATE_RUNNING)

Example 24

Project: Django--an-app-at-a-time
Source File: debug.py
View license
    def get_traceback_data(self):
        """Return a dictionary containing traceback information.

        Builds the template context used to render the debug error page.
        As a side effect, may set ``self.template_does_not_exist`` and
        populate ``self.loader_debug_info`` when the exception is a
        ``TemplateDoesNotExist``.
        """
        try:
            default_template_engine = Engine.get_default()
        except Exception:
            # Since the debug view must never crash, catch all exceptions.
            # If Django can't find a default template engine, get_default()
            # raises ImproperlyConfigured. If some template engines fail to
            # load, any exception may be raised.
            default_template_engine = None

        # TODO: add support for multiple template engines (#24120).
        # TemplateDoesNotExist should carry all the information.
        # Replaying the search process isn't a good design.
        if self.exc_type and issubclass(self.exc_type, TemplateDoesNotExist):
            if default_template_engine is None:
                template_loaders = []
            else:
                self.template_does_not_exist = True
                self.loader_debug_info = []
                # If Django fails in get_template_loaders, provide an empty list
                # for the following loop to not fail.
                try:
                    template_loaders = default_template_engine.template_loaders
                except Exception:
                    template_loaders = []

            # Record, per loader, which template candidates were examined and
            # the status of each (via format_path_status).
            for loader in template_loaders:
                try:
                    source_list_func = loader.get_template_sources
                    # NOTE: This assumes exc_value is the name of the template that
                    # the loader attempted to load.
                    template_list = [{
                        'name': t,
                        'status': self.format_path_status(t),
                    } for t in source_list_func(str(self.exc_value))]
                except AttributeError:
                    # Loader without get_template_sources: nothing to report.
                    template_list = []
                loader_name = loader.__module__ + '.' + loader.__class__.__name__
                self.loader_debug_info.append({
                    'loader': loader_name,
                    'templates': template_list,
                })

        # TODO: add support for multiple template engines (#24119).
        if (default_template_engine is not None
                and default_template_engine.debug
                and hasattr(self.exc_value, 'django_template_source')):
            self.get_template_exception_info()

        # Pretty-print, sanitize and trim the local variables captured for
        # each traceback frame so they are safe to render.
        frames = self.get_traceback_frames()
        for i, frame in enumerate(frames):
            if 'vars' in frame:
                frame_vars = []
                for k, v in frame['vars']:
                    v = pprint(v)
                    # The force_escape filter assume unicode, make sure that works
                    if isinstance(v, six.binary_type):
                        v = v.decode('utf-8', 'replace')  # don't choke on non-utf-8 input
                    # Trim large blobs of data
                    if len(v) > 4096:
                        v = '%s... <trimmed %d bytes string>' % (v[0:4096], len(v))
                    frame_vars.append((k, force_escape(v)))
                frame['vars'] = frame_vars
            frames[i] = frame

        # For UnicodeErrors, extract a short snippet around the offending
        # range (start/end attributes) to help locate the bad characters.
        unicode_hint = ''
        if self.exc_type and issubclass(self.exc_type, UnicodeError):
            start = getattr(self.exc_value, 'start', None)
            end = getattr(self.exc_value, 'end', None)
            if start is not None and end is not None:
                unicode_str = self.exc_value.args[1]
                unicode_hint = smart_text(
                    unicode_str[max(start - 5, 0):min(end + 5, len(unicode_str))],
                    'ascii', errors='replace'
                )
        from django import get_version
        c = {
            'is_email': self.is_email,
            'unicode_hint': unicode_hint,
            'frames': frames,
            'request': self.request,
            'filtered_POST': self.filter.get_post_parameters(self.request),
            'settings': get_safe_settings(),
            'sys_executable': sys.executable,
            'sys_version_info': '%d.%d.%d' % sys.version_info[0:3],
            'server_time': timezone.now(),
            'django_version_info': get_version(),
            'sys_path': sys.path,
            'template_info': self.template_info,
            'template_does_not_exist': self.template_does_not_exist,
            'loader_debug_info': self.loader_debug_info,
        }
        # Check whether exception info is available
        if self.exc_type:
            c['exception_type'] = self.exc_type.__name__
        if self.exc_value:
            c['exception_value'] = smart_text(self.exc_value, errors='replace')
        if frames:
            c['lastframe'] = frames[-1]
        return c

Example 25

Project: Django--an-app-at-a-time
Source File: dates.py
View license
def _get_next_prev(generic_view, date, is_previous, period):
    """
    Helper: Get the next or the previous valid date. The idea is to allow
    links on month/day views to never be 404s by never providing a date
    that'll be invalid for the given view.

    This is a bit complicated since it handles different intervals of time,
    hence the coupling to generic_view.

    However in essence the logic comes down to:

        * If allow_empty and allow_future are both true, this is easy: just
          return the naive result (just the next/previous day/week/month,
          regardless of object existence.)

        * If allow_empty is true, allow_future is false, and the naive result
          isn't in the future, then return it; otherwise return None.

        * If allow_empty is false and allow_future is true, return the next
          date *that contains a valid object*, even if it's in the future. If
          there are no next objects, return None.

        * If allow_empty is false and allow_future is false, return the next
          date that contains a valid object. If that date is in the future, or
          if there are no next objects, return None.

    """
    date_field = generic_view.get_date_field()
    allow_empty = generic_view.get_allow_empty()
    allow_future = generic_view.get_allow_future()

    get_current = getattr(generic_view, '_get_current_%s' % period)
    get_next = getattr(generic_view, '_get_next_%s' % period)

    # Bounds of the current interval
    start, end = get_current(date), get_next(date)

    # If allow_empty is True, the naive result will be valid
    if allow_empty:
        if is_previous:
            result = get_current(start - datetime.timedelta(days=1))
        else:
            result = end

        if allow_future or result <= timezone_today():
            return result
        else:
            return None

    # Otherwise, we'll need to go to the database to look for an object
    # whose date_field is at least (greater than/less than) the given
    # naive result
    else:
        # Construct a lookup and an ordering depending on whether we're doing
        # a previous date or a next date lookup.
        if is_previous:
            lookup = {'%s__lt' % date_field: generic_view._make_date_lookup_arg(start)}
            ordering = '-%s' % date_field
        else:
            lookup = {'%s__gte' % date_field: generic_view._make_date_lookup_arg(end)}
            ordering = date_field

        # Filter out objects in the future if appropriate.
        if not allow_future:
            # Fortunately, to match the implementation of allow_future,
            # we need __lte, which doesn't conflict with __lt above.
            if generic_view.uses_datetime_field:
                now = timezone.now()
            else:
                now = timezone_today()
            lookup['%s__lte' % date_field] = now

        qs = generic_view.get_queryset().filter(**lookup).order_by(ordering)

        # Snag the first object from the queryset; if it doesn't exist that
        # means there's no next/previous link available.
        try:
            result = getattr(qs[0], date_field)
        except IndexError:
            return None

        # Convert datetimes to dates in the current time zone.
        if generic_view.uses_datetime_field:
            if settings.USE_TZ:
                result = timezone.localtime(result)
            result = result.date()

        # Return the first day of the period.
        return get_current(result)

Example 26

Project: SchoolIdolAPI
Source File: generate_settings.py
View license
def generate_settings(opt={}):
        """Regenerate ``schoolidolapi/generated_settings.py``.

        Gathers donator counts, current contests/events, user age stats and
        card statistics from the database, then writes them out as a Python
        module that the site imports as pre-computed settings.

        Python 2 only (``print`` statements, ``unicode``, ``print >> f``).

        NOTE(review): ``opt`` is a mutable default argument and is never
        read in the body; harmless as-is, but ``opt=None`` would be safer.
        """

        print 'Get total donators'
        # Donators are users with a non-empty ``status`` on their preferences.
        total_donators = unicode(models.UserPreferences.objects.filter(status__isnull=False).exclude(status__exact='').count())

        print 'Check the current contest'
        current_contests = get_current_contests()
        if not current_contests:
            # No running contest: fall back to a single placeholder entry.
            current_contests = [{
                'url': 'http://schoolido.lu/contest/',
                'image': 'http://i.schoolido.lu/static/currentcontest_no.png',
                'homepage_image': 'http://i.schoolido.lu/static/currentcontest_no.png',
                'name': None,
            }]
        else:
            current_contests = [{
                'url': 'http://schoolido.lu/contest/' + str(current_contest.id) + '/' + tourldash(current_contest.name) + '/',
                'image': (u'%s%s' % (settings.IMAGES_HOSTING_PATH, current_contest.image)) if current_contest.image else 'http://i.schoolido.lu/static/currentcontest.png',
                'homepage_image': (u'%s%s' % (settings.IMAGES_HOSTING_PATH, current_contest.homepage_image)) if current_contest.homepage_image else ((u'%s%s' % (settings.IMAGES_HOSTING_PATH, current_contest.image)) if current_contest.image else 'http://i.schoolido.lu/static/currentcontest.png'),
                'name': current_contest.name,
            } for current_contest in current_contests]

        print 'Check the current events'
        # NOTE(review): ``end__lte=timezone.now()`` selects events that have
        # already ENDED; for a "current" event ``end__gte`` would be expected.
        # Confirm the intended semantics before changing. Bare excepts below
        # deliberately degrade to None when no event can be determined.
        try:
            try:
                current_jp = models.Event.objects.filter(end__lte=timezone.now()).order_by('-beginning')[0]
            except IndexError:
                current_jp = models.Event.objects.order_by('-beginning')[0]
            current_jp = {
                'japanese_name': current_jp.japanese_name,
                'slide_position': len(current_contests) + 1,
                'image': '{}{}'.format(settings.IMAGES_HOSTING_PATH, current_jp.image),
            }
        except:
            current_jp = None
        try:
            try:
                current_en = models.Event.objects.filter(english_beginning__isnull=False).filter(end__lte=timezone.now()).order_by('-english_beginning')[0]
            except IndexError:
                current_en = models.Event.objects.filter(english_beginning__isnull=False).order_by('-english_beginning')[0]
            current_en = {
                'japanese_name': current_en.japanese_name,
                'slide_position': len(current_contests),
                'image': '{}{}'.format(settings.IMAGES_HOSTING_PATH, current_en.english_image if current_en.english_image else current_en.image),
            }
        except:
            current_en = None

        print 'Get ages'
        # Pre-seed the 10-29 buckets so the chart always shows that range,
        # then count every user with a birthdate whose age is in (0, 88).
        ages = {}
        for i in range(10,30):
            ages[i] = 0
        prefs = models.UserPreferences.objects.filter(birthdate__isnull=False)
        total_ages = prefs.count()
        for p in prefs:
            age = p.age
            if age > 0 and age < 88:
                if age in ages:
                    ages[age] += 1
                else:
                    ages[age] = 1
        ages = OrderedDict(sorted(ages.items()))

        print 'Get cardsinfo dictionary'
        cards_info = unicode({
            'max_stats': {
                'Smile': models.Card.objects.order_by('-idolized_maximum_statistics_smile')[:1][0].idolized_maximum_statistics_smile,
                'Pure': models.Card.objects.order_by('-idolized_maximum_statistics_pure')[:1][0].idolized_maximum_statistics_pure,
                'Cool': models.Card.objects.order_by('-idolized_maximum_statistics_cool')[:1][0].idolized_maximum_statistics_cool,
            },
            'songs_max_stats': models.Song.objects.order_by('-expert_notes')[0].expert_notes,
            'idols': ValuesQuerySetToDict(models.Card.objects.values('name', 'idol__japanese_name').annotate(total=Count('name')).order_by('-total', 'name')),
            'sub_units': [card['sub_unit'] for card in models.Idol.objects.filter(sub_unit__isnull=False).values('sub_unit').distinct()],
            'years': [idol['year'] for idol in models.Idol.objects.filter(year__isnull=False).values('year').distinct()],
            'schools': [idol['school'] for idol in models.Idol.objects.filter(school__isnull=False).values('school').distinct()],
            'collections': ValuesQuerySetToDict(models.Card.objects.filter(japanese_collection__isnull=False).exclude(japanese_collection__exact='').values('japanese_collection').annotate(total=Count('name')).order_by('-total', 'japanese_collection')),
            'translated_collections': ValuesQuerySetToDict(models.Card.objects.filter(translated_collection__isnull=False).exclude(translated_collection__exact='').values('translated_collection').annotate(total=Count('name')).order_by('-total', 'translated_collection')),
            'skills': ValuesQuerySetToDict(models.Card.objects.filter(skill__isnull=False).values('skill').annotate(total=Count('skill')).order_by('-total')),
            'total_cards': models.Card.objects.order_by('-id')[0].id,
            'en_cards': [int(c.id) for c in models.Card.objects.filter(japan_only=False)],
        })

        print 'Save generated settings'
        # Assemble the generated module source; the backslashes keep the
        # written file's lines at column zero (module level).
        s = u'\
from collections import OrderedDict\n\
import datetime\n\
TOTAL_DONATORS = ' + total_donators + u'\n\
CURRENT_CONTESTS = ' + unicode(current_contests) + u'\n\
CURRENT_EVENT_JP = ' + unicode(current_jp) + u'\n\
CURRENT_EVENT_EN = ' + unicode(current_en) + u'\n\
USERS_AGES = ' + unicode(ages) + u'\n\
USERS_TOTAL_AGES = ' + unicode(total_ages) + u'\n\
GENERATED_DATE = datetime.datetime.fromtimestamp(' + unicode(time.time()) + u')\n\
CARDS_INFO = ' + cards_info + u'\n\
'
        print s
        f = open('schoolidolapi/generated_settings.py', 'w')
        print >> f, s
        f.close()

Example 27

Project: shuup
Source File: test_basic_order.py
View license
def create_order(request, creator, customer, product):
    """Test helper: build, verify, ship and pay a five-item order.

    Creates an order with one taxed product line (quantity 5 at price 100)
    and a 30-unit discount line, then asserts totals, shipment and payment
    behave as expected for the given shop's tax mode.
    """
    billing = get_address().to_immutable()
    shipping = get_address(name="Shippy Doge").to_immutable()
    shipping.save()
    shop = request.shop
    order = Order(
        creator=creator,
        customer=customer,
        shop=shop,
        payment_method=get_default_payment_method(),
        shipping_method=get_default_shipping_method(),
        billing_address=billing,
        shipping_address=shipping,
        order_date=now(),
        status=get_initial_order_status(),
        currency=shop.currency,
        prices_include_tax=shop.prices_include_tax,
    )
    order.full_clean()
    order.save()

    # One product line: 5 units at a base unit price of 100.
    supplier = get_default_supplier()
    product_line = OrderLine(order=order)
    update_order_line_from_product(
        pricing_context=request,
        order_line=product_line,
        product=product,
        quantity=5,
        supplier=supplier)

    assert product_line.text == product.safe_translation_getter("name")
    product_line.base_unit_price = shop.create_price(100)
    assert product_line.price.value > 0
    product_line.save()

    line_tax = get_line_taxes_for(product_line)[0]

    tax_row = OrderLineTax.from_tax(
        tax=line_tax.tax,
        base_amount=line_tax.base_amount,
        order_line=product_line,
    )
    tax_row.save()  # Save order_line_tax before linking to order_line.tax
    product_line.taxes.add(tax_row)

    # A flat 30-unit discount line.
    discount_line = OrderLine(order=order, quantity=1, type=OrderLineType.OTHER)
    discount_line.discount_amount = shop.create_price(30)
    assert discount_line.discount_amount.value == 30
    assert discount_line.price.value == -30
    assert discount_line.base_unit_price.value == 0
    discount_line.save()

    order.cache_prices()
    order.check_all_verified()
    order.save()

    assert not order.can_set_complete()

    # Expected totals depend on whether shop prices include tax.
    base_total = 5 * shop.create_price(100).amount
    discount_total = shop.create_price(30).amount
    tax_total = line_tax.amount
    if order.prices_include_tax:
        assert_almost_equal(order.taxless_total_price.amount, base_total - tax_total - discount_total)
        assert_almost_equal(order.taxful_total_price.amount, base_total - discount_total)
    else:
        assert order.taxless_total_price.amount == base_total - discount_total
        assert order.taxful_total_price.amount == base_total + tax_total - discount_total

    # Ship everything in one shipment, then pay the full taxful total.
    assert not order.is_fully_shipped()
    shipment = order.create_shipment_of_all_products(supplier=supplier)
    assert order.is_fully_shipped()

    assert shipment.total_products == 5, "All products were shipped"
    assert shipment.weight == product.gross_weight * 5 / 1000, "Gravity works"
    assert not order.get_unshipped_products(), "Nothing was left in the warehouse"

    assert order.can_set_complete()

    order.create_payment(order.taxful_total_price)
    assert order.is_paid()
    assert Order.objects.paid().filter(pk=order.pk).exists(), "It was paid! Honestly!"
    assert order.has_products()

Example 28

Project: djangobb
Source File: views.py
View license
def search(request):
    """Forum search view.

    Dispatches on the ``action`` GET parameter (show_24h, show_new,
    show_unanswered, show_subscriptions, show_user, search) and renders the
    results either as posts or as topics depending on ``show_as``.
    """
    # TODO: use forms in every search type

    def _render_search_form(form=None):
        return render(request, 'djangobb_forum/search_form.html', {'categories': Category.objects.all(),
                'form': form,
                })

    # No action requested: just show the empty search form.
    if not 'action' in request.GET:
        return _render_search_form(form=PostSearchForm())

    # ``show_as`` selects the result flavour and its template.
    if request.GET.get("show_as") == "posts":
        show_as_posts = True
        template_name = 'djangobb_forum/search_posts.html'
    else:
        show_as_posts = False
        template_name = 'djangobb_forum/search_topics.html'

    context = {}

    # Create 'user viewable' pre-filtered topics/posts querysets
    viewable_category = Category.objects.all()
    topics = Topic.objects.all().order_by("-last_post__created")
    posts = Post.objects.all().order_by('-created')
    user = request.user
    if not user.is_superuser:
        # Non-superusers only see categories open to their groups (or to all).
        user_groups = user.groups.all() or [] # need 'or []' for anonymous user otherwise: 'EmptyManager' object is not iterable
        viewable_category = viewable_category.filter(Q(groups__in=user_groups) | Q(groups__isnull=True))

        topics = Topic.objects.filter(forum__category__in=viewable_category)
        posts = Post.objects.filter(topic__forum__category__in=viewable_category)

    # ``_generic_context`` stays True for branches that only narrowed the
    # ``topics`` queryset; the fallback block below then fills the context.
    base_url = None
    _generic_context = True

    action = request.GET['action']
    if action == 'show_24h':
        # Anything created or updated within the last 24 hours.
        date = timezone.now() - timedelta(days=1)
        if show_as_posts:
            context["posts"] = posts.filter(Q(created__gte=date) | Q(updated__gte=date))
        else:
            context["topics"] = topics.filter(Q(last_post__created__gte=date) | Q(last_post__updated__gte=date))
        _generic_context = False
    elif action == 'show_new':
        # Anything since the user's last-read marker (requires login).
        if not user.is_authenticated():
            raise Http404("Search 'show_new' not available for anonymous user.")
        try:
            last_read = PostTracking.objects.get(user=user).last_read
        except PostTracking.DoesNotExist:
            last_read = None

        if last_read:
            if show_as_posts:
                context["posts"] = posts.filter(Q(created__gte=last_read) | Q(updated__gte=last_read))
            else:
                context["topics"] = topics.filter(Q(last_post__created__gte=last_read) | Q(last_post__updated__gte=last_read))
            _generic_context = False
        else:
            #searching more than forum_settings.SEARCH_PAGE_SIZE in this way - not good idea :]
            topics_id = [topic.id for topic in topics[:forum_settings.SEARCH_PAGE_SIZE] if forum_extras.has_unreads(topic, user)]
            topics = Topic.objects.filter(id__in=topics_id) # to create QuerySet

    elif action == 'show_unanswered':
        # Topics whose only post is the opening one.
        topics = topics.filter(post_count=1)
    elif action == 'show_subscriptions':
        topics = topics.filter(subscribers__id=user.id)
    elif action == 'show_user':
        # Show all posts from user or topics started by user
        if not user.is_authenticated():
            raise Http404("Search 'show_user' not available for anonymous user.")

        user_id = request.GET.get("user_id", user.id)
        try:
            user_id = int(user_id)
        except ValueError:
            raise SuspiciousOperation()

        if user_id != user.id:
            try:
                search_user = User.objects.get(id=user_id)
            except User.DoesNotExist:
                messages.error(request, _("Error: User unknown!"))
                return HttpResponseRedirect(request.path)
            messages.info(request, _("Filter by user '%(username)s'.") % {'username': search_user.username})

        if show_as_posts:
            posts = posts.filter(user__id=user_id)
        else:
            # show as topic
            topics = topics.filter(posts__user__id=user_id).order_by("-last_post__created").distinct()

        # base_url lets the template link to the same search in the other flavour.
        base_url = "?action=show_user&user_id=%s&show_as=" % user_id
    elif action == 'search':
        # Full-text search via haystack; validated by PostSearchForm.
        form = PostSearchForm(request.GET)
        if not form.is_valid():
            return _render_search_form(form)

        keywords = form.cleaned_data['keywords']
        author = form.cleaned_data['author']
        forum = form.cleaned_data['forum']
        search_in = form.cleaned_data['search_in']
        sort_by = form.cleaned_data['sort_by']
        sort_dir = form.cleaned_data['sort_dir']

        query = SearchQuerySet().models(Post)

        if author:
            query = query.filter(author__username=author)

        if forum != '0':
            query = query.filter(forum__id=forum)

        if keywords:
            if search_in == 'all':
                query = query.filter(SQ(topic=keywords) | SQ(text=keywords))
            elif search_in == 'message':
                query = query.filter(text=keywords)
            elif search_in == 'topic':
                query = query.filter(topic=keywords)

        # Map the form's sort_by code to a model field; default to 'created'.
        order = {'0': 'created',
                 '1': 'author',
                 '2': 'topic',
                 '3': 'forum'}.get(sort_by, 'created')
        if sort_dir == 'DESC':
            order = '-' + order

        post_pks = query.values_list("pk", flat=True)

        if not show_as_posts:
            # TODO: We have here a problem to get a list of topics without double entries.
            # Maybe we must add a search index over topics?

            # Info: If whoosh backend used, setup HAYSTACK_ITERATOR_LOAD_PER_QUERY
            #    to a higher number to speed up
            context["topics"] = topics.filter(posts__in=post_pks).distinct()
        else:
            # FIXME: How to use the pre-filtered query from above?
            posts = posts.filter(pk__in=post_pks).order_by(order)
            context["posts"] = posts

        get_query_dict = request.GET.copy()
        get_query_dict.pop("show_as")
        base_url = "?%s&show_as=" % get_query_dict.urlencode()
        _generic_context = False

    # Fallback: derive context from the (possibly narrowed) topics queryset.
    if _generic_context:
        if show_as_posts:
            context["posts"] = posts.filter(topic__in=topics).order_by('-created')
        else:
            context["topics"] = topics

    if base_url is None:
        base_url = "?action=%s&show_as=" % action

    # Paginate and report the hit count; also expose the link that switches
    # between the post and topic renderings of the same search.
    if show_as_posts:
        context['posts_page'] = get_page(context['posts'], request, forum_settings.SEARCH_PAGE_SIZE)
        context["as_topic_url"] = base_url + "topics"
        post_count = context["posts"].count()
        messages.success(request, _("Found %i posts.") % post_count)
    else:
        context['topics_page'] = get_page(context['topics'], request, forum_settings.SEARCH_PAGE_SIZE)
        context["as_post_url"] = base_url + "posts"
        topic_count = context["topics"].count()
        messages.success(request, _("Found %i topics.") % topic_count)

    return render(request, template_name, context)

Example 29

Project: SmartElect
Source File: models.py
View license
    def execute(self):
        """
        Implement the changeset.

        If the changeset status is not valid to execute it,
        raise ChangesetException.

        Otherwise, try to execute the changeset and at the end, set the
        changeset status appropriately.

        Create ChangeRecords to record successful changes, and changes that
        might have been made but were not due to the current status of the
        affected citizen or registration (e.g., if in a rollback, the citizen
        is no longer in the status the changeset being rolled back left them
        in. Or in a block or unblock, if the citizen is already in the status
        the changeset was supposed to change them to).

        If the status is failed, no registration changes will have been
        applied (they are rolled back if needed).
        """

        logger.info("Execute changeset %s...", self.name)
        # Pre-flight checks: status of this changeset, the target changeset
        # (for rollbacks) and the change type itself must all be valid.
        if not self.in_executable_status():
            raise NotAnAllowedStatus("Cannot execute changeset in status %s"
                                     % self.get_status_display())
        if self.change == Changeset.CHANGE_ROLLBACK:
            # Can only rollback a successful or partially successful changeset
            if not self.other_changeset.in_rollbackable_status():
                raise NotAnAllowedStatus("Cannot rollback changeset in status %s"
                                         % self.other_changeset.get_status_display())
        if self.change not in Changeset.CHANGE_VALID_VALUES:
            raise ChangesetException("Cannot execute changeset, %s is not a valid change type",
                                     self.change)
        try:
            # Mark as executing before entering the atomic block, so this
            # status update is not undone if the changes below roll back.
            self.status = Changeset.STATUS_EXECUTING
            self.execution_start_time = now()
            self.save()
            with transaction.atomic():
                if self.change == Changeset.CHANGE_CENTER:
                    # Move registrations to the target center, recording each
                    # attempt whether or not a change was actually made.
                    changerecord_kwargs = dict(changeset=self, change=self.change,
                                               to_center=self.target_center)
                    for reg in self.get_registrations_to_change():
                        changerecord_kwargs.update(
                            citizen=reg.citizen,
                            from_center=reg.registration_center
                        )
                        if reg.registration_center == self.target_center:
                            # Citizen is already registered there.
                            # (Can happen if they uploaded a list of NIDs and later
                            # the citizen changed their registration.)
                            ChangeRecord.objects.create(changed=False, **changerecord_kwargs)
                        else:
                            reg.registration_center = self.target_center
                            reg.save_with_archive_version()
                            ChangeRecord.objects.create(changed=True, **changerecord_kwargs)
                elif self.change in [Changeset.CHANGE_BLOCK, Changeset.CHANGE_UNBLOCK]:
                    # Block/unblock citizens; a no-op (already in the desired
                    # state) is recorded with changed=False.
                    changerecord_kwargs = dict(changeset=self, change=self.change)
                    for citizen in self.get_citizens_to_change():
                        changerecord_kwargs['citizen'] = citizen
                        if self.change == Changeset.CHANGE_BLOCK and not citizen.blocked:
                            citizen.block()
                            ChangeRecord.objects.create(changed=True, **changerecord_kwargs)
                        elif self.change == Changeset.CHANGE_UNBLOCK and citizen.blocked:
                            citizen.unblock()
                            ChangeRecord.objects.create(changed=True, **changerecord_kwargs)
                        else:
                            ChangeRecord.objects.create(changed=False, **changerecord_kwargs)
                elif self.change == Changeset.CHANGE_ROLLBACK:
                    # Undo the changes made in another changeset, where possible
                    for change in ChangeRecord.objects.filter(changeset=self.other_changeset,
                                                              changed=True):
                        change.undo(self)
                    self.other_changeset.rollback_changeset = self
                    self.other_changeset.status = Changeset.STATUS_ROLLED_BACK
                    self.other_changeset.save()

                # Set the status depending on whether we applied all the requested changes
                if ChangeRecord.objects.filter(changeset=self, changed=False).exists():
                    self.status = Changeset.STATUS_PARTIALLY_SUCCESSFUL
                else:
                    self.status = Changeset.STATUS_SUCCESSFUL
                self.finish_time = now()
                self.save()
                logger.info("Changeset execution status: %s", self.get_status_display())
        except Exception as e:
            # Exiting the inner 'with transaction' by an exception will have triggered a rollback.
            # This log command will log the exception
            logger.exception("Executing changeset %s failed unexpectedly", self.name)
            self.status = Changeset.STATUS_FAILED
            self.error_text = str(e)
            self.finish_time = now()
            self.save()
Example 30

View license
    def setUp(self):
        """Build a full election-day fixture and generate the HQ reports
        under test.

        Creates offices, registration centers (including an oil center, an
        inactive-for-registration center, a copy center, and a deleted
        center), three elections (one in its polling window plus decoys
        before and after), CenterOpen records, registrations, and polling
        reports, then runs generate_election_day_hq_reports twice: once
        as-is (self.result) and once with the oil center closed for the
        election (self.result_with_inactive).
        """
        self.registrations_per_center = 4
        self.oil_center_period_1_voters = 1
        self.oil_center_period_2_voters = 2
        self.offices = [OfficeFactory(region=Office.REGION_EAST),
                        OfficeFactory(region=Office.REGION_WEST)]
        # Note: An oil center doesn't normally allow registrations, but it does so for
        # this testcase.
        self.oil_center = RegistrationCenterFactory(office=self.offices[0],
                                                    center_type=RegistrationCenter.Types.OIL)

        # !reg_open won't affect election day counts but it will affect whether
        # or not any registrations are found
        self.inactive_for_reg_center = RegistrationCenterFactory(office=self.offices[1],
                                                                 reg_open=False)

        self.centers = [self.oil_center,
                        RegistrationCenterFactory(office=self.offices[0]),
                        RegistrationCenterFactory(office=self.offices[0]),
                        RegistrationCenterFactory(office=self.offices[1]),
                        self.inactive_for_reg_center,
                        ]

        copy_center = RegistrationCenterFactory(office=self.offices[1], copy_of=self.centers[3])
        self.centers.append(copy_center)

        # Only self.election is currently within its polling window; the two
        # decoys exist so the code under test must pick the right election.
        self.election_decoy_before = ElectionFactory(
            name_english='decoy before',
            name_arabic='decoy before (ar)',
            polling_start_time=now() - timedelta(days=10),
            polling_end_time=now() - timedelta(days=9),
        )
        self.election = ElectionFactory(
            name_english='%s election' % type(self).__name__,
            name_arabic='not Arabic',
            polling_start_time=now() - timedelta(hours=2),
            polling_end_time=now() + timedelta(hours=2),
        )
        self.election_decoy_after = ElectionFactory(
            name_english='decoy after',
            name_arabic='decoy after (ar)',
            polling_start_time=now() + timedelta(days=9),
            polling_end_time=now() + timedelta(days=10),
        )

        # Every center except self.centers[1] reports as open.
        self.center_opens = []
        for center in self.centers:
            if center != self.centers[1]:
                self.center_opens.append(CenterOpenFactory(election=self.election,
                                                           registration_center=center))

        # CenterOpen may refer to a deleted center. Make sure that we don't find those
        self.deleted_center = RegistrationCenterFactory(office=self.offices[0], deleted=True)
        self.center_open_referring_to_deleted_center = CenterOpenFactory(
            election=self.election,
            registration_center=self.deleted_center)

        # Performance enhancement: this dummy person and SMS allow me to avoid creation of two
        # spurious objects for each registration I create.
        self.citizen = CitizenFactory()
        self.sms = SMSFactory(citizen=self.citizen)

        # Create registrations, but be careful not to create any at the copy center
        # or at the center which doesn't support registrations.
        self.registrations = []
        for center in self.centers:
            if center.reg_open and not center.copy_of:
                self.registrations += \
                    RegistrationFactory.create_batch(self.registrations_per_center,
                                                     citizen=self.citizen,
                                                     sms=self.sms,
                                                     registration_center=center)

        # These reports include quirks such as multiple reports for a center (very common in real
        # life), a missing final period report, and multiple reports for the same center & period.
        self.reports = [
            PollingReportFactory(election=self.election,
                                 registration_center=self.oil_center,
                                 period_number=FIRST_PERIOD_NUMBER,
                                 num_voters=self.oil_center_period_1_voters),
            PollingReportFactory(election=self.election,
                                 registration_center=self.oil_center,
                                 period_number=FIRST_PERIOD_NUMBER + 1,
                                 num_voters=self.oil_center_period_2_voters),
            PollingReportFactory(election=self.election,
                                 registration_center=self.centers[2],
                                 period_number=FIRST_PERIOD_NUMBER,
                                 num_voters=1),
            # The next two reports are for the same center & period with different num_voters
            # to exercise the code that sorts by modification_date.
            PollingReportFactory(election=self.election,
                                 registration_center=self.centers[2],
                                 period_number=FIRST_PERIOD_NUMBER + 1,
                                 num_voters=4),
            PollingReportFactory(election=self.election,
                                 registration_center=self.centers[2],
                                 period_number=FIRST_PERIOD_NUMBER + 1,
                                 num_voters=6),
            PollingReportFactory(election=self.election,
                                 registration_center=self.centers[3],
                                 period_number=FIRST_PERIOD_NUMBER,
                                 num_voters=1),
            PollingReportFactory(election=self.election,
                                 registration_center=self.centers[3],
                                 period_number=FIRST_PERIOD_NUMBER + 1,
                                 num_voters=4),
            # This report for a deleted center should be ignored
            PollingReportFactory(election=self.election,
                                 registration_center=self.deleted_center,
                                 period_number=FIRST_PERIOD_NUMBER + 1,
                                 num_voters=50),
            PollingReportFactory(election=self.election,
                                 registration_center=self.inactive_for_reg_center,
                                 period_number=FIRST_PERIOD_NUMBER + 1,
                                 num_voters=50),
            # This report for a copy center should count towards the original/parent center
            PollingReportFactory(election=self.election,
                                 registration_center=copy_center,
                                 period_number=LAST_PERIOD_NUMBER,
                                 num_voters=1), ]

        self.result = generate_election_day_hq_reports(self.election)
        # Create an alternate result which reflects that the "oil center" is
        # marked inactive for this election.
        self.inactive_on_election = CenterClosedForElection(
            registration_center=self.oil_center, election=self.election
        )
        self.inactive_on_election.full_clean()
        self.inactive_on_election.save()
        self.result_with_inactive = generate_election_day_hq_reports(self.election)

Example 31

Project: splunk-webframework
Source File: dates.py
View license
def _get_next_prev(generic_view, date, is_previous, period):
    """
    Helper: return the next or previous valid date for a date-based
    generic view, so month/day navigation links never point at a 404.

    ``period`` names the interval ("month", "day", ...) and selects the
    view's ``_get_current_<period>`` / ``_get_next_<period>`` helpers.

    The decision matrix, driven by the view's allow_empty/allow_future:

        * both true: return the naive next/previous interval start,
          regardless of whether any objects exist there.

        * allow_empty true, allow_future false: return the naive result
          only if it is not in the future, else None.

        * allow_empty false, allow_future true: return the next date that
          actually contains an object, even a future one; None if there
          is no such object.

        * both false: return the next date containing an object, unless
          that date is in the future or no object exists, in which case
          return None.
    """
    field = generic_view.get_date_field()
    allow_empty = generic_view.get_allow_empty()
    allow_future = generic_view.get_allow_future()

    current_of = getattr(generic_view, '_get_current_%s' % period)
    next_of = getattr(generic_view, '_get_next_%s' % period)

    # Bounds of the interval containing ``date``.
    interval_start = current_of(date)
    interval_end = next_of(date)

    if allow_empty:
        # The naive candidate is acceptable without consulting the database.
        if is_previous:
            candidate = current_of(interval_start - datetime.timedelta(days=1))
        else:
            candidate = interval_end
        # Reject future dates unless the view explicitly allows them.
        return candidate if (allow_future or candidate <= timezone_today()) else None

    # Otherwise we must find an actual object on the far side of the
    # current interval; build the lookup and ordering for the direction.
    if is_previous:
        filters = {'%s__lt' % field: generic_view._make_date_lookup_arg(interval_start)}
        order = '-%s' % field
    else:
        filters = {'%s__gte' % field: generic_view._make_date_lookup_arg(interval_end)}
        order = field

    # Exclude future objects when the view disallows them. __lte doesn't
    # conflict with the __lt key used for the previous-direction lookup.
    if not allow_future:
        cutoff = timezone.now() if generic_view.uses_datetime_field else timezone_today()
        filters['%s__lte' % field] = cutoff

    qs = generic_view.get_queryset().filter(**filters).order_by(order)

    # First object in that direction, or None if navigation ends here.
    try:
        found = getattr(qs[0], field)
    except IndexError:
        return None

    # Datetimes are converted to dates in the active time zone.
    if generic_view.uses_datetime_field:
        if settings.USE_TZ:
            found = timezone.localtime(found)
        found = found.date()

    # Normalize to the first day of the containing period.
    return current_of(found)

Example 32

Project: froide
Source File: tasks.py
View license
def _batch_update(update_requester=True, update_follower=True):
    """
    Send digest emails summarizing the last 24 hours of FOI activity.

    Collects new comments on FoiMessages (and, for followers, FoiEvents)
    since yesterday, groups them per request, and mails:

      * each request owner a digest of other users' comments on their
        requests, when ``update_requester`` is true;
      * each follower a digest of comments and events made by others,
        when ``update_follower`` is true.
    """
    # Event types followers should not be re-notified about here.
    event_black_list = ("message_received", "message_sent", 'set_concrete_law',)
    translation.activate(settings.LANGUAGE_CODE)
    requests = {}   # request id -> FoiRequest
    users = {}      # follower user id -> user object
    gte_date = timezone.now() - timedelta(days=1)
    # request id -> list of (timestamp, rendered line, acting user id)
    updates = {}

    # Gather all comments on FoiMessages submitted in the last day.
    message_type = ContentType.objects.get_for_model(FoiMessage)
    for comment in Comment.objects.filter(content_type=message_type,
            submit_date__gte=gte_date):
        try:
            message = FoiMessage.objects.get(pk=comment.object_pk)
            if message.request_id not in requests:
                requests[message.request_id] = message.request
            updates.setdefault(message.request_id, [])
            tf = TimeFormat(comment.submit_date)
            updates[message.request_id].append(
                (
                    comment.submit_date,
                    _("%(time)s: New comment by %(name)s") % {
                        "time": tf.format(_(settings.TIME_FORMAT)),
                        "name": comment.name
                    },
                    comment.user_id
                )
            )
        except FoiMessage.DoesNotExist:
            # Comment's target message was deleted; nothing to report.
            pass

    if update_requester:
        requester_updates = defaultdict(dict)
        # send out update on comments to request users
        for req_id, request in iteritems(requests):
            if not request.user.is_active:
                continue
            if not request.user.email:
                continue
            if not updates[req_id]:
                continue
            # Skip if every update was made by the requester themselves.
            if not any([x for x in updates[req_id] if x[2] != request.user_id]):
                continue

            # Chronological order by timestamp.
            sorted_events = sorted(updates[req_id], key=lambda x: x[0])

            requester_updates[request.user][request] = {
                'events': [x[1] for x in sorted_events]
            }

        for user, request_dict in iteritems(requester_updates):
            FoiRequest.send_update(request_dict, user=user)

    if update_follower:
        # update followers

        # Add recent FoiEvents (except blacklisted types) to the updates.
        for event in FoiEvent.objects.filter(timestamp__gte=gte_date).select_related("request"):
            if event.event_name in event_black_list:
                continue
            if event.request_id not in requests:
                requests[event.request_id] = event.request
            updates.setdefault(event.request_id, [])
            tf = TimeFormat(event.timestamp)
            updates[event.request_id].append(
                (
                    event.timestamp,
                    _("%(time)s: %(text)s") % {
                        # NOTE(review): this translates the literal string
                        # "TIME_FORMAT", while the comment branch above uses
                        # _(settings.TIME_FORMAT) — possibly a bug; confirm.
                        "time": tf.format(_("TIME_FORMAT")),
                        "text": event.as_text()
                    },
                    event.user_id
                )
            )

        # Send out update on comments and event to followers
        follower_updates = defaultdict(dict)
        for req_id, request in iteritems(requests):
            if not updates[req_id]:
                continue
            updates[req_id].sort(key=lambda x: x[0])
            followers = FoiRequestFollower.objects.filter(
                    request=request).select_related('user')
            for follower in followers:
                # Anonymous (email-only) followers must have confirmed.
                if follower.user is None and not follower.confirmed:
                    continue
                if follower.user and (
                        not follower.user.is_active or not follower.user.email):
                    continue
                if not request.is_visible(None):
                    continue
                # Skip if every update was made by this follower.
                if not any([x for x in updates[req_id] if x[2] != follower.user_id]):
                    continue
                users[follower.user_id] = follower.user
                # Key registered followers by user id, anonymous ones by email.
                ident = follower.user_id or follower.email
                follower_updates[ident][request] = {
                    'unfollow_link': follower.get_unfollow_link(),
                    'events': [x[1] for x in updates[req_id]]
                }

        for user_id, req_event_dict in iteritems(follower_updates):
            user = users.get(user_id)
            email = None
            if user is None:
                # ident was an email address, not a user id.
                email = user_id
            FoiRequestFollower.send_update(req_event_dict, user=user, email=email)

Example 33

Project: cartridge
Source File: tests.py
View license
    def test_category_filters(self):
        """
        Test that the category filters return the expected products.

        Exercises option filters, price filters (unit and sale prices with
        sale-date windows), explicitly assigned products, and the
        ``combined`` flag that ANDs/ORs the individual filters together.
        """
        self._product.variations.all().delete()
        self.assertCategoryFilteredProducts(0)

        # Test option filters - add a variation with one option, and
        # assign another option as a category filter. Check that no
        # products match the filters, then add the first option as a
        # category filter and check that the product is matched.
        option_field, options = list(self._options.items())[0]
        option1, option2 = options[:2]
        # Variation with the first option.
        self._product.variations.create_from_options({option_field: [option1]})
        # Filter with the second option
        # NOTE(review): this lookup's result is never added as a filter and
        # is overwritten three lines below — confirm whether an
        # options.add(option) call was intended here.
        option = ProductOption.objects.get(type=option_field[-1], name=option2)
        self.assertCategoryFilteredProducts(0)
        # First option as a filter.
        option = ProductOption.objects.get(type=option_field[-1], name=option1)
        self._category.options.add(option)
        self.assertCategoryFilteredProducts(1)

        # Test price filters - add a price filter that when combined
        # with previously created filters, should match no products.
        # Update the variations to match the filter for a unit price,
        # then with sale prices, checking correct matches based on sale
        # dates.
        self._category.combined = True
        self._category.price_min = TEST_PRICE
        self.assertCategoryFilteredProducts(0)
        self._product.variations.all().update(unit_price=TEST_PRICE)
        self.assertCategoryFilteredProducts(1)
        # A sale starting tomorrow should not match; one started yesterday should.
        n, d = now(), timedelta(days=1)
        tomorrow, yesterday = n + d, n - d
        self._product.variations.all().update(unit_price=0,
                                              sale_price=TEST_PRICE,
                                              sale_from=tomorrow)
        self.assertCategoryFilteredProducts(0)
        self._product.variations.all().update(sale_from=yesterday)
        self.assertCategoryFilteredProducts(1)

        # Clean up previously added filters and check that explicitly
        # assigned products match.
        for option in self._category.options.all():
            self._category.options.remove(option)
        self._category.price_min = None
        self.assertCategoryFilteredProducts(0)
        self._category.products.add(self._product)
        self.assertCategoryFilteredProducts(1)

        # Test the ``combined`` field - create a variation which
        # matches a price filter, and a separate variation which
        # matches an option filter, and check that the filters
        # have no results when ``combined`` is set, and that the
        # product matches when ``combined`` is disabled.
        self._product.variations.all().delete()
        self._product.variations.create_from_options({option_field:
                                                     [option1, option2]})
        # Price variation and filter.
        variation = self._product.variations.get(**{option_field: option1})
        variation.unit_price = TEST_PRICE
        variation.save()
        self._category.price_min = TEST_PRICE
        # Option variation and filter.
        option = ProductOption.objects.get(type=option_field[-1], name=option2)
        self._category.options.add(option)
        # Check ``combined``.
        self._category.combined = True
        self.assertCategoryFilteredProducts(0)
        self._category.combined = False
        self.assertCategoryFilteredProducts(1)

Example 34

Project: django-feedinator
Source File: core.py
View license
def update_feed(feed_id, reset=False):
    """
    Update an individual feed regardless of freshness.
    """

    feed = Feed.objects.get(pk=feed_id)

    if reset:
        FeedEntry.objects.filter(feed=feed).delete()

    if settings.DEBUG:
        print "--- updating %s" % feed.title

    f = feedparser.parse(feed.url)

    for entry in f.entries:

        if not "id" in entry:
            if settings.DEBUG:
                print "!!!", entry.title, "has no id"
            continue

        entry_exists = feed.entries.filter(uid=entry.id).count()

        if not entry_exists:

            fe = FeedEntry(
                feed=feed,
                uid=entry.id,
                title=entry.title,
                link=entry.link,
                date_updated=tuple_to_datetime(entry.updated_parsed, feed.timezone),
                last_fetched=now()
            )

            fe.summary = entry.get("summary", "")
            if "content" in entry:
                for content in entry.content:
                    fe.content = content.get("value", "")
            else:
                fe.content = entry.get("summary", "")

            if "published_parsed" in entry:
                fe.date_published = tuple_to_datetime(entry.published_parsed, feed.timezone)
            else:
                fe.date_published = tuple_to_datetime(entry.updated_parsed, feed.timezone)

            if "author_detail" in entry:
                fe.author_name = entry.author_detail.get("name", "")
                fe.author_email = entry.author_detail.get("email", None)
                fe.author_uri = entry.author_detail.get("href", None)
            elif "author" in entry:
                fe.author_name = entry.author

            fe.save()

            if "tags" in entry:
                for name in entry.tags:
                    Tag(name=name.term, feed_entry=fe).save()

            if settings.DEBUG:
                print fe

        else:

            fe = FeedEntry.objects.get(uid=entry.id, feed=feed)

            if fe.date_updated < tuple_to_datetime(entry.updated_parsed, feed.timezone):

                fe.title = entry.title
                fe.date_updated = tuple_to_datetime(entry.updated_parsed, feed.timezone)
                fe.last_fetched = now()

                fe.summary = entry.get("summary", "")

                if "content" in entry:
                    for content in entry.content:
                        fe.content = content.get("value", "")
                else:
                    fe.content = entry.get("summary", "")

                fe.save()

                if "tags" in entry:
                    fe.tags.all().delete()
                    for name in entry.tags:
                        Tag(name=name.term, feed_entry=fe).save()

                if settings.DEBUG:
                    print "UPDATED %s" % fe

    feed.last_fetched = now()
    feed.save()

Example 35

Project: taiga-back
Source File: test_emails.py
View license
    def handle(self, *args, **options):
        """
        Send one of every outgoing email type to the given address, using
        random existing database objects as template context, so the email
        templates can be checked visually.

        Usage: ./manage.py test_emails <email-address>
        """
        if len(args) != 1:
            print("Usage: ./manage.py test_emails <email-address>")
            return

        locale = options.get('locale')
        test_email = args[0]

        # Register email
        context = {"lang": locale,
                    "user": get_user_model().objects.all().order_by("?").first(),
                    "cancel_token": "cancel-token"}

        email = mail_builder.registered_user(test_email, context)
        email.send()

        # Membership invitation
        membership = Membership.objects.order_by("?").filter(user__isnull=True).first()
        membership.invited_by = get_user_model().objects.all().order_by("?").first()
        membership.invitation_extra_text = "Text example, Text example,\nText example,\n\nText example"

        context = {"lang": locale, "membership": membership}
        email = mail_builder.membership_invitation(test_email, context)
        email.send()

        # Membership notification
        context = {"lang": locale,
                   "membership": Membership.objects.order_by("?").filter(user__isnull=False).first()}
        email = mail_builder.membership_notification(test_email, context)
        email.send()

        # Feedback
        context = {
            "lang": locale,
            "feedback_entry": {
                "full_name": "Test full name",
                "email": "[email protected]",
                "comment": "Test comment",
            },
            "extra": {
                "key1": "value1",
                "key2": "value2",
            },
        }
        email = mail_builder.feedback_notification(test_email, context)
        email.send()

        # Password recovery
        context = {"lang": locale, "user": get_user_model().objects.all().order_by("?").first()}
        email = mail_builder.password_recovery(test_email, context)
        email.send()

        # Change email
        context = {"lang": locale, "user": get_user_model().objects.all().order_by("?").first()}
        email = mail_builder.change_email(test_email, context)
        email.send()

        # Export/Import emails
        context = {
            "lang": locale,
            "user": get_user_model().objects.all().order_by("?").first(),
            "project": Project.objects.all().order_by("?").first(),
            "error_subject": "Error generating project dump",
            "error_message": "Error generating project dump",
        }
        email = mail_builder.export_error(test_email, context)
        email.send()
        context = {
            "lang": locale,
            "user": get_user_model().objects.all().order_by("?").first(),
            "error_subject": "Error importing project dump",
            "error_message": "Error importing project dump",
        }
        email = mail_builder.import_error(test_email, context)
        email.send()

        # Dump-ready email, with a deletion deadline one day out.
        deletion_date = timezone.now() + datetime.timedelta(seconds=60*60*24)
        context = {
            "lang": locale,
            "url": "http://dummyurl.com",
            "user": get_user_model().objects.all().order_by("?").first(),
            "project": Project.objects.all().order_by("?").first(),
            "deletion_date": deletion_date,
        }
        email = mail_builder.dump_project(test_email, context)
        email.send()

        context = {
            "lang": locale,
            "user": get_user_model().objects.all().order_by("?").first(),
            "project": Project.objects.all().order_by("?").first(),
        }
        email = mail_builder.load_dump(test_email, context)
        email.send()

        # Notification emails: (model, template name) pairs to render.
        notification_emails = [
            ("issues.Issue", "issues/issue-change"),
            ("issues.Issue", "issues/issue-create"),
            ("issues.Issue", "issues/issue-delete"),
            ("tasks.Task", "tasks/task-change"),
            ("tasks.Task", "tasks/task-create"),
            ("tasks.Task", "tasks/task-delete"),
            ("userstories.UserStory", "userstories/userstory-change"),
            ("userstories.UserStory", "userstories/userstory-create"),
            ("userstories.UserStory", "userstories/userstory-delete"),
            ("milestones.Milestone", "milestones/milestone-change"),
            ("milestones.Milestone", "milestones/milestone-create"),
            ("milestones.Milestone", "milestones/milestone-delete"),
            ("wiki.WikiPage", "wiki/wikipage-change"),
            ("wiki.WikiPage", "wiki/wikipage-create"),
            ("wiki.WikiPage", "wiki/wikipage-delete"),
        ]

        context = {
            "lang": locale,
            "project": Project.objects.all().order_by("?").first(),
            "changer": get_user_model().objects.all().order_by("?").first(),
            "history_entries": HistoryEntry.objects.all().order_by("?")[0:5],
            "user": get_user_model().objects.all().order_by("?").first(),
        }

        for notification_email in notification_emails:
            model = apps.get_model(*notification_email[0].split("."))
            # Fallback snapshot used when no real history snapshot is found.
            snapshot = {
                "subject": "Tests subject",
                "ref": 123123,
                "name": "Tests name",
                "slug": "test-slug"
            }
            # Search random instances for the first available history snapshot.
            queryset = model.objects.all().order_by("?")
            for obj in queryset:
                end = False
                entries = get_history_queryset_by_model_instance(obj).filter(is_snapshot=True).order_by("?")

                for entry in entries:
                    if entry.snapshot:
                        snapshot = entry.snapshot
                        end = True
                        break
                if end:
                    break
            context["snapshot"] = snapshot

            # Build a one-off mail class bound to this template name.
            cls = type("InlineCSSTemplateMail", (InlineCSSTemplateMail,), {"name": notification_email[1]})
            email = cls()
            email.send(test_email, context)


        # Transfer Emails
        context = {
            "project": Project.objects.all().order_by("?").first(),
            "requester": User.objects.all().order_by("?").first(),
        }
        email = mail_builder.transfer_request(test_email, context)
        email.send()

        context = {
            "project": Project.objects.all().order_by("?").first(),
            "receiver": User.objects.all().order_by("?").first(),
            "token": "test-token",
            "reason": "Test reason"
        }
        email = mail_builder.transfer_start(test_email, context)
        email.send()

        context = {
            "project": Project.objects.all().order_by("?").first(),
            "old_owner": User.objects.all().order_by("?").first(),
            "new_owner": User.objects.all().order_by("?").first(),
            "reason": "Test reason"
        }
        email = mail_builder.transfer_accept(test_email, context)
        email.send()

        context = {
            "project": Project.objects.all().order_by("?").first(),
            "rejecter": User.objects.all().order_by("?").first(),
            "reason": "Test reason"
        }
        email = mail_builder.transfer_reject(test_email, context)
        email.send()

Example 36

Project: taiga-back
Source File: sample_data.py
View license
    def handle(self, *args, **options):
        """Populate the database with randomized sample data.

        Creates users, projects (regular, empty and blocked), memberships and
        invitations, custom attributes, milestones, user stories, tasks,
        issues, wiki pages/links, epics and likes.  All randomness comes from
        ``self.sd`` (the sample-data generator), so output differs between
        runs unless it is seeded elsewhere — TODO confirm seeding.
        """
        # Prevent events emission when sample data is running
        disconnect_events_signals()

        # The existing superuser is always the first entry in self.users.
        self.users = [User.objects.get(is_superuser=True)]

        # create users: a fixed list when BASE_USERS is configured,
        # otherwise NUM_USERS randomly generated ones.
        if BASE_USERS:
            for username, full_name, email in BASE_USERS:
                self.users.append(self.create_user(username=username, full_name=full_name, email=email))
        else:
            for x in range(NUM_USERS):
                self.users.append(self.create_user(counter=x))

        # create project
        # Index layout: [0, NUM_PROJECTS) regular projects, then the empty
        # ones, then the blocked ones.
        projects_range = range(NUM_PROJECTS + NUM_EMPTY_PROJECTS + NUM_BLOCKED_PROJECTS)
        empty_projects_range = range(NUM_PROJECTS, NUM_PROJECTS + NUM_EMPTY_PROJECTS )
        blocked_projects_range = range(
            NUM_PROJECTS + NUM_EMPTY_PROJECTS,
            NUM_PROJECTS + NUM_EMPTY_PROJECTS + NUM_BLOCKED_PROJECTS
        )

        for x in projects_range:
            # Projects 2 and 4 are always private; the rest at random.
            project = self.create_project(
                x,
                is_private=(x in [2, 4] or self.sd.boolean()),
                blocked_code = BLOCKED_BY_STAFF if x in(blocked_projects_range) else None
            )

            # added memberships
            # Also collect the roles that count toward stats ("computable").
            computable_project_roles = set()
            for user in self.users:
                if user == project.owner:
                    continue

                role = self.sd.db_object_from_queryset(project.roles.all())

                Membership.objects.create(email=user.email,
                                          project=project,
                                          role=role,
                                          is_admin=self.sd.boolean(),
                                          user=user)

                if role.computable:
                    computable_project_roles.add(role)

            # added invitations (memberships with a token and no user yet)
            for i in range(NUM_INVITATIONS):
                role = self.sd.db_object_from_queryset(project.roles.all())

                Membership.objects.create(email=self.sd.email(),
                                          project=project,
                                          role=role,
                                          is_admin=self.sd.boolean(),
                                          token=self.sd.hex_chars(10,10))

                if role.computable:
                    computable_project_roles.add(role)

            # If the project isn't empty
            if x not in empty_projects_range:
                # added custom attributes
                # NOTE(review): on Python 3 the ``order=i`` arguments below
                # reuse the stale index from the invitations loop above (the
                # comprehension variable does not leak), so every custom
                # attribute gets the same order value — looks unintentional;
                # verify against the project's intent.
                names = set([self.sd.words(1, 3) for i in range(1, 6)])
                for name in names:
                    EpicCustomAttribute.objects.create(name=name,
                                                       description=self.sd.words(3, 12),
                                                       type=self.sd.choice(TYPES_CHOICES)[0],
                                                       project=project,
                                                       order=i)
                names = set([self.sd.words(1, 3) for i in range(1, 6)])
                for name in names:
                    UserStoryCustomAttribute.objects.create(name=name,
                                                            description=self.sd.words(3, 12),
                                                            type=self.sd.choice(TYPES_CHOICES)[0],
                                                            project=project,
                                                            order=i)
                names = set([self.sd.words(1, 3) for i in range(1, 6)])
                for name in names:
                    TaskCustomAttribute.objects.create(name=name,
                                                       description=self.sd.words(3, 12),
                                                       type=self.sd.choice(TYPES_CHOICES)[0],
                                                       project=project,
                                                       order=i)
                names = set([self.sd.words(1, 3) for i in range(1, 6)])
                for name in names:
                    IssueCustomAttribute.objects.create(name=name,
                                                        description=self.sd.words(3, 12),
                                                        type=self.sd.choice(TYPES_CHOICES)[0],
                                                        project=project,
                                                        order=i)

                # Milestones start 55 days in the past, 15 days each, so some
                # are finished, one may be in progress, and some are future.
                start_date = now() - datetime.timedelta(55)

                # create milestones
                for y in range(self.sd.int(*NUM_MILESTONES)):
                    end_date = start_date + datetime.timedelta(15)
                    milestone = self.create_milestone(project, start_date, end_date)

                    # create user stories associated to milestones
                    for z in range(self.sd.int(*NUM_USS)):
                        us = self.create_us(project, milestone, computable_project_roles)

                        # create tasks: finished milestones get the
                        # NUM_TASKS_FINISHED range and closed tasks.
                        rang = NUM_TASKS_FINISHED if start_date <= now() and end_date <= now() else NUM_TASKS
                        for w in range(self.sd.int(*rang)):
                            if start_date <= now() and end_date <= now():
                                task = self.create_task(project, milestone, us, start_date,
                                                        end_date, closed=True)
                            elif start_date <= now() and end_date >= now():
                                task = self.create_task(project, milestone, us, start_date,
                                                        now())
                            else:
                                # No task on not initiated milestones
                                pass

                    start_date = end_date

                # created unassociated (backlog) user stories.
                for y in range(self.sd.int(*NUM_USS_BACK)):
                    us = self.create_us(project, None, computable_project_roles)

                # create bugs.
                for y in range(self.sd.int(*NUM_ISSUES)):
                    bug = self.create_bug(project)

                # create wiki pages and wiki links
                wiki_page = self.create_wiki_page(project, "home")

                for y in range(self.sd.int(*NUM_WIKI_LINKS)):
                    wiki_link = self.create_wiki_link(project)
                    if self.sd.boolean():
                        self.create_wiki_page(project, wiki_link.href)

                # create epics
                for y in range(self.sd.int(*NUM_EPICS)):
                    epic = self.create_epic(project)

            # Reload: project fields (e.g. tags_colors) may have been updated
            # as a side effect of creating the objects above.
            project.refresh_from_db()

            # Set color for some tags:
            for tag in project.tags_colors:
                if self.sd.boolean():
                    tag[1] = self.generate_color(tag[0])

            # Set a value to total_story_points to show the deadline in the backlog
            project_stats = get_stats_for_project(project)
            defined_points = project_stats["defined_points"]
            project.total_story_points = int(defined_points * self.sd.int(5,12) / 10)
            project.save()

            self.create_likes(project)

Example 37

Project: WIPSTER
Source File: views.py
View license
def upload_form(request):
    """Display the malware-sample upload form and process submissions.

    GET  -> render an empty form plus the 25 most recent samples.
    POST -> validate the form, persist the uploaded sample with its hashes,
            run type-specific static analysis on it, then redirect to the
            sample's analysis page.

    Returns an HttpResponse (rendered page) or an HttpResponseRedirect.
    """
    if request.method == 'POST':
        form = UploadFileForm(request.POST, request.FILES)
        if form.is_valid():
            f = request.FILES['sample']

            # Persist the sample with identifying hashes computed up front.
            newsample = Sample(
                sample = f,
                ticket = request.POST['ticket'],
                filename = f.name,
                size = f.size,
                type = handler.get_filetype(f),
                md5 = handler.get_md5(f),
                sha1 = handler.get_sha1(f),
                sha256 = handler.get_sha256(f),
                fuzzy = handler.get_fuzzy(f),
            )
            newsample.save()

            # Post-processing: re-fetch the newest sample (the one just saved).
            s = Sample.objects.filter().order_by('-id')[0]
            s.exif = handler.get_exif(s.sample)
            s.strings = handler.get_strings(s.sample)
            s.balbuzard = handler.get_balbuzard(s.sample)
            s.trid = handler.get_trid(s.sample)

            # SSDEEP/fuzzy hash comparison against previously seen samples.
            s.ssdeep_compare = handler.ssdeep_compare(s.fuzzy, s.md5)

            # VirusTotal lookup by MD5; store only when results came back.
            vt_res, vt_short_res = handler.get_vt(s.md5)
            if vt_res:
                s.vt = vt_res
                s.vt_short = vt_short_res

            # If EXE file, run EXE-specific checks.
            # Fix: the original condition was `"PE32" and "Windows" in s.type`,
            # where the literal "PE32" is always truthy, so only the "Windows"
            # membership test was actually evaluated.
            if "PE32" in s.type and "Windows" in s.type:
                s.peframe = handler.get_peframe(s.sample)
                s.pescanner = handler.get_pescanner(s.sample)

            # If PDF file, run PDF-specific checks
            if "PDF" in s.type:
                s.pdfid = handler.get_pdfid(s.sample)
                s.peepdf = handler.get_peepdf(s.sample)
                s.pdf_strings = handler.get_pdfstrings(s.sample)

            # If DOC file, run DOC-specific checks
            if "Document File V2" in s.type:
                s.oleid = handler.get_oleid(s.sample)
                # If valid OLE file, run OLEMETA
                olematch = re.compile(r'\|\s+OLE format\s+\|\s+True\s+\|')
                if olematch.search(s.oleid):
                    s.olemeta = handler.get_olemeta(s.sample)
                # If VBA code detected, run OLEVBA
                vbamatch = re.compile(r'\|\s+VBA Macros\s+\|\s+True\s+\|')
                if vbamatch.search(s.oleid):
                    s.olevba = handler.get_olevba(s.sample)

            # If RTF file, run RTFOBJ.  Per-object strings/balbuzard analysis
            # was removed upstream as too resource-intensive.
            if "Rich Text Format" in s.type:
                rtfobj, rtflist = handler.get_rtfobj(s.sample)
                s.rtfobj = rtfobj

            s.save()

            newpage = "/sanalysis/md5/" + s.md5 + "/?upload=True"

            return HttpResponseRedirect(newpage)
        else:
            # Fix: re-render with the *bound* form so validation errors are
            # shown to the user (the original replaced it with a fresh empty
            # form), and use the same render() call shape as the GET branch
            # (the deprecated context_instance argument is unnecessary —
            # render() already applies the request context).
            sample = Sample.objects.filter(created__lte=timezone.now()).order_by('-id')[:25]
            return render(request, 'sanalysis/upload_form.html', {'form': form, 'sample': sample})

    else:
        form = UploadFileForm()
        sample = Sample.objects.filter(created__lte=timezone.now()).order_by('-id')[:25]
        return render(request, 'sanalysis/upload_form.html', {'form': form, 'sample': sample})

Example 38

Project: django-bulbs
Source File: models.py
View license
    def percolate_special_coverage(self, max_size=10, sponsored_only=False):
        """gets list of active, sponsored special coverages containing this content via
        Elasticsearch Percolator (see SpecialCoverage._save_percolator)

        Sorting:
            1) Manually added
            2) Most recent start date

        Args:
            max_size: maximum number of percolator ids to return.
            sponsored_only: if True, unsponsored special coverages are
                excluded (zero-weighted); otherwise they rank below
                sponsored ones via an inverse boost.

        Returns:
            List of percolator document id strings whose score is positive;
            zero-scored entries (inactive, missing start_date,
            non-special-coverage, or unsponsored when sponsored_only) are
            filtered out.
        """

        # Elasticsearch v1.4 percolator range query does not support DateTime range queries
        # (PercolateContext.nowInMillisImpl is not implemented). Once using
        # v1.6+ we can instead compare "start_date/end_date" to python DateTime
        now_epoch = datetime_to_epoch_seconds(timezone.now())

        MANUALLY_ADDED_BOOST = 10
        SPONSORED_BOOST = 100  # Must be order of magnitude higher than "Manual" boost

        # Unsponsored boosting to either lower priority or exclude
        if sponsored_only:
            # Omit unsponsored
            unsponsored_boost = 0
        else:
            # Below sponsored (inverse boost, since we're filtering on "sponsored=False"
            unsponsored_boost = (1.0 / SPONSORED_BOOST)

        # ES v1.4 has more limited percolator capabilities than later
        # implementations. As such, in order to get this to work, we need to
        # sort via scoring_functions, and then manually filter out zero scores.
        sponsored_filter = {
            "query": {
                "function_score": {
                    "functions": [

                        # Boost Recent Special Coverage
                        # Base score is start time
                        # Note: ES 1.4 sorting granularity is poor for times
                        # within 1 hour of each other.
                        {

                            # v1.4 "field_value_factor" does not yet support
                            # "missing" param, and so must filter on whether
                            # "start_date" field exists.
                            "filter": {
                                "exists": {
                                    "field": "start_date",
                                },
                            },
                            "field_value_factor": {
                                "field": "start_date",
                            }
                        },
                        {
                            # Related to above, if "start_date" not found, omit
                            # via zero score.
                            "filter": {
                                "not": {
                                    "exists": {
                                        "field": "start_date",
                                    },
                                },
                            },
                            "weight": 0,
                        },


                        # Ignore non-special-coverage percolator entries
                        {
                            "filter": {
                                "not": {
                                    "prefix": {"_id": "specialcoverage"},
                                },
                            },
                            "weight": 0,
                        },

                        # Boost Manually Added Content
                        {
                            "filter": {
                                "terms": {
                                    "included_ids": [self.id],
                                }
                            },
                            "weight": MANUALLY_ADDED_BOOST,
                        },
                        # Penalize Inactive (Zero Score Will be Omitted)
                        # Inactive = starts in the future OR already ended.
                        {
                            "filter": {
                                "or": [
                                    {
                                        "range": {
                                            "start_date_epoch": {
                                                "gt": now_epoch,
                                            },
                                        }
                                    },
                                    {
                                        "range": {
                                            "end_date_epoch": {
                                                "lte": now_epoch,
                                            },
                                        }
                                    },
                                ],
                            },
                            "weight": 0,
                        },
                        # Penalize Unsponsored (will either exclude or lower
                        # based on "sponsored_only" flag)
                        {
                            "filter": {
                                "term": {
                                    "sponsored": False,
                                }
                            },
                            "weight": unsponsored_boost,
                        },
                    ],
                },
            },

            "sort": "_score",  # The only sort method supported by ES v1.4 percolator
            "size": max_size,  # Required for sort
        }

        results = _percolate(index=self.mapping.index,
                             doc_type=self.mapping.doc_type,
                             content_id=self.id,
                             body=sponsored_filter)

        return [r["_id"] for r in results
                # Zero score used to omit results via scoring function (ex: inactive)
                if r['_score'] > 0]

Example 39

Project: django-bulbs
Source File: test_custom_search.py
View license
    def setUp(self):
        """Build fixture content plus saved-search expectation tables.

        Creates four feature types, five tags, and six content items
        published at 12-hour intervals starting one step in the *future*
        (so the first item is unpublished).  Then defines a battery of
        saved-search query dicts and, for each filter mode exercised by the
        tests (search / preview / published / pinned / ordered), the
        expected result counts or id orderings.
        """
        super(BaseCustomSearchFilterTests, self).setUp()
        feature_type_names = (
            "News", "Slideshow", "TV Club", "Video",
        )
        feature_types = []
        for name in feature_type_names:
            feature_types.append(FeatureType.objects.create(name=name))
        tag_names = (
            "Barack Obama", "Joe Biden", "Wow", "Funny", "Politics"
        )
        tags = []
        for name in tag_names:
            tags.append(Tag.objects.create(name=name))
        # feature_type/tags hold *indices* into the lists above; they are
        # resolved to real objects in the loop below.
        content_data = (
            dict(
                title="Obama Does It Again",
                feature_type=0,
                tags=[0, 2, 4]
            ),
            dict(
                title="Biden Does It Again",
                feature_type=0,
                tags=[1, 2, 4]
            ),
            dict(
                title="Obama In Slides Is Flawless",
                feature_type=1,
                tags=[0, 2, 4]
            ),
            dict(
                title="Obama On TV",
                feature_type=2,
                tags=[0, 2]
            ),
            dict(
                title="Flawless video here",
                feature_type=3,
                tags=[3, 2]
            ),
            dict(
                title="Both Obama and Biden in One Article",
                feature_type=3,
                tags=[0, 1, 2]
            ),
        )
        # First item publishes 12h in the future; each subsequent one steps
        # 12h earlier, so later list entries are older.
        time_step = timedelta(hours=12)
        pubtime = timezone.now() + time_step
        content_list = []
        for data in content_data:
            data["published"] = pubtime
            data["feature_type"] = feature_types[data.pop("feature_type")]
            data["tags"] = [tags[tag_idx] for tag_idx in data.pop("tags")]
            content = make_content(**data)
            content_list.append(content)
            content.index()  # reindex for related object updates
            pubtime -= time_step
        self.content_list = content_list
        self.feature_types = feature_types
        self.tags = tags
        Content.search_objects.refresh()

        # NOTE: we updated some field names after I initially typed this up.
        # NOTE: These functions munge the existing data into the new form.
        def makeGroups(groups):
            result = []
            for group in groups:
                if isinstance(group, dict):
                    this_group = group
                else:
                    this_group = dict(conditions=[])
                    for condition in group:
                        this_group["conditions"].append(makeCondition(*condition))
                result.append(this_group)
            return result

        def makeCondition(field, type, values):
            return dict(
                field=field, type=type,
                values=[dict(label=v, value=v) for v in values]
            )

        s_biden = dict(
            label="All Biden, Baby",
            query=dict(
                groups=makeGroups([
                    [
                        ("tag", "all", [self.tags[1].slug]),
                    ],
                ])
            )
        )
        s_obama = dict(
            label="All Obama, Baby",
            query=dict(
                groups=makeGroups([
                    [
                        ("tag", "all", [self.tags[0].slug]),
                    ],
                ])
            )
        )
        # logical and
        s_b_and_b = dict(
            label="Obama and Biden, together!",
            query=dict(
                groups=makeGroups([
                    [
                        ("tag", "all", [
                            self.tags[0].slug,
                            self.tags[1].slug
                        ]),
                    ],
                ])
            )
        )
        # logical or
        s_b_or_b = dict(
            label="Obama or Biden, whatever!",
            query=dict(
                groups=makeGroups([
                    [
                        ("tag", "any", [
                            self.tags[0].slug,
                            self.tags[1].slug
                        ]),
                    ],
                ])
            )
        )
        # excluding some tags
        s_lite_obama = dict(
            label="Obama but not political stuff",
            query=dict(
                groups=makeGroups([
                    [
                        ("tag", "all", [
                            self.tags[0].slug,  # obama
                        ]),
                        ("tag", "none", [
                            self.tags[4].slug,  # politics
                        ]),
                    ],
                ])
            )
        )
        # multiple, disjoint groups
        s_funny_and_slideshows = dict(
            label="Anything funny and also slideshows!",
            query=dict(
                groups=makeGroups([
                    [
                        ("tag", "any", [
                            self.tags[3].slug  # funny tags
                        ]),
                    ],
                    [
                        ("feature-type", "any", [
                            self.feature_types[1].slug  # slideshow
                        ]),
                    ],
                ])
            )
        )
        # this tag is on everything
        s_wow = dict(
            label="Wow!",
            query=dict(
                groups=makeGroups([
                    [
                        ("tag", "all", [
                            self.tags[2].slug  # "wow" tag, applied to all content
                        ]),
                    ],
                ])
            )
        )
        # filter by content type
        s_doctype = dict(
            label="Doctype",
            query=dict(
                groups=makeGroups([
                    [
                        ("content-type", "all", [
                            TestContentObjTwo.search_objects.mapping.doc_type
                        ])
                    ]
                ])
            )
        )
        # include some ids
        s_one_article = dict(
            label="Just this article",
            query=dict(
                groups=[],
                included_ids=[self.content_list[0].id]
            )
        )
        s_two_articles = dict(
            label="Just two articles",
            query=dict(
                groups=[],
                included_ids=[
                    self.content_list[0].id,
                    self.content_list[3].id
                ]
            )
        )
        # exclude ids
        s_all_but_one_article = dict(
            label="All but one article",
            query=dict(
                groups=[],
                excluded_ids=[
                    self.content_list[0].id
                ]
            )
        )
        # last day of articles
        s_last_day = dict(
            label="Last day",
            query=dict(
                groups=[dict(
                    conditions=[],
                    time="Past day"
                )],
            )
        )
        # pinned
        s_pinned = dict(
            label="Pinned something",
            query=dict(
                pinned_ids=[
                    content_list[-1].id  # last in time
                ]
            )
        )
        # pinned 2
        s_pinned_2 = dict(
            label="Pinned 2 things",
            query=dict(
                pinned_ids=[
                    content_list[-1].id,  # last in time
                    content_list[-2].id  # penultimate
                ]
            )
        )
        # pinned 2 with groups
        s_pinned_2_groups = dict(
            label="Pinned 2 things with other filters",
            query=dict(
                groups=makeGroups([
                    [
                        ("tag", "any", [
                            self.tags[0].slug,
                            self.tags[1].slug,
                            self.tags[2].slug,
                            self.tags[3].slug,
                            self.tags[4].slug
                        ]),
                    ]
                ]),
                pinned_ids=[
                    content_list[-1].id,  # last in time
                    content_list[-2].id  # penultimate
                ]
            )
        )
        # text query
        s_text_query = dict(
            label="Text query",
            query=dict(
                query="again"
            )
        )
        # text query with pinned ids
        s_text_query_pinned = dict(
            label="Text query",
            query=dict(
                groups=makeGroups([
                    [
                        ("tag", "any", [self.tags[2].slug]),
                    ]
                ]),
                pinned_ids=[self.content_list[4].id],
                query="Flawless"
            )
        )
        # saved search and the expected result count
        self.search_expectations = (
            (s_biden, 2),
            (s_obama, 4),
            (s_b_and_b, 1),
            (s_b_or_b, 5),
            (s_lite_obama, 2),
            (s_funny_and_slideshows, 2),
            (s_wow, len(self.content_list)),
            (s_one_article, 1),
            (s_two_articles, 2),
            (s_all_but_one_article, len(self.content_list) - 1),
            (s_last_day, 3),
            (s_pinned, len(self.content_list)),
            (s_pinned_2, len(self.content_list)),
            (s_pinned_2_groups, len(self.content_list)),
            (s_doctype, TestContentObjTwo.objects.count()),
            (s_text_query, 2),
            (s_text_query_pinned, 2),
        )
        self.preview_expectations = (
            (s_biden, 2),
            (s_obama, 4),
            (s_b_and_b, 1),
            (s_b_or_b, 5),
            (s_lite_obama, 2),
            (s_funny_and_slideshows, 2),
            (s_wow, len(self.content_list)),
            (s_one_article, 1),
            (s_two_articles, 2),
            (s_all_but_one_article, len(self.content_list)),  # excluded
            (s_last_day, 3),
            (s_doctype, TestContentObjTwo.objects.count()),
            (s_text_query, 2),
            (s_text_query_pinned, 2),
        )
        self.group_preview_expectations = (
            (s_biden, 2),
            (s_obama, 4),
            (s_b_and_b, 1),
            (s_wow, len(self.content_list)),
            (s_one_article, 1),
            (s_two_articles, 2),
            (s_all_but_one_article, len(self.content_list)),  # excluded
        )
        # is not published and not is_preview
        self.unpublished_expectations = (
            (s_biden, 2),
            (s_obama, 4),
            (s_b_and_b, 1),
            (s_b_or_b, 5),
            (s_lite_obama, 2),
            (s_funny_and_slideshows, 2),
            (s_wow, len(self.content_list)),
            (s_one_article, 1),
            (s_two_articles, 2),
            (s_all_but_one_article, len(self.content_list) - 1),
            (s_last_day, 3),
            (s_pinned, len(self.content_list)),
            (s_pinned_2, len(self.content_list)),
            (s_pinned_2_groups, len(self.content_list)),
            (s_text_query, 2),
            (s_text_query_pinned, 2),
        )
        # is published and not is_preview
        # The "- 1" terms account for content_list[0], whose publish date is
        # in the future and therefore doesn't count as published.
        self.published_expectations = (
            (s_biden, 2),
            (s_obama, 3),
            (s_b_and_b, 1),
            (s_b_or_b, 5 - 1),
            (s_lite_obama, 2),
            (s_funny_and_slideshows, 2),
            (s_wow, len(self.content_list) - 1),
            (s_one_article, 1 - 1),
            (s_two_articles, 2 - 1),
            (s_all_but_one_article, len(self.content_list) - 1),
            (s_last_day, 2),
            (s_pinned, len(self.content_list) - 1),
            (s_pinned_2, len(self.content_list) - 1),
            (s_pinned_2_groups, len(self.content_list) - 1),
            (s_text_query, 1),
            (s_text_query_pinned, 2),
        )
        self.published_not_pinned_expectations = (
            (s_biden, 2),
            (s_obama, 3),
            (s_b_and_b, 1),
            (s_b_or_b, 5 - 1),
            (s_lite_obama, 2),
            (s_funny_and_slideshows, 2),
            (s_wow, len(self.content_list) - 1),
            (s_one_article, 1 - 1),
            (s_two_articles, 2 - 1),
            (s_all_but_one_article, len(self.content_list) - 1),
            (s_last_day, 2),
        )
        # (search filter, (list, of, ids, in, order)),
        self.ordered_expectations = (
            (s_all_but_one_article, (content_list[1].id, content_list[2].id, content_list[3].id)),
            (s_text_query_pinned, (content_list[4].id, content_list[2].id)),
        )
        self.pinned_expectations = (
            (s_pinned, (
                content_list[-1].id,
            )),
            (s_pinned_2, (
                content_list[-2].id, content_list[-1].id,
            )),
            (s_pinned_2_groups, (
                content_list[-2].id, content_list[-1].id,
            )),
        )
Example 40

View license
    def setUp(self):
        super(RatePayTestCase, self).setUp()
        contributor_cls = get_user_model()
        self.now = timezone.now()
        self.contributors = {
            'jarvis': contributor_cls.objects.create(
                first_name='jarvis',
                last_name='monster',
                username='garbage'
            )
        }
        self.roles = {
            'featuretype': ContributorRole.objects.create(
                name='FeatureType',
                payment_type=1
            ),
            'flatrate': ContributorRole.objects.create(
                name='FlatRate',
                payment_type=0
            ),
            'hourly': ContributorRole.objects.create(
                name='Hourly',
                payment_type=2
            ),
            'manual': ContributorRole.objects.create(
                name='Manual',
                payment_type=3
            )
        }
        self.overrides = {
            'jarvis': {
                'featuretype': OverrideProfile.objects.create(
                    contributor=self.contributors['jarvis'],
                    role=self.roles['featuretype']
                ),
                'flatrate': OverrideProfile.objects.create(
                    contributor=self.contributors['jarvis'],
                    role=self.roles['flatrate']
                ),
                'hourly': OverrideProfile.objects.create(
                    contributor=self.contributors['jarvis'],
                    role=self.roles['hourly']
                ),
            }
        }
        self.feature_types = {
            'tvclub': FeatureType.objects.create(name='TV Club'),
            'news': FeatureType.objects.create(name='News')
        }
        tvclub_rate = FeatureTypeRate.objects.get(
            feature_type=self.feature_types['tvclub'], role=self.roles['featuretype']
        )
        tvclub_rate.rate = 30
        tvclub_rate.save()
        news_rate = FeatureTypeRate.objects.get(
            feature_type=self.feature_types['news'], role=self.roles['featuretype']
        )
        news_rate.rate = 50
        news_rate.save()
        self.rates = {
            'featuretype': {
                'tvclub': tvclub_rate,
                'news': news_rate
            },
            'flatrate': {
                'flatrate': FlatRate.objects.create(
                    role=self.roles['flatrate'],
                    rate=200
                )
            },
            'hourly': {
                'hourly': HourlyRate.objects.create(
                    role=self.roles['hourly'],
                    rate=60
                )
            },
        }
        self.content = {
            'c1': Content.objects.create(
                title='Good Content',
                feature_type=self.feature_types['tvclub'],
                published=self.now - timezone.timedelta(days=1)
            ),
            'c2': Content.objects.create(
                title='More Content',
                feature_type=self.feature_types['news'],
                published=self.now - timezone.timedelta(days=2)
            )
        }
        self.contributions = {
            'featuretype': {
                'tvclub': Contribution.objects.create(
                    contributor=self.contributors['jarvis'],
                    role=self.roles['featuretype'],
                    content=self.content['c1']
                ),
                'news': Contribution.objects.create(
                    contributor=self.contributors['jarvis'],
                    role=self.roles['featuretype'],
                    content=self.content['c2']
                )
            },
            'flatrate': Contribution.objects.create(
                contributor=self.contributors['jarvis'],
                role=self.roles['flatrate'],
                content=self.content['c1']
            ),
            'hourly': Contribution.objects.create(
                contributor=self.contributors['jarvis'],
                minutes_worked=30,
                role=self.roles['hourly'],
                content=self.content['c1']
            ),
            'manual': Contribution.objects.create(
                contributor=self.contributors['jarvis'],
                role=self.roles['manual'],
                content=self.content['c1']
            )
        }
        self.rates['manual'] = {
            'manual': ManualRate.objects.create(
                contribution=self.contributions['manual'],
                rate=1000
            )
        }

Example 41

Project: django-bulbs
Source File: test_spec_cov_query.py
View license
    def setUp(self):
        """Build a content corpus plus a battery of saved-search definitions,
        and record the result count each search is expected to yield under
        the different query modes (full search, preview, published-only,
        pinned/ordered result checks).

        The ``*_expectations`` tuples assigned at the bottom are consumed by
        the test methods as ``(saved_search_dict, expected_result_count)``
        (or, for the ordered/pinned variants, ``(saved_search_dict,
        expected_id_ordering)``) pairs.
        """
        super(BaseCustomSearchFilterTests, self).setUp()
        # Vocabulary objects -- the content definitions below reference
        # these feature types and tags by list index.
        feature_type_names = (
            "News", "Slideshow", "TV Club", "Video",
        )
        feature_types = []
        for name in feature_type_names:
            feature_types.append(FeatureType.objects.create(name=name))
        tag_names = (
            "Barack Obama", "Joe Biden", "Wow", "Funny", "Politics"
        )
        tags = []
        for name in tag_names:
            tags.append(Tag.objects.create(name=name))
        # feature_type and tags entries are indexes into the lists built
        # above; they are swapped for the real objects in the loop below.
        content_data = (
            dict(
                title="Obama Does It Again",
                feature_type=0,
                tags=[0, 2, 4]
            ),
            dict(
                title="Biden Does It Again",
                feature_type=0,
                tags=[1, 2, 4]
            ),
            dict(
                title="Obama In Slides Is Flawless",
                feature_type=1,
                tags=[0, 2, 4]
            ),
            dict(
                title="Obama On TV",
                feature_type=2,
                tags=[0, 2]
            ),
            dict(
                title="Flawless video here",
                feature_type=3,
                tags=[3, 2]
            ),
            dict(
                title="Both Obama and Biden in One Article",
                feature_type=3,
                tags=[0, 1, 2]
            ),
        )
        # Publish times step backwards 12 hours per item, starting 12 hours
        # in the future -- so content_list[0] is not yet published and each
        # later item is progressively older.
        time_step = timedelta(hours=12)
        pubtime = timezone.now() + time_step
        content_list = []
        for data in content_data:
            data["published"] = pubtime
            data["feature_type"] = feature_types[data.pop("feature_type")]
            data["tags"] = [tags[tag_idx] for tag_idx in data.pop("tags")]
            content = make_content(**data)
            content_list.append(content)
            content.index()  # reindex for related object updates
            pubtime -= time_step
        self.content_list = content_list
        self.feature_types = feature_types
        self.tags = tags
        Content.search_objects.refresh()

        # NOTE: we updated some field names after I initially typed this up.
        # NOTE: These functions munge the existing data into the new form.
        def makeGroups(groups):
            """Convert a list of condition tuples (or ready-made group dicts)
            into the ``groups`` structure a saved-search query expects."""
            result = []
            for group in groups:
                if isinstance(group, dict):
                    this_group = group
                else:
                    this_group = dict(conditions=[])
                    for condition in group:
                        this_group["conditions"].append(makeCondition(*condition))
                result.append(this_group)
            return result

        def makeCondition(field, type, values):
            """Build one query condition dict from (field, type, values)."""
            return dict(
                field=field, type=type,
                values=[dict(label=v, value=v) for v in values]
            )

        # --- saved-search definitions -----------------------------------
        s_biden = dict(
            label="All Biden, Baby",
            query=dict(
                groups=makeGroups([
                    [
                        ("tag", "all", [self.tags[1].slug]),
                    ],
                ])
            )
        )
        s_obama = dict(
            label="All Obama, Baby",
            query=dict(
                groups=makeGroups([
                    [
                        ("tag", "all", [self.tags[0].slug]),
                    ],
                ])
            )
        )
        # logical and
        s_b_and_b = dict(
            label="Obama and Biden, together!",
            query=dict(
                groups=makeGroups([
                    [
                        ("tag", "all", [
                            self.tags[0].slug,
                            self.tags[1].slug
                        ]),
                    ],
                ])
            )
        )
        # logical or
        s_b_or_b = dict(
            label="Obama or Biden, whatever!",
            query=dict(
                groups=makeGroups([
                    [
                        ("tag", "any", [
                            self.tags[0].slug,
                            self.tags[1].slug
                        ]),
                    ],
                ])
            )
        )
        # excluding some tags
        s_lite_obama = dict(
            label="Obama but not political stuff",
            query=dict(
                groups=makeGroups([
                    [
                        ("tag", "all", [
                            self.tags[0].slug,  # obama
                        ]),
                        ("tag", "none", [
                            self.tags[4].slug,  # politics
                        ]),
                    ],
                ])
            )
        )
        # multiple, disjoint groups
        s_funny_and_slideshows = dict(
            label="Anything funny and also slideshows!",
            query=dict(
                groups=makeGroups([
                    [
                        ("tag", "any", [
                            self.tags[3].slug  # funny tags
                        ]),
                    ],
                    [
                        ("feature-type", "any", [
                            self.feature_types[1].slug  # slideshow
                        ]),
                    ],
                ])
            )
        )
        # this tag is on everything
        s_wow = dict(
            label="Wow!",
            query=dict(
                groups=makeGroups([
                    [
                        ("tag", "all", [
                            self.tags[2].slug  # the "Wow" tag (on every item)
                        ]),
                    ],
                ])
            )
        )
        # filter by content type
        s_doctype = dict(
            label="Doctype",
            query=dict(
                groups=makeGroups([
                    [
                        ("content-type", "all", [
                            TestContentObjTwo.search_objects.mapping.doc_type
                        ])
                    ]
                ])
            )
        )
        # include some ids
        s_one_article = dict(
            label="Just this article",
            query=dict(
                groups=[],
                included_ids=[self.content_list[0].id]
            )
        )
        s_two_articles = dict(
            label="Just two articles",
            query=dict(
                groups=[],
                included_ids=[
                    self.content_list[0].id,
                    self.content_list[3].id
                ]
            )
        )
        # exclude ids
        s_all_but_one_article = dict(
            label="All but one article",
            query=dict(
                groups=[],
                excluded_ids=[
                    self.content_list[0].id
                ]
            )
        )
        # last day of articles
        s_last_day = dict(
            label="Last day",
            query=dict(
                groups=[dict(
                    conditions=[],
                    time="1 day"
                )],
            )
        )
        # pinned
        s_pinned = dict(
            label="Pinned something",
            query=dict(
                pinned_ids=[
                    content_list[-1].id  # last in time
                ]
            )
        )
        # pinned 2
        s_pinned_2 = dict(
            label="Pinned 2 things",
            query=dict(
                pinned_ids=[
                    content_list[-1].id,  # last in time
                    content_list[-2].id  # penultimate
                ]
            )
        )
        # pinned 2 with groups
        s_pinned_2_groups = dict(
            label="Pinned 2 things with other filters",
            query=dict(
                groups=makeGroups([
                    [
                        ("tag", "any", [
                            self.tags[0].slug,
                            self.tags[1].slug,
                            self.tags[2].slug,
                            self.tags[3].slug,
                            self.tags[4].slug
                        ]),
                    ]
                ]),
                pinned_ids=[
                    content_list[-1].id,  # last in time
                    content_list[-2].id  # penultimate
                ]
            )
        )
        # text query
        s_text_query = dict(
            label="Text query",
            query=dict(
                query="again"
            )
        )
        # text query with pinned ids
        s_text_query_pinned = dict(
            label="Text query",
            query=dict(
                groups=makeGroups([
                    [
                        ("tag", "any", [self.tags[2].slug]),
                    ]
                ]),
                pinned_ids=[self.content_list[4].id],
                query="Flawless"
            )
        )
        # --- expectations ------------------------------------------------
        # saved search and the expected result count
        self.search_expectations = (
            (s_biden, 2),
            (s_obama, 4),
            (s_b_and_b, 1),
            (s_b_or_b, 5),
            (s_lite_obama, 2),
            (s_funny_and_slideshows, 2),
            (s_wow, len(self.content_list)),
            (s_one_article, 1),
            (s_two_articles, 2),
            (s_all_but_one_article, len(self.content_list) - 1),
            (s_last_day, 3),
            (s_pinned, len(self.content_list)),
            (s_pinned_2, len(self.content_list)),
            (s_pinned_2_groups, len(self.content_list)),
            (s_doctype, TestContentObjTwo.objects.count()),
            (s_text_query, 2),
            (s_text_query_pinned, 2),
        )
        self.preview_expectations = (
            (s_biden, 2),
            (s_obama, 4),
            (s_b_and_b, 1),
            (s_b_or_b, 5),
            (s_lite_obama, 2),
            (s_funny_and_slideshows, 2),
            (s_wow, len(self.content_list)),
            (s_one_article, 1),
            (s_two_articles, 2),
            (s_all_but_one_article, len(self.content_list)),  # excluded
            (s_last_day, 3),
            (s_doctype, TestContentObjTwo.objects.count()),
            (s_text_query, 2),
            (s_text_query_pinned, 2),
        )
        self.group_preview_expectations = (
            (s_biden, 2),
            (s_obama, 4),
            (s_b_and_b, 1),
            (s_wow, len(self.content_list)),
            (s_one_article, 1),
            (s_two_articles, 2),
            (s_all_but_one_article, len(self.content_list)),  # excluded
        )
        # is not published and not is_preview
        self.unpublished_expectations = (
            (s_biden, 2),
            (s_obama, 4),
            (s_b_and_b, 1),
            (s_b_or_b, 5),
            (s_lite_obama, 2),
            (s_funny_and_slideshows, 2),
            (s_wow, len(self.content_list)),
            (s_one_article, 1),
            (s_two_articles, 2),
            (s_all_but_one_article, len(self.content_list) - 1),
            (s_last_day, 3),
            (s_pinned, len(self.content_list)),
            (s_pinned_2, len(self.content_list)),
            (s_pinned_2_groups, len(self.content_list)),
            (s_text_query, 2),
            (s_text_query_pinned, 2),
        )
        # is published and not is_preview
        # (content_list[0] has a future publish date, hence the "- 1"s)
        self.published_expectations = (
            (s_biden, 2),
            (s_obama, 3),
            (s_b_and_b, 1),
            (s_b_or_b, 5 - 1),
            (s_lite_obama, 2),
            (s_funny_and_slideshows, 2),
            (s_wow, len(self.content_list) - 1),
            (s_one_article, 1 - 1),
            (s_two_articles, 2 - 1),
            (s_all_but_one_article, len(self.content_list) - 1),
            (s_last_day, 2),
            (s_pinned, len(self.content_list) - 1),
            (s_pinned_2, len(self.content_list) - 1),
            (s_pinned_2_groups, len(self.content_list) - 1),
            (s_text_query, 1),
            (s_text_query_pinned, 2),
        )
        self.published_not_pinned_expectations = (
            (s_biden, 2),
            (s_obama, 3),
            (s_b_and_b, 1),
            (s_b_or_b, 5 - 1),
            (s_lite_obama, 2),
            (s_funny_and_slideshows, 2),
            (s_wow, len(self.content_list) - 1),
            (s_one_article, 1 - 1),
            (s_two_articles, 2 - 1),
            (s_all_but_one_article, len(self.content_list) - 1),
            (s_last_day, 2),
        )
        # (search filter, (list, of, ids, in, order)),
        self.ordered_expectations = (
            (s_all_but_one_article, (2, 3, 4)),
            (s_text_query_pinned, (content_list[4].id, content_list[2].id)),
        )
        self.pinned_expectations = (
            (s_pinned, (
                content_list[-1].id,
                content_list[0].id, content_list[1].id,
            )),
            (s_pinned_2, (
                content_list[-2].id, content_list[-1].id,
                content_list[0].id, content_list[1].id,
            )),
            (s_pinned_2_groups, (
                content_list[-2].id, content_list[-1].id,
                content_list[0].id, content_list[1].id,
            )),
        )

Example 42

View license
    def setUp(self):
        """Create feature types, tags, and a small content corpus, plus two
        active SpecialCoverage records whose queries match the "news"
        feature type and the "wow" tag respectively.

        Content publish dates step backwards by 12 hours per item starting
        12 hours in the future, so the first item is not yet published.
        """
        super(SpecialCoverageSearchTests, self).setUp()
        self.now = timezone.now()

        feature_types = [
            FeatureType.objects.create(name=ft_name)
            for ft_name in ("News", "Slideshow", "TV Club", "Video")
        ]
        tags = [
            Tag.objects.create(name=tag_name)
            for tag_name in ("Barack Obama", "Joe Biden", "wow", "Funny", "Politics")
        ]

        # (title, feature_type index, tag indexes) -- indexes refer to the
        # lists built above.
        content_specs = (
            ("Obama Does It Again", 0, [0, 2, 4]),
            ("Biden Does It Again", 0, [1, 2, 4]),
            ("Obama In Slides Is Flawless", 1, [0, 2, 4]),
            ("Obama On TV", 2, [0, 2]),
            ("Flawless video here", 3, [3, 2]),
            ("Both Obama and Biden in One Article", 3, [0, 1, 2]),
        )
        step = timedelta(hours=12)
        publish_at = self.now + step
        content_list = []
        for title, ft_idx, tag_idxs in content_specs:
            item = make_content(
                title=title,
                published=publish_at,
                feature_type=feature_types[ft_idx],
                tags=[tags[i] for i in tag_idxs],
            )
            content_list.append(item)
            item.index()  # reindex for related object updates
            publish_at -= step
        self.content_list = content_list
        self.feature_types = feature_types
        self.tags = tags

        self.special_coverages = [
            SpecialCoverage.objects.create(
                name="Slime Season",
                tunic_campaign_id=1,
                start_date=self.now - timezone.timedelta(days=10),
                end_date=self.now + timezone.timedelta(days=10),
                query={
                    "groups": [{
                        "conditions": [{
                            "field": "feature-type",
                            "type": "any",
                            "values": [{
                                "name": "news", "value": "news"
                            }]
                        }],
                        "time": None
                    }]
                }
            ),
            SpecialCoverage.objects.create(
                name="Slime Season 2",
                start_date=self.now - timezone.timedelta(days=10),
                end_date=self.now + timezone.timedelta(days=10),
                query={
                    "groups": [{
                        "conditions": [{
                            "field": "tag",
                            "type": "any",
                            "values": [{
                                "name": "wow", "value": "wow"
                            }]
                        }],
                        "time": None
                    }],
                }
            )
        ]

        Content.search_objects.refresh()

Example 43

Project: wagtail
Source File: pages.py
View license
def create(request, content_type_app_name, content_type_model_name, parent_page_id):
    """Render and process the "create page" form beneath a parent page.

    The page class is resolved from the content-type natural key in the
    URL.  Permission and allowed-subpage rules are enforced before the
    form is shown.  On a valid POST the page is added to the tree, a
    revision is saved, and -- depending on which submit button was used --
    the revision is published immediately (or scheduled, if ``go_live_at``
    is in the future) or submitted for moderation.  The
    ``before_create_page`` / ``after_create_page`` hooks may short-circuit
    the response by returning anything with a ``status_code``.
    """
    parent_page = get_object_or_404(Page, id=parent_page_id).specific
    parent_page_perms = parent_page.permissions_for_user(request.user)
    if not parent_page_perms.can_add_subpage():
        raise PermissionDenied

    # Resolve the concrete page class from the natural key in the URL.
    try:
        content_type = ContentType.objects.get_by_natural_key(content_type_app_name, content_type_model_name)
    except ContentType.DoesNotExist:
        raise Http404

    # Get class
    page_class = content_type.model_class()

    # Make sure the class is a descendant of Page
    if not issubclass(page_class, Page):
        raise Http404

    # page must be in the list of allowed subpage types for this parent ID
    if page_class not in parent_page.creatable_subpage_models():
        raise PermissionDenied

    if not page_class.can_create_at(parent_page):
        raise PermissionDenied

    for fn in hooks.get_hooks('before_create_page'):
        result = fn(request, parent_page, page_class)
        if hasattr(result, 'status_code'):
            return result

    # Unsaved instance; it is only added to the tree on a successful POST.
    page = page_class(owner=request.user)
    edit_handler_class = page_class.get_edit_handler()
    form_class = edit_handler_class.get_form_class(page_class)

    next_url = get_valid_next_url_from_request(request)

    if request.method == 'POST':
        form = form_class(request.POST, request.FILES, instance=page,
                          parent_page=parent_page)

        if form.is_valid():
            page = form.save(commit=False)

            is_publishing = bool(request.POST.get('action-publish')) and parent_page_perms.can_publish_subpage()
            is_submitting = bool(request.POST.get('action-submit'))

            if not is_publishing:
                page.live = False

            # Save page
            parent_page.add_child(instance=page)

            # Save revision
            revision = page.save_revision(
                user=request.user,
                submitted_for_moderation=is_submitting,
            )

            # Publish
            if is_publishing:
                revision.publish()

            # Notifications
            if is_publishing:
                # A future go_live_at means the publish was scheduled rather
                # than applied immediately.
                if page.go_live_at and page.go_live_at > timezone.now():
                    messages.success(request, _("Page '{0}' created and scheduled for publishing.").format(page.get_admin_display_title()), buttons=[
                        messages.button(reverse('wagtailadmin_pages:edit', args=(page.id,)), _('Edit'))
                    ])
                else:
                    messages.success(request, _("Page '{0}' created and published.").format(page.get_admin_display_title()), buttons=[
                        messages.button(page.url, _('View live')),
                        messages.button(reverse('wagtailadmin_pages:edit', args=(page.id,)), _('Edit'))
                    ])
            elif is_submitting:
                messages.success(
                    request,
                    _("Page '{0}' created and submitted for moderation.").format(page.get_admin_display_title()),
                    buttons=[
                        messages.button(reverse('wagtailadmin_pages:view_draft', args=(page.id,)), _('View draft')),
                        messages.button(reverse('wagtailadmin_pages:edit', args=(page.id,)), _('Edit'))
                    ]
                )
                if not send_notification(page.get_latest_revision().id, 'submitted', request.user.pk):
                    messages.error(request, _("Failed to send notifications to moderators"))
            else:
                messages.success(request, _("Page '{0}' created.").format(page.get_admin_display_title()))

            for fn in hooks.get_hooks('after_create_page'):
                result = fn(request, page)
                if hasattr(result, 'status_code'):
                    return result

            if is_publishing or is_submitting:
                # we're done here
                if next_url:
                    # redirect back to 'next' url if present
                    return redirect(next_url)
                # redirect back to the explorer
                return redirect('wagtailadmin_explore', page.get_parent().id)
            else:
                # Just saving - remain on edit page for further edits
                target_url = reverse('wagtailadmin_pages:edit', args=[page.id])
                if next_url:
                    # Ensure the 'next' url is passed through again if present
                    target_url += '?next=%s' % urlquote(next_url)
                return redirect(target_url)
        else:
            messages.error(request, _("The page could not be created due to validation errors"))
            edit_handler = edit_handler_class(instance=page, form=form)
            has_unsaved_changes = True
    else:
        signals.init_new_page.send(sender=create, page=page, parent=parent_page)
        form = form_class(instance=page)
        edit_handler = edit_handler_class(instance=page, form=form)
        has_unsaved_changes = False

    return render(request, 'wagtailadmin/pages/create.html', {
        'content_type': content_type,
        'page_class': page_class,
        'parent_page': parent_page,
        'edit_handler': edit_handler,
        'preview_modes': page.preview_modes,
        'form': form,
        'next': next_url,
        'has_unsaved_changes': has_unsaved_changes,
    })

Example 44

Project: wagtail
Source File: pages.py
View license
def edit(request, page_id):
    """Render and process the "edit page" form for *page_id*.

    Editing is performed against the page's latest revision.  On a valid
    POST a new revision is saved, and -- depending on the submit button
    used -- it may be published immediately (or scheduled when
    ``go_live_at`` is in the future) or submitted for moderation.  A
    ``revision`` id in the POST indicates a revert, which only changes the
    wording of the success messages.  The ``before_edit_page`` /
    ``after_edit_page`` hooks may short-circuit the response by returning
    anything with a ``status_code``.
    """
    latest_revision = get_object_or_404(Page, id=page_id).get_latest_revision()
    page = get_object_or_404(Page, id=page_id).get_latest_revision_as_page()
    parent = page.get_parent()

    content_type = ContentType.objects.get_for_model(page)
    page_class = content_type.model_class()

    page_perms = page.permissions_for_user(request.user)
    if not page_perms.can_edit():
        raise PermissionDenied

    for fn in hooks.get_hooks('before_edit_page'):
        result = fn(request, page)
        if hasattr(result, 'status_code'):
            return result

    edit_handler_class = page_class.get_edit_handler()
    form_class = edit_handler_class.get_form_class(page_class)

    next_url = get_valid_next_url_from_request(request)

    errors_debug = None

    if request.method == 'POST':
        form = form_class(request.POST, request.FILES, instance=page,
                          parent_page=parent)

        # A locked page falls through to the error branch below even when
        # the form itself validates.
        if form.is_valid() and not page.locked:
            page = form.save(commit=False)

            is_publishing = bool(request.POST.get('action-publish')) and page_perms.can_publish()
            is_submitting = bool(request.POST.get('action-submit'))
            is_reverting = bool(request.POST.get('revision'))

            # If a revision ID was passed in the form, get that revision so its
            # date can be referenced in notification messages
            if is_reverting:
                previous_revision = get_object_or_404(page.revisions, id=request.POST.get('revision'))

            # Save revision
            revision = page.save_revision(
                user=request.user,
                submitted_for_moderation=is_submitting,
            )

            # Publish
            if is_publishing:
                revision.publish()
                # Need to reload the page because the URL may have changed, and we
                # need the up-to-date URL for the "View Live" button.
                page = page.specific_class.objects.get(pk=page.pk)

            # Notifications
            if is_publishing:
                if page.go_live_at and page.go_live_at > timezone.now():
                    # Page has been scheduled for publishing in the future

                    if is_reverting:
                        message = _(
                            "Revision from {0} of page '{1}' has been scheduled for publishing."
                        ).format(
                            previous_revision.created_at.strftime("%d %b %Y %H:%M"),
                            page.get_admin_display_title()
                        )
                    else:
                        message = _(
                            "Page '{0}' has been scheduled for publishing."
                        ).format(
                            page.get_admin_display_title()
                        )

                    messages.success(request, message, buttons=[
                        messages.button(
                            reverse('wagtailadmin_pages:edit', args=(page.id,)),
                            _('Edit')
                        )
                    ])

                else:
                    # Page is being published now

                    if is_reverting:
                        message = _(
                            "Revision from {0} of page '{1}' has been published."
                        ).format(
                            previous_revision.created_at.strftime("%d %b %Y %H:%M"),
                            page.get_admin_display_title()
                        )
                    else:
                        message = _(
                            "Page '{0}' has been published."
                        ).format(
                            page.get_admin_display_title()
                        )

                    messages.success(request, message, buttons=[
                        messages.button(
                            page.url,
                            _('View live')
                        ),
                        messages.button(
                            reverse('wagtailadmin_pages:edit', args=(page_id,)),
                            _('Edit')
                        )
                    ])

            elif is_submitting:

                message = _(
                    "Page '{0}' has been submitted for moderation."
                ).format(
                    page.get_admin_display_title()
                )

                messages.success(request, message, buttons=[
                    messages.button(
                        reverse('wagtailadmin_pages:view_draft', args=(page_id,)),
                        _('View draft')
                    ),
                    messages.button(
                        reverse('wagtailadmin_pages:edit', args=(page_id,)),
                        _('Edit')
                    )
                ])

                if not send_notification(page.get_latest_revision().id, 'submitted', request.user.pk):
                    messages.error(request, _("Failed to send notifications to moderators"))

            else:  # Saving

                if is_reverting:
                    message = _(
                        "Page '{0}' has been replaced with revision from {1}."
                    ).format(
                        page.get_admin_display_title(),
                        previous_revision.created_at.strftime("%d %b %Y %H:%M")
                    )
                else:
                    message = _(
                        "Page '{0}' has been updated."
                    ).format(
                        page.get_admin_display_title()
                    )

                messages.success(request, message)

            for fn in hooks.get_hooks('after_edit_page'):
                result = fn(request, page)
                if hasattr(result, 'status_code'):
                    return result

            if is_publishing or is_submitting:
                # we're done here - redirect back to the explorer
                if next_url:
                    # redirect back to 'next' url if present
                    return redirect(next_url)
                # redirect back to the explorer
                return redirect('wagtailadmin_explore', page.get_parent().id)
            else:
                # Just saving - remain on edit page for further edits
                target_url = reverse('wagtailadmin_pages:edit', args=[page.id])
                if next_url:
                    # Ensure the 'next' url is passed through again if present
                    target_url += '?next=%s' % urlquote(next_url)
                return redirect(target_url)
        else:
            if page.locked:
                messages.error(request, _("The page could not be saved as it is locked"))
            else:
                messages.error(request, _("The page could not be saved due to validation errors"))

            edit_handler = edit_handler_class(instance=page, form=form)
            errors_debug = (
                repr(edit_handler.form.errors) +
                repr([
                    (name, formset.errors)
                    for (name, formset) in edit_handler.form.formsets.items()
                    if formset.errors
                ])
            )
            has_unsaved_changes = True
    else:
        form = form_class(instance=page)
        edit_handler = edit_handler_class(instance=page, form=form)
        has_unsaved_changes = False

    # Check for revisions still undergoing moderation and warn
    if latest_revision and latest_revision.submitted_for_moderation:
        messages.warning(request, _("This page is currently awaiting moderation"))

    return render(request, 'wagtailadmin/pages/edit.html', {
        'page': page,
        'content_type': content_type,
        'edit_handler': edit_handler,
        'errors_debug': errors_debug,
        'preview_modes': page.preview_modes,
        'form': form,
        'next': next_url,
        'has_unsaved_changes': has_unsaved_changes,
    })

Example 45

Project: wagtail
Source File: publish_scheduled_pages.py
View license
    def handle(self, *args, **options):
        """Apply scheduled publishing rules.

        Three passes: unpublish live pages past their expiry date, drop
        expired revisions from the moderation queue, and publish revisions
        whose approved go-live datetime has arrived. With ``--dryrun`` the
        affected objects are only printed, nothing is changed.
        """
        dryrun = bool(options['dryrun'])
        if dryrun:
            print("Will do a dry run.")

        # Pass 1: live pages whose expiry datetime has passed.
        pages_to_expire = Page.objects.filter(
            live=True,
            expire_at__lt=timezone.now()
        )
        if not dryrun:
            # Materialise the queryset first so unpublishing (which mutates
            # the rows the filter matched on) cannot affect iteration.
            for expired_page in list(pages_to_expire):
                expired_page.unpublish(set_expired=True)
        else:
            if pages_to_expire:
                print("Expired pages to be deactivated:")
                print("Expiry datetime\t\tSlug\t\tName")
                print("---------------\t\t----\t\t----")
                for expired_page in pages_to_expire:
                    print("{0}\t{1}\t{2}".format(
                        expired_page.expire_at.strftime("%Y-%m-%d %H:%M"),
                        expired_page.slug,
                        expired_page.title
                    ))
            else:
                print("No expired pages to be deactivated found.")

        # Pass 2: revisions awaiting moderation whose expiry has passed.
        stale_revisions = [
            revision
            for revision in PageRevision.objects.filter(
                submitted_for_moderation=True
            )
            if revision_date_expired(revision)
        ]
        if not dryrun:
            for revision in stale_revisions:
                revision.submitted_for_moderation = False
                revision.save()
        else:
            print("---------------------------------")
            if stale_revisions:
                print("Expired revisions to be dropped from moderation queue:")
                print("Expiry datetime\t\tSlug\t\tName")
                print("---------------\t\t----\t\t----")
                for revision in stale_revisions:
                    revision_data = json.loads(revision.content_json)
                    print("{0}\t{1}\t{2}".format(
                        dateparse.parse_datetime(
                            revision_data.get('expire_at')
                        ).strftime("%Y-%m-%d %H:%M"),
                        revision_data.get('slug'),
                        revision_data.get('title')
                    ))
            else:
                print("No expired revision to be dropped from moderation.")

        # Pass 3: approved revisions whose go-live datetime has arrived.
        due_revisions = PageRevision.objects.filter(
            approved_go_live_at__lt=timezone.now()
        )
        if not dryrun:
            for revision in due_revisions:
                # Since the approved go-live datetime is already in the
                # past, publishing the revision makes the page live now.
                revision.publish()
        else:
            print("---------------------------------")
            if due_revisions:
                print("Revisions to be published:")
                print("Go live datetime\t\tSlug\t\tName")
                print("---------------\t\t\t----\t\t----")
                for revision in due_revisions:
                    revision_data = json.loads(revision.content_json)
                    print("{0}\t\t{1}\t{2}".format(
                        revision.approved_go_live_at.strftime("%Y-%m-%d %H:%M"),
                        revision_data.get('slug'),
                        revision_data.get('title')
                    ))
            else:
                print("No pages to go live.")

Example 46

Project: pootle
Source File: models.py
View license
    @classmethod
    def top_scorers(cls, days=30, language=None, project=None, limit=5,
                    offset=0):
        """Returns users with the top scores.

        Rankings are built from ``ScoreLog`` rows created within the last
        ``days`` days and cached for 60 seconds per argument combination.

        :param days: period of days to account for scores.
        :param language: limit results to the given language code.
        :param project: limit results to the given project code.
        :param limit: limit results to this number of users. Values other
            than positive numbers will return the entire result set.
        :param offset: number of leading entries to skip in the ranked
            result set.
        """
        # The cache key covers every argument so calls with different
        # filters never collide.
        cache_kwargs = {
            'days': days,
            'language': language,
            'project': project,
            'limit': limit,
            'offset': offset,
        }
        cache_key = make_method_key(cls, 'top_scorers', cache_kwargs)

        top_scorers = cache.get(cache_key, None)
        if top_scorers is not None:
            return top_scorers

        now = timezone.now()
        # Window start: `days` days ago (note the negative timedelta).
        past = now + datetime.timedelta(-days)

        lookup_kwargs = {
            'creation_time__range': [past, now],
        }

        if language is not None:
            lookup_kwargs.update({
                'submission__translation_project__language__code':
                    language,
            })

        if project is not None:
            lookup_kwargs.update({
                'submission__translation_project__project__code':
                    project,
            })

        # System/meta accounts must never appear on the leaderboard.
        meta_user_ids = cls.objects.meta_users().values_list('id', flat=True)
        # Aggregate per user: the total score plus word counts broken down
        # by the kind of action (suggested / translated / reviewed).
        top_scores = ScoreLog.objects.values("user").filter(
            **lookup_kwargs
        ).exclude(
            user__pk__in=meta_user_ids,
        ).annotate(
            total_score=Sum('score_delta'),
            suggested=Sum(
                Case(
                    When(
                        action_code=TranslationActionCodes.SUGG_ADDED,
                        then='wordcount'
                    ),
                    default=0,
                    output_field=models.IntegerField()
                )
            ),
            translated=Sum(
                Case(
                    When(
                        translated_wordcount__isnull=False,
                        then='translated_wordcount'
                    ),
                    default=0,
                    output_field=models.IntegerField()
                )
            ),
            reviewed=Sum(
                Case(
                    When(
                        action_code__in=[
                            TranslationActionCodes.SUGG_REVIEWED_ACCEPTED,
                            TranslationActionCodes.REVIEWED,
                            TranslationActionCodes.EDITED,
                        ],
                        # Rows that carry a translated wordcount were
                        # already counted in `translated` above.
                        translated_wordcount__isnull=True,
                        then='wordcount',
                    ),
                    default=0,
                    output_field=models.IntegerField()
                )
            ),
        ).order_by('-total_score')[offset:]

        # NOTE: `long` makes this Python 2 only. Non-positive or
        # non-numeric limits leave the full result set intact.
        if isinstance(limit, (int, long)) and limit > 0:
            top_scores = top_scores[:limit]

        # Fetch all matching user objects in one query, keyed by pk, so
        # each aggregated row can be joined back to its user below.
        users = dict(
            (user.id, user)
            for user in cls.objects.filter(
                pk__in=[item['user'] for item in top_scores]
            )
        )

        top_scorers = []
        for item in top_scores:
            item['user'] = users[item['user']]
            item['public_total_score'] = _humanize_score(item['total_score'])
            top_scorers.append(item)

        cache.set(cache_key, top_scorers, 60)
        return top_scorers

Example 47

Project: pootle
Source File: models.py
View license
    def save(self, *args, **kwargs):
        """Persist the unit while keeping derived fields (hashes, word
        counts), the revision counter, reviewer/submitter metadata and the
        action log consistent with what changed.

        Change flags may arrive as keyword arguments or as ``_*_updated``
        attributes pre-set on the instance by earlier mutations; either
        being truthy marks the corresponding field as updated.
        """
        # `id is None` distinguishes an INSERT from an UPDATE.
        created = self.id is None
        source_updated = kwargs.pop("source_updated", None) or self._source_updated
        target_updated = kwargs.pop("target_updated", None) or self._target_updated
        state_updated = kwargs.pop("state_updated", None) or self._state_updated
        auto_translated = (
            kwargs.pop("auto_translated", None)
            or self._auto_translated)
        comment_updated = (
            kwargs.pop("comment_updated", None)
            or self._comment_updated)
        action = kwargs.pop("action", None) or getattr(self, "_save_action", None)

        # Fall back to the system user for attribution when no explicit
        # log user was set on the instance.
        if not hasattr(self, '_log_user'):
            User = get_user_model()
            self._log_user = User.objects.get_system_user()
        user = kwargs.pop("user", self._log_user)

        if created:
            action = UNIT_ADDED

        if source_updated:
            # update source related fields
            self.source_hash = md5(self.source_f.encode("utf-8")).hexdigest()
            self.source_length = len(self.source_f)
            self.update_wordcount(auto_translate=True)

        if target_updated:
            # update target related fields
            self.target_wordcount = count_words(self.target_f.strings)
            self.target_length = len(self.target_f)
            # NOTE: relies on Python 2 `filter` returning a list — truthy
            # only when at least one target string is non-empty.
            if filter(None, self.target_f.strings):
                if self.state == UNTRANSLATED:
                    self.state = TRANSLATED
                    # Only set a default action; an explicit one wins.
                    action = action or TRANSLATION_ADDED
                else:
                    action = action or TRANSLATION_CHANGED
            else:
                action = TRANSLATION_DELETED
                # if it was TRANSLATED then set to UNTRANSLATED
                if self.state > FUZZY:
                    self.state = UNTRANSLATED

        # Updating unit from the .po file set its revision property to
        # a new value (the same for all units during its store updated)
        # since that change doesn't require further sync but note that
        # auto_translated units require further sync
        revision = kwargs.pop('revision', None)
        if revision is not None and not auto_translated:
            self.revision = revision
        elif target_updated or state_updated or comment_updated:
            self.revision = Revision.incr()

        # Log updates before saving; creation is logged separately after
        # the row exists (see the UNIT_ADDED branch below).
        if not created and action:
            action_log(
                user=self._log_user,
                action=action,
                lang=self.store.translation_project.language.code,
                unit=self.id,
                translation=self.target_f,
                path=self.store.pootle_path)
        was_fuzzy = (
            state_updated and self.state == TRANSLATED
            and action == TRANSLATION_CHANGED
            and not target_updated)
        if was_fuzzy:
            # set reviewer data if FUZZY has been removed only and
            # translation hasn't been updated
            self.reviewed_on = timezone.now()
            self.reviewed_by = self._log_user
        elif self.state == FUZZY:
            # clear reviewer data if unit has been marked as FUZZY
            self.reviewed_on = None
            self.reviewed_by = None
        elif self.state == UNTRANSLATED:
            # clear reviewer and translator data if translation
            # has been deleted
            self.reviewed_on = None
            self.reviewed_by = None
            self.submitted_by = None
            self.submitted_on = None

        super(Unit, self).save(*args, **kwargs)

        # Creation logging happens post-save so `self.id` is populated.
        if action and action == UNIT_ADDED:
            action_log(
                user=self._log_user,
                action=action,
                lang=self.store.translation_project.language.code,
                unit=self.id,
                translation=self.target_f,
                path=self.store.pootle_path)
            self.add_initial_submission(user=user)

        if source_updated or target_updated:
            # Skip quality checks for freshly created untranslated units;
            # the TM server only needs translated content.
            if not (created and self.state == UNTRANSLATED):
                self.update_qualitychecks()
            if self.istranslated():
                self.update_tmserver()

        # done processing source/target update remove flag
        self._source_updated = False
        self._target_updated = False
        self._state_updated = False
        self._comment_updated = False
        self._auto_translated = False

        # Notify listeners that the owning store's data may have changed.
        update_data.send(
            self.store.__class__, instance=self.store)

Example 48

Project: pootle
Source File: models.py
View license
    def update(self, unit, user=None):
        """Update in-DB translation from the given :param:`unit`.

        :param unit: toolkit unit whose values are copied onto this one.
        :param user: User to attribute updates to.
        :rtype: bool
        :return: True if the new :param:`unit` differs from the current unit.
            Two units differ when any of the fields differ (source, target,
            translator/developer comments, locations, context, status...).
        """
        changed = False

        if user is None:
            User = get_user_model()
            user = User.objects.get_system_user()

        # Source changed if the text, the number of plural strings, or the
        # plural-ness itself differs.
        update_source = (
            self.source != unit.source
            or (len(self.source.strings)
                != stringcount(unit.source))
            or (self.hasplural()
                != unit.hasplural()))
        if update_source:
            if unit.hasplural() and len(unit.source.strings) == 1:
                # Pad single-string plural sources so the plural form is
                # preserved — presumably stripped again on sync; TODO
                # confirm against PLURAL_PLACEHOLDER handling elsewhere.
                self.source = [unit.source, PLURAL_PLACEHOLDER]
            else:
                self.source = unit.source
            changed = True

        update_target = (
            self.target != unit.target
            or (len(self.target.strings)
                != stringcount(unit.target)))
        if update_target:
            # NOTE: relies on Python 2 `filter` returning a list — truthy
            # only when at least one string is non-empty.
            notempty = filter(None, self.target_f.strings)
            self.target = unit.target
            # Attribute the new translation even before deciding whether
            # this counts as a "change" (see FIXME below).
            self.submitted_by = user
            self.submitted_on = timezone.now()

            if filter(None, self.target_f.strings) or notempty:
                # FIXME: we need to do this cause we discard nplurals for empty
                # plurals
                changed = True

        notes = unit.getnotes(origin="developer")

        # Only flag a change when at least one side is non-empty, so
        # '' vs None does not count as a difference.
        if (self.developer_comment != notes and
            (self.developer_comment or notes)):
            self.developer_comment = notes or None
            changed = True

        notes = unit.getnotes(origin="translator")

        if (self.translator_comment != notes and
            (self.translator_comment or notes)):
            self.translator_comment = notes or None
            changed = True
            # Comment changes are tracked separately for revision bumping.
            self._comment_updated = True

        locations = "\n".join(unit.getlocations())
        if self.locations != locations and (self.locations or locations):
            self.locations = locations or None
            changed = True

        context = unit.getcontext()
        if self.context != unit.getcontext() and (self.context or context):
            self.context = context or None
            changed = True

        if self.isfuzzy() != unit.isfuzzy():
            self.markfuzzy(unit.isfuzzy())
            changed = True

        if self.isobsolete() != unit.isobsolete():
            if unit.isobsolete():
                self.makeobsolete()
            else:
                # Resurrected units inherit the incoming fuzzy state.
                self.resurrect(unit.isfuzzy())

            changed = True

        if self.unitid != unit.getid():
            # `unicode` makes this Python 2 only; fall back to the source
            # text when the unit has no id.
            self.unitid = unicode(unit.getid()) or unicode(unit.source)
            self.unitid_hash = md5(self.unitid.encode("utf-8")).hexdigest()
            changed = True

        return changed

Example 49

Project: openduty
Source File: incidents.py
View license
    def create(self, request, *args, **kwargs):
        """Create or update an incident from an event posted with a
        service API key.

        Expects ``service_key`` and ``event_type`` in the request data;
        ``incident_key`` is optional for trigger events (one is generated)
        and mandatory for all other event types.
        """
        # Resolve the service owning this event from the submitted token;
        # unknown token -> 403, token without a service mapping -> 404.
        try:
            token = Token.objects.get(key=request.DATA["service_key"])
            serviceToken = ServiceTokens.objects.get(token_id=token)
            service = serviceToken.service_id
        except ServiceTokens.DoesNotExist:
            return Response({}, status=status.HTTP_404_NOT_FOUND)
        except Token.DoesNotExist:
            return Response({}, status=status.HTTP_403_FORBIDDEN)

        # Incident + event-log writes must land together or not at all.
        with transaction.atomic():
            try:
                # Existing incident for this key/service: the event is a
                # state transition.
                incident = Incident.objects.get(
                    incident_key=request.DATA["incident_key"],
                    service_key=service)

                event_log_message = "%s api key changed %s from %s to %s" % (
                    serviceToken.name, incident.incident_key,
                    incident.event_type, request.DATA['event_type'])
            except (Incident.DoesNotExist, KeyError):
                # No such incident (or no incident_key supplied): start a
                # fresh one.
                incident = Incident()
                try:
                    incident.incident_key = request.DATA["incident_key"]
                except KeyError:
                    if request.DATA["event_type"] == Incident.TRIGGER:
                        # Only trigger events may omit the key; generate a
                        # URL-safe one from a UUID ('=' padding stripped).
                        incident.incident_key = base64.urlsafe_b64encode(
                            uuid.uuid1().bytes).replace(
                            '=',
                            '')
                    else:
                        response = {}
                        response["status"] = "failure"
                        response["message"] = "Mandatory parameter missing"
                        return Response(
                            response,
                            status=status.HTTP_400_BAD_REQUEST)
                incident.service_key = service

                event_log_message = "%s api key created %s with status %s" % (
                    serviceToken.name, incident.incident_key, request.DATA['event_type'])

            # Irrelevant transitions (per is_relevant) are silently
            # ignored but still answered with success below.
            if self.is_relevant(incident, request.DATA['event_type']):
                event_log = EventLog()
                # Anonymous user for testing
                if request.user.is_anonymous():
                    user = None
                else:
                    user = request.user
                event_log.user = user
                event_log.service_key = incident.service_key
                event_log.data = event_log_message
                event_log.occurred_at = timezone.now()

                incident.event_type = request.DATA["event_type"]
                # Description is truncated to the model's 100-char limit.
                incident.description = request.DATA["description"][:100]
                incident.details = request.DATA.get("details", "")
                incident.occurred_at = timezone.now()
                try:
                    incident.full_clean()
                except ValidationError as e:
                    return Response(
                        {'errors': e.messages},
                        status=status.HTTP_400_BAD_REQUEST)
                incident.save()
                # The log row references the saved incident.
                event_log.incident_key = incident
                event_log.action = incident.event_type
                event_log.save()
                # Suppress notifications while the whole service is
                # silenced.
                servicesilenced = ServiceSilenced.objects.filter(
                    service=service).count() > 0
                if incident.event_type == Incident.TRIGGER and not servicesilenced:
                    NotificationHelper.notify_incident(incident)
                if incident.event_type == "resolve" or incident.event_type == Incident.ACKNOWLEDGE:
                    # Resolved/acknowledged incidents need no further
                    # scheduled notifications.
                    ScheduledNotification.remove_all_for_incident(incident)

            headers = self.get_success_headers(request.POST)

            response = {}
            response["status"] = "success"
            response["message"] = "Event processed"
            response["incident_key"] = incident.incident_key
            return Response(
                response,
                status=status.HTTP_201_CREATED,
                headers=headers)

Example 50

Project: nodewatcher
Source File: processors.py
View license
    def process(self, context, nodes):
        """
        Performs network-wide processing and selects the nodes that will be processed
        in any following processors.

        Runs concurrent fping sweeps (one per configured packet size)
        against every node with an IPv4 router ID and stores per-node RTT
        statistics in ``context.rtt``.

        :param context: Current context
        :param nodes: A set of nodes that are to be processed
        :return: A (possibly) modified context and a (possibly) modified set of nodes
        """

        # Detect the location of fping binary; without it no measurement
        # is possible, so pass the nodes through unchanged.
        fping = which.which('fping')
        if not fping:
            self.logger.error("Unable to find 'fping' binary!")
            return context, nodes

        # Check if source node for measurements is configured and valid
        source_node_id = getattr(settings, 'MEASUREMENT_SOURCE_NODE', None)
        try:
            context.rtt.source_node = core_models.Node.objects.get(uuid=source_node_id)
        except core_models.Node.DoesNotExist:
            self.logger.warning("Invalid or no measurement source UUID specified in MEASUREMENT_SOURCE_NODE!")
            context.rtt.source_node = None

        # Prepare a list of node IPv4 addresses
        node_ips = []
        for node in nodes:
            try:
                node_ips.append(str(node.config.core.routerid(queryset=True).filter(rid_family='ipv4')[0].router_id))
            except IndexError:
                # Node has no IPv4 router ID configured; skip it.
                continue

        # If there are no node IPs skip the measurement procedure
        if not node_ips:
            self.logger.warning("No nodes selected for measurement. Skipping RTT measurement.")
            return context, nodes

        # Perform ping tests of different sizes: one fping subprocess per
        # packet size, fed and drained concurrently from worker threads.
        processes = []
        threads = []
        # Deques serve as append-only collectors shared across threads.
        outputs = collections.deque()
        metadata = collections.deque()
        for size in self.PACKET_SIZES:
            # fping flags: -q quiet, -p inter-packet interval, -b packet
            # size in bytes, -C per-target count (reports each RTT).
            args = [
                fping,
                '-q',
                '-p', '20',
                '-b', str(size),
                '-C', str(self.PACKET_COUNT),
            ]

            self.logger.info("Performing ICMP ECHO RTT measurements with %d byte packets to %d nodes." % (size, len(node_ips)))
            process = subprocess.Popen(
                args,
                stdin=subprocess.PIPE,
                stdout=subprocess.PIPE,
                stderr=subprocess.STDOUT,
                close_fds=True,
            )
            processes.append(process)

            def communicator(node_ips, size, process):
                # Feed the target IPs on stdin, block until fping exits and
                # record the measurement's wall-clock window alongside the
                # whitespace-split output rows. Loop variables are passed
                # as arguments to avoid the late-binding closure pitfall.
                t0 = timezone.now()
                data = process.communicate(("\n".join(node_ips)) + '\n')[0]
                t1 = timezone.now()
                outputs.append((size, [x.split() for x in data.strip().split('\n')]))
                metadata.append((size, t0, t1))

            thread = threading.Thread(target=communicator, args=(node_ips, size, process))
            thread.daemon = True
            threads.append(thread)
            thread.start()

        for t in threads:
            t.join()
        # Best-effort cleanup: the processes have normally exited by now,
        # so ignore OSError ("no such process") from kill().
        for p in processes:
            try:
                p.kill()
            except OSError:
                pass

        self.logger.info("All ICMP ECHO RTT measurements completed.")

        # Expose the start/end timestamps of each sweep, keyed by size.
        context.rtt.meta = {}
        for size, start, end in metadata:
            context.rtt.meta[size] = {
                'start': start,
                'end': end,
            }

        context.rtt.results = {}
        for size, results in outputs:
            for result in results:
                try:
                    # First column is the target address; lines that fail
                    # to parse are fping diagnostics, not results.
                    node_ip = ipaddr.IPAddress(result[0])
                except ValueError:
                    # fping error message for a specific packet
                    continue

                try:
                    # Remaining columns are per-packet RTTs; '-' marks a
                    # lost packet and is dropped here.
                    rtt = [float(x) for x in result[2:] if x != '-']
                except ValueError:
                    # TODO: Handle output for duplicate packets
                    continue

                n = len(rtt)
                s = sum(rtt)
                ss = sum([x ** 2 for x in rtt])

                # Sample standard deviation via the sum/sum-of-squares
                # identity; undefined for 0 samples, zero for exactly 1.
                if n == 0:
                    std = None
                elif n == 1:
                    std = 0.0
                else:
                    std = math.sqrt((float(n) * ss - s ** 2) / (n * (n - 1)))

                context.rtt.results.setdefault(str(node_ip), {})[size] = {
                    'sent': self.PACKET_COUNT,
                    'successful': n,
                    'failed': max(0, self.PACKET_COUNT - n),
                    'rtt_min': min(rtt) if rtt else None,
                    'rtt_max': max(rtt) if rtt else None,
                    'rtt_avg': (float(s) / n) if rtt else None,
                    'rtt_std': std,
                }

        return context, nodes