django.conf.settings.DEBUG

Here are examples of the Python API `django.conf.settings.DEBUG`, taken from open source projects. By voting up you can indicate which examples are most useful and appropriate.

200 Examples, page 7

Example 1

Project: talk.org
Source File: simple.py
View license
def run_tests(test_labels, verbosity=1, interactive=True, extra_tests=None):
    """
    Run the unit tests for all the test labels in the provided list.

    Labels must be of the form:
     - app.TestClass.test_method
        Run a single specific test method
     - app.TestClass
        Run all the test methods in a given class
     - app
        Search for doctests and unittests in the named application.

    When looking for tests, the test runner will look in the models and
    tests modules for the application.

    A list of 'extra' tests may also be provided; these tests
    will be added to the test suite.

    Returns the number of tests that failed.
    """
    # A mutable default ([]) was previously used for extra_tests, which is
    # shared between calls; use a None sentinel instead.
    if extra_tests is None:
        extra_tests = []

    setup_test_environment()

    # Force DEBUG off so tests exercise production-like code paths.
    settings.DEBUG = False
    suite = unittest.TestSuite()

    if test_labels:
        for label in test_labels:
            if '.' in label:
                # 'app.TestClass[.test_method]' -> one class or method.
                suite.addTest(build_test(label))
            else:
                # Bare app label -> all tests in that application.
                app = get_app(label)
                suite.addTest(build_suite(app))
    else:
        # No labels given: run tests for every installed application.
        for app in get_apps():
            suite.addTest(build_suite(app))

    for test in extra_tests:
        suite.addTest(test)

    # Swap in a freshly created test database for the run, then restore
    # the original database afterwards.
    old_name = settings.DATABASE_NAME
    create_test_db(verbosity, autoclobber=not interactive)
    result = unittest.TextTestRunner(verbosity=verbosity).run(suite)
    destroy_test_db(old_name, verbosity)

    teardown_test_environment()

    return len(result.failures) + len(result.errors)

Example 2

Project: logtacts
Source File: send_contact_reminders.py
View license
    def handle(self, *args, **options):
        """
        Send periodic reminder emails.

        Two passes over Profile rows:
        1. Users with send_contact_reminders=True get an email about one
           randomly chosen surfaced contact not reached in four weeks.
        2. Users with send_birthday_reminders=True get an email listing
           contacts whose date-type 'Birthday' field matches today.

        Send failures are logged; outside DEBUG they are also reported to
        the Slack webhook configured in settings.
        """
        logger.debug("Starting contact reminder sending")
        last_month = timezone.now() - timedelta(weeks=4)
        profiles_opted_in = Profile.objects.filter(send_contact_reminders=True)
        for profile in profiles_opted_in:
            logger.debug("Starting compilation for {}".format(profile.user))
            # Random surfaced contact last reached over four weeks ago
            # (or never contacted at all).
            contact = Contact.objects.get_contacts_for_user(
                profile.user
            ).filter(
                Q(last_contact__lte=last_month) | Q(last_contact=None),
                should_surface=True,
            ).order_by('?')[0]
            subject = '[Contact Otter] Contact reminder'
            context = {
                'contact': contact,
                'domain': Site.objects.get_current().domain,
            }
            txt = get_template('email/contact_reminder.txt').render(context)
            html = get_template('email/contact_reminder.html').render(context)
            message = EmailMultiAlternatives(
                subject=subject,
                body=txt,
                from_email="ContactOtter <[email protected]>",
                to=[profile.user.email],
            )
            message.attach_alternative(html, "text/html")
            try:
                logger.debug("Trying to send message to {} about {}".format(
                    profile.user, contact
                ))
                message.send()
                logger.debug("Sent message to {} successfuly".format(profile.user))
            # Was a bare `except:`, which also swallowed SystemExit and
            # KeyboardInterrupt.
            except Exception:
                logger.exception('Problem sending reminder for %s' % (profile))
                try:
                    if not settings.DEBUG:
                        payload = {
                            'text': 'Error in contactotter reminder: {}'.format(profile)
                        }
                        requests.post(
                            settings.SLACK_WEBHOOK_URL,
                            data=json.dumps(payload),
                        )
                except Exception:
                    logger.exception("Error sending error to slack")

        profiles_opted_in = Profile.objects.filter(send_birthday_reminders=True)
        for profile in profiles_opted_in:
            birthdays = ContactField.objects.filter(
                Q(label='Birthday') | Q(label='birthday') | Q(label='BIRTHDAY'),
                kind=contact_constants.FIELD_TYPE_DATE,
                value=timezone.now().strftime("%Y-%m-%d")
            )
            contacts = None
            if birthdays:
                contacts = [birthday.contact for birthday in birthdays]
            if contacts:
                context = {
                    'contacts': contacts,
                    'domain': Site.objects.get_current().domain,
                }
                subject="[ContactOtter] Birthday reminder"
                txt = get_template('email/birthday_reminder.txt').render(context)
                html = get_template('email/birthday_reminder.html').render(context)
                message = EmailMultiAlternatives(
                    subject=subject,
                    body=txt,
                    from_email='ContactOtter <[email protected]>',
                    to=[profile.user.email],
                )
                message.attach_alternative(html, "text/html")
                try:
                    # BUG FIX: this log line previously referenced `contact`,
                    # a stale variable leaked from the first loop; it should
                    # describe this profile's birthday `contacts`.
                    logger.debug("Trying to send message to {} about {}".format(
                        profile.user, contacts
                    ))
                    message.send()
                    logger.debug("Sent message to {} successfuly".format(profile.user))
                except Exception:
                    logger.exception('Problem sending reminder for %s' % (profile))
                    try:
                        if not settings.DEBUG:
                            payload = {
                                'text': 'Error in logtacts reminder: {}'.format(profile)
                            }
                            requests.post(
                                settings.SLACK_WEBHOOK_URL,
                                data=json.dumps(payload),
                            )
                    except Exception:
                        logger.exception("Error sending error to slack")

Example 3

Project: logtacts
Source File: send_invites.py
View license
    def handle(self, *args, **options):
        """
        Send up to four pending ContactOtter invitations by email.

        Each Invitation moves PENDING -> PROCESSING -> SENT (with a sent
        timestamp) on success, or -> ERROR on failure.  Failures are
        logged and, outside DEBUG, reported to the Slack webhook.
        """
        invites_to_send = Invitation.objects.filter(
            status=Invitation.PENDING
        )[:4]

        for invite in invites_to_send:
            logger.debug('Sending invite %s' % (invite.id))
            # Mark PROCESSING immediately so a concurrent run won't pick
            # up the same invitation.
            invite.status = Invitation.PROCESSING
            invite.save()
            if invite.book:
                subject = "[ContactOtter] Invitation to share %s's contact book" % (invite.sender)
                body = (
                        "%s has invited you to share their contact book on ContactOtter.\n"
                        "Go to https://%s/invites/accept/%s/ to join!"
                    ) % (
                        invite.sender,
                        Site.objects.get_current().domain,
                        invite.key,
                    )
            else:
                subject = "[ContactOtter] Invitation to join ContactOtter from %s" % (invite.sender)
                body = "Go to https://%s/invites/accept/%s/ to join!" % (
                        Site.objects.get_current().domain,
                        invite.key,
                    )
            try:
                message = EmailMessage(
                    subject=subject,
                    body=body,
                    from_email="ContactOtter <[email protected]>",
                    to=[invite.email,],
                )
                message.send()
                invite.status = Invitation.SENT
                invite.sent = timezone.now()
                invite.save()
            # Was a bare `except:`, which also swallowed SystemExit and
            # KeyboardInterrupt.
            except Exception:
                logger.exception('Problem sending invite %s' % (invite.id))
                invite.status = Invitation.ERROR
                invite.save()
                try:
                    if not settings.DEBUG:
                        payload = {
                            # BUG FIX: previously formatted `job.id`, but no
                            # `job` exists in this scope (NameError); the
                            # failing object is `invite`.
                            'text': 'Error in logtacts invite: {}'.format(invite.id)
                        }
                        requests.post(
                            settings.SLACK_WEBHOOK_URL,
                            data=json.dumps(payload),
                        )
                except Exception:
                    logger.exception("Error sending error to slack")

Example 4

Project: django-inlineobjects
Source File: parser.py
View license
def render_inline(inline):
    """
    Replace inline markup with template markup that matches the
    appropriate app and model.

    Returns a dict with 'template' (candidate template names) and
    'context' keys on success.  On invalid markup, raises when
    settings.DEBUG is on and returns '' (renders nothing) otherwise.
    """

    # Look for inline type, 'app.model'
    try:
        app_label, model_name = inline['type'].split('.')
    # Was a bare `except:`.  KeyError: no 'type' attribute on the tag;
    # ValueError: the value is not exactly 'app.model'.
    except (KeyError, ValueError):
        if settings.DEBUG:
            raise TemplateSyntaxError("Couldn't find the attribute 'type' in "
                                       "the <inline> tag.")
        else:
            return ''

    # Look for content type
    try:
        content_type = ContentType.objects.get(app_label=app_label,
                                               model=model_name)
        model = content_type.model_class()
    except ContentType.DoesNotExist:
        if settings.DEBUG:
            raise TemplateSyntaxError("Inline ContentType not found.")
        else:
            return ''

    # Create the context with all the attributes in the inline markup.
    context = dict((attr[0], attr[1]) for attr in inline.attrs)

    # If multiple IDs were specified, build a list of all requested objects
    # and add them to the context.
    try:
        try:
            id_list = [int(i) for i in inline['ids'].split(',')]
            obj_list = model.objects.in_bulk(id_list)
            obj_list = list(obj_list[int(i)] for i in id_list)
            context['object_list'] = obj_list
        except ValueError:
            if settings.DEBUG:
                raise ValueError("The <inline> ids attribute is missing or "
                                 "invalid.")
            else:
                return ''

    # If only one ID was specified, retrieve the requested object and add it
    # to the context.
    except KeyError:
        try:
            obj = model.objects.get(pk=inline['id'])
            context['object'] = obj
            context['settings'] = settings
        except model.DoesNotExist:
            if settings.DEBUG:
                raise model.DoesNotExist("%s with pk of '%s' does not exist"
                                         % (model_name, inline['id']))
            else:
                return ''
        # Was a bare `except:`; Exception still covers the missing/invalid
        # 'id' attribute cases without trapping SystemExit.
        except Exception:
            if settings.DEBUG:
                raise TemplateSyntaxError("The <inline> id attribute is "
                                          "missing or invalid.")
            else:
                return ''

    # Set the name of the template that should be used to render the inline.
    template = ["inlines/%s_%s.html" % (app_label, model_name),
                "inlines/default.html"]

    # Return the template name and the context.
    return {'template': template, 'context': context}

Example 5

Project: GAE-Bulk-Mailer
Source File: loaddata.py
View license
    def handle(self, *fixture_labels, **options):
        """
        Load the named fixtures into the database.

        Each label is a fixture name optionally suffixed with a
        serialization format and/or compression extension (e.g. 'data',
        'data.json', 'data.json.zip').  Fixtures are searched for in each
        app's fixtures/ directory, in settings.FIXTURE_DIRS, and as
        literal paths.  All fixtures are installed inside a single
        transaction (unless commit=False), constraint checks are deferred
        until the end, and sequences are reset if any objects loaded.
        """

        ignore = options.get('ignore')
        using = options.get('database')

        connection = connections[using]

        if not len(fixture_labels):
            raise CommandError(
                "No database fixture specified. Please provide the path of at "
                "least one fixture in the command line."
            )

        verbosity = int(options.get('verbosity'))
        show_traceback = options.get('traceback')

        # commit is a stealth option - it isn't really useful as
        # a command line option, but it can be useful when invoking
        # loaddata from within another script.
        # If commit=True, loaddata will use its own transaction;
        # if commit=False, the data load SQL will become part of
        # the transaction in place when loaddata was invoked.
        commit = options.get('commit', True)

        # Keep a count of the installed objects and fixtures
        fixture_count = 0
        loaded_object_count = 0
        fixture_object_count = 0
        models = set()

        humanize = lambda dirname: "'%s'" % dirname if dirname else 'absolute path'

        # Get a cursor (even though we don't need one yet). This has
        # the side effect of initializing the test database (if
        # it isn't already initialized).
        cursor = connection.cursor()

        # Start transaction management. All fixtures are installed in a
        # single transaction to ensure that all references are resolved.
        if commit:
            transaction.commit_unless_managed(using=using)
            transaction.enter_transaction_management(using=using)
            transaction.managed(True, using=using)

        # Opens a zip-compressed fixture; DEBUG-only assert enforces the
        # single-member convention, and read() returns that member's bytes.
        class SingleZipReader(zipfile.ZipFile):
            def __init__(self, *args, **kwargs):
                zipfile.ZipFile.__init__(self, *args, **kwargs)
                if settings.DEBUG:
                    assert len(self.namelist()) == 1, "Zip-compressed fixtures must contain only one file."
            def read(self):
                return zipfile.ZipFile.read(self, self.namelist()[0])

        # Maps compression extension -> callable that opens the fixture
        # stream (None means an uncompressed file).
        compression_types = {
            None:   open,
            'gz':   gzip.GzipFile,
            'zip':  SingleZipReader
        }
        if has_bz2:
            compression_types['bz2'] = bz2.BZ2File

        app_module_paths = []
        for app in get_apps():
            if hasattr(app, '__path__'):
                # It's a 'models/' subpackage
                for path in app.__path__:
                    app_module_paths.append(upath(path))
            else:
                # It's a models.py module
                app_module_paths.append(upath(app.__file__))

        # Candidate fixtures/ directory alongside each app's models.
        app_fixtures = [os.path.join(os.path.dirname(path), 'fixtures') for path in app_module_paths]

        try:
            with connection.constraint_checks_disabled():
                for fixture_label in fixture_labels:
                    # Peel a trailing compression extension off the label,
                    # then a serialization format, leaving the fixture name.
                    parts = fixture_label.split('.')

                    if len(parts) > 1 and parts[-1] in compression_types:
                        compression_formats = [parts[-1]]
                        parts = parts[:-1]
                    else:
                        compression_formats = compression_types.keys()

                    if len(parts) == 1:
                        fixture_name = parts[0]
                        formats = serializers.get_public_serializer_formats()
                    else:
                        fixture_name, format = '.'.join(parts[:-1]), parts[-1]
                        if format in serializers.get_public_serializer_formats():
                            formats = [format]
                        else:
                            formats = []

                    if formats:
                        if verbosity >= 2:
                            self.stdout.write("Loading '%s' fixtures..." % fixture_name)
                    else:
                        raise CommandError(
                            "Problem installing fixture '%s': %s is not a known serialization format." %
                                (fixture_name, format))

                    if os.path.isabs(fixture_name):
                        fixture_dirs = [fixture_name]
                    else:
                        fixture_dirs = app_fixtures + list(settings.FIXTURE_DIRS) + ['']

                    for fixture_dir in fixture_dirs:
                        if verbosity >= 2:
                            self.stdout.write("Checking %s for fixtures..." % humanize(fixture_dir))

                        label_found = False
                        # Try every combination of database suffix (or none),
                        # serialization format, and compression format.
                        for combo in product([using, None], formats, compression_formats):
                            database, format, compression_format = combo
                            file_name = '.'.join(
                                p for p in [
                                    fixture_name, database, format, compression_format
                                ]
                                if p
                            )

                            if verbosity >= 3:
                                self.stdout.write("Trying %s for %s fixture '%s'..." % \
                                    (humanize(fixture_dir), file_name, fixture_name))
                            full_path = os.path.join(fixture_dir, file_name)
                            open_method = compression_types[compression_format]
                            try:
                                fixture = open_method(full_path, 'r')
                            except IOError:
                                if verbosity >= 2:
                                    self.stdout.write("No %s fixture '%s' in %s." % \
                                        (format, fixture_name, humanize(fixture_dir)))
                            # else: runs only when the fixture file opened.
                            else:
                                try:
                                    if label_found:
                                        raise CommandError("Multiple fixtures named '%s' in %s. Aborting." %
                                            (fixture_name, humanize(fixture_dir)))

                                    fixture_count += 1
                                    objects_in_fixture = 0
                                    loaded_objects_in_fixture = 0
                                    if verbosity >= 2:
                                        self.stdout.write("Installing %s fixture '%s' from %s." % \
                                            (format, fixture_name, humanize(fixture_dir)))

                                    objects = serializers.deserialize(format, fixture, using=using, ignorenonexistent=ignore)

                                    for obj in objects:
                                        objects_in_fixture += 1
                                        # Only save objects the router allows
                                        # onto this database.
                                        if router.allow_syncdb(using, obj.object.__class__):
                                            loaded_objects_in_fixture += 1
                                            models.add(obj.object.__class__)
                                            try:
                                                obj.save(using=using)
                                            except (DatabaseError, IntegrityError) as e:
                                                # Re-raise with the failing
                                                # object identified.
                                                e.args = ("Could not load %(app_label)s.%(object_name)s(pk=%(pk)s): %(error_msg)s" % {
                                                        'app_label': obj.object._meta.app_label,
                                                        'object_name': obj.object._meta.object_name,
                                                        'pk': obj.object.pk,
                                                        'error_msg': force_text(e)
                                                    },)
                                                raise

                                    loaded_object_count += loaded_objects_in_fixture
                                    fixture_object_count += objects_in_fixture
                                    label_found = True
                                except Exception as e:
                                    if not isinstance(e, CommandError):
                                        e.args = ("Problem installing fixture '%s': %s" % (full_path, e),)
                                    raise
                                finally:
                                    fixture.close()

                                # If the fixture we loaded contains 0 objects, assume that an
                                # error was encountered during fixture loading.
                                if objects_in_fixture == 0:
                                    raise CommandError(
                                        "No fixture data found for '%s'. (File format may be invalid.)" %
                                            (fixture_name))

            # Since we disabled constraint checks, we must manually check for
            # any invalid keys that might have been added
            table_names = [model._meta.db_table for model in models]
            try:
                connection.check_constraints(table_names=table_names)
            except Exception as e:
                e.args = ("Problem installing fixtures: %s" % e,)
                raise
        except (SystemExit, KeyboardInterrupt):
            raise
        except Exception as e:
            # Roll back the whole load on any other failure so a partial
            # fixture install never persists.
            if commit:
                transaction.rollback(using=using)
                transaction.leave_transaction_management(using=using)
            raise

        # If we found even one object in a fixture, we need to reset the
        # database sequences.
        if loaded_object_count > 0:
            sequence_sql = connection.ops.sequence_reset_sql(no_style(), models)
            if sequence_sql:
                if verbosity >= 2:
                    self.stdout.write("Resetting sequences\n")
                for line in sequence_sql:
                    cursor.execute(line)

        if commit:
            transaction.commit(using=using)
            transaction.leave_transaction_management(using=using)

        if verbosity >= 1:
            if fixture_object_count == loaded_object_count:
                self.stdout.write("Installed %d object(s) from %d fixture(s)" % (
                    loaded_object_count, fixture_count))
            else:
                self.stdout.write("Installed %d object(s) (of %d) from %d fixture(s)" % (
                    loaded_object_count, fixture_object_count, fixture_count))

        # Close the DB connection. This is required as a workaround for an
        # edge case in MySQL: if the same connection is used to
        # create tables, load data, and query, the query can return
        # incorrect results. See Django #7572, MySQL #37735.
        if commit:
            connection.close()

Example 6

Project: GAE-Bulk-Mailer
Source File: dispatcher.py
View license
    def connect(self, receiver, sender=None, weak=True, dispatch_uid=None):
        """
        Register *receiver* so that it is called when this signal is sent.

        Arguments:

            receiver
                A callable (function or bound method) that will receive the
                signal.  It must be hashable, must accept keyword arguments,
                and — when ``weak`` is True — must be weak-referencable via
                saferef.safeRef().  If ``dispatch_uid`` (or an equivalent
                existing key) is already registered, the receiver is not
                added a second time.

            sender
                The object the receiver responds to; ``None`` means signals
                from any sender.

            weak
                When True (the default) the receiver is held through a weak
                reference; pass False to keep a strong reference.

            dispatch_uid
                A hashable identifier (usually a string) that uniquely names
                this registration.
        """
        from django.conf import settings

        # With DEBUG enabled, validate the receiver before registering it.
        if settings.DEBUG:
            import inspect
            assert callable(receiver), "Signal receivers must be callable."

            # Verify the receiver accepts **kwargs.  Not every callable can
            # be introspected with getargspec, so fall back through
            # receiver.__call__ and, failing that, skip the check entirely —
            # we'd rather register an odd-but-valid callable than reject it.
            spec = None
            try:
                spec = inspect.getargspec(receiver)
            except TypeError:
                try:
                    spec = inspect.getargspec(receiver.__call__)
                except (TypeError, AttributeError):
                    spec = None
            if spec:
                assert spec[2] is not None, \
                    "Signal receivers must accept keyword arguments (**kwargs)."

        # Registration key: explicit dispatch_uid wins, otherwise the
        # (receiver id, sender id) pair.
        if dispatch_uid:
            lookup_key = (dispatch_uid, _make_id(sender))
        else:
            lookup_key = (_make_id(receiver), _make_id(sender))

        if weak:
            receiver = saferef.safeRef(receiver, onDelete=self._remove_receiver)

        # Append only if no receiver is already registered under this key.
        with self.lock:
            duplicate = any(
                existing_key == lookup_key
                for existing_key, _ in self.receivers
            )
            if not duplicate:
                self.receivers.append((lookup_key, receiver))

Example 7

View license
def exception_handler(exc, context):
    """
    Replacement for REST framework's default exception handler.

    Beyond the exceptions DRF already understands, this maps Django-level
    errors onto HTTP responses: validation errors (raised by
    django.db.models and django.forms), database errors (Refs PEP249) and
    protected-object deletes.

    Detail about the error is only exposed where it's safe; this is most
    useful when the server misbehaves in a production environment.

    Setting the `exception` attribute on the response is not necessary as it
    will be done by REST Framework.
    """
    response = views.exception_handler(exc, context)

    # For development, we want to show the root cause stack in page.
    # (response may be None here, letting Django render its debug page.)
    if settings.DEBUG:
        return response

    # DRF already built a response for the exceptions it knows about.
    if response is not None:
        return response

    if isinstance(exc, (exceptions.ValidationError, exceptions.FieldError)):
        # value is not correct or name is invalid.
        detail = exc.messages if hasattr(exc, 'messages') else str(exc)
        return Response({'detail': detail},
                        status=status.HTTP_400_BAD_REQUEST)

    if isinstance(exc, exceptions.ObjectDoesNotExist):
        return Response({'detail': 'Not found:  %s' % str(exc)},
                        status=status.HTTP_404_NOT_FOUND)

    if isinstance(exc, ProtectedError):
        return Response({"detail": "%s %s" % exc.args},
                        status=status.HTTP_400_BAD_REQUEST)

    if isinstance(exc, ValueError):
        return Response({'detail': str(exc)},
                        status=status.HTTP_400_BAD_REQUEST)

    # IntegrityError is a subclass of DatabaseError, so test it first.
    if isinstance(exc, db.IntegrityError):
        # Refs PEP249: maybe a duplicate PK, FK check failure, index conflict.
        return Response({'detail': str(exc)},
                        status=status.HTTP_409_CONFLICT)

    if isinstance(exc, db.DatabaseError):
        # Refs PEP249: other DB errors (incorrect grammar, transaction
        # error etc.) -- don't leak details to the client.
        return Response({'detail': 'The database encountered an internal '
                                   'error or misconfiguration and was '
                                   'unable to complete your request.'},
                        status=status.HTTP_500_INTERNAL_SERVER_ERROR)

    # Anything else is genuinely unexpected: log it with the stack.
    logger = logging.getLogger(__name__)
    logger.error('Unhandled exception', exc_info=sys.exc_info())
    return Response(data=settings.INTERNAL_SERVER_ERROR_RESPONSE,
                    status=status.HTTP_503_SERVICE_UNAVAILABLE)

Example 8

Project: PyClassLessons
Source File: sites.py
View license
    def register(self, model_or_iterable, admin_class=None, **options):
        """
        Register the given model(s) with the given admin class.

        ``model_or_iterable`` may be a single Model class or an iterable of
        Model classes (instances are not accepted).  When ``admin_class``
        is omitted, the stock ``ModelAdmin`` is used.  Any extra keyword
        arguments (e.g. ``list_display``) become attributes of a
        dynamically created subclass of ``admin_class``.

        Raises ``AlreadyRegistered`` for a model that is already in the
        registry, and ``ImproperlyConfigured`` for an abstract model.
        Models that have been swapped out are silently skipped.
        """
        admin_class = admin_class or ModelAdmin

        # Normalize so a bare model class can be passed directly.
        if isinstance(model_or_iterable, ModelBase):
            model_or_iterable = [model_or_iterable]

        for model in model_or_iterable:
            if model._meta.abstract:
                raise ImproperlyConfigured('The model %s is abstract, so it '
                      'cannot be registered with admin.' % model.__name__)

            if model in self._registry:
                raise AlreadyRegistered('The model %s is already registered' % model.__name__)

            # Ignore the registration if the model has been swapped out.
            if model._meta.swapped:
                continue

            if options:
                # Build a subclass of admin_class carrying the **options.
                # The explicit __module__ keeps the generated class "living"
                # in a sensible place; without it odd issues show up later.
                options['__module__'] = __name__
                admin_class = type("%sAdmin" % model.__name__, (admin_class,), options)

            # Only run checks for customized admin classes, and only while
            # developing.
            if admin_class is not ModelAdmin and settings.DEBUG:
                system_check_errors.extend(admin_class.check(model))

            # Instantiate the admin class to save in the registry.
            self._registry[model] = admin_class(model, self)

Example 9

Project: djangopackages
Source File: models.py
View license
    def fetch_pypi_data(self, *args, **kwargs):
        """Pull release metadata for this package from the PyPI JSON API.

        Creates/updates a ``Version`` row (license, downloads, upload time,
        hidden flag, development status, python 3 support) for the version
        PyPI currently reports.

        Returns True when data was fetched and stored, False when the
        package has no usable PyPI URL or PyPI answers 404.
        """
        # Nothing to do for packages without a real PyPI page.
        if not self.pypi_url.strip() or self.pypi_url == "http://pypi.python.org/pypi/":
            return False

        total_downloads = 0
        url = "https://pypi.python.org/pypi/{0}/json".format(self.pypi_name)
        response = requests.get(url)

        # In development, shout about any status code we don't expect.
        if settings.DEBUG and response.status_code not in (200, 404):
            print("BOOM!")
            print(self, response.status_code)
        if response.status_code == 404:
            if settings.DEBUG:
                print("BOOM!")
                print(self, response.status_code)
            return False

        release = json.loads(response.content)
        info = release['info']

        version, created = Version.objects.get_or_create(
            package=self,
            number=info['version']
        )

        # Work out a license string.  When PyPI's license field is empty or
        # just says UNKNOWN, fall back to the trove classifiers.
        license = info['license']
        if not license or not license.strip() or license.upper() == 'UNKNOWN':
            for classifier in info['classifiers']:
                if classifier.strip().startswith('License'):
                    # Do it this way to cover people not quite following the
                    # spec at
                    # http://docs.python.org/distutils/setupscript.html#additional-meta-data
                    license = classifier.strip().replace('License ::', '')
                    license = license.replace('OSI Approved :: ', '')
                    break

        # Some projects paste the entire license text into the field; store
        # a pointer to PyPI instead.
        if license and len(license) > 100:
            license = "Other (see http://pypi.python.org/pypi/%s)" % self.pypi_name

        version.license = license

        # Download count and upload time come from the first release file,
        # when one exists; otherwise we just guess the upload_time.
        urls = release['urls']
        if urls:
            version.downloads = urls[0]['downloads']
            version.upload_time = urls[0]['upload_time']
        else:
            version.upload_time = version.created

        version.hidden = info['_pypi_hidden']

        # Only overwrite these flags when a matching classifier is present.
        dev_status = next((c for c in info['classifiers']
                           if c.startswith('Development Status')), None)
        if dev_status is not None:
            version.development_status = status_choices_switch(dev_status)

        if any(c.startswith('Programming Language :: Python :: 3')
               for c in info['classifiers']):
            version.supports_python3 = True

        version.save()

        # NOTE(review): total_downloads is never accumulated above, so this
        # always stores 0 -- confirm that's intended before relying on it.
        self.pypi_downloads = total_downloads

        return True

Example 10

Project: django-pyodbc
Source File: ss_loaddata.py
View license
    def handle(self, *fixture_labels, **options):
        """Load the named fixtures into the database.

        SQL Server-aware variant of Django's ``loaddata``: forward
        reference checks are disabled (``self.disable_forward_ref_checks``)
        while objects are installed, and re-enabled before every exit path.
        All fixtures are installed in one transaction (unless the stealth
        ``commit=False`` option is used), so any error rolls everything
        back.  Note this is Python 2 code (print statements, old-style
        except clauses).
        """
        from django.db.models import get_apps
        from django.core import serializers
        from django.db import connection, transaction
        from django.conf import settings

        self.style = no_style()

        verbosity = int(options.get('verbosity', 1))
        show_traceback = options.get('traceback', False)

        # commit is a stealth option - it isn't really useful as
        # a command line option, but it can be useful when invoking
        # loaddata from within another script.
        # If commit=True, loaddata will use its own transaction;
        # if commit=False, the data load SQL will become part of
        # the transaction in place when loaddata was invoked.
        commit = options.get('commit', True)

        # Keep a count of the installed objects and fixtures
        fixture_count = 0
        object_count = 0
        models = set()

        # Pretty-printer for fixture directory names in log output.
        humanize = lambda dirname: dirname and "'%s'" % dirname or 'absolute path'

        # Get a cursor (even though we don't need one yet). This has
        # the side effect of initializing the test database (if
        # it isn't already initialized).
        cursor = connection.cursor()

        # Start transaction management. All fixtures are installed in a
        # single transaction to ensure that all references are resolved.
        if commit:
            transaction.commit_unless_managed()
            transaction.enter_transaction_management()
            transaction.managed(True)

        # Allow fixtures to contain forward references while loading;
        # checks are restored via enable_forward_ref_checks() below.
        self.disable_forward_ref_checks()

        # Present a zip archive containing exactly one member as a plain
        # file-like object (read() returns that member's contents).
        class SingleZipReader(zipfile.ZipFile):
            def __init__(self, *args, **kwargs):
                zipfile.ZipFile.__init__(self, *args, **kwargs)
                if settings.DEBUG:
                    assert len(self.namelist()) == 1, "Zip-compressed fixtures must contain only one file."
            def read(self):
                return zipfile.ZipFile.read(self, self.namelist()[0])

        # Maps a compression suffix to a callable that opens such a file.
        compression_types = {
            None:   file,
            'gz':   gzip.GzipFile,
            'zip':  SingleZipReader
        }
        if has_bz2:
            compression_types['bz2'] = bz2.BZ2File

        # Search path: each app's fixtures/ dir, then settings.FIXTURE_DIRS,
        # then '' (relative to the current directory).
        app_fixtures = [os.path.join(os.path.dirname(app.__file__), 'fixtures') for app in get_apps()]
        for fixture_label in fixture_labels:
            parts = fixture_label.split('.')

            # A trailing known compression suffix pins the compression
            # format; otherwise every supported one is tried.
            if len(parts) > 1 and parts[-1] in compression_types:
                compression_formats = [parts[-1]]
                parts = parts[:-1]
            else:
                compression_formats = compression_types.keys()

            # Likewise an explicit serializer suffix pins the format; an
            # unknown suffix leaves formats empty, which is an error below.
            if len(parts) == 1:
                fixture_name = parts[0]
                formats = serializers.get_public_serializer_formats()
            else:
                fixture_name, format = '.'.join(parts[:-1]), parts[-1]
                if format in serializers.get_public_serializer_formats():
                    formats = [format]
                else:
                    formats = []

            if formats:
                if verbosity > 1:
                    print "Loading '%s' fixtures..." % fixture_name
            else:
                self.enable_forward_ref_checks(cursor)
                sys.stderr.write(
                    self.style.ERROR("Problem installing fixture '%s': %s is not a known serialization format." %
                        (fixture_name, format)))
                transaction.rollback()
                transaction.leave_transaction_management()
                return

            if os.path.isabs(fixture_name):
                fixture_dirs = [fixture_name]
            else:
                fixture_dirs = app_fixtures + list(settings.FIXTURE_DIRS) + ['']

            for fixture_dir in fixture_dirs:
                if verbosity > 1:
                    print "Checking %s for fixtures..." % humanize(fixture_dir)

                label_found = False
                for format in formats:
                    for compression_format in compression_formats:
                        if compression_format:
                            file_name = '.'.join([fixture_name, format,
                                                  compression_format])
                        else:
                            file_name = '.'.join([fixture_name, format])

                        if verbosity > 1:
                            print "Trying %s for %s fixture '%s'..." % \
                                (humanize(fixture_dir), file_name, fixture_name)
                        full_path = os.path.join(fixture_dir, file_name)
                        open_method = compression_types[compression_format]
                        try:
                            fixture = open_method(full_path, 'r')
                            # Two files matching one label in the same dir is
                            # ambiguous; abort rather than guess.
                            if label_found:
                                fixture.close()
                                self.enable_forward_ref_checks(cursor)
                                print self.style.ERROR("Multiple fixtures named '%s' in %s. Aborting." %
                                    (fixture_name, humanize(fixture_dir)))
                                transaction.rollback()
                                transaction.leave_transaction_management()
                                return
                            else:
                                fixture_count += 1
                                objects_in_fixture = 0
                                if verbosity > 0:
                                    print "Installing %s fixture '%s' from %s." % \
                                        (format, fixture_name, humanize(fixture_dir))
                                try:
                                    objects = serializers.deserialize(format, fixture)
                                    for obj in objects:
                                        objects_in_fixture += 1
                                        self.handle_ref_checks(cursor, obj)
                                        models.add(obj.object.__class__)
                                        obj.save()
                                    object_count += objects_in_fixture
                                    label_found = True
                                except (SystemExit, KeyboardInterrupt):
                                    # Never swallow user interrupts, but
                                    # restore the ref checks first.
                                    self.enable_forward_ref_checks(cursor)
                                    raise
                                except Exception:
                                    import traceback
                                    fixture.close()
                                    self.enable_forward_ref_checks(cursor)
                                    transaction.rollback()
                                    transaction.leave_transaction_management()
                                    if show_traceback:
                                        traceback.print_exc()
                                    else:
                                        sys.stderr.write(
                                            self.style.ERROR("Problem installing fixture '%s': %s\n" %
                                                 (full_path, ''.join(traceback.format_exception(sys.exc_type,
                                                     sys.exc_value, sys.exc_traceback)))))
                                    return
                                fixture.close()

                                # If the fixture we loaded contains 0 objects, assume that an
                                # error was encountered during fixture loading.
                                if objects_in_fixture == 0:
                                    self.enable_forward_ref_checks(cursor)
                                    sys.stderr.write(
                                        self.style.ERROR("No fixture data found for '%s'. (File format may be invalid.)" %
                                            (fixture_name)))
                                    transaction.rollback()
                                    transaction.leave_transaction_management()
                                    return

                        except Exception, e:
                            # Most likely the file simply doesn't exist for
                            # this dir/format combination; try the next one.
                            if verbosity > 1:
                                print "No %s fixture '%s' in %s." % \
                                    (format, fixture_name, humanize(fixture_dir))

        self.enable_forward_ref_checks(cursor)

        # If we found even one object in a fixture, we need to reset the
        # database sequences.
        if object_count > 0:
            sequence_sql = connection.ops.sequence_reset_sql(self.style, models)
            if sequence_sql:
                if verbosity > 1:
                    print "Resetting sequences"
                for line in sequence_sql:
                    cursor.execute(line)

        if commit:
            transaction.commit()
            transaction.leave_transaction_management()

        if object_count == 0:
            if verbosity > 1:
                print "No fixtures found."
        else:
            if verbosity > 0:
                print "Installed %d object(s) from %d fixture(s)" % (object_count, fixture_count)

        # Close the DB connection. This is required as a workaround for an
        # edge case in MySQL: if the same connection is used to
        # create tables, load data, and query, the query can return
        # incorrect results. See Django #7572, MySQL #37735.
        if commit:
            connection.close()

Example 11

Project: myks-gallery
Source File: views.py
View license
def serve_private_media(request, path):
    """Serve a private media file, using the webserver's "sendfile" when possible.

    Typical use: a model tracks files and exposes the absolute path of
    each one; a view enforces permissions, then delegates here::

        @permission_required('documents.can_download')
        def download_document(request, document_id):
            path = Document.objects.get(pk=document_id).get_file_path()
            return serve_private_media(request, path)

    When ``DEBUG`` is ``False`` and ``settings.GALLERY_SENDFILE_HEADER`` is
    set, only a header is emitted and the webserver is expected to stream
    the file itself.  Use ``'X-Accel-Redirect'`` for nginx and
    ``'X-SendFile'`` for Apache with mod_xsendfile.  Otherwise the file
    contents are streamed from Python, like Django's static serve view.

    ``path`` must be an absolute path.  ``settings.GALLERY_SENDFILE_ROOT``,
    if set, is stripped from the beginning of the path to build the
    header's value, for webservers whose configuration expects a relative
    path.
    """
    if not os.path.exists(path):
        # Don't reveal the file name on the filesystem.
        raise Http404("Requested file doesn't exist.")

    # begin copy-paste from django.views.static.serve
    statobj = os.stat(path)
    guessed_type, encoding = mimetypes.guess_type(path)
    content_type = guessed_type or 'application/octet-stream'
    if not was_modified_since(request.META.get('HTTP_IF_MODIFIED_SINCE'),
                              statobj.st_mtime, statobj.st_size):   # pragma: no cover
        return HttpResponseNotModified()
    # pause copy-paste from django.views.static.serve

    sendfile_header = getattr(settings, 'GALLERY_SENDFILE_HEADER', '')
    sendfile_root = getattr(settings, 'GALLERY_SENDFILE_ROOT', '')

    if sendfile_header and not settings.DEBUG:
        # Delegate the actual transfer to the webserver via the header.
        if sendfile_root:
            if not path.startswith(sendfile_root):
                raise ValueError("Requested file isn't under GALLERY_SENDFILE_ROOT.")
            path = path[len(sendfile_root):]
        response = HttpResponse('', content_type=content_type)
        response[sendfile_header] = path.encode(sys.getfilesystemencoding())
    else:
        # DEBUG mode, or no sendfile support: stream the file from Python.
        response = StreamingHttpResponse(open(path, 'rb'), content_type=content_type)

    # resume copy-paste from django.views.static.serve
    response["Last-Modified"] = http_date(statobj.st_mtime)
    if stat.S_ISREG(statobj.st_mode):                       # pragma: no cover
        response["Content-Length"] = statobj.st_size
    if encoding:                                            # pragma: no cover
        response["Content-Encoding"] = encoding
    # end copy-paste from django.views.static.serve

    return response

Example 12

Project: rapidpro
Source File: tests.py
View license
    def setUp(self):
        """Build the fixture shared by these tests.

        Creates a superuser, one user per org role (viewer, editor, admin,
        surveyor) plus a non-org user, a four-level admin boundary tree for
        Rwanda, a "Temba" org with a 1000-credit welcome topup, and a
        single Android channel.  May also flip ``settings.DEBUG`` on when
        the test runner is very verbose, and clears the cache.
        """

        # if we are super verbose, turn on debug for sql queries
        if self.get_verbosity() > 2:
            settings.DEBUG = True

        self.clear_cache()

        self.superuser = User.objects.create_superuser(username="super", email="[email protected]", password="super")

        # create different user types
        self.non_org_user = self.create_user("NonOrg")
        self.user = self.create_user("User")
        self.editor = self.create_user("Editor")
        self.admin = self.create_user("Administrator")
        self.surveyor = self.create_user("Surveyor")

        # setup admin boundaries for Rwanda
        # (country -> states -> districts -> wards, keyed by OSM id)
        self.country = AdminBoundary.objects.create(osm_id='171496', name='Rwanda', level=0)
        self.state1 = AdminBoundary.objects.create(osm_id='1708283', name='Kigali City', level=1, parent=self.country)
        self.state2 = AdminBoundary.objects.create(osm_id='171591', name='Eastern Province', level=1, parent=self.country)
        self.district1 = AdminBoundary.objects.create(osm_id='1711131', name='Gatsibo', level=2, parent=self.state2)
        self.district2 = AdminBoundary.objects.create(osm_id='1711163', name='Kayônza', level=2, parent=self.state2)
        self.district3 = AdminBoundary.objects.create(osm_id='3963734', name='Nyarugenge', level=2, parent=self.state1)
        self.district4 = AdminBoundary.objects.create(osm_id='1711142', name='Rwamagana', level=2, parent=self.state2)
        self.ward1 = AdminBoundary.objects.create(osm_id='171113181', name='Kageyo', level=3, parent=self.district1)
        self.ward2 = AdminBoundary.objects.create(osm_id='171116381', name='Kabare', level=3, parent=self.district2)
        self.ward3 = AdminBoundary.objects.create(osm_id='171114281', name='Bukure', level=3, parent=self.district4)

        # the org every test user belongs to
        self.org = Org.objects.create(name="Temba", timezone="Africa/Kigali", country=self.country, brand=settings.DEFAULT_BRAND,
                                      created_by=self.user, modified_by=self.user)

        self.org.initialize(topup_size=1000)

        # add users to the org, one per role
        self.user.set_org(self.org)
        self.org.viewers.add(self.user)

        self.editor.set_org(self.org)
        self.org.editors.add(self.editor)

        self.admin.set_org(self.org)
        self.org.administrators.add(self.admin)

        self.surveyor.set_org(self.org)
        self.org.surveyors.add(self.surveyor)

        self.superuser.set_org(self.org)

        # welcome topup with 1000 credits
        self.welcome_topup = self.org.topups.all()[0]

        # a single Android channel
        self.channel = Channel.create(self.org, self.user, 'RW', 'A', name="Test Channel", address="+250785551212",
                                      device="Nexus 5X", secret="12345", gcm_id="123")

        # reset our simulation to False
        Contact.set_simulation(False)

Example 13

Project: django-twilio
Source File: decorators.py
View license
def twilio_view(f):
    """
    Decorator providing several shortcuts for writing Twilio webhook views.

        - Only requests genuinely sent by Twilio are passed through,
          protecting you from forged requests (signature validation).

        - The view is made exempt from CSRF checks via Django's
          @csrf_exempt decorator, which any view accepting POST requests
          from outside the local domain (eg: Twilio's servers) needs.

        - The blacklist is enforced: requests from blacklisted ``Caller``s
          are rejected.

        - The view may return raw TwiML (a string or bytes) or any
          ``twilio.Verb`` object instead of building an ``HttpResponse``
          manually; both are wrapped in an XML response automatically.

          .. note::
            The forgery protection checks ONLY happen if ``settings.DEBUG =
            False`` (aka, your site is in production).

    Usage::

        from twilio import twiml

        @twilio_view
        def my_view(request):
            r = twiml.Response()
            r.message('Thanks for the SMS message!')
            return r
    """
    @csrf_exempt
    @wraps(f)
    def decorator(request_or_self, *args, **kwargs):

        # Function-based views receive the request first; class-based
        # views receive self first with the request as the first arg.
        if isinstance(request_or_self, HttpRequest):
            request = request_or_self
        else:
            assert len(args) >= 1
            request = args[0]

        # Turn off Twilio authentication when explicitly requested, or
        # in debug mode. Otherwise things do not work properly. For
        # more information, see the docs.
        use_forgery_protection = getattr(
            settings,
            'DJANGO_TWILIO_FORGERY_PROTECTION',
            not settings.DEBUG,
        )
        if use_forgery_protection:

            if request.method not in ['GET', 'POST']:
                return HttpResponseNotAllowed(request.method)

            # Forgery check
            try:
                validator = RequestValidator(TWILIO_AUTH_TOKEN)
                url = request.build_absolute_uri()
                signature = request.META['HTTP_X_TWILIO_SIGNATURE']
            except (AttributeError, KeyError):
                return HttpResponseForbidden()

            # Method is guaranteed to be GET or POST at this point.
            params = request.POST if request.method == 'POST' else request.GET
            if not validator.validate(url, params, signature):
                return HttpResponseForbidden()

        # Blacklist check, on by default.
        if getattr(settings, 'DJANGO_TWILIO_BLACKLIST_CHECK', True):
            blacklisted_resp = get_blacklisted_response(request)
            if blacklisted_resp:
                return blacklisted_resp

        response = f(request_or_self, *args, **kwargs)

        # Wrap TwiML-ish return values in an XML HttpResponse.
        if isinstance(response, Verb):
            return HttpResponse(str(response), content_type='application/xml')
        if isinstance(response, (text_type, bytes)):
            return HttpResponse(response, content_type='application/xml')
        return response
    return decorator

Example 14

Project: reviewboard
Source File: managers.py
View license
    def _transition_hashes(self, cursor, hash_field_name, diff_hashes):
        """Transitions FileDiff-associated hashes to RawFileDiffData.

        This queries all FileDiffs and RawFileDiffData entries referencing
        the given list of diff hashes, and updates the FileDiffs to point
        to those instead of the formerly-associated LegacyFileDiffDatas.

        Args:
            cursor:
                A raw database cursor, used for the vendor-specific bulk
                UPDATE statements.
            hash_field_name:
                Base name of the hash field being migrated; the legacy
                field is ``legacy_<hash_field_name>`` and the new one is
                ``raw_<hash_field_name>``.
            diff_hashes:
                The diff hash values whose FileDiffs should be repointed.
        """
        from reviewboard.diffviewer.models import RawFileDiffData

        legacy_hash_field_name = 'legacy_%s' % hash_field_name

        # Since this is a pretty complex operation, we're going to sanity-check
        # results on DEBUG setups, to help catch issues that might come up as
        # this code changes.
        if settings.DEBUG:
            # Snapshot pk -> legacy hash pk so the post-update state can be
            # verified against it at the bottom of this method.
            old_filediff_info = dict(
                (filediff.pk, getattr(filediff, legacy_hash_field_name).pk)
                for filediff in self.filter(**{
                    legacy_hash_field_name + '__in': diff_hashes,
                })
            )
        else:
            old_filediff_info = None

        # If the database supports joins on updates, then we can craft
        # a query that will massively speed up the diff transition time.
        # Otherwise, we need to fall back on doing a select and then an
        # update per result.
        #
        # The queries are different between databases (yay standards), so
        # we can't be smart and do this in a generic way. We have to check
        # the database types.
        if connection.vendor == 'mysql':
            cursor.execute(
                'UPDATE %(filediff_table)s'
                '  INNER JOIN %(raw_fdd_table)s raw_fdd'
                '    ON raw_fdd.binary_hash = '
                '       %(filediff_table)s.%(hash_field_name)s_id'
                '  SET'
                '    raw_%(hash_field_name)s_id = raw_fdd.id,'
                '    %(hash_field_name)s_id = NULL'
                '  WHERE raw_fdd.binary_hash IN (%(diff_hashes)s)'
                % {
                    'filediff_table': self.model._meta.db_table,
                    'raw_fdd_table': RawFileDiffData._meta.db_table,
                    'hash_field_name': hash_field_name,
                    'diff_hashes': ','.join(
                        "'%s'" % diff_hash
                        for diff_hash in diff_hashes
                    ),
                })
        elif connection.vendor == 'postgresql':
            cursor.execute(
                'UPDATE %(filediff_table)s'
                '  SET'
                '    raw_%(hash_field_name)s_id = raw_fdd.id,'
                '    %(hash_field_name)s_id = NULL'
                '  FROM %(raw_fdd_table)s raw_fdd'
                '  WHERE'
                '    raw_fdd.binary_hash IN (%(diff_hashes)s) AND'
                '    raw_fdd.binary_hash = '
                '        %(hash_field_name)s_id'
                % {
                    'filediff_table': self.model._meta.db_table,
                    'raw_fdd_table': RawFileDiffData._meta.db_table,
                    'hash_field_name': hash_field_name,
                    'diff_hashes': ','.join(
                        "'%s'" % diff_hash
                        for diff_hash in diff_hashes
                    ),
                })
        else:
            # Fallback for databases without join-on-update support: one
            # UPDATE per matching RawFileDiffData row.
            raw_fdds = RawFileDiffData.objects.filter(
                binary_hash__in=diff_hashes).only('pk', 'binary_hash')

            for raw_fdd in raw_fdds:
                self.filter(**{
                    legacy_hash_field_name: raw_fdd.binary_hash
                }).update(**{
                    hash_field_name: raw_fdd.pk,
                    legacy_hash_field_name: None
                })

        if settings.DEBUG:
            # NOTE: relies on settings.DEBUG not having changed since the
            # snapshot above; otherwise old_filediff_info would be None here.
            new_filediff_info = dict(
                (filediff.pk, getattr(filediff, hash_field_name).binary_hash)
                for filediff in self.filter(pk__in=old_filediff_info.keys())
            )

            assert old_filediff_info == new_filediff_info

Example 15

Project: reviewboard
Source File: __init__.py
View license
def initialize():
    """Begin initialization of Review Board.

    This sets up the logging, generates cache serial numbers, loads extensions,
    and sets up other aspects of Review Board. Once it has finished, it will
    fire the :py:data:`reviewboard.signals.initializing` signal.

    This must be called at some point before most features will work, but it
    will be called automatically in a standard install. If you are writing
    an extension or management command, you do not need to call this yourself.
    """
    # Imports are deferred into the function body so that merely importing
    # this module does not trigger settings/database setup.
    import logging
    import os

    import settings_local

    # Set RBSITE_PYTHONPATH to the path we need for any RB-bundled
    # scripts we may call.
    #
    # NOTE(review): the key is a bytes literal, implying Python 2; on
    # Python 3, os.environ requires str keys -- confirm before porting.
    os.environ[b'RBSITE_PYTHONPATH'] = \
        os.path.dirname(settings_local.__file__)

    from Crypto import Random
    from django.conf import settings
    from django.db import DatabaseError
    from djblets import log
    from djblets.cache.serials import generate_ajax_serial

    from reviewboard import signals
    from reviewboard.admin.siteconfig import load_site_config
    from reviewboard.extensions.base import get_extension_manager

    # This overrides a default django templatetag (url), and we want to make
    # sure it will always get loaded in every python instance.
    import reviewboard.site.templatetags

    # Most of the setup below is skipped under the test runner, which
    # manages logging and extension loading itself.
    is_running_test = getattr(settings, 'RUNNING_TEST', False)

    if not is_running_test:
        # Force PyCrypto to re-initialize the random number generator,
        # so a forked worker process doesn't share its parent's RNG state.
        Random.atfork()

        # Set up logging.
        log.init_logging()

    # Apply the stored site configuration on top of the Django settings.
    load_site_config()

    if not is_running_test:
        if settings.DEBUG:
            logging.debug("Log file for Review Board v%s (PID %s)" %
                          (get_version_string(), os.getpid()))

        # Generate the AJAX serial, used for AJAX request caching.
        generate_ajax_serial()

        # Store the AJAX serial as a template serial, so we have a reference
        # to the real serial last modified timestamp of our templates. This
        # is useful since the extension manager will be modifying AJAX_SERIAL
        # to prevent stale caches for templates using hooks. Not all templates
        # use hooks, and may want to base cache keys off TEMPLATE_SERIAL
        # instead.
        #
        # We only want to do this once, so we don't end up replacing it
        # later with a modified AJAX_SERIAL later.
        if not getattr(settings, 'TEMPLATE_SERIAL', None):
            settings.TEMPLATE_SERIAL = settings.AJAX_SERIAL

        # Load all extensions
        try:
            get_extension_manager().load()
        except DatabaseError:
            # This database is from a time before extensions, so don't attempt
            # to load any extensions yet.
            pass

    # Notify listeners (apps, extensions) that initialization has finished.
    signals.initializing.send(sender=None)

Example 16

Project: classic.rhizome.org
Source File: parser.py
View license
def render_inline(inline):
    """
    Replace inline markup with template markup that matches the
    appropriate app and model.

    ``inline`` is a parsed <inline> tag with a required 'type' attribute
    of the form 'app.model', plus either an 'ids' attribute (comma-separated
    pks) or an 'id' attribute (single pk), and an optional 'class' attribute.

    Returns a dict with 'template' (candidate template names) and 'context'
    keys, or '' when the markup is invalid and DEBUG is off. When DEBUG is
    on, invalid markup raises instead so template authors see the error.
    """

    # Look for inline type, 'app.model'
    try:
        app_label, model_name = inline['type'].split('.')
    except Exception:
        # Missing 'type' attribute, or a value not in 'app.model' form.
        if settings.DEBUG:
            raise TemplateSyntaxError(
                "Couldn't find the attribute 'type' in the <inline> tag.")
        else:
            return ''

    # Look for content type
    try:
        content_type = ContentType.objects.get(app_label=app_label,
                                               model=model_name)
        model = content_type.model_class()
    except ContentType.DoesNotExist:
        if settings.DEBUG:
            raise TemplateSyntaxError("Inline ContentType not found.")
        else:
            return ''

    # Check for an optional inline class attribute.
    try:
        inline_class = smart_unicode(inline['class'])
    except Exception:
        inline_class = ''

    try:
        try:
            # Multi-object form: 'ids' is a comma-separated list of pks.
            # in_bulk() returns a dict, so re-order to match the 'ids' order.
            id_list = [int(i) for i in inline['ids'].split(',')]
            obj_list = model.objects.in_bulk(id_list)
            obj_list = list(obj_list[int(i)] for i in id_list)
            context = { 'object_list': obj_list, 'class': inline_class }
        except ValueError:
            if settings.DEBUG:
                raise ValueError(
                    "The <inline> ids attribute is missing or invalid.")
            else:
                return ''
    except KeyError:
        # No 'ids' attribute -- fall back to the single-object 'id' form.
        try:
            obj = model.objects.get(pk=inline['id'])
            context = { 'content_type':"%s.%s" % (app_label, model_name), 'object': obj, 'class': inline_class, 'settings': settings }
        except model.DoesNotExist:
            if settings.DEBUG:
                raise model.DoesNotExist(
                    "%s with pk of '%s' does not exist"
                    % (model_name, inline['id']))
            else:
                return ''
        except Exception:
            if settings.DEBUG:
                raise TemplateSyntaxError(
                    "The <inline> id attribute is missing or invalid.")
            else:
                return ''

    # Prefer a model-specific template, falling back to the generic one.
    template = ["inlines/%s_%s.html" % (app_label, model_name), "inlines/default.html"]
    rendered_inline = {'template':template, 'context':context}

    return rendered_inline

Example 17

View license
    def handle(self, *args, **options):
        replay_obj = Replay.objects.get(pk=options['replay'])

        if settings.DEBUG:
            replay = json.loads(subprocess.check_output('octane-binaries/octane-*-osx {}'.format(replay_obj.file.path), shell=True).decode('utf-8'))
        else:
            replay = json.loads(subprocess.check_output('octane-binaries/octane-*-linux {}'.format(replay_obj.file.url), shell=True).decode('utf-8'))

        Goal.objects.filter(replay=replay_obj).delete()
        Player.objects.filter(replay=replay_obj).delete()
        BoostData.objects.filter(replay=replay_obj).delete()

        assert Goal.objects.filter(replay=replay_obj).count() == 0
        assert Player.objects.filter(replay=replay_obj).count() == 0

        # Assign the metadata to the replay object.
        replay_obj.team_sizes = replay['Metadata']['TeamSize']['Value']
        replay_obj.team_0_score = replay['Metadata'].get('Team0Score', {'Value': 0})['Value']
        replay_obj.team_1_score = replay['Metadata'].get('Team1Score', {'Value': 0})['Value']
        replay_obj.player_name = replay['Metadata']['PlayerName']['Value']
        replay_obj.player_team = replay['Metadata'].get('PrimaryPlayerTeam', {'Value': 0})['Value']
        replay_obj.match_type = replay['Metadata']['MatchType']['Value']
        replay_obj.keyframe_delay = replay['Metadata']['KeyframeDelay']['Value']
        replay_obj.max_channels = replay['Metadata']['MaxChannels']['Value']
        replay_obj.max_replay_size_mb = replay['Metadata']['MaxReplaySizeMB']['Value']
        replay_obj.num_frames = replay['Metadata']['NumFrames']['Value']
        replay_obj.record_fps = replay['Metadata']['RecordFPS']['Value']

        if replay['Metadata'].get('MapName'):
            map_obj, created = Map.objects.get_or_create(
                slug=replay['Metadata']['MapName']['Value'].lower(),
            )
        else:
            map_obj = None

        replay_obj.map = map_obj
        replay_obj.timestamp = timezone.make_aware(
            datetime.fromtimestamp(
                time.mktime(
                    time.strptime(
                        replay['Metadata']['Date']['Value'],
                        '%Y-%m-%d:%H-%M',
                    )
                )
            ),
            timezone.get_current_timezone()
        )

        get_season = Season.objects.filter(
            start_date__lte=replay_obj.timestamp,
        )

        if get_season:
            replay_obj.season = get_season[0]

        if 'ReplayName' in replay['Metadata']:
            replay_obj.title = replay['Metadata']['ReplayName']['Value']

        goals = {
            goal['frame']['Value']: {'PlayerName': goal['PlayerName']['Value'], 'PlayerTeam': goal['PlayerTeam']['Value']}
            for goal in replay['Metadata'].get('Goals', {'Value': []})['Value']
        }

        last_hits = {
            0: None,
            1: None
        }

        actors = {}  # All actors
        player_actors = {}  # XXX: This will be used to make the replay.save() easier.
        goal_actors = {}
        team_data = {}
        actor_positions = {}  # The current position data for all actors. Do we need this?
        player_cars = {}  # Car -> Player actor ID mappings.
        ball_angularvelocity = None  # The current angular velocity of the ball.
        ball_possession = None  # The team currently in possession of the ball.
        cars_frozen = False  # Whether the cars are frozen in place (3.. 2.. 1..)
        shot_data = []  # The locations of the player and the ball when goals were scored.
        unknown_boost_data = {}  # Holding dict for boosts without player data.

        location_data = []  # Used for the location JSON.
        boost_data = {}  # Used for the boost stats.
        boost_objects = []
        heatmap_data = {}
        seconds_mapping = {}  # Frame -> seconds remaining mapping.

        heatmap_json_filename = 'uploads/replay_json_files/{}.json'.format(replay_obj.replay_id)
        location_json_filename = 'uploads/replay_location_json_files/{}.json'.format(replay_obj.replay_id)

        for index, frame in enumerate(replay['Frames']):
            # Add an empty location list for this frame.
            location_data.append([])

            ball_hit = False
            confirmed_ball_hit = False
            ball_spawned = False

            if index in goals:
                # Get the ball position.
                ball_actor_id = list(filter(lambda x: actors[x]['Class'] == 'TAGame.Ball_TA', actors))[0]
                ball_position = actor_positions[ball_actor_id]

                # XXX: Update this to also register the hitter?
                hit_position = last_hits[goals[index]['PlayerTeam']]

                shot_data.append({
                    'player': hit_position,
                    'ball': ball_position
                })

                # Reset the last hits.
                last_hits = {
                    0: None,
                    1: None
                }

            # Handle any new actors.
            for actor_id, value in frame['Spawned'].items():
                actor_id = int(actor_id)

                if actor_id not in actors:
                    actors[actor_id] = value

                if 'Engine.Pawn:PlayerReplicationInfo' in value:
                    player_actor_id = value['Engine.Pawn:PlayerReplicationInfo']['Value'][1]
                    player_cars[player_actor_id] = actor_id

                if value['Class'] == 'TAGame.Ball_TA':
                    ball_spawned = True

                if value['Class'] == 'TAGame.PRI_TA':
                    player_actors[actor_id] = value
                    player_actors[actor_id]['joined'] = index

                if value['Class'] == 'TAGame.Team_Soccar_TA':
                    team_data[actor_id] = value['Name'].replace('Archetypes.Teams.Team', '')

            # Handle any updates to existing actors.
            for actor_id, value in frame['Updated'].items():
                actor_id = int(actor_id)

                # Merge the new properties with the existing.
                if actors[actor_id] != value:
                    actors[actor_id] = {**actors[actor_id], **value}

                    if actor_id in player_actors:
                        player_actors[actor_id] = actors[actor_id]

                if 'Engine.Pawn:PlayerReplicationInfo' in value:
                    player_actor_id = value['Engine.Pawn:PlayerReplicationInfo']['Value'][1]
                    player_cars[player_actor_id] = actor_id

            # Handle removing any destroyed actors.
            for actor_id in frame['Destroyed']:
                del actors[actor_id]

                if actor_id in player_actors:
                    player_actors[actor_id]['left'] = index

            # Loop over actors which have changed in this frame.
            for actor_id, value in {**frame['Spawned'], **frame['Updated']}.items():
                actor_id = int(actor_id)

                # Look for any position data.
                if 'TAGame.RBActor_TA:ReplicatedRBState' in value:
                    actor_positions[actor_id] = value['TAGame.RBActor_TA:ReplicatedRBState']['Value']['Position']

                    # Get the player actor id.
                    real_actor_id = actor_id

                    for player_actor_id, car_actor_id in player_cars.items():
                        if actor_id == car_actor_id:
                            real_actor_id = player_actor_id
                            break

                    if real_actor_id == actor_id:
                        real_actor_id = 'ball'

                    data_dict = {'id': real_actor_id}
                    data_dict['x'], data_dict['y'], data_dict['z'] = value['TAGame.RBActor_TA:ReplicatedRBState']['Value']['Position']
                    data_dict['yaw'], data_dict['pitch'], data_dict['roll'] = value['TAGame.RBActor_TA:ReplicatedRBState']['Value']['Rotation']
                    location_data[index].append(data_dict)

                # If this property exists, the ball has changed possession.
                if 'TAGame.Ball_TA:HitTeamNum' in value:
                    ball_hit = confirmed_ball_hit = True
                    hit_team_num = value['TAGame.Ball_TA:HitTeamNum']['Value']
                    ball_possession = hit_team_num

                    # Clean up the actor positions.
                    actor_positions_copy = actor_positions.copy()
                    for actor_position in actor_positions_copy:
                        found = False

                        for car in player_cars:
                            if actor_position == player_cars[car]:
                                found = True

                        if not found and actor_position != ball_actor_id:
                            del actor_positions[actor_position]

                # Store the boost data for each actor at each frame where it changes.
                if 'TAGame.CarComponent_Boost_TA:ReplicatedBoostAmount' in value:
                    boost_value = value['TAGame.CarComponent_Boost_TA:ReplicatedBoostAmount']['Value']
                    assert 0 <= boost_value <= 255, 'Boost value {} is not in range 0-255.'.format(boost_value)

                    if actor_id not in boost_data:
                        boost_data[actor_id] = {}

                    # Sometimes we have a boost component without a reference to
                    # a car. We don't want to lose that data, so stick it into a
                    # holding dictionary until we can figure out who it belongs to.

                    if 'TAGame.CarComponent_TA:Vehicle' not in actors[actor_id]:
                        if actor_id not in unknown_boost_data:
                            unknown_boost_data[actor_id] = {}

                        unknown_boost_data[actor_id][index] = boost_value
                    else:
                        car_id = actors[actor_id]['TAGame.CarComponent_TA:Vehicle']['Value'][1]

                        # Find out which player this car belongs to.
                        try:
                            player_actor_id = [
                                player_actor_id
                                for player_actor_id, car_actor_id in player_cars.items()
                                if car_actor_id == car_id
                            ][0]

                            if player_actor_id not in boost_data:
                                boost_data[player_actor_id] = {}

                            boost_data[player_actor_id][index] = boost_value

                            # Attach any floating data (if we can).
                            if actor_id in unknown_boost_data:
                                for frame_index, boost_value in unknown_boost_data[actor_id].items():
                                    boost_data[player_actor_id][frame_index] = boost_value

                                del unknown_boost_data[actor_id]

                        except IndexError:
                            pass

                # Store the mapping of frame -> clock time.
                if 'TAGame.GameEvent_Soccar_TA:SecondsRemaining' in value:
                    seconds_mapping[index] = value['TAGame.GameEvent_Soccar_TA:SecondsRemaining']['Value']

                # See if the cars are frozen in place.
                if 'TAGame.GameEvent_TA:ReplicatedGameStateTimeRemaining' in value:
                    if value['TAGame.GameEvent_TA:ReplicatedGameStateTimeRemaining']['Value'] == 3:
                        cars_frozen = True
                    elif value['TAGame.GameEvent_TA:ReplicatedGameStateTimeRemaining']['Value'] == 0:
                        cars_frozen = False

                # Get the camera details.
                if 'TAGame.CameraSettingsActor_TA:ProfileSettings' in value:
                    if actors[actor_id]['Class'] == 'TAGame.CameraSettingsActor_TA':
                        # Define some short variable names to stop the next line
                        # being over 200 characters long.  This block of code
                        # makes new replays have a camera structure which is
                        # similar to that of the old replays - where the camera
                        # settings are directly attached to the player rather
                        # than a CameraActor (which is what the actor in this
                        # current loop is).

                        csa = 'TAGame.CameraSettingsActor_TA:PRI'
                        ps = 'TAGame.CameraSettingsActor_TA:ProfileSettings'
                        cs = 'TAGame.PRI_TA:CameraSettings'

                        player_actor_id = value[csa]['Value'][1]
                        actors[player_actor_id][cs] = value[ps]['Value']

                if 'Engine.GameReplicationInfo:ServerName' in value:
                    replay_obj.server_name = value['Engine.GameReplicationInfo:ServerName']['Value']

                if 'ProjectX.GRI_X:ReplicatedGamePlaylist' in value:
                    replay_obj.playlist = value['ProjectX.GRI_X:ReplicatedGamePlaylist']['Value']

                if 'TAGame.GameEvent_Team_TA:MaxTeamSize' in value:
                    replay_obj.team_sizes = value['TAGame.GameEvent_Team_TA:MaxTeamSize']['Value']

                if 'TAGame.PRI_TA:MatchGoals' in value:
                    # Get the closest goal to this frame.
                    # print(index, actor_id, value['TAGame.PRI_TA:MatchGoals']['Value'])
                    # goal_frame = min(sorted(goals), key=lambda x: abs(x - index))
                    # print(goals[goal_frame])
                    goal_actors[index] = actor_id
                    # del goals[goal_frame]

                if 'Engine.TeamInfo:Score' in value:
                    if index not in goal_actors:
                        goal_actors[index] = actor_id

            # Work out which direction the ball is travelling and if it has
            # changed direction or speed.
            ball = None
            ball_actor_id = None
            for actor_id, value in actors.items():
                if value['Class'] == 'TAGame.Ball_TA':
                    ball_actor_id = actor_id
                    ball = value
                    break

            ball_hit = False

            # Take a look at the ball this frame, has anything changed?
            new_ball_angularvelocity = ball['TAGame.RBActor_TA:ReplicatedRBState']['Value']['AngularVelocity']

            # The ball has *changed direction*, but not necessarily been hit (it
            # may have bounced).

            if ball_angularvelocity != new_ball_angularvelocity:
                ball_hit = True

            ball_angularvelocity = new_ball_angularvelocity

            # Calculate the current distances between cars and the ball.
            # Do we have position data for the ball?
            if ball_hit and not ball_spawned and ball_actor_id in actor_positions:

                # Iterate over the cars to get the players.
                lowest_distance = None
                lowest_distance_car_actor = None

                for player_id, car_actor_id in player_cars.items():
                    # Get the team.
                    team_id = actors[player_id]['Engine.PlayerReplicationInfo:Team']['Value'][1]
                    team_actor = actors[team_id]
                    team = int(team_actor['Name'].replace('Archetypes.Teams.Team', ''))

                    # Make sure this actor is in on the team which is currently
                    # in possession.

                    if team != ball_possession:
                        continue

                    if car_actor_id in actor_positions:
                        actor_distance = distance(actor_positions[car_actor_id], actor_positions[ball_actor_id])

                        if not confirmed_ball_hit:
                            if actor_distance > 350:  # Value taken from the max confirmed distance.
                                continue

                        # Get the player on this team with the lowest distance.
                        if lowest_distance is None or actor_distance < lowest_distance:
                            lowest_distance = actor_distance
                            lowest_distance_car_actor = car_actor_id

                if lowest_distance_car_actor:
                    last_hits[ball_possession] = actor_positions[lowest_distance_car_actor]

            # Generate the heatmap data for this frame.  Get all of the players
            # and the ball.
            if not cars_frozen:
                moveable_actors = [
                    (actor_id, value)
                    for actor_id, value in actors.items()
                    if value['Class'] in ['TAGame.Ball_TA', 'TAGame.PRI_TA', 'TAGame.Car_TA'] and
                    (
                        'TAGame.RBActor_TA:ReplicatedRBState' in value or
                        'Position' in value
                    )
                ]

                for actor_id, value in moveable_actors:
                    if value['Class'] == 'TAGame.Ball_TA':
                        actor_id = 'ball'
                    elif value['Class'] == 'TAGame.Car_TA':
                        if 'Engine.Pawn:PlayerReplicationInfo' not in value:
                            continue

                        actor_id = value['Engine.Pawn:PlayerReplicationInfo']['Value'][1]

                    if 'TAGame.RBActor_TA:ReplicatedRBState' in value:
                        key = '{},{}'.format(
                            value['TAGame.RBActor_TA:ReplicatedRBState']['Value']['Position'][0],
                            value['TAGame.RBActor_TA:ReplicatedRBState']['Value']['Position'][1],
                        )
                    elif 'Position' in value:
                        key = '{},{}'.format(
                            value['Position'][0],
                            value['Position'][1],
                        )

                    if actor_id not in heatmap_data:
                        heatmap_data[actor_id] = {}

                    if key in heatmap_data[actor_id]:
                        heatmap_data[actor_id][key] += 1
                    else:
                        heatmap_data[actor_id][key] = 1

Example 18

Project: rocket-league-replays
Source File: parser.py
View license
def parse_replay_netstream(replay_id):
    from .models import PLATFORMS, BoostData, Goal, Player, Replay

    replay_obj = Replay.objects.get(pk=replay_id)

    try:
        if settings.DEBUG:
            if not os.path.isfile(replay_obj.file.path):
                # Download the file.
                command = 'wget https://media.rocketleaguereplays.com/{} -qO {}'.format(
                    replay_obj.file.name,
                    replay_obj.file.path,
                )

                os.system(command)

            replay = json.loads(subprocess.check_output('octane-binaries/octane-*-osx {}'.format(replay_obj.file.path), shell=True).decode('utf-8'))
        else:
            replay = json.loads(subprocess.check_output('octane-binaries/octane-*-linux {}'.format(replay_obj.file.url), shell=True).decode('utf-8'))
    except subprocess.CalledProcessError:
        # Parsing the file failed.
        replay_obj.processed = False
        replay_obj.save()
        return

    replay_obj, replay = _parse_header(replay_obj, replay)

    goals = {
        goal['frame']['Value']: {'PlayerName': goal['PlayerName']['Value'], 'PlayerTeam': goal['PlayerTeam']['Value']}
        for goal in replay['Metadata'].get('Goals', {'Value': []})['Value']
    }

    last_hits = {
        0: None,
        1: None
    }

    actors = {}  # All actors
    player_actors = {}  # XXX: This will be used to make the replay.save() easier.
    goal_actors = {}
    team_data = {}
    actor_positions = {}  # The current position data for all actors. Do we need this?
    player_cars = {}  # Car -> Player actor ID mappings.
    ball_angularvelocity = None  # The current angular velocity of the ball.
    ball_possession = None  # The team currently in possession of the ball.
    cars_frozen = False  # Whether the cars are frozen in place (3.. 2.. 1..)
    shot_data = []  # The locations of the player and the ball when goals were scored.
    unknown_boost_data = {}  # Holding dict for boosts without player data.
    ball_actor_id = None

    location_data = []  # Used for the location JSON.
    boost_data = {}  # Used for the boost stats.
    boost_objects = []
    heatmap_data = {}
    seconds_mapping = {}  # Frame -> seconds remaining mapping.

    heatmap_json_filename = 'uploads/replay_json_files/{}.json'.format(replay_obj.replay_id)
    location_json_filename = 'uploads/replay_location_json_files/{}.json'.format(replay_obj.replay_id)

    for index, frame in enumerate(replay['Frames']):
        # Add an empty location list for this frame.
        location_data.append([])

        ball_hit = False
        confirmed_ball_hit = False
        ball_spawned = False

        if index in goals:
            # Get the ball position.
            ball_actor_id = list(filter(lambda x: actors[x]['Class'] == 'TAGame.Ball_TA', actors))[0]
            ball_position = actor_positions[ball_actor_id]

            # XXX: Update this to also register the hitter?
            hit_position = last_hits[goals[index]['PlayerTeam']]

            shot_data.append({
                'player': hit_position,
                'ball': ball_position,
                'frame': index
            })

            # Reset the last hits.
            last_hits = {
                0: None,
                1: None
            }

        # Handle any new actors.
        for actor_id, value in frame['Spawned'].items():
            actor_id = int(actor_id)

            if actor_id not in actors:
                actors[actor_id] = value

            if 'Engine.Pawn:PlayerReplicationInfo' in value:
                player_actor_id = value['Engine.Pawn:PlayerReplicationInfo']['Value'][1]
                player_cars[player_actor_id] = actor_id

            if value['Class'] == 'TAGame.Ball_TA':
                ball_spawned = True

            if value['Class'] == 'TAGame.PRI_TA':
                player_actors[actor_id] = value
                player_actors[actor_id]['joined'] = index

            if value['Class'] == 'TAGame.Team_Soccar_TA':
                team_data[actor_id] = value['Name'].replace('Archetypes.Teams.Team', '')

        # Handle any updates to existing actors.
        for actor_id, value in frame['Updated'].items():
            actor_id = int(actor_id)

            if 'Engine.PlayerReplicationInfo:Team' in value and not value['Engine.PlayerReplicationInfo:Team']['Value']['Int']:
                del value['Engine.PlayerReplicationInfo:Team']

            # Merge the new properties with the existing.
            if actors[actor_id] != value:
                actors[actor_id] = {**actors[actor_id], **value}

                if actor_id in player_actors:
                    player_actors[actor_id] = actors[actor_id]

            if 'Engine.Pawn:PlayerReplicationInfo' in value:
                player_actor_id = value['Engine.Pawn:PlayerReplicationInfo']['Value']['Int']
                player_cars[player_actor_id] = actor_id

        # Handle removing any destroyed actors.
        for actor_id in frame['Destroyed']:
            del actors[actor_id]

            if actor_id in player_actors:
                player_actors[actor_id]['left'] = index

        # Loop over actors which have changed in this frame.
        for actor_id, value in {**frame['Spawned'], **frame['Updated']}.items():
            actor_id = int(actor_id)

            # Look for any position data.
            if 'TAGame.RBActor_TA:ReplicatedRBState' in value:
                actor_positions[actor_id] = value['TAGame.RBActor_TA:ReplicatedRBState']['Value']['Position']

                # Get the player actor id.
                real_actor_id = actor_id

                for player_actor_id, car_actor_id in player_cars.items():
                    if actor_id == car_actor_id:
                        real_actor_id = player_actor_id
                        break

                if real_actor_id == actor_id:
                    real_actor_id = 'ball'

                data_dict = {'id': real_actor_id}
                data_dict['x'], data_dict['y'], data_dict['z'] = value['TAGame.RBActor_TA:ReplicatedRBState']['Value']['Position']
                data_dict['yaw'], data_dict['pitch'], data_dict['roll'] = value['TAGame.RBActor_TA:ReplicatedRBState']['Value']['Rotation']
                location_data[index].append(data_dict)

            # If this property exists, the ball has changed possession.
            if 'TAGame.Ball_TA:HitTeamNum' in value:
                ball_hit = confirmed_ball_hit = True
                hit_team_num = value['TAGame.Ball_TA:HitTeamNum']['Value']
                ball_possession = hit_team_num

                # Clean up the actor positions.
                actor_positions_copy = actor_positions.copy()
                for actor_position in actor_positions_copy:
                    found = False

                    for car in player_cars:
                        if actor_position == player_cars[car]:
                            found = True

                    if not found and actor_position != ball_actor_id:
                        del actor_positions[actor_position]

            # Store the boost data for each actor at each frame where it changes.
            if 'TAGame.CarComponent_Boost_TA:ReplicatedBoostAmount' in value:
                boost_value = value['TAGame.CarComponent_Boost_TA:ReplicatedBoostAmount']['Value']
                assert 0 <= boost_value <= 255, 'Boost value {} is not in range 0-255.'.format(boost_value)

                if actor_id not in boost_data:
                    boost_data[actor_id] = {}

                # Sometimes we have a boost component without a reference to
                # a car. We don't want to lose that data, so stick it into a
                # holding dictionary until we can figure out who it belongs to.

                if 'TAGame.CarComponent_TA:Vehicle' not in actors[actor_id]:
                    if actor_id not in unknown_boost_data:
                        unknown_boost_data[actor_id] = {}

                    unknown_boost_data[actor_id][index] = boost_value
                else:
                    car_id = actors[actor_id]['TAGame.CarComponent_TA:Vehicle']['Value']['Int']

                    # Find out which player this car belongs to.
                    try:
                        player_actor_id = [
                            player_actor_id
                            for player_actor_id, car_actor_id in player_cars.items()
                            if car_actor_id == car_id
                        ][0]

                        if player_actor_id not in boost_data:
                            boost_data[player_actor_id] = {}

                        boost_data[player_actor_id][index] = boost_value

                        # Attach any floating data (if we can).
                        if actor_id in unknown_boost_data:
                            for frame_index, boost_value in unknown_boost_data[actor_id].items():
                                boost_data[player_actor_id][frame_index] = boost_value

                            del unknown_boost_data[actor_id]

                    except IndexError:
                        pass

            # Store the mapping of frame -> clock time.
            if 'TAGame.GameEvent_Soccar_TA:SecondsRemaining' in value:
                seconds_mapping[index] = value['TAGame.GameEvent_Soccar_TA:SecondsRemaining']['Value']

            # See if the cars are frozen in place.
            if 'TAGame.GameEvent_TA:ReplicatedGameStateTimeRemaining' in value:
                if value['TAGame.GameEvent_TA:ReplicatedGameStateTimeRemaining']['Value'] == 3:
                    cars_frozen = True
                elif value['TAGame.GameEvent_TA:ReplicatedGameStateTimeRemaining']['Value'] == 0:
                    cars_frozen = False

            # Get the camera details.
            if 'TAGame.CameraSettingsActor_TA:ProfileSettings' in value:
                if actors[actor_id]['Class'] == 'TAGame.CameraSettingsActor_TA':
                    # Define some short variable names to stop the next line
                    # being over 200 characters long.  This block of code
                    # makes new replays have a camera structure which is
                    # similar to that of the old replays - where the camera
                    # settings are directly attached to the player rather
                    # than a CameraActor (which is what the actor in this
                    # current loop is).

                    csa = 'TAGame.CameraSettingsActor_TA:PRI'
                    ps = 'TAGame.CameraSettingsActor_TA:ProfileSettings'
                    cs = 'TAGame.PRI_TA:CameraSettings'

                    if csa in value:
                        player_actor_id = value[csa]['Value']['Int']
                        actors[player_actor_id][cs] = value[ps]['Value']

            if 'Engine.GameReplicationInfo:ServerName' in value:
                replay_obj.server_name = value['Engine.GameReplicationInfo:ServerName']['Value']

            if 'ProjectX.GRI_X:ReplicatedGamePlaylist' in value:
                replay_obj.playlist = value['ProjectX.GRI_X:ReplicatedGamePlaylist']['Value']

            if 'TAGame.GameEvent_Team_TA:MaxTeamSize' in value:
                replay_obj.team_sizes = value['TAGame.GameEvent_Team_TA:MaxTeamSize']['Value']

            if 'TAGame.PRI_TA:MatchGoals' in value:
                # Get the closest goal to this frame.
                goal_actors[index] = actor_id

            if 'Engine.TeamInfo:Score' in value:
                if index not in goal_actors:
                    goal_actors[index] = actor_id

        # Work out which direction the ball is travelling and if it has
        # changed direction or speed.
        ball = None
        ball_actor_id = None
        for actor_id, value in actors.items():
            if value['Class'] == 'TAGame.Ball_TA':
                ball_actor_id = actor_id
                ball = value
                break

        ball_hit = False

        # Take a look at the ball this frame, has anything changed?
        if ball and 'TAGame.RBActor_TA:ReplicatedRBState' in ball:
            new_ball_angularvelocity = ball['TAGame.RBActor_TA:ReplicatedRBState']['Value']['AngularVelocity']

            # The ball has *changed direction*, but not necessarily been hit (it
            # may have bounced).

            if ball_angularvelocity != new_ball_angularvelocity:
                ball_hit = True

            ball_angularvelocity = new_ball_angularvelocity

            # Calculate the current distances between cars and the ball.
            # Do we have position data for the ball?
            if ball_hit and not ball_spawned and ball_actor_id in actor_positions:

                # Iterate over the cars to get the players.
                lowest_distance = None
                lowest_distance_car_actor = None

                for player_id, car_actor_id in player_cars.items():
                    # Get the team.
                    if (
                        player_id in actors and
                        'Engine.PlayerReplicationInfo:Team' in actors[player_id] and
                        actors[player_id]['Engine.PlayerReplicationInfo:Team']['Value']['Int']
                    ):
                        team_id = actors[player_id]['Engine.PlayerReplicationInfo:Team']['Value']['Int']

                        try:
                            team_actor = actors[team_id]
                            team = int(team_actor['Name'].replace('Archetypes.Teams.Team', ''))
                        except KeyError:
                            team = -1
                    else:
                        team = -1

                    # Make sure this actor is in on the team which is currently
                    # in possession.

                    if team != ball_possession:
                        continue

                    if car_actor_id in actor_positions:
                        actor_distance = distance(actor_positions[car_actor_id], actor_positions[ball_actor_id])

                        if not confirmed_ball_hit:
                            if actor_distance > 350:  # Value taken from the max confirmed distance.
                                continue

                        # Get the player on this team with the lowest distance.
                        if lowest_distance is None or actor_distance < lowest_distance:
                            lowest_distance = actor_distance
                            lowest_distance_car_actor = car_actor_id

                if lowest_distance_car_actor:
                    last_hits[ball_possession] = actor_positions[lowest_distance_car_actor]

        # Generate the heatmap data for this frame.  Get all of the players
        # and the ball.
        if not cars_frozen:
            moveable_actors = [
                (actor_id, value)
                for actor_id, value in actors.items()
                if value['Class'] in ['TAGame.Ball_TA', 'TAGame.PRI_TA', 'TAGame.Car_TA'] and
                (
                    'TAGame.RBActor_TA:ReplicatedRBState' in value or
                    'Position' in value
                )
            ]

            for actor_id, value in moveable_actors:
                if value['Class'] == 'TAGame.Ball_TA':
                    actor_id = 'ball'
                elif value['Class'] == 'TAGame.Car_TA':
                    if 'Engine.Pawn:PlayerReplicationInfo' not in value:
                        continue

                    actor_id = value['Engine.Pawn:PlayerReplicationInfo']['Value']['Int']

                if 'TAGame.RBActor_TA:ReplicatedRBState' in value:
                    key = '{},{}'.format(
                        value['TAGame.RBActor_TA:ReplicatedRBState']['Value']['Position'][0],
                        value['TAGame.RBActor_TA:ReplicatedRBState']['Value']['Position'][1],
                    )
                elif 'Position' in value:
                    key = '{},{}'.format(
                        value['Position'][0],
                        value['Position'][1],
                    )

                if actor_id not in heatmap_data:
                    heatmap_data[actor_id] = {}

                if key in heatmap_data[actor_id]:
                    heatmap_data[actor_id][key] += 1
                else:
                    heatmap_data[actor_id][key] = 1

Example 19

Project: rockstor-core
Source File: api_wrapper.py
View license
    def api_call(self, url, data=None, calltype='get', headers=None, save_error=True):
        """Make an authenticated request against the Rockstor REST API.

        :param url: endpoint path, appended to ``<base_url>/api/``.
        :param data: request payload; JSON-encoded when the caller supplies a
            ``content-type: application/json`` header.
        :param calltype: name of the ``requests`` method to use
            ('get', 'post', 'put', ...).
        :param headers: optional extra headers, merged with the auth header
            (mutated in place, as before).
        :param save_error: when True and ``settings.DEBUG`` is True, save
            non-200 response bodies to ``/tmp/err-<time>.html``.
        :returns: the decoded JSON response body, or ``{}`` if not JSON.
        :raises RockStorAPIException: for 404s and API-reported errors.
        """
        # Keep the un-prefixed endpoint so a retry below does not
        # double-apply the '<base>/api/' prefix (bug in the original,
        # which recursed with the already-expanded URL).
        endpoint = url

        # Refresh the OAuth token when missing or expired.
        if (self.access_token is None or
                (time.time() > self.expiration)):
            self.set_token()

        api_auth_header = {'Authorization': 'Bearer ' + self.access_token, }
        call = getattr(requests, calltype)
        url = ('%s/api/%s' % (self.url, url))
        try:
            # NOTE(review): verify=False disables TLS certificate checking;
            # acceptable only for local/appliance use — confirm.
            if headers is not None:
                headers.update(api_auth_header)
                # .get() instead of indexing: a caller supplying headers
                # without 'content-type' previously raised KeyError here.
                if headers.get('content-type') == 'application/json':
                    r = call(url, verify=False, data=json.dumps(data),
                             headers=headers)
                else:
                    r = call(url, verify=False, data=data,
                             headers=headers)
            else:
                r = call(url, verify=False, headers=api_auth_header, data=data)
        except requests.exceptions.ConnectionError:
            print('Error connecting to Rockstor. Is it running?')
            raise

        if r.status_code == 404:
            msg = ('Invalid api end point: %s' % url)
            raise RockStorAPIException(detail=msg)

        if r.status_code != 200:
            try:
                error_d = json.loads(r.text)
                if settings.DEBUG is True and save_error is True:
                    cur_time = str(int(time.time()))
                    err_file = '/tmp/err-%s.html' % cur_time
                    with open(err_file, 'w') as efo:
                        for line in r.text.split('\n'):
                            efo.write('%s\n' % line)
                        print('Error detail is saved at %s' % err_file)
                if 'detail' in error_d:
                    if error_d['detail'] == 'Authentication credentials were not provided.':
                        # Bug fix: these were bare calls (``set_token()`` /
                        # ``api_call(...)``) which raised NameError; they are
                        # methods and must be invoked on self.  Retry once
                        # with a freshly minted token.
                        self.set_token()
                        return self.api_call(endpoint, data=data,
                                             calltype=calltype,
                                             headers=headers,
                                             save_error=save_error)
                    raise RockStorAPIException(detail=error_d['detail'])
            except ValueError as e:
                # ``except ValueError, e`` (py2-only syntax) modernised to
                # the form valid on both Python 2.6+ and 3.
                raise RockStorAPIException(
                    detail='Internal Server Error: %s' % e.__str__())
            r.raise_for_status()

        try:
            ret_val = r.json()
        except ValueError:
            # Non-JSON body (e.g. empty response) — treat as empty result.
            ret_val = {}
        return ret_val

Example 20

Project: django-sql-inspector
Source File: __init__.py
View license
    def emit(self, record):
        """Log-handler hook: profile one captured SQL query.

        Runs ``EXPLAIN`` on the logged query to count the tables joined and
        rows examined, records a summary for every application-level stack
        frame that issued it, and prints a per-query report.  ``record`` is
        a Django DB logging record carrying ``sql``, ``params`` and
        ``duration`` attributes.
        """
        sql_duration = record.duration
        sql_stmt_type = record.sql.split(" ")[0]       # Is this a 'SELECT'? or 'INSERT' or …?
        self.sql_stmt_type_hits[sql_stmt_type] += 1
        sql_params = record.params
        sql_tables = []

        raw_sql = record.sql

        # Turn off SQL logging, otherwise this will cause an infinite loop by
        # calling this function again when it logs the 'explain' query
        settings.DEBUG = False
        cursor = connection.cursor()
        try:
            cursor.execute("EXPLAIN "+raw_sql)
        except Exception as ex:
            if sql_stmt_type != 'SELECT':
                # If this is pre-mysql 5.6, EXPLAIN can only do SELECT, 5.6+ it
                # can explain others. So we can't profile/explain this query.
                # So just pretend that there is 0. This makes "sum(…)" results
                # later sensible
                sql_num_tables = 0
                sql_num_rows = 0
            else:
                # Unexpected error, raise to fail fast
                raise
        else:
            sql_queryplan = cursor.fetchall()
            # MySQL explain column output (index 0-9):
            # id, select_type, table, type, possible_keys, key, key_len, ref, rows, Extra
            # i.e. 'rows' is row[8] and 'Extra' is row[9] below.

            sql_num_tables = len(sql_queryplan)     # one row per table joined

            # row[2] is 'table'; it is None for e.g. UNION RESULT rows.
            sql_tables = [row[2] for row in sql_queryplan if row[2] is not None]

            # If the 'Extra' (row[9]) is 'Impossible WHERE noticed after
            # reading const tables' (or "Impossible WHERE'), that means that MySQL has looked at the
            # SQL and seen that it cannot match (using an index or something).
            # i.e. this query will go very quickly, so pretend it's zero by
            # ignoring that row.
            results_with_rows = [x for x in sql_queryplan if x[9] not in
                ('Impossible WHERE noticed after reading const tables',
                 'Impossible WHERE',
                 'Select tables optimized away' ) ]
            # Remove UNION rows as well because they have no data
            # (their 'rows' column, x[8], is NULL).
            results_with_rows = [x for x in results_with_rows if not (x[8] is None and x[1] in
                ('UNION RESULT',) ) ]

            # If this assert fails, either there's a bug, or it's not excluding useless rows that it should.
            assert all(x[8] is not None for x in results_with_rows), repr(results_with_rows)
            sql_num_rows = sum(row[8] for row in results_with_rows)
        finally:
            # If there's an error, be sure to turn on DEBUG again, otherwise
            # the first error will permanently turn off the logging
            settings.DEBUG = True

        print ""    # blank line between 'queries'

        this_stack = []
        for frame_details in inspect.stack():
            filename = frame_details[1]

            # We only want to include /our/ files, otherwise there is loads of django stuff.
            # But we want to exclude /this/ file (measure_sql_performance.py)
            if self.is_file_to_be_included(filename):
                lineno = frame_details[2]

                # At each SQL call, at each frame on the stack, we store:
                # filename - filename of where we are
                # lineno - linenumber of the where we are
                # func_name - name of the function we're in
                # func_start_lineno - the line number of where this function starts (in filename obv.)
                # raw_sql - the SQL query with parameter placeholders
                # sql_params - the parameters for the SQL query
                # sql_duration - how many seconds this query took
                # sql_stmt_type - first word of SQL (e.g. 'SELECT'/'UPDATE')
                # sql_num_tables - How many tables were used
                stack_summary = {
                    'filename':filename, 'lineno':lineno,
                    'func_start_lineno':frame_details[0].f_code.co_firstlineno, 'func_name':frame_details[3],
                    'raw_sql': raw_sql, 'sql_duration': sql_duration, 'sql_stmt_type': sql_stmt_type, 'sql_params': sql_params,
                    'sql_num_tables': sql_num_tables, 'sql_num_rows': sql_num_rows,
                    }
                self.filehits.append(stack_summary)
                this_stack.append(stack_summary)
                print "{func_name:>30} @ {filename:>40}:L{lineno:<5} (function starts at L{func_start_lineno:<5})".format(**stack_summary)

        print "Query used {sql_num_tables:>5} tables in {sql_duration:8.2f} sec and needed to look at {sql_num_rows:>5} rows".format(sql_duration=sql_duration, sql_num_rows=sql_num_rows, sql_num_tables=sql_num_tables)
        print "Query used {sql_num_tables:>5} tables: {tables}".format(sql_num_tables=sql_num_tables, tables=", ".join(sql_tables))
        print raw_sql

        # Whole-query record (the stack summaries above are per-frame).
        self.queries.append({
            'raw_sql': raw_sql, 'params': sql_params, 'duration': sql_duration, 'num_tables': sql_num_tables, 'num_rows': sql_num_rows,
            'calling_stack': this_stack,
        })

Example 21

Project: readthedocs.org
Source File: middleware.py
View license
    def process_request(self, request):
        """Route requests arriving on project subdomains or CNAMEd domains.

        When the host identifies a project (by subdomain, a ``Domain``
        record, an ``X-RTD-Slug`` header, or a cached CNAME lookup), sets
        ``request.slug`` and switches ``request.urlconf`` to the subdomain
        URLconf.  Returns ``None`` to continue normal processing, raises
        ``Http404`` for unresolvable CNAMEs, and returns a 400 for abusive
        ``www.<project>.<domain>`` hostnames.
        """
        if not getattr(settings, 'USE_SUBDOMAIN', False):
            return None

        full_host = host = request.get_host().lower()
        path = request.get_full_path()
        log_kwargs = dict(host=host, path=path)
        public_domain = getattr(settings, 'PUBLIC_DOMAIN', None)
        production_domain = getattr(
            settings,
            'PRODUCTION_DOMAIN',
            'readthedocs.org'
        )

        if public_domain is None:
            public_domain = production_domain
        # Strip any port before splitting into domain parts (full_host
        # keeps the port for the "local dev" check below).
        if ':' in host:
            host = host.split(':')[0]
        domain_parts = host.split('.')

        # Serve subdomains - but don't depend on the production domain only having 2 parts
        if len(domain_parts) == len(public_domain.split('.')) + 1:
            subdomain = domain_parts[0]
            is_www = subdomain.lower() == 'www'
            if not is_www and (
                # Support ports during local dev
                public_domain in host or public_domain in full_host
            ):
                request.subdomain = True
                request.slug = subdomain
                request.urlconf = SUBDOMAIN_URLCONF
                return None

        # Serve CNAMEs
        if (public_domain not in host and
                production_domain not in host and
                'localhost' not in host and
                'testserver' not in host):
            request.cname = True
            domains = Domain.objects.filter(domain=host)
            if domains.count():
                for domain in domains:
                    if domain.domain == host:
                        request.slug = domain.project.slug
                        request.urlconf = SUBDOMAIN_URLCONF
                        request.domain_object = True
                        log.debug(LOG_TEMPLATE.format(
                            msg='Domain Object Detected: %s' % domain.domain,
                            **log_kwargs))
                        break
            if (not hasattr(request, 'domain_object') and
                    'HTTP_X_RTD_SLUG' in request.META):
                request.slug = request.META['HTTP_X_RTD_SLUG'].lower()
                request.urlconf = SUBDOMAIN_URLCONF
                request.rtdheader = True
                log.debug(LOG_TEMPLATE.format(
                    msg='X-RTD-Slug header detected: %s' % request.slug,
                    **log_kwargs))
            # Try header first, then DNS
            elif not hasattr(request, 'domain_object'):
                try:
                    slug = cache.get(host)
                    if not slug:
                        slug = cname_to_slug(host)
                        cache.set(host, slug, 60 * 60)
                        # Cache the slug -> host mapping permanently.
                        log.debug(LOG_TEMPLATE.format(
                            msg='CNAME cached: %s->%s' % (slug, host),
                            **log_kwargs))
                    request.slug = slug
                    request.urlconf = SUBDOMAIN_URLCONF
                    log.debug(LOG_TEMPLATE.format(
                        msg='CNAME detected: %s' % request.slug,
                        **log_kwargs))
                # NOTE(review): bare except — any cache or DNS failure
                # (not just lookup misses) becomes a 404; consider narrowing.
                except:
                    # Some crazy person is CNAMEing to us. 404.
                    log.exception(LOG_TEMPLATE.format(msg='CNAME 404', **log_kwargs))
                    raise Http404(_('Invalid hostname'))
        # Google was finding crazy www.blah.readthedocs.org domains.
        # Block these explicitly after trying CNAME logic.
        if len(domain_parts) > 3 and not settings.DEBUG:
            # Stop www.fooo.readthedocs.org
            if domain_parts[0] == 'www':
                log.debug(LOG_TEMPLATE.format(msg='404ing long domain', **log_kwargs))
                return HttpResponseBadRequest(_('Invalid hostname'))
            log.debug(LOG_TEMPLATE.format(msg='Allowing long domain name', **log_kwargs))
            # raise Http404(_('Invalid hostname'))
        # Normal request.
        return None

Example 22

Project: readthedocs.org
Source File: lib.py
View license
def search_file(request, query, project_slug=None, version_slug=LATEST, taxonomy=None):
    """Search index for files matching query

    Raises a 404 error on missing project

    :param request: request instance
    :param query: string to query for
    :param project_slug: :py:class:`Project` slug
    :param version_slug: slug for :py:class:`Project` version slug
    :param taxonomy: taxonomy for search
    :returns: ``PageIndex`` search results, or ``None`` when the requested
        project does not exist for this user
    """
    kwargs = {}
    # Elasticsearch query body: phrase matches on title/headers/content
    # with descending boosts, plus facets and highlighting.
    body = {
        "query": {
            "bool": {
                "should": [
                    {"match_phrase": {
                        "title": {
                            "query": query,
                            "boost": 10,
                            "slop": 2,
                        },
                    }},
                    {"match_phrase": {
                        "headers": {
                            "query": query,
                            "boost": 5,
                            "slop": 3,
                        },
                    }},
                    {"match_phrase": {
                        "content": {
                            "query": query,
                            "slop": 5,
                        },
                    }},
                ]
            }
        },
        "facets": {
            "taxonomy": {
                "terms": {"field": "taxonomy"},
            },
            "project": {
                "terms": {"field": "project"},
            },
            "version": {
                "terms": {"field": "version"},
            },
        },
        "highlight": {
            "fields": {
                "title": {},
                "headers": {},
                "content": {},
            }
        },
        "fields": ["title", "project", "version", "path"],
        "size": 50  # TODO: Support pagination.
    }

    # Narrow the search (and the facet counts) with an "and" filter built
    # from whichever of project/version/taxonomy were supplied.
    if project_slug or version_slug or taxonomy:
        final_filter = {"and": []}

        if project_slug:
            try:
                project = (Project.objects
                           .api(request.user)
                           .get(slug=project_slug))
                project_slugs = [project.slug]
                # We need to use the obtuse syntax here because the manager
                # doesn't pass along to ProjectRelationships
                project_slugs.extend(s.slug for s
                                     in Project.objects.public(
                                         request.user).filter(
                                         superprojects__parent__slug=project.slug))
                final_filter['and'].append({"terms": {"project": project_slugs}})

                # Add routing to optimize search by hitting the right shard.
                # This purposely doesn't apply routing if the project has more
                # than one parent project.
                if project.superprojects.exists():
                    if project.superprojects.count() == 1:
                        kwargs['routing'] = (project.superprojects.first()
                                             .parent.slug)
                else:
                    kwargs['routing'] = project_slug
            except Project.DoesNotExist:
                return None

        if version_slug:
            final_filter['and'].append({'term': {'version': version_slug}})

        if taxonomy:
            final_filter['and'].append({'term': {'taxonomy': taxonomy}})

        body['filter'] = final_filter
        body['facets']['project']['facet_filter'] = final_filter
        body['facets']['version']['facet_filter'] = final_filter
        body['facets']['taxonomy']['facet_filter'] = final_filter

    # The signal lets listeners mutate the body in place; the DEBUG dumps
    # show it before and after (py2-style print statements).
    if settings.DEBUG:
        print "Before Signal"
        pprint(body)
    before_file_search.send(request=request, sender=PageIndex, body=body)
    if settings.DEBUG:
        print "After Signal"
        pprint(body)

    return PageIndex().search(body, **kwargs)

Example 23

Project: readthedocs.org
Source File: views.py
View license
def elastic_search(request):
    """Global search view backed by Elasticsearch.

    Reads the query and facet filters from ``request.GET``, runs either a
    project search or a file search, normalises pre/post Elasticsearch-1.0
    hit fields, collects facet counts, logs the query, and renders the
    results template.

    :param request: the incoming HTTP request
    :returns: rendered ``search/elastic_search.html`` response
    """
    query = request.GET.get('q')
    # Renamed local (was ``type``) to avoid shadowing the builtin; the GET
    # parameter and template context key remain 'type' for compatibility.
    search_type = request.GET.get('type', 'project')
    # File Facets
    project = request.GET.get('project')
    version = request.GET.get('version', LATEST)
    taxonomy = request.GET.get('taxonomy')
    language = request.GET.get('language')
    results = ""

    facets = {}

    if query:
        if search_type == 'project':
            results = search_lib.search_project(request, query, language=language)
        elif search_type == 'file':
            results = search_lib.search_file(request, query, project_slug=project,
                                             version_slug=version,
                                             taxonomy=taxonomy)

    if results:
        # pre and post 1.0 compat: ES >= 1.0 wraps each field value in a
        # list, so unwrap single values for uniform template access.
        for num, hit in enumerate(results['hits']['hits']):
            for key, val in hit['fields'].items():
                if isinstance(val, list):
                    results['hits']['hits'][num]['fields'][key] = val[0]

        if 'facets' in results:
            for facet_type in ['project', 'version', 'taxonomy', 'language']:
                if facet_type in results['facets']:
                    facets[facet_type] = collections.OrderedDict()
                    for term in results['facets'][facet_type]['terms']:
                        facets[facet_type][term['term']] = term['count']

    if settings.DEBUG:
        # Bug fix: ``print pprint(x)`` printed the data and then a spurious
        # ``None`` line, because pprint() prints itself and returns None.
        pprint(results)
        pprint(facets)

    if query:
        user = ''
        if request.user.is_authenticated():
            user = request.user
        log.info(LOG_TEMPLATE.format(
            user=user,
            project=project or '',
            type=search_type or '',
            version=version or '',
            language=language or '',
            msg=query or '',
        ))

    return render_to_response(
        'search/elastic_search.html',
        {
            # Input
            'query': query,
            'type': search_type,
            'project': project,
            'version': version,
            'taxonomy': taxonomy,
            'language': language,
            # Results
            'results': results,
            'facets': facets,
        },
        context_instance=RequestContext(request),
    )

Example 24

Project: Django--an-app-at-a-time
Source File: sites.py
View license
    def get_urls(self):
        """Assemble the admin site's URLconf.

        Order matters: site-wide views first, then one URL prefix per
        registered model, then the catch-all app_index pattern restricted
        to app labels that actually have registered models.
        """
        from django.conf.urls import url, include
        # Imported lazily: this module is imported from the application's
        # root package, so module-level model imports would be circular
        # (django.contrib.contenttypes.views imports ContentType).
        from django.contrib.contenttypes import views as contenttype_views

        if settings.DEBUG:
            self.check_dependencies()

        def wrap(view, cacheable=False):
            # Route the view through admin_view() while preserving its
            # metadata for introspection.
            def inner(*args, **kwargs):
                return self.admin_view(view, cacheable)(*args, **kwargs)
            return update_wrapper(inner, view)

        # Admin-site-wide views.
        urlpatterns = [
            url(r'^$', wrap(self.index), name='index'),
            url(r'^login/$', self.login, name='login'),
            url(r'^logout/$', wrap(self.logout), name='logout'),
            url(r'^password_change/$',
                wrap(self.password_change, cacheable=True),
                name='password_change'),
            url(r'^password_change/done/$',
                wrap(self.password_change_done, cacheable=True),
                name='password_change_done'),
            url(r'^jsi18n/$',
                wrap(self.i18n_javascript, cacheable=True),
                name='jsi18n'),
            url(r'^r/(?P<content_type_id>\d+)/(?P<object_id>.+)/$',
                wrap(contenttype_views.shortcut),
                name='view_on_site'),
        ]

        # Per-model views; remember which app labels have registered models
        # so the app_index pattern only matches real apps.
        valid_app_labels = []
        for model, model_admin in six.iteritems(self._registry):
            opts = model._meta
            urlpatterns.append(
                url(r'^%s/%s/' % (opts.app_label, opts.model_name),
                    include(model_admin.urls)),
            )
            if opts.app_label not in valid_app_labels:
                valid_app_labels.append(opts.app_label)

        # If there were ModelAdmins registered, allow access to the
        # app_index view for exactly those app labels.
        if valid_app_labels:
            app_index_regex = r'^(?P<app_label>' + '|'.join(valid_app_labels) + ')/$'
            urlpatterns.append(
                url(app_index_regex, wrap(self.app_index), name='app_list'),
            )
        return urlpatterns

Example 25

Project: django-json-rpc
Source File: site.py
View license
    def response_dict(self, request, D,
                      is_batch=False,
                      version_hint='1.0',
                      json_encoder=None):
        """Execute one JSON-RPC request dict ``D`` and build its response.

        :param request: the HTTP request, passed through to the target method
        :param D: the decoded JSON-RPC request object
        :param is_batch: True when ``D`` came from a batch request
        :param version_hint: JSON-RPC version to assume when ``D`` does not
            declare one
        :param json_encoder: encoder class used to probe result serialisability
        :returns: ``(response_dict, http_status)``; the response is ``None``
            with status 204 for notifications (requests without an ``id``)
        """
        json_encoder = json_encoder or self.json_encoder
        version = version_hint
        response = self.empty_response(version=version)
        # Per-version argument application: 2.0 allows dict (keyword) or
        # list params, 1.1 mixes positional and keyword, 1.0 is positional.
        apply_version = {
            '2.0':
            lambda f, r, p: f(r, **encode_kw(p)) if type(p) is dict else f(r, *p),
            '1.1':
            lambda f, r, p: f(r, *encode_arg11(p), **encode_kw(encode_kw11(p))),
            '1.0': lambda f, r, p: f(r, *p)
        }

        try:
            # params: An Array or Object, that holds the actual parameter values
            # for the invocation of the procedure. Can be omitted if empty.
            if 'params' not in D:
                D['params'] = []
            if 'method' not in D or 'params' not in D:
                raise InvalidParamsError(
                    'Request requires str:"method" and list:"params"')
            if D['method'] not in self.urls:
                raise MethodNotFoundError(
                    'Method not found. Available methods: %s' % (
                        '\n'.join(self.urls.keys())))

            # Determine the declared version: 2.0 uses 'jsonrpc', 1.1 uses
            # 'version', and the absence of both implies 1.0.
            if 'jsonrpc' in D:
                if str(D['jsonrpc']) not in apply_version:
                    raise InvalidRequestError(
                        'JSON-RPC version %s not supported.' % D['jsonrpc'])
                version = request.jsonrpc_version = response['jsonrpc'] = str(
                    D['jsonrpc'])
            elif 'version' in D:
                if str(D['version']) not in apply_version:
                    raise InvalidRequestError(
                        'JSON-RPC version %s not supported.' % D['version'])
                version = request.jsonrpc_version = response['version'] = str(
                    D['version'])
            else:
                request.jsonrpc_version = '1.0'

            method = self.urls[str(D['method'])]
            if getattr(method, 'json_validate', False):
                validate_params(method, D)

            if 'id' in D and D['id'] is not None:  # regular request
                response['id'] = D['id']
                if version in ('1.1', '2.0') and 'error' in response:
                    response.pop('error')
            elif is_batch:  # notification, not ok in a batch format, but happened anyway
                raise InvalidRequestError

            R = apply_version[version](method, request, D['params'])

            if 'id' not in D or ('id' in D and D['id'] is None):  # notification
                return None, 204

            # Tuples are not JSON-serialisable as-is; coerce to list.
            if isinstance(R, tuple):
                R = list(R)

            encoder = json_encoder()
            builtin_types = (dict, list, set, NoneType, bool, six.text_type
                       ) + six.integer_types + six.string_types
            # Serialisability probe for non-builtin results: the encoded
            # value is discarded; we only care whether default() raises.
            if all(not isinstance(R, e) for e in builtin_types):
                try:
                    rs = encoder.default(R)  # ...or something this thing supports
                except TypeError as exc:
                    raise TypeError("Return type not supported, for %r" % R)

            response['result'] = R

            status = 200

        except Error as e:
            # JSON-RPC protocol errors carry their own wire format/status.
            response['error'] = e.json_rpc_format
            if version in ('1.1', '2.0') and 'result' in response:
                response.pop('result')
            status = e.status
        except Exception as e:
            # exception missed by others
            signals.got_request_exception.send(sender=self.__class__,
                                               request=request)

            # Put stacktrace into the OtherError only if DEBUG is enabled
            if settings.DEBUG:
                other_error = OtherError(e)
            else:
                other_error = OtherError("Internal Server Error")

            response['error'] = other_error.json_rpc_format
            status = other_error.status
            if version in ('1.1', '2.0') and 'result' in response:
                response.pop('result')

        # Exactly one of result or error MUST be specified. It's not
        # allowed to specify both or none.
        if version in ('1.1', '2.0'
                   ) and 'error' in response and not response['error']:
            response.pop('error')

        return response, status

Example 26

Project: django-json-rpc
Source File: site.py
View license
    @csrf_exempt
    def dispatch(self, request, method='', json_encoder=None):
        """
        HTTP entry point for JSON-RPC calls.

        GET requests are allowed only for methods that validate_get()
        accepts; everything else must be POSTed with a JSON body.  The
        parsed payload is handed to response_dict() (once per item for a
        batch request) and the result is serialized back as an
        ``application/json-rpc`` HttpResponse.  Failures are converted
        into JSON-RPC error responses rather than Django error pages.
        """
        from django.http import HttpResponse
        json_encoder = json_encoder or self.json_encoder

        try:
            # in case we do something json doesn't like, we always get back valid json-rpc response
            response = self.empty_response()
            if request.method.lower() == 'get':
                valid, D = self.validate_get(request, method)
                if not valid:
                    raise InvalidRequestError(
                        'The method you are trying to access is '
                        'not available by GET requests')
            elif not request.method.lower() == 'post':
                raise RequestPostError
            else:
                try:
                    if hasattr(request, "body"):
                        D = loads(request.body.decode('utf-8'))
                    else:
                        # Older Django versions expose the payload as raw_post_data.
                        D = loads(request.raw_post_data.decode('utf-8'))
                except Exception:
                    # Undecodable bytes or malformed JSON.  Was a bare
                    # ``except:``, which also swallowed SystemExit and
                    # KeyboardInterrupt.
                    raise InvalidRequestError

            if isinstance(D, list):
                # Batch request: evaluate each call; per-call statuses are
                # discarded and the batch as a whole returns 200.
                response = [self.response_dict(request, d,
                                               is_batch=True,
                                               json_encoder=json_encoder)[0]
                            for d in D]
                status = 200
            else:
                response, status = self.response_dict(
                    request, D,
                    json_encoder=json_encoder)
                if response is None and ('id' not in D or D['id'] is None):  # a notification
                    return HttpResponse('', status=status)

            json_rpc = dumps(response, cls=json_encoder)
        except Error as e:
            # A known JSON-RPC error: report it with its own status code.
            response['error'] = e.json_rpc_format
            status = e.status
            json_rpc = dumps(response, cls=json_encoder)
        except Exception as e:
            # exception missed by others
            signals.got_request_exception.send(sender=self.__class__,
                                               request=request)

            # Put stacktrace into the OtherError only if DEBUG is enabled
            if settings.DEBUG:
                other_error = OtherError(e)
            else:
                other_error = OtherError("Internal Server Error")

            response['result'] = None
            response['error'] = other_error.json_rpc_format
            status = other_error.status

            json_rpc = dumps(response, cls=json_encoder)

        return HttpResponse(json_rpc,
                            status=status,
                            content_type='application/json-rpc')

Example 27

Project: coursys
Source File: new_grad_students.py
View license
    def handle(self, *args, **options):

        semester = options['semester']
        unit = Unit.objects.get(label=options['unit'])
        dryrun = options['dryrun']
        skip_duplicates = options['skip_duplicates'] 

        errors = []
        adm_appl_nbrs = []

        if settings.DEBUG:
            cmptunit = Unit.objects.get(label="COMP")
            program_map = {
                'CPPHD': GradProgram.objects.get(label="PhD", unit=cmptunit),
                'CPPZU': GradProgram.objects.get(label="PhD", unit=cmptunit),
                'CPMSC': GradProgram.objects.get(label="MSc Thesis", unit=cmptunit),
                'CPMCW': GradProgram.objects.get(label="MSc Project", unit=cmptunit),
                'CPMZU': GradProgram.objects.get(label="MSc Thesis", unit=cmptunit),
                'CPGND': GradProgram.objects.get(label="MSc Thesis", unit=cmptunit),
                'CPGQL': GradProgram.objects.get(label="MSc Thesis", unit=cmptunit)
            }
        else:
            cmptunit = Unit.objects.get(label="CMPT")
            mechunit = Unit.objects.get(label="MSE")
            enscunit = Unit.objects.get(label="ENSC")
            program_map = {
                'CPPHD': GradProgram.objects.get(label="PhD", unit=cmptunit),
                'CPPZU': GradProgram.objects.get(label="PhD", unit=cmptunit),
                'CPMSC': GradProgram.objects.get(label="MSc Thesis", unit=cmptunit),
                'CPMCW': GradProgram.objects.get(label="MSc Course", unit=cmptunit),
                'CPMZU': GradProgram.objects.get(label="MSc Thesis", unit=cmptunit),
                'CPGND': GradProgram.objects.get(label="Special", unit=cmptunit),
                'CPGQL': GradProgram.objects.get(label="Qualifying", unit=cmptunit),
                'CPMBD': GradProgram.objects.get(label="MSc Big Data", unit=cmptunit),

                'MSEPH': GradProgram.objects.get(label="Ph.D.", unit=mechunit),
                'MSEMS': GradProgram.objects.get(label="M.A.Sc.", unit=mechunit),
                'ESMAS': GradProgram.objects.get(label="M.A.Sc.", unit=enscunit),
                'ESMEN': GradProgram.objects.get(label="M.Eng.", unit=enscunit),
                'ESPHD': GradProgram.objects.get(label="Ph.D.", unit=enscunit)
            }

        if semester == None:
            print "You must provide a target semester (i.e. 1134) using the --semester argument." 
            exit()

        semester_object = Semester.objects.get(name=semester)

        for emplid, adm_appl_nbr, acad_prog in get_grad_table(semester, grad_programs[unit.acad_org]): 
            errors, adm_appl_nbrs = import_student( program_map, semester_object, dryrun, skip_duplicates, unit, emplid, adm_appl_nbr, acad_prog, errors, adm_appl_nbrs )

        if len(errors) > 0:
            print "----------------------------------------"
            print "Errors: "
            for error in errors:
                print error

Example 28

Project: bugle_project
Source File: typogrify.py
View license
def caps(text):
    """Wraps multiple capital letters in ``<span class="caps">`` 
    so they can be styled with CSS. 
    
    >>> caps("A message from KU")
    u'A message from <span class="caps">KU</span>'
    
    Uses the smartypants tokenizer to not screw with HTML or with tags it shouldn't.
    
    >>> caps("<PRE>CAPS</pre> more CAPS")
    u'<PRE>CAPS</pre> more <span class="caps">CAPS</span>'

    >>> caps("A message from 2KU2 with digits")
    u'A message from <span class="caps">2KU2</span> with digits'
        
    >>> caps("Dotted caps followed by spaces should never include them in the wrap D.O.T.   like so.")
    u'Dotted caps followed by spaces should never include them in the wrap <span class="caps">D.O.T.</span>  like so.'

    All caps with apostrophes in them shouldn't break. Only handles dumb apostrophes though.
    >>> caps("JIMMY'S")
    u'<span class="caps">JIMMY\\'S</span>'

    >>> caps("<i>D.O.T.</i>HE34T<b>RFID</b>")
    u'<i><span class="caps">D.O.T.</span></i><span class="caps">HE34T</span><b><span class="caps">RFID</span></b>'
    """
    text = force_unicode(text)
    # smartypants supplies the HTML-aware tokenizer; without it the filter
    # degrades to a no-op (or raises, when DEBUG is on, to surface the
    # missing dependency during development).
    try:
        import smartypants
    except ImportError:
        if settings.DEBUG:
            raise template.TemplateSyntaxError, "Error in {% caps %} filter: The Python SmartyPants library isn't installed."
        return text
        
    tokens = smartypants._tokenize(text)
    result = []
    in_skipped_tag = False    
    
    cap_finder = re.compile(r"""(
                            (\b[A-Z\d]*        # Group 2: Any amount of caps and digits
                            [A-Z]\d*[A-Z]      # A cap string much at least include two caps (but they can have digits between them)
                            [A-Z\d']*\b)       # Any amount of caps and digits or dumb apostsrophes
                            | (\b[A-Z]+\.\s?   # OR: Group 3: Some caps, followed by a '.' and an optional space
                            (?:[A-Z]+\.\s?)+)  # Followed by the same thing at least once more
                            (?:\s|\b|$))
                            """, re.VERBOSE)

    def _cap_wrapper(matchobj):
        """This is necessary to keep dotted cap strings to pick up extra spaces"""
        if matchobj.group(2):
            return """<span class="caps">%s</span>""" % matchobj.group(2)
        else:
            # Dotted caps (group 3): move a single trailing space outside
            # the span so the wrap doesn't swallow it.
            if matchobj.group(3)[-1] == " ":
                caps = matchobj.group(3)[:-1]
                tail = ' '
            else:
                caps = matchobj.group(3)
                tail = ''
            return """<span class="caps">%s</span>%s""" % (caps, tail)

    # Opening one of these tags suspends cap-wrapping until it is closed.
    tags_to_skip_regex = re.compile("<(/)?(?:pre|code|kbd|script|math)[^>]*>", re.IGNORECASE)
    
    
    for token in tokens:
        if token[0] == "tag":
            # Don't mess with tags.
            result.append(token[1])
            close_match = tags_to_skip_regex.match(token[1])
            if close_match and close_match.group(1) == None:
                in_skipped_tag = True
            else:
                in_skipped_tag = False
        else:
            if in_skipped_tag:
                result.append(token[1])
            else:
                result.append(cap_finder.sub(_cap_wrapper, token[1]))
    output = "".join(result)
    return mark_safe(output)

Example 29

Project: splunk-webframework
Source File: loaddata.py
View license
    def handle(self, *fixture_labels, **options):
        """
        Load the named fixture files into the database chosen with
        ``--database``.

        A label may be a bare fixture name, a name with a serialization
        format (``users.json``) and/or a compression suffix
        (``users.json.zip``).  Fixtures are searched for in every app's
        ``fixtures/`` directory, in settings.FIXTURE_DIRS, and as
        absolute paths.  All objects are installed in one transaction
        with constraint checks disabled; constraints are re-verified and
        sequences reset afterwards.  Raises CommandError on any problem.
        """
        ignore = options.get('ignore')
        using = options.get('database')

        connection = connections[using]

        if not len(fixture_labels):
            raise CommandError(
                "No database fixture specified. Please provide the path of at "
                "least one fixture in the command line."
            )

        verbosity = int(options.get('verbosity'))
        show_traceback = options.get('traceback')

        # commit is a stealth option - it isn't really useful as
        # a command line option, but it can be useful when invoking
        # loaddata from within another script.
        # If commit=True, loaddata will use its own transaction;
        # if commit=False, the data load SQL will become part of
        # the transaction in place when loaddata was invoked.
        commit = options.get('commit', True)

        # Keep a count of the installed objects and fixtures
        fixture_count = 0
        loaded_object_count = 0
        fixture_object_count = 0
        models = set()

        humanize = lambda dirname: "'%s'" % dirname if dirname else 'absolute path'

        # Get a cursor (even though we don't need one yet). This has
        # the side effect of initializing the test database (if
        # it isn't already initialized).
        cursor = connection.cursor()

        # Start transaction management. All fixtures are installed in a
        # single transaction to ensure that all references are resolved.
        if commit:
            transaction.commit_unless_managed(using=using)
            transaction.enter_transaction_management(using=using)
            transaction.managed(True, using=using)

        class SingleZipReader(zipfile.ZipFile):
            # Reader for zip-compressed fixtures, which must contain
            # exactly one member file (asserted when DEBUG is on).
            def __init__(self, *args, **kwargs):
                zipfile.ZipFile.__init__(self, *args, **kwargs)
                if settings.DEBUG:
                    assert len(self.namelist()) == 1, "Zip-compressed fixtures must contain only one file."
            def read(self):
                return zipfile.ZipFile.read(self, self.namelist()[0])

        # Map compression suffix -> callable that opens the fixture file.
        compression_types = {
            None:   open,
            'gz':   gzip.GzipFile,
            'zip':  SingleZipReader
        }
        if has_bz2:
            compression_types['bz2'] = bz2.BZ2File

        # Collect the filesystem path of every installed app so its
        # fixtures/ subdirectory can be searched below.
        app_module_paths = []
        for app in get_apps():
            if hasattr(app, '__path__'):
                # It's a 'models/' subpackage
                for path in app.__path__:
                    app_module_paths.append(upath(path))
            else:
                # It's a models.py module
                app_module_paths.append(upath(app.__file__))

        app_fixtures = [os.path.join(os.path.dirname(path), 'fixtures') for path in app_module_paths]

        try:
            with connection.constraint_checks_disabled():
                for fixture_label in fixture_labels:
                    parts = fixture_label.split('.')

                    # Split off an explicit compression suffix, if present;
                    # otherwise every known compression type is tried.
                    if len(parts) > 1 and parts[-1] in compression_types:
                        compression_formats = [parts[-1]]
                        parts = parts[:-1]
                    else:
                        compression_formats = compression_types.keys()

                    if len(parts) == 1:
                        fixture_name = parts[0]
                        formats = serializers.get_public_serializer_formats()
                    else:
                        fixture_name, format = '.'.join(parts[:-1]), parts[-1]
                        if format in serializers.get_public_serializer_formats():
                            formats = [format]
                        else:
                            formats = []

                    if formats:
                        if verbosity >= 2:
                            self.stdout.write("Loading '%s' fixtures..." % fixture_name)
                    else:
                        raise CommandError(
                            "Problem installing fixture '%s': %s is not a known serialization format." %
                                (fixture_name, format))

                    if os.path.isabs(fixture_name):
                        fixture_dirs = [fixture_name]
                    else:
                        fixture_dirs = app_fixtures + list(settings.FIXTURE_DIRS) + ['']

                    for fixture_dir in fixture_dirs:
                        if verbosity >= 2:
                            self.stdout.write("Checking %s for fixtures..." % humanize(fixture_dir))

                        label_found = False
                        # Try every database-suffix/format/compression
                        # combination this label could correspond to.
                        for combo in product([using, None], formats, compression_formats):
                            database, format, compression_format = combo
                            file_name = '.'.join(
                                p for p in [
                                    fixture_name, database, format, compression_format
                                ]
                                if p
                            )

                            if verbosity >= 3:
                                self.stdout.write("Trying %s for %s fixture '%s'..." % \
                                    (humanize(fixture_dir), file_name, fixture_name))
                            full_path = os.path.join(fixture_dir, file_name)
                            open_method = compression_types[compression_format]
                            try:
                                fixture = open_method(full_path, 'r')
                            except IOError:
                                # File for this combination doesn't exist;
                                # keep trying the remaining combinations.
                                if verbosity >= 2:
                                    self.stdout.write("No %s fixture '%s' in %s." % \
                                        (format, fixture_name, humanize(fixture_dir)))
                            else:
                                try:
                                    if label_found:
                                        raise CommandError("Multiple fixtures named '%s' in %s. Aborting." %
                                            (fixture_name, humanize(fixture_dir)))

                                    fixture_count += 1
                                    objects_in_fixture = 0
                                    loaded_objects_in_fixture = 0
                                    if verbosity >= 2:
                                        self.stdout.write("Installing %s fixture '%s' from %s." % \
                                            (format, fixture_name, humanize(fixture_dir)))

                                    objects = serializers.deserialize(format, fixture, using=using, ignorenonexistent=ignore)

                                    for obj in objects:
                                        objects_in_fixture += 1
                                        # The router decides whether this model
                                        # may be loaded into the target database.
                                        if router.allow_syncdb(using, obj.object.__class__):
                                            loaded_objects_in_fixture += 1
                                            models.add(obj.object.__class__)
                                            try:
                                                obj.save(using=using)
                                            except (DatabaseError, IntegrityError) as e:
                                                e.args = ("Could not load %(app_label)s.%(object_name)s(pk=%(pk)s): %(error_msg)s" % {
                                                        'app_label': obj.object._meta.app_label,
                                                        'object_name': obj.object._meta.object_name,
                                                        'pk': obj.object.pk,
                                                        'error_msg': force_text(e)
                                                    },)
                                                raise

                                    loaded_object_count += loaded_objects_in_fixture
                                    fixture_object_count += objects_in_fixture
                                    label_found = True
                                except Exception as e:
                                    if not isinstance(e, CommandError):
                                        e.args = ("Problem installing fixture '%s': %s" % (full_path, e),)
                                    raise
                                finally:
                                    fixture.close()

                                # If the fixture we loaded contains 0 objects, assume that an
                                # error was encountered during fixture loading.
                                if objects_in_fixture == 0:
                                    raise CommandError(
                                        "No fixture data found for '%s'. (File format may be invalid.)" %
                                            (fixture_name))

            # Since we disabled constraint checks, we must manually check for
            # any invalid keys that might have been added
            table_names = [model._meta.db_table for model in models]
            try:
                connection.check_constraints(table_names=table_names)
            except Exception as e:
                e.args = ("Problem installing fixtures: %s" % e,)
                raise

        except (SystemExit, KeyboardInterrupt):
            raise
        except Exception as e:
            # Roll everything back; partial fixture loads must not stick.
            if commit:
                transaction.rollback(using=using)
                transaction.leave_transaction_management(using=using)
            raise

        # If we found even one object in a fixture, we need to reset the
        # database sequences.
        if loaded_object_count > 0:
            sequence_sql = connection.ops.sequence_reset_sql(no_style(), models)
            if sequence_sql:
                if verbosity >= 2:
                    self.stdout.write("Resetting sequences\n")
                for line in sequence_sql:
                    cursor.execute(line)

        if commit:
            transaction.commit(using=using)
            transaction.leave_transaction_management(using=using)

        if verbosity >= 1:
            if fixture_object_count == loaded_object_count:
                self.stdout.write("Installed %d object(s) from %d fixture(s)" % (
                    loaded_object_count, fixture_count))
            else:
                self.stdout.write("Installed %d object(s) (of %d) from %d fixture(s)" % (
                    loaded_object_count, fixture_object_count, fixture_count))

        # Close the DB connection. This is required as a workaround for an
        # edge case in MySQL: if the same connection is used to
        # create tables, load data, and query, the query can return
        # incorrect results. See Django #7572, MySQL #37735.
        if commit:
            connection.close()

Example 30

Project: splunk-webframework
Source File: dispatcher.py
View license
    def connect(self, receiver, sender=None, weak=True, dispatch_uid=None):
        """
        Register *receiver* so it is called whenever this signal is sent.

        Arguments:

            receiver
                A hashable callable that accepts keyword arguments.  When
                ``weak`` is True it must also be weak-referencable (i.e.
                saferef.safeRef() must be able to wrap it).

            sender
                Deliver only signals originating from this sender; ``None``
                means deliver regardless of sender.

            weak
                Hold the receiver through a weak reference (the default)
                rather than a strong one.

            dispatch_uid
                Optional hashable identifier for this registration.  A
                second connect() using the same dispatch_uid is a no-op.
        """
        from django.conf import settings

        # Receiver sanity checks run only under DEBUG: the introspection
        # below is too costly to pay for on every connect() in production.
        if settings.DEBUG:
            import inspect
            assert callable(receiver), "Signal receivers must be callable."

            # Best-effort verification that the receiver takes **kwargs.
            # Not every callable is inspectable with getargspec, so try
            # the object itself, then its __call__, and finally give up
            # (spec = None) rather than reject a valid but odd callable.
            try:
                spec = inspect.getargspec(receiver)
            except TypeError:
                try:
                    spec = inspect.getargspec(receiver.__call__)
                except (TypeError, AttributeError):
                    spec = None
            if spec:
                assert spec[2] is not None, \
                    "Signal receivers must accept keyword arguments (**kwargs)."

        # Identity used for duplicate detection (and later disconnect()).
        lookup_key = ((dispatch_uid, _make_id(sender)) if dispatch_uid
                      else (_make_id(receiver), _make_id(sender)))

        if weak:
            receiver = saferef.safeRef(receiver, onDelete=self._remove_receiver)

        with self.lock:
            if not any(key == lookup_key for key, _ in self.receivers):
                self.receivers.append((lookup_key, receiver))

Example 31

Project: xadmin
Source File: base.py
View license
    @filter_hook
    def get_context(self):
        """
        Extend the base context with the site-wide navigation menu.

        The permission-filtered menu is cached in the session when DEBUG
        is off; menu entries whose URL prefixes the current request path
        get a ``selected`` flag so templates can highlight them.
        """
        context = super(CommAdminView, self).get_context()

        if not settings.DEBUG and 'nav_menu' in self.request.session:
            nav_menu = json.loads(self.request.session['nav_menu'])
        else:
            menus = copy.copy(self.get_nav_menu())

            def check_menu_permission(item):
                # 'perm' may be absent (public), a callable predicate, the
                # string 'super', or a permission name for user.has_perm().
                need_perm = item.pop('perm', None)
                if need_perm is None:
                    return True
                elif callable(need_perm):
                    return need_perm(self.user)
                elif need_perm == 'super':
                    return self.user.is_superuser
                else:
                    return self.user.has_perm(need_perm)

            def filter_item(item):
                # Drop a submenu entirely when permission filtering removed
                # all of its children (return None so the caller skips it).
                if 'menus' in item:
                    before_filter_length = len(item['menus'])
                    item['menus'] = [filter_item(
                        i) for i in item['menus'] if check_menu_permission(i)]
                    after_filter_length = len(item['menus'])
                    if after_filter_length == 0 and before_filter_length > 0:
                        return None
                return item

            nav_menu = [filter_item(item) for item in menus if check_menu_permission(item)]
            # List comprehension rather than filter(): on Python 3 filter()
            # returns a one-shot iterator that json.dumps cannot serialize.
            nav_menu = [item for item in nav_menu if item]

            if not settings.DEBUG:
                self.request.session['nav_menu'] = json.dumps(nav_menu)
                self.request.session.modified = True

        def check_selected(menu, path):
            # Flag every menu whose url (query string ignored) prefixes the
            # current path, and propagate selection up through parents.
            selected = False
            if 'url' in menu:
                chop_index = menu['url'].find('?')
                if chop_index == -1:
                    selected = path.startswith(menu['url'])
                else:
                    selected = path.startswith(menu['url'][:chop_index])
            if 'menus' in menu:
                for m in menu['menus']:
                    _s = check_selected(m, path)
                    if _s:
                        selected = True
            if selected:
                menu['selected'] = True
            return selected
        for menu in nav_menu:
            check_selected(menu, self.request.path)

        context.update({
            'menu_template': self.menu_template,
            'nav_menu': nav_menu,
            'site_title': self.site_title,
            'site_footer': self.site_footer,
            'breadcrumbs': self.get_breadcrumb()
        })

        return context

Example 32

Project: django-feedinator
Source File: core.py
View license
def update_feed(feed_id, reset=False):
    """
    Update an individual feed regardless of freshness.
    """

    feed = Feed.objects.get(pk=feed_id)

    if reset:
        FeedEntry.objects.filter(feed=feed).delete()

    if settings.DEBUG:
        print "--- updating %s" % feed.title

    f = feedparser.parse(feed.url)

    for entry in f.entries:

        if not "id" in entry:
            if settings.DEBUG:
                print "!!!", entry.title, "has no id"
            continue

        entry_exists = feed.entries.filter(uid=entry.id).count()

        if not entry_exists:

            fe = FeedEntry(
                feed=feed,
                uid=entry.id,
                title=entry.title,
                link=entry.link,
                date_updated=tuple_to_datetime(entry.updated_parsed, feed.timezone),
                last_fetched=now()
            )

            fe.summary = entry.get("summary", "")
            if "content" in entry:
                for content in entry.content:
                    fe.content = content.get("value", "")
            else:
                fe.content = entry.get("summary", "")

            if "published_parsed" in entry:
                fe.date_published = tuple_to_datetime(entry.published_parsed, feed.timezone)
            else:
                fe.date_published = tuple_to_datetime(entry.updated_parsed, feed.timezone)

            if "author_detail" in entry:
                fe.author_name = entry.author_detail.get("name", "")
                fe.author_email = entry.author_detail.get("email", None)
                fe.author_uri = entry.author_detail.get("href", None)
            elif "author" in entry:
                fe.author_name = entry.author

            fe.save()

            if "tags" in entry:
                for name in entry.tags:
                    Tag(name=name.term, feed_entry=fe).save()

            if settings.DEBUG:
                print fe

        else:

            fe = FeedEntry.objects.get(uid=entry.id, feed=feed)

            if fe.date_updated < tuple_to_datetime(entry.updated_parsed, feed.timezone):

                fe.title = entry.title
                fe.date_updated = tuple_to_datetime(entry.updated_parsed, feed.timezone)
                fe.last_fetched = now()

                fe.summary = entry.get("summary", "")

                if "content" in entry:
                    for content in entry.content:
                        fe.content = content.get("value", "")
                else:
                    fe.content = entry.get("summary", "")

                fe.save()

                if "tags" in entry:
                    fe.tags.all().delete()
                    for name in entry.tags:
                        Tag(name=name.term, feed_entry=fe).save()

                if settings.DEBUG:
                    print "UPDATED %s" % fe

    feed.last_fetched = now()
    feed.save()

Example 33

Project: django-evolution
Source File: runtests.py
View license
def run_tests(verbosity=1, interactive=False):
    """Create test databases, run the nose suite, then tear everything down.

    Mirrors Django's own test runner: sets up the test environment,
    builds a test database per configured connection, syncs/migrates,
    hands control to nose, and finally destroys the databases.
    """
    from django.conf import settings
    from django.core import management
    from django.db import connections
    from django.test.utils import (setup_test_environment,
                                   teardown_test_environment)

    # Django >= 1.7 requires an explicit setup() call before DB access.
    if hasattr(django, 'setup'):
        django.setup()

    setup_test_environment()
    settings.DEBUG = False

    # Remember each connection's real database name so it can be restored
    # when the matching test database is destroyed below.
    saved_db_names = []
    for alias in connections:
        conn = connections[alias]
        saved_db_names.append((conn, conn.settings_dict['NAME']))
        conn.creation.create_test_db(verbosity, autoclobber=not interactive)

    # 'migrate' replaced 'syncdb' in Django 1.7.
    sync_command = ('migrate' if django.VERSION[:2] >= (1, 7)
                    else 'syncdb')
    management.call_command(sync_command, verbosity=verbosity,
                            interactive=interactive)

    nose_argv = ['runtests.py', '-v',
                 '--with-coverage',
                 '--with-doctest',
                 '--doctest-extension=.txt',
                 '--cover-package=django_evolution',
                 '--match=tests[\/]*.py',
                 '--match=^test']

    # Forward any extra command-line arguments straight to nose.
    if len(sys.argv) > 2:
        nose_argv.extend(sys.argv[2:])

    nose.run(argv=nose_argv)

    # Drop the test databases and restore the original environment.
    for conn, name in saved_db_names:
        conn.creation.destroy_test_db(name, verbosity=0)

    teardown_test_environment()

Example 34

Project: extdirect.django
Source File: providers.py
View license
    def dispatcher(self, request, extdirect_req):
        """
        Parse the ExtDirect specification an call
        the function with the `request` instance.
        
        If the `request` didn't come from an Ext Form, then the
        parameters recieved will be added to the `request` in the
        `extdirect_post_data` attribute.
        """
        
        action = extdirect_req['action']
        method = extdirect_req['method']
        
        func = self.actions[action][method]['func']        
        
        data = None
        if not extdirect_req.get('isForm'):
            data = extdirect_req.pop('data')
        
        #the response object will be the same recieved but without `data`.
        #we will add the `result` later.
        response = extdirect_req
        
        #Checks for login or permissions required
        login_required = self.actions[action][method]['login_required']
        if(login_required):            
            if not request.user.is_authenticated():                
                response['result'] = dict(success=False, message='You must be authenticated to run this method.')
                return response
            
        permission = self.actions[action][method]['permission']
        if(permission):            
            if not request.user.has_perm(permission):                
                response['result'] = dict(success=False, messsage='You need `%s` permission to run this method' % permission)
                return response
                
        if data:
            #this is a simple hack to convert all the dictionaries keys
            #to strings instead of unicodes. {u'key': u'value'} --> {'key': u'value'}
            #This is needed if the function called want to pass the dictionaries as kw arguments.
            params = []
            for param in data:
                if isinstance(param, dict):
                    param = dict(map(lambda x: (str(x[0]), x[1]), param.items()))
                params.append(param)
                
            #Add the `extdirect_post_data` attribute to the request instance
            request.extdirect_post_data = params
            
        if extdirect_req.get('isForm'):
            extdirect_post_data = request.POST.copy()
            extdirect_post_data.pop('extAction')
            extdirect_post_data.pop('extMethod')
            extdirect_post_data.pop('extTID')
            extdirect_post_data.pop('extType')
            extdirect_post_data.pop('extUpload')
            
            request.extdirect_post_data = extdirect_post_data
        
        #finally, call the function passing the `request`
        try:
            response['result'] = func(request)
        except Exception, e:            
            if settings.DEBUG:
                etype, evalue, etb = sys.exc_info()
                response['type'] = 'exception'                
                response['message'] = traceback.format_exception_only(etype, evalue)[0]
                response['where'] = traceback.extract_tb(etb)[-1]
            else:
                raise e
        
        return response

Example 35

Project: couchdbkit
Source File: loading.py
View license
    def sync(self, app, verbosity=2, temp=None):
        """Sync the design documents (views) of all applications, creating
        the database when needed.

        When `temp` is specified, it is appended to the app's name on the
        docid.  The design doc can then be updated in the background and
        copied over the existing design docs to reduce the blocking time
        of view updates.
        """
        # Package name of the app module (strip the trailing segment,
        # e.g. 'myapp.models' -> 'myapp').
        app_name = app.__name__.rsplit('.', 1)[0]
        app_labels = set()
        schema_list = self.app_schema.values()
        # Collect the app_label of every registered schema that lives in
        # this app's package.
        for schema_dict in schema_list:
            for schema in schema_dict.values():
                app_module = schema.__module__.rsplit(".", 1)[0]
                if app_module == app_name and not schema._meta.app_label in app_labels:
                    app_labels.add(schema._meta.app_label)
        for app_label in app_labels:
            # Skip labels with no configured CouchDB database.
            if not app_label in self._databases:
                continue
            if verbosity >=1:
                print "sync `%s` in CouchDB" % app_name
            db = self.get_db(app_label)

            # The design documents live in a `_design` directory next to
            # the app module on disk.
            app_path = os.path.abspath(os.path.join(sys.modules[app.__name__].__file__, ".."))
            design_path = "%s/%s" % (app_path, "_design")
            if not os.path.isdir(design_path):
                if settings.DEBUG:
                    print >>sys.stderr, "%s don't exists, no ddoc synchronized" % design_path
                return

            # With `temp`, push under a suffixed docid so the live design
            # doc keeps serving while views rebuild.
            if temp:
                design_name = '%s-%s' % (app_label, temp)
            else:
                design_name = app_label

            docid = "_design/%s" % design_name

            push(os.path.join(app_path, "_design"), db, force=True,
                    docid=docid)

            if temp:
                # Query one view with limit=0 to trigger the (re)build of
                # the temporary design doc's indexes.
                ddoc = db[docid]
                view_names = ddoc.get('views', {}).keys()
                if len(view_names) > 0:
                    if verbosity >= 1:
                        print 'Triggering view rebuild'

                    view = '%s/%s' % (design_name, view_names[0])
                    list(db.view(view, limit=0))

Example 36

View license
    def handle(self, *args, **options):
        """Recompute cached geometries for the selected GTFS feeds.

        Accepts feed IDs as positional arguments, or --all for every feed
        (but not both).  For each feed, updates shape, trip, and route
        geometries in turn, logging per-phase timings.

        Raises CommandError on bad arguments or an unknown feed ID.
        """
        total_start = time.time()

        # Validate the arguments: exactly one of "feed IDs" or --all.
        all_feeds = options.get('all')
        if len(args) == 0 and not all_feeds:
            raise CommandError('You must pass in feed ID or --all.')
        if len(args) > 0 and all_feeds:
            raise CommandError("You can't specify a feeds and --all.")

        # Setup logging: verbosity 0 -> WARNING, 1 -> INFO, 2 -> DEBUG on
        # the 'multigtfs' logger; 3+ -> DEBUG on the root logger with the
        # logger name included in the format.
        verbosity = int(options['verbosity'])
        console = logging.StreamHandler(self.stderr)
        formatter = logging.Formatter('%(levelname)s - %(message)s')
        logger_name = 'multigtfs'
        if verbosity == 0:
            level = logging.WARNING
        elif verbosity == 1:
            level = logging.INFO
        elif verbosity == 2:
            level = logging.DEBUG
        else:
            level = logging.DEBUG
            logger_name = ''
            formatter = logging.Formatter(
                '%(name)s - %(levelname)s - %(message)s')
        console.setLevel(level)
        console.setFormatter(formatter)
        logger = logging.getLogger(logger_name)
        logger.setLevel(level)
        logger.addHandler(console)

        # Disable database query logging
        if settings.DEBUG:
            connection.use_debug_cursor = False

        # Get the feeds: either every feed ordered by id, or the ones
        # whose IDs were passed on the command line.
        if all_feeds:
            feeds = Feed.objects.order_by('id')
        else:
            feeds = []
            feed_ids = [int(a) for a in args]
            for feed_id in feed_ids:
                try:
                    feeds.append(Feed.objects.get(id=feed_id))
                except Feed.DoesNotExist:
                    raise CommandError('Feed %s not found' % feed_id)

        # Refresh the geometries, leaf objects first (shapes, then trips,
        # then routes); parent updates are deferred with
        # update_parent=False and handled by the later phases.
        for feed in feeds:
            logger.info(
                "Updating geometries in Feed %s (ID %s)...",
                feed.name, feed.id)

            start_time = time.time()
            shapes = Shape.objects.in_feed(feed)
            for shape in shapes:
                shape.update_geometry(update_parent=False)
            end_time = time.time()
            logger.debug(
                "Imported %s shape%s in %0.1f seconds",
                shapes.count(), '' if shapes.count() == 1 else 's',
                end_time - start_time)

            start_time = time.time()
            trips = Trip.objects.in_feed(feed)
            for trip in trips:
                trip.update_geometry(update_parent=False)
            end_time = time.time()
            logger.debug(
                "Imported %s trip%s in %0.1f seconds",
                trips.count(), '' if trips.count() == 1 else 's',
                end_time - start_time)

            start_time = time.time()
            routes = Route.objects.in_feed(feed)
            for route in routes:
                route.update_geometry()
            end_time = time.time()
            logger.debug(
                "Imported %s route%s in %0.1f seconds",
                routes.count(), '' if routes.count() == 1 else 's',
                end_time - start_time)

            # Per-feed summary with the cumulative elapsed time.
            total_end = time.time()
            logger.info(
                "Feed %d: Updated geometries in %d shape%s, %d trip%s, and"
                " %d route%s %0.1f seconds.",
                feed.id,
                shapes.count(), '' if shapes.count() == 1 else 's',
                trips.count(), '' if trips.count() == 1 else 's',
                routes.count(), '' if routes.count() == 1 else 's',
                total_end - total_start)

Example 37

Project: python-useful
Source File: views.py
View license
def page(template=None, **decorator_args):
    """
    Decorator to make template rendered by Django views dead simple.
    Takes the template path as first argument. See the code comments.
    Example::

        @page('payments/payments_info.html')
        def payment_info(request):
            return { ... context dict ... }
    """
    def page_decorator_wrapper(fn):
        @functools.wraps(fn)
        def page_decorator_inner_wrapper(request, *args, **kw):
            # Take the basic context dictionary from the optional decorator args.
            data = decorator_args.copy()

            try:
                # Call the original view.
                d = fn(request, *args, **kw)
            except HttpResponseException, e:
                return e.http_response

            # Return now if it returned some kind of HTTP response itself, no job.
            if isinstance(d, HttpResponse):
                return d

            if d:
                data.update(d)

            # The view can override the template to use.
            template_name = data.get('template',  template)

            # By adding the debug_template parameter we switch to possible
            # debugging template:
            # payments/payments_info.html -> payments/payments_info_debug.html
            if settings.DEBUG and request.GET.get('debug_template'):
                stn = os.path.splitext(template_name)
                template_name = stn[0] + '_debug' + stn[1]

            # The view or the decorator call can override the context
            # instance. Otherwise, use the usual RequestContext.
            context_instance = data.get('context') or RequestContext(request)

            # Render the template.
            response = render_to_response(template_name, data, context_instance)
            return response

        return page_decorator_inner_wrapper
    return page_decorator_wrapper

Example 38

Project: django-memoize
Source File: __init__.py
View license
    def memoize(self, timeout=DEFAULT_TIMEOUT, make_name=None, unless=None):
        """Cache a function's return value, keyed on its arguments.

        Information on
        `Memoization <http://en.wikipedia.org/wiki/Memoization>`_.

        Example::

            @memoize(timeout=50)
            def big_foo(a, b):
                return a + b + random.randrange(0, 1000)

        .. code-block:: pycon

            >>> big_foo(5, 2)
            753
            >>> big_foo(5, 3)
            234
            >>> big_foo(5, 2)
            753

        .. note::

            The decorated function gains three attributes:

                **uncached**
                    The original undecorated function. readable only

                **cache_timeout**
                    The cache timeout for this function.  Assign to it
                    *before* the function is called for a custom value to
                    take effect.

                    readable and writable

                **make_cache_key**
                    A function used in generating the cache_key used.

                    readable and writable

        :param timeout: Default: 300. If set to an integer, will cache
                        for that amount of time. Unit of time is in seconds.
        :param make_name: Default None. If set this is a function that accepts
                          a single argument, the function name, and returns a
                          new string to be used as the function name.
                          If not set then the function name is used.
        :param unless: Default None. Cache will *always* execute the caching
                       facilities unless this callable is true.
                       This will bypass the caching entirely.
        """

        def _decorator(f):
            @functools.wraps(f)
            def decorated_function(*args, **kwargs):
                # Bypass caching entirely when requested.
                if callable(unless) and unless() is True:
                    return f(*args, **kwargs)

                # A broken cache backend must never break the caller:
                # outside DEBUG, log and fall back to calling f directly.
                try:
                    key = decorated_function.make_cache_key(
                        f, *args, **kwargs
                    )
                    cached = self.get(key)
                except Exception:
                    if settings.DEBUG:
                        raise
                    logger.exception(
                        "Exception possibly due to cache backend."
                    )
                    return f(*args, **kwargs)

                if cached is not None:
                    return cached

                # Cache miss: compute the value, then best-effort store it.
                result = f(*args, **kwargs)
                try:
                    self.set(
                        key, result,
                        timeout=decorated_function.cache_timeout
                    )
                except Exception:
                    if settings.DEBUG:
                        raise
                    logger.exception(
                        "Exception possibly due to cache backend."
                    )
                return result

            decorated_function.uncached = f
            decorated_function.cache_timeout = timeout
            decorated_function.make_cache_key = self._memoize_make_cache_key(
                make_name, decorated_function
            )
            decorated_function.delete_memoized = (
                lambda: self.delete_memoized(f)
            )

            return decorated_function

        return _decorator

Example 39

Project: django-memoize
Source File: __init__.py
View license
    def delete_memoized(self, f, *args, **kwargs):
        """
        Delete the specified function's caches, based on given parameters.

        If parameters are given, only the results that were memoized with
        those exact parameters are erased.  Otherwise all versions of the
        caches are forgotten.

        Example::

            @memoize(50)
            def random_func():
                return random.randrange(1, 50)

            @memoize()
            def param_func(a, b):
                return a+b+random.randrange(1, 50)

        .. code-block:: pycon

            >>> random_func()
            43
            >>> random_func()
            43
            >>> delete_memoized(random_func)
            >>> random_func()
            16
            >>> param_func(1, 2)
            32
            >>> param_func(1, 2)
            32
            >>> param_func(2, 2)
            47
            >>> delete_memoized(param_func, 1, 2)
            >>> param_func(1, 2)
            13
            >>> param_func(2, 2)
            47

        Delete memoized is also smart about instance methods vs class
        methods.

        When passing an instance method, it will only clear the cache
        related to that instance of that object.  (Object uniqueness can be
        overridden by defining the ``__repr__`` method, such as user id.)

        When passing a class method, it will clear all caches related
        across all instances of that class.

        Example::

            class Adder(object):
                @memoize()
                def add(self, b):
                    return b + random.random()

        .. code-block:: pycon

            >>> adder1 = Adder()
            >>> adder2 = Adder()
            >>> adder1.add(3)
            3.23214234
            >>> adder2.add(3)
            3.60898509
            >>> delete_memoized(adder1.add)
            >>> adder1.add(3)
            3.01348673
            >>> adder2.add(3)
            3.60898509
            >>> delete_memoized(Adder.add)
            >>> adder1.add(3)
            3.53235667
            >>> adder2.add(3)
            3.72341788

        :param f: Reference to the memoized function.  (Passing the
                  function *name* as a string is deprecated and raises
                  ``DeprecationWarning`` below.)
        :param args: Positional parameters used with the memoized function.
        :param kwargs: Named parameters used with the memoized function.

        .. note::

            django-memoize uses inspect to order kwargs into positional
            args when the function is memoized.  If you pass a function
            reference into ``f``, django-memoize will be able to place the
            args/kwargs in the proper order and delete the positional
            cache.

            However, be sure to pass in potential arguments in the same
            order as defined in your function, as args only; otherwise
            django-memoize will not be able to compute the same cache key.

        .. note::

            django-memoize maintains an internal random version hash for
            the function.  Using delete_memoized will only swap out the
            version hash, causing the memoize function to recompute results
            and put them into another key.

            This leaves any computed caches for this memoized function
            within the caching backend.

            It is recommended to use a very high timeout with memoize if
            using this function, so that when the version hash is swapped,
            the old cached results would eventually be reclaimed by the
            caching backend.
        """
        # Deleting by name string is ambiguous (duplicate function names
        # across modules), hence only function references are accepted.
        if not callable(f):
            raise DeprecationWarning(
                "Deleting messages by relative name is no longer"
                " reliable, please switch to a function reference"
            )

        try:
            if not args and not kwargs:
                # No parameters given: bump the version hash, invalidating
                # every memoized result for f at once.
                self._memoize_version(f, reset=True)
            else:
                # Rebuild the exact cache key and delete just that entry.
                cache_key = f.make_cache_key(f.uncached, *args, **kwargs)
                self.delete(cache_key)
        except Exception:
            # Match memoize(): a failing cache backend should not break
            # callers outside of DEBUG.
            if settings.DEBUG:
                raise
            logger.exception("Exception possibly due to cache backend.")
Example 40

Project: django-uwsgi
Source File: runuwsgi.py
View license
    def handle(self, *args, **options):
        """Configure uWSGI through UWSGI_* environment variables, then
        replace the current process with the uwsgi binary via execvp."""
        # Positional args may override the HTTP port or switch to a socket.
        for arg in args:
            key, value = arg.split('=')
            if key == 'http':
                if self.http_port:
                    self.http_port = value
            elif key == 'socket':
                self.http_port = None
                self.socket_addr = value

        env = os.environ

        # Load the Django WSGI handler.
        env['UWSGI_MODULE'] = '%s.wsgi' % django_project

        # Django settings module: explicit option or the project default.
        if options['settings']:
            env['DJANGO_SETTINGS_MODULE'] = options['settings']
        else:
            env['DJANGO_SETTINGS_MODULE'] = '%s.settings' % django_project

        # Speak the uwsgi protocol.
        env['UWSGI_PROTOCOL'] = 'uwsgi'

        # Bind either the HTTP server to the port, or to a socket address.
        if self.http_port:
            env['UWSGI_HTTP_SOCKET'] = ':%s' % self.http_port
        elif self.socket_addr:
            env['UWSGI_UWSGI_SOCKET'] = self.socket_addr
            env['UWSGI_CHMOD_SOCKET'] = '664'

        # Unconditional settings, applied in one batch.
        env.update({
            # process naming
            'UWSGI_AUTO_PROCNAME': 'true',
            'UWSGI_PROCNAME_PREFIX_SPACED': '[uWSGI %s]' % django_project,
            # remove sockets/pidfile at exit
            'UWSGI_VACUUM': 'true',
            # PythonHome and project python path
            'UWSGI_VIRTUALENV': sys.prefix,
            'UWSGI_PP': root,
            'UWSGI_POST_BUFFERING': '1048576',
            'UWSGI_RELOAD_ON_RSS': '300',
            # increase buffer size a bit
            'UWSGI_BUFFER_SIZE': '65535',
            # some additions required by newrelic
            'UWSGI_ENABLE_THREADS': 'true',
            'UWSGI_LAZY_APPS': 'true',
            'UWSGI_SINGLE_INTERPRETER': 'true',
            'UWSGI_AUTOLOAD': 'true',
            # 12 workers, cheaper to the number of cpus
            'UWSGI_WORKERS': '12',
            'UWSGI_CHEAPER': str(multiprocessing.cpu_count()),
            # enable the master process
            'UWSGI_MASTER': 'true',
            'UWSGI_NO_ORPHANS': 'true',
            'UWSGI_MEMORY_REPORT': 'true',
            'UWSGI_DISABLE_LOGGING': 'true',
            # harakiri
            'UWSGI_HARAKIRI': '60',
            'UWSGI_HARAKIRI_VERBOSE': 'true',
            # uid and gid
            'UWSGI_UID': str(os.getuid()),
            'UWSGI_GID': str(os.getgid()),
            # TODO: Figure out cache
            'UWSGI_CACHE2': 'name=%s,items=20000,keysize=128,blocksize=4096' % django_project,
        })

        if settings.DEBUG:
            if apps.is_installed('configurations'):
                env.setdefault('DJANGO_CONFIGURATION', 'Development')
                import configurations
                configurations.setup()
            # Map and serve static files, and autoreload, in debug mode.
            env['UWSGI_STATIC_MAP'] = '%s=%s' % (settings.STATIC_URL, settings.STATIC_ROOT)
            env['UWSGI_PY_AUTORELOAD'] = '2'

        # Run the spooler for the mail task.
        if 'django_uwsgi' in settings.EMAIL_BACKEND:
            env['UWSGI_SPOOLER'] = '/tmp'
            env['UWSGI_SPOOLER_IMPORT'] = 'django_uwsgi.task'

        # Exec the uwsgi binary, replacing the current process.
        if apps.ready:
            os.execvp('uwsgi', ('uwsgi',))

Example 41

Project: PiplMesh
Source File: storage.py
View license
def serve(request, path):
    """
    Serve files from default storage.

    To use, put a URL pattern such as::

        (r'^(?P<path>.*)$', 'piplmesh.utils.storage.serve')

    in your URLconf.

    Only usable with ``DEBUG = True``; raises ``ImproperlyConfigured``
    otherwise.  Returns 404 for directories and missing files, and
    answers ``If-Modified-Since`` requests with 304 when the storage
    backend reports modification times.
    """

    # Refuse to run outside of debug mode -- this view is not hardened
    # for production use.
    if not settings.DEBUG:
        raise exceptions.ImproperlyConfigured("The view can only be used in debug mode.")
    # Decode %-escapes, normalize '..' segments, and strip leading '/'.
    normalized_path = posixpath.normpath(urllib.unquote(path)).lstrip('/')

    if not storage.default_storage.exists(normalized_path):
        if path.endswith('/') or path == '':
            raise http.Http404("Directory indexes are not allowed here.")
        raise http.Http404("'%s' could not be found" % path)

    # Not every storage backend implements mimetype(); fall back to a
    # generic binary content type.
    try:
        mimetype = storage.default_storage.mimetype(normalized_path) or 'application/octet-stream'
    except (NotImplementedError, AttributeError):
        mimetype = 'application/octet-stream'

    # Modification time is optional too; without it, the conditional
    # request handling below is skipped.
    try:
        modified_time = time.mktime(storage.default_storage.modified_time(normalized_path).timetuple())
    except (NotImplementedError, AttributeError):
        modified_time = None

    size = storage.default_storage.size(normalized_path)

    # Conditional request: reply 304 Not Modified when unchanged.
    if modified_time is not None and not static.was_modified_since(request.META.get('HTTP_IF_MODIFIED_SINCE'), modified_time, size):
        return http.HttpResponseNotModified(mimetype=mimetype)

    # NOTE(review): reads the whole file into memory; acceptable for a
    # debug-only view.
    f = storage.default_storage.open(normalized_path, 'rb')
    try:
        response = http.HttpResponse(f.read(), mimetype=mimetype)
    finally:
        f.close()

    response['Content-Length'] = size

    if modified_time is not None:
        response['Last-Modified'] = http_utils.http_date(modified_time)

    return response

Example 42

Project: django-api
Source File: decorators.py
View license
def api_accepts(fields):
    """
    Define the accept schema of an API (GET or POST).

    'fields' is a dict of Django form fields keyed by field name that specifies
    the form-urlencoded fields that the API accepts*.

    The view function is then called with GET/POST data that has been cleaned
    by the Django form.

    In debug and test modes, failure to validate the fields will result in a
    400 Bad Request response.
    In production mode, failure to validate will just log a
    warning, unless overwritten by a 'strict' setting.

    For example:

    @api_accepts({
        'x': forms.IntegerField(min_value=0),
        'y': forms.IntegerField(min_value=0),
    })
    def add(request, *args, **kwargs):
        x = request.POST['x']
        y = request.POST['y']

        # x and y are integers already.
        return HttpResponse('%d' % (x + y))


    *: 'fields' can also include Django models as {'key': Model()}. If present,
    api_accepts will look for the field keyed by '<key>-id'
    and pick the object that has that primary key. For example, if the entry is
    {'course': Course()}, it will search for the key course_id='course-id' in
    the request object, and find the object Course.objects.get(pk=course_id)
    """
    def decorator(func):
        @wraps(func)
        def wrapped_func(request, *args, **kwargs):
            # Only GET/POST carry form-urlencoded data; others pass through.
            if request.method not in ['GET', 'POST']:
                return func(request, *args, **kwargs)

            # The fields dict passed into the type() function is modified, so
            # send in a copy instead.
            form_class = type('ApiForm', (forms.Form,), fields.copy())
            form = form_class(getattr(request, request.method))

            if not form.is_valid():
                if settings.DEBUG:
                    return JsonResponseBadRequest(
                        'failed to validate: %s' % dict(form.errors)
                    )
                else:
                    # Production: warn but still run the view.
                    logger.warn(
                        'input to \'%s\' failed to validate: %s',
                        request.path,
                        dict(form.errors)
                    )
                    return func(request, *args, **kwargs)

            # Clean any models.Model fields, by looking up object based on
            # primary key in request.
            for (field_name, field_instance) in fields.items():
                if isinstance(field_instance, models.Model):
                    field_type = type(field_instance)
                    # TODO: irregular, should we remove?
                    field_id = '%s-id' % field_name
                    if field_id not in request.REQUEST:
                        return JsonResponseBadRequest(
                            'field %s not present' % field_name
                        )
                    # BUG FIX: a non-numeric id used to raise an uncaught
                    # ValueError (HTTP 500); reject it as a 400 instead.
                    try:
                        field_pk = int(request.REQUEST[field_id])
                    except (TypeError, ValueError):
                        return JsonResponseBadRequest(
                            'field %s must be an integer id' % field_name
                        )
                    try:
                        field_value = field_type.objects.get(pk=field_pk)
                    except field_type.DoesNotExist:
                        return JsonResponseNotFound(
                            '%s with pk=%d does not exist' % (
                                field_type, field_pk
                            )
                        )
                    form.cleaned_data[field_name] = field_value

            validated_request = ValidatedRequest(request, form)
            return func(validated_request, *args, **kwargs)
        return wrapped_func
    return decorator

Example 43

Project: django-api
Source File: decorators.py
View license
def api_returns(return_values):
    """
    Define the return schema of an API.

    'return_values' is a dictionary mapping
    HTTP return code => documentation
    In addition to validating that the status code of the response belongs to
    one of the accepted status codes, it also validates that the returned
    object is JSON (derived from JsonResponse)

    In debug and test modes, failure to validate the fields will result in a
    400 Bad Request response.
    In production mode, failure to validate will just log a
    warning, unless overwritten by a 'strict' setting.

    For example:

    @api_returns({
        200: 'Operation successful',
        403: 'User does not have permission',
        404: 'Resource or user not found',
    })
    def add(request, *args, **kwargs):
        if not request.user.is_superuser:
            return JsonResponseForbidden()  # 403

        return HttpResponse()  # 200
    """
    def decorator(func):
        @wraps(func)
        def wrapped_func(request, *args, **kwargs):
            return_value = func(request, *args, **kwargs)

            # Responses must derive from JsonResponse.  In production we only
            # warn, so a schema mistake cannot take the endpoint down.
            if not isinstance(return_value, JsonResponse):
                if settings.DEBUG:
                    return JsonResponseBadRequest('API did not return JSON')
                else:
                    logger.warning('API did not return JSON')

            # dict.keys() returns a view in Python 3 (it has no .append()),
            # so build a real set instead of mutating the view.
            # Never block 500s - these should be handled by other
            # reporting mechanisms.
            accepted_return_codes = set(return_values) | {500}

            if return_value.status_code not in accepted_return_codes:
                if settings.DEBUG:
                    return JsonResponseBadRequest(
                        'API returned %d instead of acceptable values %s' %
                        (return_value.status_code, accepted_return_codes)
                    )
                else:
                    logger.warning(
                        'API returned %d instead of acceptable values %s',
                        return_value.status_code,
                        accepted_return_codes,
                    )

            return return_value
        return wrapped_func
    return decorator

Example 44

Project: zorna
Source File: debug_middleware.py
View license
    def process_response(self, request, response):
        """Append an HTML debug panel (templates, SQL, timing) to a response.

        Only runs for successful, non-Ajax ``text/html`` responses requested
        with a ``?debug`` query parameter.  Reads state gathered earlier in
        the request cycle (``self.templates_used``, ``self.sql_offset_start``,
        ``self.time_started``) -- presumably set in ``process_request`` or a
        template signal handler; confirm in the enclosing middleware class.
        """

        # Don't bother if the url doesn't have the "debug"  query  string
        # Added by Jeff Schroeder for dynamically enabling/disabling this
        if "debug" not in request.GET:
            return response

        # Only include debug info for text/html pages not accessed via Ajax
        if 'text/html' not in response['Content-Type']:
            return response
        if request.is_ajax():
            return response
        if response.status_code != 200:
            return response

        # Pair each rendered template with the path it was loaded from.
        templates = []
        for t in self.templates_used:
            if t.origin and t.origin.name:
                templates.append((t.name, t.origin.name))
            else:
                templates.append((t.name, "no origin"))

        # Only consider queries issued after this middleware started counting.
        sql_queries = connection.queries[self.sql_offset_start:]
        # Reformat sql queries a bit
        sql_total = 0.0
        sql_counts = {}
        for query in sql_queries:
            raw_sql = query['sql']
            query['sql'] = reformat_sql(query['sql'])
            sql_total += float(query['time'])
            # Count duplicates by the *raw* SQL so reformatting can't mask them.
            count = sql_counts.get(raw_sql, 0) + 1
            sql_counts[raw_sql] = count
            if count > 1:
                query['count'] = mark_safe(
                    '<p>duplicate query count=%s</p>' % count)
            else:
                query['count'] = ''

        from django.core.urlresolvers import resolve
        view_func = resolve(request.META['PATH_INFO'])[0]

        view = '%s.%s' % (view_func.__module__, view_func.__name__)

        # Unwrap decorated views to find the underlying code object so the
        # panel can report the view's source file and first line number.
        # 'breaker' caps the unwrapping at 10 levels to avoid looping forever.
        vf = view_func
        breaker = 10
        while not hasattr(vf, 'func_code'):
            if hasattr(vf, 'view_func'):
                vf = vf.view_func
            else:
                break  # somethings wrong about the assumptions of the decorator
            breaker = breaker - 1
            if breaker < 0:
                break
        if hasattr(vf, 'func_code'):
            co = vf.func_code
            view = '- '.join([view, ':'.join(
                [co.co_filename, str(co.co_firstlineno)])])

        # Render the panel and splice it in just before the closing </body>.
        debug_content = Template(TEMPLATE).render(Context({
            'debug': settings.DEBUG,
            'server_time': time.time() - self.time_started,
            'templates': templates,
            'sql': sql_queries,
            'sql_total': sql_total,
            'num_queries': len(sql_queries),
            'template_dirs': settings.TEMPLATE_DIRS,
            'view': view
        }))

        content = response.content
        response.content = force_unicode(
            content).replace('</body>', debug_content)

        return response

Example 45

Project: zulip
Source File: runtornado.py
View license
    def handle(self, addrport, **options):
        # type: (str, **bool) -> None
        """Start the Tornado event server on the given "addr:port".

        Configures basic logging under DEBUG, registers RabbitMQ consumers
        when USING_RABBITMQ is set, then runs a single-threaded Tornado HTTP
        server for the notification/event endpoints until interrupted.
        """
        interactive_debug_listen()

        import django
        from tornado import httpserver, web

        # Accept either "host:port" or a bare port number.
        try:
            addr, port = addrport.split(':')
        except ValueError:
            addr, port = '', addrport

        if not addr:
            addr = '127.0.0.1'

        if not port.isdigit():
            raise CommandError("%r is not a valid port number." % (port,))

        xheaders = options.get('xheaders', True)
        no_keep_alive = options.get('no_keep_alive', False)
        quit_command = 'CTRL-C'

        if settings.DEBUG:
            logging.basicConfig(level=logging.INFO,
                format='%(asctime)s %(levelname)-8s %(message)s')

        def inner_run():
            # type: () -> None
            # Runs the actual server; a closure over addr/port/options above.
            from django.conf import settings
            from django.utils import translation
            translation.activate(settings.LANGUAGE_CODE)

            print("Validating Django models.py...")
            self.validate(display_num_errors=True)
            print("\nDjango version %s" % (django.get_version()))
            print("Tornado server is running at http://%s:%s/" % (addr, port))
            print("Quit the server with %s." % (quit_command,))

            if settings.USING_RABBITMQ:
                queue_client = get_queue_client()
                # Process notifications received via RabbitMQ
                queue_client.register_json_consumer('notify_tornado', process_notification)
                queue_client.register_json_consumer('tornado_return', respond_send_message)

            try:
                urls = (r"/notify_tornado",
                        r"/json/events",
                        r"/api/v1/events",
                        )

                # Application is an instance of Django's standard wsgi handler.
                application = web.Application([(url, AsyncDjangoHandler) for url in urls]
                                              + get_sockjs_router().urls,
                                                debug=django.conf.settings.DEBUG,
                                              # Disable Tornado's own request logging, since we have our own
                                              log_function=lambda x: None)

                # start tornado web server in single-threaded mode
                http_server = httpserver.HTTPServer(application,
                                                    xheaders=xheaders,
                                                    no_keep_alive=no_keep_alive)
                http_server.listen(int(port), address=addr)

                setup_event_queue()
                add_client_gc_hook(missedmessage_hook)
                setup_tornado_rabbitmq()

                instance = ioloop.IOLoop.instance()

                # In DEBUG, log IOLoop callbacks blocking longer than 5s and
                # route callback exceptions through our own handler.
                if django.conf.settings.DEBUG:
                    instance.set_blocking_log_threshold(5)
                    instance.handle_callback_exception=handle_callback_exception
                instance.start()
            except KeyboardInterrupt:
                sys.exit(0)

        inner_run()

Example 46

Project: django-scribbler
Source File: scribbler_tags.py
View license
    def render(self, context):
        """Render a scribble plus, for privileged users, its editing controls.

        Resolves the scribble by (slug, url) -- first from the cache, then
        from the database -- falling back to the tag's raw default content
        when no saved scribble exists.  Requires 'request' in the template
        context: without it this raises ImproperlyConfigured under DEBUG and
        silently renders nothing in production.
        """
        slug = self.slug.resolve(context)
        request = context.get('request', None)
        if request is None:  # pragma: no cover
            if settings.DEBUG:
                msg = '"django.core.context_processors.request" is required to use django-scribbler'
                raise ImproperlyConfigured(msg)
            else:
                return ''
        if self.url:
            url = self.url.resolve(context)
        else:
            url = request.path
        # Cache keyed on (slug, url); a miss falls through to the database.
        key = CACHE_KEY_FUNCTION(slug=slug, url=url)
        scribble = cache.get(key, None)
        if scribble is None:
            try:
                scribble = Scribble.objects.get(slug=slug, url=url)
            except Scribble.DoesNotExist:
                # Unsaved placeholder carrying the tag's default content.
                scribble = Scribble(slug=slug, url=url, content=self.raw)
            if CACHE_TIMEOUT:
                cache.set(key, scribble, CACHE_TIMEOUT)
        # 'template.engines' exists on newer Django; otherwise fall back to
        # the old Template() API.  Saved scribbles (with a pk) render their
        # stored content; unsaved ones render the raw tag body.
        if scribble.pk:
            if hasattr(template, 'engines'):
                scribble_template = template.engines['django'].from_string(scribble.content)
            else:
                scribble_template = template.Template(scribble.content)
        else:
            scribble.content = self.raw
            if hasattr(template, 'engines'):
                scribble_template = template.engines['django'].from_string(self.raw)
            else:
                scribble_template = template.Template(self.raw)
        scribble_context = build_scribble_context(scribble)
        content = scribble_template.render(scribble_context, request)
        wrapper_template = template.loader.get_template('scribbler/scribble-wrapper.html')
        context['scribble'] = scribble
        context['rendered_scribble'] = content
        user = context.get('user', None)
        # Decide which inline controls the current user may see.
        show_controls = False
        can_edit = False
        can_add = False
        can_delete = False
        if user:
            can_edit = scribble.pk and user.has_perm('scribbler.change_scribble')
            can_add = (not scribble.pk) and user.has_perm('scribbler.add_scribble')
            can_delete = scribble.pk and user.has_perm('scribbler.delete_scribble')
        show_controls = can_edit or can_add or can_delete
        if can_edit or can_add:
            context['scribble_form'] = ScribbleForm(instance=scribble, prefix=slug)
        context['show_controls'] = show_controls
        context['can_add_scribble'] = can_add
        context['can_edit_scribble'] = can_edit
        context['can_delete_scribble'] = can_delete
        context['raw_content'] = self.raw
        # render() takes a dict, so we have to extract the context dict from the object
        context_data = context.dicts[-1]
        return wrapper_template.render(context_data, request)

Example 47

Project: canvas
Source File: fact.py
View license
def record(fact_type, request_or_user, info):
    """Record an analytics "fact" by shipping it to the fact host.

    NOTE(review): the bare ``return`` below disables this function entirely;
    everything after it is dead code.  Presumably fact recording was turned
    off deliberately -- confirm before re-enabling.
    """
    return

    # ----- unreachable below: kept for when recording is re-enabled -----

    # Common metadata attached to every fact.
    extra_info = { 'type': fact_type, 'ts': time.time()}

    # Accept either a request (user taken from request.user) or a bare user.
    if hasattr(request_or_user, 'user'):
        request = request_or_user
        user = request.user
    else:
        request = None
        user = request_or_user

    if request:
        extra_info['session_key'] = request.session.session_key
        extra_info['ip'] = request.META.get('REMOTE_ADDR')
        extra_info['utma'] = request.COOKIES.get('__utma')

        if not hasattr(request, "experiments"):
            request.experiments = create_experiments_for_request(request)
            logging.debug("Request did not have experiments ... recreating ...")

        experiments = request.experiments
    elif user:
        experiments = user.redis.experiments
    else:
        raise Exception("request_or_user is required.")

    if user.is_authenticated():
        extra_info['user'] = user.id

    # Record which experiment branches the actor is currently in.
    if experiments:
        experiments_mapping = dict((experiment.name, branch.name) for experiment, branch in experiments.get_all_current_branches())
        if experiments_mapping:
            extra_info["experiments"] = experiments_mapping

    info = dict(info, **extra_info)

    if settings.DEBUG:
        debug_fact_channel().publish(info)

    # Ship the fact out-of-band so the request isn't blocked on the HTTP call.
    @bgwork.defer
    def make_request():
        try:
            req = urllib2.Request("http://%s/" % settings.FACT_HOST, headers={'X-Fact-Log': util.backend_dumps(info)})
            urllib2.urlopen(req, timeout=3)
        except IOError:
            from canvas.models import Metrics
            Metrics.fact_record_fail.record(request, record_fact=False)
Example 48

Project: cdr-stats
Source File: dashboard.py
View license
    def init_with_context(self, context):
        """Build the admin dashboard: a tabbed "General" group, one AppList
        per application family, a quick-links module and, outside DEBUG, a
        news feed.
        """
        # we want a 3 columns layout
        self.columns = 3
        admin_site = get_admin_site_name(context)

        # self.children.append(
        #            HistoryDashboardModule()
        # )

        append = self.children.append

        append(modules.Group(
            title="General",
            display="tabs",
            children=[
                modules.AppList(
                    title=_('user').capitalize(),
                    models=('django.contrib.*', 'user_profile.*'),
                ),
                modules.AppList(
                    _('task manager').title(),
                    models=('djcelery.*', ),
                ),
                modules.RecentActions(_('Recent Actions'), 5),
            ]
        ))

        # One AppList per application family, in fixed display order.
        for title, pattern in (
                (_('CDR Voip'), 'cdr.*'),
                (_('alert').title(), 'cdr_alert.*'),
                (_('country dialcode').title(), 'country_dialcode.*'),
                (_('Voip gateway').title(), 'voip_gateway.*'),
                (_('Voip billing').title(), 'voip_billing.*'),
                (_('Switch').title(), 'switch.*'),
        ):
            append(modules.AppList(title, models=(pattern, )))

        # append a link list module for "quick links"
        append(modules.LinkList(
            _('quick links').capitalize(),
            layout='inline',
            draggable=True,
            deletable=True,
            collapsible=True,
            children=[
                [_('Go to CDR-Stats.org'), 'http://www.cdr-stats.org/'],
                [_('change password').capitalize(),
                 reverse('%s:password_change' % admin_site)],
                [_('log out').capitalize(), reverse('%s:logout' % admin_site)],
            ]
        ))

        # append a feed module (skipped in DEBUG to avoid external fetches)
        if not settings.DEBUG:
            append(modules.Feed(
                _('Latest CDR-Stats News'),
                feed_url='http://www.cdr-stats.org/category/blog/feed/',
                limit=5
            ))

Example 49

Project: regulations-core
Source File: import_reg.py
View license
    def handle(self, *args, **options):
        """Import regulation JSON stubs into the API via the regcore_write views.

        Either a whole regulation ('regulation' option) or an explicit list
        of JSON files ('files' option) may be supplied, but not both.  Each
        file path under 'stub_base' encodes its type
        (regulation/notice/layer/diff) and version segments.

        Raises CommandError when neither, or both, of 'regulation' and
        'files' are given.
        """

        print(options)

        stub_base = options['stub_base']

        if options['regulation'] is None and options['files'] == []:
            raise CommandError('Must supply either a regulation to import or a specific JSON file.')

        elif options['regulation'] is not None and options['files'] != []:
            raise CommandError('Cannot specify both regulation and files at the same time.')

        elif options['regulation'] is None and options['files'] != []:
            files = [os.path.join(stub_base, f) for f in options['files']]

        elif options['regulation'] is not None and options['files'] == []:
            reg = options['regulation']
            files = self.find_regulation_files(stub_base, reg)

        # the request dummy is meant to fool the regcore_write api into thinking that
        # this is a request object

        class RequestDummy:
            def __init__(self):
                self.body = ''

        for f in files:
            # Use a context manager so the handle is closed promptly (the
            # original `json.load(open(f, 'r'))` leaked one handle per file).
            with open(f, 'r') as json_file:
                data = json.dumps(json.load(json_file))
            request = RequestDummy()
            request.body = data

            # Path segments relative to stub_base: type/label-or-version/...
            filename_data = f.replace(stub_base + '/', '').split('/')
            file_type = filename_data[0]

            logger.info('importing {}'.format(os.path.join(*filename_data)))

            if file_type == 'regulation':
                label = filename_data[1]
                version = filename_data[2]
                regulation.add(request, label, version)
            elif file_type == 'notice':
                # NOTE(review): this reads the 'regulation' option, which is
                # None when importing via explicit 'files' -- confirm notices
                # are only ever imported together with --regulation.
                part = options['regulation']
                version = filename_data[1]
                # Handle new-style per-part notices
                if version == part:
                    version = filename_data[2]
                notice.add(request, part, version)
            elif file_type == 'layer':
                layer_type = filename_data[1]
                label = filename_data[2]
                version = filename_data[3]
                layer.add(request, layer_type, label, version)
            elif file_type == 'diff':
                label = filename_data[1]
                old_version = filename_data[2]
                new_version = filename_data[3]
                diff.add(request, label, old_version, new_version)

            if settings.DEBUG:
                # If DEBUG is enabled, memory usage will increase linearly
                # for each JSON file imported. This should clean it up.
                # https://docs.djangoproject.com/en/1.6/faq/models/#why-is-django-leaking-memory
                db.reset_queries()

Example 50

Project: django-soap-server
Source File: wsgi.py
View license
def embed_wsgi(application):
    """Wrap a WSGI application as a Django view callable.

    The returned view optionally enforces HTTPS (SOAP_SERVER_HTTPS, on by
    default, skipped under DEBUG) and HTTP basic auth (when
    SOAP_SERVER_BASICAUTH_REALM is set), then drives the WSGI app with the
    request's environ and copies its status, headers and body into a Django
    HttpResponse.
    """

    application = load_application(application)

    def view(request, *args, **kwargs):
        response = HttpResponse()

        # Enforce HTTPS if required
        if getattr(settings, 'SOAP_SERVER_HTTPS', True): #pragma: nocover
            if not request.is_secure() and not settings.DEBUG: # pragma: nocover 
                response.status_code = 400 # Bad request - we need https
                return response
        
        # Enforce basic auth
        if hasattr(settings, 'SOAP_SERVER_BASICAUTH_REALM'):
            if not 'HTTP_AUTHORIZATION' in request.environ:
                realm = getattr(settings, 'SOAP_SERVER_BASICAUTH_REALM', 'Webservice')
                response.status_code = 401 # Request auth
                response['WWW-Authenticate'] = 'Basic realm="%s"' % realm
                return response

        # request.environ and request.META are the same object, so changes
        # to the headers by middlewares will be seen here.
        environ = request.environ.copy()
        # A positional arg, when present, shifts the path so the embedded
        # app sees paths relative to its mount point.
        if len(args) > 0:
            shift_path(environ, '/' + args[0])
        # Django converts SCRIPT_NAME and PATH_INFO to unicode in WSGIRequest.
        environ['SCRIPT_NAME'] = environ.get('SCRIPT_NAME', '').encode('iso-8859-1')
        environ['PATH_INFO'] = environ.get('PATH_INFO', '').encode('iso-8859-1')

        # PEP 3333 bookkeeping: headers_set holds whatever start_response
        # provided; headers_sent flags that they were flushed to `response`.
        headers_set = []
        headers_sent = []

        def write(data):
            # Legacy WSGI write() callable: flush headers on first output.
            if not headers_set:
                raise AssertionError("write() called before start_response()")
            if not headers_sent:
                # Send headers before the first output.
                for k, v in headers_set:
                    response[k] = v
                headers_sent[:] = [True]
            response.write(data)
            # We could call response.flush() here, but is actually a no-op.

        def start_response(status, headers, exc_info=None):
            # Let Django handle all errors.
            if exc_info:
                raise exc_info[1].with_traceback(exc_info[2])
            if headers_set:
                raise AssertionError("start_response() called again "
                                     "without exc_info")
            response.status_code = int(status.split(' ', 1)[0])
            headers_set[:] = headers
            # Django provides no way to set the reason phrase (#12747).
            return write

        result = application(environ, start_response)
        try:
            for data in result:
                if data:
                    write(data)
            if not headers_sent:
                # Force the headers out even when the body is empty.
                write('')
        finally:
            if hasattr(result, 'close'):
                result.close()

        return response

    return view