django.db.transaction.atomic

Here are examples of the Python API django.db.transaction.atomic taken from open-source projects. By voting up, you can indicate which examples are most useful and appropriate.

173 Examples

Example 1

Project: pretix Source File: invoices.py
@transaction.atomic
def build_invoice(invoice: Invoice) -> Invoice:
    """Fill in and persist *invoice* from its order and event settings.

    Resolves the order's payment provider, renders the localized invoice
    texts, rebuilds all invoice lines from the order positions plus the
    payment fee, and returns the updated invoice.  Everything runs in a
    single database transaction.

    :param invoice: the Invoice instance to (re)build; must reference an
        order and an event.
    :return: the same invoice instance, saved.
    :raises ValueError: if no registered payment provider matches the
        order's payment provider identifier.
    """
    with language(invoice.locale):
        # Look up the payment provider plugin responsible for this order.
        payment_provider = None
        responses = register_payment_providers.send(invoice.event)
        for receiver, response in responses:
            provider = response(invoice.event)
            if provider.identifier == invoice.order.payment_provider:
                payment_provider = provider
                break
        if payment_provider is None:
            # Fail early with a clear message; previously a missing match
            # fell through to an UnboundLocalError further down.
            raise ValueError(
                'No payment provider found for identifier %r'
                % invoice.order.payment_provider
            )

        invoice.invoice_from = invoice.event.settings.get('invoice_address_from')

        introductory = invoice.event.settings.get('invoice_introductory_text', as_type=LazyI18nString)
        additional = invoice.event.settings.get('invoice_additional_text', as_type=LazyI18nString)
        footer = invoice.event.settings.get('invoice_footer_text', as_type=LazyI18nString)
        payment = payment_provider.render_invoice_text(invoice.order)

        # Newlines become <br /> because these texts are rendered as HTML.
        invoice.introductory_text = str(introductory).replace('\n', '<br />')
        invoice.additional_text = str(additional).replace('\n', '<br />')
        invoice.footer_text = str(footer)
        invoice.payment_provider_text = str(payment).replace('\n', '<br />')

        try:
            addr_template = pgettext("invoice", """{i.company}
{i.name}
{i.street}
{i.zipcode} {i.city}
{i.country}""")
            invoice.invoice_to = addr_template.format(i=invoice.order.invoice_address).strip()
            if invoice.order.invoice_address.vat_id:
                invoice.invoice_to += "\n" + pgettext("invoice", "VAT-ID: %s") % invoice.order.invoice_address.vat_id
        except InvoiceAddress.DoesNotExist:
            # Orders without an invoice address get an empty recipient.
            invoice.invoice_to = ""

        # Drop any previously rendered file and rebuild the lines from
        # scratch so that re-generation is idempotent.
        invoice.file = None
        invoice.save()
        invoice.lines.all().delete()

        for p in invoice.order.positions.all():
            desc = str(p.item.name)
            if p.variation:
                desc += " - " + str(p.variation.value)
            InvoiceLine.objects.create(
                invoice=invoice, description=desc,
                gross_value=p.price, tax_value=p.tax_value,
                tax_rate=p.tax_rate
            )

        if invoice.order.payment_fee:
            InvoiceLine.objects.create(
                invoice=invoice, description=_('Payment via {method}').format(method=str(payment_provider.verbose_name)),
                gross_value=invoice.order.payment_fee, tax_value=invoice.order.payment_fee_tax_value,
                tax_rate=invoice.order.payment_fee_tax_rate
            )

        return invoice

Example 2

Project: rockstor-core Source File: rockon.py
    @transaction.atomic
    def _create_update_meta(self, name, r_d):
        """Create or update the RockOn db record *name* from the parsed
        rock-on definition *r_d* (one entry of a hosted app.json profile).

        Synchronises the containers, ports, volumes, container options and
        container links stored in the database with the definition.
        Attributes that cannot change while the Rock-on is installed are
        rejected via handle_exception.  Runs in a single transaction so a
        failed update leaves the previous metadata intact.
        """
        #Update our application state with any changes from hosted app
        #profiles(app.json files). Some attributes cannot be updated
        #if the Rock-on is currently installed. These will be logged and
        #ignored.
        ro_defaults = {'description': r_d['description'],
                       'website': r_d['website'],
                       'version': r_d['version'],
                       'state': 'available',
                       'status': 'stopped'}
        ro, created = RockOn.objects.get_or_create(name=name,
                                                   defaults=ro_defaults)
        if (not created):
            # Existing record: refresh the always-updatable attributes.
            ro.description = ro_defaults['description']
            ro.website = ro_defaults['website']
            ro.version = ro_defaults['version']
        if ('ui' in r_d):
            ui_d = r_d['ui']
            ro.link = ui_d['slug']
            if ('https' in ui_d):
                ro.https = ui_d['https']
        if ('icon' in r_d):
            ro.icon = r_d['icon']
        if ('volume_add_support' in r_d):
            ro.volume_add_support = r_d['volume_add_support']
        if ('more_info' in r_d):
            ro.more_info = r_d['more_info']
        ro.save()

        # Container definitions: if the set of container names changed,
        # wipe and recreate them -- but only for a not-installed Rock-on.
        containers = r_d['containers']
        cur_containers = [co.name for co in
                          DContainer.objects.filter(rockon=ro)]
        if (len(set(containers.keys()) ^ set(cur_containers)) != 0):
            if (ro.state not in ('available', 'install_failed')):
                e_msg = ('Cannot add/remove container definitions for %s as '
                         'it is not in available state. Uninstall the '
                         'Rock-on first and try again.' % ro.name)
                handle_exception(Exception(e_msg), self.request)
            #rock-on is in available state. we can safely wipe metadata
            #and start fresh.
            DContainer.objects.filter(rockon=ro).delete()

        for c in containers:
            c_d = containers[c]
            co = None
            if (DContainer.objects.filter(name=c).exists()):
                co = DContainer.objects.get(name=c)
                # Container names are global: the same name in two
                # Rock-ons is a conflict.
                if (co.rockon.id != ro.id):
                    e_msg = ('Duplicate container(%s) definition detected. '
                             'It belongs to another Rock-on(%s). Uninstall '
                             'one of them and try again.' % (co.name, co.rockon.name))
                    handle_exception(Exception(e_msg), self.request)

                if (co.dimage.name != c_d['image']):
                    if (ro.state not in ('available', 'install_failed')):
                        e_msg = ('Cannot change image of the container(%s) '
                                 'as it belongs to an installed Rock-on(%s). '
                                 'Uninstall it first and try again.' %
                                 (co.name, ro.name))
                        handle_exception(Exception(e_msg), self.request)
                    co.dimage.delete()
            if (co is None):
                co = DContainer(name=c, rockon=ro)
            defaults = {'tag': c_d.get('tag', 'latest'),
                        'repo': 'na',}
            io, created = DImage.objects.get_or_create(name=c_d['image'],
                                                       defaults=defaults)
            co.dimage = io
            co.launch_order = c_d['launch_order']
            if ('uid' in c_d):
                co.uid = int(c_d['uid'])
            co.save()

            # Port mappings: same wipe-and-recreate strategy as containers
            # when the set of container ports changed.
            ports = containers[c].get('ports', {})
            cur_ports = [po.containerp for po in
                         DPort.objects.filter(container=co)]
            if (len(set(map(int, ports.keys())) ^ set(cur_ports)) != 0):
                if (ro.state not in ('available', 'install_failed')):
                    e_msg = ('Cannot add/remove port definitions of the '
                             'container(%s) as it belongs to an installed '
                             'Rock-on(%s). Uninstall it first and try again.' %
                             (co.name, ro.name))
                    handle_exception(Exception(e_msg), self.request)
                DPort.objects.filter(container=co).delete()

            for p in ports:
                p_d = ports[p]
                if ('protocol' not in p_d):
                    p_d['protocol'] = None
                # JSON keys are strings; ports are stored as integers.
                p = int(p)
                po = None
                if (DPort.objects.filter(containerp=p, container=co).exists()):
                    po = DPort.objects.get(containerp=p, container=co)
                    if (po.hostp_default != p_d['host_default']):
                        po.hostp_default = self._next_available_default_hostp(p_d['host_default'])
                    po.description = p_d['description']
                    po.protocol = p_d['protocol']
                    po.label = p_d['label']
                else:
                    #let's find next available default if default is already taken
                    def_hostp = self._next_available_default_hostp(p_d['host_default'])
                    po = DPort(description=p_d['description'],
                               hostp=def_hostp, containerp=p,
                               hostp_default=def_hostp,
                               container=co,
                               protocol=p_d['protocol'],
                               label=p_d['label'])
                if ('ui' in p_d):
                    po.uiport = p_d['ui']
                if (po.uiport):
                    # Any UI port marks the whole Rock-on as having a UI.
                    ro.ui = True
                    ro.save()
                po.save()

            v_d = c_d.get('volumes', {})
            cur_vols = [vo.dest_dir for vo in
                        DVolume.objects.filter(container=co)]
            # cur_vols can have entries not in the config for Shares mapped post
            # install.
            # If we have more volumes defined in the rock-on definition than
            # we have previously seen for this rockon, ie volumes added in newer
            # definition, then remove our existing volumes record.
            if (len(set(v_d.keys()) - set(cur_vols)) != 0):
                # but only if the current state is 'available' (to install) or
                # 'install failed', otherwise raise warning about changing an
                # installed rock-ons.
                if (ro.state not in ('available', 'install_failed')):
                    e_msg = ('Cannot add/remove volume definitions of the '
                             'container(%s) as it belongs to an installed '
                             'Rock-on(%s). Uninstall it first and try again.' %
                             (co.name, ro.name))
                    handle_exception(Exception(e_msg), self.request)
                # Delete all volume entries for this container so that they
                # might be created a fresh.
                DVolume.objects.filter(container=co).delete()
            # If the existing rock-on db entry indicates this container is not
            # installed ie state available or install_failed then check if we
            # need to remove any now deprecated volume entries.
            # Ie updated config that has removed a previously seen volume.
            if (ro.state in ('available', 'install_failed')):
                if (len(set(cur_vols) - set(v_d.keys())) > 0):
                    # we have some current volumes in db that are no longer in
                    # our updated rock-on definition so remove all volumes for
                    # this rock-on so they might be updated whole sale.
                    # Delete all volume entries for this container so that they
                    # might be created a fresh.
                    DVolume.objects.filter(container=co).delete()

            # Cycle through all volumes in the rock-on json definition and
            # update or create the appropriate db volumes entry.
            for v in v_d:
                cv_d = v_d[v]
                vo_defaults = {'description': cv_d['description'],
                               'label': cv_d['label']}

                vo, created = DVolume.objects.get_or_create(dest_dir=v, container=co,
                                                            defaults=vo_defaults)
                # If this db entry previously existed then update it's
                # description and label to that found in our rock-on json
                # This ensures changes made in repo json to the description and
                # label's get updated in the local db.
                if (not created):
                    vo.description = vo_defaults['description']
                    vo.label = vo_defaults['label']
                if ('min_size' in cv_d):
                    vo.min_size = cv_d['min_size']
                vo.save()

            self._update_env(co, c_d)
            # Container options: sync db rows to exactly the list in the
            # definition, removing any that are no longer present.
            options = containers[c].get('opts', [])
            id_l = []
            for o in options:
                #there are no unique constraints on this model, so we need this bandaid.
                if (ContainerOption.objects.filter(container=co, name=o[0], val=o[1]).count() > 1):
                    ContainerOption.objects.filter(container=co, name=o[0], val=o[1]).delete()
                oo, created = ContainerOption.objects.get_or_create(container=co,
                                                                    name=o[0],
                                                                    val=o[1])
                id_l.append(oo.id)
            for oo in ContainerOption.objects.filter(container=co):
                if (oo.id not in id_l):
                    oo.delete()

        # Container links: drop links whose source is gone, then create or
        # update the ones in the definition.
        l_d = r_d.get('container_links', {})
        for cname in l_d:
            ll = l_d[cname]
            lsources = [l['source_container'] for l in ll]
            co = DContainer.objects.get(rockon=ro, name=cname)
            for clo in co.destination_container.all():
                if (clo.name not in lsources):
                    clo.delete()
            for cl_d in ll:
                sco = DContainer.objects.get(rockon=ro, name=cl_d['source_container'])
                clo, created = DContainerLink.objects.get_or_create(source=sco,
                                                                    destination=co)
                clo.name = cl_d['name']
                clo.save()
        self._update_cc(ro, r_d)

Example 3

Project: djangobb Source File: views.py
@transaction.atomic
def show_topic(request, topic_id, full=True):
    """
    * Display a topic
    * save a reply
    * save a poll vote

    Renders the full topic template when `full` is True, the "lofi"
    template otherwise.  POST requests from anonymous users are
    rejected with PermissionDenied.

    TODO: Add reply in lofi mode
    """
    post_request = request.method == "POST"
    user_is_authenticated = request.user.is_authenticated()
    if post_request and not user_is_authenticated:
        # Info: only user that are logged in should get forms in the page.
        raise PermissionDenied

    topic = get_object_or_404(Topic.objects.select_related(), pk=topic_id)
    if not topic.forum.category.has_access(request.user):
        raise PermissionDenied
    # F() keeps the view-count increment atomic at the database level.
    Topic.objects.filter(pk=topic.id).update(views=F('views') + 1)

    last_post = topic.last_post

    # Reuse the cached authentication check instead of calling
    # request.user.is_authenticated() a second time.
    if user_is_authenticated:
        topic.update_read(request.user)
    posts = topic.posts.all().select_related()

    moderator = request.user.is_superuser or request.user in topic.forum.moderators.all()
    if user_is_authenticated and request.user in topic.subscribers.all():
        subscribed = True
    else:
        subscribed = False

    # reply form
    reply_form = None
    form_url = None
    back_url = None
    if user_is_authenticated and not topic.closed:
        form_url = request.path + "#reply" # if form validation failed: browser should scroll down to reply form ;)
        back_url = request.path
        ip = request.META.get('REMOTE_ADDR', None)
        post_form_kwargs = {"topic":topic, "user":request.user, "ip":ip}
        if post_request and AddPostForm.FORM_NAME in request.POST:
            reply_form = AddPostForm(request.POST, request.FILES, **post_form_kwargs)
            if reply_form.is_valid():
                post = reply_form.save()
                messages.success(request, _("Your reply saved."))
                return HttpResponseRedirect(post.get_absolute_url())
        else:
            reply_form = AddPostForm(
                initial={
                    'markup': request.user.forum_profile.markup,
                    'subscribe': request.user.forum_profile.auto_subscribe,
                },
                **post_form_kwargs
            )

    # handle poll, if exists
    poll_form = None
    polls = topic.poll_set.all()
    if not polls:
        poll = None
    else:
        poll = polls[0]
        if user_is_authenticated: # Only logged in users can vote
            poll.deactivate_if_expired()
            has_voted = request.user in poll.users.all()
            if not post_request or VotePollForm.FORM_NAME not in request.POST:
                # It's not a POST request or: The reply form was send and not a poll vote
                if poll.active and not has_voted:
                    poll_form = VotePollForm(poll)
            else:
                if not poll.active:
                    messages.error(request, _("This poll is not active!"))
                    return HttpResponseRedirect(topic.get_absolute_url())
                elif has_voted:
                    messages.error(request, _("You have already vote to this poll in the past!"))
                    return HttpResponseRedirect(topic.get_absolute_url())

                poll_form = VotePollForm(poll, request.POST)
                if poll_form.is_valid():
                    ids = poll_form.cleaned_data["choice"]
                    queryset = poll.choices.filter(id__in=ids)
                    queryset.update(votes=F('votes') + 1)
                    poll.users.add(request.user) # save that this user has vote
                    messages.success(request, _("Your votes are saved."))
                    return HttpResponseRedirect(topic.get_absolute_url())

    highlight_word = request.GET.get('hl', '')
    view_data = {
        'categories': Category.objects.all(),
        'topic': topic,
        'posts_page': get_page(posts, request, forum_settings.TOPIC_PAGE_SIZE),
        'poll': poll,
        'poll_form': poll_form,
    }
    if full:
        view_data.update({
            'last_post': last_post,
            'form_url': form_url,
            'reply_form': reply_form,
            'back_url': back_url,
            'moderator': moderator,
            'subscribed': subscribed,
            'highlight_word': highlight_word,
        })
        return render(request, 'djangobb_forum/topic.html', view_data)
    else:
        return render(request, 'djangobb_forum/lofi/topic.html', view_data)

Example 4

Project: sublimall-server Source File: views.py
    @transaction.atomic
    def post(self, request, *args, **kwargs):
        """Receive a package upload and store it for the member.

        Validates credentials and the required upload fields, saves the
        new package, and removes the member's previous packages for the
        same editor version.  Responds with a JSON body in all cases.
        """
        uploads = request.FILES
        email = uploads.get('email')
        api_key = uploads.get('api_key')
        version = uploads.get('version')
        platform = uploads.get('platform')
        arch = uploads.get('arch')
        package_file = uploads.get('package')
        package_size = None

        # Every field arrives as an uploaded file; read its content.
        if email:
            email = email.read()
        if api_key:
            api_key = api_key.read()
        if version:
            version = version.read()
        if platform:
            platform = platform.read()
        if arch:
            arch = arch.read()
        if package_file:
            # Seeking to the end yields the offset, i.e. the file size.
            package_size = package_file.seek(0, 2)

        # Collect all missing-field errors before rejecting the request.
        errors = []
        if not email:
            errors.append('Email is mandatory.')
        if not api_key:
            errors.append('API key is mandatory.')
        if not version:
            errors.append('Version is mandatory.')
        if not package_size:
            errors.append('Package is mandatory.')
        if errors:
            return HttpResponseBadRequest(
                json.dumps({'success': False, 'errors': errors}))

        member = self.get_member(email, api_key)
        if member is None:
            return HttpResponseForbidden(
                json.dumps({'success': False, 'errors': ['Bad credentials.']}))
        if not member.is_active:
            return HttpResponseForbidden(
                json.dumps(
                    {'success': False, 'errors': ['Account not active.']}))

        # Both a non-numeric value and an out-of-range one get the same
        # rejection response.
        try:
            version = int(version)
        except ValueError:
            version = None
        if version not in (2, 3):
            return HttpResponseBadRequest(json.dumps({
                'success': False,
                'errors': ['Bad version. Must be 2 or 3.']}))

        package = Package(
            member=member,
            version=version,
            platform=platform,
            arch=arch,
            package=package_file)

        try:
            package.full_clean()
        except ValidationError as err:
            return HttpResponseBadRequest(
                json.dumps({'success': False, 'errors': err.messages}))

        package.save()
        # Cleanup old packages
        for old_package in member.package_set.filter(
                version=version).exclude(pk=package.pk):
            old_package.delete()
        return HttpResponse(json.dumps({'success': True}), status=201)

Example 5

Project: shuup Source File: factories.py
def create_random_order(customer=None, products=(), completion_probability=0, shop=None):
    """Create and save a plausible random order, for test/demo data.

    :param customer: contact to order as; a random existing contact when None.
    :param products: products to pick lines from; when empty, up to 40
        random products listed in the shop are used.
    :param completion_probability: chance in [0, 1] that the order is also
        shipped and marked complete.
    :param shop: shop to order from; the default shop when None.
    :return: the created order.
    :raises ValueError: if no customer was given and none exists.
    """
    if not customer:
        customer = Contact.objects.all().order_by("?").first()

    if not customer:
        raise ValueError("No valid contacts")

    if shop is None:
        shop = get_default_shop()

    pricing_context = _get_pricing_context(shop, customer)

    source = OrderSource(shop)
    source.customer = customer
    source.customer_comment = "Mock Order"

    if customer.default_billing_address and customer.default_shipping_address:
        source.billing_address = customer.default_billing_address
        source.shipping_address = customer.default_shipping_address
    else:
        source.billing_address = create_random_address()
        source.shipping_address = create_random_address()
    # Back-date the order by up to 400 days for realistic-looking history.
    source.order_date = now() - datetime.timedelta(days=random.uniform(0, 400))

    source.language = customer.language
    source.status = get_initial_order_status()

    if not products:
        products = list(Product.objects.listed(source.shop, customer).order_by("?")[:40])

    # Add 3-10 random product lines; the loop index itself is unused.
    for _ in range(random.randint(3, 10)):
        product = random.choice(products)
        quantity = random.randint(1, 5)
        price_info = product.get_price_info(pricing_context, quantity=quantity)
        shop_product = product.get_shop_instance(source.shop)
        supplier = shop_product.suppliers.first()
        line = source.add_line(
            type=OrderLineType.PRODUCT,
            product=product,
            supplier=supplier,
            quantity=quantity,
            base_unit_price=price_info.base_unit_price,
            discount_amount=price_info.discount_amount,
            sku=product.sku,
            text=product.safe_translation_getter("name", any_language=True)
        )
        # Sanity check: the created line must agree with the pricing info.
        assert line.price == price_info.price
    with atomic():
        oc = OrderCreator()
        order = oc.create_order(source)
        if random.random() < completion_probability:
            order.create_shipment_of_all_products()
            # also set complete
            order.status = OrderStatus.objects.get_default_complete()
            order.save(update_fields=("status",))
        return order

Example 6

Project: rockstor-core Source File: active_directory.py
    @transaction.atomic
    def post(self, request, command):
        """Handle an Active Directory service command: 'config', 'start'
        or 'stop'.

        'config' validates and saves the AD configuration (domain name
        resolution, realm discovery, idmap range).  'start' joins the
        domain and (for winbind) reconfigures authconfig and restarts the
        relevant services.  'stop' leaves the domain and restores the smb
        configuration.

        NOTE(review): Python 2 syntax (``except Exception, e``) -- this
        module predates Python 3.
        """

        with self._handle_exception(request):
            # Only the winbind method is currently exercised below.
            method = 'winbind'
            service = Service.objects.get(name='active-directory')
            if (command == 'config'):
                config = request.data.get('config')
                self._validate_config(config, request)

                #1. Name resolution check
                self._resolve_check(config.get('domain'), request)

                #2. realm discover check?
                #@todo: phase our realm and just use net?
                domain = config.get('domain')
                try:
                    cmd = ['realm', 'discover', '--name-only', domain]
                    o, e, rc = run_command(cmd)
                except Exception, e:
                    e_msg = ('Failed to discover the given(%s) AD domain. '
                             'Error: %s' % (domain, e.__str__()))
                    handle_exception(Exception(e_msg), request)

                # NOTE(review): the default range literal is duplicated in
                # the get() call below instead of using default_range --
                # keep the two in sync if the default ever changes.
                default_range = '10000 - 999999'
                idmap_range = config.get('idmap_range', '10000 - 999999')
                idmap_range = idmap_range.strip()
                if (len(idmap_range) > 0):
                    # Expected format: "<low> - <high>", i.e. three
                    # whitespace-separated fields.
                    rfields = idmap_range.split()
                    if (len(rfields) != 3):
                        raise Exception('Invalid idmap range. valid format is '
                                        'two integers separated by a -. eg: '
                                        '10000 - 999999')
                    try:
                        rlow = int(rfields[0].strip())
                        rhigh = int(rfields[2].strip())
                    except Exception, e:
                        raise Exception('Invalid idmap range. Numbers in the '
                                        'range must be valid integers. '
                                        'Error: %s.' % e.__str__())
                    if (rlow >= rhigh):
                        raise Exception('Invalid idmap range. Numbers in the '
                                        'range must go from low to high. eg: '
                                        '10000 - 999999')
                else:
                    config['idmap_range'] = default_range

                self._save_config(service, config)

            elif (command == 'start'):
                config = self._config(service, request)
                smbo = Service.objects.get(name='smb')
                smb_config = self._get_config(smbo)
                domain = config.get('domain')
                #1. make sure ntpd is running, or else, don't start.
                self._ntp_check(request)
                #2. Name resolution check?
                self._resolve_check(config.get('domain'), request)

                if (method == 'winbind'):
                    # Build the authconfig invocation piecewise, grouped by
                    # subsystem (nss, pam, smb, kerberos, winbind, general).
                    cmd = ['/usr/sbin/authconfig', ]
                    #nss
                    cmd += ['--enablewinbind', '--enablewins',]
                    #pam
                    cmd += ['--enablewinbindauth',]
                    #smb
                    cmd += ['--smbsecurity', 'ads', '--smbrealm', domain.upper(),]
                    #kerberos
                    cmd += ['--krb5realm=%s' % domain.upper(),]
                    #winbind
                    cmd += ['--enablewinbindoffline', '--enablewinbindkrb5',
                            '--winbindtemplateshell=/bin/sh',]
                    #general
                    cmd += ['--update', '--enablelocauthorize',]
                    run_command(cmd)
                config['workgroup'] = self._domain_workgroup(domain, method=method)
                self._save_config(service, config)
                update_global_config(smb_config, config)
                self._join_domain(config, method=method)
                # Unreachable with method hard-coded to 'winbind' above;
                # retained for a possible sssd method.
                if (method == 'sssd' and config.get('enumerate') is True):
                    self._update_sssd(domain)

                if (method == 'winbind'):
                    systemctl('winbind', 'enable')
                    systemctl('winbind', 'start')
                systemctl('smb', 'restart')
                systemctl('nmb', 'restart')

            elif (command == 'stop'):
                config = self._config(service, request)
                try:
                    self._leave_domain(config, method=method)
                    smbo = Service.objects.get(name='smb')
                    smb_config = self._get_config(smbo)
                    update_global_config(smb_config)
                    systemctl('smb', 'restart')
                    systemctl('nmb', 'restart')
                except Exception, e:
                    e_msg = ('Failed to leave AD domain(%s). Error: %s' %
                             (config.get('domain'), e.__str__()))
                    handle_exception(Exception(e_msg), request)

            return Response()

Example 7

Project: amy Source File: util.py
Function: merge_objects
def merge_objects(object_a, object_b, easy_fields, difficult_fields,
                  choices, base_a=True):
    """Merge two objects of the same model.

    `object_a` and `object_b` are two objects being merged. If `base_a==True`
    (default value), then object_b will be removed and object_a will stay
    after the merge.  If `base_a!=True` then object_a will be removed, and
    object_b will stay after the merge.

    `easy_fields` contains names of non-M2M-relation fields, while
    `difficult_fields` contains names of M2M-relation fields.

    Finally, `choices` is a dictionary of field name as a key and one of
    3 values: 'obj_a', 'obj_b', or 'combine'.

    This view can throw ProtectedError when removing an object is not allowed;
    in that case, this function's call should be wrapped in try-except
    block."""
    if base_a:
        base_obj = object_a
        merging_obj = object_b
    else:
        base_obj = object_b
        merging_obj = object_a

    # used to catch all IntegrityErrors caused by violated database constraints
    # when adding two similar entries by the manager (see below for more
    # details)
    integrity_errors = []

    with transaction.atomic():
        # Simple (non-M2M) fields: copy the chosen value, or concatenate
        # both for 'combine'.
        for attr in easy_fields:
            value = choices.get(attr)
            if value == 'obj_a':
                setattr(base_obj, attr, getattr(object_a, attr))
            elif value == 'obj_b':
                setattr(base_obj, attr, getattr(object_b, attr))
            elif value == 'combine':
                try:
                    new_value = (getattr(object_a, attr) +
                                 getattr(object_b, attr))
                    setattr(base_obj, attr, new_value)
                except TypeError:
                    # probably 'unsupported operand type', but we
                    # can't do much about it…
                    pass

        # Relational (M2M / reverse-FK) fields: replace or union the
        # related sets through the base object's manager.
        for attr in difficult_fields:
            related_a = getattr(object_a, attr)
            related_b = getattr(object_b, attr)

            manager = getattr(base_obj, attr)
            value = choices.get(attr)

            # switch only if this is opposite object
            # NOTE(review): `manager != related_a` compares two manager
            # instances; relies on manager equality semantics -- confirm
            # this is ever False in practice.
            if value == 'obj_a' and manager != related_a:
                if hasattr(manager, 'clear'):
                    # M2M and FK with `null=True` have `.clear()` method
                    # which unassigns instead of removing the related objects
                    manager.clear()
                else:
                    # in some cases FK are strictly related with the instance
                    # ie. they cannot be unassigned (`null=False`), so the
                    # only sensible solution is to remove them
                    manager.all().delete()
                manager.set(list(related_a.all()))

            elif value == 'obj_b' and manager != related_b:
                if hasattr(manager, 'clear'):
                    # M2M and FK with `null=True` have `.clear()` method
                    # which unassigns instead of removing the related objects
                    manager.clear()
                else:
                    # in some cases FK are strictly related with the instance
                    # ie. they cannot be unassigned (`null=False`), so the
                    # only sensible solution is to remove them
                    manager.all().delete()
                manager.set(list(related_b.all()))

            elif value == 'combine':
                summed = related_a.all() | related_b.all()

                # some entries may cause IntegrityError (violation of
                # uniqueness constraint) because they are duplicates *after*
                # being added by the manager
                for element in summed:
                    try:
                        # inner atomic block so one failed add doesn't
                        # poison the outer transaction
                        with transaction.atomic():
                            manager.add(element)
                    except IntegrityError as e:
                        integrity_errors.append(str(e))

        merging_obj.delete()

        # NOTE(review): Model.save() returns None, so the first element of
        # the returned tuple is always None; callers appear to use only
        # the integrity_errors list -- confirm before changing.
        return base_obj.save(), integrity_errors

Example 8

Project: Misago Source File: user.py
    def create_user(self, username, email, password=None, set_default_avatar=False, **extra_fields):
        """Create, validate and save a new user account.

        Validates the credentials, fills in subscription/IP/rank defaults
        for any fields not supplied in `extra_fields`, assigns the special
        "authenticated" role and creates the user's Online tracker entry.
        Runs inside a single transaction.

        :raises ValueError: if email or password is missing.
        :return: the saved user instance.
        """
        from ..validators import validate_email, validate_password, validate_username

        with transaction.atomic():
            if not email:
                raise ValueError(_("User must have an email address."))
            if not password:
                raise ValueError(_("User must have a password."))

            validate_username(username)
            validate_email(email)
            validate_password(password)

            if 'joined_from_ip' not in extra_fields:
                extra_fields['joined_from_ip'] = '127.0.0.1'

            # Maps the settings value to the auto-subscribe constant.
            WATCH_DICT = {
                'no': AUTO_SUBSCRIBE_NONE,
                'watch': AUTO_SUBSCRIBE_NOTIFY,
                'watch_email': AUTO_SUBSCRIBE_NOTIFY_AND_EMAIL,
            }

            if 'subscribe_to_started_threads' not in extra_fields:
                new_value = WATCH_DICT[settings.subscribe_start]
                extra_fields['subscribe_to_started_threads'] = new_value

            if 'subscribe_to_replied_threads' not in extra_fields:
                new_value = WATCH_DICT[settings.subscribe_reply]
                extra_fields['subscribe_to_replied_threads'] = new_value

            now = timezone.now()
            user = self.model(
                is_staff=False,
                is_superuser=False,
                last_login=now,
                joined_on=now,
                **extra_fields
            )

            user.set_username(username)
            user.set_email(email)
            user.set_password(password)

            if 'rank' not in extra_fields:
                user.rank = Rank.objects.get_default()

            user.save(using=self._db)

            if set_default_avatar:
                avatars.set_default_avatar(user)
                user.avatar_hash = avatars.get_avatar_hash(user)
            else:
                # Placeholder hash; no real avatar was generated.
                user.avatar_hash = 'abcdef01'

            authenticated_role = Role.objects.get(special_role='authenticated')
            if authenticated_role not in user.roles.all():
                user.roles.add(authenticated_role)
            user.update_acl_key()

            user.save(update_fields=['avatar_hash', 'acl_key'])

            # populate online tracker with default value
            Online.objects.create(
                user=user,
                current_ip=extra_fields['joined_from_ip'],
                last_click=now,
            )

            return user

Example 9

Project: rapidsms Source File: views.py
@login_required
@transaction.atomic
def contact_bulk_add(request):
    """Bulk-create Contacts (and their Connections) from an uploaded CSV.

    Expects a POST with a ``bulk`` file upload whose rows are
    ``name, backend_name, identity``. On any malformed row the form is
    re-rendered with a message naming the failing line; on success the
    user is redirected to the registration view with a count message.
    """
    bulk_form = BulkRegistrationForm(request.POST)

    if request.method == "POST" and "bulk" in request.FILES:
        # Python3's CSV module takes strings while Python2's takes bytes
        if six.PY3:
            encoding = request.encoding or settings.DEFAULT_CHARSET
            f = TextIOWrapper(request.FILES['bulk'].file, encoding=encoding)
        else:
            f = request.FILES['bulk']
        reader = csv.reader(
            f,
            quoting=csv.QUOTE_NONE,
            skipinitialspace=True
        )
        count = 0
        for i, row in enumerate(reader, start=1):
            try:
                name, backend_name, identity = row
            except ValueError:
                # Row did not contain exactly three columns.
                return render(request, 'registration/bulk_form.html', {
                    "bulk_form": bulk_form,
                    "csv_errors": "Could not unpack line " + str(i),
                })
            # Resolve the backend *before* creating the Contact: returning
            # an error response does not roll back the atomic block, so the
            # old order committed an orphan Contact on a bad backend name.
            try:
                backend = Backend.objects.get(name=backend_name)
            except Backend.DoesNotExist:
                return render(request, 'registration/bulk_form.html', {
                    "bulk_form": bulk_form,
                    "csv_errors": "Could not find Backend.  Line: " + str(i),
                })
            contact = Contact.objects.create(name=name)
            Connection.objects.create(
                backend=backend,
                identity=identity,
                contact=contact)
            count += 1
        if not count:
            return render(request, 'registration/bulk_form.html', {
                "bulk_form": bulk_form,
                "csv_errors": "No contacts found in file",
            })
        messages.add_message(request, messages.INFO, "Added %d contacts" %
                                                     count)
        return HttpResponseRedirect(reverse(registration))
    return render(request, 'registration/bulk_form.html', {
        "bulk_form": bulk_form,
    })

Example 10

Project: viewflow Source File: decorators.py
def flow_job(func):
    """
    Decorator that prepares celery task for execution.

    Makes celery job function with the following signature
    `(flow_task-strref, process_pk, task_pk, **kwargs)`

    Expects actual celery job function which has the following signature `(activation, **kwargs)`
    If celery task class implements activation interface, job function is
    called without activation instance `(**kwargs)`

    Process instance is locked only before and after the function execution.
    Please avoid any process state modification during the celery job.
    """
    @functools.wraps(func)
    def _wrapper(*args, **kwargs):
        # The three identifiers may arrive either positionally or as kwargs;
        # kwargs take precedence and are popped so they don't leak into func.
        flow_task_strref = kwargs.pop('flow_task_strref') if 'flow_task_strref' in kwargs else args[0]
        process_pk = kwargs.pop('process_pk') if 'process_pk' in kwargs else args[1]
        task_pk = kwargs.pop('task_pk') if 'task_pk' in kwargs else args[2]
        flow_task = import_task_by_ref(flow_task_strref)

        # Lock factory bound to this flow; each `lock(...)` context below
        # holds the process lock only for the duration of that block.
        lock = flow_task.flow_class.lock_impl(flow_task.flow_class.instance)

        # start: mark the task as started/restarted inside its own
        # transaction, then release the lock before running the job body.
        with transaction.atomic(), lock(flow_task.flow_class, process_pk):
            try:
                task = flow_task.flow_class.task_class.objects.get(pk=task_pk)
                if task.status == STATUS.CANCELED:
                    return
            except flow_task.flow_class.task_class.DoesNotExist:
                # There was rollback on job task created transaction,
                # we don't need to do the job
                return
            else:
                activation = flow_task.activation_class()
                activation.initialize(flow_task, task)
                if task.status == STATUS.SCHEDULED:
                    activation.start()
                else:
                    activation.restart()

        # execute: the actual job runs *outside* any transaction/lock, per
        # the docstring's contract that process state isn't modified here.
        try:
            result = func(activation, **kwargs)
        except Exception as exc:
            # mark as error
            with transaction.atomic(), lock(flow_task.flow_class, process_pk):
                # Re-fetch and re-initialize: the unlocked job may have run
                # for a long time, so the earlier snapshot could be stale.
                task = flow_task.flow_class.task_class.objects.get(pk=task_pk)
                activation = flow_task.activation_class()
                activation.initialize(flow_task, task)
                activation.error(comments="{}\n{}".format(exc, traceback.format_exc()))
            raise
        else:
            # mark as done
            with transaction.atomic(), lock(flow_task.flow_class, process_pk):
                task = flow_task.flow_class.task_class.objects.get(pk=task_pk)
                activation = flow_task.activation_class()
                activation.initialize(flow_task, task)
                activation.done()

            return result

    return _wrapper

Example 11

Project: Django--an-app-at-a-time Source File: migrate.py
    def sync_apps(self, connection, app_labels):
        """Runs the old syncdb-style operation on a list of app_labels.

        Creates tables for unmigrated models that are not already present,
        runs their deferred SQL, installs per-model custom SQL, and loads
        ``initial_data`` fixtures. Returns the set of models whose tables
        were created.
        """
        cursor = connection.cursor()

        try:
            # Get a list of already installed *models* so that references work right.
            tables = connection.introspection.table_names(cursor)
            created_models = set()

            # Build the manifest of apps and models that are to be synchronized
            all_models = [
                (app_config.label,
                    router.get_migratable_models(app_config, connection.alias, include_auto_created=False))
                for app_config in apps.get_app_configs()
                if app_config.models_module is not None and app_config.label in app_labels
            ]

            def model_installed(model):
                # True if neither the model's table nor its auto-created
                # (e.g. m2m through) table already exists in the database.
                opts = model._meta
                converter = connection.introspection.table_name_converter
                # Note that if a model is unmanaged we short-circuit and never try to install it
                return not ((converter(opts.db_table) in tables) or
                    (opts.auto_created and converter(opts.auto_created._meta.db_table) in tables))

            manifest = OrderedDict(
                (app_name, list(filter(model_installed, model_list)))
                for app_name, model_list in all_models
            )

            create_models = set(itertools.chain(*manifest.values()))
            emit_pre_migrate_signal(create_models, self.verbosity, self.interactive, connection.alias)

            # Create the tables for each model
            if self.verbosity >= 1:
                self.stdout.write("  Creating tables...\n")
            # Wrap DDL in a transaction only if the backend can roll it back
            # (savepoint=False otherwise, so e.g. MySQL DDL runs unwrapped).
            with transaction.atomic(using=connection.alias, savepoint=connection.features.can_rollback_ddl):
                deferred_sql = []
                for app_name, model_list in manifest.items():
                    for model in model_list:
                        if model._meta.proxy or not model._meta.managed:
                            continue
                        if self.verbosity >= 3:
                            self.stdout.write(
                                "    Processing %s.%s model\n" % (app_name, model._meta.object_name)
                            )
                        with connection.schema_editor() as editor:
                            if self.verbosity >= 1:
                                self.stdout.write("    Creating table %s\n" % model._meta.db_table)
                            editor.create_model(model)
                            # Collect deferred statements (FKs, indexes) to run
                            # after *all* tables exist; clear them so the editor
                            # doesn't execute them itself on __exit__.
                            deferred_sql.extend(editor.deferred_sql)
                            editor.deferred_sql = []
                        created_models.add(model)

                if self.verbosity >= 1:
                    self.stdout.write("    Running deferred SQL...\n")
                for statement in deferred_sql:
                    cursor.execute(statement)
        finally:
            cursor.close()

        # The connection may have been closed by a syncdb handler.
        cursor = connection.cursor()
        try:
            # Install custom SQL for the app (but only if this
            # is a model we've just created)
            if self.verbosity >= 1:
                self.stdout.write("  Installing custom SQL...\n")
            for app_name, model_list in manifest.items():
                for model in model_list:
                    if model in created_models:
                        custom_sql = custom_sql_for_model(model, no_style(), connection)
                        if custom_sql:
                            if self.verbosity >= 2:
                                self.stdout.write(
                                    "    Installing custom SQL for %s.%s model\n" %
                                    (app_name, model._meta.object_name)
                                )
                            try:
                                # Each model's custom SQL is atomic on its own;
                                # a failure is reported but doesn't abort the run.
                                with transaction.atomic(using=connection.alias):
                                    for sql in custom_sql:
                                        cursor.execute(sql)
                            except Exception as e:
                                self.stderr.write(
                                    "    Failed to install custom SQL for %s.%s model: %s\n"
                                    % (app_name, model._meta.object_name, e)
                                )
                                if self.show_traceback:
                                    traceback.print_exc()
                        else:
                            if self.verbosity >= 3:
                                self.stdout.write(
                                    "    No custom SQL for %s.%s model\n" %
                                    (app_name, model._meta.object_name)
                                )
        finally:
            cursor.close()

        # Load initial_data fixtures (unless that has been disabled)
        if self.load_initial_data:
            for app_label in app_labels:
                call_command(
                    'loaddata', 'initial_data', verbosity=self.verbosity,
                    database=connection.alias, app_label=app_label,
                    hide_empty=True,
                )

        return created_models

Example 12

Project: coursys Source File: views.py
def _pages_from_json(request, offering, data):
    """Import/update course Pages from a JSON document posted by an API client.

    ``data`` is raw bytes: UTF-8(-sig) encoded JSON of the form
    ``{"userid": ..., "token": ..., "pages": [...]}``. The caller is
    authenticated by matching ``token`` against the user's stored
    'pages-token' config value. Each entry in ``pages`` may create or
    update a Page and always creates a new PageVersion.

    Runs inside a single transaction, so any ValidationError aborts the
    whole import without partial changes. Returns the authenticated Person.
    """
    with django.db.transaction.atomic():
        try:
            data = data.decode('utf-8-sig')
        except UnicodeDecodeError:
            raise ValidationError(u"Bad UTF-8 data in file.")

        try:
            data = json.loads(data)
        except ValueError as e:
            raise ValidationError(u'JSON decoding error.  Exception was: "' + str(e) + '"')

        if not isinstance(data, dict):
            raise ValidationError(u'Outer JSON data structure must be an object.')
        if 'userid' not in data or 'token' not in data:
            raise ValidationError(u'Outer JSON data object must contain keys "userid" and "token".')
        if 'pages' not in data:
            raise ValidationError(u'Outer JSON data object must contain keys "pages".')
        if not isinstance(data['pages'], list):
            raise ValidationError(u'Value for "pages" must be a list.')

        try:
            user = Person.objects.get(userid=data['userid'])
            member = Member.objects.exclude(role='DROP').get(person=user, offering=offering)
        # Bug fix: the old Py2 form `except Person.DoesNotExist, Member.DoesNotExist:`
        # caught only Person.DoesNotExist and *rebound* Member.DoesNotExist as the
        # exception target; a tuple is required to catch both.
        except (Person.DoesNotExist, Member.DoesNotExist):
            raise ValidationError(u'Person with that userid does not exist.')

        if 'pages-token' not in user.config or user.config['pages-token'] != data['token']:
            e = ValidationError(u'Could not validate authentication token.')
            e.status = 403
            raise e

        # if we get this far, the user is authenticated and we can start processing the pages...

        for i, pdata in enumerate(data['pages']):
            if not isinstance(pdata, dict):
                raise ValidationError(u'Page #%i entry structure must be an object.' % (i))
            if 'label' not in pdata:
                raise ValidationError(u'Page #%i entry does not have a "label".' % (i))

            # handle changes to the Page object
            pages = Page.objects.filter(offering=offering, label=pdata['label'])
            if pages:
                page = pages[0]
                old_ver = page.current_version()
            else:
                page = Page(offering=offering, label=pdata['label'])
                old_ver = None

            # check write permissions

            # mock the request object enough to satisfy _check_allowed()
            class FakeRequest(object):
                def is_authenticated(self):
                    return True
            fake_request = FakeRequest()
            fake_request.user = FakeRequest()
            fake_request.user.username = user.userid

            if old_ver:
                m = _check_allowed(fake_request, offering, page.can_write, page.editdate())
            else:
                m = _check_allowed(fake_request, offering, offering.page_creators())
            if not m:
                raise ValidationError(u'You can\'t edit page #%i.' % (i))

            # handle Page attributes
            if 'can_read' in pdata:
                if type(pdata['can_read']) != unicode or pdata['can_read'] not in ACL_DESC:
                    raise ValidationError(u'Page #%i "can_read" value must be one of %s.'
                                          % (i, ','.join(ACL_DESC.keys())))

                page.can_read = pdata['can_read']

            if 'can_write' in pdata:
                if type(pdata['can_write']) != unicode or pdata['can_write'] not in WRITE_ACL_DESC:
                    raise ValidationError(u'Page #%i "can_write" value must be one of %s.'
                                          % (i, ','.join(WRITE_ACL_DESC.keys())))
                if m.role == 'STUD':
                    raise ValidationError(u'Page #%i: students can\'t change can_write value.' % (i))
                page.can_write = pdata['can_write']

            if 'new_label' in pdata:
                if type(pdata['new_label']) != unicode:
                    raise ValidationError(u'Page #%i "new_label" value must be a string.' % (i))
                if m.role == 'STUD':
                    raise ValidationError(u'Page #%i: students can\'t change label value.' % (i))
                if Page.objects.filter(offering=offering, label=pdata['new_label']):
                    raise ValidationError(u'Page #%i: there is already a page with that "new_label".' % (i))

                page.label = pdata['new_label']

            page.save()

            # handle PageVersion changes: always create a new version,
            # carrying forward title/wikitext from the old version if absent.
            ver = PageVersion(page=page, editor=member)

            if 'title' in pdata:
                if type(pdata['title']) != unicode:
                    raise ValidationError(u'Page #%i "title" value must be a string.' % (i))

                ver.title = pdata['title']
            elif old_ver:
                ver.title = old_ver.title
            else:
                raise ValidationError(u'Page #%i has no "title" for new page.' % (i))

            if 'comment' in pdata:
                if type(pdata['comment']) != unicode:
                    raise ValidationError(u'Page #%i "comment" value must be a string.' % (i))

                ver.comment = pdata['comment']

            if 'use_math' in pdata:
                if type(pdata['use_math']) != bool:
                    raise ValidationError(u'Page #%i "comment" value must be a boolean.' % (i))

                ver.set_math(pdata['use_math'])

            if 'wikitext-base64' in pdata:
                if type(pdata['wikitext-base64']) != unicode:
                    raise ValidationError(u'Page #%i "wikitext-base64" value must be a string.' % (i))
                try:
                    wikitext = base64.b64decode(pdata['wikitext-base64'])
                except TypeError:
                    raise ValidationError(u'Page #%i "wikitext-base64" contains bad base BASE64 data.' % (i))

                ver.wikitext = wikitext
            elif 'wikitext' in pdata:
                if type(pdata['wikitext']) != unicode:
                    raise ValidationError(u'Page #%i "wikitext" value must be a string.' % (i))

                ver.wikitext = pdata['wikitext']
            elif old_ver:
                ver.wikitext = old_ver.wikitext
            else:
                raise ValidationError(u'Page #%i has no wikitext for new page.' % (i))

            ver.save()

        return user

Example 13

Project: orchestra Source File: test_load.py
    def test_load_workflow_version(self):
        """ Ensure that workflow version loading works as desired.

        Covers: initial load of v1, --force semantics (allowed only when
        neither the step set nor the dependency topology changes),
        rejection of versions referencing non-existent slugs, and a
        successful load of v2 alongside v1.
        """
        # Verify initial DB state.
        assert_test_dir_workflow_not_loaded(self)
        assert_test_dir_v1_not_loaded(self)
        assert_test_dir_v2_not_loaded(self)

        # Load V1 of the workflow.
        load_workflow(WORKFLOWS['valid']['app_label'], VERSION_1)
        workflow = Workflow.objects.get(slug='test_dir')
        assert_test_dir_workflow_loaded(self)
        assert_test_dir_v1_loaded(self)
        assert_test_dir_v2_not_loaded(self)

        # Load the JSON data for the versions.
        v1_file_path = os.path.join(WORKFLOWS['valid']['dir'],
                                    'v1/version.json')
        with open(v1_file_path, 'r') as v1_file:
            v1_data = json.load(v1_file)

        v2_file_path = os.path.join(WORKFLOWS['valid']['dir'],
                                    'v2/version.json')
        with open(v2_file_path, 'r') as v2_file:
            v2_data = json.load(v2_file)

        # Without --force, can't overwrite a version.
        # We wrap calls to load_workflow_version in transaction.atomic, because
        # the call might create corrupt database state otherwise.
        force_error = 'Version {} already exists'.format(VERSION_1)
        with self.assertRaisesMessage(WorkflowError, force_error):
            with transaction.atomic():
                load_workflow_version(v1_data, workflow)

        # Even with --force, can't overwrite a version with a new step
        # (mutate v1_data, expect failure, then restore it below).
        v1_data['steps'].append({'slug': 'invalid_new_step'})
        step_change_err_msg = ('Even with --force, cannot change the steps of '
                               'a workflow.')
        with self.assertRaisesMessage(WorkflowError, step_change_err_msg):
            with transaction.atomic():
                load_workflow_version(v1_data, workflow, force=True)
        v1_data['steps'] = v1_data['steps'][:-1]

        # Even with --force, can't change a step's creation dependencies.
        step_2_create_dependencies = v1_data['steps'][1]['creation_depends_on']
        step_2_create_dependencies.append('s3')
        topology_change_err_msg = ('Even with --force, cannot change the '
                                   'topology of a workflow.')
        with self.assertRaisesMessage(WorkflowError, topology_change_err_msg):
            with transaction.atomic():
                load_workflow_version(v1_data, workflow, force=True)
        v1_data['steps'][1]['creation_depends_on'] = (
            step_2_create_dependencies[:-1])

        # Even with --force, can't change a step's submission dependencies.
        step_3_submit_dependencies = v1_data['steps'][2][
            'submission_depends_on']
        step_3_submit_dependencies.append('s1')
        with self.assertRaisesMessage(WorkflowError, topology_change_err_msg):
            with transaction.atomic():
                load_workflow_version(v1_data, workflow, force=True)
        v1_data['steps'][2]['submission_depends_on'] = (
            step_3_submit_dependencies[:-1])

        # Otherwise, --force should reload versions correctly.
        with transaction.atomic():
            load_workflow_version(v1_data, workflow, force=True)
        assert_test_dir_workflow_loaded(self)
        assert_test_dir_v1_loaded(self)
        assert_test_dir_v2_not_loaded(self)

        # New versions with bad slugs should not load correctly
        # (same mutate-check-restore pattern on v2_data).
        v2_step_2 = v2_data['steps'][1]
        v2_step_2_create_dependencies = v2_step_2['creation_depends_on']
        v2_step_2_create_dependencies.append('not_a_real_step')
        bad_slug_error = '{}.{} contains a non-existent slug'
        with self.assertRaisesMessage(
                WorkflowError,
                bad_slug_error.format('s2', 'creation_depends_on')):
            with transaction.atomic():
                load_workflow_version(v2_data, workflow)
        v2_step_2['creation_depends_on'] = (
            v2_step_2_create_dependencies[:-1])

        v2_step_2_submit_dependencies = v2_step_2['submission_depends_on']
        v2_step_2_submit_dependencies.append('not_a_real_step')
        with self.assertRaisesMessage(
                WorkflowError,
                bad_slug_error.format('s2', 'submission_depends_on')):
            with transaction.atomic():
                load_workflow_version(v2_data, workflow)
        v2_step_2['submission_depends_on'] = (
            v2_step_2_submit_dependencies[:-1])

        v2_step_2_certification_dependencies = v2_step_2[
            'required_certifications']
        v2_step_2_certification_dependencies.append('not_a_real_certification')
        with self.assertRaisesMessage(
                WorkflowError,
                bad_slug_error.format('s2', 'required_certifications')):
            with transaction.atomic():
                load_workflow_version(v2_data, workflow)
        v2_step_2['required_certifications'] = (
            v2_step_2_certification_dependencies[:-1])

        # Otherwise, new versions should load correctly
        with transaction.atomic():
            load_workflow_version(v2_data, workflow)
        assert_test_dir_workflow_loaded(self)
        assert_test_dir_v1_loaded(self)
        assert_test_dir_v2_loaded(self)

Example 14

Project: oioioi Source File: import_schools.py
    def handle(self, *args, **options):
        """Import schools from a CSV file given as a local path or http(s) URL.

        The CSV header must match COLUMNS exactly. Each row is normalized
        (address, postal code, city, province, phone, email) and upserted
        into School keyed by (name, postal_code). The whole import runs in
        one transaction; any validation error aborts it without changes.

        NOTE(review): Python 2 code (`urllib2`, `except ..., e:` syntax,
        `iteritems`) — keep that in mind when editing.
        """
        if len(args) != 1:
            raise CommandError(_("Expected one argument - filename or url"))

        arg = args[0]

        if arg.startswith('http://') or arg.startswith('https://'):
            self.stdout.write(_("Fetching %s...\n") % (arg,))
            stream = urllib2.urlopen(arg)
        else:
            if not os.path.exists(arg):
                raise CommandError(_("File not found: ") + arg)
            stream = open(arg, 'r')

        reader = unicodecsv.DictReader(stream)
        fields = reader.fieldnames
        if fields != COLUMNS:
            raise CommandError(
                _("Missing header or invalid columns: %(h)s."
                  " Expected: %(col)s")
                % {'h': ', '.join(fields), 'col': ', '.join(COLUMNS)})

        with transaction.atomic():
            ok = True
            all_count = 0
            created_count = 0
            for row in reader:
                all_count += 1

                # Address: drop the 'ul.' (street) prefix, trim, title-case.
                row['address'] = row['address'].replace('ul.', '')
                row['address'] = row['address'].strip(' ')
                row['address'] = string.capwords(row['address'])

                # Postal code: remove all internal whitespace.
                row['postal_code'] = ''.join(row['postal_code'].split())

                # City: normalize spaced/unicode hyphens, then title-case.
                for hypen in (' - ', u'\u2010'):
                    row['city'] = row['city'].replace(hypen, '-')
                row['city'] = row['city'].title()

                row['province'] = row['province'].lower()

                # Phone: keep only the first number listed; strip labels,
                # punctuation and leading zeros.
                row['phone'] = row['phone'].split(',')[0]
                row['phone'] = row['phone'].split(';')[0]
                for c in ['tel.', 'fax.', '(', ')', '-', ' ']:
                    row['phone'] = row['phone'].replace(c, '')
                row['phone'] = row['phone'].lstrip('0')

                # Email: keep only the first address listed.
                row['email'] = row['email'].split(',')[0]
                row['email'] = row['email'].split(';')[0]

                # Upsert by (name, postal_code); then overwrite all columns.
                school, created = School.objects \
                        .get_or_create(name=row['name'],
                                       postal_code=row['postal_code'])
                if created:
                    created_count += 1

                for column in COLUMNS:
                    setattr(school, column, row[column])

                school.is_active = True
                school.is_approved = True

                try:
                    school.full_clean()
                    school.save()
                except ValidationError, e:
                    # Report every field error with its line number; mark the
                    # run as failed so the transaction is rolled back at the end.
                    for k, v in e.message_dict.iteritems():
                        for msg in v:
                            if k == '__all__':
                                self.stdout.write(
                                    _("Line %(lineNum)s: %(msg)s\n")
                                    % {'lineNum': reader.line_num, 'msg': msg})
                            else:
                                self.stdout.write(
                                    _("Line %(lineNum)s,"
                                      " field %(field)s: %(msg)s\n")
                                    % {'lineNum': reader.line_num, 'field': k,
                                       'msg': msg})
                    ok = False

            if ok:
                self.stdout.write(
                    _("Processed %(all_count)d entries (%(new_count)d new)\n")
                    % {'all_count': all_count, 'new_count': created_count})
            else:
                raise CommandError(_("There were some errors."
                                     " Database not changed\n"))
Example 15

Project: junction Source File: sample_data.py
Function: handle
    @transaction.atomic
    def handle(self, *args, **options):
        """Populate the database with sample data for local development:
        users, conferences, proposals, a schedule, comments and the
        default reviewer vote values. Runs in a single transaction."""
        self.users = []
        self.conferences = []
        self.proposals = []
        self.proposal_reviewers = []

        # Point the current Site at the local dev server.
        print('  Updating domain to localhost:8000')
        site = Site.objects.get_current()
        site.domain, site.name = 'localhost:8000', 'Local'
        site.save()

        # Superuser with a verified primary email address.
        print('  Creating Superuser')
        super_user = self.create_user(is_superuser=True, username='admin',
                                      is_active=True)
        EmailAddress.objects.get_or_create(user=super_user,
                                           verified=True,
                                           primary=True,
                                           email=super_user.email)

        print('  Creating sample Users')
        for index in range(NUM_USERS):
            self.users.append(self.create_user(counter=index))

        print('  Creating proposal sections')
        self.proposal_sections = self.create_proposal_sections()

        print('  Create proposal types')
        self.proposal_types = self.create_proposal_types()

        print('  Creating sample Conferences')
        for index in range(NUM_CONFERENCES + NUM_EMPTY_CONFERENCES):
            conference = self.create_conference(index)
            self.conferences.append(conference)

            if index >= NUM_CONFERENCES:
                # The trailing conferences are intentionally left empty.
                continue

            self.create_moderators(conference)
            self.proposal_reviewers = self.create_propsoal_reviewers(conference)

            # Attach every proposal section and proposal type.
            for section in self.proposal_sections:
                conference.proposal_sections.add(section)
            for proposal_type in self.proposal_types:
                conference.proposal_types.add(proposal_type)

        # Proposals in each status, created in this fixed order.
        print('  Creating sample proposals')
        for status, amount in (("Public", NUM_PUBLIC_PROPOSALS),
                               ("Draft", NUM_DRAFT_PROPOSALS),
                               ("Cancelled", NUM_CANCELLED_PROPOSALS)):
            for _ in range(amount):
                self.proposals.append(
                    self.create_proposal(proposal_type=status))

        # Schedule every public proposal, plus one 'Break' slot.
        print('  Create sample Schedule')
        for proposal in self.proposals:
            if proposal.get_status_display() == 'Public':
                self.create_scheduled_item(proposal=proposal)
        self.create_scheduled_item(proposal='Break',
                                   conference=self.conferences[0])

        # Comments from regular users, then from reviewers.
        print('  Creating sample proposal comments')
        for _ in range(NUM_PUBLIC_COMMENTS):
            self.create_proposal_comment(users=self.users)

        reviewers = [entry.reviewer for entry in self.proposal_reviewers]
        for _ in range(NUM_REVIEWER_COMMENTS):
            self.create_proposal_comment(users=reviewers)

        print(' Creating default choices for proposal reviewer vote values.')
        for vote in constants.ProposalReviewVote.CHOICES:
            ProposalSectionReviewerVoteValue.objects.create(
                vote_value=vote[0], description=vote[1])
Example 16

Project: SmartElect Source File: job.py
    def generate_rolls(self):
        """Build PDFs for this job. This is where all the action happens.

        May raise NoVotersError, NoOfficeError and OutOfDiskSpaceError.
        """
        self.begin = django_now()

        if not self.input_arguments['forgive_no_office']:
            # We are not going to be forgiving if we find any office-less centers.
            has_office = lambda center: center.office.id != NO_NAMEDTHING
            problem_centers = [center.center_id for center in self.centers if not
                               has_office(center)]

            if problem_centers:
                msg = "The following centers have no associated office: {}."
                raise NoOfficeError(msg.format(problem_centers))

        if not self.input_arguments['forgive_no_voters']:
            # Test each center to make sure it has at least one registration. This is a lot of
            # DB churn and can take a while. It has to be done in two parts.

            # Find non-copy centers with no registrations
            problem_center_ids = \
                RegistrationCenter.objects.filter(id__in=[center.id for center in self.centers],
                                                  registration__isnull=True,
                                                  copy_of_id__isnull=True).values_list('id',
                                                                                       flat=True)
            problem_centers = [center for center in self.centers if center.id in problem_center_ids]

            # Find copy centers with no registrations. This runs one query per center which is
            # the expensive way to do it, but it's the only way to figure out exactly which copy
            # centers (if any) have parents with no registrations without dropping to raw SQL.
            for center in self.centers:
                copied = center.copy_of
                if copied:
                    if not Registration.objects.filter(registration_center=copied).exists():
                        problem_centers.append(center)

            if problem_centers:
                problem_centers = [center.center_id for center in problem_centers]
                msg = "The following centers have no registrants: {}."
                raise NoVotersError(msg.format(problem_centers))

        for i_center, center in enumerate(self.centers):
            # Fetch the voters for this center from the DB.
            voter_roll = get_voter_roll(center)

            office_id = center.office.id
            if office_id not in self.offices:
                self.offices[office_id] = center.office

            out_path = os.path.join(self.output_path, str(office_id))
            if not os.path.exists(out_path):
                with out_of_disk_space_handler_context():
                    os.makedirs(out_path)

            filename_params = {'center_id': center.center_id, }

            # Generate different PDFs based on phase
            if self.phase == 'in-person':
                # election center books only
                for gender in (FEMALE, MALE):
                    filename_params['gender'] = GENDER_ABBRS[gender]
                    filename = self.get_filename(out_path, filename_params)
                    n_pages = generate_pdf(filename, center, voter_roll, gender, center_book=True)
                    self.add(filename, n_pages)

            elif self.phase == 'exhibitions':
                # election center list only
                for gender in (FEMALE, MALE):
                    filename_params['gender'] = GENDER_ABBRS[gender]
                    filename = self.get_filename(out_path, filename_params)
                    n_pages = generate_pdf(filename, center, voter_roll, gender)
                    self.add(filename, n_pages)

            elif self.phase == 'polling':
                # distribute registrations into stations for this center
                stations = station_distributor(voter_roll)

                # Stash the list of which voters registered at this center/station for later.
                election = Election.objects.get_most_current_election()
                if not election:
                    raise NoElectionError('There is no current in-person election.')
                for station in stations:
                    station.election = election
                    station.center = center
                    for voter in station.roll:
                        voter_station = VoterStation(national_id=voter.national_id,
                                                     center_id=center.center_id,
                                                     station_number=station.number)
                        self.voter_stations.append(voter_station)

                # count stations by gender for center list
                station_counts_by_gender = Counter(station.gender for station in stations)
                for gender in station_counts_by_gender:
                    filename_params['gender'] = GENDER_ABBRS[gender]
                    filename = self.get_filename(out_path, filename_params, 'list')
                    n_pages = generate_pdf_center_list(filename, stations, gender)
                    self.add(filename, n_pages)
                    logger.info('center list {}'.format(filename))

                # Create a separate book and sign for each station
                for station in stations:
                    filename_params['station_number'] = station.number

                    # polling station books
                    filename = self.get_filename(out_path, filename_params, 'book')
                    n_pages = generate_pdf_station_book(filename, station)
                    self.add(filename, n_pages)
                    logger.info('station book {}'.format(filename))

                    # polling station sign
                    filename = self.get_filename(out_path, filename_params, 'sign')
                    n_pages = generate_pdf_station_sign(filename, station)
                    self.add(filename, n_pages)
                    logger.info('station book {}'.format(filename))

                with transaction.atomic():
                    # Delete any existing Stations for this center and replace them with new.
                    Station.objects.filter(election=election, center=center).delete()
                    for station in stations:
                        station.save()

            # Emit status
            logger.info('saved PDFs for center %s' % center.center_id)
            params = (i_center + 1, len(self.centers), (i_center + 1) / len(self.centers))
            logger.info("Completed {} of {} (~{:.2%})".format(*params))

        self.end = django_now()

        # Now that rolls are generated, write voter station CSVs (if appropriate) and job JSON
        # metadata. Last but not least, zip output.
        if self.voter_stations:
            # Write voter station data twice to CSV files. First sorted by national id and again
            # sorted by (center id, station number).
            header = [('national_id', 'center_id', 'station_number')]
            # sort by national id
            self.voter_stations.sort()

            filename = os.path.join(self.output_path, 'voters_by_national_id.csv')
            with out_of_disk_space_handler_context():
                csv_writer = UnicodeWriter(open(filename, 'w'))
                csv_writer.writerows(header)
                csv_writer.writerows(self.voter_stations)

            # sort by center, station number
            self.voter_stations.sort(key=lambda voter_station: voter_station[1:])

            filename = os.path.join(self.output_path, 'voters_by_center_and_station.csv')
            with out_of_disk_space_handler_context():
                csv_writer = UnicodeWriter(open(filename, 'w'))
                csv_writer.writerows(header)
                csv_writer.writerows(self.voter_stations)

        # Write the JSON metadata file
        metadata_filename = os.path.join(self.output_path, METADATA_FILENAME)
        with out_of_disk_space_handler_context():
            with open(metadata_filename, 'w') as f:
                json.dump(self.metadata, f, indent=2)

        # Write a hash of the metadata file
        sha = hashlib.sha256(open(metadata_filename).read()).hexdigest()
        with out_of_disk_space_handler_context():
            open(metadata_filename + '.sha256', 'w').write(sha)

        logger.info('zipping output')
        for office_id in sorted(self.offices.keys()):
            office_dir = os.path.join(self.output_path, str(office_id))
            with out_of_disk_space_handler_context():
                zip_filename = office_dir + '.zip'
                with zipfile.ZipFile(zip_filename, 'w', zipfile.ZIP_DEFLATED) as office_zip:
                    logger.info('zipping %s' % office_dir)
                    for office_base, dirs, files in os.walk(office_dir):
                        for pdf in files:
                            fn = os.path.join(office_base, pdf)
                            office_zip.write(fn, pdf)

        logger.info('done')

Example 17

Project: rockstor-core Source File: disk.py
    @staticmethod
    @transaction.atomic
    def _update_disk_state():
        """
        A db atomic method to update the database of attached disks / drives.
        Works only on device serial numbers for drive identification.
        Calls scan_disks to establish the current connected drives info.
        Initially removes duplicate by serial number db entries to deal
        with legacy db states and obfuscates all previous device names as they
        are transient. The drive database is then updated with the attached
        disks info and previously known drives no longer found attached are
        marked as offline. All offline drives have their SMART availability and
        activation status removed and all attached drives have their SMART
        availability assessed and activated if available.
        :return: serialized models of attached and missing disks via serial num
        """
        # Acquire a list (namedtuple collection) of attached drives > min size
        disks = scan_disks(settings.MIN_DISK_SIZE)
        serial_numbers_seen = []
        # Make sane our db entries in view of what we know we have attached.
        # Device serial number is only known external unique entry, scan_disks
        # make this so in the case of empty or repeat entries by providing
        # fake serial numbers which are in turn flagged via WebUI as unreliable.
        # 1) scrub all device names with unique but nonsense uuid4
        # 2) mark all offline disks as such via db flag
        # 3) mark all offline disks smart available and enabled flags as False
        # logger.info('update_disk_state() Called')
        for do in Disk.objects.all():
            # Replace all device names with a unique placeholder on each scan
            # N.B. do not optimize by re-using uuid index as this could lead
            # to a non refreshed webui acting upon an entry that is different
            # from that shown to the user.
            do.name = 'detached-' + str(uuid.uuid4()).replace('-', '')
            # Delete duplicate or fake by serial number db disk entries.
            # It makes no sense to save fake serial number drives between scans
            # as on each scan the serial number is re-generated (fake) anyway.
            # Serial numbers beginning with 'fake-serial-' are from scan_disks.
            if (do.serial in serial_numbers_seen) or (
                    re.match('fake-serial-', do.serial) is not None):
                logger.info('Deleting duplicate or fake (by serial) Disk db '
                            'entry. Serial = %s' % do.serial)
                do.delete()  # django >=1.9 returns a dict of deleted items.
                # Continue onto next db disk object as nothing more to process.
                continue
            # First encounter of this serial in the db so stash it for
            # reference. Serials are immutable strings so no copy is needed
            # (the previous deepcopy here was a no-op).
            serial_numbers_seen.append(do.serial)
            # Look for devices (by serial number) that are in the db but not in
            # our disk scan, ie offline / missing.
            if (do.serial not in [d.serial for d in disks]):
                # update the db entry as offline
                do.offline = True
                # disable S.M.A.R.T available and enabled flags.
                do.smart_available = do.smart_enabled = False
            do.save()  # make sure all updates are flushed to db
        # Our db now has no device name info as all dev names are place holders.
        # Iterate over attached drives to update the db's knowledge of them.
        # Kernel dev names are unique so safe to overwrite our db unique name.
        for d in disks:
            # start with an empty disk object
            dob = None
            # Convert our transient but just scanned so current sda type name
            # to a more useful by-id type name as found in /dev/disk/by-id
            byid_disk_name, is_byid = get_dev_byid_name(d.name, True)
            # If the db has an entry with this disk's serial number then
            # use this db entry and update the device name from our recent scan.
            if (Disk.objects.filter(serial=d.serial).exists()):
                dob = Disk.objects.get(serial=d.serial)
                #dob.name = d.name
                dob.name = byid_disk_name
            else:
                # We have an assumed new disk entry as no serial match in db.
                # Build a new entry for this disk.
                #dob = Disk(name=d.name, serial=d.serial)
                # N.B. we may want to force a fake-serial here if is_byid False,
                # that way we flag as unusable disk as no by-id type name found.
                # It may already have been set though as the only by-id
                # failures so far are virtio disks with no serial so scan_disks
                # will have already given it a fake serial in d.serial.
                dob = Disk(name=byid_disk_name, serial=d.serial)
            # Update the db disk object (existing or new) with our scanned info
            dob.size = d.size
            dob.parted = d.parted
            dob.offline = False  # as we are iterating over attached devices
            dob.model = d.model
            dob.transport = d.transport
            dob.vendor = d.vendor
            dob.btrfs_uuid = d.btrfs_uuid
            # If attached disk has an fs and it isn't btrfs
            if (d.fstype is not None and d.fstype != 'btrfs'):
                dob.btrfs_uuid = None
                dob.parted = True  # overload use of parted as non btrfs flag.
                # N.B. this overload use may become redundant with the addition
                # of the Disk.role field.
            # Update the role field with scan_disks findings, currently only
            # mdraid membership type based on fstype info. In the case of
            # these raid member indicators from scan_disks() we have the
            # current truth provided so update the db role status accordingly.
            # N.B. this if else could be expanded to accommodate other
            # roles based on the fs found
            if d.fstype == 'isw_raid_member' or d.fstype == 'linux_raid_member':
                # We have an indicator of mdraid membership so update existing
                # role info if any.
                # N.B. We have a minor legacy issue in that prior to using json
                # format for the db role field we stored one of 2 strings.
                # if these 2 strings are found then ignore them as we then
                # overwrite with our current finding and in the new json format.
                # I.e. non None could also be a legacy entry so follow overwrite
                # path when legacy entry found by treating as a None entry.
                # TODO: When we reset migrations the following need only check
                # TODO: "dob.role is not None"
                if dob.role is not None and dob.role != 'isw_raid_member' \
                        and dob.role != 'linux_raid_member':
                    # get our known roles into a dictionary
                    known_roles = json.loads(dob.role)
                    # create or update an mdraid dictionary entry
                    known_roles['mdraid'] = str(d.fstype)
                    # return updated dict to json format and store in db object
                    dob.role = json.dumps(known_roles)
                else:  # We have a dob.role = None so just insert our new role.
                    # Also applies to legacy pre json role entries.
                    dob.role = '{"mdraid": "' + d.fstype + '"}'  # json string
            else:  # We know this disk is not an mdraid raid member.
                # No identified role from scan_disks() fstype value (mdraid
                # only for now )so we preserve any prior known roles not
                # exposed by scan_disks but remove the mdraid role if found.
                # TODO: When we reset migrations the following need only check
                # TODO: "dob.role is not None"
                if dob.role is not None and dob.role != 'isw_raid_member' \
                        and dob.role != 'linux_raid_member':
                    # remove mdraid role if found but preserve prior roles
                    # which should now only be in json format
                    known_roles = json.loads(dob.role)
                    if 'mdraid' in known_roles:
                        if len(known_roles) > 1:
                            # mdraid is not the only entry so we have to pull
                            # out only mdraid from dict and convert back to json
                            del known_roles['mdraid']
                            dob.role = json.dumps(known_roles)
                        else:
                            # mdraid was the only entry so we need not bother
                            # with dict edit and json conversion only to end up
                            # with an empty json {} so revert to default 'None'.
                            dob.role = None
                else:  # Empty or legacy role entry.
                    # We have either None or a legacy mdraid role when this disk
                    # is no longer an mdraid member. We can now assert None.
                    dob.role = None
            # If our existing Pool db knows of this disk's pool via it's label:
            if (Pool.objects.filter(name=d.label).exists()):
                # update the disk db object's pool field accordingly.
                dob.pool = Pool.objects.get(name=d.label)

                #this is for backwards compatibility. root pools created
                #before the pool.role migration need this. It can safely be
                #removed a few versions after 3.8-11 or when we reset migrations.
                if (d.root is True):
                    dob.pool.role = 'root'
                    dob.pool.save()
            else:  # this disk is not known to exist in any pool via it's label
                dob.pool = None
            # If no pool has yet been found with this disk's label in and
            # the attached disk is our root disk (flagged by scan_disks)
            if (dob.pool is None and d.root is True):
                # setup our special root disk db entry in Pool
                # TODO: dynamically retrieve raid level.
                p = Pool(name=d.label, raid='single', role='root')
                p.disk_set.add(dob)
                p.save()
                # update disk db object to reflect special root pool status
                dob.pool = p
                dob.save()
                p.size = pool_usage(mount_root(p))[0]
                enable_quota(p)
                p.uuid = btrfs_uuid(dob.name)
                p.save()
            # save our updated db disk object
            dob.save()
        # Update online db entries with S.M.A.R.T availability and status.
        for do in Disk.objects.all():
            # find all the not offline db entries
            if (not do.offline):
                # We have an attached disk db entry.
                # Since our Disk.name model now uses by-id type names we can
                # do cheap matches to the beginnings of these names to find
                # virtio, md, or sdcard devices which are assumed to have no
                # SMART capability.
                # We also disable devices smart support when they have a
                # fake serial number as ascribed by scan_disks as any SMART
                # data collected is then less likely to be wrongly associated
                # with the next device that takes this temporary drive's name.
                # Also note that with no serial number some device types will
                # not have a by-id type name expected by the smart subsystem.
                # This has only been observed in no serial virtio devices.
                if (re.match('fake-serial-', do.serial) is not None) or \
                        (re.match('virtio-|md-|mmc-|nvme-', do.name) is not None):
                    # Virtio disks (named virtio-*), md devices (named md-*),
                    # and an sdcard reader that provides devs named mmc-* have
                    # no smart capability so avoid cluttering logs with
                    # exceptions on probing these with smart.available.
                    # nvme not yet supported by CentOS 7 smartmontools:
                    # https://www.smartmontools.org/ticket/657
                    # Thanks to @snafu in rockstor forum post 1567 for this.
                    do.smart_available = do.smart_enabled = False
                    continue
                # try to establish smart availability and status and update db
                try:
                    # for non ata/sata drives
                    do.smart_available, do.smart_enabled = smart.available(
                        do.name, do.smart_options)
                # 'except Exception, e' is Python-2-only syntax; the 'as' form
                # is valid on Python 2.6+ and Python 3.
                except Exception as e:
                    logger.exception(e)
                    do.smart_available = do.smart_enabled = False
            do.save()
        ds = DiskInfoSerializer(Disk.objects.all().order_by('name'), many=True)
        return Response(ds.data)

Example 18

Project: ion Source File: serializers.py
    @transaction.atomic
    def fetch_activity_list_with_metadata(self, block):
        """Build a mapping of activity id -> activity info dict for *block*.

        Aggregates signup counts, sponsors (including per-scheduled-activity
        sponsor overrides), rooms (including room overrides), and capacities
        using a handful of bulk queries instead of per-activity lookups.
        Returns a FallbackDict keyed by activity id.
        """
        # Prefer an explicitly supplied user in the serializer context;
        # fall back to the requesting user.
        user = self.context.get("user", self.context["request"].user)

        favorited_activities = set(user.favorited_activity_set.values_list("id", flat=True))

        available_restricted_acts = EighthActivity.restricted_activities_available_to_user(user)

        # FallbackDict lazily computes entries for ids not explicitly set,
        # using the bound factory below.
        activity_list = FallbackDict(functools.partial(self.get_activity, user, favorited_activities, available_restricted_acts))
        scheduled_activity_to_activity_map = FallbackDict(self.get_scheduled_activity)

        # Find all scheduled activities that don't correspond to
        # deleted activities
        scheduled_activities = (block.eighthscheduledactivity_set.exclude(activity__deleted=True).select_related("activity"))

        for scheduled_activity in scheduled_activities:
            # Avoid re-fetching scheduled_activity.
            activity_info = self.get_activity(user, favorited_activities, available_restricted_acts, None, scheduled_activity)
            activity = scheduled_activity.activity
            scheduled_activity_to_activity_map[scheduled_activity.id] = activity.id
            activity_list[activity.id] = activity_info

        # Find the number of students signed up for every activity
        # in this block
        activities_with_signups = (EighthSignup.objects.filter(scheduled_activity__block=block).exclude(
            scheduled_activity__activity__deleted=True).values_list("scheduled_activity__activity_id")
            .annotate(user_count=Count("scheduled_activity")))

        for activity, user_count in activities_with_signups:
            activity_list[activity]["roster"]["count"] = user_count

        sponsors_dict = (EighthSponsor.objects.values_list("id", "user_id", "first_name", "last_name", "show_full_name"))

        # Map sponsor id -> {user_id, display name}; full name only when the
        # sponsor's show_full_name flag is set.
        all_sponsors = dict((sponsor[0], {"user_id": sponsor[1],
                                          "name": sponsor[2] + " " + sponsor[3] if sponsor[4] else sponsor[3]}) for sponsor in sponsors_dict)

        activity_ids = scheduled_activities.values_list("activity__id")
        sponsorships = (EighthActivity.sponsors.through.objects.filter(eighthactivity_id__in=activity_ids).select_related("sponsors").values(
            "eighthactivity", "eighthsponsor"))

        scheduled_activity_ids = scheduled_activities.values_list("id", flat=True)
        overidden_sponsorships = (EighthScheduledActivity.sponsors.through.objects.filter(
            eighthscheduledactivity_id__in=scheduled_activity_ids).values("eighthscheduledactivity", "eighthsponsor"))

        for sponsorship in sponsorships:
            activity_id = int(sponsorship["eighthactivity"])
            sponsor_id = sponsorship["eighthsponsor"]
            sponsor = all_sponsors[sponsor_id]

            if sponsor["user_id"]:
                # We're not using User.get_user() here since we only want
                # a value from LDAP that is probably already cached.
                # This eliminates several hundred SQL queries on some
                # pages.
                dn = User.dn_from_id(sponsor["user_id"])
                if dn is not None:
                    name = User(dn=dn).last_name
                else:
                    name = None
            else:
                name = None

            # NOTE(review): the sponsor record's stored name wins and the
            # LDAP-derived last name is only a fallback when it is empty —
            # confirm this precedence is intended.
            if activity_id in activity_list:
                activity_list[activity_id]["sponsors"].append(sponsor["name"] or name)

        # Scheduled activities with explicit sponsors fully replace the
        # default activity-level sponsor list (cleared on first override).
        activities_sponsors_overidden = []
        for sponsorship in overidden_sponsorships:
            scheduled_activity_id = sponsorship["eighthscheduledactivity"]
            activity_id = scheduled_activity_to_activity_map[scheduled_activity_id]
            sponsor_id = sponsorship["eighthsponsor"]
            sponsor = all_sponsors[sponsor_id]

            if activity_id not in activities_sponsors_overidden:
                activities_sponsors_overidden.append(activity_id)
                del activity_list[activity_id]["sponsors"][:]

            if sponsor["user_id"]:
                # See a few lines up for why we're not using User.get_user()
                dn = User.dn_from_id(sponsor["user_id"])
                if dn is not None:
                    name = User(dn=dn).last_name
                else:
                    name = None
            else:
                name = None
            activity_list[activity_id]["sponsors"].append(sponsor["name"] or name)

        roomings = (EighthActivity.rooms.through.objects.filter(eighthactivity_id__in=activity_ids).select_related("eighthroom", "eighthactivity"))
        overidden_roomings = (EighthScheduledActivity.rooms.through.objects.filter(
            eighthscheduledactivity_id__in=scheduled_activity_ids).select_related("eighthroom", "eighthscheduledactivity"))

        for rooming in roomings:
            activity_id = rooming.eighthactivity.id
            activity_cap = rooming.eighthactivity.default_capacity
            room_name = rooming.eighthroom.name
            activity_list[activity_id]["rooms"].append(room_name)
            if activity_cap:
                # use activity default capacity instead of sum of activity rooms
                activity_list[activity_id]["roster"]["capacity"] = activity_cap
            else:
                activity_list[activity_id]["roster"]["capacity"] += rooming.eighthroom.capacity

        # Room overrides replace the default rooms and restart the capacity
        # sum from zero (cleared on first override, like sponsors above).
        activities_rooms_overidden = []
        for rooming in overidden_roomings:
            scheduled_activity_id = rooming.eighthscheduledactivity.id

            activity_id = scheduled_activity_to_activity_map[scheduled_activity_id]
            if activity_id not in activities_rooms_overidden:
                activities_rooms_overidden.append(activity_id)
                del activity_list[activity_id]["rooms"][:]
                activity_list[activity_id]["roster"]["capacity"] = 0
            room_name = rooming.eighthroom.name
            activity_list[activity_id]["rooms"].append(room_name)
            activity_list[activity_id]["roster"]["capacity"] += rooming.eighthroom.capacity

        # A per-scheduled-activity capacity, when set, has the final word.
        for scheduled_activity in scheduled_activities:
            if scheduled_activity.capacity is not None:
                capacity = scheduled_activity.capacity
                sched_act_id = scheduled_activity.activity.id
                activity_list[sched_act_id]["roster"]["capacity"] = capacity

        return activity_list

Example 19

Project: django-bulk-admin Source File: admin.py
    @csrf_protect_m
    @transaction.atomic
    def bulk_view(self, request, form_url='', extra_context=None):
        """Admin view for adding/editing many model instances at once.

        GET (optionally with ?pks=1,2,3) renders a formset over the selected
        objects; POST validates and saves the formset inside one transaction.
        Returns the rendered change-form response.
        """
        # Honor the admin's _to_field redirect machinery, but only for
        # explicitly allowed fields.
        to_field = request.POST.get(TO_FIELD_VAR, request.GET.get(TO_FIELD_VAR))
        if to_field and not self.to_field_allowed(request, to_field):
            raise DisallowedModelAdminToField("The field %s cannot be referenced." % to_field)

        model = self.model
        opts = model._meta

        # "_continue" means re-render the formset after saving instead of
        # redirecting; force_continue may be set later by POST transformation.
        continue_requested = request.POST.get('_continue', request.GET.get('_continue'))
        force_continue = False
        inline = self.get_bulk_inline(request)
        formset_class = inline.get_formset(request)
        formset_params = {}
        prefix = formset_class.get_default_prefix()
        queryset = inline.get_queryset(request)

        # Without add permission, no extra (blank) forms are allowed.
        if not self.has_add_permission(request):
            formset_class.max_num = 0

        if request.method == 'GET':
            if 'pks' in request.GET and self.has_change_permission(request):
                pks = [opts.pk.to_python(pk) for pk in request.GET.get('pks').split(',')]
                queryset = queryset.filter(pk__in=pks)
            else:
                # No pks (or no change permission): start with an empty set.
                queryset = queryset.none()

        elif request.method == 'POST':
            management_form = ManagementForm(request.POST, prefix=prefix)

            if not management_form.is_valid():
                raise ValidationError(
                    _('ManagementForm data is missing or has been tampered with'),
                    code='missing_management_form',
                )

            # Enforce add/change permissions against the submitted form counts
            # before touching any data.
            if not self.has_add_permission(request) and management_form.cleaned_data[INITIAL_FORM_COUNT] < management_form.cleaned_data[TOTAL_FORM_COUNT]:
                raise PermissionDenied

            if not self.has_change_permission(request) and management_form.cleaned_data[INITIAL_FORM_COUNT] > 0:
                raise PermissionDenied

            queryset = self.transform_queryset(request, queryset, management_form, prefix)

            post, files, force_continue = self.transform_post_and_files(request, prefix)
            formset_params.update({
                'data': post,
                'files': files,
            })

        formset_params['queryset'] = queryset

        formset = formset_class(**formset_params)

        if request.method == 'POST':
            if formset.is_valid():
                self.save_formset(request, form=None, formset=formset, change=False)

                if continue_requested or force_continue:
                    # The implementation of ModelAdmin redirects to the change view if valid and continue was requested
                    # The change view then reads the edited model again from database
                    # In our case, we can't make a redirect as we would loose the information which models should be edited
                    # Thus, we create a new formset with the edited models and continue as this would have been a usual GET request

                    if self.has_change_permission(request):
                        queryset = _ListQueryset(queryset)
                        queryset.extend(formset.new_objects)
                    else:
                        queryset = _ListQueryset()

                    formset_params.update({
                        'data': None,
                        'files': None,
                        'queryset': queryset,
                    })

                    formset = formset_class(**formset_params)

                    msg = _('The %s were bulk added successfully. You may edit them again below.') % (force_text(opts.verbose_name_plural),)
                    self.message_user(request, msg, messages.SUCCESS)

                else:
                    return self.response_bulk(request, formset)

        # Collect media from the formset's inline admin wrappers for the page.
        media = self.media

        inline_formsets = self.get_inline_formsets(request, [formset], [inline], obj=None)
        for inline_formset in inline_formsets:
            media = media + inline_formset.media

        # Flatten formset-level and per-form errors for display.
        errors = ErrorList()

        if formset.is_bound:
            errors.extend(formset.non_form_errors())
            for formset_errors in formset.errors:
                errors.extend(list(six.itervalues(formset_errors)))

        context = dict(
            # each_context() gained the request argument in Django 1.8.
            self.admin_site.each_context(request) if django.VERSION >= (1, 8) else self.admin_site.each_context(),
            bulk=True,
            bulk_formset_prefix=prefix,
            bulk_upload_fields=self.get_bulk_upload_fields(request),
            title=_('Bulk add %s') % force_text(opts.verbose_name_plural),
            is_popup=(IS_POPUP_VAR in request.POST or
                      IS_POPUP_VAR in request.GET),
            to_field=to_field,
            media=media,
            inline_admin_formsets=inline_formsets,
            errors=errors,
            preserved_filters=self.get_preserved_filters(request),
        )

        context.update(extra_context or {})

        return self.render_change_form(request, context, add=True, change=False, obj=None, form_url=form_url)

Example 20

Project: openduty Source File: incidents.py
    def create(self, request, *args, **kwargs):
        """Create or update an Incident from an inbound API event.

        Authenticates via the service_key token, then atomically upserts the
        incident keyed by (incident_key, service), logs the event, and fires
        or cancels notifications depending on the event type. Returns a DRF
        Response (201 on success, 400/403/404 on error).
        """
        try:
            token = Token.objects.get(key=request.DATA["service_key"])
            serviceToken = ServiceTokens.objects.get(token_id=token)
            service = serviceToken.service_id
        except ServiceTokens.DoesNotExist:
            return Response({}, status=status.HTTP_404_NOT_FOUND)
        except Token.DoesNotExist:
            return Response({}, status=status.HTTP_403_FORBIDDEN)

        with transaction.atomic():
            try:
                incident = Incident.objects.get(
                    incident_key=request.DATA["incident_key"],
                    service_key=service)

                # NOTE: the KeyError branch below also catches a missing
                # "incident_key" or "event_type" key raised here, not just
                # the failed Incident lookup.
                event_log_message = "%s api key changed %s from %s to %s" % (
                    serviceToken.name, incident.incident_key,
                    incident.event_type, request.DATA['event_type'])
            except (Incident.DoesNotExist, KeyError):
                incident = Incident()
                try:
                    incident.incident_key = request.DATA["incident_key"]
                except KeyError:
                    # No incident_key supplied: auto-generate one for new
                    # triggers, otherwise the caller must identify the
                    # incident, so fail the request.
                    if request.DATA["event_type"] == Incident.TRIGGER:
                        # NOTE(review): b64encode returns bytes on Python 3,
                        # so .replace('=', '') is Python-2-only here — confirm
                        # the project's Python version.
                        incident.incident_key = base64.urlsafe_b64encode(
                            uuid.uuid1().bytes).replace(
                            '=',
                            '')
                    else:
                        response = {}
                        response["status"] = "failure"
                        response["message"] = "Mandatory parameter missing"
                        return Response(
                            response,
                            status=status.HTTP_400_BAD_REQUEST)
                incident.service_key = service

                event_log_message = "%s api key created %s with status %s" % (
                    serviceToken.name, incident.incident_key, request.DATA['event_type'])

            # Only state-changing transitions are persisted and logged.
            if self.is_relevant(incident, request.DATA['event_type']):
                event_log = EventLog()
                # Anonymous user for testing
                # NOTE(review): is_anonymous() as a method call is the
                # pre-Django-1.10 API; it is a property in newer Django.
                if request.user.is_anonymous():
                    user = None
                else:
                    user = request.user
                event_log.user = user
                event_log.service_key = incident.service_key
                event_log.data = event_log_message
                event_log.occurred_at = timezone.now()

                incident.event_type = request.DATA["event_type"]
                # Description is truncated to the model's 100-char limit.
                incident.description = request.DATA["description"][:100]
                incident.details = request.DATA.get("details", "")
                incident.occurred_at = timezone.now()
                try:
                    incident.full_clean()
                except ValidationError as e:
                    return Response(
                        {'errors': e.messages},
                        status=status.HTTP_400_BAD_REQUEST)
                incident.save()
                event_log.incident_key = incident
                event_log.action = incident.event_type
                event_log.save()
                # Suppress notifications while the whole service is silenced.
                servicesilenced = ServiceSilenced.objects.filter(
                    service=service).count() > 0
                if incident.event_type == Incident.TRIGGER and not servicesilenced:
                    NotificationHelper.notify_incident(incident)
                if incident.event_type == "resolve" or incident.event_type == Incident.ACKNOWLEDGE:
                    ScheduledNotification.remove_all_for_incident(incident)

            headers = self.get_success_headers(request.POST)

            response = {}
            response["status"] = "success"
            response["message"] = "Event processed"
            response["incident_key"] = incident.incident_key
            return Response(
                response,
                status=status.HTTP_201_CREATED,
                headers=headers)
Example 21

Project: Django--an-app-at-a-time Source File: fields.py
def create_generic_related_manager(superclass):
    """
    Factory function for a manager that subclasses 'superclass' (which is a
    Manager) and adds behavior for generic related objects.

    The returned class is instantiated by the generic-relation descriptor;
    all constructor arguments describe how the related model points back at
    ``instance`` through a (content type, object id) column pair.
    """

    class GenericRelatedObjectManager(superclass):
        def __init__(self, model=None, instance=None, symmetrical=None,
                     source_col_name=None, target_col_name=None, content_type=None,
                     content_type_field_name=None, object_id_field_name=None,
                     prefetch_cache_name=None):

            super(GenericRelatedObjectManager, self).__init__()
            self.model = model
            self.content_type = content_type
            self.symmetrical = symmetrical
            self.instance = instance
            self.source_col_name = source_col_name
            self.target_col_name = target_col_name
            self.content_type_field_name = content_type_field_name
            self.object_id_field_name = object_id_field_name
            self.prefetch_cache_name = prefetch_cache_name
            self.pk_val = self.instance._get_pk_val()
            # Filters restricting every queryset to rows that point back at
            # `instance` through the generic FK pair (content type + object id).
            self.core_filters = {
                '%s__pk' % content_type_field_name: content_type.id,
                '%s' % object_id_field_name: instance._get_pk_val(),
            }

        def __call__(self, **kwargs):
            # We use **kwargs rather than a kwarg argument to enforce the
            # `manager='manager_name'` syntax.
            # Rebuild a manager of the same kind but based on the named
            # manager class, preserving all relation parameters.
            manager = getattr(self.model, kwargs.pop('manager'))
            manager_class = create_generic_related_manager(manager.__class__)
            return manager_class(
                model=self.model,
                instance=self.instance,
                symmetrical=self.symmetrical,
                source_col_name=self.source_col_name,
                target_col_name=self.target_col_name,
                content_type=self.content_type,
                content_type_field_name=self.content_type_field_name,
                object_id_field_name=self.object_id_field_name,
                prefetch_cache_name=self.prefetch_cache_name,
            )
        # Prevent Django templates from calling the manager when resolving
        # `{{ obj.relation }}`.
        do_not_call_in_templates = True

        def __str__(self):
            return repr(self)

        def get_queryset(self):
            # Serve from the prefetch cache when `prefetch_related` already
            # populated it; otherwise hit the read database with the
            # generic-FK filters.
            try:
                return self.instance._prefetched_objects_cache[self.prefetch_cache_name]
            except (AttributeError, KeyError):
                db = self._db or router.db_for_read(self.model, instance=self.instance)
                return super(GenericRelatedObjectManager, self).get_queryset().using(db).filter(**self.core_filters)

        def get_prefetch_queryset(self, instances, queryset=None):
            if queryset is None:
                queryset = super(GenericRelatedObjectManager, self).get_queryset()

            queryset._add_hints(instance=instances[0])
            queryset = queryset.using(queryset._db or self._db)

            # One query fetching related rows for *all* instances at once.
            query = {
                '%s__pk' % self.content_type_field_name: self.content_type.id,
                '%s__in' % self.object_id_field_name: set(obj._get_pk_val() for obj in instances)
            }

            # We (possibly) need to convert object IDs to the type of the
            # instances' PK in order to match up instances:
            object_id_converter = instances[0]._meta.pk.to_python
            # Tuple contract expected by Django's prefetch machinery:
            # (queryset, rel_obj_key, instance_key, single, cache_name).
            return (queryset.filter(**query),
                    lambda relobj: object_id_converter(getattr(relobj, self.object_id_field_name)),
                    lambda obj: obj._get_pk_val(),
                    False,
                    self.prefetch_cache_name)

        def add(self, *objs):
            # Point each object at `instance` and save, all within one
            # atomic block on the write database.
            db = router.db_for_write(self.model, instance=self.instance)
            with transaction.atomic(using=db, savepoint=False):
                for obj in objs:
                    if not isinstance(obj, self.model):
                        raise TypeError("'%s' instance expected" % self.model._meta.object_name)
                    setattr(obj, self.content_type_field_name, self.content_type)
                    setattr(obj, self.object_id_field_name, self.pk_val)
                    obj.save()
        add.alters_data = True

        def remove(self, *objs, **kwargs):
            if not objs:
                return
            # `bulk=True` deletes in a single query; False deletes per-object
            # (firing per-instance signals) — see _clear().
            bulk = kwargs.pop('bulk', True)
            self._clear(self.filter(pk__in=[o.pk for o in objs]), bulk)
        remove.alters_data = True

        def clear(self, **kwargs):
            bulk = kwargs.pop('bulk', True)
            self._clear(self, bulk)
        clear.alters_data = True

        def _clear(self, queryset, bulk):
            db = router.db_for_write(self.model, instance=self.instance)
            queryset = queryset.using(db)
            if bulk:
                # `QuerySet.delete()` creates its own atomic block which
                # contains the `pre_delete` and `post_delete` signal handlers.
                queryset.delete()
            else:
                with transaction.atomic(using=db, savepoint=False):
                    for obj in queryset:
                        obj.delete()
        _clear.alters_data = True

        def create(self, **kwargs):
            # Force the generic-FK fields so the new object belongs to
            # `instance` regardless of caller-supplied kwargs.
            kwargs[self.content_type_field_name] = self.content_type
            kwargs[self.object_id_field_name] = self.pk_val
            db = router.db_for_write(self.model, instance=self.instance)
            return super(GenericRelatedObjectManager, self).using(db).create(**kwargs)
        create.alters_data = True

        def get_or_create(self, **kwargs):
            kwargs[self.content_type_field_name] = self.content_type
            kwargs[self.object_id_field_name] = self.pk_val
            db = router.db_for_write(self.model, instance=self.instance)
            return super(GenericRelatedObjectManager, self).using(db).get_or_create(**kwargs)
        get_or_create.alters_data = True

        def update_or_create(self, **kwargs):
            kwargs[self.content_type_field_name] = self.content_type
            kwargs[self.object_id_field_name] = self.pk_val
            db = router.db_for_write(self.model, instance=self.instance)
            return super(GenericRelatedObjectManager, self).using(db).update_or_create(**kwargs)
        update_or_create.alters_data = True

    return GenericRelatedObjectManager

Example 22

Project: Django--an-app-at-a-time Source File: layermapping.py
    def __init__(self, model, data, mapping, layer=0,
                 source_srs=None, encoding='utf-8',
                 transaction_mode='commit_on_success',
                 transform=True, unique=None, using=None):
        """
        Initialize a LayerMapping.

        ``model`` is a Django Model class (not an instance), ``data`` is a
        DataSource or a string path to an OGR-supported data file, and
        ``mapping`` is the field-mapping dictionary.  See the module level
        docstring for details and keyword argument usage.
        """
        # Resolve the OGR DataSource and pick out the requested layer.
        if isinstance(data, six.string_types):
            self.ds = DataSource(data, encoding=encoding)
        else:
            self.ds = data
        self.layer = self.ds[layer]

        # Database alias to write to, and its spatial operations backend.
        self.using = router.db_for_write(model) if using is None else using
        self.spatial_backend = connections[self.using].ops

        self.mapping = mapping
        self.model = model

        # Fail fast: initialization aborts if the layer and mapping are
        # inconsistent.
        self.check_layer()

        # Look up the model's geometry column (raises if there is none);
        # backends without transform support force `transform` off.
        if not connections[self.using].features.supports_transform:
            transform = False
        else:
            self.geo_field = self.geometry_field()

        if transform:
            # Validate the source SRS and build the coordinate transform.
            self.source_srs = self.check_srs(source_srs)
            self.transform = self.coord_transform()
        else:
            self.transform = transform

        # Validate the requested string encoding for OFTString fields;
        # an unknown codec raises LookupError here rather than later.
        if encoding:
            from codecs import lookup
            lookup(encoding)
        self.encoding = encoding or None

        if unique:
            self.check_unique(unique)
            transaction_mode = 'autocommit'  # Has to be set to autocommit.
        self.unique = unique or None

        # Map the transaction mode onto its decorator; unknown modes are
        # rejected.
        self.transaction_mode = transaction_mode
        mode_decorators = {
            'autocommit': None,
            'commit_on_success': transaction.atomic,
        }
        try:
            self.transaction_decorator = mode_decorators[transaction_mode]
        except KeyError:
            raise LayerMapError('Unrecognized transaction mode: %s' % transaction_mode)

Example 23

Project: PyClassLessons Source File: layermapping.py
    def __init__(self, model, data, mapping, layer=0,
                 source_srs=None, encoding='utf-8',
                 transaction_mode='commit_on_success',
                 transform=True, unique=None, using=None):
        """
        A LayerMapping object is initialized using the given Model (not an instance),
        a DataSource (or string path to an OGR-supported data file), and a mapping
        dictionary.  See the module level docstring for more details and keyword
        argument usage.
        """
        # Getting the DataSource and the associated Layer.
        if isinstance(data, six.string_types):
            self.ds = DataSource(data, encoding=encoding)
        else:
            self.ds = data
        self.layer = self.ds[layer]

        # Database alias to write to, and its spatial operations backend.
        self.using = using if using is not None else router.db_for_write(model)
        self.spatial_backend = connections[self.using].ops

        # Setting the mapping & model attributes.
        self.mapping = mapping
        self.model = model

        # Checking the layer -- initialization of the object will fail if
        # things don't check out before hand.
        self.check_layer()

        # Getting the geometry column associated with the model (an
        # exception will be raised if there is no geometry column).
        # MySQL's spatial backend cannot transform, so force it off there.
        if self.spatial_backend.mysql:
            transform = False
        else:
            self.geo_field = self.geometry_field()

        # Checking the source spatial reference system, and getting
        # the coordinate transformation object (unless the `transform`
        # keyword is set to False)
        if transform:
            self.source_srs = self.check_srs(source_srs)
            self.transform = self.coord_transform()
        else:
            self.transform = transform

        # Setting the encoding for OFTString fields, if specified.
        if encoding:
            # Making sure the encoding exists, if not a LookupError
            # exception will be thrown.
            from codecs import lookup
            lookup(encoding)
            self.encoding = encoding
        else:
            self.encoding = None

        if unique:
            self.check_unique(unique)
            transaction_mode = 'autocommit'  # Has to be set to autocommit.
            self.unique = unique
        else:
            self.unique = None

        # Setting the transaction decorator with the function in the
        # transaction modes dictionary.
        self.transaction_mode = transaction_mode
        if transaction_mode == 'autocommit':
            self.transaction_decorator = None
        elif transaction_mode == 'commit_on_success':
            self.transaction_decorator = transaction.atomic
        else:
            raise LayerMapError('Unrecognized transaction mode: %s' % transaction_mode)

Example 24

Project: reviewboard Source File: fill-database.py
    @transaction.atomic
    def handle_noargs(self, users=None, review_requests=None, diffs=None,
                      reviews=None, diff_comments=None, password=None,
                      verbosity=NORMAL, **options):
        """Populate the database with randomly generated test data.

        Creates ``users`` users, each with a random number of review
        requests, diffs, reviews and diff comments bounded by the
        corresponding arguments.  Runs inside a single transaction, so a
        raised CommandError reverts everything.
        """
        num_of_requests = None
        num_of_diffs = None
        num_of_reviews = None
        num_of_diff_comments = None
        random.seed()

        if review_requests:
            num_of_requests = self.parse_command("review_requests",
                                                 review_requests)

            # Setup repository.
            repo_dir = os.path.abspath(
                os.path.join(sys.argv[0], "..", "scmtools", "testdata",
                             "git_repo"))

            # Throw exception on error so transaction reverts.
            if not os.path.exists(repo_dir):
                raise CommandError("No path to the repository")

            self.repository = Repository.objects.create(
                name="Test Repository", path=repo_dir,
                tool=Tool.objects.get(name="Git"))

        if diffs:
            num_of_diffs = self.parse_command("diffs", diffs)

            # Create the diff directory locations.
            diff_dir_tmp = os.path.abspath(
                os.path.join(sys.argv[0], "..", "reviews", "management",
                             "commands", "diffs"))

            # Throw exception on error so transaction reverts.
            if not os.path.exists(diff_dir_tmp):
                raise CommandError("Diff dir does not exist")

            diff_dir = diff_dir_tmp + '/'  # Add trailing slash.

            # Get a list of the appropriate files.
            files = [f for f in os.listdir(diff_dir)
                     if f.endswith('.diff')]

            # Check for any diffs in the files.
            if len(files) == 0:
                raise CommandError("No diff files in this directory")

        if reviews:
            num_of_reviews = self.parse_command("reviews", reviews)

        if diff_comments:
            num_of_diff_comments = self.parse_command("diff-comments",
                                                      diff_comments)

        # Users is required for any other operation.
        if not users:
            raise CommandError("At least one user must be added")

        # Start adding data to the database.
        for i in range(1, users + 1):
            new_user = User.objects.create(
                username=self.rand_username(),  # Avoids having to flush db.
                first_name=random.choice(NAMES),
                last_name=random.choice(NAMES),
                email="[email protected]",
                is_staff=False,
                is_active=True,
                is_superuser=False)

            # Use the supplied password, or a default for test accounts.
            new_user.set_password(password or "test1")
            new_user.save()

            Profile.objects.create(
                user=new_user,
                first_time_setup_done=True,
                collapsed_diffs=True,
                wordwrapped_diffs=True,
                syntax_highlighting=True,
                show_closed=True)

            # Review Requests.
            req_val = self.pick_random_value(num_of_requests)

            if int(verbosity) > NORMAL:
                self.stdout.write("For user %s:%s" % (i, new_user.username))
                self.stdout.write("=============================")

            for j in range(0, req_val):
                if int(verbosity) > NORMAL:
                    self.stdout.write("Request #%s:" % j)

                review_request = ReviewRequest.objects.create(new_user, None)
                review_request.public = True
                review_request.summary = self.lorem_ipsum("summary")
                review_request.description = self.lorem_ipsum("description")
                review_request.shipit_count = 0
                review_request.repository = self.repository
                # Set the targeted reviewer to superuser or 1st defined.
                if j == 0:
                    review_request.target_people.add(User.objects.get(pk=1))
                review_request.save()

                # Add the diffs if any to add.
                diff_val = self.pick_random_value(num_of_diffs)

                # If adding diffs add history.
                if diff_val > 0:
                    diffset_history = DiffSetHistory.objects.create(
                        name='testDiffFile' + six.text_type(i))
                    diffset_history.save()

                # Won't execute if diff_val is 0, ie: no diffs requested.
                for k in range(0, diff_val):
                    if int(verbosity) > NORMAL:
                        self.stdout.write("%s:\tDiff #%s" % (i, k))

                    # Only set when the upload form validates; checked
                    # below before creating diff comments.
                    cur_diff = None

                    random_number = random.randint(0, len(files) - 1)
                    file_to_open = diff_dir + files[random_number]

                    # Context manager ensures the diff file is closed even
                    # if form handling or publishing raises (the original
                    # leaked the handle on any exception).
                    with open(file_to_open, 'r') as f:
                        form = UploadDiffForm(review_request=review_request,
                                              files={"path": File(f)})

                        if form.is_valid():
                            cur_diff = form.create(f, None, diffset_history)

                        review_request.diffset_history = diffset_history
                        review_request.save()
                        review_request.publish(new_user)

                    # Add the reviews if any.
                    review_val = self.pick_random_value(num_of_reviews)

                    for l in range(0, review_val):
                        if int(verbosity) > NORMAL:
                            self.stdout.write("%s:%s:\t\tReview #%s:" %
                                              (i, j, l))

                        reviews = Review.objects.create(
                            review_request=review_request,
                            user=new_user)

                        reviews.publish(new_user)

                        # Add comments if any.  Skip them entirely when the
                        # diff upload failed validation (cur_diff is None);
                        # previously this raised UnboundLocalError.
                        if cur_diff is not None:
                            comment_val = self.pick_random_value(
                                num_of_diff_comments)
                        else:
                            comment_val = 0

                        for m in range(0, comment_val):
                            if int(verbosity) > NORMAL:
                                self.stdout.write("%s:%s:\t\t\tComments #%s" %
                                                  (i, j, m))

                            if m == 0:
                                file_diff = cur_diff.files.order_by('id')[0]

                            # Choose random lines to comment.
                            # Max lines: should be mod'd in future to read
                            # diff.
                            max_lines = 220
                            first_line = random.randrange(1, max_lines - 1)
                            remain_lines = max_lines - first_line
                            num_lines = random.randrange(1, remain_lines)

                            diff_comment = Comment.objects.create(
                                filediff=file_diff,
                                text="comment number %s" % (m + 1),
                                first_line=first_line,
                                num_lines=num_lines)

                            review_request.publish(new_user)

                            reviews.comments.add(diff_comment)
                            reviews.save()
                            reviews.publish(new_user)

                            db.reset_queries()

                        # No comments, so have previous layer clear queries.
                        if comment_val == 0:
                            db.reset_queries()

                    if review_val == 0:
                        db.reset_queries()

                if diff_val == 0:
                    db.reset_queries()

            if req_val == 0:
                db.reset_queries()

            # Generate output as users & data is created.
            if req_val != 0:
                self.stdout.write("user %s created with %s requests"
                                  % (new_user.username, req_val))
            else:
                self.stdout.write("user %s created successfully"
                                  % new_user.username)

Example 25

Project: SchoolIdolAPI Source File: importcardstats.py
@transaction.atomic
def importcardstats(opt):
    """Import card ids & stats by scraping the decaf wiki card list.

    opt['local'] reads a previously saved 'decaf.html' instead of
    fetching the wiki page over HTTP.  Runs in one transaction so a
    failed import leaves the database unchanged.

    NOTE(review): Python 2 code (print statements, urllib2,
    str.translate(None, ...)).  `id` and `type` shadow builtins.
    """
    local = opt['local']
    print '### Import card ids & stats from decaf wiki'
    if local:
        f = open('decaf.html', 'r')
    else:
        f = urllib2.urlopen('http://decaf.kouhi.me/lovelive/index.php?title=List_of_Cards&action=edit')

    # Parser state: section headers on the page switch the current
    # rarity (`currentType`) and promo/special flag for following rows.
    currentType = types['Normals']
    special = specials['None']
    for line in f.readlines():
        line = h.unescape(line)
        data = str(line).split('||')
        if len(data) == 1:
            # Single-cell line: a wiki section header, e.g. "== Rares ==".
            name = clean(data[0].translate(None, '='))
            if name in types.keys():
                currentType = types[name]
                special = specials['None']
                note = ""
                center = None
            if name in specials.keys():
                special = specials[name]
                note = ""
                center = None
        elif len(data) > 2:
            # Table row describing one card.
            id = int(clean(data[0]))
            print 'Importing card #', id, '...',; sys.stdout.flush()
            name = cleanwithquotes(data[1].split('|')[1].split('#')[0])
            type = clean(data[2])

            hp = 0
            minStats = (0, 0, 0)
            intStats = (0, 0, 0)
            maxStats = (0, 0, 0)
            nextData = 7
            skill = None
            promo = None
            release_date = None
            event_en = ''
            event_jp = ''
            event = None

            if len(data) > 3:
                # special == 2 rows carry a note instead of HP.
                if special != 2:
                    hp = int(clean(data[3]))
                else:
                    note = clean(data[3].split('|')[-1])
            if len(data) > 6:
                # Stat triples are (smile, pure, cool).
                minStats = (int(clean(data[4])), int(clean(data[5])), int(clean(data[6])))
            if currentType != 'N' and special == 0 and len(data) > 14: # intermediate stats
                intStats = (int(clean(data[8])), int(clean(data[9])), int(clean(data[10])))
                maxStats = (int(clean(data[12])), int(clean(data[13])), int(clean(data[14])))
                nextData = 15
            elif len(data) > 10:
                maxStats = (int(clean(data[8])), int(clean(data[9])), int(clean(data[10])))
                nextData = 11
            if len(data) > nextData:
                skill = clean(data[nextData].split('|')[-1])
            if len(data) > nextData + 1:
                center = clean(data[nextData + 1].split('|')[-1])

            # The <small><span> inside the name cell carries promo item,
            # release date, or event-prize info.
            soup = BeautifulSoup(data[1])
            soupsmall = soup.small
            if soupsmall is not None:
                soupspan = soupsmall.span
                if soupspan is not None:
                    if special == 1:
                        promo = soupspan.text.split('c/w ')[-1].replace('[[', '').replace('|', ' (').replace(']]', ') ')
                    else:
                        if 'Added on' in soupspan.text:
                            release_date_str = soupspan.text.split('Added on ')[1]
                            release_date_str = release_date_str.replace(' (Seal Shop)', '')
                            release_date = datetime.datetime.fromtimestamp(time.mktime(time.strptime(release_date_str, '%B %d, %Y')))
                        elif 'event prize' in soupspan.text:
                            event_name = soupspan.text.split(' event prize')[0].replace(']]', '').replace('[[', '')
                            event_jp = event_name.split('|')[0]
                            event_en = event_name.split('|')[-1]
                            event, created = models.Event.objects.update_or_create(japanese_name=event_jp, defaults={
                                'romaji_name': event_en,
                            })

            defaults = {
                'name': name,
                'rarity': currentType,
                'attribute': type,
                'is_promo': special == 1,
                'promo_item': promo,
                'is_special': special == 2,
                'hp': hp,
                'minimum_statistics_smile': minStats[0],
                'minimum_statistics_pure': minStats[1],
                'minimum_statistics_cool': minStats[2],
                'non_idolized_maximum_statistics_smile': intStats[0],
                'non_idolized_maximum_statistics_pure': intStats[1],
                'non_idolized_maximum_statistics_cool': intStats[2],
                'idolized_maximum_statistics_smile': maxStats[0],
                'idolized_maximum_statistics_pure': maxStats[1],
                'idolized_maximum_statistics_cool': maxStats[2],
                'skill': skill,
                'center_skill': center,
            }
            # Only override these fields when the row actually carried them.
            if note:
                defaults['skill_details'] = note
            if release_date is not None:
                defaults['release_date'] = release_date
            if event is not None:
                defaults['event'] = event
            idol, created = models.Idol.objects.get_or_create(name=name)
            defaults['idol'] = idol
            card, created = models.Card.objects.update_or_create(id=id, defaults=defaults)
            print 'Done'
    f.close()

Example 26

Project: coursys Source File: importer.py
@transaction.atomic
def import_semester_info(verbose=False, dry_run=False, long_long_ago=False, bootstrap=False):
    """
    Update information on Semester objects from SIMS

    Finding the reference is tricky. Try Googling 'sfu calendar {{year}} "academic dates"'

    long_long_ago: import from the beginning of time
    bootstrap: don't assume Semester.current() will work, for bootstrapping test data creation

    dry_run skips all .save() calls; verbose prints the change log
    instead of (for problems) emailing the admins.
    """
    output = []
    semester_start = semester_first_day()
    semester_end = semester_last_day()
    sims_holidays = [(datetime.datetime.strptime(d, "%Y-%m-%d").date(), h) for d,h in all_holidays()]

    if not bootstrap:
        # we want semesters 5 years into the future: that's a realistic max horizon for grad promises
        current = Semester.current()
        strms = [current.offset_name(i) for i in range(15)]
    else:
        strms = []

    if long_long_ago:
        strms = sorted(list(set(strms) | set(semester_start.keys())))
    semesters = dict((s.name, s) for s in Semester.objects.filter(name__in=strms))

    # Pre-fetch weeks and holidays for all semesters, grouped by semester
    # name, to avoid per-semester queries in the loop below.
    semester_weeks = itertools.groupby(
                SemesterWeek.objects.filter(semester__name__in=strms).select_related('semester'),
                lambda sw: sw.semester.name)
    semester_weeks = dict((k,list(v)) for k,v in semester_weeks)

    holidays = itertools.groupby(
                Holiday.objects.filter(semester__name__in=strms, holiday_type='FULL').select_related('semester'),
                lambda h: h.semester.name)
    holidays = dict((k,list(v)) for k,v in holidays)

    for strm in strms:
        url = settings.BASE_ABS_URL + reverse('coredata.views.edit_semester', kwargs={'semester_name': strm})

        # Semester object
        try:
            semester = semesters[strm]
        except KeyError:
            semester = Semester(name=strm)
            semesters[strm] = semester
            output.append("Creating %s." % (strm,))

        # class start and end dates
        try:
            start = datetime.datetime.strptime(semester_start[strm], "%Y-%m-%d").date()
        except KeyError:
            # No data found about this semester: if there's a date already around, honour it
            # Otherwise, guess "same day as this semester last year" which is probably wrong but close.
            start = semester.start
            if not semester.start:
                lastyr = semesters[semester.offset_name(-3)]
                start = lastyr.start.replace(year=lastyr.start.year+1)
                output.append("Guessing start date for %s." % (strm,))

        try:
            end = datetime.datetime.strptime(semester_end[strm], "%Y-%m-%d").date()
        except KeyError:
            # no classes scheduled yet? Assume 13 weeks exactly
            end = start + datetime.timedelta(days=91)

        if semester.start != start:
            output.append("Changing start date for %s from %s to %s." % (strm, semester.start, start))
            semester.start = start
        if semester.end != end:
            output.append("Changing end date for %s from %s to %s." % (strm, semester.end, end))
            semester.end = end

        if not dry_run:
            semester.save()

        # SemesterWeeks
        weeks = semester_weeks.get(strm, [])
        if not weeks:
            # No weeks recorded at all: create week 1 on the first Monday.
            sw = SemesterWeek(semester=semester, week=1, monday=first_monday(start))
            weeks.append(sw)
            assert sw.monday.weekday() == 0
            output.append("Creating week 1 for %s on %s." % (strm, sw.monday))
            if not dry_run:
                sw.save()
        elif weeks[0].monday != first_monday(start):
            # Week 1 exists but is out of date with the new start date.
            sw = weeks[0]
            sw.monday = first_monday(start)
            output.append("Changing first Monday of %s to %s." % (strm, sw.monday))
            if not dry_run:
                sw.save()

        length = semester.end - semester.start
        if not bootstrap and length > datetime.timedelta(days=92) and len(weeks) < 2 \
                and semester.start - datetime.date.today() < datetime.timedelta(days=365):
            # semester is longer than 13 weeks: insist that the user specify reading week reasonably-soon before the semester starts
            message = "Semester %s is long (%s) but has no reading week specified. Please have a look here: %s\n\nYou probably want to enter the Monday of week 5/6/7/8 as the Monday after reading week, a week later than it would otherwise be." % (strm, length, url)
            if verbose:
                output.append('*** ' + message)
            else:
                import_admin_email(source='coredata.importer.import_semester_info', message=message)
        elif not bootstrap:
            # also check that the last day of classes is at a coherent time. Might reveal problems with reading week specification.
            endweek,_ = semester.week_weekday(semester.end, weeks=weeks)
            if endweek not in [12, 13, 14]:
                message = "Semester %s ends in week %i (should be 13 or 14). That's weird. Have a look here to see if things are coherent: %s" % (strm, endweek, url)
                if verbose:
                    output.append('*** ' + message)
                else:
                    import_admin_email(source='coredata.importer.import_semester_info', message=message)

        # Holidays
        hs = holidays.get(strm, [])
        h_start, h_end = Semester.start_end_dates(semester)
        for dt, desc in [(d,h) for d,h in sims_holidays if h_start <= d <= h_end]:
            # Update an existing Holiday record if one matches the date,
            # otherwise create a new one.
            existing = [h for h in hs if h.date == dt]
            if existing:
                holiday = existing[0]
            else:
                holiday = Holiday(semester=semester, date=dt, holiday_type='FULL')
                output.append("Adding holiday %s on %s." % (desc, dt))

            holiday.description = desc
            if not dry_run:
                holiday.save()



    if verbose:
        print '\n'.join(output)

Example 27

Project: coursys Source File: views.py
def _edit_pagefile(request, course_slug, page_label, kind):
    """
    View to create and edit pages and files.

    If page_label is given, edit that existing page; otherwise create a new
    one.  kind is "page" or "file"; when falsy it is inferred from the page's
    current version.  POSTing delete=yes dispatches to the delete view.
    Everything else runs inside a single transaction so the page, any
    rename-redirect stub, and the offering URL update are saved atomically.
    """
    if request.method == 'POST' and 'delete' in request.POST and request.POST['delete'] == 'yes':
        return _delete_pagefile(request, course_slug, page_label, kind)
    with django.db.transaction.atomic():
        offering = get_object_or_404(CourseOffering, slug=course_slug)
        if page_label:
            # editing an existing page: permission comes from the page's own can_write ACL
            page = get_object_or_404(Page, offering=offering, label=page_label)
            version = page.current_version()
            member = _check_allowed(request, offering, page.can_write, page.editdate())
            old_label = page.label
        else:
            # creating a new page: permission comes from the offering-level setting
            page = None
            version = None
            member = _check_allowed(request, offering, offering.page_creators()) # users who can create pages
            old_label = None

        if isinstance(member, PagePermission):
            return ForbiddenResponse(request, 'Editing of pages by additional-permission holders is not implemented. Sorry')

        # make sure we're looking at the right "kind" (page/file)
        if not kind:
            kind = "file" if version.is_filepage() else "page"

        # get the form class we need
        if kind == "page":
            Form = EditPageForm
        else:
            Form = EditFileForm

        # check that we have an allowed member of the course (and can continue)
        if not member:
            return ForbiddenResponse(request, 'Not allowed to edit/create this '+kind+'.')
        restricted = False
        if member.role == 'STUD':
            # students get the restricted version of the form
            Form = Form.restricted_form
            restricted = True

        if request.method == 'POST':
            form = Form(instance=page, offering=offering, data=request.POST, files=request.FILES)
            if form.is_valid():
                instance = form.save(editor=member)

                # clean up weirdness from restricted form
                if 'label' not in form.cleaned_data:
                    # happens when student edits an existing page
                    instance.label = page.label
                if 'can_write' not in form.cleaned_data:
                    # happens only when students create a page
                    instance.can_write = 'STUD'

                # restricted forms have no release/edit date fields: apply or clear them
                if not restricted and 'releasedate' in form.cleaned_data:
                    instance.set_releasedate(form.cleaned_data['releasedate'])
                elif not restricted:
                    instance.set_releasedate(None)

                if not restricted and 'editdate' in form.cleaned_data:
                    instance.set_editdate(form.cleaned_data['editdate'])
                elif not restricted:
                    instance.set_editdate(None)

                instance.redirect = None

                if old_label and old_label != instance.label:
                    # page has been moved to a new URL: leave a redirect in its place
                    redir_page = Page(offering=instance.offering, label=old_label,
                                      can_read=instance.can_read, can_write=offering.page_creators())
                    redir_page.set_releasedate(instance.releasedate())
                    redir_page.set_editdate(instance.editdate())
                    redir_page.save()
                    redir_version = PageVersion(page=redir_page, title=version.title, redirect=instance.label,
                                                editor=member, comment='automatically generated on label change')
                    redir_version.set_redirect_reason('rename')
                    redir_version.save()
                    messages.info(request, 'Page label changed: the old location (%s) will redirect to this page.' % (old_label,))

                instance.save()

                #LOG EVENT#
                l = LogEntry(userid=request.user.username,
                      description="Edited page %s in %s." % (instance.label, offering),
                      related_object=instance)
                l.save()
                if page:
                    messages.success(request, "Edited "+kind+" \"%s\"." % (instance.label))
                else:
                    messages.success(request, "Created "+kind+" \"%s\"." % (instance.label))

                if not page and instance.label == 'Index' and not offering.url():
                    # new Index page but no existing course URL: set as course web page
                    url = settings.BASE_ABS_URL + instance.get_absolute_url()
                    offering.set_url(url)
                    offering.save()
                    messages.info(request, "Set course URL to new Index page.")

                return HttpResponseRedirect(reverse('pages.views.view_page', kwargs={'course_slug': course_slug, 'page_label': instance.label}))
        else:
            # GET: blank/pre-filled form; ?label= pre-populates label and title
            form = Form(instance=page, offering=offering)
            if 'label' in request.GET:
                label = request.GET['label']
                if label == 'Index':
                    form.initial['title'] = offering.name()
                    form.fields['label'].help_text += u'\u2014the label "Index" indicates the front page for this course.'
                elif label == MACRO_LABEL:
                    # the macros page is instructor-only by default
                    form.initial['can_read'] = 'INST'
                    form.initial['can_write'] = 'INST'
                    form.initial['title'] = MACRO_LABEL
                else:
                    form.initial['title'] = label.title()
                form.initial['label'] = label

        context = {
            'offering': offering,
            'page': page,
            'form': form,
            'kind': kind.title(),
            'is_macro_page': form.initial.get('title', None) == MACRO_LABEL,
        }
        return render(request, 'pages/edit_page.html', context)

Example 28

Project: orchestra Source File: machine_tasks.py
def execute(project_id, step_slug):
    """
    Execute a machine step's function for the given project.

    Claims (or re-claims a FAILED) TaskAssignment for the machine task, runs
    the step's configured execution function, records the result on the
    assignment/task, and creates subsequent project tasks on success.

    Args:
        project_id (int): ID of the project whose machine task should run.
        step_slug (str): slug of the machine step to execute.

    Raises:
        MachineExecutionError:
            The step is a human step, the task is already complete, or the
            task was already picked up by another machine.
    """
    project = Project.objects.get(id=project_id)
    step = Step.objects.get(slug=step_slug,
                            workflow_version=project.workflow_version)
    task = Task.objects.get(project=project,
                            step=step)

    # Run machine function
    if step.is_human:
        raise MachineExecutionError('Step worker type is not machine')

    if task.status == Task.Status.COMPLETE:
        raise MachineExecutionError('Task assignment already completed')

    # Machine tasks are only assigned to one worker/machine,
    # so they should only have one task assignment,
    # and should never be submitted for review.

    with transaction.atomic():
        # Uniqueness constraint on assignment_counter and task prevents
        # concurrent creation of more than one assignment
        task_assignment, created = TaskAssignment.objects.get_or_create(
            assignment_counter=0,
            task=task,
            defaults={
                'status': TaskAssignment.Status.PROCESSING,
                'in_progress_task_data': {}})
        if created:
            task.status = Task.Status.PROCESSING
            task.save()

            Iteration.objects.create(
                assignment=task_assignment,
                start_datetime=task_assignment.start_datetime)
        else:
            # Task assignment already exists
            if task_assignment.status == TaskAssignment.Status.FAILED:
                # Pick up failed task for reprocessing
                task_assignment.status = TaskAssignment.Status.PROCESSING
                task_assignment.save()
            else:
                # Task assignment already processing
                raise MachineExecutionError(
                    'Task already picked up by another machine')

    prerequisites = previously_completed_task_data(task)

    function = locate(step.execution_function['path'])
    kwargs = step.execution_function.get('kwargs', {})
    try:
        project_data = project.project_data
        project_data['project_id'] = project_id
        task_data = function(project_data, prerequisites, **kwargs)
    except Exception:
        # Catch Exception rather than a bare except so KeyboardInterrupt and
        # SystemExit still propagate instead of marking the task FAILED.
        task_assignment.status = TaskAssignment.Status.FAILED
        logger.exception('Machine task has failed')
        task_assignment.save()
        return
    task_assignment.status = TaskAssignment.Status.SUBMITTED
    task_assignment.in_progress_task_data = task_data
    task_assignment.save()

    if task.project.status == Project.Status.ABORTED:
        # If a long-running task's project was aborted while running, we ensure
        # the aborted state on the task.
        task.status = Task.Status.ABORTED
        task.save()
    else:
        task.status = Task.Status.COMPLETE
        task.save()

        iteration = get_latest_iteration(task_assignment)
        iteration.status = Iteration.Status.REQUESTED_REVIEW
        iteration.submitted_data = task_data
        iteration.end_datetime = timezone.now()
        iteration.save()

        create_subsequent_tasks(project)

Example 29

Project: coursys Source File: views.py
@retry_transaction()
@transaction.atomic
def _show_components_student(request, course_slug, activity_slug, userid=None, template="dashboard_student.html", staff=False):
    """
    Show all the component submission history of this activity.

    On GET, renders the submission dashboard for the student (or the staff
    view when staff=True).  On POST, validates one form per submission
    component, rejects duplicate filenames, saves a new Student/Group
    submission for the valid components, and reports any components that
    failed validation.
    """
    if userid == None:
        userid = request.user.username
    course = get_object_or_404(CourseOffering, slug=course_slug)
    activity = get_object_or_404(course.activity_set,slug=activity_slug, deleted=False)
    student = get_object_or_404(Person, find_userid_or_emplid(userid))
    cansubmit = True
    submission_configured = SubmissionComponent.objects.filter(activity_id=activity.id).exists()
    if not submission_configured:
        return NotFoundResponse(request)

    submission_info = SubmissionInfo(student=student, activity=activity)
    submission_info.get_most_recent_components()
    if activity.multisubmit():
        submission_info.get_all_components()

    any_submissions = bool(submission_info.submissions)

    # lateness: time between the due date and the most recent submission, if past due
    if submission_info.submissions and activity.due_date and activity.due_date < submission_info.latest().created_at:
        late = submission_info.latest().created_at - activity.due_date
    else:
        late = 0
    
    if activity.group:
        # group activity: find the student's confirmed group membership (if any)
        gm = GroupMember.objects.filter(student__person=student, activity=activity, confirmed=True)
        if gm:
            group = gm[0].group
            member = gm[0].student
        else:
            group = None

    else:
        group = None

    # activity should be submitable
    cansubmit = cansubmit and activity.submitable()

    if not cansubmit:
        messages.add_message(request, messages.ERROR, "This activity is not submittable.")
        return render(request, "submission/" + template,
        {"course":course, "activity":activity, "submission_info": submission_info, 'any_submissions': any_submissions,
         "userid":userid, "late":late, "student":student, "group":group, "cansubmit":cansubmit})

    # get all components of activity
    component_list = select_all_components(activity)
    component_list.sort()
    component_form_list=[]

    if request.method == 'POST':
        component_form_list = make_form_from_list(component_list, request=request)
        submitted_comp = []    # components with content submitted in the POST
        not_submitted_comp = [] # components with no content submitted in the POST
        if not activity.group:
            new_sub = StudentSubmission()   # the submission foreign key for newly submitted components
            new_sub.member = get_object_or_404(Member, offering__slug=course_slug, person__userid=request.user.username)
        elif gm:
            new_sub = GroupSubmission()
            new_sub.group = group
            new_sub.creator = member
        else:
            messages.add_message(request, messages.ERROR, "This is a group submission. You cannot submit since you aren't in a group.")
            return ForbiddenResponse(request)
        new_sub.activity = activity

        # begin validating uploaded data
        # (re-initialized here; the validation loop below repopulates both lists)
        submitted_comp = []
        not_submitted_comp = []
        # validate forms one by one
        for data in component_form_list:
            component = data['comp']
            form = data['form']
            if form.is_valid():
                sub = form.save(commit=False)
                sub.component = component
                submitted_comp.append(sub)
            else:
                # hack to replace the "required" message to something more appropriate
                for k,v in form.errors.items():
                    for i,e in enumerate(v):
                        if e == "This field is required.":
                            v[i] = "Nothing submitted."

                not_submitted_comp.append(component)
        # check duplicate filenames here
        all_ok = False
        while not all_ok:
            all_ok = True
            d = {}
            if not activity.multisubmit():
                # single-submit logic: don't want to overrite filenames from earlier submissions that are still in-play
                for c,s in submission_info.components_and_submitted():
                    d[c] = s and s.get_filename()
            # filenames from this submission
            for s in submitted_comp:
                d[s.component] = s.get_filename()
            # a list holding all file names
            file_name_list = [a[1] for a in d.items() if a[1] is not None]
            to_be_removed = []
            for (i, s) in enumerate(submitted_comp):
                if file_name_list.count(s.get_filename()) > 1:
                    all_ok = False
                    to_be_removed.append(i)
                    not_submitted_comp.append(s.component)
                    #HACK: modify the 'errors' field in the form
                    for data in component_form_list:
                        if s.component == data['comp']:
                            # assume we have only one field for submission form
                            field_name = data['form'].fields.keys()[0]
                            data['form']._errors[field_name] = ErrorList([u"This file has the same name as another file in your submission."])
            # remove those has errors in submitted_comp
            # (reverse so earlier indices stay valid while popping)
            to_be_removed.reverse()
            for t in to_be_removed:
                submitted_comp.pop(t)
        # all okay now
        # end validating, begin saving
        if len(submitted_comp) > 0:
            new_sub.save()    
        for sub in submitted_comp:
            sub.submission = new_sub
            sub.save()
            #LOG EVENT#
            if activity.group:
                group_str = " as a member of group %s" % new_sub.group.name
            else:
                group_str = ""
            l = LogEntry(userid=request.user.username,
                  description=u"submitted for %s %s%s" % (activity, sub.component.title, group_str),
                  related_object=sub)
            l.save()

        if len(not_submitted_comp) == 0:
            messages.add_message(request, messages.SUCCESS, "Your submission was successful.")
            return HttpResponseRedirect(reverse(show_components, args=[course_slug, activity_slug]))

        return render(request, "submission/submission_error.html",
            {"course":course, "activity":activity, "component_list":component_form_list,
            "submitted_comp":submitted_comp, "not_submitted_comp":not_submitted_comp})
    else: #not POST
        component_form_list = make_form_from_list(component_list)
        return render(request, "submission/" + template,
        {'component_form_list': component_form_list, "course": course, "activity": activity, "submission_info": submission_info,
         "userid":userid, "late":late, "student":student, "group":group,
         "cansubmit":cansubmit, "is_staff":staff, 'any_submissions': any_submissions})

Example 30

Project: pycon Source File: views.py
    @transaction.atomic
    def post(self, request, *args, **kwargs):
        """
        Bulk-register a group of users from a JSON list of registration dicts.

        The request is atomic: either every entry is valid and all users are
        created (or found), or the transaction is rolled back and per-entry
        errors are returned.  Responds with JSON of the form
        {'success': bool, 'users': [...]}.
        """
        try:
            data = json.loads(request.body)
        except ValueError:
            # json.loads raises ValueError on malformed JSON; catching it
            # specifically (instead of a bare except) avoids swallowing
            # SystemExit, KeyboardInterrupt, and unrelated bugs.
            return HttpResponseBadRequest(self.format_error)

        # Data should be a list of registration info in dictionary format.
        if not (isinstance(data, list) and all([isinstance(d, dict) for d in data])):
            return HttpResponseBadRequest(self.format_error)

        all_valid = True
        seen_emails = []
        user_data = []
        for registration in data:
            form = GroupRegistrationForm(data=registration)
            if form.is_valid():
                # Check if this is a duplicate of an email provided in this request.
                email = User.objects.normalize_email(form.cleaned_data['email'])
                if email in seen_emails:
                    all_valid = False
                    user_data.append({
                        'valid': False,
                        'error_message': 'This email is a duplicate of one above.',
                        'user': None,
                    })
                else:
                    seen_emails.append(email)
                    created, user = form.save(commit=False)
                    if created:
                        # Delay account creation until the transaction is
                        # committed.
                        user._disable_account_creation = True
                        user.save()
                    user_data.append({
                        'valid': True,
                        'created': created,
                        'user': {
                            'pycon_id': user.pk,
                            'email': form.cleaned_data.get('email'),
                            'first_name': form.cleaned_data.get('first_name', ''),
                            'last_name': form.cleaned_data.get('last_name', ''),
                        }
                    })
            else:
                all_valid = False
                user_data.append({
                    'valid': False,
                    'errors': [e for errors in form.errors.values() for e in errors],
                    'user': None,
                })

        # The request is atomic - all users are created (or found), or none
        # are.
        if all_valid:
            for d in user_data:
                if d['created']:
                    # Now that the transaction has been committed,
                    # create an Account for the user so that they can log in.
                    user = User.objects.get(pk=d['user']['pycon_id'])
                    Account.create(user=user)
        else:
            # Strip user details from the response and undo any saves.
            for d in user_data:
                d['user'] = None
                d.pop('created', None)
            transaction.set_rollback(True)

        return_data = {'success': all_valid, 'users': user_data}
        return HttpResponse(json.dumps(return_data))

Example 31

Project: oioioi Source File: import_balloons_displays.py
    def handle(self, *args, **options):
        if len(args) != 2:
            raise CommandError(_("Expected two arguments"))

        try:
            contest = Contest.objects.get(id=args[0])
        except Contest.DoesNotExist:
            raise CommandError(_("Contest %s does not exist") % args[0])

        arg = args[1]

        if arg.startswith('http://') or arg.startswith('https://'):
            self.stdout.write(_("Fetching %s...\n") % (arg,))
            stream = urllib2.urlopen(arg)
        else:
            if not os.path.exists(arg):
                raise CommandError(_("File not found: ") + arg)
            stream = open(arg, 'r')

        reader = csv.reader(stream)
        header = reader.next()
        if header != self.COLUMNS:
            raise CommandError(_("Missing header or invalid columns: "
                "%(header)s\nExpected: %(expected)s") % {
                    'header': ', '.join(header),
                    'expected': ', '.join(self.COLUMNS)})

        with transaction.atomic():
            BalloonsDisplay.objects.filter(contest=contest).delete()

            ok = True
            all_count = 0
            for row in reader:
                all_count += 1

                for i, _column in enumerate(self.COLUMNS):
                    row[i] = row[i].decode('utf8')

                try:
                    user = User.objects.get(username=row[0])
                    display = BalloonsDisplay(ip_addr=row[1], user=user,
                            contest=contest)
                    display.save()
                except User.DoesNotExist:
                    self.stdout.write(_("Error for user=%(user)s: user does"
                        " not exist\n") % {'user': row[1]})
                    ok = False
                except DatabaseError, e:
                    # This assumes that we'll get the message in this
                    # encoding. It is not perfect, but much better than
                    # ascii.
                    message = e.message.decode('utf-8')
                    self.stdout.write(_(
                        "DB Error for user=%(user)s: %(message)s\n")
                            % {'user': row[1], 'message': message})
                    ok = False
                except ValidationError, e:
                    for k, v in e.message_dict.iteritems():
                        for message in v:
                            if k == '__all__':
                                self.stdout.write(_(
                                    "Error for user=%(user)s: %s\n")
                                        % (row[1], message))
                            else:
                                self.stdout.write(
                                        _("Error for user=%(user)s, "
                                            "field %(field)s: %(message)s\n")
                                        % {'user': row[1], 'field': k,
                                            'message': message})
                    ok = False

            if ok:
                self.stdout.write(_("Processed %d entries") % (all_count))
            else:
                raise CommandError(_("There were some errors. Database not "
                    "changed.\n"))

Example 32

Project: orchestra Source File: task_lifecycle.py
@transaction.atomic
def submit_task(task_id, task_data, iteration_status, worker):
    """
    Submit a worker's data for a task and advance the task's lifecycle.

    Marks the worker's latest iteration and assignment as submitted, moves
    the task to its next status, and then—depending on the new status—
    preassigns a reviewer, propagates the submitted data to the adjacent
    assignment, or creates the project's subsequent tasks.

    Args:
        task_id (int):
            The ID of the task to submit.
        task_data (str):
            A JSON blob of task data to submit.
        iteration_status (orchestra.models.Iteration.Status):
            The action taken upon task submission (i.e., REQUESTED_REVIEW
            or PROVIDED_REVIEW).
        worker (orchestra.models.Worker):
            The worker submitting the task.

    Returns:
        task (orchestra.models.Task):
            The modified task object.

    Raises:
        orchestra.core.errors.IllegalTaskSubmission:
            Submission prerequisites for the task are incomplete or the
            assignment is in a non-processing state.
        orchestra.core.errors.TaskAssignmentError:
            Worker belongs to more than one assignment for the given
            task.
        orchestra.core.errors.TaskStatusError:
            Task has already been completed.
    """
    submit_datetime = timezone.now()

    task = Task.objects.select_related('step', 'project').get(id=task_id)
    step = task.step
    if not _are_desired_steps_completed_on_project(step.submission_depends_on,
                                                   project=task.project):
        raise IllegalTaskSubmission('Submission prerequisites are not '
                                    'complete.')

    if task.status == Task.Status.COMPLETE:
        raise TaskStatusError('Task already completed')

    # Use select_for_update to prevent concurrency issues with save_task.
    # See https://github.com/b12io/orchestra/issues/2.
    assignments = (TaskAssignment.objects.select_for_update()
                                 .filter(worker=worker, task=task))

    # Worker can belong to only one assignment for a given task.
    if not assignments.count() == 1:
        raise TaskAssignmentError(
            'Task assignment with worker is in broken state.')

    assignment = assignments[0]

    if assignment.status != TaskAssignment.Status.PROCESSING:
        raise IllegalTaskSubmission('Worker is not allowed to submit')

    next_status = get_next_task_status(task, iteration_status)

    assignment.in_progress_task_data = task_data

    # Submit latest iteration
    latest_iteration = get_latest_iteration(assignment)
    latest_iteration.status = iteration_status
    latest_iteration.submitted_data = assignment.in_progress_task_data
    latest_iteration.end_datetime = submit_datetime
    latest_iteration.save()

    assignment.status = TaskAssignment.Status.SUBMITTED
    assignment.save()
    previous_status = task.status
    task.status = next_status
    task.save()

    if task.status == Task.Status.PENDING_REVIEW:
        # Check the assignment policy to try to assign a reviewer automatically
        task = _preassign_workers(task, AssignmentPolicyType.REVIEWER)
    elif task.status == Task.Status.REVIEWING:
        # hand the submitted data to the next (reviewer) assignment
        update_related_assignment_status(task,
                                         assignment.assignment_counter + 1,
                                         assignment.in_progress_task_data,
                                         submit_datetime)
    elif task.status == Task.Status.POST_REVIEW_PROCESSING:
        # hand the reviewed data back to the previous assignment
        update_related_assignment_status(task,
                                         assignment.assignment_counter - 1,
                                         assignment.in_progress_task_data,
                                         submit_datetime)
    elif task.status == Task.Status.COMPLETE:
        create_subsequent_tasks(task.project)

    notify_status_change(task, previous_status)
    return task

Example 33

Project: oioioi Source File: import_onsite_participants.py
    def handle(self, *args, **options):
        if len(args) != 2:
            raise CommandError(_("Expected two arguments"))

        try:
            contest = Contest.objects.get(id=args[0])
        except Contest.DoesNotExist:
            raise CommandError(_("Contest %s does not exist") % args[0])

        rcontroller = contest.controller.registration_controller()
        print rcontroller
        if not issubclass(getattr(rcontroller, 'participant_admin', None),
                          OnsiteRegistrationParticipantAdmin):
            raise CommandError(_("Wrong type of contest"))

        arg = args[1]

        if arg.startswith('http://') or arg.startswith('https://'):
            self.stdout.write(_("Fetching %s...\n") % (arg,))
            stream = urllib2.urlopen(arg)
        else:
            if not os.path.exists(arg):
                raise CommandError(_("File not found: ") + arg)
            stream = open(arg, 'r')

        reader = csv.reader(stream)
        header = reader.next()
        if header != self.COLUMNS:
            raise CommandError(_("Missing header or invalid columns: "
                "%(header)s\nExpected: %(expected)s") % {
                    'header': ', '.join(header),
                    'expected': ', '.join(self.COLUMNS)})

        with transaction.atomic():
            ok = True
            all_count = 0
            for row in reader:
                all_count += 1

                for i, _column in enumerate(self.COLUMNS):
                    row[i] = row[i].decode('utf8')

                try:
                    user = User.objects.get(username=row[1])
                    region = Region.objects.get(short_name=row[2],
                                                contest=contest)

                    participant, created = Participant.objects \
                            .get_or_create(contest=contest, user=user)

                    reg = OnsiteRegistration(participant=participant,
                            number=row[0], local_number=row[3], region=region)

                    reg.full_clean()
                    reg.save()
                except User.DoesNotExist:
                    self.stdout.write(_("Error for user=%(user)s: user does"
                        " not exist\n") % {'user': row[1]})
                    ok = False
                except Region.DoesNotExist:
                    self.stdout.write(_(
                        "Error for user=%(user)s: region %(region)s does"
                        " not exist\n") % {'user': row[1], 'region': row[2]})
                    ok = False
                except DatabaseError, e:
                    # This assumes that we'll get the message in this
                    # encoding. It is not perfect, but much better than
                    # ascii.
                    message = e.message.decode('utf-8')
                    self.stdout.write(_(
                        "DB Error for user=%(user)s: %(message)s\n")
                            % {'user': row[1], 'message': message})
                    ok = False
                except ValidationError, e:
                    for k, v in e.message_dict.iteritems():
                        for message in v:
                            if k == '__all__':
                                self.stdout.write(_(
                                    "Error for user=%(user)s: %(message)s\n")
                                    % {'user': row[1], 'message': message})
                            else:
                                self.stdout.write(
                                        _("Error for user=%(user)s, "
                                            "field %(field)s: %(message)s\n")
                                        % {'user': row[1], 'field': k,
                                            'message': message})
                    ok = False

            if ok:
                self.stdout.write(_("Processed %d entries") % (all_count))
            else:
                raise CommandError(_("There were some errors. Database not "
                    "changed.\n"))

Example 34

Project: product-definition-center Source File: lib.py
@transaction.atomic
def release__import_from_composeinfo(request, composeinfo_json):
    """
    Import a release, together with its variants and architectures,
    from a composeinfo JSON document.

    Runs in a single transaction; returns the created/updated Release.
    """
    compose_info = productmd.composeinfo.ComposeInfo()
    common_hacks.deserialize_wrapper(compose_info.deserialize, composeinfo_json)

    # A layered release sits on top of a base product, which must exist
    # first; non-layered releases have no base product at all.
    base_product_obj = None
    if compose_info.release.is_layered:
        base_type = models.ReleaseType.objects.get(
            short=getattr(compose_info.base_product, "type", "ga"))
        base_product_obj, _ = _logged_get_or_create(
            request, models.BaseProduct,
            name=compose_info.base_product.name,
            short=compose_info.base_product.short.lower(),
            version=compose_info.base_product.version,
            release_type=base_type,
        )

    product_obj, _ = _logged_get_or_create(
        request, models.Product,
        name=compose_info.release.name,
        short=compose_info.release.short.lower()
    )
    product_version_obj, _ = _logged_get_or_create(
        request, models.ProductVersion,
        product=product_obj,
        name=compose_info.release.name,
        short=compose_info.release.short.lower(),
        version=compose_info.release.major_version
    )

    release_type = models.ReleaseType.objects.get(
        short=getattr(compose_info.release, "type", "ga"))
    release_obj, _ = _logged_get_or_create(
        request, models.Release,
        name=compose_info.release.name,
        short=compose_info.release.short.lower(),
        version=compose_info.release.version,
        base_product=base_product_obj,
        release_type=release_type,
        product_version=product_version_obj,
    )

    # Variants cannot be logged the moment they are created because their
    # export includes architectures, which are only attached below.  Collect
    # newly created variants here and log them once the import is complete;
    # this also neatly covers integrated variants, which may be absent.
    newly_created = []

    for variant in compose_info.variants.get_variants(recursive=True):
        variant_type_obj = models.VariantType.objects.get(name=variant.type)
        integrated_variant = None
        if variant.release.name:
            # The variant embeds its own (integrated) release; mirror it.
            integrated_release = get_or_create_integrated_release(
                request,
                release_obj,
                variant.release
            )
            integrated_variant, was_created = models.Variant.objects.get_or_create(
                release=integrated_release,
                variant_id=variant.id,
                variant_uid=variant.uid,
                variant_name=variant.name,
                variant_type=models.VariantType.objects.get(name='variant')
            )
            if was_created:
                newly_created.append(integrated_variant)
        variant_obj, was_created = models.Variant.objects.get_or_create(
            release=release_obj,
            variant_id=variant.id,
            variant_uid=variant.uid,
            variant_name=variant.name,
            variant_type=variant_type_obj,
        )
        if was_created:
            newly_created.append(variant_obj)
        for arch in variant.arches:
            arch_obj = common_models.Arch.objects.get(name=arch)
            models.VariantArch.objects.get_or_create(
                arch=arch_obj,
                variant=variant_obj
            )
            if integrated_variant:
                models.VariantArch.objects.get_or_create(
                    arch=arch_obj,
                    variant=integrated_variant
                )

    for obj in newly_created:
        _maybe_log(request, True, obj)

    return release_obj

Example 35

Project: SmartElect Source File: models.py
    def execute(self):
        """
        Implement the changeset.

        If the changeset status is not valid to execute it,
        raise ChangesetException.

        Otherwise, try to execute the changeset and at the end, set the
        changeset status appropriately.

        Create ChangeRecords to record successful changes, and changes that
        might have been made but were not due to the current status of the
        affected citizen or registration (e.g., if in a rollback, the citizen
        is no longer in the status the changeset being rolled back left them
        in. Or in a block or unblock, if the citizen is already in the status
        the changeset was supposed to change them to).

        If the status is failed, no registration changes will have been
        applied (they are rolled back if needed).
        """

        logger.info("Execute changeset %s...", self.name)
        # Guard clauses: refuse to run unless this changeset (and, for a
        # rollback, the changeset being rolled back) is in an allowed status.
        if not self.in_executable_status():
            raise NotAnAllowedStatus("Cannot execute changeset in status %s"
                                     % self.get_status_display())
        if self.change == Changeset.CHANGE_ROLLBACK:
            # Can only rollback a successful or partially successful changeset
            if not self.other_changeset.in_rollbackable_status():
                raise NotAnAllowedStatus("Cannot rollback changeset in status %s"
                                         % self.other_changeset.get_status_display())
        if self.change not in Changeset.CHANGE_VALID_VALUES:
            # Bug fix: the change type used to be passed as a second
            # positional argument instead of being %-formatted into the
            # message, so it never actually appeared in the exception text.
            raise ChangesetException("Cannot execute changeset, %s is not a valid change type"
                                     % self.change)
        try:
            # Mark as executing *before* opening the atomic block so the
            # EXECUTING status survives even if the data changes roll back.
            self.status = Changeset.STATUS_EXECUTING
            self.execution_start_time = now()
            self.save()
            with transaction.atomic():
                if self.change == Changeset.CHANGE_CENTER:
                    changerecord_kwargs = dict(changeset=self, change=self.change,
                                               to_center=self.target_center)
                    for reg in self.get_registrations_to_change():
                        changerecord_kwargs.update(
                            citizen=reg.citizen,
                            from_center=reg.registration_center
                        )
                        if reg.registration_center == self.target_center:
                            # Citizen is already registered there.
                            # (Can happen if they uploaded a list of NIDs and later
                            # the citizen changed their registration.)
                            ChangeRecord.objects.create(changed=False, **changerecord_kwargs)
                        else:
                            reg.registration_center = self.target_center
                            reg.save_with_archive_version()
                            ChangeRecord.objects.create(changed=True, **changerecord_kwargs)
                elif self.change in [Changeset.CHANGE_BLOCK, Changeset.CHANGE_UNBLOCK]:
                    changerecord_kwargs = dict(changeset=self, change=self.change)
                    for citizen in self.get_citizens_to_change():
                        changerecord_kwargs['citizen'] = citizen
                        if self.change == Changeset.CHANGE_BLOCK and not citizen.blocked:
                            citizen.block()
                            ChangeRecord.objects.create(changed=True, **changerecord_kwargs)
                        elif self.change == Changeset.CHANGE_UNBLOCK and citizen.blocked:
                            citizen.unblock()
                            ChangeRecord.objects.create(changed=True, **changerecord_kwargs)
                        else:
                            # Citizen already in the requested state; record a no-op.
                            ChangeRecord.objects.create(changed=False, **changerecord_kwargs)
                elif self.change == Changeset.CHANGE_ROLLBACK:
                    # Undo the changes made in another changeset, where possible
                    for change in ChangeRecord.objects.filter(changeset=self.other_changeset,
                                                              changed=True):
                        change.undo(self)
                    self.other_changeset.rollback_changeset = self
                    self.other_changeset.status = Changeset.STATUS_ROLLED_BACK
                    self.other_changeset.save()

                # Set the status depending on whether we applied all the requested changes
                if ChangeRecord.objects.filter(changeset=self, changed=False).exists():
                    self.status = Changeset.STATUS_PARTIALLY_SUCCESSFUL
                else:
                    self.status = Changeset.STATUS_SUCCESSFUL
                self.finish_time = now()
                self.save()
                logger.info("Changeset execution status: %s", self.get_status_display())
        except Exception as e:
            # Exiting the inner 'with transaction' by an exception will have triggered a rollback.
            # This log command will log the exception
            logger.exception("Executing changeset %s failed unexpectedly", self.name)
            self.status = Changeset.STATUS_FAILED
            self.error_text = str(e)
            self.finish_time = now()
            self.save()

Example 36

Project: rockstor-core Source File: command.py
    @transaction.atomic
    def post(self, request, command):
        """
        Dispatch a named system-level command and return its result.

        Supported commands: 'bootstrap' (remount pools, shares and snapshots
        and re-export NFS/SFTP after boot), 'utcnow', 'uptime', 'kernel',
        update management commands, 'shutdown'/'reboot', 'current-user' and
        the refresh-*-state commands.

        Note: the original code used Python 2 ``except X, e`` syntax; it is
        modernized here to ``except X as e`` (valid on Python 2.6+ and 3.x).
        """
        if (command == 'bootstrap'):

            self._refresh_pool_state()
            for p in Pool.objects.all():
                import_shares(p, request)

            # Mounting and snapshot import are best-effort per share: log
            # failures and keep going so one bad share can't block boot.
            for share in Share.objects.all():
                try:
                    if (share.pqgroup == settings.MODEL_DEFS['pqgroup']):
                        share.pqgroup = qgroup_create(share.pool)
                        share.save()
                    if (not is_share_mounted(share.name)):
                        mnt_pt = ('%s%s' % (settings.MNT_PT, share.name))
                        mount_share(share, mnt_pt)
                except Exception as e:
                    e_msg = ('Exception while mounting a share(%s) during '
                             'bootstrap: %s' % (share.name, e.__str__()))
                    logger.error(e_msg)
                    logger.exception(e)

                try:
                    import_snapshots(share)
                except Exception as e:
                    e_msg = ('Exception while importing Snapshots of '
                             'Share(%s): %s' % (share.name, e.__str__()))
                    logger.error(e_msg)
                    logger.exception(e)

            for snap in Snapshot.objects.all():
                if (snap.uvisible):
                    try:
                        mount_snap(snap.share, snap.real_name)
                    except Exception as e:
                        e_msg = ('Failed to make the Snapshot(%s) visible. '
                                 'Exception: %s' % (snap.real_name, e.__str__()))
                        logger.error(e_msg)

            mnt_map = sftp_mount_map(settings.SFTP_MNT_ROOT)
            for sftpo in SFTP.objects.all():
                try:
                    sftp_mount(sftpo.share, settings.MNT_PT,
                               settings.SFTP_MNT_ROOT, mnt_map, sftpo.editable)
                    sftp_snap_toggle(sftpo.share)
                except Exception as e:
                    # Typo fix in message: 'exportin' -> 'exporting'.
                    e_msg = ('Exception while exporting a sftp share during '
                             'bootstrap: %s' % e.__str__())
                    logger.error(e_msg)

            try:
                adv_entries = [a.export_str for a in AdvancedNFSExport.objects.all()]
                exports_d = self.create_adv_nfs_export_input(adv_entries, request)
                exports = self.create_nfs_export_input(NFSExport.objects.all())
                exports.update(exports_d)
                self.refresh_wrapper(exports, request, logger)
            except Exception as e:
                e_msg = ('Exception while bootstrapping NFS: %s' % e.__str__())
                logger.error(e_msg)

            #  bootstrap services
            try:
                systemctl('firewalld', 'stop')
                systemctl('firewalld', 'disable')
                systemctl('nginx', 'stop')
                systemctl('nginx', 'disable')
                systemctl('atd', 'enable')
                systemctl('atd', 'start')
            except Exception as e:
                e_msg = ('Exception while setting service statuses during '
                         'bootstrap: %s' % e.__str__())
                logger.error(e_msg)
                handle_exception(Exception(e_msg), request)

            logger.debug('Bootstrap operations completed')
            return Response()

        if (command == 'utcnow'):
            return Response(datetime.utcnow().replace(tzinfo=utc))

        if (command == 'uptime'):
            return Response(uptime())

        if (command == 'kernel'):
            try:
                return Response(kernel_info(settings.SUPPORTED_KERNEL_VERSION))
            except Exception as e:
                handle_exception(e, request)

        if (command == 'update-check'):
            try:
                subo = None
                # Prefer an active Stable subscription, fall back to Testing,
                # otherwise check without a subscription.
                try:
                    subo = UpdateSubscription.objects.get(name='Stable', status='active')
                except UpdateSubscription.DoesNotExist:
                    try:
                        subo = UpdateSubscription.objects.get(name='Testing', status='active')
                    except UpdateSubscription.DoesNotExist:
                        pass
                return Response(update_check(subscription=subo))
            except Exception as e:
                e_msg = ('Unable to check update due to a system error: %s' % e.__str__())
                handle_exception(Exception(e_msg), request)

        if (command == 'update'):
            try:
                update_run()
                return Response('Done')
            except Exception as e:
                e_msg = ('Update failed due to this exception: %s' % e.__str__())
                handle_exception(Exception(e_msg), request)

        if (command == 'current-version'):
            try:
                return Response(current_version())
            except Exception as e:
                # Bug fix: the format string was missing its %s placeholder,
                # so the '%' interpolation itself raised instead of building
                # the intended error message.
                e_msg = ('Unable to check current version due to this '
                         'exception: %s' % e.__str__())
                handle_exception(Exception(e_msg), request)

        if (command == 'shutdown'):
            msg = ('The system will now be shutdown')
            try:
                request.session.flush()
                system_shutdown()
            except Exception as e:
                msg = ('Failed to shutdown the system due to a low level '
                       'error: %s' % e.__str__())
                handle_exception(Exception(msg), request)
            finally:
                return Response(msg)

        if (command == 'reboot'):
            msg = ('The system will now reboot')
            try:
                request.session.flush()
                system_reboot()
            except Exception as e:
                msg = ('Failed to reboot the system due to a low level error: '
                       '%s' % e.__str__())
                handle_exception(Exception(msg), request)
            finally:
                return Response(msg)

        if (command == 'current-user'):
            return Response(request.user.username)

        if (command == 'auto-update-status'):
            status = True
            try:
                status = auto_update_status()
            except Exception:
                # Narrowed from a bare 'except:' so system-exiting
                # exceptions (KeyboardInterrupt/SystemExit) propagate.
                status = False
            finally:
                return Response({'enabled': status, })

        if (command == 'enable-auto-update'):
            try:
                auto_update(enable=True)
                return Response({'enabled': True, })
            except Exception as e:
                msg = ('Failed to enable auto update due to this exception: '
                       '%s' % e.__str__())
                handle_exception(Exception(msg), request)

        if (command == 'disable-auto-update'):
            try:
                auto_update(enable=False)
                return Response({'enabled': False, })
            except Exception as e:
                msg = ('Failed to disable auto update due to this exception:  '
                       '%s' % e.__str__())
                handle_exception(Exception(msg), request)

        if (command == 'refresh-pool-state'):
            self._refresh_pool_state()
            return Response()

        if (command == 'refresh-share-state'):
            for p in Pool.objects.all():
                import_shares(p, request)
            return Response()

        if (command == 'refresh-snapshot-state'):
            for share in Share.objects.all():
                import_snapshots(share)
            return Response()

Example 37

Project: amy Source File: util.py
def create_uploaded_persons_tasks(data):
    """
    Create Person and Task objects from rows of uploaded data.

    Returns a tuple (persons_created, tasks_created).  Raises InternalError
    if any row carries validation errors; re-raises IntegrityError and
    ObjectDoesNotExist with the offending row appended to the message.
    """

    # Refuse to touch the database at all if any row was flagged with errors.
    if any(row.get('errors') for row in data):
        raise InternalError('Uploaded data contains errors, cancelling upload')

    new_persons = []
    new_tasks = []
    learner_events = set()

    # All-or-nothing: one bad row rolls back every change made so far.
    with transaction.atomic():
        for row in data:
            try:
                row_repr = ('{personal} {family} {username} <{email}>, '
                            '{role} at {event}').format(**row)

                fields = {key: row[key] for key in Person.PERSON_UPLOAD_FIELDS}
                fields['username'] = row['username']

                if fields['email']:
                    # Re-use an existing Person matched by email, or create one.
                    person, was_created = Person.objects.get_or_create(
                        email__iexact=fields['email'], defaults=fields
                    )
                    if was_created:
                        new_persons.append(person)
                else:
                    # No email given: always create a fresh Person.
                    person = Person(**fields)
                    person.save()
                    new_persons.append(person)

                if row['event'] and row['role']:
                    event = Event.objects.get(slug=row['event'])
                    role = Role.objects.get(name=row['role'])

                    # Track events that gained learners so their attendance
                    # can be refreshed after the transaction commits.
                    if row['role'] == 'learner':
                        learner_events.add(event)

                    task, was_created = Task.objects.get_or_create(
                        person=person, event=event, role=role
                    )
                    if was_created:
                        new_tasks.append(task)

            except IntegrityError as exc:
                raise IntegrityError('{0} (for "{1}")'.format(str(exc), row_repr))

            except ObjectDoesNotExist as exc:
                raise ObjectDoesNotExist('{0} (for "{1}")'.format(str(exc),
                                                                  row_repr))

    for event in learner_events:
        # Bump event.attendance if it is lower than the number of learners.
        update_event_attendance_from_tasks(event)

    return new_persons, new_tasks

Example 38

Project: Misago Source File: createfakethreads.py
    def handle(self, *args, **options):
        """
        Populate the forum with randomly generated fake threads and posts.

        The optional first positional argument is the number of threads to
        create (default 5); a non-integer argument aborts with exit code 1.
        After creation, a small random subset of all threads is pinned and
        every category's counters are synchronized.
        """
        try:
            fake_threads_to_create = int(args[0])
        except IndexError:
            # No argument given: fall back to a small default batch.
            fake_threads_to_create = 5
        except ValueError:
            self.stderr.write("\nOptional argument should be integer.")
            sys.exit(1)

        categories = list(Category.objects.all_categories())

        # Faker factory used for titles, paragraphs and IP addresses.
        fake = Factory.create()

        User = get_user_model()
        # NOTE(review): total_users is never used below -- candidate for removal.
        total_users = User.objects.count()

        self.stdout.write('Creating fake threads...\n')

        message = '\nSuccessfully created %s fake threads in %s'

        created_threads = 0
        start_time = time.time()
        show_progress(self, created_threads, fake_threads_to_create)
        for i in range(fake_threads_to_create):
            # Each thread (with all its replies) is created atomically.
            with atomic():
                datetime = timezone.now()
                category = random.choice(categories)
                # Pick a random existing user as the thread starter.
                user = User.objects.order_by('?')[:1][0]

                # Roughly 10% chance each for unapproved/hidden/closed.
                thread_is_unapproved = random.randint(0, 100) > 90
                thread_is_hidden = random.randint(0, 100) > 90
                thread_is_closed = random.randint(0, 100) > 90

                thread = Thread(
                    category=category,
                    started_on=datetime,
                    starter_name='-',
                    starter_slug='-',
                    last_post_on=datetime,
                    last_poster_name='-',
                    last_poster_slug='-',
                    replies=0,
                    is_unapproved=thread_is_unapproved,
                    is_hidden=thread_is_hidden,
                    is_closed=thread_is_closed
                )
                thread.set_title(fake.sentence())
                thread.save()

                fake_message = "\n\n".join(fake.paragraphs())
                post = Post.objects.create(
                    category=category,
                    thread=thread,
                    poster=user,
                    poster_name=user.username,
                    poster_ip=fake.ipv4(),
                    original=fake_message,
                    parsed=linebreaks_filter(fake_message),
                    posted_on=datetime,
                    updated_on=datetime
                )
                # Checksum must be computed after the post has a PK,
                # hence the second save restricted to that field.
                update_post_checksum(post)
                post.save(update_fields=['checksum'])

                thread.set_first_post(post)
                thread.set_last_post(post)
                thread.save()

                user.threads += 1
                user.posts += 1
                user.save()

                # Skewed reply-count distribution: ~5% huge threads,
                # ~45% medium, the rest small.
                thread_type = random.randint(0, 100)
                if thread_type > 95:
                    thread_replies = random.randint(200, 2500)
                elif thread_type > 50:
                    thread_replies = random.randint(5, 30)
                else:
                    thread_replies = random.randint(0, 10)

                for x in range(thread_replies):
                    datetime = timezone.now()
                    user = User.objects.order_by('?')[:1][0]
                    fake_message = "\n\n".join(fake.paragraphs())

                    # ~3% of replies are unapproved; hidden only applies
                    # to approved replies (also ~3%).
                    is_unapproved = random.randint(0, 100) > 97
                    if not is_unapproved:
                        is_hidden = random.randint(0, 100) > 97
                    else:
                        is_hidden = False

                    post = Post.objects.create(
                        category=category,
                        thread=thread,
                        poster=user,
                        poster_name=user.username,
                        poster_ip=fake.ipv4(),
                        original=fake_message,
                        parsed=linebreaks_filter(fake_message),
                        is_hidden=is_hidden,
                        is_unapproved=is_unapproved,
                        posted_on=datetime,
                        updated_on=datetime
                    )
                    update_post_checksum(post)
                    post.save(update_fields=['checksum'])

                    user.posts += 1
                    user.save()

                # Recompute thread counters (replies, last post, etc.).
                thread.synchronize()
                thread.save()

                created_threads += 1
                show_progress(
                    self, created_threads, fake_threads_to_create, start_time)

        # Pin ~2.5% of the created threads (at least one), choosing
        # random threads site-wide; ~25% of pins are global (weight 2).
        pinned_threads = random.randint(0, int(created_threads * 0.025)) or 1
        self.stdout.write('\nPinning %s threads...' % pinned_threads)
        for i in range(0, pinned_threads):
            thread = Thread.objects.order_by('?')[:1][0]
            if random.randint(0, 100) > 75:
                thread.weight = 2
            else:
                thread.weight = 1
            thread.save()

        for category in categories:
            category.synchronize()
            category.save()

        total_time = time.time() - start_time
        total_humanized = time.strftime('%H:%M:%S', time.gmtime(total_time))
        self.stdout.write(message % (created_threads, total_humanized))

Example 39

Project: taiga-back Source File: services.py
@transaction.atomic
def send_sync_notifications(notification_id):
    """
    Given changed instance, calculate the history entry and
    a complete list for users to notify, send
    email to all users.

    The notification row is locked with select_for_update and deleted once
    the emails have been sent; too-recent notifications are left untouched.
    """

    notification = HistoryChangeNotification.objects.select_for_update().get(pk=notification_id)
    # If the last modification is too recent we ignore it.
    # Bug fix: use total_seconds() instead of .seconds -- .seconds only
    # holds the sub-day remainder of a timedelta, so a notification older
    # than a day could wrongly look "too recent" and be skipped.
    now = timezone.now()
    time_diff = now - notification.updated_datetime
    if time_diff.total_seconds() < settings.CHANGE_NOTIFICATIONS_MIN_INTERVAL:
        return

    history_entries = tuple(notification.history_entries.all().order_by("created_at"))
    obj, _ = get_last_snapshot_for_key(notification.key)
    obj_class = get_model_from_key(obj.key)

    context = {"obj_class": obj_class,
               "snapshot": obj.snapshot,
               "project": notification.project,
               "changer": notification.owner,
               "history_entries": history_entries}

    model = get_model_from_key(notification.key)
    template_name = _resolve_template_name(model, change_type=notification.history_type)
    email = _make_template_mail(template_name)
    # Strip any :port suffix from the configured API domain.
    domain = settings.SITES["api"]["domain"].split(":")[0] or settings.SITES["api"]["domain"]

    # Pick a stable per-object identifier for threading headers.
    if "ref" in obj.snapshot:
        msg_id = obj.snapshot["ref"]
    elif "slug" in obj.snapshot:
        msg_id = obj.snapshot["slug"]
    else:
        msg_id = 'taiga-system'

    now = datetime.datetime.now()
    format_args = {
        "project_slug": notification.project.slug,
        "project_name": notification.project.name,
        "msg_id": msg_id,
        "time": int(now.timestamp()),
        "domain": domain
    }

    # RFC 2822 threading headers so mail clients group related updates.
    headers = {
        "Message-ID": "<{project_slug}/{msg_id}/{time}@{domain}>".format(**format_args),
        "In-Reply-To": "<{project_slug}/{msg_id}@{domain}>".format(**format_args),
        "References": "<{project_slug}/{msg_id}@{domain}>".format(**format_args),
        "List-ID": 'Taiga/{project_name} <taiga.{project_slug}@{domain}>'.format(**format_args),
        "Thread-Index": make_ms_thread_index("<{project_slug}/{msg_id}@{domain}>".format(**format_args), now)
    }

    for user in notification.notify_users.distinct():
        context["user"] = user
        context["lang"] = user.lang or settings.LANGUAGE_CODE
        email.send(user.email, context, headers=headers)

    # All emails sent: the notification has been fully processed.
    notification.delete()

Example 40

Project: rockstor-core Source File: pool.py
    @transaction.atomic
    def put(self, request, pname, command):
        """
        resize a pool.
        @pname: pool's name
        @command: 'add' - add a list of disks and hence expand the pool
                  'remove' - remove a list of disks and hence shrink the pool
                  'remount' - remount the pool (delegated to self._remount)
        """
        with self._handle_exception(request):
            try:
                pool = Pool.objects.get(name=pname)
            except Exception:
                # Narrowed from a bare 'except:' so system-exiting
                # exceptions are not swallowed.
                e_msg = ('Pool(%s) does not exist.' % pname)
                handle_exception(Exception(e_msg), request)

            # The root pool hosts the OS; resizing it is not allowed.
            if (pool.role == 'root'):
                e_msg = ('Edit operations are not allowed on this Pool(%s) '
                         'as it contains the operating system.' % pname)
                handle_exception(Exception(e_msg), request)

            if (command == 'remount'):
                return self._remount(request, pool)

            disks = [self._validate_disk(d, request) for d in
                     request.data.get('disks', [])]
            num_new_disks = len(disks)
            dnames = [d.name for d in disks]
            new_raid = request.data.get('raid_level', pool.raid)
            num_total_disks = (Disk.objects.filter(pool=pool).count() +
                               num_new_disks)
            if (command == 'add'):
                # Each candidate disk must be unassigned and wiped.
                for d in disks:
                    if (d.pool is not None):
                        e_msg = ('Disk(%s) cannot be added to this Pool(%s) '
                                 'because it belongs to another pool(%s)' %
                                 (d.name, pool.name, d.pool.name))
                        handle_exception(Exception(e_msg), request)
                    if (d.btrfs_uuid is not None):
                        e_msg = ('Disk(%s) has a BTRFS filesystem from the '
                                 'past. If you really like to add it, wipe it '
                                 'from the Storage -> Disks screen of the '
                                 'web-ui' % d.name)
                        handle_exception(Exception(e_msg), request)

                if (pool.raid != 'single' and new_raid == 'single'):
                    e_msg = ('Pool migration from %s to %s is not supported.'
                             % (pool.raid, new_raid))
                    handle_exception(Exception(e_msg), request)

                # Minimum disk counts for the requested raid level.
                # (Indentation of these checks normalized; logic unchanged.)
                if (new_raid == 'raid10' and num_total_disks < 4):
                    e_msg = ('A minimum of Four drives are required for the '
                             'raid level: raid10')
                    handle_exception(Exception(e_msg), request)

                if (new_raid == 'raid6' and num_total_disks < 3):
                    e_msg = ('A minimum of Three drives are required for the '
                             'raid level: raid6')
                    handle_exception(Exception(e_msg), request)

                if (new_raid == 'raid5' and num_total_disks < 2):
                    e_msg = ('A minimum of Two drives are required for the '
                             'raid level: raid5')
                    handle_exception(Exception(e_msg), request)

                # Only one balance may run per pool at a time.
                if (PoolBalance.objects.filter(
                        pool=pool,
                        status__regex=r'(started|running|cancelling|pausing|paused)').exists()):
                    e_msg = ('A Balance process is already running or paused '
                             'for this pool(%s). Resize is not supported '
                             'during a balance process.' % pool.name)
                    handle_exception(Exception(e_msg), request)

                resize_pool(pool, dnames)
                # Adding devices kicks off a balance (with raid conversion
                # if requested); track it so concurrent resizes are blocked.
                tid = self._balance_start(pool, convert=new_raid)
                ps = PoolBalance(pool=pool, tid=tid)
                ps.save()

                pool.raid = new_raid
                for d_o in disks:
                    d_o.pool = pool
                    d_o.save()

            elif (command == 'remove'):
                if (new_raid != pool.raid):
                    e_msg = ('Raid configuration cannot be changed while '
                             'removing disks')
                    handle_exception(Exception(e_msg), request)
                for d in disks:
                    if (d.pool is None or d.pool != pool):
                        e_msg = ('Disk(%s) cannot be removed because it does '
                                 'not belong to this Pool(%s)' %
                                 (d.name, pool.name))
                        handle_exception(Exception(e_msg), request)
                remaining_disks = (Disk.objects.filter(pool=pool).count() -
                                   num_new_disks)
                if (pool.raid == 'raid0'):
                    e_msg = ('Disks cannot be removed from a pool with this '
                             'raid(%s) configuration' % pool.raid)
                    handle_exception(Exception(e_msg), request)

                # The pool must keep the minimum disk count for its raid level.
                if (pool.raid == 'raid1' and remaining_disks < 2):
                    e_msg = ('Disks cannot be removed from this pool '
                             'because its raid configuration(raid1) '
                             'requires a minimum of 2 disks')
                    handle_exception(Exception(e_msg), request)

                if (pool.raid == 'raid10' and remaining_disks < 4):
                    e_msg = ('Disks cannot be removed from this pool '
                             'because its raid configuration(raid10) '
                             'requires a minimum of 4 disks')
                    handle_exception(Exception(e_msg), request)

                if (pool.raid == 'raid5' and remaining_disks < 2):
                    e_msg = ('Disks cannot be removed from this pool because '
                             'its raid configuration(raid5) requires a '
                             'minimum of 2 disks')
                    handle_exception(Exception(e_msg), request)

                if (pool.raid == 'raid6' and remaining_disks < 3):
                    e_msg = ('Disks cannot be removed from this pool because '
                             'its raid configuration(raid6) requires a '
                             'minimum of 3 disks')
                    handle_exception(Exception(e_msg), request)

                # Shrinking must leave enough free space to absorb the
                # removed disks' data.
                usage = pool_usage('/%s/%s' % (settings.MNT_PT, pool.name))
                size_cut = 0
                for d in disks:
                    size_cut += d.size
                if (size_cut >= usage[2]):
                    e_msg = ('Removing these(%s) disks may shrink the pool by '
                             '%dKB, which is greater than available free space'
                             ' %dKB. This is not supported.' %
                             (dnames, size_cut, usage[2]))
                    handle_exception(Exception(e_msg), request)

                resize_pool(pool, dnames, add=False)
                tid = self._balance_start(pool)
                ps = PoolBalance(pool=pool, tid=tid)
                ps.save()

                for d in disks:
                    d.pool = None
                    d.save()

            else:
                e_msg = ('command(%s) is not supported.' % command)
                handle_exception(Exception(e_msg), request)
            # Refresh the recorded pool size after the resize.
            usage = pool_usage('/%s/%s' % (settings.MNT_PT, pool.name))
            pool.size = usage[0]
            pool.save()
            return Response(PoolInfoSerializer(pool).data)

Example 41

Project: sublimall-server Source File: views.py
    @transaction.atomic
    def post(self, request):
        """Handle a registration form submission.

        Validates the posted email/password pair, creates an inactive
        Member, and emails a confirmation link. Any validation failure
        re-renders the registration template with a single form error.
        """
        template = 'registration.html'

        def error_page(message):
            # Every failure path renders the same template with one error.
            return render(request, template, {'form': {'errors': message}})

        # Registration is capped during the beta; refuse once the cap is hit.
        if Member.objects.all().count() >= settings.MAX_MEMBER:
            logger.warning('Max registration number reached')
            return error_page(
                "Max member reach. I'm sorry about that, "
                "don't forget that it's a beta version of Sublimall. "
                "Registrations will been soon re-opened!")

        email = request.POST.get('email', '').lower()
        email2 = request.POST.get('email2', '').lower()
        password = request.POST.get('password')
        password2 = request.POST.get('password2')

        if not email:
            return error_page("Email can't be empty.")
        if not password:
            return error_page("Password can't be empty.")

        try:
            validate_email(email)
        except ValidationError:
            return error_page("Need a valid email.")

        valid, error = is_password_valid(password)
        if not valid:
            return error_page(error)

        if password != password2:
            return error_page("Password doesn't match.")

        if email != email2:
            return error_page("Emails doesn't match.")

        if Member.objects.filter(email=email).exists():
            return error_page("Email already used.")

        try:
            member = Member(email=email, is_active=False)
            member.set_password(password)
            member.full_clean()
            member.save()

            member.send_registration_confirmation()

        except Exception:
            # Unexpected failure: log with full traceback and request context,
            # then show a generic error (the atomic decorator rolls back).
            logger.error(
                'Registration unhandled exception',
                exc_info=True,
                extra={'request': request})
            return error_page(
                "Error while creating your account. "
                "A report have been sent. Sorry about that.")

        messages.success(
            request,
            "You'll receive an email soon, check it to confirm "
            "your account. See you soon!")
        return HttpResponseRedirect(reverse('login'))

Example 42

Project: product-definition-center Source File: lib.py
@transaction.atomic(savepoint=False)
def compose__import_rpms(request, release_id, composeinfo, rpm_manifest):
    """Import a compose's RPM manifest into the database for *release_id*.

    Deserializes the productmd composeinfo/rpm-manifest payloads, gets or
    creates the Compose plus its Variant/VariantArch rows, then links every
    listed RPM to its variant-arch via raw-cursor bulk inserts.

    Returns a ``(compose_id, imported_rpms)`` tuple, where ``imported_rpms``
    is the number of manifest entries processed.
    """
    release_obj = release_models.Release.objects.get(release_id=release_id)

    # Deserialize the productmd metadata wrappers from the request payloads.
    ci = productmd.composeinfo.ComposeInfo()
    common_hacks.deserialize_wrapper(ci.deserialize, composeinfo)
    rm = Rpms()
    common_hacks.deserialize_wrapper(rm.deserialize, rpm_manifest)

    # Abort early if composeinfo and the rpm manifest disagree with each other.
    _maybe_raise_inconsistency_error(ci, rm, 'rpms')

    # ci.compose.date is YYYYMMDD; reformat to ISO-style YYYY-MM-DD.
    compose_date = "%s-%s-%s" % (ci.compose.date[:4], ci.compose.date[4:6], ci.compose.date[6:])
    compose_type = models.ComposeType.objects.get(name=ci.compose.type)
    # New composes always start in the 'untested' acceptance state.
    acceptance_status = models.ComposeAcceptanceTestingState.objects.get(name='untested')
    compose_obj, created = lib._logged_get_or_create(
        request, models.Compose,
        release=release_obj,
        compose_id=ci.compose.id,
        compose_date=compose_date,
        compose_type=compose_type,
        compose_respin=ci.compose.respin,
        compose_label=ci.compose.label or None,
        acceptance_testing=acceptance_status,
    )
    if created and hasattr(request._request, '_messagings'):
        # add message
        _add_compose_create_msg(request, compose_obj)

    # Pre-load every known RPM as a NEVRA-style key -> primary key map so the
    # import loop below can resolve ids without a query per manifest row.
    rpms_in_db = {}
    qs = package_models.RPM.objects.all()
    for rpm in qs.iterator():
        key = "%s-%s:%s-%s.%s" % (rpm.name, rpm.epoch, rpm.version, rpm.release, rpm.arch)
        rpms_in_db[key] = rpm.id

    # Raw cursor used for fast bulk inserts via the helper functions below.
    cursor = connection.cursor()
    add_to_changelog = []
    imported_rpms = 0
    variants_info = composeinfo['payload']['variants']

    for variant in ci.get_variants(recursive=True):
        _link_compose_to_integrated_product(request, compose_obj, variant)
        variant_type = release_models.VariantType.objects.get(name=variant.type)
        variant_obj, created = models.Variant.objects.get_or_create(
            compose=compose_obj,
            variant_id=variant.id,
            variant_uid=variant.uid,
            variant_name=variant.name,
            variant_type=variant_type
        )
        if created:
            # Changelog entries are logged in one pass at the end.
            add_to_changelog.append(variant_obj)

        _store_relative_path_for_compose(compose_obj, variants_info, variant, variant_obj, add_to_changelog)

        for arch in variant.arches:
            arch_obj = common_models.Arch.objects.get(name=arch)
            var_arch_obj, _ = models.VariantArch.objects.get_or_create(arch=arch_obj,
                                                                       variant=variant_obj)

            # Existing (variant_arch, rpm) links for this arch, cached as
            # "variant_arch_id/rpm_id" keys so inserts can skip duplicates.
            compose_rpms_in_db = set()
            qs = models.ComposeRPM.objects.filter(variant_arch=var_arch_obj).values_list('variant_arch_id',
                                                                                         'rpm_id')
            for (variant_arch_id, rpm_id) in qs.iterator():
                key = "%s/%s" % (variant_arch_id, rpm_id)
                compose_rpms_in_db.add(key)

            # Manifest layout: rm.rpms[variant_uid][arch][srpm_nevra][rpm_nevra].
            # NOTE: iteritems() — this module targets Python 2.
            sources = set()
            for srpm_nevra, rpms in rm.rpms.get(variant.uid, {}).get(arch, {}).iteritems():
                sources.add(srpm_nevra)
                for rpm_nevra, rpm_data in rpms.iteritems():
                    imported_rpms += 1
                    path, filename = os.path.split(rpm_data['path'])
                    rpm_id = get_or_insert_rpm(rpms_in_db, cursor, rpm_nevra, srpm_nevra, filename)
                    # Sigkeys, paths and content categories are cached-id
                    # lookups (created on demand where create=True).
                    sigkey_id = common_models.SigKey.get_cached_id(rpm_data["sigkey"], create=True)
                    path_id = models.Path.get_cached_id(path, create=True)
                    content_category = rpm_data["category"]
                    content_category_id = repository_models.ContentCategory.get_cached_id(content_category)
                    insert_compose_rpms_if_nonexist(compose_rpms_in_db, cursor,
                                                    var_arch_obj.id, rpm_id,
                                                    content_category_id, sigkey_id, path_id)

    # Log creations of variants collected above.
    for obj in add_to_changelog:
        lib._maybe_log(request, True, obj)

    # Record an import summary notice on the request's changeset.
    request.changeset.add('notice', 0, 'null',
                          json.dumps({
                              'compose': compose_obj.compose_id,
                              'num_linked_rpms': imported_rpms,
                          }))

    if hasattr(request._request, '_messagings'):
        _add_import_msg(request, compose_obj, 'rpms', imported_rpms)

    return compose_obj.compose_id, imported_rpms

Example 43

Project: open-synthesis Source File: views.py
@require_http_methods(['HEAD', 'GET', 'POST'])
@login_required
def evaluate(request, board_id, evidence_id):
    """Return a view for assessing a piece of evidence against all hypotheses.

    Take a couple measures to reduce bias: (1) do not show the analyst their previous assessment, and (2) show
    the hypotheses in a random order.

    On POST, records/updates/removes the analyst's evaluation per hypothesis
    inside a single transaction and marks them as an evaluator of the board.
    """
    # Would be nice if we could refactor this and the view to use formsets. Not obvious how to handle the shuffling
    # of the indices that way

    board = get_object_or_404(Board, pk=board_id)
    evidence = get_object_or_404(Evidence, pk=evidence_id)

    default_eval = '------'
    keep_eval = '-- ' + _('Keep Previous Assessment')
    remove_eval = '-- ' + _('Remove Assessment')

    # Map hypothesis id -> this analyst's existing evaluation, so each
    # hypothesis can be paired with its previous assessment (or None).
    evaluations = {e.hypothesis_id: e for e in
                   Evaluation.objects.filter(board=board_id, evidence=evidence_id, user=request.user)}

    hypotheses = [(h, evaluations.get(h.id)) for h in Hypothesis.objects.filter(board=board_id)]

    # Legal <select> values that correspond to an actual Eval member.
    evaluation_set = {str(m.value) for m in Eval}

    if request.method == 'POST':
        with transaction.atomic():
            for hypothesis, _previous in hypotheses:
                select = request.POST['hypothesis-{}'.format(hypothesis.id)]
                if select == remove_eval:
                    Evaluation.objects.filter(
                        board=board_id,
                        evidence=evidence,
                        user=request.user,
                        hypothesis_id=hypothesis.id,
                    ).delete()
                elif select in evaluation_set:
                    Evaluation.objects.update_or_create(
                        board=board,
                        evidence=evidence,
                        hypothesis=hypothesis,
                        user=request.user,
                        defaults={'value': select}
                    )
                else:
                    # "keep previous" or the placeholder: don't add/update the evaluation
                    pass
            # Record that this user has evaluated something on the board.
            BoardFollower.objects.update_or_create(board=board, user=request.user, defaults={
                'is_evaluator': True,
            })

        messages.success(request, _('Recorded evaluations for evidence: {desc}').format(desc=evidence.evidence_desc))
        return HttpResponseRedirect(reverse('openach:detail', args=(board_id,)))
    else:
        # Show unassessed hypotheses first, each group in random order,
        # to avoid anchoring on the stored ordering.
        new_hypotheses = [h for h in hypotheses if h[1] is None]
        old_hypotheses = [h for h in hypotheses if h[1] is not None]
        random.shuffle(old_hypotheses)
        random.shuffle(new_hypotheses)
        context = {
            'board': board,
            'evidence': evidence,
            'hypotheses': new_hypotheses + old_hypotheses,
            'options': Evaluation.EVALUATION_OPTIONS,
            'default_eval': default_eval,
            'keep_eval': keep_eval,
            'remove_eval': remove_eval,
        }
        return render(request, 'boards/evaluate.html', context)

Example 44

Project: rockstor-core Source File: rockon_id.py
    @transaction.atomic
    def post(self, request, rid, command):
        """Execute a lifecycle command against a single Rock-on.

        ``command`` is one of: install, uninstall, update, stop, start,
        state_update, status_update. Requires the docker service to be up,
        and (re)starts the ztask daemon that runs the async jobs.
        Returns the serialized Rock-on.

        NOTE(review): this module is Python 2 (``except Exception, e:`` and
        ``.async(...)`` calls — ``async`` is a keyword in Python 3).
        """
        with self._handle_exception(request):

            # Docker must be running before any Rock-on operation.
            if (not docker_status()):
                e_msg = ('Docker service is not running. Start it and try '
                         'again.')
                handle_exception(Exception(e_msg), request)

            try:
                rockon = RockOn.objects.get(id=rid)
            except:
                e_msg = ('Rock-on(%d) does not exist' % rid)
                handle_exception(Exception(e_msg), request)

            # Ensure the ztask daemon is running: restart it once if down,
            # wait, then re-check in the finally block.
            try:
                dname = 'ztask-daemon'
                e_msg = ('ztask daemon is not running and could not be started')
                o, e, rc = superctl(dname, 'status')
                if (rc == 1):
                    superctl(dname, 'restart')
                    time.sleep(5)
            except Exception, e:
                logger.exception(e)
                handle_exception(Exception(e_msg), request)
            finally:
                # NOTE(review): if superctl() raises before rc is assigned,
                # referencing rc here raises UnboundLocalError — confirm
                # superctl cannot fail before the first assignment.
                if (rc == 1):
                    o, e, rc = superctl(dname, 'status')
                    if (rc == 1):
                        handle_exception(Exception(e_msg), request)

            if (command == 'install'):
                self._pending_check(request)
                # Maps posted by the UI: share dest-dir -> share name,
                # host port -> container port, custom-config key -> value,
                # environment key -> value.
                share_map = request.data.get('shares', {})
                port_map = request.data.get('ports', {})
                cc_map = request.data.get('cc', {})
                env_map = request.data.get('environment', {})
                containers = DContainer.objects.filter(rockon=rockon)
                for co in containers:
                    # Attach the chosen Share to each matching volume mapping.
                    for s in share_map.keys():
                        sname = share_map[s]
                        if (not Share.objects.filter(name=sname).exists()):
                            e_msg = ('Invalid Share(%s).' % sname)
                            handle_exception(Exception(e_msg), request)
                        if (DVolume.objects.filter(container=co, dest_dir=s).exists()):
                            so = Share.objects.get(name=sname)
                            vo = DVolume.objects.get(container=co,
                                                     dest_dir=s)
                            vo.share = so
                            vo.save()
                    # {'host_port' : 'container_port', ... }
                    for p in port_map.keys():
                        if (DPort.objects.filter(hostp=p).exists()):
                            dup_po = DPort.objects.get(hostp=p)
                            if (dup_po.container.rockon.id != rockon.id):
                                if (dup_po.container.rockon.state in
                                    ('installed', 'pending_install')):
                                    # Cannot claim a port from a rock-on that's installed.
                                    # NOTE(review): "Rock-n" below looks like a
                                    # typo for "Rock-on" in the user message.
                                    conf_ro = dup_po.container.rockon.name
                                    e_msg = (
                                        'Port(%s) belongs to another '
                                        'Rock-n(%s). Choose a different '
                                        'port. If you must choose the same '
                                        'port, uninstall %s first and try again.'
                                        % (p, conf_ro, conf_ro))
                                    handle_exception(Exception(e_msg), request)
                                # Otherwise move the conflicting mapping to the
                                # next available default host port.
                                dup_po.hostp = self._next_available_default_hostp(dup_po.hostp)
                                dup_po.save()
                        for co2 in DContainer.objects.filter(rockon=rockon):
                            if (DPort.objects.filter(container=co2, containerp=port_map[p]).exists()):
                                # Found the container that needs this port.
                                po = DPort.objects.get(container=co2, containerp=port_map[p])
                                po.hostp = p
                                po.save()
                                break
                    # NOTE(review): cc_map is per-rockon, but this loop runs
                    # once per container — redundant though harmless writes.
                    for c in cc_map.keys():
                        if (not DCustomConfig.objects.filter(rockon=rockon, key=c).exists()):
                            e_msg = ('Invalid custom config key(%s)' % c)
                            handle_exception(Exception(e_msg), request)
                        cco = DCustomConfig.objects.get(rockon=rockon, key=c)
                        cco.val = cc_map[c]
                        cco.save()
                    for e in env_map.keys():
                        # NOTE(review): "variabled" below looks like a typo
                        # for "variable" in the user message.
                        if (not DContainerEnv.objects.filter(container=co, key=e).exists()):
                            e_msg = ('Invalid environment variabled(%s)' % e)
                            handle_exception(Exception(e_msg), request)
                        ceo = DContainerEnv.objects.get(container=co, key=e)
                        ceo.val = env_map[e]
                        ceo.save()
                # Kick off the async install job and mark the state pending.
                install.async(rockon.id)
                rockon.state = 'pending_install'
                rockon.save()
            elif (command == 'uninstall'):
                self._pending_check(request)
                if (rockon.state != 'installed'):
                    e_msg = ('Rock-on(%s) is not currently installed. Cannot '
                             'uninstall it' % rockon.name)
                    handle_exception(Exception(e_msg), request)
                if (rockon.status == 'started' or rockon.status == 'pending_start'):
                    e_msg = ('Rock-on(%s) must be stopped before it can '
                             'be uninstalled. Stop it and try again' %
                             rockon.name)
                    handle_exception(Exception(e_msg), request)
                uninstall.async(rockon.id)
                rockon.state = 'pending_uninstall'
                rockon.save()
                # Drop user-added volume mappings; stock ones are kept.
                for co in DContainer.objects.filter(rockon=rockon):
                    DVolume.objects.filter(container=co, uservol=True).delete()
            elif (command == 'update'):
                self._pending_check(request)
                if (rockon.state != 'installed'):
                    e_msg = ('Rock-on(%s) is not currently installed. Cannot '
                             'update it' % rockon.name)
                    handle_exception(Exception(e_msg), request)
                if (rockon.status == 'started' or rockon.status == 'pending_start'):
                    e_msg = ('Rock-on(%s) must be stopped before it can '
                             'be updated. Stop it and try again' %
                             rockon.name)
                    handle_exception(Exception(e_msg), request)
                # NOTE(review): unlike 'install', no default here — if the
                # client omits 'shares', share_map is None and .keys() below
                # raises AttributeError. Confirm the client always sends it.
                share_map = request.data.get('shares')
                for co in DContainer.objects.filter(rockon=rockon):
                    for s in share_map.keys():
                        sname = share_map[s]
                        if (not Share.objects.filter(name=sname).exists()):
                            e_msg = ('Invalid Share(%s).' % sname)
                            handle_exception(Exception(e_msg), request)
                        so = Share.objects.get(name=sname)
                        if (DVolume.objects.filter(container=co, share=so).exists()):
                            e_msg = ('Share(%s) is already assigned to this Rock-on' % sname)
                            handle_exception(Exception(e_msg), request)
                        if (DVolume.objects.filter(container=co, dest_dir=s).exists()):
                            e_msg = ('Directory(%s) is already mapped for this Rock-on' % s)
                            handle_exception(Exception(e_msg), request)
                        if (not s.startswith('/')):
                            e_msg = ('Invalid Directory(%s). Must provide an '
                                     'absolute path. Eg: /data/media' % s)
                            handle_exception(Exception(e_msg), request)
                        do = DVolume(container=co, share=so, uservol=True, dest_dir=s)
                        do.save()
                rockon.state = 'pending_update'
                rockon.save()
                update.async(rockon.id)
            elif (command == 'stop'):
                stop.async(rockon.id)
                rockon.status = 'pending_stop'
                rockon.save()
            elif (command == 'start'):
                start.async(rockon.id)
                rockon.status = 'pending_start'
                rockon.save()
            elif (command == 'state_update'):
                # Direct state override, used by the async job callbacks.
                state = request.data.get('new_state')
                rockon.state = state
                rockon.save()
            elif (command == 'status_update'):
                status = request.data.get('new_status')
                rockon.status = status
                rockon.save()
            return Response(RockOnSerializer(rockon).data)

Example 45

Project: django-shop Source File: checkout.py
    @list_route(methods=['post'], url_path='upload')
    def upload(self, request):
        """
        Receive the scope data posted by the AngularJS `shop-dialog-form` directive's implicit
        `upload()` function (typically wired via `ng-change="upload()"`). The posted data is
        dispatched to every form registered through a `DialogFormPluginBase`, in plugin order.
        Afterwards the cart is updated, so that all cart modifiers run and adopt those changes.
        """
        cart = self.get_queryset()
        if cart is None:
            raise ValidationError("Can not proceed to checkout without a cart")

        # Collect (form_class, scope_data) pairs for every registered dialog
        # form present in the posted data, then order them by plugin_order.
        pending = []
        for form_class in self.dialog_forms:
            scope_key = form_class.scope_prefix.split('.', 1)[1]
            if scope_key not in request.data:
                continue
            payload = request.data[scope_key]
            if 'plugin_order' in payload:
                pending.append((form_class, payload))
            else:
                pending.extend((form_class, entry) for entry in payload.values())
        pending.sort(key=lambda item: int(item[1]['plugin_order']))

        # Validate each form, collecting errors, a text summary of valid
        # forms, and an overall $valid flag for the client's $rootScope.
        errors = {}
        checkout_summary = {}
        response_data = {'$valid': True}
        with transaction.atomic():
            for form_class, payload in pending:
                form = form_class.form_factory(request, payload, cart)
                valid = form.is_valid()
                if valid:
                    # An empty error dict forces revalidation by the client side validation.
                    errors[form_class.form_name] = {}
                    checkout_summary[form_class.form_name] = form.as_text()
                else:
                    # Errors are rendered by the client side validation.
                    errors[form_class.form_name] = dict(form.errors)
                response_data['$valid'] = response_data['$valid'] and valid

                # A dict response lets the form override its model $scope.
                update_data = form.get_response_data()
                if isinstance(update_data, dict):
                    response_data[form_class.scope_prefix.split('.', 1)[1]] = update_data
            cart.save()

        # Attach form errors and summaries so the customer gets feedback.
        response = self.list(request)
        response.data.update(errors=errors, checkout_summary=checkout_summary, data=response_data)
        return response

Example 46

Project: coursys Source File: forms.py
@transaction.atomic
def process_pcs_row(row, column, rownum, unit, semester, user):
    """
    Process a single row from the PCS import.

    Creates/updates the Person, GradStudent, GradStatus and potential
    Supervisor records described by the row, and logs the import.

    Returns a list of human-readable warning strings (possibly empty).
    """
    appsemester = semester.previous_semester()
    warnings = []
    ident = "in row %i" % (rownum)
    appid = row[column['appid']]
    emplid = row[column['emplid']]
    program = row[column['program']]

    # get Person, from SIMS if necessary
    try:
        p = Person.objects.get(emplid=int(emplid))
    except ValueError:
        warnings.append("Bad emplid %s: not processing that row." % (ident))
        return warnings
    except Person.DoesNotExist:
        try:
            p = add_person(emplid)
        except SIMSProblem as e:
            # Bug fix: previously returned e.message (a bare string) while
            # every other path returns a list of warnings; keep the return
            # type consistent so callers can iterate warnings safely.
            return [str(e)]

    ident = 'for "%s"' % (p.name())

    # update information on the Person
    email = row[column['email']]
    if email:
        p.config['applic_email'] = email

    dob = row[column['dob']]
    if dob:
        try:
            dt = datetime.datetime.strptime(dob, "%Y-%m-%d")
            p.config['birthdate'] = dt.date().isoformat()
        except ValueError:
            warnings.append("Bad birthdate %s." % (ident))

    # get extended SIMS data
    data = grad_student_info(emplid)
    p.config.update(data)

    p.save()

    # get GradStudent, creating if necessary

    # a unique identifier for this application, so we can detect repeated imports (and handle gracefully)
    uid = "%s-%s-%s-%s" % (unit.slug, semester.name, appid, emplid)
    # TODO: wrong, wrong, wrong. Figure out how to select program from import data
    program = GradProgram.objects.filter(unit=unit)[0]

    # find the old GradStudent if possible
    gss = GradStudent.objects.filter(program__unit=unit, person=p)
    gs = None
    for g in gss:
        if 'app_id' in g.config and g.config['app_id'] == uid:
            gs = g
            break
    if not gs:
        gs = GradStudent(program=program, person=p)
        gs.config['app_id'] = uid

    resarea = row[column['resarea']]
    firstlang = row[column['firstlang']]

    gs.research_area = resarea
    gs.mother_tongue = firstlang
    gs.created_by = user.userid
    gs.updated_by = user.userid
    gs.config['start_semester'] = semester.name
    gs.save()

    complete = row[column['complete']].strip()
    decision = row[column['decision']].strip()
    notes = row[column['notes']].strip()
    gs.config['decisionnotes'] = notes

    old_st = GradStatus.objects.filter(student=gs, start__name__gte=semester.name)
    if not old_st:
        # if no old status for current semester, create one

        # application completion status
        if complete == 'AppConfirm':
            st = GradStatus(student=gs, status="COMP", start=appsemester, end=None, notes="PCS import")
            st.save()
        elif complete == '':
            st = GradStatus(student=gs, status="INCO", start=appsemester, end=None, notes="PCS import")
            st.save()
        else:
            warnings.append('Unknown "Confirmation of Completion of Application" value %s.' % (ident))

        # decision status: map the PCS decision code to a GradStatus code.
        # TODO: 'AMScT' should also bump program to MSc thesis,
        # and 'AMScC' to MSc course-based.
        decision_status = {
            'DECL': 'DECL',
            '': 'OFFO',
            'R': 'REJE',
            'HOLD': 'HOLD',
            'AMScT': 'CONF',
            'AMScC': 'CONF',
        }
        if decision in decision_status:
            st = GradStatus(student=gs, status=decision_status[decision],
                            start=appsemester, end=None, notes="PCS import")
            st.save()

    # potential supervisor
    potsuper = row[column['potsuper']]
    if potsuper:
        superv = None
        external = None
        try:
            ps_last, ps_first = potsuper.split(', ')
        except ValueError:
            warnings.append('Bad potential supervisor name %s: will store them as an "external" supervisor.' % (ident))
            external = potsuper
        else:
            # Try to resolve the name against people who can supervise in this unit.
            potentials = possible_supervisor_people([unit])
            potential_ids = [p.id for p in potentials]
            query = Q(last_name=ps_last, first_name=ps_first) | Q(last_name=ps_last, pref_first_name=ps_first)
            people = Person.objects.filter(query, id__in=potential_ids)
            if people.count() == 1:
                superv = people[0]
            else:
                # Bug fix: "Coundn't" -> "Couldn't" in the warning message.
                warnings.append('Couldn\'t find potential supervisor %s: will store them as an "external" supervisor.' % (ident))
                external = potsuper

        # Reuse an existing potential-supervisor record if present.
        old_s = Supervisor.objects.filter(student=gs, supervisor_type='POT')
        if old_s:
            s = old_s[0]
        else:
            s = Supervisor(student=gs, supervisor_type='POT')
        s.superv = superv
        s.external = external
        s.position = 0
        s.created_by = user.userid
        s.modified_by = user.userid
        s.save()

    l = LogEntry(userid=user.userid, description="Imported grad record for %s (%s) from PCS" % (p.name(), p.emplid), related_object=gs)
    l.save()

    return warnings

Example 47

Project: product-definition-center Source File: lib.py
@transaction.atomic(savepoint=False)
def compose__import_images(request, release_id, composeinfo, image_manifest):
    """Import an image manifest for a compose into the given release.

    Deserializes the productmd ``composeinfo`` and ``image_manifest`` payloads,
    gets-or-creates the Compose plus its Variant/VariantArch tree, and links
    every image in the manifest to the matching variant/arch pair.

    Runs inside the caller's transaction (``savepoint=False``), so any failure
    rolls back the entire import.

    Returns a ``(compose_id, imported_image_count)`` tuple.
    """
    release_obj = release_models.Release.objects.get(release_id=release_id)

    # Parse the raw JSON-ish payloads through productmd's own deserializers.
    ci = productmd.composeinfo.ComposeInfo()
    common_hacks.deserialize_wrapper(ci.deserialize, composeinfo)

    im = productmd.images.Images()
    common_hacks.deserialize_wrapper(im.deserialize, image_manifest)

    # Bail out early if composeinfo and the image manifest disagree
    # about which compose they describe.
    _maybe_raise_inconsistency_error(ci, im, 'images')

    # productmd stores the date as YYYYMMDD; reformat to YYYY-MM-DD for the DB.
    compose_date = "%s-%s-%s" % (ci.compose.date[:4], ci.compose.date[4:6], ci.compose.date[6:])
    compose_type = models.ComposeType.objects.get(name=ci.compose.type)
    compose_obj, created = lib._logged_get_or_create(
        request, models.Compose,
        release=release_obj,
        compose_id=ci.compose.id,
        compose_date=compose_date,
        compose_type=compose_type,
        compose_respin=ci.compose.respin,
        compose_label=ci.compose.label or None,
    )
    if created and hasattr(request._request, '_messagings'):
        # add message (only when the messaging middleware is active)
        _add_compose_create_msg(request, compose_obj)

    add_to_changelog = []
    imported_images = 0

    variants_info = composeinfo['payload']['variants']
    for variant in ci.get_variants(recursive=True):
        _link_compose_to_integrated_product(request, compose_obj, variant)
        variant_type = release_models.VariantType.objects.get(name=variant.type)
        variant_obj, created = models.Variant.objects.get_or_create(
            compose=compose_obj,
            variant_id=variant.id,
            variant_uid=variant.uid,
            variant_name=variant.name,
            variant_type=variant_type
        )
        if created:
            # Newly created variants are logged once at the end of the import.
            add_to_changelog.append(variant_obj)

        _store_relative_path_for_compose(compose_obj, variants_info, variant, variant_obj, add_to_changelog)

        for arch in variant.arches:
            arch_obj = common_models.Arch.objects.get(name=arch)
            var_arch_obj, created = models.VariantArch.objects.get_or_create(arch=arch_obj, variant=variant_obj)

            for i in im.images.get(variant.uid, {}).get(arch, []):
                path, file_name = os.path.split(i.path)
                path_id = models.Path.get_cached_id(path, create=True)

                # Images are keyed on (file_name, sha256); the remaining fields
                # are only filled in when the row is first created.
                image, _ = package_models.Image.objects.get_or_create(
                    file_name=file_name, sha256=i.checksums["sha256"],
                    defaults={
                        'image_format_id': package_models.ImageFormat.get_cached_id(i.format),
                        'image_type_id': package_models.ImageType.get_cached_id(i.type),
                        'disc_number': i.disc_number,
                        'disc_count': i.disc_count,
                        'arch': i.arch,
                        'mtime': i.mtime,
                        'size': i.size,
                        'bootable': i.bootable,
                        'implant_md5': i.implant_md5,
                        'volume_id': i.volume_id,
                        'md5': i.checksums.get("md5", None),
                        'sha1': i.checksums.get("sha1", None),
                        'subvariant': getattr(i, 'subvariant', None),
                    }
                )

                # NOTE(review): imported_images counts every manifest entry,
                # including ones whose ComposeImage link already existed.
                mi, created = models.ComposeImage.objects.get_or_create(
                    variant_arch=var_arch_obj,
                    image=image,
                    path_id=path_id)
                imported_images += 1

    for obj in add_to_changelog:
        lib._maybe_log(request, True, obj)

    # Record an import summary in the changeset audit trail.
    request.changeset.add('notice', 0, 'null',
                          json.dumps({
                              'compose': compose_obj.compose_id,
                              'num_linked_images': imported_images,
                          }))

    if hasattr(request._request, '_messagings'):
        _add_import_msg(request, compose_obj, 'images', imported_images)

    return compose_obj.compose_id, imported_images

Example 48

Project: PyClassLessons Source File: migrate.py
    def sync_apps(self, connection, app_labels):
        """Run the old syncdb-style operation on a list of app_labels.

        For every unmigrated app in ``app_labels``: create tables for models
        that do not exist yet, install their custom SQL and indexes, and
        finally load ``initial_data`` fixtures (unless disabled).

        Returns the set of model classes whose tables were created.
        """
        cursor = connection.cursor()

        try:
            # Get a list of already installed *models* so that references work right.
            tables = connection.introspection.table_names(cursor)
            seen_models = connection.introspection.installed_models(tables)
            created_models = set()
            # Maps a referenced model -> list of FK references waiting for
            # that model's table to exist before their SQL can be emitted.
            pending_references = {}

            # Build the manifest of apps and models that are to be synchronized
            all_models = [
                (app_config.label,
                    router.get_migratable_models(app_config, connection.alias, include_auto_created=True))
                for app_config in apps.get_app_configs()
                if app_config.models_module is not None and app_config.label in app_labels
            ]

            def model_installed(model):
                # True when neither the model's table nor its auto-created
                # (m2m through) table exists yet in the database.
                opts = model._meta
                converter = connection.introspection.table_name_converter
                # Note that if a model is unmanaged we short-circuit and never try to install it
                return not ((converter(opts.db_table) in tables) or
                    (opts.auto_created and converter(opts.auto_created._meta.db_table) in tables))

            manifest = OrderedDict(
                (app_name, list(filter(model_installed, model_list)))
                for app_name, model_list in all_models
            )

            create_models = set(itertools.chain(*manifest.values()))
            emit_pre_migrate_signal(create_models, self.verbosity, self.interactive, connection.alias)

            # Create the tables for each model
            if self.verbosity >= 1:
                self.stdout.write("  Creating tables...\n")
            with transaction.atomic(using=connection.alias, savepoint=False):
                for app_name, model_list in manifest.items():
                    for model in model_list:
                        # Create the model's database table, if it doesn't already exist.
                        if self.verbosity >= 3:
                            self.stdout.write("    Processing %s.%s model\n" % (app_name, model._meta.object_name))
                        sql, references = connection.creation.sql_create_model(model, no_style(), seen_models)
                        seen_models.add(model)
                        created_models.add(model)
                        for refto, refs in references.items():
                            pending_references.setdefault(refto, []).extend(refs)
                            if refto in seen_models:
                                sql.extend(connection.creation.sql_for_pending_references(refto, no_style(), pending_references))
                        sql.extend(connection.creation.sql_for_pending_references(model, no_style(), pending_references))
                        if self.verbosity >= 1 and sql:
                            self.stdout.write("    Creating table %s\n" % model._meta.db_table)
                        for statement in sql:
                            cursor.execute(statement)
                        # Track the new table so later model_installed checks see it.
                        tables.append(connection.introspection.table_name_converter(model._meta.db_table))

            # We force a commit here, as that was the previous behavior.
            # If you can prove we don't need this, remove it.
            transaction.set_dirty(using=connection.alias)
        finally:
            cursor.close()

        # The connection may have been closed by a syncdb handler.
        cursor = connection.cursor()
        try:
            # Install custom SQL for the app (but only if this
            # is a model we've just created)
            if self.verbosity >= 1:
                self.stdout.write("  Installing custom SQL...\n")
            for app_name, model_list in manifest.items():
                for model in model_list:
                    if model in created_models:
                        custom_sql = custom_sql_for_model(model, no_style(), connection)
                        if custom_sql:
                            if self.verbosity >= 2:
                                self.stdout.write("    Installing custom SQL for %s.%s model\n" % (app_name, model._meta.object_name))
                            try:
                                # Each model's custom SQL commits (or rolls back)
                                # independently, so one failure doesn't abort the rest.
                                with transaction.commit_on_success_unless_managed(using=connection.alias):
                                    for sql in custom_sql:
                                        cursor.execute(sql)
                            except Exception as e:
                                self.stderr.write("    Failed to install custom SQL for %s.%s model: %s\n" % (app_name, model._meta.object_name, e))
                                if self.show_traceback:
                                    traceback.print_exc()
                        else:
                            if self.verbosity >= 3:
                                self.stdout.write("    No custom SQL for %s.%s model\n" % (app_name, model._meta.object_name))

            if self.verbosity >= 1:
                self.stdout.write("  Installing indexes...\n")

            # Install SQL indices for all newly created models
            for app_name, model_list in manifest.items():
                for model in model_list:
                    if model in created_models:
                        index_sql = connection.creation.sql_indexes_for_model(model, no_style())
                        if index_sql:
                            if self.verbosity >= 2:
                                self.stdout.write("    Installing index for %s.%s model\n" % (app_name, model._meta.object_name))
                            try:
                                with transaction.commit_on_success_unless_managed(using=connection.alias):
                                    for sql in index_sql:
                                        cursor.execute(sql)
                            except Exception as e:
                                self.stderr.write("    Failed to install index for %s.%s model: %s\n" % (app_name, model._meta.object_name, e))
        finally:
            cursor.close()

        # Load initial_data fixtures (unless that has been disabled)
        if self.load_initial_data:
            for app_label in app_labels:
                call_command('loaddata', 'initial_data', verbosity=self.verbosity, database=connection.alias, skip_validation=True, app_label=app_label, hide_empty=True)

        return created_models

Example 49

Project: reviewboard Source File: user.py
    @webapi_login_required
    @webapi_check_local_site
    @webapi_response_errors(PERMISSION_DENIED, INVALID_FORM_DATA)
    @webapi_request_fields(
        required={
            'username': {
                'type': six.text_type,
                'description': 'The username of the user to create.',
            },
            'email': {
                'type': six.text_type,
                'description': 'The e-mail address of the user to create.',
            },
            'password': {
                'type': six.text_type,
                'description': 'The password of the user to create.',
            }
        },
        optional={
            'first_name': {
                'type': six.text_type,
                'description': 'The first name of the user to create.',
            },
            'last_name': {
                'type': six.text_type,
                'description': 'The last name of the user to create.',
            }
        })
    def create(self, request, username, email, password, first_name='',
               last_name='', local_site=None, *args, **kwargs):
        """Create a user

        This functionality is limited to superusers.
        """
        # Only superusers (or users explicitly granted auth.add_user) may
        # create accounts, and only on the global site.
        may_create = (request.user.is_superuser or
                      request.user.has_perm('auth.add_user'))

        if not may_create:
            return PERMISSION_DENIED.with_message(
                'You do not have permission to create users.')

        if local_site:
            return PERMISSION_DENIED.with_message(
                'This API is not available for local sites.')

        try:
            validate_email(email)
        except ValidationError as err:
            return INVALID_FORM_DATA, {
                'fields': {
                    'email': [six.text_type(err)]
                },
            }

        try:
            # A duplicate username raises IntegrityError, which would poison
            # the surrounding transaction. Wrapping the creation in its own
            # transaction.atomic block confines the breakage, which matters
            # in particular for unit tests that wrap each case in
            # transaction.atomic themselves.
            with transaction.atomic():
                user = User.objects.create_user(username, email, password,
                                                first_name=first_name,
                                                last_name=last_name)
        except IntegrityError:
            return INVALID_FORM_DATA, {
                'fields': {
                    'username': [
                        'A user with the requested username already exists.',
                    ]
                }
            }

        return 201, {
            self.item_result_key: user,
        }

Example 50

Project: coursys Source File: views.py
@requires_course_by_slug
@transaction.atomic
def submit(request, course_slug):
    """Handle the group-creation form submission for a course offering.

    Validates the requested group name, the selected activities, and the
    selected members (students may only add themselves; staff may add any
    student), then creates the Group and its GroupMember rows, logging each
    step and notifying added students.
    """
    person = get_object_or_404(Person,userid=request.user.username)
    course = get_object_or_404(CourseOffering, slug = course_slug)
    # Requester must be a non-dropped member of the offering.
    member = Member.objects.exclude(role='DROP').get(person=person, offering=course)
    error_info=None
    # NOTE(review): if 'GroupName' is missing from the POST, name is None and
    # the empty-name check below does not fire — confirm that path is intended.
    name = request.POST.get('GroupName')
    if name:
        # Truncate to the model's 30-character limit.
        name = name[:30]
    #Check if group has a unique name
    if Group.objects.filter(name=name,courseoffering=course):
        error_info="A group named \"%s\" already exists" % (name)
        messages.add_message(request, messages.ERROR, error_info)
        return HttpResponseRedirect(reverse('groups.views.groupmanage', kwargs={'course_slug': course_slug}))
    #Check if the group name is empty, these two checks may need to be moved to forms later.
    if name == "":
        error_info = "Group name cannot be empty: please enter a group name."
        messages.add_message(request, messages.ERROR, error_info)
        return HttpResponseRedirect(reverse('groups.views.groupmanage', kwargs={'course_slug': course_slug}))
    

    else:
        # find selected activities
        selected_act = []
        activities = Activity.objects.filter(offering=course, group=True, deleted=False)
        if not is_course_staff_by_slug(request, course_slug):
            # Students cannot form groups around invisible activities.
            activities = activities.exclude(status='INVI')

        # One checkbox sub-form per activity, keyed by the activity slug prefix.
        for activity in activities:
            activityForm = ActivityForm(request.POST, prefix=activity.slug)
            if activityForm.is_valid() and activityForm.cleaned_data['selected'] == True:
                selected_act.append(activity)
        
        # no selected activities: fail.
        if not selected_act:
            messages.add_message(request, messages.ERROR, "Group not created: no activities selected.")
            return HttpResponseRedirect(reverse('groups.views.groupmanage', kwargs={'course_slug': course_slug}))
        
        #groupForSemesterForm = GroupForSemesterForm(request.POST)
        #if groupForSemesterForm.is_valid():
        #    groupForSemester = groupForSemesterForm.cleaned_data['selected']
        groupForSemester = False
        
        #validate database integrity before saving anything. 
        #If one student is in a group for an activity, he/she cannot be in another group for the same activity.
        if is_course_student_by_slug(request, course_slug):
            # Students can only put themselves in the new group.
            isStudentCreatedGroup = True
            studentList = []
            studentList.append(member)
        elif is_course_staff_by_slug(request, course_slug):
            # Staff pick members via one checkbox sub-form per student.
            isStudentCreatedGroup = False
            studentList = []
            students = Member.objects.select_related('person').filter(offering = course, role = 'STUD')
            for student in students:
                studentForm = StudentForm(request.POST, prefix = student.person.userid)
                if studentForm.is_valid() and studentForm.cleaned_data['selected'] == True:
                    studentList.append(student)
        #Check if students has already in a group
        if _validateIntegrity(request,isStudentCreatedGroup, groupForSemester, course, studentList, selected_act) == False:
            return HttpResponseRedirect(reverse('groups.views.groupmanage', kwargs={'course_slug': course_slug}))
        #No selected members,group creating will fail.        
        if not studentList:
            messages.add_message(request, messages.ERROR, "Group not created: no members selected.")
            return HttpResponseRedirect(reverse('groups.views.groupmanage', kwargs={'course_slug': course_slug}))
        
        group = Group(name=name, manager=member, courseoffering=course, groupForSemester = groupForSemester)
        group.save()
        #LOG EVENT#
        l = LogEntry(userid=request.user.username,
        description="created a new group %s for %s." % (group.name, course),
        related_object=group )
        l.save()

        if is_course_student_by_slug(request, course_slug):
            # Student creator is auto-confirmed into the group for each activity.
            for activity in selected_act:
                groupMember = GroupMember(group=group, student=member, confirmed=True, activity_id=activity.id)
                groupMember.save()
                #LOG EVENT#
                l = LogEntry(userid=request.user.username,
                description="automatically became a group member of %s for activity %s." % (group.name, groupMember.activity),
                    related_object=groupMember )
                l.save()

            messages.add_message(request, messages.SUCCESS, 'Group Created')
            return HttpResponseRedirect(reverse('groups.views.groupmanage', kwargs={'course_slug': course_slug}))

        elif is_course_staff_by_slug(request, course_slug):
            # Staff path: re-read the student checkboxes and add each selected
            # student to every selected activity, then notify them.
            students = Member.objects.select_related('person').filter(offering = course, role = 'STUD')
            for student in students:
                studentForm = StudentForm(request.POST, prefix = student.person.userid)
                if studentForm.is_valid() and studentForm.cleaned_data['selected'] == True:
                    for activity in selected_act:
                        groupMember = GroupMember(group=group, student=student, confirmed=True, activity_id=activity.id)
                        groupMember.save()
                        #LOG EVENT#
                        l = LogEntry(userid=request.user.username,
                        description="added %s as a group member to %s for activity %s." % (student.person.userid,group.name, groupMember.activity),
                            related_object=groupMember )
                        l.save()
                    
                    # NOTE(review): "added the group" in the message below looks
                    # like it should read "added to the group" — runtime string,
                    # so not changed here.
                    n = NewsItem(user=student.person, author=member.person, course=group.courseoffering,
                     source_app="group", title="Added to Group",
                     content="You have been added the group %s." % (group.name),
                     url=reverse('groups.views.groupmanage', kwargs={'course_slug':course.slug})
                    )
                    n.save()
                    
            messages.add_message(request, messages.SUCCESS, 'Group Created')
            return HttpResponseRedirect(reverse('groups.views.view_group', kwargs={'course_slug': course_slug, 'group_slug': group.slug}))
        else:
            # Neither student nor staff in this offering.
            return HttpResponseForbidden()
See More Examples - Go to Next Page
Page 1 Selected Page 2 Page 3 Page 4