django.utils.six.moves.xrange

Here are examples of the Python API django.utils.six.moves.xrange taken from open source projects.
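
In Django's bundled six compatibility layer, xrange resolves to the built-in xrange on Python 2 and to range on Python 3, so code importing it gets a lazily evaluated integer sequence on either interpreter. Note that django.utils.six was removed in Django 3.0, so the examples below target older Django releases. A minimal sketch:

from django.utils.six.moves import xrange  # range on Python 3, xrange on Python 2

for i in xrange(3):
    print(i)  # 0, 1, 2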

199 Examples

Example 151

Project: django-happenings
Source File: handlers.py
    def repeat_reverse(self, start, end):
        """
        Starts from 'start' day and counts backwards until 'end' day.
        'start' should be >= 'end'. If they are equal, it does nothing.
        If a day falls outside of end_repeat, it won't be counted.
        """
        day = start
        diff = start - end
        try:
            if date(self.year, self.month, day) <= self.end_repeat:
                self.count_it(day)
        # a value error likely means the event runs past the end of the month,
        # like an event that ends on the 31st, but the month doesn't have that
        # many days. Ignore it b/c the dates won't be added to calendar anyway
        except ValueError:
            pass
        for i in xrange(diff):
            day -= 1
            try:
                if date(self.year, self.month, day) <= self.end_repeat:
                    self.count_it(day)
            except ValueError:
                pass

Example 152

Project: django-happenings
Source File: handlers.py
    def repeat_chunk(self, diff):
        for i in xrange(diff):
            self.repeat(self.day + i + 1)

Example 153

Project: django-happenings
Source File: handlers.py
    def _handle_weekly_repeat_out(self):
        """
        Handles repeating an event weekly (or biweekly) if the current
        year and month are outside of its start year and month.
        It takes care of cases 3 and 4 in _handle_weekly_repeat_in() comments.
        """
        start_d = _first_weekday(
            self.event.l_start_date.weekday(), date(self.year, self.month, 1)
        )
        self.day = start_d.day
        self.count_first = True
        if self.event.repeats('BIWEEKLY'):
            self._biweekly_helper()
        elif self.event.repeats('WEEKLY'):
            # Note count_first=True b/c although the start date isn't this
            # month, the event does begin repeating this month and start_date
            # has not yet been counted.
            # Also note we start from start_d.day and not
            # event.l_start_date.day
            self.repeat()
            if self.event.is_chunk():
                diff = self.event.start_end_diff
                self.count = _chunk_fill_out_first_week(
                    self.year, self.month, self.count, self.event, diff
                )
                for i in xrange(diff):
                    # count the chunk days, then repeat them
                    self.day = start_d.day + i + 1
                    self.repeat()

Example 154

Project: django-happenings
Source File: handlers.py
def _chunk_fill_out_first_week(year, month, count, event, diff):
    """
    If a repeating chunk event exists in a particular month, but didn't
    start that month, it may be necessary to fill out the first week.
    Five cases:
        1. event starts repeating on the 1st day of month
        2. event starts repeating past the 1st day of month
        3. event starts repeating before the 1st day of month, and continues
        through it.
        4. event starts repeating before the 1st day of month, and finishes
        repeating before it.
        5. event starts repeating before the 1st day of month, and finishes
        on it.
    """
    first_of_the_month = date(year, month, 1)
    d = _first_weekday(event.l_end_date.weekday(), first_of_the_month)
    d2 = _first_weekday(event.l_start_date.weekday(), first_of_the_month)
    diff_weekdays = d.day - d2.day
    day = first_of_the_month.day
    start = event.l_start_date.weekday()
    first = first_of_the_month.weekday()

    if start == first or diff_weekdays == diff:
        return count
    elif start > first:
        end = event.l_end_date.weekday()
        diff = end - first + 1
    elif start < first:
        diff = d.day
    for i in xrange(diff):
        if event.end_repeat is not None and \
                date(year, month, day) >= event.end_repeat:
            break
        count[day].append((event.title, event.pk))
        day += 1
    return count

Example 155

Project: django-happenings
Source File: upcoming.py
    def _weekday(self):
        start = self.event.l_start_date
        if not self.event.l_start_date > self.now:
            start = start.replace(
                year=self.now.year, month=self.now.month, day=self.now.day
            )
        while start.weekday() > 4:
            start += timedelta(days=1)
        if start < self.now:
            start += timedelta(days=1)
            while start.weekday() > 4:
                start += timedelta(days=1)
        for i in xrange(self.num):
            # change to date() so we can compare to event.end_repeat
            start_ = date(start.year, start.month, start.day)
            if self.we_should_stop(start, start_):
                return
            while start.weekday() > 4:
                start += timedelta(days=1)
            self.events.append((start, self.event))
            start += timedelta(days=1)

Example 156

Project: django-happenings
Source File: upcoming.py
    def _others(self):
        repeat = {'WEEKLY': 7, 'BIWEEKLY': 14, 'DAILY': 1}
        if self.event.repeats('DAILY'):
            if self.event.l_start_date > self.now:
                start = self.event.l_start_date
            elif self.event.l_start_date.time() < self.now.time():
                # has it already begun? Then no longer upcoming, so ++day
                start = self.now
                start += timedelta(days=1)
            else:
                start = self.now
        else:
            start = self.event.l_start_date
            end = self.event.l_end_date
            while start < self.now or end <= self.now:
                start += timedelta(days=repeat[self.event.repeat])
                end += timedelta(days=repeat[self.event.repeat])
        for i in xrange(self.num):
            # change to date() so we can compare to event.end_repeat
            start_ = date(start.year, start.month, start.day)
            if self.we_should_stop(start, start_):
                return
            self.events.append((start, self.event))
            start += timedelta(days=repeat[self.event.repeat])

Example 157

    def check_dates(self, event, valid_dates):
        """A DRY helper function."""
        for year, dates in valid_dates.items():
            for month, days in dates.items():
                response = self.client.get(reverse(
                    'calendar:list', kwargs={'year': year, 'month': month}
                ))
                self.clean_whitespace(response)

                if days:
                    self.assertContains(response, event.title)
                else:
                    self.assertNotContains(response, event.title)

                for day in days:
                    self.assertContains(response, self.cal_str(day))

                [self.assertNotContains(response, self.cal_str(day)) for
                 day in xrange(1, monthrange(2014, int(month))[1] + 1)
                 if day not in days]

Example 158

    def test_monthly_repeating_chunk_three_days_span_one_month3(self):
        """
        Test a monthly repeating chunk that lasts three days with
        start_date and end_date in the same month, but with the event
        ending on the 31st. This checks that we the calendar won't complain
        if the event runs off the end of the month sometimes, and it shouldn't
        add those extra days to the beginning of the following month.
        """
        event = create_event(
            start_date=(2014, 3, 25),
            end_date=(2014, 3, 31),
            created_by=self.user,
            title="Arnie",
            description="chunk event that lasts 3 days and repeats monthly.",
            repeat="MONTHLY"
        )
        x = [i for i in xrange(25, 32)]
        y = [i for i in xrange(25, 31)]
        z = {'03': x, '04': y, '05': x}
        valid_dates = dict.fromkeys(['2014', '2015', '2016'], z)
        self.check_dates(event, valid_dates)

Example 159

    def test_weekly_repeating_chunk_six_days_span_two_months(self):
        """
        Test a weekly repeating chunk that lasts six days and
        spans 2 different months when it starts.
        """
        event = create_event(
            start_date=(2014, 3, 28),
            end_date=(2014, 4, 2),
            created_by=self.user,
            title="Khloe",
            description="'chunk' event that lasts 6 days and repeats weekly.",
            repeat="WEEKLY"
        )
        invalid_dates = {
            '2014': {
                '04': [3, 10, 17, 24],
                '05': [1, 8, 15, 22, 29],
                '06': [5, 12, 19, 26],
                '07': [3, 10, 17, 24, 31],
                '08': [7, 14, 21, 28],
                '09': [4, 11, 18, 25],
                '10': [2, 9, 16, 23, 30],
                '11': [6, 13, 20, 27],
                '12': [4, 11, 18, 25],
            },
            '2015': {
                '01': [1, 8, 15, 22, 29],
                '02': [5, 12, 19, 26],
                '03': [5, 12, 19, 26],
                '04': [2, 9, 16, 23, 30],
            }
        }

        for year, dates in invalid_dates.items():
            for month, days in dates.items():
                response = self.client.get(reverse(
                    'calendar:list', kwargs={'year': year, 'month': month}
                ))
                self.clean_whitespace(response)
                self.assertContains(response, event.title)

                for day in days:
                    self.assertNotContains(response, self.cal_str(day))

                [self.assertContains(response, self.cal_str(day)) for
                 day in xrange(1, monthrange(2014, int(month))[1] + 1)
                 if day not in days]

Example 160

    def test_weekly_repeating_chunk_three_days_span_one_month3(self):
        """
        Test a weekly repeating chunk that lasts seven days with
        start_date and end_date in the same month. This should result in
        every day in the month being filled in.
        """
        event = create_event(
            start_date=(2014, 3, 25),
            end_date=(2014, 3, 31),
            created_by=self.user,
            title="Snoop",
            description="Erry day",
            repeat="WEEKLY"
        )
        days_31 = [x for x in xrange(1, 32)]
        days_30 = [x for x in xrange(1, 31)]
        dm = dict.fromkeys(['5', '7', '8', '10', '12'], days_31)
        dm2 = dict.fromkeys(['6', '9', '11'], days_30)
        dm.update(dm2)
        valid_dates = dict.fromkeys(['2014', '2015'], dm)
        self.check_dates(event, valid_dates)

Example 161

Project: django-happenings
Source File: test_repeat.py
    def check_dates(self, valid_dates, title, one_year=True):
        """
        Checks the dates in the given valid_dates dict. Also check that the
        event title appears on the page when it's supposed to.
        """
        for year, dates in valid_dates.items():
            for month, days in dates.items():
                response = self.client.get(reverse(
                    'calendar:list', kwargs={'year': year, 'month': month}
                ))
                self.clean_whitespace(response)
                if days:
                    self.assertContains(response, title)
                else:
                    self.assertNotContains(response, title)

                for day in days:
                    # days given should appear on the calendar
                    [self.assertContains(response, self.cal_str(day)) for
                     day in days]
                    # days not given should not appear on the calendar
                    [self.assertNotContains(response, self.cal_str(day)) for
                     day in xrange(1, monthrange(2014, int(month))[1] + 1)
                     if day not in days]
                if one_year:
                    response = self.client.get(reverse(
                        'calendar:list', kwargs={
                            'year': str(int(year) + 1), 'month': month
                        }
                    ))
                    self.clean_whitespace(response)
                    self.assertNotContains(response, title)

Example 162

Project: django-happenings
Source File: test_repeat.py
    def test_list_view_with_daily_repeat(self):
        event = create_event(
            start_date=(2014, 4, 1),
            end_date=(2014, 4, 1),
            created_by=self.user,
            title="Casey",
            description="I repeat every day with no end in sight.",
            repeat='DAILY',
        )
        m = ['04', '05', '06']
        y = ['2014', '2015', '2016']
        valid_dates = dict.fromkeys(y, m)
        for year, months in valid_dates.items():
            for month in months:
                response = self.client.get(reverse(
                    'calendar:list', kwargs={'year': year, 'month': month}
                ))
                self.clean_whitespace(response)
                self.assertContains(response, event.title)
                [self.assertContains(response, self.cal_str(day)) for
                 day in xrange(1, monthrange(2014, int(month) + 1)[1])]

        # The month before start_date shouldn't have any events
        response = self.client.get(reverse(
            'calendar:list', kwargs={'year': '2014', 'month': '03'}
        ))
        self.clean_whitespace(response)
        self.assertNotContains(response, event.title)
        self.assertNotContains(response, self.event_div)

Example 163

Project: hue
Source File: geometries.py
    def transform(self, coord_trans, clone=False):
        """
        Transforms this geometry to a different spatial reference system.
        May take a CoordTransform object, a SpatialReference object, string
        WKT or PROJ.4, and/or an integer SRID.  By default nothing is returned
        and the geometry is transformed in-place.  However, if the `clone`
        keyword is set, then a transformed clone of this geometry will be
        returned.
        """
        if clone:
            klone = self.clone()
            klone.transform(coord_trans)
            return klone

        # Have to get the coordinate dimension of the original geometry
        # so it can be used to reset the transformed geometry's dimension
        # afterwards.  This is done because of GDAL bug (in versions prior
        # to 1.7) that turns geometries 3D after transformation, see:
        #  http://trac.osgeo.org/gdal/changeset/17792
        if GDAL_VERSION < (1, 7):
            orig_dim = self.coord_dim

        # Depending on the input type, use the appropriate OGR routine
        # to perform the transformation.
        if isinstance(coord_trans, CoordTransform):
            capi.geom_transform(self.ptr, coord_trans.ptr)
        elif isinstance(coord_trans, SpatialReference):
            capi.geom_transform_to(self.ptr, coord_trans.ptr)
        elif isinstance(coord_trans, six.integer_types + six.string_types):
            sr = SpatialReference(coord_trans)
            capi.geom_transform_to(self.ptr, sr.ptr)
        else:
            raise TypeError('Transform only accepts CoordTransform, '
                            'SpatialReference, string, and integer objects.')

        # Setting with original dimension, see comment above.
        if GDAL_VERSION < (1, 7):
            if isinstance(self, GeometryCollection):
                # With geometry collections have to set dimension on
                # each internal geometry reference, as the collection
                # dimension isn't affected.
                for i in xrange(len(self)):
                    internal_ptr = capi.get_geom_ref(self.ptr, i)
                    if orig_dim != capi.get_coord_dim(internal_ptr):
                        capi.set_coord_dim(internal_ptr, orig_dim)
            else:
                if self.coord_dim != orig_dim:
                    self.coord_dim = orig_dim
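
For context, the transform() method above is also reachable through the public GDAL geometry API; a minimal sketch assuming GDAL is installed (the coordinates and target SRID are arbitrary):

from django.contrib.gis.gdal import OGRGeometry, SpatialReference

# Reproject a WGS84 point to UTM zone 13N; clone=True returns a new
# geometry instead of transforming in place.
pnt = OGRGeometry('POINT (-104 36)', SpatialReference('WGS84'))
utm = pnt.transform(32613, clone=True)
print(utm.wkt)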

Example 164

Project: hue
Source File: linestring.py
    def __init__(self, *args, **kwargs):
        """
        Initializes on the given sequence -- may take lists, tuples, NumPy arrays
        of X,Y pairs, or Point objects.  If Point objects are used, ownership is
        _not_ transferred to the LineString object.

        Examples:
         ls = LineString((1, 1), (2, 2))
         ls = LineString([(1, 1), (2, 2)])
         ls = LineString(array([(1, 1), (2, 2)]))
         ls = LineString(Point(1, 1), Point(2, 2))
        """
        # If only one argument provided, set the coords array appropriately
        if len(args) == 1: coords = args[0]
        else: coords = args

        if isinstance(coords, (tuple, list)):
            # Getting the number of coords and the number of dimensions -- which
            #  must stay the same, e.g., no LineString((1, 2), (1, 2, 3)).
            ncoords = len(coords)
            if coords: ndim = len(coords[0])
            else: raise TypeError('Cannot initialize on empty sequence.')
            self._checkdim(ndim)
            # Incrementing through each of the coordinates and verifying
            for i in xrange(1, ncoords):
                if not isinstance(coords[i], (tuple, list, Point)):
                    raise TypeError('each coordinate should be a sequence (list or tuple)')
                if len(coords[i]) != ndim: raise TypeError('Dimension mismatch.')
            numpy_coords = False
        elif numpy and isinstance(coords, numpy.ndarray):
            shape = coords.shape # Using numpy's shape.
            if len(shape) != 2: raise TypeError('Too many dimensions.')
            self._checkdim(shape[1])
            ncoords = shape[0]
            ndim = shape[1]
            numpy_coords = True
        else:
            raise TypeError('Invalid initialization input for LineStrings.')

        # Creating a coordinate sequence object because it is easier to
        # set the points using GEOSCoordSeq.__setitem__().
        cs = GEOSCoordSeq(capi.create_cs(ncoords, ndim), z=bool(ndim==3))

        for i in xrange(ncoords):
            if numpy_coords: cs[i] = coords[i,:]
            elif isinstance(coords[i], Point): cs[i] = coords[i].tuple
            else: cs[i] = coords[i]

        # If SRID was passed in with the keyword arguments
        srid = kwargs.get('srid', None)

        # Calling the base geometry initialization with the returned pointer
        #  from the function.
        super(LineString, self).__init__(self._init_func(cs.ptr), srid=srid)

Example 165

Project: hue
Source File: test_geos.py
    def test_mutable_geometries(self):
        "Testing the mutability of Polygons and Geometry Collections."
        ### Testing the mutability of Polygons ###
        for p in self.geometries.polygons:
            poly = fromstr(p.wkt)

            # Should only be able to use __setitem__ with LinearRing geometries.
            self.assertRaises(TypeError, poly.__setitem__, 0, LineString((1, 1), (2, 2)))

            # Constructing the new shell by adding 500 to every point in the old shell.
            shell_tup = poly.shell.tuple
            new_coords = []
            for point in shell_tup: new_coords.append((point[0] + 500., point[1] + 500.))
            new_shell = LinearRing(*tuple(new_coords))

            # Assigning polygon's exterior ring w/the new shell
            poly.exterior_ring = new_shell
            s = str(new_shell) # new shell is still accessible
            self.assertEqual(poly.exterior_ring, new_shell)
            self.assertEqual(poly[0], new_shell)

        ### Testing the mutability of Geometry Collections
        for tg in self.geometries.multipoints:
            mp = fromstr(tg.wkt)
            for i in range(len(mp)):
                # Creating a random point.
                pnt = mp[i]
                new = Point(random.randint(21, 100), random.randint(21, 100))
                # Testing the assignment
                mp[i] = new
                s = str(new) # what was used for the assignment is still accessible
                self.assertEqual(mp[i], new)
                self.assertEqual(mp[i].wkt, new.wkt)
                self.assertNotEqual(pnt, mp[i])

        # MultiPolygons involve much more memory management because each
        # Polygon w/in the collection has its own rings.
        for tg in self.geometries.multipolygons:
            mpoly = fromstr(tg.wkt)
            for i in xrange(len(mpoly)):
                poly = mpoly[i]
                old_poly = mpoly[i]
                # Offsetting each ring in the polygon by 500.
                for j in xrange(len(poly)):
                    r = poly[j]
                    for k in xrange(len(r)): r[k] = (r[k][0] + 500., r[k][1] + 500.)
                    poly[j] = r

                self.assertNotEqual(mpoly[i], poly)
                # Testing the assignment
                mpoly[i] = poly
                s = str(poly) # Still accessible
                self.assertEqual(mpoly[i], poly)
                self.assertNotEqual(mpoly[i], old_poly)

Example 166

Project: hue
Source File: where.py
    def make_atom(self, child, qn, connection):
        """
        Turn a tuple (Constraint(table_alias, column_name, db_type),
        lookup_type, value_annotation, params) into valid SQL.

        The first item of the tuple may also be an Aggregate.

        Returns the string for the SQL fragment and the parameters to use for
        it.
        """
        lvalue, lookup_type, value_annotation, params_or_value = child
        field_internal_type = lvalue.field.get_internal_type() if lvalue.field else None
        
        if isinstance(lvalue, Constraint):
            try:
                lvalue, params = lvalue.process(lookup_type, params_or_value, connection)
            except EmptyShortCircuit:
                raise EmptyResultSet
        elif isinstance(lvalue, Aggregate):
            params = lvalue.field.get_db_prep_lookup(lookup_type, params_or_value, connection)
        else:
            raise TypeError("'make_atom' expects a Constraint or an Aggregate "
                            "as the first item of its 'child' argument.")

        if isinstance(lvalue, tuple):
            # A direct database column lookup.
            field_sql, field_params = self.sql_for_columns(lvalue, qn, connection, field_internal_type), []
        else:
            # A smart object with an as_sql() method.
            field_sql, field_params = lvalue.as_sql(qn, connection)

        is_datetime_field = value_annotation is datetime.datetime
        cast_sql = connection.ops.datetime_cast_sql() if is_datetime_field else '%s'

        if hasattr(params, 'as_sql'):
            extra, params = params.as_sql(qn, connection)
            cast_sql = ''
        else:
            extra = ''

        params = field_params + params

        if (len(params) == 1 and params[0] == '' and lookup_type == 'exact'
            and connection.features.interprets_empty_strings_as_nulls):
            lookup_type = 'isnull'
            value_annotation = True

        if lookup_type in connection.operators:
            format = "%s %%s %%s" % (connection.ops.lookup_cast(lookup_type),)
            return (format % (field_sql,
                              connection.operators[lookup_type] % cast_sql,
                              extra), params)

        if lookup_type == 'in':
            if not value_annotation:
                raise EmptyResultSet
            if extra:
                return ('%s IN %s' % (field_sql, extra), params)
            max_in_list_size = connection.ops.max_in_list_size()
            if max_in_list_size and len(params) > max_in_list_size:
                # Break up the params list into an OR of manageable chunks.
                in_clause_elements = ['(']
                for offset in xrange(0, len(params), max_in_list_size):
                    if offset > 0:
                        in_clause_elements.append(' OR ')
                    in_clause_elements.append('%s IN (' % field_sql)
                    group_size = min(len(params) - offset, max_in_list_size)
                    param_group = ', '.join(repeat('%s', group_size))
                    in_clause_elements.append(param_group)
                    in_clause_elements.append(')')
                in_clause_elements.append(')')
                return ''.join(in_clause_elements), params
            else:
                return ('%s IN (%s)' % (field_sql,
                                        ', '.join(repeat('%s', len(params)))),
                        params)
        elif lookup_type in ('range', 'year'):
            return ('%s BETWEEN %%s and %%s' % field_sql, params)
        elif is_datetime_field and lookup_type in ('month', 'day', 'week_day',
                                                   'hour', 'minute', 'second'):
            tzname = timezone.get_current_timezone_name() if settings.USE_TZ else None
            sql, tz_params = connection.ops.datetime_extract_sql(lookup_type, field_sql, tzname)
            return ('%s = %%s' % sql, tz_params + params)
        elif lookup_type in ('month', 'day', 'week_day'):
            return ('%s = %%s'
                    % connection.ops.date_extract_sql(lookup_type, field_sql), params)
        elif lookup_type == 'isnull':
            assert value_annotation in (True, False), "Invalid value_annotation for isnull"
            return ('%s IS %sNULL' % (field_sql, ('' if value_annotation else 'NOT ')), ())
        elif lookup_type == 'search':
            return (connection.ops.fulltext_search_sql(field_sql), params)
        elif lookup_type in ('regex', 'iregex'):
            return connection.ops.regex_lookup(lookup_type) % (field_sql, cast_sql), params

        raise TypeError('Invalid lookup_type: %r' % lookup_type)
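
The 'in' branch above splits an oversized parameter list into OR-joined chunks whenever the backend reports a max_in_list_size (Oracle, for instance, caps IN lists at 1000 items). A standalone sketch of that chunking pattern, with a hypothetical column name and limit, independent of Django's WhereNode:

from itertools import repeat

def chunked_in_clause(field_sql, params, max_in_list_size):
    # Build "col IN (%s, ...) OR col IN (%s, ...)" when params exceeds the limit.
    pieces = []
    for offset in range(0, len(params), max_in_list_size):
        group_size = min(len(params) - offset, max_in_list_size)
        pieces.append('%s IN (%s)' % (field_sql, ', '.join(repeat('%s', group_size))))
    return '(%s)' % ' OR '.join(pieces), params

sql, params = chunked_in_clause('"shop_order"."id"', list(range(2500)), 1000)
# sql contains three IN groups of 1000, 1000 and 500 placeholders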

Example 167

Project: hue
Source File: dispatcher.py
    def disconnect(self, receiver=None, sender=None, weak=True, dispatch_uid=None):
        """
        Disconnect receiver from sender for signal.

        If weak references are used, disconnect need not be called. The receiver
        will be removed from dispatch automatically.

        Arguments:

            receiver
                The registered receiver to disconnect. May be none if
                dispatch_uid is specified.

            sender
                The registered sender to disconnect

            weak
                The weakref state to disconnect

            dispatch_uid
                the unique identifier of the receiver to disconnect
        """
        if dispatch_uid:
            lookup_key = (dispatch_uid, _make_id(sender))
        else:
            lookup_key = (_make_id(receiver), _make_id(sender))

        with self.lock:
            for index in xrange(len(self.receivers)):
                (r_key, _) = self.receivers[index]
                if r_key == lookup_key:
                    del self.receivers[index]
                    break
            self.sender_receivers_cache.clear()
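
For context, the same disconnect-by-dispatch_uid flow is reachable through Django's public signal API; a minimal sketch with a hypothetical signal and receiver:

from django.dispatch import Signal

order_placed = Signal()  # hypothetical signal

def notify(sender, **kwargs):
    print('order placed by %s' % sender)

# Registering with a dispatch_uid lets us disconnect later without keeping
# a reference to the receiver function around.
order_placed.connect(notify, dispatch_uid='notify-on-order')
order_placed.disconnect(dispatch_uid='notify-on-order')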

Example 168

Project: hue
Source File: crypto.py
def pbkdf2(password, salt, iterations, dklen=0, digest=None):
    """
    Implements PBKDF2 as defined in RFC 2898, section 5.2

    HMAC+SHA256 is used as the default pseudo random function.

    As of 2011, 10,000 iterations was the recommended default which
    took 100ms on a 2.2Ghz Core 2 Duo. This is probably the bare
    minimum for security given 1000 iterations was recommended in
    2001. This code is very well optimized for CPython and is only
    four times slower than openssl's implementation. Look in
    django.contrib.auth.hashers for the present default.
    """
    assert iterations > 0
    if not digest:
        digest = hashlib.sha256
    password = force_bytes(password)
    salt = force_bytes(salt)
    hlen = digest().digest_size
    if not dklen:
        dklen = hlen
    if dklen > (2 ** 32 - 1) * hlen:
        raise OverflowError('dklen too big')
    l = -(-dklen // hlen)
    r = dklen - (l - 1) * hlen

    hex_format_string = "%%0%ix" % (hlen * 2)

    inner, outer = digest(), digest()
    if len(password) > inner.block_size:
        password = digest(password).digest()
    password += b'\x00' * (inner.block_size - len(password))
    inner.update(password.translate(hmac.trans_36))
    outer.update(password.translate(hmac.trans_5C))

    def F(i):
        def U():
            u = salt + struct.pack(b'>I', i)
            for j in xrange(int(iterations)):
                dig1, dig2 = inner.copy(), outer.copy()
                dig1.update(u)
                dig2.update(dig1.digest())
                u = dig2.digest()
                yield _bin_to_long(u)
        return _long_to_bin(reduce(operator.xor, U()), hex_format_string)

    T = [F(x) for x in range(1, l + 1)]
    return b''.join(T[:-1]) + T[-1][:r]
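
For reference, the public wrapper around this implementation can be called directly; a minimal sketch with arbitrary inputs (with the default SHA-256 digest, dklen falls back to the 32-byte digest size):

from django.utils.crypto import pbkdf2

key = pbkdf2('secret-password', 'per-user-salt', 10000)
print(len(key))  # 32 bytes with the default SHA-256 digest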

Example 169

Project: hue
Source File: ipv6.py
def _explode_shorthand_ip_string(ip_str):
    """
    Expand a shortened IPv6 address.

    Args:
        ip_str: A string, the IPv6 address.

    Returns:
        A string, the expanded IPv6 address.

    """
    if not _is_shorthand_ip(ip_str):
        # We've already got a longhand ip_str.
        return ip_str

    new_ip = []
    hextet = ip_str.split('::')

    # If there is a ::, we need to expand it with zeroes
    # to get to 8 hextets - unless there is a dot in the last hextet,
    # meaning we're doing v4-mapping
    if '.' in ip_str.split(':')[-1]:
        fill_to = 7
    else:
        fill_to = 8

    if len(hextet) > 1:
        sep = len(hextet[0].split(':')) + len(hextet[1].split(':'))
        new_ip = hextet[0].split(':')

        for _ in xrange(fill_to - sep):
            new_ip.append('0000')
        new_ip += hextet[1].split(':')

    else:
        new_ip = ip_str.split(':')

    # Now need to make sure every hextet is 4 lower case characters.
    # If a hextet is < 4 characters, we've got missing leading 0's.
    ret_ip = []
    for hextet in new_ip:
        ret_ip.append(('0' * (4 - len(hextet)) + hextet).lower())
    return ':'.join(ret_ip)
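
Worked by hand from the code above (not an official doctest), expanding '::1' inserts six zero hextets and left-pads every group to four characters; this assumes an older Django that still ships the private django.utils.ipv6 helper:

from django.utils.ipv6 import _explode_shorthand_ip_string  # private helper, older Django

print(_explode_shorthand_ip_string('::1'))
# 0000:0000:0000:0000:0000:0000:0000:0001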

Example 170

Project: decode-Django
Source File: geometries.py
    def transform(self, coord_trans, clone=False):
        """
        Transforms this geometry to a different spatial reference system.
        May take a CoordTransform object, a SpatialReference object, string
        WKT or PROJ.4, and/or an integer SRID.  By default nothing is returned
        and the geometry is transformed in-place.  However, if the `clone`
        keyword is set, then a transformed clone of this geometry will be
        returned.
        """
        if clone:
            klone = self.clone()
            klone.transform(coord_trans)
            return klone

        # Have to get the coordinate dimension of the original geometry
        # so it can be used to reset the transformed geometry's dimension
        # afterwards.  This is done because of GDAL bug (in versions prior
        # to 1.7) that turns geometries 3D after transformation, see:
        #  http://trac.osgeo.org/gdal/changeset/17792
        if GDAL_VERSION < (1, 7):
            orig_dim = self.coord_dim

        # Depending on the input type, use the appropriate OGR routine
        # to perform the transformation.
        if isinstance(coord_trans, CoordTransform):
            capi.geom_transform(self.ptr, coord_trans.ptr)
        elif isinstance(coord_trans, SpatialReference):
            capi.geom_transform_to(self.ptr, coord_trans.ptr)
        elif isinstance(coord_trans, six.integer_types + six.string_types):
            sr = SpatialReference(coord_trans)
            capi.geom_transform_to(self.ptr, sr.ptr)
        else:
            raise TypeError('Transform only accepts CoordTransform, '
                            'SpatialReference, string, and integer objects.')

        # Setting with original dimension, see comment above.
        if GDAL_VERSION < (1, 7):
            if isinstance(self, GeometryCollection):
                # With geometry collections have to set dimension on
                # each internal geometry reference, as the collection
                # dimension isn't affected.
                for i in xrange(len(self)):
                    internal_ptr = capi.get_geom_ref(self.ptr, i)
                    if orig_dim != capi.get_coord_dim(internal_ptr):
                        capi.set_coord_dim(internal_ptr, orig_dim)
            else:
                if self.coord_dim != orig_dim:
                    self.coord_dim = orig_dim

Example 171

Project: decode-Django
Source File: linestring.py
    def __init__(self, *args, **kwargs):
        """
        Initializes on the given sequence -- may take lists, tuples, NumPy arrays
        of X,Y pairs, or Point objects.  If Point objects are used, ownership is
        _not_ transferred to the LineString object.

        Examples:
         ls = LineString((1, 1), (2, 2))
         ls = LineString([(1, 1), (2, 2)])
         ls = LineString(array([(1, 1), (2, 2)]))
         ls = LineString(Point(1, 1), Point(2, 2))
        """
        # If only one argument provided, set the coords array appropriately
        if len(args) == 1: coords = args[0]
        else: coords = args

        if isinstance(coords, (tuple, list)):
            # Getting the number of coords and the number of dimensions -- which
            #  must stay the same, e.g., no LineString((1, 2), (1, 2, 3)).
            ncoords = len(coords)
            if coords: ndim = len(coords[0])
            else: raise TypeError('Cannot initialize on empty sequence.')
            self._checkdim(ndim)
            # Incrementing through each of the coordinates and verifying
            for i in xrange(1, ncoords):
                if not isinstance(coords[i], (tuple, list, Point)):
                    raise TypeError('each coordinate should be a sequence (list or tuple)')
                if len(coords[i]) != ndim: raise TypeError('Dimension mismatch.')
            numpy_coords = False
        elif numpy and isinstance(coords, numpy.ndarray):
            shape = coords.shape # Using numpy's shape.
            if len(shape) != 2: raise TypeError('Too many dimensions.')
            self._checkdim(shape[1])
            ncoords = shape[0]
            ndim = shape[1]
            numpy_coords = True
        else:
            raise TypeError('Invalid initialization input for LineStrings.')

        # Creating a coordinate sequence object because it is easier to
        # set the points using GEOSCoordSeq.__setitem__().
        cs = GEOSCoordSeq(capi.create_cs(ncoords, ndim), z=bool(ndim==3))

        for i in xrange(ncoords):
            if numpy_coords: cs[i] = coords[i,:]
            elif isinstance(coords[i], Point): cs[i] = coords[i].tuple
            else: cs[i] = coords[i]

        # If SRID was passed in with the keyword arguments
        srid = kwargs.get('srid', None)

        # Calling the base geometry initialization with the returned pointer
        #  from the function.
        super(LineString, self).__init__(self._init_func(cs.ptr), srid=srid)

Example 172

Project: decode-Django
Source File: test_geos.py
    def test_mutable_geometries(self):
        "Testing the mutability of Polygons and Geometry Collections."
        ### Testing the mutability of Polygons ###
        for p in self.geometries.polygons:
            poly = fromstr(p.wkt)

            # Should only be able to use __setitem__ with LinearRing geometries.
            self.assertRaises(TypeError, poly.__setitem__, 0, LineString((1, 1), (2, 2)))

            # Constructing the new shell by adding 500 to every point in the old shell.
            shell_tup = poly.shell.tuple
            new_coords = []
            for point in shell_tup: new_coords.append((point[0] + 500., point[1] + 500.))
            new_shell = LinearRing(*tuple(new_coords))

            # Assigning polygon's exterior ring w/the new shell
            poly.exterior_ring = new_shell
            s = str(new_shell) # new shell is still accessible
            self.assertEqual(poly.exterior_ring, new_shell)
            self.assertEqual(poly[0], new_shell)

        ### Testing the mutability of Geometry Collections
        for tg in self.geometries.multipoints:
            mp = fromstr(tg.wkt)
            for i in range(len(mp)):
                # Creating a random point.
                pnt = mp[i]
                new = Point(random.randint(21, 100), random.randint(21, 100))
                # Testing the assignment
                mp[i] = new
                s = str(new) # what was used for the assignment is still accessible
                self.assertEqual(mp[i], new)
                self.assertEqual(mp[i].wkt, new.wkt)
                self.assertNotEqual(pnt, mp[i])

        # MultiPolygons involve much more memory management because each
        # Polygon w/in the collection has its own rings.
        for tg in self.geometries.multipolygons:
            mpoly = fromstr(tg.wkt)
            for i in xrange(len(mpoly)):
                poly = mpoly[i]
                old_poly = mpoly[i]
                # Offsetting each ring in the polygon by 500.
                for j in xrange(len(poly)):
                    r = poly[j]
                    for k in xrange(len(r)): r[k] = (r[k][0] + 500., r[k][1] + 500.)
                    poly[j] = r

                self.assertNotEqual(mpoly[i], poly)
                # Testing the assignment
                mpoly[i] = poly
                s = str(poly) # Still accessible
                self.assertEqual(mpoly[i], poly)
                self.assertNotEqual(mpoly[i], old_poly)

Example 173

Project: decode-Django
Source File: where.py
    def make_atom(self, child, qn, connection):
        """
        Turn a tuple (Constraint(table_alias, column_name, db_type),
        lookup_type, value_annotation, params) into valid SQL.

        The first item of the tuple may also be an Aggregate.

        Returns the string for the SQL fragment and the parameters to use for
        it.
        """
        lvalue, lookup_type, value_annotation, params_or_value = child
        if isinstance(lvalue, Constraint):
            try:
                lvalue, params = lvalue.process(lookup_type, params_or_value, connection)
            except EmptyShortCircuit:
                raise EmptyResultSet
        elif isinstance(lvalue, Aggregate):
            params = lvalue.field.get_db_prep_lookup(lookup_type, params_or_value, connection)
        else:
            raise TypeError("'make_atom' expects a Constraint or an Aggregate "
                            "as the first item of its 'child' argument.")

        if isinstance(lvalue, tuple):
            # A direct database column lookup.
            field_sql = self.sql_for_columns(lvalue, qn, connection)
        else:
            # A smart object with an as_sql() method.
            field_sql = lvalue.as_sql(qn, connection)

        if value_annotation is datetime.datetime:
            cast_sql = connection.ops.datetime_cast_sql()
        else:
            cast_sql = '%s'

        if hasattr(params, 'as_sql'):
            extra, params = params.as_sql(qn, connection)
            cast_sql = ''
        else:
            extra = ''

        if (len(params) == 1 and params[0] == '' and lookup_type == 'exact'
            and connection.features.interprets_empty_strings_as_nulls):
            lookup_type = 'isnull'
            value_annotation = True

        if lookup_type in connection.operators:
            format = "%s %%s %%s" % (connection.ops.lookup_cast(lookup_type),)
            return (format % (field_sql,
                              connection.operators[lookup_type] % cast_sql,
                              extra), params)

        if lookup_type == 'in':
            if not value_annotation:
                raise EmptyResultSet
            if extra:
                return ('%s IN %s' % (field_sql, extra), params)
            max_in_list_size = connection.ops.max_in_list_size()
            if max_in_list_size and len(params) > max_in_list_size:
                # Break up the params list into an OR of manageable chunks.
                in_clause_elements = ['(']
                for offset in xrange(0, len(params), max_in_list_size):
                    if offset > 0:
                        in_clause_elements.append(' OR ')
                    in_clause_elements.append('%s IN (' % field_sql)
                    group_size = min(len(params) - offset, max_in_list_size)
                    param_group = ', '.join(repeat('%s', group_size))
                    in_clause_elements.append(param_group)
                    in_clause_elements.append(')')
                in_clause_elements.append(')')
                return ''.join(in_clause_elements), params
            else:
                return ('%s IN (%s)' % (field_sql,
                                        ', '.join(repeat('%s', len(params)))),
                        params)
        elif lookup_type in ('range', 'year'):
            return ('%s BETWEEN %%s and %%s' % field_sql, params)
        elif lookup_type in ('month', 'day', 'week_day'):
            return ('%s = %%s' % connection.ops.date_extract_sql(lookup_type, field_sql),
                    params)
        elif lookup_type == 'isnull':
            return ('%s IS %sNULL' % (field_sql,
                (not value_annotation and 'NOT ' or '')), ())
        elif lookup_type == 'search':
            return (connection.ops.fulltext_search_sql(field_sql), params)
        elif lookup_type in ('regex', 'iregex'):
            return connection.ops.regex_lookup(lookup_type) % (field_sql, cast_sql), params

        raise TypeError('Invalid lookup_type: %r' % lookup_type)

Example 174

Project: decode-Django
Source File: dispatcher.py
    def disconnect(self, receiver=None, sender=None, weak=True, dispatch_uid=None):
        """
        Disconnect receiver from sender for signal.

        If weak references are used, disconnect need not be called. The receiver
        will be removed from dispatch automatically.

        Arguments:

            receiver
                The registered receiver to disconnect. May be none if
                dispatch_uid is specified.

            sender
                The registered sender to disconnect

            weak
                The weakref state to disconnect

            dispatch_uid
                the unique identifier of the receiver to disconnect
        """
        # dispatch_uid can be computed
        if dispatch_uid:
            lookup_key = (dispatch_uid, _make_id(sender))
        else:
            lookup_key = (_make_id(receiver), _make_id(sender))

        with self.lock:
            for index in xrange(len(self.receivers)):
                (r_key, _) = self.receivers[index]
                if r_key == lookup_key:
                    del self.receivers[index]
                    break

Example 175

Project: decode-Django
Source File: crypto.py
def pbkdf2(password, salt, iterations, dklen=0, digest=None):
    """
    Implements PBKDF2 as defined in RFC 2898, section 5.2

    HMAC+SHA256 is used as the default pseudo random function.

    Right now 10,000 iterations is the recommended default which takes
    100ms on a 2.2Ghz Core 2 Duo.  This is probably the bare minimum
    for security given 1000 iterations was recommended in 2001. This
    code is very well optimized for CPython and is only four times
    slower than openssl's implementation.
    """
    assert iterations > 0
    if not digest:
        digest = hashlib.sha256
    password = force_bytes(password)
    salt = force_bytes(salt)
    hlen = digest().digest_size
    if not dklen:
        dklen = hlen
    if dklen > (2 ** 32 - 1) * hlen:
        raise OverflowError('dklen too big')
    l = -(-dklen // hlen)
    r = dklen - (l - 1) * hlen

    hex_format_string = "%%0%ix" % (hlen * 2)

    def F(i):
        def U():
            u = salt + struct.pack(b'>I', i)
            for j in xrange(int(iterations)):
                u = _fast_hmac(password, u, digest).digest()
                yield _bin_to_long(u)
        return _long_to_bin(reduce(operator.xor, U()), hex_format_string)

    T = [F(x) for x in range(1, l + 1)]
    return b''.join(T[:-1]) + T[-1][:r]

Example 176

Project: decode-Django
Source File: ipv6.py
def _explode_shorthand_ip_string(ip_str):
    """
    Expand a shortened IPv6 address.

    Args:
        ip_str: A string, the IPv6 address.

    Returns:
        A string, the expanded IPv6 address.

    """
    if not _is_shorthand_ip(ip_str):
        # We've already got a longhand ip_str.
        return ip_str

    new_ip = []
    hextet = ip_str.split('::')

    # If there is a ::, we need to expand it with zeroes
    # to get to 8 hextets - unless there is a dot in the last hextet,
    # meaning we're doing v4-mapping
    if '.' in ip_str.split(':')[-1]:
        fill_to = 7
    else:
        fill_to = 8

    if len(hextet) > 1:
        sep = len(hextet[0].split(':')) + len(hextet[1].split(':'))
        new_ip = hextet[0].split(':')

        for _ in xrange(fill_to - sep):
            new_ip.append('0000')
        new_ip += hextet[1].split(':')

    else:
        new_ip = ip_str.split(':')

    # Now need to make sure every hextet is 4 lower case characters.
    # If a hextet is < 4 characters, we've got missing leading 0's.
    ret_ip = []
    for hextet in new_ip:
        ret_ip.append(('0' * (4 - len(hextet)) + hextet).lower())
    return ':'.join(ret_ip)

Example 177

Project: golismero
Source File: where.py
    def make_atom(self, child, qn, connection):
        """
        Turn a tuple (Constraint(table_alias, column_name, db_type),
        lookup_type, value_annotation, params) into valid SQL.

        The first item of the tuple may also be an Aggregate.

        Returns the string for the SQL fragment and the parameters to use for
        it.
        """
        lvalue, lookup_type, value_annotation, params_or_value = child
        if isinstance(lvalue, Constraint):
            try:
                lvalue, params = lvalue.process(lookup_type, params_or_value, connection)
            except EmptyShortCircuit:
                raise EmptyResultSet
        elif isinstance(lvalue, Aggregate):
            params = lvalue.field.get_db_prep_lookup(lookup_type, params_or_value, connection)
        else:
            raise TypeError("'make_atom' expects a Constraint or an Aggregate "
                            "as the first item of its 'child' argument.")

        if isinstance(lvalue, tuple):
            # A direct database column lookup.
            field_sql = self.sql_for_columns(lvalue, qn, connection)
        else:
            # A smart object with an as_sql() method.
            field_sql = lvalue.as_sql(qn, connection)

        if value_annotation is datetime.datetime:
            cast_sql = connection.ops.datetime_cast_sql()
        else:
            cast_sql = '%s'

        if hasattr(params, 'as_sql'):
            extra, params = params.as_sql(qn, connection)
            cast_sql = ''
        else:
            extra = ''

        if (len(params) == 1 and params[0] == '' and lookup_type == 'exact'
            and connection.features.interprets_empty_strings_as_nulls):
            lookup_type = 'isnull'
            value_annotation = True

        if lookup_type in connection.operators:
            format = "%s %%s %%s" % (connection.ops.lookup_cast(lookup_type),)
            return (format % (field_sql,
                              connection.operators[lookup_type] % cast_sql,
                              extra), params)

        if lookup_type == 'in':
            if not value_annotation:
                raise EmptyResultSet
            if extra:
                return ('%s IN %s' % (field_sql, extra), params)
            max_in_list_size = connection.ops.max_in_list_size()
            if max_in_list_size and len(params) > max_in_list_size:
                # Break up the params list into an OR of manageable chunks.
                in_clause_elements = ['(']
                for offset in xrange(0, len(params), max_in_list_size):
                    if offset > 0:
                        in_clause_elements.append(' OR ')
                    in_clause_elements.append('%s IN (' % field_sql)
                    group_size = min(len(params) - offset, max_in_list_size)
                    param_group = ', '.join(repeat('%s', group_size))
                    in_clause_elements.append(param_group)
                    in_clause_elements.append(')')
                in_clause_elements.append(')')
                return ''.join(in_clause_elements), params
            else:
                return ('%s IN (%s)' % (field_sql,
                                        ', '.join(repeat('%s', len(params)))),
                        params)
        elif lookup_type in ('range', 'year'):
            return ('%s BETWEEN %%s and %%s' % field_sql, params)
        elif lookup_type in ('month', 'day', 'week_day'):
            return ('%s = %%s' % connection.ops.date_extract_sql(lookup_type, field_sql),
                    params)
        elif lookup_type == 'isnull':
            return ('%s IS %sNULL' % (field_sql,
                (not value_annotation and 'NOT ' or '')), ())
        elif lookup_type == 'search':
            return (connection.ops.fulltext_search_sql(field_sql), params)
        elif lookup_type in ('regex', 'iregex'):
            return connection.ops.regex_lookup(lookup_type) % (field_sql, cast_sql), params

        raise TypeError('Invalid lookup_type: %r' % lookup_type)

Example 178

Project: golismero
Source File: dispatcher.py
    def disconnect(self, receiver=None, sender=None, weak=True, dispatch_uid=None):
        """
        Disconnect receiver from sender for signal.

        If weak references are used, disconnect need not be called. The receiver
        will be removed from dispatch automatically.

        Arguments:

            receiver
                The registered receiver to disconnect. May be none if
                dispatch_uid is specified.

            sender
                The registered sender to disconnect

            weak
                The weakref state to disconnect

            dispatch_uid
                the unique identifier of the receiver to disconnect
        """
        if dispatch_uid:
            lookup_key = (dispatch_uid, _make_id(sender))
        else:
            lookup_key = (_make_id(receiver), _make_id(sender))

        with self.lock:
            for index in xrange(len(self.receivers)):
                (r_key, _) = self.receivers[index]
                if r_key == lookup_key:
                    del self.receivers[index]
                    break

Example 179

Project: golismero
Source File: formsets.py
    def _construct_forms(self):
        # instantiate all the forms and put them in self.forms
        self.forms = []
        for i in xrange(min(self.total_form_count(), self.absolute_max)):
            self.forms.append(self._construct_form(i))
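
As a rough illustration of what drives that loop, an unbound formset built with formset_factory(extra=3) reports total_form_count() == 3, so _construct_forms() builds three forms; a minimal sketch inside a configured Django project, using a hypothetical form class:

from django import forms
from django.forms.formsets import formset_factory

class NameForm(forms.Form):  # hypothetical form
    name = forms.CharField()

NameFormSet = formset_factory(NameForm, extra=3)
formset = NameFormSet()    # triggers _construct_forms()
print(len(formset.forms))  # 3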

Example 180

Project: golismero
Source File: crypto.py
def pbkdf2(password, salt, iterations, dklen=0, digest=None):
    """
    Implements PBKDF2 as defined in RFC 2898, section 5.2

    HMAC+SHA256 is used as the default pseudo random function.

    Right now 10,000 iterations is the recommended default which takes
    100ms on a 2.2Ghz Core 2 Duo.  This is probably the bare minimum
    for security given 1000 iterations was recommended in 2001. This
    code is very well optimized for CPython and is only four times
    slower than openssl's implementation.
    """
    assert iterations > 0
    if not digest:
        digest = hashlib.sha256
    password = force_bytes(password)
    salt = force_bytes(salt)
    hlen = digest().digest_size
    if not dklen:
        dklen = hlen
    if dklen > (2 ** 32 - 1) * hlen:
        raise OverflowError('dklen too big')
    l = -(-dklen // hlen)
    r = dklen - (l - 1) * hlen

    hex_format_string = "%%0%ix" % (hlen * 2)

    def F(i):
        def U():
            u = salt + struct.pack(b'>I', i)
            for j in xrange(int(iterations)):
                u = _fast_hmac(password, u, digest).digest()
                yield _bin_to_long(u)
        return _long_to_bin(reduce(operator.xor, U()), hex_format_string)

    T = [F(x) for x in range(1, l + 1)]
    return b''.join(T[:-1]) + T[-1][:r]

Example 181

Project: golismero
Source File: ipv6.py
def _explode_shorthand_ip_string(ip_str):
    """
    Expand a shortened IPv6 address.

    Args:
        ip_str: A string, the IPv6 address.

    Returns:
        A string, the expanded IPv6 address.

    """
    if not _is_shorthand_ip(ip_str):
        # We've already got a longhand ip_str.
        return ip_str

    new_ip = []
    hextet = ip_str.split('::')

    # If there is a ::, we need to expand it with zeroes
    # to get to 8 hextets - unless there is a dot in the last hextet,
    # meaning we're doing v4-mapping
    if '.' in ip_str.split(':')[-1]:
        fill_to = 7
    else:
        fill_to = 8

    if len(hextet) > 1:
        sep = len(hextet[0].split(':')) + len(hextet[1].split(':'))
        new_ip = hextet[0].split(':')

        for _ in xrange(fill_to - sep):
            new_ip.append('0000')
        new_ip += hextet[1].split(':')

    else:
        new_ip = ip_str.split(':')

    # Now need to make sure every hextet is 4 lower case characters.
    # If a hextet is < 4 characters, we've got missing leading 0's.
    ret_ip = []
    for hextet in new_ip:
        ret_ip.append(('0' * (4 - len(hextet)) + hextet).lower())
    return ':'.join(ret_ip)

Example 182

Project: django-sharding
Source File: test_fields.py
    def test_get_pk_value_on_save_calls_strategy_get_next_id(self):
        for i in xrange(100):
            instance = TestModel.objects.using('app_shard_001').create(user_pk=1)
            self.assertEqual(ShardedTestModelIDs.objects.using('app_shard_001').latest('pk').pk, instance.pk)

Example 183

    def test_returns_unique_values(self):
        sut = TableStrategy('tests.ShardedModelIDs')
        ids = [sut.get_next_id() for i in xrange(100)]
        self.assertEqual(ids, list(set(ids)))

Example 184

    def test_no_exception_raised(self):
        sut = RoundRobinRoutingStrategy(settings.DATABASES)
        [sut.pick_read_db('app_shard_001') for i in xrange(150)]

Example 185

Project: django-likert-field
Source File: test_forms.py
    def test_valid_integer_values(self):
        """These are valid responses and should raise no failures"""
        ff = LikertFormField()
        for v in xrange(50+1):
            ff.clean(v)

Example 186

Project: django-likert-field
Source File: test_forms.py
    def test_valid_integer_string_values(self):
        """These are valid responses and should raise no failures"""
        ff = LikertFormField()
        for v in xrange(50+1):
            ff.clean(str(v))

Example 187

Project: django-likert-field
Source File: test_models.py
    def test_get_prep_value_int(self):
        item = LikertField()
        for value in xrange(1, 50+1):
            self.assertIsInstance(item.get_prep_value(value), int)

Example 188

Project: django-likert-field
Source File: test_models.py
    def test_get_prep_value_strings(self):
        item = LikertField()
        for value in xrange(1, 50+1):
            self.assertIsInstance(item.get_prep_value(str(value)), int)

Example 189

Project: django-likert-field
Source File: test_forms.py
    def test_valid_integer_values(self):
        """These are valid responses and should raise no failures"""
        ff = LikertFormField()
        for v in xrange(50+1):
            ff.clean(v)

Example 190

Project: django-likert-field
Source File: test_forms.py
View license
    def test_valid_integer_string_values(self):
        """These are valid responses and should raise no failures"""
        ff = LikertFormField()
        for v in xrange(50+1):
            ff.clean(str(v))

Example 191

Project: django-likert-field
Source File: test_models.py
    def test_get_prep_value_int(self):
        item = LikertField()
        for value in xrange(1, 50+1):
            self.assertIsInstance(item.get_prep_value(value), int)

Example 192

Project: django-likert-field
Source File: test_models.py
View license
    def test_get_prep_value_strings(self):
        item = LikertField()
        for value in xrange(1, 50+1):
            self.assertIsInstance(item.get_prep_value(str(value)), int)

Example 193

Project: oh-mainline
Source File: geometries.py
View license
    def transform(self, coord_trans, clone=False):
        """
        Transforms this geometry to a different spatial reference system.
        May take a CoordTransform object, a SpatialReference object, string
        WKT or PROJ.4, and/or an integer SRID.  By default nothing is returned
        and the geometry is transformed in-place.  However, if the `clone`
        keyword is set, then a transformed clone of this geometry will be
        returned.
        """
        if clone:
            klone = self.clone()
            klone.transform(coord_trans)
            return klone

        # Have to get the coordinate dimension of the original geometry
        # so it can be used to reset the transformed geometry's dimension
        # afterwards.  This is done because of GDAL bug (in versions prior
        # to 1.7) that turns geometries 3D after transformation, see:
        #  http://trac.osgeo.org/gdal/changeset/17792
        if GDAL_VERSION < (1, 7):
            orig_dim = self.coord_dim

        # Depending on the input type, use the appropriate OGR routine
        # to perform the transformation.
        if isinstance(coord_trans, CoordTransform):
            capi.geom_transform(self.ptr, coord_trans.ptr)
        elif isinstance(coord_trans, SpatialReference):
            capi.geom_transform_to(self.ptr, coord_trans.ptr)
        elif isinstance(coord_trans, six.integer_types + six.string_types):
            sr = SpatialReference(coord_trans)
            capi.geom_transform_to(self.ptr, sr.ptr)
        else:
            raise TypeError('Transform only accepts CoordTransform, '
                            'SpatialReference, string, and integer objects.')

        # Setting with original dimension, see comment above.
        if GDAL_VERSION < (1, 7):
            if isinstance(self, GeometryCollection):
                # With geometry collections, the dimension has to be set on
                # each internal geometry reference, as the collection
                # dimension isn't affected.
                for i in xrange(len(self)):
                    internal_ptr = capi.get_geom_ref(self.ptr, i)
                    if orig_dim != capi.get_coord_dim(internal_ptr):
                        capi.set_coord_dim(internal_ptr, orig_dim)
            else:
                if self.coord_dim != orig_dim:
                    self.coord_dim = orig_dim
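
A minimal usage sketch of the call forms the docstring above allows, assuming geom is a GDAL OGRGeometry whose spatial reference is already set (illustrative only, not taken from the project source):

from django.contrib.gis.gdal import SpatialReference

# 'geom' is assumed to exist already, e.g. built from WKT with an SRS.
geom.transform(4326)                            # integer SRID, transforms in place
geom.transform(SpatialReference('WGS84'))       # SpatialReference object
wgs84_copy = geom.transform(4326, clone=True)   # returns a transformed copy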

Example 194

Project: oh-mainline
Source File: linestring.py
View license
    def __init__(self, *args, **kwargs):
        """
        Initializes on the given sequence -- may take lists, tuples, NumPy arrays
        of X,Y pairs, or Point objects.  If Point objects are used, ownership is
        _not_ transferred to the LineString object.

        Examples:
         ls = LineString((1, 1), (2, 2))
         ls = LineString([(1, 1), (2, 2)])
         ls = LineString(array([(1, 1), (2, 2)]))
         ls = LineString(Point(1, 1), Point(2, 2))
        """
        # If only one argument provided, set the coords array appropriately
        if len(args) == 1: coords = args[0]
        else: coords = args

        if isinstance(coords, (tuple, list)):
            # Getting the number of coords and the number of dimensions -- which
            #  must stay the same, e.g., no LineString((1, 2), (1, 2, 3)).
            ncoords = len(coords)
            if coords: ndim = len(coords[0])
            else: raise TypeError('Cannot initialize on empty sequence.')
            self._checkdim(ndim)
            # Incrementing through each of the coordinates and verifying
            for i in xrange(1, ncoords):
                if not isinstance(coords[i], (tuple, list, Point)):
                    raise TypeError('each coordinate should be a sequence (list or tuple)')
                if len(coords[i]) != ndim: raise TypeError('Dimension mismatch.')
            numpy_coords = False
        elif numpy and isinstance(coords, numpy.ndarray):
            shape = coords.shape # Using numpy's shape.
            if len(shape) != 2: raise TypeError('Too many dimensions.')
            self._checkdim(shape[1])
            ncoords = shape[0]
            ndim = shape[1]
            numpy_coords = True
        else:
            raise TypeError('Invalid initialization input for LineStrings.')

        # Creating a coordinate sequence object because it is easier to
        # set the points using GEOSCoordSeq.__setitem__().
        cs = GEOSCoordSeq(capi.create_cs(ncoords, ndim), z=bool(ndim==3))

        for i in xrange(ncoords):
            if numpy_coords: cs[i] = coords[i,:]
            elif isinstance(coords[i], Point): cs[i] = coords[i].tuple
            else: cs[i] = coords[i]

        # If SRID was passed in with the keyword arguments
        srid = kwargs.get('srid', None)

        # Calling the base geometry initialization with the returned pointer
        #  from the function.
        super(LineString, self).__init__(self._init_func(cs.ptr), srid=srid)

Example 195

Project: oh-mainline
Source File: test_geos.py
View license
    def test_mutable_geometries(self):
        "Testing the mutability of Polygons and Geometry Collections."
        ### Testing the mutability of Polygons ###
        for p in self.geometries.polygons:
            poly = fromstr(p.wkt)

            # Should only be able to use __setitem__ with LinearRing geometries.
            self.assertRaises(TypeError, poly.__setitem__, 0, LineString((1, 1), (2, 2)))

            # Constructing the new shell by adding 500 to every point in the old shell.
            shell_tup = poly.shell.tuple
            new_coords = []
            for point in shell_tup: new_coords.append((point[0] + 500., point[1] + 500.))
            new_shell = LinearRing(*tuple(new_coords))

            # Assigning polygon's exterior ring w/the new shell
            poly.exterior_ring = new_shell
            s = str(new_shell) # new shell is still accessible
            self.assertEqual(poly.exterior_ring, new_shell)
            self.assertEqual(poly[0], new_shell)

        ### Testing the mutability of Geometry Collections
        for tg in self.geometries.multipoints:
            mp = fromstr(tg.wkt)
            for i in range(len(mp)):
                # Creating a random point.
                pnt = mp[i]
                new = Point(random.randint(21, 100), random.randint(21, 100))
                # Testing the assignment
                mp[i] = new
                s = str(new) # what was used for the assignment is still accessible
                self.assertEqual(mp[i], new)
                self.assertEqual(mp[i].wkt, new.wkt)
                self.assertNotEqual(pnt, mp[i])

        # MultiPolygons involve much more memory management because each
        # Polygon w/in the collection has its own rings.
        for tg in self.geometries.multipolygons:
            mpoly = fromstr(tg.wkt)
            for i in xrange(len(mpoly)):
                poly = mpoly[i]
                old_poly = mpoly[i]
                # Offsetting each ring in the polygon by 500.
                for j in xrange(len(poly)):
                    r = poly[j]
                    for k in xrange(len(r)): r[k] = (r[k][0] + 500., r[k][1] + 500.)
                    poly[j] = r

                self.assertNotEqual(mpoly[i], poly)
                # Testing the assignment
                mpoly[i] = poly
                s = str(poly) # Still accessible
                self.assertEqual(mpoly[i], poly)
                self.assertNotEqual(mpoly[i], old_poly)

Example 196

Project: oh-mainline
Source File: where.py
View license
    def make_atom(self, child, qn, connection):
        """
        Turn a tuple (Constraint(table_alias, column_name, db_type),
        lookup_type, value_annotation, params) into valid SQL.

        The first item of the tuple may also be an Aggregate.

        Returns the string for the SQL fragment and the parameters to use for
        it.
        """
        lvalue, lookup_type, value_annotation, params_or_value = child
        if isinstance(lvalue, Constraint):
            try:
                lvalue, params = lvalue.process(lookup_type, params_or_value, connection)
            except EmptyShortCircuit:
                raise EmptyResultSet
        elif isinstance(lvalue, Aggregate):
            params = lvalue.field.get_db_prep_lookup(lookup_type, params_or_value, connection)
        else:
            raise TypeError("'make_atom' expects a Constraint or an Aggregate "
                            "as the first item of its 'child' argument.")

        if isinstance(lvalue, tuple):
            # A direct database column lookup.
            field_sql = self.sql_for_columns(lvalue, qn, connection)
        else:
            # A smart object with an as_sql() method.
            field_sql = lvalue.as_sql(qn, connection)

        if value_annotation is datetime.datetime:
            cast_sql = connection.ops.datetime_cast_sql()
        else:
            cast_sql = '%s'

        if hasattr(params, 'as_sql'):
            extra, params = params.as_sql(qn, connection)
            cast_sql = ''
        else:
            extra = ''

        if (len(params) == 1 and params[0] == '' and lookup_type == 'exact'
            and connection.features.interprets_empty_strings_as_nulls):
            lookup_type = 'isnull'
            value_annotation = True

        if lookup_type in connection.operators:
            format = "%s %%s %%s" % (connection.ops.lookup_cast(lookup_type),)
            return (format % (field_sql,
                              connection.operators[lookup_type] % cast_sql,
                              extra), params)

        if lookup_type == 'in':
            if not value_annotation:
                raise EmptyResultSet
            if extra:
                return ('%s IN %s' % (field_sql, extra), params)
            max_in_list_size = connection.ops.max_in_list_size()
            if max_in_list_size and len(params) > max_in_list_size:
                # Break up the params list into an OR of manageable chunks.
                in_clause_elements = ['(']
                for offset in xrange(0, len(params), max_in_list_size):
                    if offset > 0:
                        in_clause_elements.append(' OR ')
                    in_clause_elements.append('%s IN (' % field_sql)
                    group_size = min(len(params) - offset, max_in_list_size)
                    param_group = ', '.join(repeat('%s', group_size))
                    in_clause_elements.append(param_group)
                    in_clause_elements.append(')')
                in_clause_elements.append(')')
                return ''.join(in_clause_elements), params
            else:
                return ('%s IN (%s)' % (field_sql,
                                        ', '.join(repeat('%s', len(params)))),
                        params)
        elif lookup_type in ('range', 'year'):
            return ('%s BETWEEN %%s and %%s' % field_sql, params)
        elif lookup_type in ('month', 'day', 'week_day'):
            return ('%s = %%s' % connection.ops.date_extract_sql(lookup_type, field_sql),
                    params)
        elif lookup_type == 'isnull':
            return ('%s IS %sNULL' % (field_sql,
                (not value_annotation and 'NOT ' or '')), ())
        elif lookup_type == 'search':
            return (connection.ops.fulltext_search_sql(field_sql), params)
        elif lookup_type in ('regex', 'iregex'):
            return connection.ops.regex_lookup(lookup_type) % (field_sql, cast_sql), params

        raise TypeError('Invalid lookup_type: %r' % lookup_type)
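
The 'in' branch above splits an oversized parameter list into OR'd chunks. A standalone sketch of that chunking pattern, with hypothetical field_sql and max_in_list_size values (stdlib range stands in for xrange here):

from itertools import repeat

def chunked_in_clause(field_sql, params, max_in_list_size):
    # Emit "(col IN (...) OR col IN (...) ...)" in groups of max_in_list_size.
    pieces = ['(']
    for offset in range(0, len(params), max_in_list_size):
        if offset > 0:
            pieces.append(' OR ')
        group_size = min(len(params) - offset, max_in_list_size)
        pieces.append('%s IN (%s)' % (field_sql, ', '.join(repeat('%s', group_size))))
    pieces.append(')')
    return ''.join(pieces)

chunked_in_clause('"t"."id"', list(range(7)), 3)
# -> '("t"."id" IN (%s, %s, %s) OR "t"."id" IN (%s, %s, %s) OR "t"."id" IN (%s))'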

Example 197

Project: oh-mainline
Source File: dispatcher.py
View license
    def disconnect(self, receiver=None, sender=None, weak=True, dispatch_uid=None):
        """
        Disconnect receiver from sender for signal.

        If weak references are used, disconnect need not be called. The receiver
        will be removed from dispatch automatically.

        Arguments:

            receiver
                The registered receiver to disconnect. May be None if
                dispatch_uid is specified.

            sender
                The registered sender to disconnect

            weak
                The weakref state to disconnect

            dispatch_uid
                the unique identifier of the receiver to disconnect
        """
        if dispatch_uid:
            lookup_key = (dispatch_uid, _make_id(sender))
        else:
            lookup_key = (_make_id(receiver), _make_id(sender))

        with self.lock:
            for index in xrange(len(self.receivers)):
                (r_key, _) = self.receivers[index]
                if r_key == lookup_key:
                    del self.receivers[index]
                    break
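
A minimal usage sketch of the public connect/disconnect pair that drives the loop above, using a hypothetical signal and receiver (not taken from the project source):

from django.dispatch import Signal

order_placed = Signal()

def handle_order(sender, **kwargs):
    pass

order_placed.connect(handle_order, dispatch_uid='handle_order_once')
# Disconnecting by dispatch_uid removes the matching (dispatch_uid, sender) entry.
order_placed.disconnect(dispatch_uid='handle_order_once')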

Example 198

Project: oh-mainline
Source File: crypto.py
View license
def pbkdf2(password, salt, iterations, dklen=0, digest=None):
    """
    Implements PBKDF2 as defined in RFC 2898, section 5.2

    HMAC+SHA256 is used as the default pseudo random function.

    Right now 10,000 iterations is the recommended default which takes
    100ms on a 2.2Ghz Core 2 Duo.  This is probably the bare minimum
    for security given 1000 iterations was recommended in 2001. This
    code is very well optimized for CPython and is only four times
    slower than openssl's implementation.
    """
    assert iterations > 0
    if not digest:
        digest = hashlib.sha256
    password = force_bytes(password)
    salt = force_bytes(salt)
    hlen = digest().digest_size
    if not dklen:
        dklen = hlen
    if dklen > (2 ** 32 - 1) * hlen:
        raise OverflowError('dklen too big')
    l = -(-dklen // hlen)
    r = dklen - (l - 1) * hlen

    hex_format_string = "%%0%ix" % (hlen * 2)

    inner, outer = digest(), digest()
    if len(password) > inner.block_size:
        password = digest(password).digest()
    password += b'\x00' * (inner.block_size - len(password))
    inner.update(password.translate(hmac.trans_36))
    outer.update(password.translate(hmac.trans_5C))

    def F(i):
        def U():
            u = salt + struct.pack(b'>I', i)
            for j in xrange(int(iterations)):
                dig1, dig2 = inner.copy(), outer.copy()
                dig1.update(u)
                dig2.update(dig1.digest())
                u = dig2.digest()
                yield _bin_to_long(u)
        return _long_to_bin(reduce(operator.xor, U()), hex_format_string)

    T = [F(x) for x in range(1, l + 1)]
    return b''.join(T[:-1]) + T[-1][:r]
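
A minimal usage sketch (illustrative values only, not a security recommendation); with the defaults above, dklen falls back to the digest size of SHA-256:

# Assumes pbkdf2 is importable; the password and salt are placeholders.
key = pbkdf2(b'swordfish', b'per-user-salt', 10000)
assert len(key) == hashlib.sha256().digest_size  # 32 bytes by default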

Example 199

Project: oh-mainline
Source File: ipv6.py
View license
def _explode_shorthand_ip_string(ip_str):
    """
    Expand a shortened IPv6 address.

    Args:
        ip_str: A string, the IPv6 address.

    Returns:
        A string, the expanded IPv6 address.

    """
    if not _is_shorthand_ip(ip_str):
        # We've already got a longhand ip_str.
        return ip_str

    new_ip = []
    hextet = ip_str.split('::')

    # If there is a ::, we need to expand it with zeroes
    # to get to 8 hextets - unless there is a dot in the last hextet,
    # meaning we're doing v4-mapping
    if '.' in ip_str.split(':')[-1]:
        fill_to = 7
    else:
        fill_to = 8

    if len(hextet) > 1:
        sep = len(hextet[0].split(':')) + len(hextet[1].split(':'))
        new_ip = hextet[0].split(':')

        for _ in xrange(fill_to - sep):
            new_ip.append('0000')
        new_ip += hextet[1].split(':')

    else:
        new_ip = ip_str.split(':')

    # Now need to make sure every hextet is 4 lower case characters.
    # If a hextet is < 4 characters, we've got missing leading 0's.
    ret_ip = []
    for hextet in new_ip:
        ret_ip.append(('0' * (4 - len(hextet)) + hextet).lower())
    return ':'.join(ret_ip)
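
A hedged worked example of the expansion above (hypothetical call, not part of the module):

_explode_shorthand_ip_string('2001:db8::1')
# '::' splits into ['2001:db8', '1']; the two sides yield 3 hextets, so 5
# '0000' groups are inserted to reach fill_to = 8, then every hextet is
# left-padded to 4 lower-case characters:
# -> '2001:0db8:0000:0000:0000:0000:0000:0001'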