numpy.sort

Here are the examples of the python api numpy.sort taken from open source projects. By voting up you can indicate which examples are most useful and appropriate.

200 Examples

Example 1

Project: chainer
Source File: permutate.py
View license
def _check_indices(indices):
    """Validate that ``indices`` holds unique values in range.

    Raises ValueError on any out-of-bounds entry or duplicate entry.
    """
    if len(indices) == 0:
        return
    # TODO(unno): Check indices without cpu
    indices = cuda.to_cpu(indices)
    n = len(indices)
    for idx in indices:
        if not (0 <= idx < n):
            raise ValueError('Out of bounds index: {}'.format(idx))
    # adjacent equal values in the sorted order reveal duplicates
    ordered = numpy.sort(indices)
    for prev, cur in six.moves.zip(ordered, ordered[1:]):
        if prev == cur:
            raise ValueError('indices contains duplicate value: {}'.format(prev))

Example 2

Project: python-control
Source File: statesp_test.py
View license
    def testPole(self):
        """Check the poles of a MIMO system against known values."""
        expected = np.sort([3.34747678408874,
            -3.17373839204437 + 1.47492908003839j,
            -3.17373839204437 - 1.47492908003839j])
        actual = np.sort(self.sys1.pole())
        np.testing.assert_array_almost_equal(actual, expected)

Example 3

Project: AZOrange
Source File: pstat.py
View license
    def asortrows(a, axis=0):
        """Stable sort of ``a`` along ``axis`` (kind='mergesort').

    Usage:   asortrows(a,axis=0)
    Returns: sorted version of a
    """
        # mergesort is stable: equal elements keep their relative order
        return N.sort(a, axis=axis, kind='mergesort')

Example 4

Project: attention-lvcsr
Source File: test_sort.py
View license
    def test2(self):
        """Symbolic sort with a runtime axis must match np.sort on both axes."""
        matrix_in = tensor.dmatrix()
        axis_in = tensor.scalar()
        fn = theano.function([matrix_in, axis_in], sort(matrix_in, axis_in))
        for ax in (0, 1):
            assert np.allclose(fn(self.m_val, ax), np.sort(self.m_val, ax))

Example 5

Project: attention-lvcsr
Source File: test_sort.py
View license
    def test3(self):
        """Sorting a vector with the default axis matches np.sort."""
        vec = tensor.dvector()
        fn = theano.function([vec], sort(vec))
        result = fn(self.v_val)
        expected = np.sort(self.v_val)
        assert np.allclose(result, expected)

Example 6

Project: attention-lvcsr
Source File: test_sort.py
View license
    def test_None(self):
        """axis=None flattens the input before sorting, as in np.sort."""
        matrix_in = tensor.dmatrix()
        fn = theano.function([matrix_in], sort(matrix_in, None))
        assert np.allclose(fn(self.m_val), np.sort(self.m_val, None))

Example 7

Project: rlpy
Source File: iFDD.py
View license
    def showPotentials(self):
        """Print a table of all candidate (potential) features to stdout.

        One row per entry in ``self.iFDD_potentials``; each row shows the
        index, the (sorted) feature set, relevance, count, and parents.
        """
        print "Potentials:"
        print "-" * 30
        print " index\t| f_set\t| relevance\t| count\t| p1\t| p2"
        print "-" * 30
        # Python 2 dict iteration; the key is unused, only the value matters
        for _, potential in self.iFDD_potentials.iteritems():
            print " %d\t| %s\t| %0.2f\t| %d\t| %s\t| %s" % (potential.index, str(np.sort(list(potential.f_set))), potential.relevance, potential.count, potential.p1, potential.p2)

Example 8

Project: rlpy
Source File: iFDD.py
View license
    def showPotentials(self):
        """Print a table of all candidate (potential) features to stdout.

        Rows are ordered over the feature-set keys by ``_set_comp_lt``.
        Unlike the sibling implementation, relevance is computed via
        ``relevance(plus=True)`` and the count is read from ``potential.c``.
        """
        print "Potentials:"
        print "-" * 30
        print " index\t| f_set\t| relevance\t| count\t| p1\t| p2"
        print "-" * 30

        # Python 2: sorted() with a cmp function over the dict keys
        k = sorted(self.iFDD_potentials.iterkeys(), cmp=_set_comp_lt)
        for f_set in k:
            potential = self.iFDD_potentials[f_set]
            print " %d\t| %s\t| %g\t| %d\t| %s\t| %s" % (potential.index, str(np.sort(list(potential.f_set))), potential.relevance(plus=True), potential.c, potential.p1, potential.p2)

Example 9

Project: MJHMC
Source File: sg_fig.py
View license
def sg(algebraic_sampler, full):
    """Return the spectral gap of the sampler's true transition matrix.

    Raises if the leading eigenvalue is not 1 (as required of a valid
    transition matrix).
    """
    transition = algebraic_sampler.calculate_true_transition_matrix(full)
    eigvals, _ = eig(transition)
    ordered = np.sort(eigvals)[::-1]
    leading = np.around(np.real_if_close(ordered[0]), decimals=5)
    if leading != 1:
        raise Exception("no eval with value 1")
    return 1 - np.absolute(ordered[1])

Example 10

Project: MJHMC
Source File: mixing.py
View license
def sg(sampler):
    """Return the spectral gap of the sampler's empirical transition matrix.

    Keeps drawing batches of 1000 samples until the empirical estimate
    can be computed without a RuntimeError.
    """
    while True:
        try:
            eigvals, _ = eig(sampler.get_empirical_transition_matrix())
            ordered = np.sort(eigvals)[::-1]
            if np.around(np.real_if_close(ordered[0]), decimals=5) != 1:
                raise Exception("no eval with value 1")
            return 1 - np.absolute(ordered[1])
        except RuntimeError:
            # not enough samples yet; draw more and retry
            sampler.sample(1000)

Example 11

Project: cmonkey2
Source File: datamatrix_test.py
View license
def as_sorted_flat_values(matrices):
    """this method is now inlined into quantile_normalize_scores
    we keep it here with its tests to demonstrate how it works
    """
    # one sorted, flattened column per input matrix
    columns = [np.sort(m.values.flatten()) for m in matrices]
    return np.transpose(np.asarray(columns))

Example 12

Project: scikit-image
Source File: color_histogram.py
View license
def pct_total_area(image, percentile=0.80):
    """Return threshold value based on percentage of total area.

    The returned intensity is the pixel value at position
    ``int((image.size - 1) * percentile)`` of the sorted pixels, i.e.
    roughly ``percentile`` of the pixels lie at or below it.
    """
    flat_sorted = np.sort(image.flat)
    cutoff = int((image.size - 1) * percentile)
    return flat_sorted[cutoff]

Example 13

View license
    def _get_support_mask(self):
        """Return a boolean mask of the features kept by the FDR procedure.

        Benjamini-Hochberg step-up selection: the sorted p-values are
        compared against the critical line ``alpha * rank / n_features``;
        every feature whose p-value is at or below the largest passing
        p-value is kept.
        """
        check_is_fitted(self, 'scores_')

        n_features = len(self.pvalues_)
        sv = np.sort(self.pvalues_)
        # sorted p-values that fall under the BH critical line alpha*i/n
        selected = sv[sv <= float(self.alpha) / n_features *
                      np.arange(1, n_features + 1)]
        if selected.size == 0:
            # nothing survives the correction: reject all features
            return np.zeros_like(self.pvalues_, dtype=bool)
        return self.pvalues_ <= selected.max()

Example 14

Project: scot
Source File: xvschema.py
View license
def _nfold(num_trials, skipstep=None, n='unset'):
    """Yield (trainset, testset) index pairs for n-fold cross-validation.

    Parameters
    ----------
    num_trials : int
        Total number of trials to split.
    skipstep : unused
        Kept for signature compatibility with the sibling schemes.
    n : int
        Number of folds; must be supplied by the caller.

    Yields
    ------
    trainset : ndarray
        Sorted indices of the training trials for this fold.
    testset : list
        Indices of the held-out trials for this fold.
    """
    if n == 'unset':
        # fail fast with a clear message instead of a TypeError
        # from dividing by the 'unset' sentinel string below
        raise ValueError('number of folds n must be specified')
    blocksize = int(np.ceil(num_trials / n))
    for start in range(0, num_trials, blocksize):
        testset = [k for k in (start + np.arange(blocksize)) if k < num_trials]
        # everything before and after the test block (renamed from `i`,
        # which shadowed the outer loop variable)
        trainset = ([t for t in range(testset[0])] +
                    [t for t in range(testset[-1] + 1, num_trials)])
        yield np.sort([t % num_trials for t in trainset]), testset

Example 15

Project: RCN
Source File: draw_points_denoise_finetune.py
View license
def save_error_results(array, set_name, sample_num):
    """Sort errors in descending order, dump them to disk, and return the
    indices of the ``sample_num`` largest errors.

    Writes two files under the module-level ``out_path``: a text file of
    (error, index) pairs and a pickle of the full index array.
    """
    # note: despite the '_asc' names, both arrays are in DESCENDING order
    array_asc = np.sort(array)[::-1]
    array_asc_indx = np.argsort(array)[::-1]
    print "error values for set %s are %s" %(set_name, array_asc[:sample_num])
    print "error indices for set %s are %s" %(set_name, array_asc_indx[:sample_num])
    out_str_path = "%s/%s_sorted_error.txt" %(out_path, set_name)
    # NOTE(review): out_str is never explicitly closed/flushed
    out_str=open(out_str_path,'w')
    out_str.write("error, index\n")
    for err, indx in zip(array_asc, array_asc_indx):
        out_str.write("%s, %s\n" %(err, indx))

    out_str_path = "%s/%s_sorted_indices.pickle" %(out_path, set_name)
    with open(out_str_path, 'wb') as fp:
        pickle.dump(array_asc_indx, fp)
    print "done with %s" %(set_name)
    return array_asc_indx[:sample_num]

Example 16

Project: postpic
Source File: datahandling.py
View license
    def cutout(self, newextent):
        '''
        keeps only the grid points within ``newextent``; the two extent
        bounds may be given in either order.
        '''
        nex = np.sort(newextent)
        lo, hi = nex[0], nex[1]
        self.grid_node = [gn for gn in self.grid_node if lo <= gn <= hi]

Example 17

Project: best
Source File: __init__.py
View license
def hdi_of_mcmc( sample_vec, cred_mass = 0.95 ):
    """Compute the highest-density interval (HDI) of an MCMC sample.

    Slides a window covering ``cred_mass`` of the sorted points and
    returns the narrowest one as (hdi_min, hdi_max).
    """
    assert len(sample_vec), 'need points to find HDI'
    pts = np.sort(sample_vec)

    n_included = int(np.floor(cred_mass * len(pts)))
    # width of every candidate interval containing n_included points
    widths = pts[n_included:] - pts[:len(pts) - n_included]

    best = np.argmin(widths)
    return pts[best], pts[best + n_included]

Example 18

Project: tvb-library
Source File: fcd_matrix.py
View license
def spectral_embedding(fcd):
    """Cluster the FCD matrix and derive a radius cutoff.

    Returns the computed radii and a cutoff at half the largest radius.
    """
    assignments, _ = spectral_dbscan(fcd, 2)
    radii = compute_radii(assignments, True)
    cutoff = 0.5 * np.sort(radii)[-1]
    return radii, cutoff

Example 19

Project: deep_recommend_system
Source File: nn_test.py
View license
  def testShapedDropoutCorrelation(self):
    """With noise_shape=[x_dim, 1] the keep/drop decision is broadcast
    along the second axis, so every row must be uniformly kept or
    uniformly dropped."""
    # Runs a shaped dropout and tests that the correlations are correct.
    x_dim = 40
    y_dim = 30
    num_iter = 10
    for keep_prob in [0.1, 0.5, 0.8]:
      with self.test_session():
        t = tf.constant(1.0, shape=[x_dim, y_dim], dtype=tf.float32)
        dropout = tf.nn.dropout(t, keep_prob, noise_shape=[x_dim, 1])
        self.assertEqual([x_dim, y_dim], dropout.get_shape())
        for _ in xrange(0, num_iter):
          value = dropout.eval()
          # Verifies that each row has only one type of activation.
          for i in xrange(x_dim):
            sorted_value = np.unique(np.sort(value[i, :]))
            self.assertEqual(sorted_value.size, 1)

Example 20

Project: deep_recommend_system
Source File: nn_test.py
View license
  def testShapedDropoutCorrelation(self):
    """With noise_shape=[x_dim, 1] the keep/drop decision is broadcast
    along the second axis, so every row must be uniformly kept or
    uniformly dropped."""
    # Runs a shaped dropout and tests that the correlations are correct.
    x_dim = 40
    y_dim = 30
    num_iter = 10
    for keep_prob in [0.1, 0.5, 0.8]:
      with self.test_session():
        t = tf.constant(1.0, shape=[x_dim, y_dim], dtype=tf.float32)
        dropout = tf.nn.dropout(t, keep_prob, noise_shape=[x_dim, 1])
        self.assertEqual([x_dim, y_dim], dropout.get_shape())
        for _ in xrange(0, num_iter):
          value = dropout.eval()
          # Verifies that each row has only one type of activation.
          for i in xrange(x_dim):
            sorted_value = np.unique(np.sort(value[i, :]))
            self.assertEqual(sorted_value.size, 1)

Example 21

Project: msaf
Source File: main.py
View license
def estimate_bandwidth(D, k):
    """Estimate a bandwidth from the pairwise matrix ``D`` as the mean of
    column ``1 + k`` of the row-sorted distances (for a distance matrix,
    column 0 is presumably the zero self-distance -- confirm upstream).
    """
    if 1 + k >= len(D):
        # clamp k so that column 1 + k exists
        k = len(D) - 2
    neighbour_dists = np.sort(D, axis=1)
    return np.mean(neighbour_dists[:, 1 + k])

Example 22

Project: msaf
Source File: kmeans.py
View license
    def init_w(self):
        """Initialize W with randomly chosen data samples (columns).

        The chosen column indices are sorted because h5py-backed data
        requires increasing indices for fancy indexing.
        """
        chosen = random.sample(range(self._num_samples), self._num_bases)
        self.W = self.data[:, np.sort(chosen)]

Example 23

Project: kombine
Source File: correlated_likelihood.py
View license
    def __init__(self, ts, vs, dvs):
        """Store sorted sample times plus values and uncertainties.

        Also precomputes the total span ``T`` and the smallest spacing
        between consecutive times, ``dt_min``.
        """
        self.ts = np.sort(ts)
        self.vs = vs
        self.dvs = dvs
        self.T = self.ts[-1] - self.ts[0]
        self.dt_min = np.diff(self.ts).min()

Example 24

Project: verif
Source File: InputText_test.py
View license
   def test_getStations(self):
      """All four stations from the example file must be present."""
      data = Input.Text("tests/example.txt")
      stations = np.sort(data.getStations())
      self.assertEqual(4, len(stations))
      expected = [Station.Station(0, 1, 1, 1),
                  Station.Station(0, 0, 0, 1),
                  Station.Station(0, 0, 0, 2),
                  Station.Station(0, 2, 2, 1)]
      for station in expected:
         self.assertTrue(station in stations)

Example 25

Project: verif
Source File: Metric.py
View license
   def _quantileToThreshold(self, values, tRange):
      """Convert a quantile range into a threshold range.

      values: sample values to take percentiles over
      tRange: two-element [lower, upper] quantile pair; infinite entries
              are left unbounded.

      Returns a two-element [lower, upper] list of thresholds.
      """
      sorted_values = np.sort(values)
      qRange = [-np.inf, np.inf]
      # Bug fix: the original iterated range(0, 1) and therefore never
      # converted the upper quantile bound; cover both endpoints.
      # (Also renamed `sorted`, which shadowed the builtin.)
      for i in range(0, 2):
         if(not np.isinf(abs(tRange[i]))):
            qRange[i] = np.percentile(sorted_values, tRange[i] * 100)
      return qRange

Example 26

Project: drmad
Source File: omniglot.py
View license
def load_data_split(num_chars, RS, num_alphabets=NUM_ALPHABETS):
    """Load a random subset of alphabets, then shuffle, split, and
    mean-normalize them.

    num_chars: passed through to ``split`` for each alphabet
    RS: random state used both for the alphabet choice and the shuffling
    num_alphabets: how many alphabets to sample (without replacement)
    """
    alphabets_to_load = RS.choice(range(NUM_ALPHABETS), size=num_alphabets, replace=False)
    # load_data presumably expects sorted alphabet indices -- TODO confirm
    raw_data = load_data(np.sort(alphabets_to_load))
    shuffled_data = [shuffle(alphabet, RS) for alphabet in raw_data]
    # regroup: one tuple per split, each holding that split of every alphabet
    data_split = zip(*[split(alphabet, num_chars) for alphabet in shuffled_data])
    normalized_data = [subtract_mean(data_subset) for data_subset in data_split]
    return normalized_data

Example 27

Project: qiime
Source File: plot_rank_abundance_graph.py
View license
def make_sorted_frequencies(counts, absolute=False):
    """transform and sort a vector of count.

    counts: a column of an OTU table
    absolute: if True return absolute values instead of frequencies.
    """
    # descending order, with zero counts removed (filtering preserves order)
    descending = sort(counts)[::-1]
    descending = descending[descending.nonzero()]
    if absolute:
        return descending
    return descending / float(descending.sum())

Example 28

Project: scikit-bio
Source File: _gini.py
View license
def _lorenz_curve(data):
    """Calculate the Lorenz curve for input data.

    Notes
    -----
    Formula available on wikipedia.

    """
    ranked = np.sort(data)
    total = ranked.sum()
    count = ranked.shape[0]
    # x: cumulative population share, y: cumulative value share
    x = np.arange(1, count + 1) / count
    y = ranked.cumsum() / total
    return x, y

Example 29

Project: kaggle-burn-cpu
Source File: random_layer.py
View license
    def _compute_centers(self, X, sparse, rs):
        """Generate centers, then compute tau, dF and dN vals"""

        # let the parent class pick the actual center points
        super(GRBFRandomLayer, self)._compute_centers(X, sparse, rs)

        centers = self.components_['centers']
        # each row: distances from one center to all centers, ascending;
        # column 0 is the zero self-distance
        sorted_distances = np.sort(squareform(pdist(centers)))
        # dF: distance to the farthest center; dN: nearest neighbour / 100
        self.dF_vals = sorted_distances[:, -1]
        self.dN_vals = sorted_distances[:, 1]/100.0
        #self.dN_vals = 0.0002 * np.ones(self.dF_vals.shape)

        # tau per the GRBF formulation -- presumably chosen so the kernel
        # decays between dN and dF; confirm against the GRBF reference
        tauNum = np.log(np.log(self.grbf_lambda) /
                        np.log(1.0 - self.grbf_lambda))

        tauDenom = np.log(self.dF_vals/self.dN_vals)

        self.tau_vals = tauNum/tauDenom

        self._extra_args['taus'] = self.tau_vals

Example 30

Project: blaze
Source File: test_numpy_compute.py
View license
def test_sort():
    """compute() of table sorts on the numpy backend must agree with
    np.sort using the structured-array ``order=`` argument."""
    # sort by one named field
    assert eq(compute(t.sort('amount'), x),
              np.sort(x, order='amount'))

    # descending sort is the reversed ascending sort
    assert eq(compute(t.sort('amount', ascending=False), x),
              np.sort(x, order='amount')[::-1])

    # lexicographic sort on two fields
    assert eq(compute(t.sort(['amount', 'id']), x),
              np.sort(x, order=['amount', 'id']))

    # sorting a single column sorts its values directly
    assert eq(compute(t.amount.sort(), x),
              np.sort(x['amount']))

Example 31

Project: acoular
Source File: trajectory.py
View license
    @property_depends_on('points[]')
    def _get_tck( self ):
        """Build the spline representation (tck) of the trajectory through
        ``self.points``, parameterized by the sorted time keys.
        """
        # sorted sample times (the keys of self.points)
        # NOTE(review): on Python 3, sort() over dict.keys() would need
        # list(...); this code assumes Python 2 semantics -- verify.
        t = sort(self.points.keys())
        # spatial coordinates in time order, one row per dimension
        xp = array([self.points[i] for i in t]).T
        # spline degree: cubic when possible, lower with few points
        k = min(3, len(self.points)-1)
        tcku = splprep(xp, u=t, s=0, k=k)
        return tcku[0]

Example 32

Project: APGL
Source File: Util.py
View license
    @staticmethod  
    def sampleWithoutReplacement(sampleSize, totalSize):
        """ 
        Create a list of integers from 0 to totalSize, and take a random sample of size sampleSize. The 
        sample ordered. 
        """
        perm = rand.permutation(totalSize)
        perm = perm[0:sampleSize]
        perm = numpy.sort(perm)
        
        return perm 

Example 33

Project: DESMAN
Source File: Variant_Filter.py
View license
    def select_Random(self, random_select):
        """Randomly keep only ``random_select`` variant sites (without
        replacement) when that is fewer than the current ``self.NS``.

        Copies of the previous state are stashed in the ``*_original``
        attributes; ``NS``, the index list and the boolean ``selected``
        mask are updated in place. Returns the (possibly reduced)
        ``snps_filter`` array.
        """
        if(random_select < self.NS):
            self.randomSelect = True
        
            # sorted random subset of site positions
            select = np.sort(self.randomState.choice(self.NS, random_select, replace=False))
            
            self.snps_filter_original = np.copy(self.snps_filter)
            self.snps_filter = self.snps_filter[select,:,:]
            
            self.NS = random_select
            
            self.selected_indices_original = np.copy(self.selected_indices)
            # map the kept positions back to the original index space
            self.selected_indices = [self.selected_indices[i] for i in select]
        
            self.selected_original = np.copy(self.selected)
            self.selected = np.zeros((self.V), dtype=bool)  
            self.selected[self.selected_indices] = True
        
        return self.snps_filter

Example 34

Project: pretty-midi
Source File: instrument.py
View license
    def get_onsets(self):
        """Get all onsets of all notes played by this instrument.
        May contain duplicates.

        Returns
        -------
        onsets : np.ndarray
                Sorted note-on times of every note.

        """
        # collect every note's start time and return them sorted
        return np.sort([note.start for note in self.notes])

Example 35

Project: SERT
Source File: query.py
View license
    def query(self, centroids):
        """Return (distances, indices) of entities near each centroid.

        Uses the prebuilt nearest-neighbour index when available,
        otherwise falls back to brute-force pairwise distances. In the
        fallback, row i of ``distances`` holds the sorted distances from
        centroid i, and row i of ``indices`` the matching entity ids.
        """
        if self.entity_neighbors is not None:
            # fast path: delegate to the fitted kneighbors index
            distances, indices = self.entity_neighbors.kneighbors(centroids)

            return distances, indices
        else:
            pairwise_distances = scipy.spatial.distance.cdist(
                centroids, self.entity_representations,
                metric=self.entity_representation_distance)

            distances = np.sort(pairwise_distances, axis=1)
            # A single argsort already yields the sorted-order indices.
            # The original chained .argsort() three times; argsorting a
            # permutation twice inverts it twice, giving back the same
            # permutation, so the two extra passes were redundant work.
            indices = pairwise_distances.argsort(axis=1)

            return distances, indices

Example 36

Project: Python-ELM
Source File: random_layer.py
View license
    def _compute_centers(self, X, sparse, rs):
        """Generate centers, then compute tau, dF and dN vals"""

        # let the parent class pick the actual center points
        super(GRBFRandomLayer, self)._compute_centers(X, sparse, rs)

        centers = self.components_['centers']
        # each row: distances from one center to all centers, ascending;
        # column 0 is the zero self-distance
        sorted_distances = np.sort(squareform(pdist(centers)))
        # dF: distance to the farthest center; dN: nearest neighbour / 100
        self.dF_vals = sorted_distances[:, -1]
        self.dN_vals = sorted_distances[:, 1]/100.0
        #self.dN_vals = 0.0002 * np.ones(self.dF_vals.shape)

        # tau per the GRBF formulation -- presumably chosen so the kernel
        # decays between dN and dF; confirm against the GRBF reference
        tauNum = np.log(np.log(self.grbf_lambda) /
                        np.log(1.0 - self.grbf_lambda))

        tauDenom = np.log(self.dF_vals/self.dN_vals)

        self.tau_vals = tauNum/tauDenom

        self._extra_args['taus'] = self.tau_vals

Example 37

Project: astrodendro
Source File: test_pruning.py
View license
def compare_dendrograms(d1, d2):
    """Assert that two dendrograms are structurally equivalent.

    Checks equal structure counts, identical multisets of per-structure
    vmin/vmax, and that each structure's index-map footprint matches up
    to a permutation of structure ids.
    """
    assert len(d1) == len(d2)
    # sorting the vmax/vmin lists makes the comparison order-independent
    assert (np.sort([leaf.vmax for leaf in d1.all_structures]) == np.sort([leaf.vmax for leaf in d2.all_structures])).all()
    assert (np.sort([leaf.vmin for leaf in d1.all_structures]) == np.sort([leaf.vmin for leaf in d2.all_structures])).all()

    for s in d1.all_structures:
         # pixels of this structure in d1, and the id occupying them in d2
         ind1 = np.where(d1.index_map == s.idx)
         idx2 = d2.index_map[ind1][0]
         ind2 = np.where(d2.index_map == idx2)
         assert_permuted_fancyindex(ind1, ind2)

Example 38

Project: nonconformist
Source File: nc.py
View license
	def apply_inverse(self, nc, significance):
		"""Invert the nonconformity distribution at ``significance``.

		Returns a 2x1 array repeating the calibration score found at the
		significance-determined (clamped) border position.
		"""
		descending = np.sort(nc)[::-1]
		cut = int(np.floor(significance * (descending.size + 1))) - 1
		# TODO: should probably warn against too few calibration examples
		cut = min(max(cut, 0), descending.size - 1)
		return np.vstack([descending[cut], descending[cut]])

Example 39

Project: nonconformist
Source File: nc.py
View license
	def apply_inverse(self, nc, significance):
		"""Invert the two-sided nonconformity distribution.

		Returns a 2x1 array [[-lower], [upper]] taken from the
		descending-sorted calibration scores at the (clamped)
		significance/2 positions.
		"""
		descending = np.sort(nc)[::-1]
		size = descending.size
		hi = int(np.floor((significance / 2) * (size + 1)))
		lo = int(np.floor((1 - significance / 2) * (size + 1)))
		# TODO: should probably warn against too few calibration examples
		hi = min(max(hi, 0), size - 1)
		lo = max(min(lo, size - 1), 0)
		return np.vstack([-descending[lo], descending[hi]])

Example 40

Project: sklearn-evaluation
Source File: metrics.py
View license
@validate.proportion
def __threshold_at(y_score, proportion):
    """Return the score that cuts off the top ``proportion`` of
    ``y_score`` (e.g. 0.1 -> the score at the top-10% boundary).
    """
    # Sort scores in descending order
    scores_sorted = np.sort(y_score)[::-1]
    # Based on the proportion, get the index to split at;
    # if the computed value is negative, clamp it to 0
    threshold_index = max(int(len(y_score) * proportion) - 1, 0)
    # Get the cutoff value
    threshold_value = scores_sorted[threshold_index]
    return threshold_value

Example 41

Project: ssf
Source File: pstat.py
View license
 def asortrows(a, axis=0):
     """Stable sort of ``a`` along ``axis`` (kind='mergesort').

     Usage:   asortrows(a,axis=0)
     Returns: sorted version of a
     """
     # mergesort is stable: equal elements keep their relative order
     return N.sort(a, axis=axis, kind='mergesort')

Example 42

Project: pycortex
Source File: polyutils.py
View license
def boundary_edges(polys):
    '''Returns the edges that are on the boundary of a mesh, as defined by belonging to only 1 face'''
    # map each (vertex-sorted) edge to the list of faces containing it
    face_lists = dict()
    for face_idx, verts in enumerate(np.sort(polys)):
        for a, b in ((0, 1), (1, 2), (0, 2)):
            pair = (verts[a], verts[b])
            face_lists.setdefault(pair, []).append(face_idx)

    # boundary edges are those touched by exactly one face
    boundary = [pair for pair, faces in face_lists.items() if len(faces) == 1]
    return np.array(boundary)

Example 43

Project: GPflow
Source File: FITCvsVFE.py
View license
def plotPredictions( ax, model, color, label ):
    """Plot the model's predictive mean and a +/-2*sqrt(var) band over
    the Snelson test inputs on the given plotting axis.
    """
    # sort the inputs so the curves are drawn left-to-right
    xtest = np.sort( readCsvFile( 'data/snelson_test_inputs' ) )
    predMean, predVar = model.predict_y(xtest)
    ax.plot( xtest, predMean, color, label=label )
    # upper and lower 2-sigma envelope around the mean
    ax.plot( xtest, predMean + 2.*np.sqrt(predVar),color )
    ax.plot( xtest, predMean - 2.*np.sqrt(predVar), color )

Example 44

Project: hyperspy
Source File: eds.py
View license
    def _add_vertical_lines_groups(self, position, **kwargs):
        """
        Add vertical markers for each group that shares the color.

        Parameters
        ----------
        position: 2D array of float
            The position on the signal axis. Each row corresponds to a
            group.
        kwargs
            keywords argument for markers.vertical_line
        """
        per_xray = len(position[0])
        # list repetition then sort keeps identical colors adjacent, so
        # the lines of one group (consumed consecutively below via the
        # flattened positions) all share a color
        colors = itertools.cycle(np.sort(
            plt.rcParams['axes.color_cycle'] * per_xray))
        for x, color in zip(np.ravel(position), colors):
            line = markers.vertical_line(x=x, color=color, **kwargs)
            self.add_marker(line)

Example 45

Project: generativebot
Source File: helpers.py
View license
def _rnd_interpolate(xy, num_points, ordered=False):
  """Resample ``num_points`` random positions along an exact (s=0)
  parametric spline fit through the points in ``xy``.

  When ``ordered`` is True the random spline parameters are sorted so
  the resampled points progress monotonically along the curve.
  """
  tck, _ = splprep([xy[:, 0], xy[:, 1]], s=0)
  params = random(num_points)
  if ordered:
    params = sort(params)
  return column_stack(splev(params, tck))

Example 46

Project: generativebot
Source File: utils.py
View license
def _rnd_interpolate(xy, num_points, ordered=False):
  """Sample ``num_points`` positions along an exact (s=0) parametric
  spline fit through the points in ``xy``.

  ordered: if True, sort the random spline parameters so the output
  points progress along the curve.
  """
  tck,u = splprep([
    xy[:,0],
    xy[:,1]],
    s=0
  )
  unew = random(num_points)
  if ordered:
    unew = sort(unew)
  out = splev(unew, tck)
  return column_stack(out)

Example 47

Project: generativebot
Source File: helpers.py
View license
def _rnd_interpolate(xy, num_points, ordered=False):
  """Sample ``num_points`` positions along an exact (s=0) parametric
  spline fit through the points in ``xy``.

  ordered: if True, sort the random spline parameters so the output
  points progress along the curve.
  """
  tck,u = splprep([
    xy[:,0],
    xy[:,1]],
    s=0
  )
  unew = random(num_points)
  # Bug fix: this previously tested ``if sort:`` -- the numpy function,
  # which is always truthy -- so the ``ordered`` flag was ignored and
  # the parameters were always sorted. Honor the flag, matching the
  # sibling implementations of this helper.
  if ordered:
    unew = sort(unew)
  out = splev(unew, tck)
  return column_stack(out)

Example 48

Project: sand-glyphs
Source File: utils.py
View license
def _rnd_interpolate(xy, num_points, ordered=False):
  """Sample ``num_points`` positions along an exact (s=0) parametric
  spline fit through the points in ``xy``.

  ordered: if True, sort the random spline parameters so the output
  points progress along the curve.
  """
  tck,u = splprep([
    xy[:,0],
    xy[:,1]],
    s=0
  )
  unew = random(num_points)
  if ordered:
    unew = sort(unew)
  out = splev(unew, tck)
  return column_stack(out)

Example 49

Project: sand-spline
Source File: helpers.py
View license
def _rnd_interpolate(xy, num_points, ordered=False):
  """Sample ``num_points`` positions along an exact (s=0) parametric
  spline fit through the points in ``xy``.

  ordered: if True, sort the random spline parameters so the output
  points progress along the curve.
  """
  tck,u = splprep([
    xy[:,0],
    xy[:,1]],
    s=0
  )
  unew = random(num_points)
  # Bug fix: this previously tested ``if sort:`` -- the numpy function,
  # which is always truthy -- so the ``ordered`` flag was ignored and
  # the parameters were always sorted. Honor the flag, matching the
  # sibling implementations of this helper.
  if ordered:
    unew = sort(unew)
  out = splev(unew, tck)
  return column_stack(out)

Example 50

Project: pyNCS
Source File: monitors.py
View license
    def __init__(self, addr_group=None, plot_args = None):
        """Create a spike monitor bound to one address group.

        By definition of populations, a SpikeMonitor is associated to at
        most one channel.

        Parameters
        ----------
        addr_group : address group whose channel and logical addresses
            back the underlying spike list.
        plot_args : optional dict of plotting keyword arguments.
        """
        self.addr_group = addr_group
        # Fix: compare to None with `is`, not `==` -- identity check;
        # `== None` invokes __eq__ and can misbehave for array-likes.
        if plot_args is None:
            self.plot_args = {}
        else:
            self.plot_args = plot_args
        self._sl = monitorSpikeList(self.addr_group.channel,
             spikes=[], id_list=np.sort(addr_group.laddr))
        self._populated = False
        self.name = self.addr_group.name
        self.channel = self.addr_group.channel