# numpy.average

Here are the examples of the python api numpy.average taken from open source projects. By voting up you can indicate which examples are most useful and appropriate.

## 176 Examples

#### Example 1

Project: orange
Source File: orngDimRed.py
    def __call__(self, lambd):
        """Score a Box-Cox lambda by the squared skewness of the transformed data.

        A lambda yielding zero skewness makes the transformed values most
        symmetric, so minimising this score selects the best transform.
        """
        nv = _BC(self.v, lambd)  # Box-Cox transform of the stored values
        mean = numpy.average(nv)
        cv = nv - mean  # centred values
        # Skewness = E[cv^3] / (E[cv^2])^1.5
        skewness = numpy.average(numpy.power(cv, 3)) / numpy.power(numpy.average(numpy.power(cv, 2)), 1.5)
        # kurtosis = numpy.average(numpy.power(cv,4))/numpy.power(numpy.average(numpy.power(cv,2)),2)-3
        return skewness ** 2


#### Example 2

Project: muscle-plotter
Source File: reachstats.py
    def analyze_distance_to_target(self):
        """Track the average y-distance to the target and decide on a boost.

        Averages the samples gathered this cycle into ``lag_log``; once
        enough cycles are recorded, a persistently large negative (resp.
        positive) lag asks for a boost (1) resp. a slow-down (-1).
        Returns None when no samples were collected this cycle.
        """
        if len(self.y_distances) > 0:
            average_y = np.average(self.y_distances)
        else:
            return
        self.y_distances = []
        self.lag_log.append(average_y)
        if len(self.lag_log) > self.lag_cycles * 2:
            test_region = self.lag_log[-self.lag_cycles:]
            sum_lag = sum(test_region)
            if sum_lag < 0:
                if (abs(sum_lag) >
                        self.lag_cycles * self.boost_threshold):
                    return 1
            if sum_lag > 0:
                if (abs(sum_lag) >
                        self.lag_cycles * self.boost_threshold):
                    return -1
        # NOTE(review): original indentation was lost in scraping; `return 0`
        # is assumed to be the function-level fallthrough -- confirm.
        return 0


#### Example 3

Project: python-acoustics
Source File: room.py
def mean_alpha(alphas, surfaces):
    """
    Calculate the surface-weighted mean of absorption coefficients.

    :param alphas: Absorption coefficients :math:`\\alpha`.
    :param surfaces: Surfaces :math:`S`, used as averaging weights.
    :returns: Mean absorption coefficient (per frequency band when
        ``alphas`` is 2-D).
    """
    return np.average(alphas, axis=0, weights=surfaces)


#### Example 4

Project: python-acoustics
Source File: room.py
def t60_millington(surfaces, alpha, volume, c=SOUNDSPEED):
    """
    Reverberation time according to Millington.

    :param surfaces: Surfaces :math:`S`.
    :param alpha: Mean absorption coefficient :math:`\\alpha` or by frequency bands
    :param volume: Volume of the room :math:`V`.
    :param c: Speed of sound :math:`c`.
    :returns: Reverberation time :math:`T_{60}`
    """
    mean_alpha = np.average(alpha, axis=0, weights=surfaces)
    # Millington absorption: sum of -S_i * ln(1 - alpha_i) per band.
    A = -np.sum(surfaces[:, np.newaxis] * np.log(1.0 - mean_alpha), axis=0)
    t60 = 4.0 * np.log(10.0 ** 6.0) * volume / (c * A)
    return t60


#### Example 5

Project: python-acoustics
Source File: utils.py
def mean_tl(tl, surfaces):
    """Surface-weighted mean transmission loss in decibels.

    :param tl: Transmission loss values (scalar, sequence, or ndarray).
    :param surfaces: Surface areas used as averaging weights.
    :returns: Mean transmission loss in dB.
    """
    try:
        tau_axis = tl.ndim - 1  # ndarrays: average over the last axis
    except AttributeError:
        tau_axis = 0  # plain sequences have no ndim
    tau = 1.0 / (10.0 ** (tl / 10.0))  # dB -> transmission coefficient
    return 10.0 * np.log10(1.0 / np.average(tau, tau_axis, surfaces))


#### Example 6

Project: zipline
Source File: technical.py
    def compute(self, today, assets, out, data, decay_rate):
        """Write the exponentially-weighted moving average of ``data``
        (weights from ``self.weights``) into ``out``."""
        out[:] = average(
            data,
            axis=0,
            weights=self.weights(len(data), decay_rate),
        )


#### Example 7

Project: zipline
Source File: technical.py
    def compute(self, today, assets, out, data, decay_rate):
        """Write the bias-corrected exponentially-weighted moving standard
        deviation of ``data`` into ``out``."""
        weights = self.weights(len(data), decay_rate)

        mean = average(data, axis=0, weights=weights)
        variance = average((data - mean) ** 2, axis=0, weights=weights)

        # Reliability (bias) correction for a weighted variance estimate.
        squared_weight_sum = (np_sum(weights) ** 2)
        bias_correction = (
            squared_weight_sum / (squared_weight_sum - np_sum(weights ** 2))
        )
        out[:] = sqrt(variance * bias_correction)


#### Example 8

Project: mlxtend
Source File: ensemble_vote.py
    def predict_proba(self, X):
        """ Predict class probabilities for X.

        Parameters
        ----------
        X : {array-like, sparse matrix}, shape = [n_samples, n_features]
            Training vectors, where n_samples is the number of samples and
            n_features is the number of features.

        Returns
        -------
        avg : array-like, shape = [n_samples, n_classes]
            Weighted average probability for each class per sample.

        """
        avg = np.average(self._predict_probas(X), axis=0, weights=self.weights)
        return avg


#### Example 9

Project: heamy
Source File: pipeline.py
    def weight(self, weights):
        """Applies weighted mean to models.

        Parameters
        ----------
        weights : list
            One weight per model, in pipeline order.

        Returns
        -------
        np.ndarray

        Examples
        --------
        >>> pipeline = ModelsPipeline(model_rf,model_lr)
        >>> pipeline.weight([0.8,0.2])
        """
        return self.apply(lambda x: np.average(x, axis=0, weights=weights))


#### Example 10

Project: scikit-learn
Source File: voting_classifier.py
    def _predict_proba(self, X):
"""Predict class probabilities for X in 'soft' voting """
if self.voting == 'hard':
raise AttributeError("predict_proba is not available when"
" voting=%r" % self.voting)
check_is_fitted(self, 'estimators_')
avg = np.average(self._collect_probas(X), axis=0, weights=self.weights)
return avg


#### Example 11

Project: sdaps
Source File: opencv.py
def ensure_greyscale(img):
    """Return ``img`` collapsed to a single greyscale channel (uint8).

    Two-dimensional input is assumed to be greyscale already and is
    returned unchanged; otherwise the colour channels (axis 2) are
    averaged and the result converted back to uint8.
    """
    if len(img.shape) == 2:
        # Well, seems to be greyscale/monochrome already
        return img

    # Average the color samples, and convert back to uint8
    img = np.average(img, 2)
    img = np.array(img, dtype=np.uint8)

    return img


#### Example 12

Project: postpic
Source File: particles.py
    def mean(self, func, weights=1.0):
        '''
        the mean of a value given by the function func. The particle weight
        of the individual particles will be included in the calculation.
        An additional weight can be given as well.
        '''
        w = self.weight() * weights
        return np.average(func(self), weights=w)


#### Example 13

Project: postpic
Source File: particles.py
    def var(self, func, weights=1.0):
        '''
        variance of the value given by func, using the same particle
        weights (times the optional extra ``weights``) as :meth:`mean`.
        '''
        w = self.weight() * weights
        data = func(self)
        m = np.average(data, weights=w)
        return np.average((data - m) ** 2, weights=w)


#### Example 14

Project: beat-bricks
Source File: lego.py
def average_cell_color_hsv(img, y, x):
    """Average colour of grid cell (y, x) of ``img``, converted BGR -> HSV."""
    y_start, y_end = cell_start_end(y)
    x_start, x_end = cell_start_end(x)
    cell = img[
        y_start:y_end,
        x_start:x_end,
        :]
    # Average over rows, then columns, leaving a single BGR triple.
    return bgr2hsv(numpy.average(numpy.average(cell, axis=0), axis=0))


#### Example 15

Project: semisup-learn
Source File: CPLELearning.py
    def predict(self, X):
        """Perform classification on samples in X.

        Parameters
        ----------
        X : array-like, shape = [n_samples, n_features]

        Returns
        -------
        y_pred : array, shape = [n_samples]
            Class labels for samples in X.
        """

        if self.predict_from_probabilities:
            P = self.predict_proba(X)
            # Threshold on the mean class-0 probability instead of 0.5.
            return (P[:, 0] < numpy.average(P[:, 0]))
        else:
            return self.model.predict(X)


#### Example 16

Project: Sushi
Source File: sushi.py
def average_shifts(events):
    """Weighted average shift over all unlinked events.

    Each event is weighted by ``1 - diff`` (better-matching events count
    more); the average is written back to every unlinked event via
    ``set_shift`` and then returned.
    """
    events = [e for e in events if not e.linked]
    shifts = [x.shift for x in events]
    weights = [1 - x.diff for x in events]
    avg = np.average(shifts, weights=weights)
    for e in events:
        e.set_shift(avg, e.diff)
    return avg


#### Example 17

Project: FabSim
Source File: plot_lammps.py
def calc_ave_press(iter, tag="Press"):
    """Print average and stdev of quantity ``tag`` from a LAMMPS production log."""
    d1, d2 = lmp_io.load_quantity_from_file("new_CG.prod%d.log" % (iter - 1), tag)

    # Number of leading (equilibration) samples to discard.
    offset = 100

    pave = np.average(d2[offset:])
    pstdev = np.std(d2[offset:])

    # NOTE(review): formatting d2 with %f only works when d2 has a single
    # element -- confirm the intended "initial values" fields.  The
    # parenthesised print is valid in both Python 2 and 3.
    print("%s information iteration #%d, average pressure: %f, stdev: %f, initial values: %f %f %f" % (tag, iter - 1, pave, pstdev, d2, d2, d2))


#### Example 18

Project: FabSim
Source File: plot_lammps.py
def get_pressure(iter, tag):
    """Return (average, stdev, raw x, raw y) of quantity ``tag`` from a
    LAMMPS production log, printing a summary as a side effect."""
    d1, d2 = lmp_io.load_quantity_from_file("new_CG.prod%d.log" % (iter - 1), tag)

    # Number of leading (equilibration) samples to discard.
    offset = 100

    pave = np.average(d2[offset:])
    pstdev = np.std(d2[offset:])

    # NOTE(review): formatting d2 with %f only works when d2 has a single
    # element -- confirm the intended "initial values" fields.
    print("%s information iteration #%d, average pressure: %f, stdev: %f, initial values: %f %f %f" % (tag, iter - 1, pave, pstdev, d2, d2, d2))

    return pave, pstdev, d1, d2


#### Example 19

Project: deer
Source File: agent.py
    def avgBellmanResidual(self):
        """ Returns the average training loss on the epoch,
        or -1 when no losses have been recorded yet.
        """
        if (len(self._training_loss_averages) == 0):
            return -1
        return np.average(self._training_loss_averages)


#### Example 20

Project: StrepHit
Source File: rank_verbs.py
def get_similarity_scores(verb_token, vectorizer, tf_idf_matrix):
    """ Compute the cosine similarity score of a given verb token against the input corpus TF/IDF matrix.

    :param str verb_token: Surface form of a verb, e.g., *born*
    :param sklearn.feature_extraction.text.TfidfVectorizer vectorizer: Vectorizer
        used to transform verbs into vectors
    :return: cosine similarity score
    :rtype: ndarray
    """
    verb_token_vector = vectorizer.transform([verb_token])
    # Here the linear kernel is the same as the cosine similarity, but faster
    # cf. http://scikit-learn.org/stable/modules/metrics.html#cosine-similarity
    scores = linear_kernel(verb_token_vector, tf_idf_matrix)
    logger.debug("Corpus-wide TF/IDF scores for '%s': %s" % (verb_token, scores))
    logger.debug("Average TF/IDF score for '%s': %f" % (verb_token, average(scores)))
    return scores


#### Example 21

Project: StrepHit
Source File: rank_verbs.py
    def score_lemma(self, lemma):
        """ Computes TF-IDF based score of a single lemma

        :param str lemma: The lemma to score
        :return: tuple with lemma, average tf-idf, average of tf-idf standard deviations
        :rtype: tuple of (str, float, float)
        """
        tf_idfs, st_devs = [], []
        for token in self.verbs[lemma]:
            scores = get_similarity_scores(token, self.vectorizer, self.tfidf_matrix)
            # Drop zero scores before averaging.
            tf_idfs += filter(None, scores.flatten().tolist())
            st_devs.append(scores.std())

        return lemma, average(tf_idfs), average(st_devs)


#### Example 22

Project: python-graphenelib
Source File: pricefeeds.py
def weighted_std(values, weights):
    """ Weighted std for statistical reasons

    :param values: array of sample values
    :param weights: per-sample weights
    :returns: square root of the weighted variance
    """
    average = num.average(values, weights=weights)
    variance = num.average((values - average) ** 2, weights=weights)  # Fast and numerically precise
    return sqrt(variance)


#### Example 23

Project: qspectrumanalyzer
Source File: data.py
    def update_average(self, data):
        """Update average data

        Maintains a running mean over ``average_counter`` sweeps by
        weighting the current average against the new sweep in ``data``.
        """
        if self.average is None:
            self.average = data["y"].copy()
        else:
            self.average = np.average((self.average, data["y"]), axis=0,
                                      weights=(self.average_counter - 1, 1))
        self.average_updated.emit(self)


#### Example 24

Project: orange
Source File: orngProjectionPursuit.py
def standardize(mat):
    """ Subtracts means and multiplies by diagonal elements of inverse
    square root of covariance matrix.
    """
    av = numpy.average(mat, axis=0)
    sigma = numpy.corrcoef(mat, rowvar=0)
    srSigma = sqrtm(sigma)
    isrSigma = numpy.linalg.inv(srSigma)
    # numpy.diag extracts the diagonal, so each centred column is scaled
    # element-wise by the corresponding diagonal entry.
    return (mat - av) * numpy.diag(isrSigma)


#### Example 25

def test_create_gaussian_image_pyramid():
    """Smoke-test Gaussian pyramid construction on the module-level ``img``."""
    pyramid_depth = 3

    pyramid = create_gaussian_image_pyramid(img, pyramid_depth)
    display_image_pyramid(pyramid)

    means = [np.average(f) for f in pyramid]
    deviations = [np.std(f) for f in pyramid]

    assert len(pyramid) == pyramid_depth
    # Downsampling should barely change brightness/contrast statistics.
    assert np.std(means) < 1
    assert np.std(deviations) < 1
    # NOTE(review): the scraped original compared `pyramid.shape` with
    # itself, which is vacuous; the indices below are reconstructed
    # (level 0 full size, each level strictly smaller) -- confirm.
    assert pyramid[0].shape == img.shape
    assert pyramid[1].shape < pyramid[0].shape
    assert pyramid[2].shape < pyramid[1].shape


#### Example 26

def test_create_laplacian_image_pyramid():
    """Smoke-test Laplacian pyramid construction on the module-level ``img``."""
    pyramid_depth = 3

    pyramid = create_laplacian_image_pyramid(img, pyramid_depth)
    display_image_pyramid(pyramid)

    means = [np.average(f) for f in pyramid]
    deviations = [np.std(f) for f in pyramid]

    assert len(pyramid) == pyramid_depth
    # one of the images is just the original sized down photo. there should
    # be significantly more variance in that photo
    assert np.std(deviations) > 0.01
    # NOTE(review): the scraped original compared `pyramid.shape` with
    # itself, which is vacuous; the indices below are reconstructed -- confirm.
    assert pyramid[0].shape == img.shape
    assert pyramid[1].shape < pyramid[0].shape
    assert pyramid[2].shape < pyramid[1].shape


#### Example 27

Project: pyKriging
Source File: CrossValidation.py
    def leave_n_out(self, q=5):
        '''
        Leave-n-out cross validation of the kriging model.

        :param q: the number of groups to split the model data into
        :return: tuple of (mean MSE, stdev of MSE) over all held-out points
        '''
        mseArray = []
        # Use the requested group count; the original hard-coded 5 and
        # silently ignored ``q``.
        for i in splitArrays(self.model, q):
            # NOTE(review): `kriging(i, i)` and `mse(i[j], ...)` look like
            # subscripts were lost in scraping -- confirm against upstream.
            testk = kriging(i, i)
            testk.train()
            for j in range(len(i)):
                mseArray.append(mse(i[j], testk.predict(i[j])))
            del(testk)
        return np.average(mseArray), np.std(mseArray)


#### Example 28

Project: cesium
Source File: test_general_features.py
def test_weighted_average():
    """Test weighted average and distance from weighted average features."""
    times, values, errors = irregular_random()
    f = generate_features(times, values, errors, ['weighted_average'])
    # Inverse-variance weights.
    weighted_avg = np.average(values, weights=1. / (errors ** 2))
    weighted_var = np.average((values - weighted_avg) ** 2,
                              weights=1. / (errors ** 2))
    npt.assert_allclose(f['weighted_average'], weighted_avg)

    dists_from_weighted_avg = values - weighted_avg
    stds_from_weighted_avg = (dists_from_weighted_avg /
                              np.sqrt(weighted_var))

    f = generate_features(times, values, errors,
                          ['percent_beyond_1_std'])
    # NOTE(review): only positive deviations counted (no abs()) -- confirm
    # this matches the feature's definition.
    npt.assert_equal(f['percent_beyond_1_std'], np.mean(stds_from_weighted_avg > 1.))


#### Example 29

Project: chemlab
Source File: __init__.py
def geometric_center(r_array):
    '''Return the geometric center given an array of coordinates of
    shape (n_coord, coord_dimensions).

    '''

    return np.average(r_array, axis=0)


#### Example 30

Project: chemlab
Source File: __init__.py
def center_of_mass(r_array, m_array):
    '''Return the mass center given an array of coordinates of shape
    (n_coord, coord_dimensions) and an array of masses (weights).

    '''
    return np.average(r_array, axis=0, weights=m_array)


#### Example 31

Project: unshred
Source File: lines.py
def _get_mean_angle(lines):
    """Circular-mean direction of a set of line segments, in radians.

    Each segment (x1, y1, x2, y2) becomes a unit complex number (image y
    axis flipped so angles follow the mathematical convention); the phase
    of the averaged unit vectors is the mean direction, normalised into
    [-pi/2, pi/2).
    """
    unit_vectors = []
    for x1, y1, x2, y2 in lines:
        # Negate y: image coordinates grow downwards.
        c = complex(x2, -y2) - complex(x1, -y1)
        unit = c / abs(c)
        unit_vectors.append(unit)

    avg_angle = cmath.phase(numpy.average(unit_vectors))

    return _normalize_angle(avg_angle, [-math.pi / 2, math.pi / 2], math.pi)


#### Example 32

    def compare(self):
        """Pairwise two-sample t-test over every pair of models' errors.

        Prints which model is significantly better (p < 0.05) along with
        the average errors, or that the pair is not significantly different.
        """
        for i in range(self.count):
            for j in range(self.count):
                if i < j:  # each unordered pair once
                    tst, pvalue = stats.ttest_ind(self.errors[i], self.errors[j])
                    if pvalue < 0.05:
                        print("{0} is significantly better than {1}".format(self.names[i], self.names[j]))
                        print("{0} avg err = {1}, {2} avg err = {3}".format(
                            self.names[i], np.average(self.errors[i]),
                            self.names[j], np.average(self.errors[j])
                        ))
                    else:
                        print("{0} and {1} are not significantly different".format(self.names[i], self.names[j]))


#### Example 33

def compute_mb_analytic(x, y):
    """Given arrays of x, y computes m, b analytically.

    Closed-form least-squares fit of ``y = m*x + b``:
    m = (E[x]E[y] - E[xy]) / (E[x]^2 - E[x^2]),  b = E[y] - m*E[x].
    """
    xbar = np.average(x)
    ybar = np.average(y)
    m = (xbar * ybar - np.average(x * y)) / (xbar ** 2 - np.average(x ** 2))
    b = ybar - m * xbar
    return m, b


#### Example 34

def mean_euclid(covmats, sample_weight=None):
    r"""Return the mean covariance matrix according to the euclidean metric :

    .. math::
            \mathbf{C} = \frac{1}{N} \sum_i \mathbf{C}_i

    :param covmats: Covariance matrices set, Ntrials X Nchannels X Nchannels
    :param sample_weight: the weight of each sample

    :returns: the mean covariance matrix

    """
    return numpy.average(covmats, axis=0, weights=sample_weight)


#### Example 35

Project: pyRiemann
Source File: mean.py
def mean_euclid(covmats, sample_weight=None):
    r"""Return the mean covariance matrix according to the euclidean metric :

    .. math::
            \mathbf{C} = \frac{1}{N} \sum_i \mathbf{C}_i

    :param covmats: Covariance matrices set, Ntrials X Nchannels X Nchannels
    :param sample_weight: the weight of each sample

    :returns: the mean covariance matrix

    """
    return numpy.average(covmats, axis=0, weights=sample_weight)


#### Example 36

Project: fusedwind
Source File: geometry_vt.py
    def computeLETE(self):
        """
        computes the leading and trailing edge of the airfoil.

        TE is computed as the mid-point between lower and upper TE points
        LE is computed as the point with maximum distance from the TE.
        """

        # Mid-point of the first and last surface points = trailing edge.
        self.TE = np.array([np.average(self.points[[0, -1], 0]),
                            np.average(self.points[[0, -1], 1])])

        # Curve parameter maximising distance from the TE (minimising _sdist).
        res = minimize(self._sdist, (0.5), method='SLSQP', bounds=[(0, 1)])
        self.sLE = res['x']
        # NOTE(review): x and y are evaluated with the same spline call here;
        # confirm they are not meant to use separate x/y splines.
        xLE = self._splines(self.sLE)
        yLE = self._splines(self.sLE)
        self.LE = np.array([xLE, yLE])
        self.curvLE = NaturalCubicSpline(self.s, curvature(self.points))(self.sLE)
        self.chord = np.linalg.norm(self.LE - self.TE)


#### Example 37

Project: h5pyViewer
Source File: FrmPyFAI.py
  def InitChild(self, data):
    """Set up the azimuthal integrator for ``data`` and plot the 1D powder
    pattern on a logarithmic y axis."""
    fig = self.fig
    ax = self.ax
    ctrX, ctrY = self.center = FindCenter(data)
    self.ai = pyFAI.AzimuthalIntegrator(1.e3, ctrX, ctrY, 0.0, 0.0, 0.0, 1.e0, 1.e0)
    # canvas=self.canvas
    # Radial bin count: half of the mean image dimension.
    self.numPtTh = int(np.average(data.shape) / 2.)
    out = self.ai.xrpd(data, self.numPtTh)
    self.hl = ax.plot(*out)
    ax.set_yscale('log')


#### Example 38

Project: h5pyViewer
Source File: FrmPyFAI.py
  def __init__(self, parent, title, hid):
    """Build the frame, polar-transform the raw image with pyFAI and show it."""
    HdfImageGLFrame.__init__(self, parent, title, hid)
    # HdfPyFAI1DFrame(self, title, hid)
    canvas = self.canvas
    raw = canvas.data
    ctrX, ctrY = FindCenter(raw)
    self.ai = pyFAI.AzimuthalIntegrator(1.e3, ctrX, ctrY, 0.0, 0.0, 0.0, 1.e0, 1.e0)

    # NOTE(review): a bare no-op `raw` expression statement in the original
    # was removed here.
    # Radial bin count: half of the mean image dimension.
    self.numPtTh = int(np.average(raw.shape) / 2.)
    self.numPtCh = 360

    imgPolar, theta, chi = self.ai.xrpd2(raw, self.numPtTh, self.numPtCh)
    canvas.data = imgPolar
    print(imgPolar.shape)


#### Example 39

Project: h5pyViewer
Source File: GLCanvasImg.py
  def AutoRange(self, txrTrfFunc):
    """Choose a display range (vmin, vmax) clipped to mean +/- 3*stdev.

    txrTrfFunc 0 uses the raw data; 1 first drops NaN samples.
    NOTE(review): any other value leaves vmin/vmax unbound (NameError),
    as in the original -- confirm callers only pass 0 or 1.
    """
    data = self.data
    self.txrTrfFunc = txrTrfFunc
    if txrTrfFunc == 0:
      avg = np.average(data)
      std = np.std(data)
      vmin = data.min()
      vmax = data.max()
      vmin = max(vmin, avg - 3 * std)
      vmax = min(vmax, avg + 3 * std)
    elif txrTrfFunc == 1:
      # Mask out NaNs before computing the statistics.
      msk = ~np.isnan(data)
      msk = data[msk]
      avg = np.average(msk)
      std = np.std(msk)
      vmin = msk.min()
      vmax = msk.max()
      vmin = max(vmin, avg - 3 * std)
      vmax = min(vmax, avg + 3 * std)
    self.dataRange = (vmin, vmax)


#### Example 40

Project: IkaLog
Source File: special_weapon.py
    def _match_phase1(self, context, img_special, img_last_special):
#
# Phase 1
#
# Crop the area special weapon message supposed to be appeared.
# Compare with last frame, and check if it is (almost) same with
# the last frame.
#

img_special_diff = abs(img_special - img_last_special)
matched = bool(np.average(img_special_diff) < 90)
return matched


#### Example 41

Project: facefit
Source File: fern.py
    @staticmethod
def _highest_correlated_feature(dir, targets, cov_pp, pixel_values, pixel_averages, var_pp_sum):
n_pixels = len(cov_pp)
# Project each target onto random direction.
lengths = targets.dot(dir)
cov_l_p = pixel_values.dot(lengths)/len(targets) - np.average(lengths) * pixel_averages
correlation = (cov_l_p[:, None] - cov_l_p) / np.sqrt(np.std(lengths) * (var_pp_sum - 2 * cov_pp))

res = np.nanargmax(correlation)
return res / n_pixels, res % n_pixels


#### Example 42

Project: facefit
Source File: fern.py
    @staticmethod
def _highest_correlated_feature(dir, targets, cov_pp, pixel_values, pixel_averages, var_pp_sum):
n_pixels = len(cov_pp)
# Project each target onto random direction.
lengths = targets.dot(dir)
cov_l_p = pixel_values.dot(lengths)/len(targets) - np.average(lengths) * pixel_averages
correlation = (cov_l_p[:, None] - cov_l_p) / np.sqrt(np.std(lengths) * (var_pp_sum - 2 * cov_pp))

res = np.nanargmax(correlation)
return res / n_pixels, res % n_pixels


#### Example 43

Project: quietnet
Source File: quietnet.py
def has_freq(fft_sample, freq_in_hertz, rate, chunk, offset=3):
peak_index = get_peak(freq_in_hertz, rate, chunk)
top = max(fft_sample[peak_index-1:peak_index+2])

avg_around_peak = np.average(weighted_values_around_peak(fft_sample, peak_index, offset))

if top > avg_around_peak:
return fft_sample[peak_index]
else:
return 0


#### Example 44

Project: quietnet
Source File: quietnet.py
def has_freq(fft_sample, freq_in_hertz, rate, chunk, offset=3):
    """Return the FFT magnitude at the bin for ``freq_in_hertz`` when it
    stands out above its neighbourhood, else 0."""
    peak_index = get_peak(freq_in_hertz, rate, chunk)
    # Strongest value among the peak bin and its immediate neighbours.
    top = max(fft_sample[peak_index - 1:peak_index + 2])

    avg_around_peak = np.average(weighted_values_around_peak(fft_sample, peak_index, offset))

    if top > avg_around_peak:
        return fft_sample[peak_index]
    else:
        return 0


#### Example 45

Project: luminol
Source File: time_series.py
  def average(self, default=None):
    """
    Calculate the average value over the time series.

    :param default: Value to return as a default should the calculation not be possible.
    :return: Float representing the average value or None.
    """
    # numpy.asscalar was removed in NumPy 1.23; ndarray.item() is its
    # documented replacement and returns the same Python scalar.
    return numpy.average(self.values).item() if self.values else default


#### Example 46

Source File: time_series.py
  def average(self, default=None):
    """
    Calculate the average value over the time series.

    :param default: Value to return as a default should the calculation not be possible.
    :return: Float representing the average value or None.
    """
    # numpy.asscalar was removed in NumPy 1.23; ndarray.item() is its
    # documented replacement and returns the same Python scalar.
    return numpy.average(self.values).item() if self.values else default


#### Example 47

Source File: time_series.py
  def average(self, default=None):
    """
    Calculate the average value over the time series.

    :param default: Value to return as a default should the calculation not be possible.
    :return: Float representing the average value or None.
    """
    # numpy.asscalar was removed in NumPy 1.23; ndarray.item() is its
    # documented replacement and returns the same Python scalar.
    return numpy.average(self.values).item() if self.values else default


#### Example 48

Project: trimesh
Source File: base.py
    @util.cache_decorator
    def centroid(self):
        '''
        The point in space which is the average of the triangle centroids
        weighted by the area of each triangle.

        This will be valid even for non- watertight meshes, unlike self.center_mass

        Returns
        ----------
        centroid: (3,) float, the average vertex
        '''

        # use the centroid of each triangle weighted by
        # the area of the triangle to find the overall centroid
        centroid = np.average(self.triangles_center,
                              axis=0,
                              weights=self.area_faces)
        return centroid


#### Example 49

Project: trimesh
Source File: base.py
    @util.cache_decorator
    def centroid(self):
        '''
        The point in space which is the average of the triangle centroids
        weighted by the area of each triangle.

        This will be valid even for non- watertight meshes, unlike self.center_mass

        Returns
        ----------
        centroid: (3,) float, the average vertex
        '''

        # use the centroid of each triangle weighted by
        # the area of the triangle to find the overall centroid
        centroid = np.average(self.triangles_center,
                              axis=0,
                              weights=self.area_faces)
        return centroid


#### Example 50

Project: LO-PHI
Source File: memory_aggregate_data.py
def extract_graph_data(aggregate_data):
    """Collect the "AVERAGE" rate series for each memory operation.

    Prints summary statistics for each series as a side effect and returns
    a mapping of operation name -> rates.  The parenthesised prints keep
    Python 2 behaviour while also being valid Python 3.
    """
    rate_data = {}
    for mem_operation in aggregate_data:
        for mem_type in aggregate_data[mem_operation]:
            rates = aggregate_data[mem_operation][mem_type]
            # Only the AVERAGE series is of interest.
            if mem_type != "AVERAGE":
                continue
            print("Operation: %s, Type: %s" % (mem_operation, mem_type))
            print("   Avg: %f, StDev: %f, Count: %d" % (numpy.average(rates),
                                                        numpy.std(rates),
                                                        len(rates)))
            rate_data[mem_operation] = rates

    return rate_data