com.google.common.util.concurrent.AtomicDouble

Here are examples of the Java API class com.google.common.util.concurrent.AtomicDouble, taken from open source projects.

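Before the project examples, a minimal self-contained sketch of the basic API (the class and variable names are just for illustration). Guava's AtomicDouble stores the value as raw long bits inside an AtomicLong, so arithmetic methods such as addAndGet are lock-free compare-and-set loops, and compareAndSet compares bit patterns rather than numeric equality:

import com.google.common.util.concurrent.AtomicDouble;

public class AtomicDoubleBasics {
    public static void main(String[] args) {
        AtomicDouble d = new AtomicDouble();          // initial value 0.0
        double after = d.addAndGet(2.5);              // atomic read-modify-write; returns 2.5
        boolean swapped = d.compareAndSet(2.5, 10.0); // succeeds: raw bit patterns match
        System.out.println(after + " " + swapped + " " + d.get()); // 2.5 true 10.0
    }
}
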
1. QuantileDigest#getHistogram()

Project: pinot
Source File: QuantileDigest.java
/**
 * Get the exponentially-decayed approximate counts of values in multiple buckets. The elements in
 * the provided list denote the upper bound of each of the buckets and must be sorted in ascending
 * order.
 *
 * The approximate count in each bucket is guaranteed to be within 2 * totalCount * maxError of
 * the real count.
 */
public List<Bucket> getHistogram(List<Long> bucketUpperBounds) {
    checkArgument(Ordering.natural().isOrdered(bucketUpperBounds), "buckets must be sorted in increasing order");
    final ImmutableList.Builder<Bucket> builder = ImmutableList.builder();
    final PeekingIterator<Long> iterator = Iterators.peekingIterator(bucketUpperBounds.iterator());
    final AtomicDouble sum = new AtomicDouble();
    final AtomicDouble lastSum = new AtomicDouble();
    // for computing the weighted average of values in each bucket
    final AtomicDouble bucketWeightedSum = new AtomicDouble();
    final double normalizationFactor = weight(TimeUnit.NANOSECONDS.toSeconds(ticker.read()));
    postOrderTraversal(root, new Callback() {

        @Override
        public boolean process(Node node) {
            while (iterator.hasNext() && iterator.peek() <= node.getUpperBound()) {
                double bucketCount = sum.get() - lastSum.get();
                Bucket bucket = new Bucket(bucketCount / normalizationFactor, bucketWeightedSum.get() / bucketCount);
                builder.add(bucket);
                lastSum.set(sum.get());
                bucketWeightedSum.set(0);
                iterator.next();
            }
            bucketWeightedSum.addAndGet(node.getMiddle() * node.weightedCount);
            sum.addAndGet(node.weightedCount);
            return iterator.hasNext();
        }
    });
    while (iterator.hasNext()) {
        double bucketCount = sum.get() - lastSum.get();
        Bucket bucket = new Bucket(bucketCount / normalizationFactor, bucketWeightedSum.get() / bucketCount);
        builder.add(bucket);
        iterator.next();
    }
    return builder.build();
}
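
Worth noting: the post-order traversal above runs on a single thread, so these AtomicDouble instances are not doing concurrency work; they serve as mutable double holders, since locals captured by the anonymous Callback must be effectively final. A minimal sketch of that holder pattern (sumOf is a hypothetical helper, not part of QuantileDigest):

import com.google.common.util.concurrent.AtomicDouble;
import java.util.List;

public class MutableHolder {
    // A plain local double could not be reassigned from inside the lambda,
    // so the AtomicDouble stands in as a mutable box.
    static double sumOf(List<Double> values) {
        final AtomicDouble total = new AtomicDouble();
        values.forEach(v -> total.addAndGet(v));
        return total.get();
    }

    public static void main(String[] args) {
        System.out.println(sumOf(List.of(1.0, 2.0, 3.5))); // 6.5
    }
}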

2. DHistogram#init()

Project: h2o-3
Source File: DHistogram.java
// Big allocation of arrays
public void init() {
    assert _w == null;
    if (_histoType == SharedTreeModel.SharedTreeParameters.HistogramType.Random) {
        // every node makes the same split points
        Random rng = RandomUtils.getRNG((Double.doubleToRawLongBits(((_step + 0.324) * _min + 8.3425) + 89.342 * _maxEx) + 0xDECAF * _nbin + 0xC0FFEE * _isInt + _seed));
        assert (_nbin > 1);
        _splitPts = new double[_nbin];
        _splitPts[0] = 0;
        _splitPts[_nbin - 1] = _nbin - 1;
        for (int i = 1; i < _nbin - 1; ++i) _splitPts[i] = rng.nextFloat() * (_nbin - 1);
        Arrays.sort(_splitPts);
    } else if (_histoType == SharedTreeModel.SharedTreeParameters.HistogramType.QuantilesGlobal) {
        assert (_splitPts == null);
        if (_globalQuantilesKey != null) {
            HistoQuantiles hq = DKV.getGet(_globalQuantilesKey);
            if (hq != null) {
                _splitPts = ((HistoQuantiles) DKV.getGet(_globalQuantilesKey)).splitPts;
                if (_splitPts != null) {
                    //            Log.info("Obtaining global splitPoints: " + Arrays.toString(_splitPts));
                    _splitPts = ArrayUtils.limitToRange(_splitPts, _min, _maxEx);
                    if (_splitPts.length > 1 && _splitPts.length < _nbin)
                        _splitPts = ArrayUtils.padUniformly(_splitPts, _nbin);
                    if (_splitPts.length <= 1) {
                        //abort, fall back to uniform binning
                        _splitPts = null;
                        _histoType = SharedTreeModel.SharedTreeParameters.HistogramType.UniformAdaptive;
                    } else {
                        _hasQuantiles = true;
                        _nbin = (char) _splitPts.length;
                    //              Log.info("Refined splitPoints: " + Arrays.toString(_splitPts));
                    }
                }
            }
        }
    } else
        assert (_histoType == SharedTreeModel.SharedTreeParameters.HistogramType.UniformAdaptive);
    //otherwise AUTO/UniformAdaptive
    assert (_nbin > 0);
    _w = MemoryManager.malloc8d(_nbin);
    _wY = MemoryManager.malloc8d(_nbin);
    _wYY = MemoryManager.malloc8d(_nbin);
    _wNA = new AtomicDouble();
    _wYNA = new AtomicDouble();
    _wYYNA = new AtomicDouble();
}
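
Unlike the holder usage in the previous example, accumulators such as _wNA here can plausibly be updated from multiple histogram-filling threads, which is where AtomicDouble's lock-free addAndGet pays off. A standalone sketch of that access pattern (the parallel stream, iteration count, and 0.5 weight are arbitrary choices for the demo):

import com.google.common.util.concurrent.AtomicDouble;
import java.util.stream.IntStream;

public class ConcurrentWeights {
    public static void main(String[] args) {
        AtomicDouble wNA = new AtomicDouble();
        // Many worker threads add observation weights into one shared accumulator;
        // addAndGet retries internally via compare-and-set, so no lock is needed.
        IntStream.range(0, 1_000).parallel().forEach(i -> wNA.addAndGet(0.5));
        System.out.println(wNA.get()); // 500.0 (0.5 is exactly representable)
    }
}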

3. StatsTest#testDoubleExport()

Project: commons
Source File: StatsTest.java
@Test
public void testDoubleExport() {
    AtomicDouble var = Stats.exportDouble("test_double");
    assertCounter("test_double", 0.0);
    var.addAndGet(1.1);
    assertCounter("test_double", 1.1);
    var.addAndGet(5.55);
    assertCounter("test_double", 6.65);
}
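
A caution when writing tests like this: 1.1 and 5.55 have no exact binary representation, so asserting exact equality on their sum is fragile; presumably assertCounter tolerates this, but a standalone version is safer with an explicit delta. A hypothetical JUnit 4 sketch, independent of the Stats class:

import static org.junit.Assert.assertEquals;

import com.google.common.util.concurrent.AtomicDouble;
import org.junit.Test;

public class AtomicDoubleSumTest {
    @Test
    public void addAndGetAccumulates() {
        AtomicDouble var = new AtomicDouble();
        // addAndGet returns the updated value, so each step can be asserted directly.
        assertEquals(1.1, var.addAndGet(1.1), 1e-9);
        assertEquals(6.65, var.addAndGet(5.55), 1e-9);
    }
}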

4. QuantileDigest#getHistogram()

Project: airlift
Source File: QuantileDigest.java
/**
 * Get the exponentially-decayed approximate counts of values in multiple buckets. The elements in
 * the provided list denote the upper bound of each of the buckets and must be sorted in ascending
 * order.
 *
 * The approximate count in each bucket is guaranteed to be within 2 * totalCount * maxError of
 * the real count.
 */
public List<Bucket> getHistogram(List<Long> bucketUpperBounds) {
    checkArgument(Ordering.natural().isOrdered(bucketUpperBounds), "buckets must be sorted in increasing order");
    final ImmutableList.Builder<Bucket> builder = ImmutableList.builder();
    final PeekingIterator<Long> iterator = Iterators.peekingIterator(bucketUpperBounds.iterator());
    final AtomicDouble sum = new AtomicDouble();
    final AtomicDouble lastSum = new AtomicDouble();
    // for computing the weighted average of values in each bucket
    final AtomicDouble bucketWeightedSum = new AtomicDouble();
    final double normalizationFactor = weight(TimeUnit.NANOSECONDS.toSeconds(ticker.read()));
    postOrderTraversal(root, new Callback() {

        @Override
        public boolean process(Node node) {
            while (iterator.hasNext() && iterator.peek() <= node.getUpperBound()) {
                double bucketCount = sum.get() - lastSum.get();
                Bucket bucket = new Bucket(bucketCount / normalizationFactor, bucketWeightedSum.get() / bucketCount);
                builder.add(bucket);
                lastSum.set(sum.get());
                bucketWeightedSum.set(0);
                iterator.next();
            }
            bucketWeightedSum.addAndGet(node.getMiddle() * node.weightedCount);
            sum.addAndGet(node.weightedCount);
            return iterator.hasNext();
        }
    });
    while (iterator.hasNext()) {
        double bucketCount = sum.get() - lastSum.get();
        Bucket bucket = new Bucket(bucketCount / normalizationFactor, bucketWeightedSum.get() / bucketCount);
        builder.add(bucket);
        iterator.next();
    }
    return builder.build();
}

5. QuantileDigest#validate()

Project: pinot
Source File: QuantileDigest.java
@VisibleForTesting
void validate() {
    final AtomicDouble sumOfWeights = new AtomicDouble();
    final AtomicInteger actualNodeCount = new AtomicInteger();
    final AtomicInteger actualNonZeroNodeCount = new AtomicInteger();
    if (root != null) {
        validateStructure(root);
        postOrderTraversal(root, new Callback() {

            @Override
            public boolean process(Node node) {
                sumOfWeights.addAndGet(node.weightedCount);
                actualNodeCount.incrementAndGet();
                if (node.weightedCount >= ZERO_WEIGHT_THRESHOLD) {
                    actualNonZeroNodeCount.incrementAndGet();
                }
                return true;
            }
        });
    }
    checkState(Math.abs(sumOfWeights.get() - weightedCount) < ZERO_WEIGHT_THRESHOLD, "Computed weight (%s) doesn't match summary (%s)", sumOfWeights.get(), weightedCount);
    checkState(actualNodeCount.get() == totalNodeCount, "Actual node count (%s) doesn't match summary (%s)", actualNodeCount.get(), totalNodeCount);
    checkState(actualNonZeroNodeCount.get() == nonZeroNodeCount, "Actual non-zero node count (%s) doesn't match summary (%s)", actualNonZeroNodeCount.get(), nonZeroNodeCount);
}
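
Beyond reusing the mutable-holder pattern, this method shows a detail worth copying: the accumulated weight is compared against an absolute tolerance rather than with ==, which is the right way to check summed doubles. A minimal sketch of that check using Guava's checkState (EPSILON is hypothetical, standing in for ZERO_WEIGHT_THRESHOLD):

import static com.google.common.base.Preconditions.checkState;

public class WeightCheck {
    // Hypothetical epsilon, playing the role of ZERO_WEIGHT_THRESHOLD above.
    private static final double EPSILON = 1e-5;

    static void checkWeights(double computed, double expected) {
        // The %s placeholders are only formatted if the check fails.
        checkState(Math.abs(computed - expected) < EPSILON,
                "Computed weight (%s) doesn't match summary (%s)", computed, expected);
    }

    public static void main(String[] args) {
        checkWeights(1.000001, 1.0);  // passes
        checkWeights(2.0, 1.0);       // throws IllegalStateException
    }
}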

6. QuantileDigest#validate()

Project: airlift
Source File: QuantileDigest.java
@VisibleForTesting
void validate() {
    final AtomicDouble sumOfWeights = new AtomicDouble();
    final AtomicInteger actualNodeCount = new AtomicInteger();
    final AtomicInteger actualNonZeroNodeCount = new AtomicInteger();
    if (root != null) {
        validateStructure(root);
        postOrderTraversal(root, new Callback() {

            @Override
            public boolean process(Node node) {
                sumOfWeights.addAndGet(node.weightedCount);
                actualNodeCount.incrementAndGet();
                if (node.weightedCount >= ZERO_WEIGHT_THRESHOLD) {
                    actualNonZeroNodeCount.incrementAndGet();
                }
                return true;
            }
        });
    }
    checkState(Math.abs(sumOfWeights.get() - weightedCount) < ZERO_WEIGHT_THRESHOLD, "Computed weight (%s) doesn't match summary (%s)", sumOfWeights.get(), weightedCount);
    checkState(actualNodeCount.get() == totalNodeCount, "Actual node count (%s) doesn't match summary (%s)", actualNodeCount.get(), totalNodeCount);
    checkState(actualNonZeroNodeCount.get() == nonZeroNodeCount, "Actual non-zero node count (%s) doesn't match summary (%s)", actualNonZeroNodeCount.get(), nonZeroNodeCount);
}