System.Array.Clear()

Here are examples of the C# API System.Array.Clear() taken from open-source projects. By voting up you can indicate which examples are most useful and appropriate.

6 Examples
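
For reference, here is a minimal sketch of what the no-argument Clear() calls in these examples do. The BCL method itself is System.Array.Clear(Array array, int index, int length), which resets a range of elements to the element type's default value; the examples below call a Clear() extension method (in the style of Accord.NET's array extensions) that, presumably, wraps the BCL call over the whole array:

using System;

public static class ArrayClearExtensions
{
    // Hypothetical helper mirroring the no-argument Clear() extension used
    // in the examples below; assumed to zero the entire array by delegating
    // to System.Array.Clear(array, index, length).
    public static T[] Clear<T>(this T[] array)
    {
        Array.Clear(array, 0, array.Length);
        return array;
    }
}

public static class Demo
{
    public static void Main()
    {
        double[] w = { 1.0, 2.0, 3.0 };
        w.Clear();                                // every element becomes default(double), i.e. 0.0
        Console.WriteLine(string.Join(", ", w));  // prints: 0, 0, 0

        bool[] cover = { true, true, false };
        cover.Clear();                            // booleans reset to default(bool), i.e. false
    }
}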

Source: AveragedStochasticGradientDescent.cs (MIT License, from PacktPublishing)

void trainOne(TInput x, bool y, double eta, double mu)
        {
            // Renormalize if needed
            if (aDivisor > 1e5 || wDivisor > 1e5)
                renorm();

            // Forward
            double s = Kernel.Function(w, x) / wDivisor + wBias;

            // SGD update for regularization term
            wDivisor = wDivisor / (1 - eta * lambda);

            // SGD update for loss term
            double d = -loss.Derivative(y, s);

            double etd = eta * d * wDivisor;
            if (etd != 0)
Kernel.Product(etd, x, accumulate: w);

            // Averaging
            if (mu >= 1) 
            {
                a.Clear();
                aDivisor = wDivisor;
                wFraction = 1;
            }
            else if (mu > 0)
            {
                if (etd != 0)
Kernel.Product(-wFraction * etd, x, accumulate: a);
                aDivisor = aDivisor / (1 - mu);
                wFraction = wFraction + mu * aDivisor / wDivisor;
            }

            // same for the bias
            if (bias)
            {
                double etab = eta * 0.01;
                if (regularizedBias)
                    wBias *= (1 - etab * lambda);
                wBias += etab * d;
                aBias += mu * (wBias - aBias);
            }
        }
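
In this example, a.Clear() restarts the averaging phase: once mu reaches 1 the accumulated average a is zeroed, its divisor is synchronized with the weight divisor, and subsequent updates (the mu > 0 branch) rebuild the average from the current weight vector.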

Source: Munkres.cs (MIT License, from PacktPublishing)

private int step_three()
        {
            colCover.Clear();
            rowCover.Clear();
            primeZ.Clear();

            bool done = true;
            int count = 0;
            for (int i = 0; i < starZ.Length; i++)
            {
                int j = starZ[i];
                if (j >= 0)
                {
                    colCover[j] = true;
                    count++;
                }
                else
                {
                    done = false;
                }
            }

            if (done)
                return 7; // done
            return 4;
        }
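
Here Clear() resets the algorithm's bookkeeping arrays to their default values before step three of the Munkres assignment algorithm re-covers the columns: clearing the boolean cover arrays uncovers every row and column (default(bool) is false), and primeZ is wiped along with them. The method then covers each column holding a starred zero; returning 7 signals the assignment is complete, while returning 4 continues with the next step.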

Source: IterativeReweightedLeastSquares.cs (MIT License, from PacktPublishing)

public TModel Learn(double[][] x, double[] y, double[] weights = null)
        {
            // Regress using Iteratively Reweighted Least Squares estimation.

            // References:
            //  - Bishop, Christopher M.; Pattern Recognition 
            //    and Machine Learning. Springer; 1st ed. 2006.

            if (x.Length != y.Length)
            {
                throw new DimensionMismatchException("outputs",
                    "The number of input vectors and their replacedociated output values must have the same size.");
            }

            if (regression == null)
            {
                Initialize(new TModel()
                {
                    NumberOfInputs = x.Columns()
                });
            }

            // Initial definitions and memory allocations
            int N = x.Length;

            double[] errors = new double[N];
            double[] w = new double[N];
            convergence.Clear();

            double[][] design = x.InsertColumn(value: 1, index: 0);

            do
            {
                if (Token.IsCancellationRequested)
                    break;

                // Compute errors and weighting matrix
                for (int i = 0; i < x.Length; i++)
                {
                    double z = regression.Linear.Transform(x[i]);
                    double actual = regression.Link.Inverse(z);

                    // Calculate error vector
                    errors[i] = actual - y[i];

                    // Calculate weighting matrix
                    w[i] = regression.Link.Derivative2(actual);
                }

                if (weights != null)
                {
                    for (int i = 0; i < weights.Length; i++)
                    {
                        errors[i] *= weights[i];
                        w[i] *= weights[i];
                    }
                }

                // Reset Hessian matrix and gradient
                gradient.Clear();
                hessian.Clear();

                // (Re-) Compute error gradient
                for (int j = 0; j < design.Length; j++)
                    for (int i = 0; i < gradient.Length; i++)
                        gradient[i] += design[j][i] * errors[j];

                // (Re-) Compute weighted "Hessian" matrix 
                for (int k = 0; k < w.Length; k++)
                {
                    double[] row = design[k];
                    for (int j = 0; j < row.Length; j++)
                        for (int i = 0; i < row.Length; i++)
                            hessian[j][i] += row[i] * row[j] * w[k];
                }

                // Apply L2 regularization
                if (lambda > 0)
                {
                    // https://www.cs.ubc.ca/~murphyk/Teaching/CS540-Fall08/L6.pdf
                    for (int i = 0; i < gradient.Length; i++)
                    {
                        gradient[i] += lambda * regression.GetCoefficient(i);
                        hessian[i][i] += lambda;
                    }
                }

                decomposition = new JaggedSingularValueDecomposition(hessian);
                deltas = decomposition.Solve(gradient);

                previous = (double[])this.Solution.Clone();

                // Update coefficients using the calculated deltas
                for (int i = 0; i < regression.Weights.Length; i++)
                    regression.Weights[i] -= deltas[i + 1];
                regression.Intercept -= deltas[0];

                // Return the relative maximum parameter change
                convergence.NewValue = deltas.Abs().Max();

                if (Token.IsCancellationRequested)
                    break;

            } while (!convergence.HasConverged);

            if (computeStandardErrors)
            {
                // Grab the regression information matrix
                double[][] inverse = decomposition.Inverse();

                // Calculate coefficients' standard errors
                double[] standardErrors = regression.StandardErrors;
                for (int i = 0; i < standardErrors.Length; i++)
                    standardErrors[i] = Math.Sqrt(inverse[i][i]);
            }

            return regression;
        }
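
The gradient.Clear() and hessian.Clear() calls above reset the accumulators at the start of each IRLS iteration before the += loops rebuild them. Since hessian is a jagged double[][], the Clear() extension in play must also handle jagged arrays; a minimal sketch of such an overload, assuming it simply zeroes each row in turn:

public static class JaggedArrayExtensions
{
    // Hypothetical jagged overload: zeroes every row of the matrix in place
    // and returns it, mirroring the single-array Clear() sketched earlier.
    public static double[][] Clear(this double[][] matrix)
    {
        foreach (double[] row in matrix)
            System.Array.Clear(row, 0, row.Length);
        return matrix;
    }
}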

Source: MultinomialLogisticLearning`1.cs (MIT License, from PacktPublishing)

internal double[] crossEntropyGradient(double[] w)
        {
            gradient.Clear();

            IntRange miniBatch = miniBatches[current++];
            if (current >= miniBatches.Length)
                current = 0;

            for (int i = miniBatch.Min; i < miniBatch.Max; i++)
            {
                double[] x = inputs[i];
                int y = outputs[i];

                this.compute(w, x, log_y_hat);

                for (int s = 1, c = 0; s < log_y_hat.Length; s++)
                {
                    double h = Math.Exp(log_y_hat[s]);

                    if (s == y)
                    {
                        gradient[c++] += 1 * h - 1;
                        for (int p = 0; p < x.Length; p++)
                            gradient[c++] += x[p] * h - x[p];
                    }
                    else
                    {
                        gradient[c++] += h;
                        for (int p = 0; p < x.Length; p++)
                            gradient[c++] += x[p] * h;
                    }
                }
            }

            for (int i = 0; i < gradient.Length; i++)
                gradient[i] /= (double)miniBatch.Length;

            return gradient;
        }
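
The gradient.Clear() at the top is the usual zero-the-accumulator pattern: the gradient buffer is reused across mini-batches (which this method cycles through via the current index), so it must be reset before the += accumulation loop, and the final division by miniBatch.Length turns the accumulated sum into a mean over the batch.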

Source: QuasiNewtonLearning.cs (MIT License, from PacktPublishing)

private double[] gradient(T[][] observations, int[][] labels, double[] g)
        {
            var model = Model;
            var function = model.Function;
            int states = model.States;
            int n = observations.Length;
            int d = Model.Function.Weights.Length;
            int Tmax = observations.Max(x => x.Length);
            int progress = 0;

            g.Clear();


            // Compute sequence probabilities
            Parallel.For(0, observations.Length, ParallelOptions,

                () =>
                {
                    // Create thread-local storage
                    var work = new double[states + 1, states][];
                    for (int j = 0; j < states + 1; j++)
                        for (int k = 0; k < states; k++)
                            work[j, k] = new double[Tmax];

                    return new
                    {
                        bwd = new double[Tmax, states],
                        fwd = new double[Tmax, states],
                        sum1 = new double[d],
                        sum2 = new double[d],
                        work = work,
                        count = new int[] { 0 }
                    };
                },

                (i, state, local) =>
                {
                    T[] x = observations[i];
                    var fwd = local.fwd;
                    var bwd = local.bwd;
                    var sum1 = local.sum1;
                    var sum2 = local.sum2;
                    var work = local.work;
                    ForwardBackwardAlgorithm.Forward(function.Factors[0], x, fwd);
                    ForwardBackwardAlgorithm.Backward(function.Factors[0], x, bwd);
double z = partition(fwd, x);

                    for (int prev = -1; prev < states; prev++)
                    {
                        for (int next = 0; next < states; next++)
                        {
                            double[] Pis = work[prev + 1, next];
                            for (int t = 0; t < x.Length; t++)
                                Pis[t] = p(prev, next, x, t, fwd, bwd, function) / z;
                        }
                    }

                    // Compute the gradient w.r.t. each feature
                    //  function in the model's potential function.

                    int[] y = labels[i];

                    Parallel.For(0, g.Length, ParallelOptions, k =>
                    {
                        IFeature<T> feature = function.Features[k];

                        // Compute first term of the partial derivative
                        sum1[k] += feature.Compute(-1, y[0], x, 0);
                        for (int t = 1; t < x.Length; t++)
                            sum1[k] += feature.Compute(y[t - 1], y[t], x, t);

                        // Compute second term of the partial derivative
                        for (int prev = -1; prev < states; prev++)
                        {
                            for (int next = 0; next < states; next++)
                            {
                                double[] Pis = work[prev + 1, next];
                                for (int t = 0; t < Pis.Length; t++)
                                    sum2[k] += feature.Compute(prev, next, x, t) * Pis[t];
                            }
                        }
                    });

                    local.count[0]++;
                    return local;
                },

                (local) =>
                {
                    lock (g)
                    {
                        for (int k = 0; k < g.Length; k++)
                            g[k] -= (local.sum1[k] - local.sum2[k]);
                        progress += local.count[0];
                    }
                }
            );

            return g;
        }
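
As in the previous example, g.Clear() zeroes the caller-supplied gradient buffer before accumulation begins. Here the accumulation itself is parallel: each thread sums into its own thread-local sum1 and sum2 arrays and merges them into g under a lock, so only the initial reset needs to happen up front.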

Source: ArrayExtensions.cs (Apache License 2.0, from Proxem)

public static Array<Real> Sum(this Array<Real> a, int axis, Func<Real, Real> f, Array<Real> result = null, bool keepDims = false)
        {
            if (axis < 0) axis = a.Shape.Length + axis;

            result = result == null ? NN.Zeros<Real>(GetAggregatorResultShape(a, axis, true)) : result.Clear();

            var slice = a.Slices(); // TODO: create a new ElementwiseOp
            int ndim = a.Shape[axis];
            for (int d = 0; d < ndim; ++d)
            {
                slice[axis] = (d, d+1);
                Array_.ElementwiseOp(a[slice], result, (n, _a, off_a, step_a, _r, off_r, step_r) => {
                    // Accumulate f(a) into the result buffer, element by element.
                    for (int i = 0; i < n; i++)
                    {
                        _r[off_r] += f(_a[off_a]);
                        off_a += step_a;
                        off_r += step_r;
                    }
                });
            }
            if (!keepDims)
                result = result.Reshape(RemoveAxis(result.Shape, axis));
            return result;
        }
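
Here Clear() serves a slightly different purpose: when the caller passes in a result buffer, it is wiped and reused instead of allocating a fresh array via NN.Zeros, avoiding a per-call allocation. Note that Array<Real>.Clear() must return the cleared array itself, since its result is assigned to result inline in the ?: expression.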