System.Collections.Generic.IEnumerable.Contains(double)

Here are examples of the C# API System.Collections.Generic.IEnumerable.Contains(double), taken from open source projects.

18 Examples
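
Before the project examples, here is a minimal self-contained sketch of the call itself. Enumerable.Contains(double) uses EqualityComparer<double>.Default, i.e. exact equality, so values produced by arithmetic may not match a stored value; the Contains overload that takes an IEqualityComparer<double> can be used for tolerance-based matching. The ToleranceComparer type below is illustrative and not part of any project listed here.

using System;
using System.Collections.Generic;
using System.Linq;

class ToleranceComparer : IEqualityComparer<double>
{
    private readonly double _epsilon;
    public ToleranceComparer(double epsilon) { _epsilon = epsilon; }
    public bool Equals(double x, double y) => Math.Abs(x - y) <= _epsilon;
    // Contains only calls Equals; GetHashCode must still be implemented for the interface.
    public int GetHashCode(double value) => 0;
}

class ContainsDemo
{
    static void Main()
    {
        IEnumerable<double> values = new[] { 0.3, 1.0, 2.5 };

        Console.WriteLine(values.Contains(0.3));        // True: identical bit pattern
        Console.WriteLine(values.Contains(0.1 + 0.2));  // False: 0.1 + 0.2 != 0.3 exactly
        Console.WriteLine(values.Contains(0.1 + 0.2,
            new ToleranceComparer(1e-9)));              // True: tolerance-based comparison
    }
}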

Source: GCodeParser.cs
with MIT License
from 3RD-Dimension

static void Parse(string line, int lineNumber)
		{
			MatchCollection matches = GCodeSplitter.Matches(line);

			List<Word> Words = new List<Word>(matches.Count);

			foreach (Match match in matches)
			{
				Words.Add(new Word() { Command = match.Groups[1].Value[0], Parameter = double.Parse(match.Groups[2].Value, Constants.DecimalParseFormat) });
			}

			for (int i = 0; i < Words.Count; i++)
			{
				if (Words[i].Command == 'N')
				{
					Words.RemoveAt(i--);
					continue;
				}

				if (IgnoreAxes.Contains(Words[i].Command) && Properties.Settings.Default.IgnoreAdditionalAxes)
				{
					Words.RemoveAt(i--);
					continue;
				}

				if (!ValidWords.Contains(Words[i].Command))
				{
					Warnings.Add($"ignoring unknown word (letter): \"{Words[i]}\". (line {lineNumber})");
					Words.RemoveAt(i--);
					continue;
				}

				if (Words[i].Command != 'F')
					continue;

				State.Feed = Words[i].Parameter;
				if (State.Unit == ParseUnit.Imperial)
					State.Feed *= 25.4;
				Words.RemoveAt(i--);
				continue;
			}

			for (int i = 0; i < Words.Count; i++)
			{
				if (Words[i].Command == 'M')
				{
					int param = (int)Words[i].Parameter;

					if (param != Words[i].Parameter || param < 0)
						throw new ParseException("M code can only have positive integer parameters", lineNumber);

					Commands.Add(new MCode() { Code = param, LineNumber = lineNumber });

					Words.RemoveAt(i);
					i--;
					continue;
				}

				if (Words[i].Command == 'S')
				{
					double param = Words[i].Parameter;

					if (param < 0)
						Warnings.Add($"spindle speed must be positive. (line {lineNumber})");

					Commands.Add(new Spindle() { Speed = Math.Abs(param), LineNumber = lineNumber });

					Words.RemoveAt(i);
					i--;
					continue;
				}

				if (Words[i].Command == 'G' && !MotionCommands.Contains(Words[i].Parameter))
				{
					#region UnitPlaneDistanceMode

					double param = Words[i].Parameter;

					if (param == 90)
					{
						State.DistanceMode = ParseDistanceMode.Absolute;
						Words.RemoveAt(i);
						i--;
						continue;
					}
					if (param == 91)
					{
						State.DistanceMode = ParseDistanceMode.Incremental;
						Words.RemoveAt(i);
						i--;
						continue;
					}
					if (param == 90.1)
					{
						State.ArcDistanceMode = ParseDistanceMode.Absolute;
						Words.RemoveAt(i);
						i--; // keep the index in step after removal (matches the other branches)
						continue;
					}
					if (param == 91.1)
					{
						State.ArcDistanceMode = ParseDistanceMode.Incremental;
						Words.RemoveAt(i);
						i--;
						continue;
					}
					if (param == 21)
					{
						State.Unit = ParseUnit.Metric;
						Words.RemoveAt(i);
						i--;
						continue;
					}
					if (param == 20)
					{
						State.Unit = ParseUnit.Imperial;
						Words.RemoveAt(i);
						i--;
						continue;
					}
					if (param == 17)
					{
						State.Plane = ArcPlane.XY;
						Words.RemoveAt(i);
						i--;
						continue;
					}
					if (param == 18)
					{
						State.Plane = ArcPlane.ZX;
						Words.RemoveAt(i);
						i--;
						continue;
					}
					if (param == 19)
					{
						State.Plane = ArcPlane.YZ;
						Words.RemoveAt(i);
						i--;
						continue;
					}
					if (param == 4)
					{
						if (i + 1 < Words.Count && Words[i + 1].Command == 'P')
						{
							if (Words[i + 1].Parameter < 0)
								Warnings.Add($"dwell time must be positive. (line {lineNumber})");

							Commands.Add(new Dwell() { Seconds = Math.Abs(Words[i + 1].Parameter), LineNumber = lineNumber });
							Words.RemoveAt(i + 1);
							Words.RemoveAt(i);
							i--;
							continue;
						}
					}

					Warnings.Add($"ignoring unknown command G{param}. (line {lineNumber})");
					Words.RemoveAt(i--);
					#endregion
				}
			}

			if (Words.Count == 0)
				return;

			int MotionMode = State.LastMotionMode;

			if (Words.First().Command == 'G')
			{
				MotionMode = (int)Words.First().Parameter;
				State.LastMotionMode = MotionMode;
				Words.RemoveAt(0);
			}

			if (MotionMode < 0)
				throw new ParseException("no motion mode active", lineNumber);

			double UnitMultiplier = (State.Unit == ParseUnit.Metric) ? 1 : 25.4;

			Vector3 EndPos = State.Position;

			if (State.DistanceMode == ParseDistanceMode.Incremental && State.PositionValid.Any(isValid => !isValid))
			{
				throw new ParseException("incremental motion is only allowed after an absolute position has been established (eg. with \"G90 G0 X0 Y0 Z5\")", lineNumber);
			}

			if ((MotionMode == 2 || MotionMode == 3) && State.PositionValid.Any(isValid => !isValid))
			{
				throw new ParseException("arcs (G2/G3) are only allowed after an absolute position has been established (eg. with \"G90 G0 X0 Y0 Z5\")", lineNumber);
			}

			#region FindEndPos
			{
				int Incremental = (State.DistanceMode == ParseDistanceMode.Incremental) ? 1 : 0;

				for (int i = 0; i < Words.Count; i++)
				{
					if (Words[i].Command != 'X')
						continue;
					EndPos.X = Words[i].Parameter * UnitMultiplier + Incremental * EndPos.X;
					Words.RemoveAt(i);
					State.PositionValid[0] = true;
					break;
				}

				for (int i = 0; i < Words.Count; i++)
				{
					if (Words[i].Command != 'Y')
						continue;
					EndPos.Y = Words[i].Parameter * UnitMultiplier + Incremental * EndPos.Y;
					Words.RemoveAt(i);
					State.PositionValid[1] = true;
					break;
				}

				for (int i = 0; i < Words.Count; i++)
				{
					if (Words[i].Command != 'Z')
						continue;
					EndPos.Z = Words[i].Parameter * UnitMultiplier + Incremental * EndPos.Z;
					Words.RemoveAt(i);
					State.PositionValid[2] = true;
					break;
				}
			}
			#endregion

			if (MotionMode != 0 && State.Feed <= 0)
			{
				throw new ParseException("feed rate undefined", lineNumber);
			}

			if (MotionMode == 1 && State.PositionValid.Any(isValid => !isValid))
			{
				Warnings.Add($"a feed move is used before an absolute position is established, height maps will not be applied to this motion. (line {lineNumber})");
			}

			if (MotionMode <= 1)
			{
				if (Words.Count > 0)
					Warnings.Add($"motion command must be last in line (ignoring unused words {string.Join(" ", Words)} in block). (line {lineNumber})");

				Line motion = new Line();
				motion.Start = State.Position;
				motion.End = EndPos;
				motion.Feed = State.Feed;
				motion.Rapid = MotionMode == 0;
				motion.LineNumber = lineNumber;
				State.PositionValid.CopyTo(motion.PositionValid, 0);

				Commands.Add(motion);
				State.Position = EndPos;
				return;
			}

			double U, V;

			bool IJKused = false;

			switch (State.Plane)
			{
				default:
					U = State.Position.X;
					V = State.Position.Y;
					break;
				case ArcPlane.YZ:
					U = State.Position.Y;
					V = State.Position.Z;
					break;
				case ArcPlane.ZX:
					U = State.Position.Z;
					V = State.Position.X;
					break;
			}

			#region FindIJK
			{
				int ArcIncremental = (State.ArcDistanceMode == ParseDistanceMode.Incremental) ? 1 : 0;

				for (int i = 0; i < Words.Count; i++)
				{
					if (Words[i].Command != 'I')
						continue;

					switch (State.Plane)
					{
						case ArcPlane.XY:
							U = Words[i].Parameter * UnitMultiplier + ArcIncremental * State.Position.X;
							break;
						case ArcPlane.YZ:
							throw new ParseException("current plane is YZ, I word is invalid", lineNumber);
						case ArcPlane.ZX:
							V = Words[i].Parameter * UnitMultiplier + ArcIncremental * State.Position.X;
							break;
					}

					IJKused = true;
					Words.RemoveAt(i);
					break;
				}

				for (int i = 0; i < Words.Count; i++)
				{
					if (Words[i].Command != 'J')
						continue;

					switch (State.Plane)
					{
						case ArcPlane.XY:
							V = Words[i].Parameter * UnitMultiplier + ArcIncremental * State.Position.Y;
							break;
						case ArcPlane.YZ:
							U = Words[i].Parameter * UnitMultiplier + ArcIncremental * State.Position.Y;
							break;
						case ArcPlane.ZX:
							throw new ParseException("current plane is ZX, J word is invalid", lineNumber);
					}

					IJKused = true;
					Words.RemoveAt(i);
					break;
				}

				for (int i = 0; i < Words.Count; i++)
				{
					if (Words[i].Command != 'K')
						continue;

					switch (State.Plane)
					{
						case ArcPlane.XY:
							throw new ParseException("current plane is XY, K word is invalid", lineNumber);
						case ArcPlane.YZ:
							V = Words[i].Parameter * UnitMultiplier + ArcIncremental * State.Position.Z;
							break;
						case ArcPlane.ZX:
							U = Words[i].Parameter * UnitMultiplier + ArcIncremental * State.Position.Z;
							break;
					}

					IJKused = true;
					Words.RemoveAt(i);
					break;
				}
			}
			#endregion

			#region ResolveRadius
			for (int i = 0; i < Words.Count; i++)
			{
				if (Words[i].Command != 'R')
					continue;

				if (IJKused)
					throw new ParseException("both IJK and R notation used", lineNumber);

				if (State.Position == EndPos)
					throw new ParseException("arcs in R-notation must have non-coincident start and end points", lineNumber);

				double Radius = Words[i].Parameter * UnitMultiplier;

				if (Radius == 0)
					throw new ParseException("radius can't be zero", lineNumber);

				double A, B;

				switch (State.Plane)
				{
					default:
						A = EndPos.X;
						B = EndPos.Y;
						break;
					case ArcPlane.YZ:
						A = EndPos.Y;
						B = EndPos.Z;
						break;
					case ArcPlane.ZX:
						A = EndPos.Z;
						B = EndPos.X;
						break;
				}

				A -= U;     //(AB) = vector from start to end of arc along the axes of the current plane
				B -= V;

				//see grbl/gcode.c
				double h_x2_div_d = 4.0 * (Radius * Radius) - (A * A + B * B);
				if (h_x2_div_d < 0)
				{
					throw new ParseException("arc radius too small to reach both ends", lineNumber);
				}

				h_x2_div_d = -Math.Sqrt(h_x2_div_d) / Math.Sqrt(A * A + B * B);

				if (MotionMode == 3 ^ Radius < 0)
				{
					h_x2_div_d = -h_x2_div_d;
				}

				U += 0.5 * (A - (B * h_x2_div_d));
				V += 0.5 * (B + (A * h_x2_div_d));

				Words.RemoveAt(i);
				break;
			}
			#endregion

			if (Words.Count > 0)
				Warnings.Add($"motion command must be last in line (ignoring unused words {string.Join(" ", Words)} in block). (line {lineNumber})");

			Arc arc = new Arc();
			arc.Start = State.Position;
			arc.End = EndPos;
			arc.Feed = State.Feed;
			arc.Direction = (MotionMode == 2) ? ArcDirection.CW : ArcDirection.CCW;
			arc.U = U;
			arc.V = V;
			arc.LineNumber = lineNumber;
			arc.Plane = State.Plane;

			Commands.Add(arc);
			State.Position = EndPos;
			return;
		}
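
A note on the MotionCommands.Contains(Words[i].Parameter) call above: the MotionCommands collection is not part of the excerpt. The sketch below uses a hypothetical declaration to show why exact double equality works in this check: both the collection values and the parsed parameters originate from integer-valued G-code words.

using System;
using System.Globalization;
using System.Linq;

static class MotionCommandCheck
{
    // Hypothetical stand-in for the field defined elsewhere in GCodeParser.cs.
    // Exact double equality is safe because both sides come from integer-valued
    // G-code words such as G0, G1, G2 and G3.
    static readonly double[] MotionCommands = new double[] { 0, 1, 2, 3 };

    static void Main()
    {
        double parameter = double.Parse("2", CultureInfo.InvariantCulture); // e.g. parsed from "G2"

        Console.WriteLine(MotionCommands.Contains(parameter)); // True: G2 is a motion command
        Console.WriteLine(MotionCommands.Contains(90));        // False: G90 is handled separately above
    }
}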

Source: Add_attribute_from_attachment.cs
with GNU Affero General Public License v3.0
from arklumpus

public static void Transform(ref TreeNode tree, Dictionary<string, object> parameterValues, Action<double> progressAction)
        {
            Attachment attachment = (Attachment)parameterValues["Taxon list:"];

            if (attachment != null)
            {
                string[] taxonListString = attachment.GetLines();

                string attributeName = (string)parameterValues["Attribute:"];

                string attrType = (string)parameterValues["Attribute type:"];

                string attrValue = (string)parameterValues["New value:"];

                int applyTo = (int)parameterValues["Apply to:"];

                string matchAttribute = (string)parameterValues["Match attribute:"];
                string matchAttributeType = (string)parameterValues["Match attribute type:"];

                double[] taxonListDouble = null;

                if (matchAttributeType == "Number")
                {
                    double elDouble = double.NaN;
                    taxonListDouble = (from el in taxonListString where double.TryParse(el, System.Globalization.NumberStyles.Any, System.Globalization.CultureInfo.InvariantCulture, out elDouble) && !double.IsNaN(elDouble) select elDouble).ToArray();
                }

                if (applyTo == 0 || applyTo == 1 || applyTo == 2)
                {
                    TreeNode lca = null;

                    if (applyTo == 2)
                    {
                        if (matchAttributeType == "String")
                        {
                            lca = GetLCA(tree, taxonListString, matchAttribute);
                        }
                        else if (matchAttributeType == "Number")
                        {
                            lca = GetLCA(tree, taxonListDouble, matchAttribute);
                        }
                    }

                    foreach (TreeNode leaf in tree.GetLeaves())
                    {
                        TreeNode node = leaf;

                        bool matches = false;

                        if (matchAttributeType == "String")
                        {
                            if (node.Attributes.TryGetValue(matchAttribute, out object attrObject) && attrObject is string matchAttrValue && !string.IsNullOrEmpty(matchAttrValue))
                            {
                                matches = taxonListString.Contains(matchAttrValue);
                            }
                        }
                        else if (matchAttributeType == "Number")
                        {
                            if (node.Attributes.TryGetValue(matchAttribute, out object attrObject) && attrObject is double matchAttrValue && !double.IsNaN(matchAttrValue))
                            {
                                matches = taxonListDouble.Contains(matchAttrValue);
                            }
                        }

                        if (matches)
                        {
                            TreeNode targetNode = applyTo == 0 ? node.Parent : applyTo == 1 ? null : lca?.Parent;

                            while (node != targetNode)
                            {
                                if (attributeName == "Name" && attrType == "String")
                                {
                                    node.Name = attrValue;
                                }
                                else if (attributeName == "Support" && attrType == "Number")
                                {
                                    node.Support = double.Parse(attrValue);
                                }
                                else if (attributeName == "Length" && attrType == "Number")
                                {
                                    node.Length = double.Parse(attrValue);
                                }
                                else if (!string.IsNullOrEmpty(attrType))
                                {
                                    if (attrType == "String")
                                    {
                                        node.Attributes[attributeName] = attrValue;
                                    }
                                    else if (attrType == "Number")
                                    {
                                        node.Attributes[attributeName] = double.Parse(attrValue);
                                    }
                                }
                                node = node.Parent;
                            }
                        }
                    }
                }
                else if (applyTo == 3 || applyTo == 4)
                {
                    TreeNode node = null;

                    if (matchAttributeType == "String")
                    {
                        node = GetLCA(tree, taxonListString, matchAttribute);
                    }
                    else if (matchAttributeType == "Number")
                    {
                        node = GetLCA(tree, taxonListDouble, matchAttribute);
                    }

                    if (node == null)
                    {
                        throw new Exception("Could not find the requested ancestor!");
                    }

                    if (applyTo == 3)
                    {
                        if (attributeName == "Name" && attrType == "String")
                        {
                            node.Name = attrValue;
                        }
                        else if (attributeName == "Support" && attrType == "Number")
                        {
                            node.Support = double.Parse(attrValue);
                        }
                        else if (attributeName == "Length" && attrType == "Number")
                        {
                            node.Length = double.Parse(attrValue);
                        }
                        else if (!string.IsNullOrEmpty(attrType))
                        {
                            if (attrType == "String")
                            {
                                node.Attributes[attributeName] = attrValue;
                            }
                            else if (attrType == "Number")
                            {
                                node.Attributes[attributeName] = double.Parse(attrValue);
                            }
                        }
                    }
                    else
                    {
                        foreach (TreeNode child in node.GetChildrenRecursiveLazy())
                        {
                            if (attributeName == "Name" && attrType == "String")
                            {
                                child.Name = attrValue;
                            }
                            else if (attributeName == "Support" && attrType == "Number")
                            {
                                child.Support = double.Parse(attrValue);
                            }
                            else if (attributeName == "Length" && attrType == "Number")
                            {
                                child.Length = double.Parse(attrValue);
                            }
                            else if (!string.IsNullOrEmpty(attrType))
                            {
                                if (attrType == "String")
                                {
                                    child.Attributes[attributeName] = attrValue;
                                }
                                else if (attrType == "Number")
                                {
                                    child.Attributes[attributeName] = double.Parse(attrValue);
                                }
                            }
                        }
                    }
                }
            }
        }

Source: Add_attribute_from_attachment.cs
with GNU Affero General Public License v3.0
from arklumpus

public static TreeNode GetLCA(TreeNode tree, double[] taxonList, string attributeName)
        {
            List<TreeNode> nodes = tree.GetChildrenRecursive();

            if (taxonList.Length > 0)
            {
                TreeNode seed = null;

                foreach (TreeNode node in nodes)
                {
                    if (node.Attributes.TryGetValue(attributeName, out object attrValue) && attrValue is double attrDouble && !double.IsNaN(attrDouble) && taxonList.Contains(attrDouble))
                    {
                        seed = node;
                        break;
                    }
                }

                while (seed != null && !GetAllAttributesDouble(seed, attributeName).ContainsAll(taxonList))
                {
                    seed = seed.Parent;
                }

                return seed;
            }
            else
            {
                return null;
            }
        }

Source: DepthsSubscription.cs
with MIT License
from centaurus-project

public override void SetValues(string rawValues)
        {
            var values = rawValues.Split('-', StringSplitOptions.RemoveEmptyEntries);


            if (values.Length != 3) //Market, Side and Precision
                throw new ArgumentException("Market, Side or Precision property is not specified.");

            SetMarket(values[0]);

            if (!Enum.TryParse<DepthsSide>(values[1], out var side))
                throw new ArgumentException($"{values[1]} is not valid Side value.");
            Side = side;
            if (!double.TryParse(values[2], NumberStyles.Any, CultureInfo.InvariantCulture, out var precision) || !Precisions.Contains(precision))
                throw new ArgumentException($"{values[2]} is not valid precision value.");
            Precision = precision;

        }
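
Here Contains(double) validates a parsed value against an allowed set. The Precisions collection is defined elsewhere in the project; the sketch below uses illustrative values only, but shows why the pattern is reliable: parsing a decimal string and writing the same decimal as a literal produce the identical double, so exact membership testing is fine.

using System;
using System.Globalization;
using System.Linq;

class PrecisionCheck
{
    // Illustrative values; the real Precisions set belongs to the project above.
    static readonly double[] Precisions = { 1, 0.1, 0.01, 0.001 };

    static void Main()
    {
        string raw = "0.01";
        if (!double.TryParse(raw, NumberStyles.Any, CultureInfo.InvariantCulture, out var precision)
            || !Precisions.Contains(precision))
            throw new ArgumentException($"{raw} is not a valid precision value.");

        Console.WriteLine($"Accepted precision: {precision}"); // Accepted precision: 0.01
    }
}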

Source: LookupTable.cs
with GNU General Public License v3.0
from DeepHydro

public T GetValue(string col_name, double row_name)
        {
            if (_RowIndex.Keys.Contains(row_name) && _ColIndex.Keys.Contains(col_name))
                return _MappingTable[0,_RowIndex[row_name],_ColIndex[col_name]];
            else
                return NoValue;
        }

Source: PackageCoverage.cs
with GNU General Public License v3.0
from DeepHydro

public float GetValue(string col_name, double row_id)
        {
            //if (_ColIndex.Keys.Contains(col_name) && _RowIndex.Keys.Contains(row_id))
            //    return LookupTable.Rows[_RowIndex[row_id]][col_name].ToString();
            //else
            //    return ZonalStatastics.NoDataValueString;
            if (_ColIndex.Keys.Contains(col_name) && _RowIndex.Keys.Contains(row_id))
                return float.Parse(LookupTable.Rows[_RowIndex[row_id]][col_name].ToString());
            else
                return ZonalStatastics.NoDataValue;
        }

Source: KnotVector.cs
with MIT License
from GSharker

public Dictionary<double, int> Multiplicities()
        {
            Dictionary<double, int> multiplicities = new Dictionary<double, int>(Count);
            foreach (double knot in this)
            {
                var multiplicity = Multiplicity(knot);
                if (!multiplicities.Keys.Contains(knot))
                {
                    multiplicities.Add(knot, multiplicity);
                }

                multiplicities[knot] = multiplicity;
            }
            return multiplicities;
        }
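
The three examples above (LookupTable, PackageCoverage, KnotVector) call Contains on dictionary key collections. Because Dictionary<double, TValue>.Keys implements ICollection<double>, Enumerable.Contains defers to its Contains method, which performs a hash lookup rather than a linear scan; ContainsKey or TryGetValue expresses the same check more directly. A small sketch with made-up values (the field names in the projects above are only approximated here):

using System;
using System.Collections.Generic;
using System.Linq;

class KeyLookupSketch
{
    static void Main()
    {
        // Stand-in for fields such as _RowIndex in the DeepHydro examples.
        var rowIndex = new Dictionary<double, int> { { 1.5, 0 }, { 2.5, 1 } };

        double row = 1.5;

        // Keys.Contains goes through ICollection<double>.Contains (a hash lookup).
        Console.WriteLine(rowIndex.Keys.Contains(row)); // True

        // Equivalent, more direct form:
        Console.WriteLine(rowIndex.ContainsKey(row));   // True
    }
}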

Source: DonutChartTests.cs
with MIT License
from MudBlazor

[Test]
        [TestCase(new double[]{50, 25, 20, 5 })]
        [TestCase(new double[]{50, 25, 20, 5 , 12})]
        public void DonutChartExampleData(double[] data)
        {
            string[] labels = { "Fossil", "Nuclear", "Solar", "Wind", "Oil", "Coal", "Gas", "Biomreplaced",
                "Hydro", "Geothermal", "Fossil", "Nuclear", "Solar", "Wind", "Oil",
                "Coal", "Gas", "Biomreplaced", "Hydro", "Geothermal" };
            
            var comp = Context.RenderComponent<MudChart>(parameters => parameters
                .Add(p => p.ChartType, ChartType.Donut)
                .Add(p => p.Height, "300px")
                .Add(p => p.Width, "300px")
                .Add(p => p.InputData, data)
                .Add(p => p.ChartOptions, new ChartOptions {ChartPalette = _baseChartPalette})
                .Add(p => p.InputLabels,labels));
            
            comp.Markup.Should().Contain("clreplaced=\"mud-chart-donut\"");
            comp.Markup.Should().Contain("clreplaced=\"mud-chart-serie mud-donut-segment\"");
            comp.Markup.Should().Contain("mud-chart-legend-item");
            
            if (data.Length <= 4)
            {
                comp.Markup.Should().
                    Contain("Fossil").And.Contain("Nuclear").And.Contain("Solar").And.Contain("Wind");
            }
            
            if (data.Length >= 5)
            {
                comp.Markup.Should()
                    .Contain("Oil");
            }
            
            if (data.Length == 4 && data.Contains(50))
            {
                comp.Markup.Should()
                    .Contain("stroke-dasharray=\"50 50\" stroke-dashoffset=\"125\"");
            }

            if (data.Length == 4 && data.Contains(5))
            {
                comp.Markup.Should()
                    .Contain("stroke-dasharray=\"5 95\" stroke-dashoffset=\"30\"");
            }
            
            comp.SetParametersAndRender(parameters => parameters
                .Add(p => p.ChartOptions, new ChartOptions(){ChartPalette = _modifiedPalette}));

            comp.Markup.Should().Contain(_modifiedPalette[0]);
        }

Source: PieChartTests.cs
with MIT License
from MudBlazor

[Theory]
        [TestCase(new double[]{77, 25, 20, 5})]
        [TestCase(new double[]{77, 25, 20, 5, 8})]
        public void PieChartExampleData(double[] data)
        {
            string[] labels = { "Uranium", "Plutonium", "Thorium", "Caesium", "Technetium", "Promethium",
                "Polonium", "Astatine", "Radon", "Francium", "Radium", "Actinium", "Protactinium",
                "Neptunium", "Americium", "Curium", "Berkelium", "Californium", "Einsteinium", "Mudblaznium" };
            
            var comp = Context.RenderComponent<MudChart>(parameters => parameters
                .Add(p => p.ChartType, ChartType.Pie)
                .Add(p => p.ChartOptions, new ChartOptions {ChartPalette = _baseChartPalette})
                .Add(p => p.Height, "300px")
                .Add(p => p.Width, "300px")
                .Add(p => p.InputData, data)
                .Add(p => p.InputLabels,labels));
            
            comp.Markup.Should().Contain("clreplaced=\"mud-chart-pie\"");
            comp.Markup.Should().Contain("clreplaced=\"mud-chart-serie\"");
            comp.Markup.Should().Contain("mud-chart-legend-item");

            if (data.Length <= 4)
            {
                comp.Markup.Should().
                    Contain("Uranium").And.Contain("Plutonium").And.Contain("Thorium").And.Contain("Caesium");
            }
            
            if (data.Length >= 5)
            {
                comp.Markup.Should()
                    .Contain("Technetium");
            }

            if (data.Length == 4 && data.Contains(77))
            {
                comp.Markup.Should()
                    .Contain("M 1 0 A 1 1 0 1 1 -0.7851254621398548 -0.6193367490305087 L 0 0");
            }

            if (data.Length == 4 && data.Contains(5))
            {
                comp.Markup.Should()
                    .Contain("M 0.9695598647982466 -0.24485438238350116 A 1 1 0 0 1 1 -2.4492935982947064E-16 L 0 0");
            }
            
            comp.SetParametersAndRender(parameters => parameters
                .Add(p => p.ChartOptions, new ChartOptions(){ChartPalette = _modifiedPalette}));

            comp.Markup.Should().Contain(_modifiedPalette[0]);
        }

Source: BagOfVisualWordsTest.cs
with MIT License
from PacktPublishing

[Test]
        [Category("Random")]
#if NET35
        [Ignore("Random behaviour differs in net35.")]
#endif
        public void GetFeatureVectorTest()
        {
            var images = GetImages();

            Accord.Math.Random.Generator.Seed = 0;

            // The Bag-of-Visual-Words model converts arbitrary-size images 
            // into fixed-length feature vectors. In this example, we will
            // be setting the codebook size to 10. This means all generated
            // feature vectors will have the same length of 10.

            // Create a new Bag-of-Visual-Words (BoW) model
            BagOfVisualWords bow = new BagOfVisualWords(10);

            bow.ParallelOptions.MaxDegreeOfParallelism = 1;

            // Compute the model using
            // a set of training images
            bow.Compute(images);

            // After this point, we will be able to translate
            // images into double[] feature vectors using
            double[] feature = bow.GetFeatureVector(images[0]);

            Assert.AreEqual(10, feature.Length);


            double[][] expected = new double[][]
            {
                new double[] { 4, 28, 24, 68, 51, 97, 60, 35, 18, 24 },
                new double[] { 53, 111, 89, 70, 24, 80, 130, 46, 50, 74 },
                new double[] { 31, 29, 57, 102, 63, 142, 40, 18, 37, 33 }
            };

            double[][] actual = new double[expected.Length][];
            for (int i = 0; i < actual.Length; i++)
                actual[i] = bow.GetFeatureVector(images[i]);

            string str = actual.ToCSharp();

            for (int i = 0; i < actual.Length; i++)
                for (int j = 0; j < actual[i].Length; j++)
                    Assert.IsTrue(expected[i].Contains(actual[i][j]));
        }

Source: BagOfVisualWordsTest.cs
with MIT License
from PacktPublishing

[Test]
        [Category("Random")]
#if NET35
        [Ignore("Random behaviour differs in net35.")]
#endif
        public void learn_new()
        {
            #region doc_learn
            // Ensure results are reproducible
            Accord.Math.Random.Generator.Seed = 0;

            // The Bag-of-Visual-Words model converts images of arbitrary 
            // size into fixed-length feature vectors. In this example, we
            // will be setting the codebook size to 10. This means all feature
            // vectors that will be generated will have the same length of 10.

            // By default, the BoW object will use the sparse SURF as the 
            // feature extractor and K-means as the clustering algorithm.

            // Create a new Bag-of-Visual-Words (BoW) model
            var bow = BagOfVisualWords.Create(numberOfWords: 10);
            // Note: a simple BoW model can also be created using
            // var bow = new BagOfVisualWords(numberOfWords: 10);

            // Get some training images
            Bitmap[] images = GetImages();

            // Compute the model
            bow.Learn(images);

            // After this point, we will be able to translate
            // images into double[] feature vectors using
            double[][] features = bow.Transform(images);

            // We can also check some statistics about the dataset:
            int numberOfImages = bow.Statistics.TotalNumberOfInstances; // 6

            // Statistics about all the descriptors that have been extracted:
            int totalDescriptors = bow.Statistics.TotalNumberOfDescriptors; // 4132
            double totalMean = bow.Statistics.TotalNumberOfDescriptorsPerInstance.Mean; // 688.66666666666663
            double totalVar = bow.Statistics.TotalNumberOfDescriptorsPerInstance.Variance; // 96745.866666666669
            IntRange totalRange = bow.Statistics.TotalNumberOfDescriptorsPerInstanceRange; // [409, 1265]

            // Statistics only about the descriptors that have been actually used:
            int takenDescriptors = bow.Statistics.NumberOfDescriptorsTaken; // 4132
            double takenMean = bow.Statistics.NumberOfDescriptorsTakenPerInstance.Mean; // 688.66666666666663
            double takenVar = bow.Statistics.NumberOfDescriptorsTakenPerInstance.Variance; // 96745.866666666669
            IntRange takenRange = bow.Statistics.NumberOfDescriptorsTakenPerInstanceRange; // [409, 1265]
            #endregion

            Assert.AreEqual(6, numberOfImages);

            Assert.AreEqual(4132, totalDescriptors);
            Assert.AreEqual(688.66666666666663, totalMean);
            Assert.AreEqual(96745.866666666669, totalVar);
            Assert.AreEqual(new IntRange(409, 1265), totalRange);

            Assert.AreEqual(4132, takenDescriptors);
            Assert.AreEqual(688.66666666666663, takenMean);
            Assert.AreEqual(96745.866666666669, takenVar);
            Assert.AreEqual(new IntRange(409, 1265), takenRange);


            var kmeans = bow.Clustering as KMeans;
            Assert.AreEqual(64, kmeans.Clusters.NumberOfInputs);
            Assert.AreEqual(10, kmeans.Clusters.NumberOfOutputs);
            Assert.AreEqual(10, kmeans.Clusters.NumberOfClasses);

            string str = kmeans.Clusters.Proportions.ToCSharp();
            double[] expectedProportions = new double[] { 0.0960793804453049, 0.0767182962245886, 0.103823814133591, 0.0738141335914811, 0.0997095837366893, 0.0815585672797677, 0.0788964181994192, 0.090513068731849, 0.117376573088093, 0.181510164569216 };

            Assert.IsTrue(kmeans.Clusters.Proportions.IsEqual(expectedProportions, 1e-10));
            Assert.IsTrue(kmeans.Clusters.Covariances.All(x => x == null));

            Assert.AreEqual(features.GetLength(), new[] { 6, 10 });

            str = features.ToCSharp();

            double[][] expected = new double[][]
            {
                new double[] { 47, 44, 42, 4, 23, 22, 28, 53, 50, 96 },
                new double[] { 26, 91, 71, 49, 99, 70, 59, 28, 155, 79 },
                new double[] { 71, 34, 51, 33, 53, 25, 44, 64, 32, 145 },
                new double[] { 49, 41, 31, 24, 54, 19, 41, 63, 66, 72 },
                new double[] { 137, 16, 92, 115, 39, 75, 24, 92, 41, 88 },
                new double[] { 67, 91, 142, 80, 144, 126, 130, 74, 141, 270 }
            };

            for (int i = 0; i < features.Length; i++)
                for (int j = 0; j < features[i].Length; j++)
                    Assert.IsTrue(expected[i].Contains(features[i][j]));

            #region doc_classification

            // Now, the features can be used to train any classification
            // algorithm as if they were the images themselves. For example,
            // let's assume the first three images belong to a class and
            // the second three to another class. We can train an SVM using

            int[] labels = { -1, -1, -1, +1, +1, +1 };

            // Create the SMO algorithm to learn a Linear kernel SVM
            var teacher = new SequentialMinimalOptimization<Linear>()
            {
                Complexity = 10000 // make a hard margin SVM
            };

            // Obtain a learned machine
            var svm = teacher.Learn(features, labels);

            // Use the machine to classify the features
            bool[] output = svm.Decide(features);

            // Compute the error between the expected and predicted labels
            double error = new ZeroOneLoss(labels).Loss(output);
            #endregion

            Assert.IsTrue(new ZeroOneLoss(labels).IsBinary);
            Assert.AreEqual(error, 0);
        }

Source: BagOfVisualWordsTest.cs
with MIT License
from PacktPublishing

[Test]
        [Category("Random")]
#if NET35
        [Ignore("Random behaviour differs in net35.")]
#endif
        public void custom_clustering_test()
        {
            #region doc_clustering
            // Ensure results are reproducible
            Accord.Math.Random.Generator.Seed = 0;

            // The Bag-of-Visual-Words model converts images of arbitrary 
            // size into fixed-length feature vectors. In this example, we
            // will be setting the codebook size to 10. This means all feature
            // vectors that will be generated will have the same length of 10.

            // By default, the BoW object will use the sparse SURF as the 
            // feature extractor and K-means as the clustering algorithm.
            // In this example, we will use the Binary-Split clustering
            // algorithm instead.

            // Create a new Bag-of-Visual-Words (BoW) model
            var bow = BagOfVisualWords.Create(new BinarySplit(10));

            // Since we are using generics, we can setup properties 
            // of the binary split clustering algorithm directly:
            bow.Clustering.ComputeProportions = true;
            bow.Clustering.ComputeCovariances = false;

            // Get some training images
            Bitmap[] images = GetImages();

            // Compute the model
            bow.Learn(images);

            // After this point, we will be able to translate
            // images into double[] feature vectors using
            double[][] features = bow.Transform(images);
            #endregion

            Assert.AreEqual(-1, bow.NumberOfInputs);
            Assert.AreEqual(10, bow.NumberOfOutputs);
            Assert.AreEqual(10, bow.NumberOfWords);
            Assert.AreEqual(64, bow.Clustering.Clusters.NumberOfInputs);
            Assert.AreEqual(10, bow.Clustering.Clusters.NumberOfOutputs);
            Assert.AreEqual(10, bow.Clustering.Clusters.NumberOfClasses);

            BinarySplit binarySplit = bow.Clustering;

            string str = binarySplit.Clusters.Proportions.ToCSharp();
            double[] expectedProportions = new double[] { 0.158034849951597, 0.11810261374637, 0.0871248789932236, 0.116408518877057, 0.103581800580833, 0.192642787996128, 0.0365440464666021, 0.0716360116166505, 0.0575992255566312, 0.058325266214908 };

            Assert.IsTrue(binarySplit.Clusters.Proportions.IsEqual(expectedProportions, 1e-10));
            Assert.IsTrue(binarySplit.Clusters.Covariances.All(x => x == null));

            Assert.AreEqual(features.GetLength(), new[] { 6, 10 });

            str = features.ToCSharp();

            double[][] expected = new double[][]
            {
                new double[] { 73, 36, 41, 50, 7, 106, 23, 22, 22, 29 },
                new double[] { 76, 93, 25, 128, 86, 114, 20, 91, 22, 72 },
                new double[] { 106, 47, 67, 57, 37, 131, 33, 31, 22, 21 },
                new double[] { 84, 41, 49, 59, 33, 73, 32, 50, 6, 33 },
                new double[] { 169, 105, 92, 47, 95, 67, 16, 25, 83, 20 },
                new double[] { 145, 166, 86, 140, 170, 305, 27, 77, 83, 66 }
            };

            for (int i = 0; i < features.Length; i++)
                for (int j = 0; j < features[i].Length; j++)
                    Assert.IsTrue(expected[i].Contains(features[i][j]));

            #region doc_classification_clustering

            // Now, the features can be used to train any classification
            // algorithm as if they were the images themselves. For example,
            // let's assume the first three images belong to a class and
            // the second three to another class. We can train an SVM using

            int[] labels = { -1, -1, -1, +1, +1, +1 };

            // Create the SMO algorithm to learn a Linear kernel SVM
            var teacher = new SequentialMinimalOptimization<Linear>()
            {
                Complexity = 10000 // make a hard margin SVM
            };

            // Obtain a learned machine
            var svm = teacher.Learn(features, labels);

            // Use the machine to classify the features
            bool[] output = svm.Decide(features);

            // Compute the error between the expected and predicted labels
            double error = new ZeroOneLoss(labels).Loss(output); // should be 0
            #endregion

            Assert.AreEqual(error, 0);
        }

Source: BagOfVisualWordsTest.cs
with MIT License
from PacktPublishing

[Test]
        [Category("Random")]
#if NET35
        [Ignore("Random behaviour differs in net35.")]
#endif
        public void custom_feature_test()
        {
            #region doc_feature
            Accord.Math.Random.Generator.Seed = 0;

            // The Bag-of-Visual-Words model converts images of arbitrary 
            // size into fixed-length feature vectors. In this example, we
            // will be setting the codebook size to 10. This means all feature
            // vectors that will be generated will have the same length of 10.

            // By default, the BoW object will use the sparse SURF as the 
            // feature extractor and K-means as the clustering algorithm.
            // In this example, we will use the HOG feature extractor
            // and the Binary-Split clustering algorithm instead. However, 
            // this is just an example: the best features and the best clustering 
            // algorithm might need to be found through experimentation. Please
            // also try with KMeans first to obtain a baseline value.

            // Create a new Bag-of-Visual-Words (BoW) model using HOG features
            var bow = BagOfVisualWords.Create(new HistogramsOfOrientedGradients(), new BinarySplit(10));

            // Get some training images
            Bitmap[] images = GetImages();

            // Compute the model
            bow.Learn(images);

            // After this point, we will be able to translate
            // images into double[] feature vectors using
            double[][] features = bow.Transform(images);
            #endregion

            Assert.AreEqual(features.GetLength(), new[] { 6, 10 });

            string str = features.ToCSharp();

            double[][] expected = new double[][]
            {
                new double[] { 53, 285, 317, 292, 389, 264, 127, 250, 283, 92 },
                new double[] { 64, 326, 267, 418, 166, 241, 160, 237, 324, 149 },
                new double[] { 63, 234, 229, 221, 645, 178, 226, 178, 218, 160 },
                new double[] { 87, 322, 324, 295, 180, 276, 219, 218, 247, 184 },
                new double[] { 60, 312, 285, 285, 352, 274, 166, 226, 290, 102 },
                new double[] { 110, 292, 299, 324, 72, 208, 317, 248, 252, 230 }
            };

            for (int i = 0; i < features.Length; i++)
                for (int j = 0; j < features[i].Length; j++)
                    Assert.IsTrue(expected[i].Contains(features[i][j]));

            #region doc_classification_feature

            // Now, the features can be used to train any classification
            // algorithm as if they were the images themselves. For example,
            // let's assume the first three images belong to a class and
            // the second three to another class. We can train an SVM using

            int[] labels = { -1, -1, -1, +1, +1, +1 };

            // Create the SMO algorithm to learn a Linear kernel SVM
            var teacher = new SequentialMinimalOptimization<Linear>()
            {
                Complexity = 100 // make a hard margin SVM
            };

            // Obtain a learned machine
            var svm = teacher.Learn(features, labels);

            // Use the machine to classify the features
            bool[] output = svm.Decide(features);

            // Compute the error between the expected and predicted labels
            double error = new ZeroOneLoss(labels).Loss(output); // should be 0
            #endregion

            Assert.AreEqual(error, 0);
        }

Source: BagOfVisualWordsTest.cs
with MIT License
from PacktPublishing

[Test, Category("Random")]
#if NET35
        [Ignore("Random")]
#endif
        public void custom_feature_test_lbp()
        {
            #region doc_feature_lbp
            // Ensure results are reproducible
            Accord.Math.Random.Generator.Seed = 0;

            // The Bag-of-Visual-Words model converts images of arbitrary 
            // size into fixed-length feature vectors. In this example, we
            // will be setting the codebook size to 3. This means all feature
            // vectors that will be generated will have the same length of 3.

            // By default, the BoW object will use the sparse SURF as the 
            // feature extractor and K-means as the clustering algorithm.
            // In this example, we will use the Local Binary Pattern (LBP) 
            // feature extractor and the Binary-Split clustering algorithm.
            // However, this is just an example: the best features and the
            // best clustering algorithm might need to be found through 
            // experimentation. Please also try with KMeans first to obtain
            // a baseline value.

            // Create a new Bag-of-Visual-Words (BoW) model using LBP features
            var bow = BagOfVisualWords.Create(new LocalBinaryPattern(), new BinarySplit(3));

            // Since we are using generics, we can setup properties 
            // of the binary split clustering algorithm directly:
            bow.Clustering.ComputeCovariances = false;
            bow.Clustering.ComputeProportions = false;
            bow.Clustering.ComputeError = false;

            // Get some training images
            Bitmap[] images = GetImages();

            // Compute the model
            bow.Learn(images);

            // After this point, we will be able to translate
            // images into double[] feature vectors using
            double[][] features = bow.Transform(images);
            #endregion

            Assert.AreEqual(features.GetLength(), new[] { 6, 3 });

            string str = features.ToCSharp();

            double[][] expected = new double[][]
            {
                new double[] { 1608, 374, 370 },
                new double[] { 1508, 337, 507 },
                new double[] { 1215, 343, 794 },
                new double[] { 782, 550, 1020 },
                new double[] { 1480, 360, 512 },
                new double[] { 15, 724, 1613 }
            };

            for (int i = 0; i < features.Length; i++)
                for (int j = 0; j < features[i].Length; j++)
                    Assert.IsTrue(expected[i].Contains(features[i][j]));

            #region doc_classification_feature_lbp

            // Now, the features can be used to train any classification
            // algorithm as if they were the images themselves. For example,
            // let's assume the first three images belong to a class and
            // the second three to another class. We can train an SVM using

            int[] labels = { -1, -1, +1, +1, +1, +1 };

            // Create the SMO algorithm to learn a Linear kernel SVM
            var teacher = new SequentialMinimalOptimization<Gaussian>()
            {
                Complexity = 100 // make a hard margin SVM
            };

            // Obtain a learned machine
            var svm = teacher.Learn(features, labels);

            // Use the machine to classify the features
            bool[] output = svm.Decide(features);

            // Compute the error between the expected and predicted labels
            double error = new ZeroOneLoss(labels).Loss(output); // should be 0
            #endregion

            Assert.AreEqual(error, 0);
        }

Source: BagOfVisualWordsTest.cs
with MIT License
from PacktPublishing

[Test]
        [Category("Random")]
#if NET35
        [Ignore("Random behaviour differs in net35.")]
#endif
        public void custom_data_type_test()
        {
            #region doc_datatype
            // Ensure results are reproducible
            Accord.Math.Random.Generator.Seed = 0;

            // The Bag-of-Visual-Words model converts images of arbitrary 
            // size into fixed-length feature vectors. In this example, we
            // will be setting the codebook size to 10. This means all feature
            // vectors that will be generated will have the same length of 10.

            // By default, the BoW object will use the sparse SURF as the 
            // feature extractor and K-means as the clustering algorithm.
            // In this example, we will use the FREAK feature extractor
            // and the K-Modes clustering algorithm instead.

            // Create a new Bag-of-Visual-Words (BoW) model using FREAK binary features
            var bow = BagOfVisualWords.Create<FastRetinaKeypointDetector, KModes<byte>, byte[]>(
                new FastRetinaKeypointDetector(), new KModes<byte>(10, new Hamming()));

            // Get some training images
            Bitmap[] images = GetImages();

            // Compute the model
            bow.Learn(images);

            // After this point, we will be able to translate
            // images into double[] feature vectors using
            double[][] features = bow.Transform(images);
            #endregion

            Assert.AreEqual(features.GetLength(), new[] { 6, 10 });

            string str = features.ToCSharp();

            double[][] expected = new double[][]
            {
                new double[] { 33, 58, 19, 35, 112, 67, 70, 155, 86, 45 },
                new double[] { 130, 91, 74, 114, 200, 90, 136, 37, 53, 92 },
                new double[] { 45, 49, 68, 55, 123, 142, 40, 100, 92, 37 },
                new double[] { 25, 17, 89, 136, 138, 59, 33, 7, 23, 12 },
                new double[] { 186, 78, 86, 133, 198, 60, 65, 25, 38, 77 },
                new double[] { 45, 33, 10, 131, 192, 26, 99, 20, 82, 28 }
            };

            for (int i = 0; i < features.Length; i++)
                for (int j = 0; j < features[i].Length; j++)
                    Assert.IsTrue(expected[i].Contains(features[i][j]));

            #region doc_classification_datatype

            // Now, the features can be used to train any classification
            // algorithm as if they were the images themselves. For example,
            // let's assume the first three images belong to a class and
            // the second three to another class. We can train an SVM using

            int[] labels = { -1, -1, -1, +1, +1, +1 };

            // Create the SMO algorithm to learn a Linear kernel SVM
            var teacher = new SequentialMinimalOptimization<Linear>()
            {
                Complexity = 1000 // make a hard margin SVM
            };

            // Obtain a learned machine
            var svm = teacher.Learn(features, labels);

            // Use the machine to classify the features
            bool[] output = svm.Decide(features);

            // Compute the error between the expected and predicted labels
            double error = new ZeroOneLoss(labels).Loss(output); // should be 0
            #endregion

            Assert.AreEqual(error, 0);
        }

Source: BagOfVisualWordsTest.cs
with MIT License
from PacktPublishing

[Test]
        [Category("Random")]
#if NET35
        [Ignore("Random behaviour differs in net35.")]
#endif
        public void freak_binary_split()
        {
            #region doc_feature_freak
            // Ensure results are reproducible
            Accord.Math.Random.Generator.Seed = 0;

            // The Bag-of-Visual-Words model converts images of arbitrary 
            // size into fixed-length feature vectors. In this example, we
            // will be setting the codebook size to 10. This means all feature
            // vectors that will be generated will have the same length of 10.

            // By default, the BoW object will use the sparse SURF as the 
            // feature extractor and K-means as the clustering algorithm.
            // In this example, we will use the FREAK feature extractor
            // and the Binary-Split clustering algorithm instead.

            // Create a new Bag-of-Visual-Words (BoW) model using FREAK binary features
            var bow = BagOfVisualWords.Create(new FastRetinaKeypointDetector(), new BinarySplit(10));

            // Get some training images
            Bitmap[] images = GetImages();

            // Compute the model
            bow.Learn(images);

            bow.ParallelOptions.MaxDegreeOfParallelism = 1;

            // After this point, we will be able to translate
            // images into double[] feature vectors using
            double[][] features = bow.Transform(images);
            #endregion

            Assert.AreEqual(features.GetLength(), new[] { 6, 10 });

            string str = features.ToCSharp();

            double[][] expected = new double[][]
            {
                new double[] { 135, 69, 55, 131, 62, 64, 20, 29, 47, 68 },
                new double[] { 299, 64, 174, 93, 32, 101, 163, 56, 17, 18 },
                new double[] { 141, 70, 120, 128, 53, 52, 51, 58, 52, 26 },
                new double[] { 150, 13, 200, 55, 4, 36, 58, 20, 0, 3 },
                new double[] { 236, 31, 204, 72, 22, 78, 217, 53, 25, 8 },
                new double[] { 208, 21, 193, 106, 8, 43, 52, 8, 4, 23 }
            };

            for (int i = 0; i < features.Length; i++)
                for (int j = 0; j < features[i].Length; j++)
                    Assert.IsTrue(expected[i].Contains(features[i][j]));

            #region doc_classification_feature_freak

            // Now, the features can be used to train any classification
            // algorithm as if they were the images themselves. For example,
            // let's assume the first three images belong to a class and
            // the second three to another class. We can train an SVM using

            int[] labels = { -1, -1, -1, +1, +1, +1 };

            // Create the SMO algorithm to learn a Linear kernel SVM
            var teacher = new SequentialMinimalOptimization<Linear>()
            {
                Complexity = 1000 // make a hard margin SVM
            };

            // Obtain a learned machine
            var svm = teacher.Learn(features, labels);

            // Use the machine to classify the features
            bool[] output = svm.Decide(features);

            // Compute the error between the expected and predicted labels
            double error = new ZeroOneLoss(labels).Loss(output); // should be 0
            #endregion

            Assert.AreEqual(error, 0);
        }

Source: BagOfVisualWordsTest.cs
with MIT License
from PacktPublishing

[Test]
        [Category("Random")]
#if NET35
        [Ignore("Random behaviour differs in net35.")]
#endif
        public void learn_from_disk()
        {
            string basePath = Path.Combine(TestContext.CurrentContext.TestDirectory, "Resources", "SURF");

            #region doc_learn_disk
            // Ensure results are reproducible
            Accord.Math.Random.Generator.Seed = 0;

            // Depending on the problem we are trying to tackle, learning a BoW might require 
            // large amounts of available memory. In those cases, we can alleviate the amount
            // of memory required by using only a subsample of the training dataset to learn
            // the model. Likewise, we can also load images from the disk on-demand instead of
            // having to load all of them right at the beginning.

            // Create a new Bag-of-Visual-Words (BoW) model
            var bow = BagOfVisualWords.Create(numberOfWords: 10);

            // We will learn the codebooks from only 1000 descriptors, which
            // will be randomly selected from the multiple training images
            bow.NumberOfDescriptors = 1000; // Note: in the real world, use >10,000 samples

            // We will load at most 200 descriptors from each image. This means
            // that we will only keep 200 descriptors per image at maximum in 
            // memory at a given time.
            bow.MaxDescriptorsPerInstance = 200; // Note: In the real world, use >1,000 samples

            // Get some training images. Here, instead of loading Bitmaps as in
            // the other examples, we will just specify their paths in the disk:
            string[] filenames =
            {
                Path.Combine(basePath, "flower01.jpg"),
                Path.Combine(basePath, "flower02.jpg"),
                Path.Combine(basePath, "flower03.jpg"),
                Path.Combine(basePath, "flower04.jpg"),
                Path.Combine(basePath, "flower05.jpg"),
                Path.Combine(basePath, "flower06.jpg"),
            };

            // Compute the model
            bow.Learn(filenames);

            // After this point, we will be able to translate
            // images into double[] feature vectors using
            double[][] features = bow.Transform(filenames);

            // We can also check some statistics about the dataset:
            int numberOfImages = bow.Statistics.TotalNumberOfInstances; // 6

            // Statistics about all the descriptors that have been extracted:
            int totalDescriptors = bow.Statistics.TotalNumberOfDescriptors; // 4132
            double totalMean = bow.Statistics.TotalNumberOfDescriptorsPerInstance.Mean; // 688.66666666666663
            double totalVar = bow.Statistics.TotalNumberOfDescriptorsPerInstance.Variance; // 96745.866666666669
            IntRange totalRange = bow.Statistics.TotalNumberOfDescriptorsPerInstanceRange; // [409, 1265]

            // Statistics only about the descriptors that have been actually used:
            int takenDescriptors = bow.Statistics.NumberOfDescriptorsTaken; // 1000
            double takenMean = bow.Statistics.NumberOfDescriptorsTakenPerInstance.Mean; // 200
            double takenVar = bow.Statistics.NumberOfDescriptorsTakenPerInstance.Variance; // 0
            IntRange takenRange = bow.Statistics.NumberOfDescriptorsTakenPerInstanceRange; // [200, 200]
            #endregion

            Assert.AreEqual(6, numberOfImages);

            Assert.AreEqual(4132, totalDescriptors);
            Assert.AreEqual(688.66666666666663, totalMean);
            Assert.AreEqual(96745.866666666669, totalVar);
            Assert.AreEqual(new IntRange(409, 1265), totalRange);

            Assert.AreEqual(1000, takenDescriptors);
            Assert.AreEqual(200, takenMean);
            Assert.AreEqual(0, takenVar);
            Assert.AreEqual(new IntRange(200, 200), takenRange);

            var kmeans = bow.Clustering as KMeans;
            Assert.AreEqual(64, kmeans.Clusters.NumberOfInputs);
            Assert.AreEqual(10, kmeans.Clusters.NumberOfOutputs);
            Assert.AreEqual(10, kmeans.Clusters.NumberOfClasses);

            string str = kmeans.Clusters.Proportions.ToCSharp();
            double[] expectedProportions = new double[] { 0.029, 0.167, 0.143, 0.129, 0.079, 0.104, 0.068, 0.09, 0.094, 0.097 };

            Assert.IsTrue(kmeans.Clusters.Proportions.IsEqual(expectedProportions, 1e-10));
            Assert.IsTrue(kmeans.Clusters.Covariances.All(x => x == null));

            Assert.AreEqual(features.GetLength(), new[] { 6, 10 });

            str = features.ToCSharp();

            double[][] expected = new double[][]
            {
                new double[] { 6, 104, 59, 68, 41, 7, 45, 25, 26, 28 },
                new double[] { 13, 102, 61, 39, 51, 114, 69, 108, 115, 55 },
                new double[] { 10, 138, 91, 78, 27, 46, 28, 39, 52, 43 },
                new double[] { 4, 66, 51, 84, 59, 32, 25, 54, 61, 24 },
                new double[] { 88, 85, 161, 94, 5, 119, 13, 35, 22, 97 },
                new double[] { 57, 269, 134, 81, 53, 214, 59, 111, 139, 148 }
            };

            for (int i = 0; i < features.Length; i++)
                for (int j = 0; j < features[i].Length; j++)
                    Assert.IsTrue(expected[i].Contains(features[i][j]));

            #region doc_classification_disk

            // Now, the features can be used to train any classification
            // algorithm as if they were the images themselves. For example,
            // let's assume the first three images belong to a class and
            // the second three to another class. We can train an SVM using

            int[] labels = { -1, -1, -1, +1, +1, +1 };

            // Create the SMO algorithm to learn a Linear kernel SVM
            var teacher = new SequentialMinimalOptimization<Linear>()
            {
                Complexity = 10000 // make a hard margin SVM
            };

            // Obtain a learned machine
            var svm = teacher.Learn(features, labels);

            // Use the machine to classify the features
            bool[] output = svm.Decide(features);

            // Compute the error between the expected and predicted labels
            double error = new ZeroOneLoss(labels).Loss(output);
            #endregion

            Assert.IsTrue(new ZeroOneLoss(labels).IsBinary);
            Assert.AreEqual(error, 0);
        }

Source: CurveDataHandler.cs
with Apache License 2.0
from ProteoWizard

protected virtual DataColumn GetZAxisColumn(IPointList points)
        {
            var values = new double?[points.Count];
            for (int i = 0; i < points.Count; i++)
            {
                var point = points[i];
                if (point.IsMissing)
                {
                    values[i] = null;
                }
                else
                {
                    values[i] = point.Z;
                }
            }
            if (values.Contains(null))
            {
                return new DataColumn<double?>(@"Z", values);
            }
            return new DataColumn<double>(@"Z", values.Cast<double>());
        }
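
The last example calls Contains on a double?[] rather than a plain double sequence: the default comparer for Nullable<double> treats two null values as equal, so Contains(null) is a concise "does this column have any missing points?" test. A minimal sketch of that behaviour:

using System;
using System.Linq;

class NullableContainsSketch
{
    static void Main()
    {
        double?[] values = { 1.0, null, 2.5 };

        // EqualityComparer<double?>.Default matches null against null.
        Console.WriteLine(values.Contains(null));       // True: at least one value is missing
        Console.WriteLine(values.All(v => v.HasValue)); // False: the same check, inverted
    }
}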