Here are examples of the C# API System.Collections.Generic.Dictionary.Remove(double), taken from open source projects. By voting up you can indicate which examples are most useful and appropriate.
9 Examples
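Two behaviors of Dictionary<double, TValue>.Remove are worth keeping in mind while reading the examples: keys are compared by exact floating-point equality, and the method returns a bool (false when the key is absent) rather than throwing. The minimal sketch below was written for this page, not taken from any of the projects; it also shows why several examples round their double keys with Math.Round before using them.

using System;
using System.Collections.Generic;

var counts = new Dictionary<double, int>();
counts[0.1 + 0.2] = 1;                 // key is stored as 0.30000000000000004
bool removed = counts.Remove(0.3);     // false: not bit-for-bit equal to the stored key
removed = counts.Remove(0.1 + 0.2);    // true: exactly the same double value
counts[Math.Round(0.1 + 0.2, 3)] = 1;  // rounding the key on insert...
removed = counts.Remove(0.3);          // ...lets the literal 0.3 remove it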
Source : LayersDetector.cs
with MIT License
from gradientspace
public void Compute()
{
LayersCounts = new Dictionary<double, int>();
Action<IToolpath> processPathF = (path) => {
if ( path.HasFinitePositions ) {
foreach (Vector3d v in path.AllPositionsItr())
accumulate(v);
}
};
Action<IToolpathSet> processPathsF = null;
processPathsF = (paths) => {
foreach (IToolpath path in paths) {
if (path is IToolpathSet)
processPathsF(path as IToolpathSet);
else
processPathF(path);
}
};
processPathsF(Paths);
List<double> erase = new List<double>();
foreach ( var v in LayersCounts ) {
// [RMS] nothing should be at Z=0
if ( v.Key == 0 ) {
erase.Add(v.Key);
continue;
}
if (v.Value < MinLayerCount)
erase.Add(v.Key);
}
foreach (var e in erase)
LayersCounts.Remove(e);
LayerZ = new List<double>(LayersCounts.Keys);
LayerZ.Sort();
// estimate layer height
Dictionary<double, int> LayerHeights = new Dictionary<double, int>();
for (int i = 0; i < LayerZ.Count - 1; ++i ) {
double dz = Math.Round(LayerZ[i + 1] - LayerZ[i], 3);
if (LayerHeights.ContainsKey(dz) == false)
LayerHeights[dz] = 0;
LayerHeights[dz] = LayerHeights[dz] + 1;
}
double best_height = 0; int max_count = 0;
foreach ( var pair in LayerHeights ) {
if ( pair.Value > max_count ) {
max_count = pair.Value;
best_height = pair.Key;
}
}
EstimatedLayerHeight = best_height;
}
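Note the shape of this example: a Dictionary cannot be modified while it is being enumerated, so the keys to delete are collected into the erase list and removed only after the foreach completes. The same filter can be written more compactly with LINQ; this is a sketch against the fields defined above (LayersCounts, MinLayerCount), not part of the original source:

// requires using System.Linq; ToList() snapshots the keys before mutation
foreach (double key in LayersCounts
        .Where(kv => kv.Key == 0 || kv.Value < MinLayerCount)
        .Select(kv => kv.Key)
        .ToList())
{
    LayersCounts.Remove(key);
}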
Source : MeshPlanarMillSlicer.cs
with MIT License
from gradientspace
public Result Compute()
{
Result result = new Result();
if (Meshes.Count == 0)
return result;
// find Z interval we want to slice in
Interval1d zrange = Interval1d.Empty;
foreach ( var meshinfo in Meshes ) {
zrange.Contain(meshinfo.bounds.Min.z);
zrange.Contain(meshinfo.bounds.Max.z);
}
if (SetMinZValue != double.MinValue)
zrange.a = SetMinZValue;
result.TopZ = Math.Round(zrange.b, PrecisionDigits);
result.BaseZ = Math.Round(zrange.a, PrecisionDigits);
// [TODO] might be able to make better decisions if we took flat regions
// into account when constructing initial Z-heights? if we have large flat
// region just below Zstep, might make sense to do two smaller Z-steps so we
// can exactly hit it??
// construct list of clearing Z-heights
List<double> clearingZLayers = new List<double>();
double cur_layer_z = zrange.b;
int layer_i = 0;
while (cur_layer_z > zrange.a) {
double layer_height = get_layer_height(layer_i);
cur_layer_z -= layer_height;
double z = Math.Round(cur_layer_z, PrecisionDigits);
clearingZLayers.Add(z);
layer_i++;
}
if ( clearingZLayers.Last() < result.BaseZ )
clearingZLayers[clearingZLayers.Count-1] = result.BaseZ;
if ( clearingZLayers.Last() == clearingZLayers[clearingZLayers.Count-2] )
clearingZLayers.RemoveAt(clearingZLayers.Count-1);
// construct layer slices from Z-heights
List<PlanarSlice> clearing_slice_list = new List<PlanarSlice>();
layer_i = 0;
for ( int i = 0; i < clearingZLayers.Count; ++i ) {
double layer_height = (i == clearingZLayers.Count-1) ?
(result.TopZ-clearingZLayers[i]) : (clearingZLayers[i+1]-clearingZLayers[i]);
double z = clearingZLayers[i];
Interval1d zspan = new Interval1d(z, z+layer_height);
if (SliceLocation == SliceLocations.EpsilonBase)
z += 0.001;
PlanarSlice slice = SliceFactoryF(zspan, z, layer_i);
clearing_slice_list.Add(slice);
layer_i++;
}
int NH = clearing_slice_list.Count;
if (NH > MaxLayerCount)
throw new Exception("MeshPlanarSlicer.Compute: exceeded layer limit. Increase .MaxLayerCount.");
PlanarSlice[] clearing_slices = clearing_slice_list.ToArray();
// assume Resolve() takes 2x as long as meshes...
TotalCompute = (Meshes.Count * NH) + (2*NH);
Progress = 0;
// compute slices separately for each mesh
for (int mi = 0; mi < Meshes.Count; ++mi ) {
if (Cancelled())
break;
DMesh3 mesh = Meshes[mi].mesh;
PrintMeshOptions mesh_options = Meshes[mi].options;
// [TODO] should we hang on to this spatial? or should it be part of assembly?
DMeshAABBTree3 spatial = new DMeshAABBTree3(mesh, true);
AxisAlignedBox3d bounds = Meshes[mi].bounds;
bool is_cavity = mesh_options.IsCavity;
bool is_crop = mesh_options.IsCropRegion;
bool is_support = mesh_options.IsSupport;
bool is_closed = (mesh_options.IsOpen) ? false : mesh.IsClosed();
var useOpenMode = (mesh_options.OpenPathMode == PrintMeshOptions.OpenPathsModes.Default) ?
DefaultOpenPathMode : mesh_options.OpenPathMode;
if (is_crop || is_support)
throw new Exception("Not supported!");
// each layer is independent so we can do in parallel
gParallel.ForEach(Interval1i.Range(NH), (i) => {
if (Cancelled())
return;
double z = clearing_slices[i].Z;
if (z < bounds.Min.z || z > bounds.Max.z)
return;
// compute cut
Polygon2d[] polys; PolyLine2d[] paths;
ComputeSlicePlaneCurves(mesh, spatial, z, is_closed, out polys, out paths);
if (is_closed) {
// construct planar complex and "solids"
// (ie outer polys and nested holes)
PlanarComplex complex = new PlanarComplex();
foreach (Polygon2d poly in polys)
complex.Add(poly);
PlanarComplex.FindSolidsOptions options
= PlanarComplex.FindSolidsOptions.Default;
options.WantCurveSolids = false;
options.SimplifyDeviationTolerance = 0.001;
options.TrustOrientations = true;
options.AllowOverlappingHoles = true;
PlanarComplex.SolidRegionInfo solids = complex.FindSolidRegions(options);
List<GeneralPolygon2d> solid_polygons = ApplyValidRegions(solids.Polygons);
if (is_cavity) {
add_cavity_polygons(clearing_slices[i], solid_polygons, mesh_options);
} else {
if (ExpandStockAmount > 0)
solid_polygons = ClipperUtil.MiterOffset(solid_polygons, ExpandStockAmount);
add_solid_polygons(clearing_slices[i], solid_polygons, mesh_options);
}
}
Interlocked.Increment(ref Progress);
}); // end of parallel.foreach
} // end mesh iter
// resolve planar intersections, etc
gParallel.ForEach(Interval1i.Range(NH), (i) => {
if (Cancelled())
return;
clearing_slices[i].Resolve();
Interlocked.Add(ref Progress, 2);
});
// add to clearing stack
result.Clearing = SliceStackFactoryF();
for (int k = 0; k < clearing_slices.Length; ++k)
result.Clearing.Add(clearing_slices[k]);
/*
* Horizontal planar regions finishing pass.
* First we find all planar horizontal Z-regions big enough to mill.
* Then we add slices at the Z's we haven't touched yet.
*
* Cannot just 'fill' planar regions because we will miss edges that might
* be millable. So we grow region and then intersect with full-slice millable area.
*/
// find set of horizontal flat regions
Dictionary<double, List<PlanarRegion>> flat_regions = FindPlanarZRegions(ToolDiameter);
if (flat_regions.Count == 0)
goto done_slicing;
// if we have already milled this exact Z-height in the clearing pass, then we can skip it
List<double> doneZ = new List<double>();
foreach (double z in flat_regions.Keys) {
if (clearingZLayers.Contains(z))
doneZ.Add(z);
}
foreach (var z in doneZ)
flat_regions.Remove(z);
// create slice for each layer
PlanarSlice[] horz_slices = new PlanarSlice[flat_regions.Count];
List<double> flatZ = new List<double>(flat_regions.Keys);
flatZ.Sort();
for ( int k = 0; k < horz_slices.Length; ++k) {
double z = flatZ[k];
Interval1d zspan = new Interval1d(z, z + LayerHeightMM);
horz_slices[k] = SliceFactoryF(zspan, z, k);
// compute full millable region slightly above this slice.
PlanarSlice clip_slice = ComputeSolidSliceAtZ(z + 0.0001, false);
clip_slice.Resolve();
// extract planar polys
List<Polygon2d> polys = GetPlanarPolys(flat_regions[z]);
PlanarComplex complex = new PlanarComplex();
foreach (Polygon2d poly in polys)
complex.Add(poly);
// convert to planar solids
PlanarComplex.FindSolidsOptions options
= PlanarComplex.FindSolidsOptions.SortPolygons;
options.SimplifyDeviationTolerance = 0.001;
options.TrustOrientations = true;
options.AllowOverlappingHoles = true;
PlanarComplex.SolidRegionInfo solids = complex.FindSolidRegions(options);
List<GeneralPolygon2d> solid_polygons = ApplyValidRegions(solids.Polygons);
// If planar solid has holes, then when we do inset later, we might lose
// too-thin parts. Shrink the holes to avoid this case.
//FilterHoles(solid_polygons, 0.55 * ToolDiameter);
// ok now we need to expand region and intersect with full region.
solid_polygons = ClipperUtil.MiterOffset(solid_polygons, ToolDiameter*0.5, 0.0001);
solid_polygons = ClipperUtil.Intersection(solid_polygons, clip_slice.Solids, 0.0001);
// Same idea as above, but if we do after, we keep more of the hole and
// hence do less extra clearing.
// Also this could then be done at the slicer level instead of here...
// (possibly this entire thing should be done at slicer level, except we need clip_slice!)
FilterHoles(solid_polygons, 1.1 * ToolDiameter);
add_solid_polygons(horz_slices[k], solid_polygons, PrintMeshOptions.Default());
}
// resolve planar intersections, etc
int NF = horz_slices.Length;
gParallel.ForEach(Interval1i.Range(NF), (i) => {
if (Cancelled())
return;
horz_slices[i].Resolve();
Interlocked.Add(ref Progress, 2);
});
// add to clearing stack
result.HorizontalFinish = SliceStackFactoryF();
for (int k = 0; k < horz_slices.Length; ++k)
result.HorizontalFinish.Add(horz_slices[k]);
done_slicing:
return result;
}
Source : AdaptiveGrid.cs
with MIT License
from hypar-io
private void DeleteVertex(ulong id)
{
var vertex = _vertices[id];
_vertices.Remove(id);
var zDict = GetAddressParent(_verticesLookup, vertex.Point, tolerance: Tolerance);
if (zDict == null)
{
return;
}
zDict.Remove(vertex.Point.Z);
TryGetValue(_verticesLookup, vertex.Point.X, out var yzDict, Tolerance);
if (zDict.Count == 0)
{
yzDict.Remove(vertex.Point.Y);
}
if (yzDict.Count == 0)
{
_verticesLookup.Remove(vertex.Point.X);
}
}
Source : LineRenderer.cs
with MIT License
from LumpBloom7
private void removeHitObjectFromEntry(double entryTime, SentakkiLanedHitObject hitObject)
{
// Safety check to ensure a line entry actually exists
if (lineEntries.TryGetValue(entryTime, out var line))
{
line.Remove(hitObject);
// Remove this entry completely if there aren't any hitObjects using it
if (!line.HitObjects.Any())
{
lifetimeManager.RemoveEntry(lineEntries[entryTime]);
lineEntries.Remove(entryTime);
}
}
}
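One small tightening is possible here (a sketch, assuming lifetimeManager.RemoveEntry accepts the entry object): TryGetValue has already fetched the entry into line, so the indexer lookup lineEntries[entryTime] repeats work the method has already done.

lifetimeManager.RemoveEntry(line);  // same object as lineEntries[entryTime]
lineEntries.Remove(entryTime);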
Source : OrderBookStatsComputer.cs
with Apache License 2.0
from Marfusios
private void RemoveBook(Book book)
{
var id = book.Price;
if (_bids.ContainsKey(id))
_bids.Remove(id);
if (_asks.ContainsKey(id))
_asks.Remove(id);
}
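The ContainsKey guards above are defensive but not required: Remove returns false for an absent key rather than throwing, so each dictionary can be probed once instead of twice. A behavior-equivalent sketch:

private void RemoveBook(Book book)
{
    var id = book.Price;
    _bids.Remove(id);  // no-op (returns false) if id is not a bid price
    _asks.Remove(id);  // likewise for asks
}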
Source : ExportForm.cs
with Apache License 2.0
from ProteoWizard
private Dictionary<double, FragmentPeakInfo> AnnotatePeaks(List<double> originalMZs, Dictionary<string, double> fragmentList, int charge)
{
Dictionary<double, FragmentPeakInfo> availableAnnotations = CreatePossibleAnnotations(fragmentList, charge);
var annotationList = availableAnnotations.Select(item => item.Key).OrderBy(x => x).ToList();
var possibleExplanations = new Dictionary<double, FragmentPeakInfo>();
foreach (var peak in originalMZs)
{
var peak1 = peak;
var closestMatch = 0.0;
var peakExplanationList =
annotationList.Where(x => x < peak1 + _libraryExportSettings.fragmentMzTolerance && x > peak1 - _libraryExportSettings.fragmentMzTolerance).ToList();
if (peakExplanationList.Any())
{
var explanationList = availableAnnotations.Where(x => peakExplanationList.Contains(x.Key)).ToList();
if (explanationList.Count == 0)
continue;
var minComplexity = peakExplanationList.Min(x => availableAnnotations[x].complexity);
peakExplanationList =
peakExplanationList.Where(x => availableAnnotations[x].complexity == minComplexity).ToList();
if (peakExplanationList.Any())
{
var closestDistance = double.MaxValue;
foreach (var match in peakExplanationList)
{
var distance = Math.Abs(peak - match);
if (distance < closestDistance)
{
closestDistance = distance;
closestMatch = match;
}
}
}
}
if (peakExplanationList.Any() && availableAnnotations.ContainsKey(closestMatch))
{
possibleExplanations.Add(peak, availableAnnotations[closestMatch]);
availableAnnotations.Remove(closestMatch);
annotationList.Remove(closestMatch);
}
else
{
possibleExplanations.Add(peak,
new FragmentPeakInfo
{
fragmentID = "?",
originalMZvalue = peak,
relativePosition = 0,
complexity = int.MaxValue
}
);
}
}
return possibleExplanations;
}
Source : ByMagnitudeDetector.cs
with GNU General Public License v3.0
from Seank23
public void Detect()
{
double[] input = null;
double scale = 0;
if (InputData.GetType().Name == "Double[]")
input = (double[])InputData;
if(InputArgs.ContainsKey("SCALE"))
{
if (InputArgs["SCALE"].GetType().Name == "Double")
scale = (double)InputArgs["SCALE"];
}
if (input == null || scale == 0)
return;
Dictionary<double, double> output = new Dictionary<double, double>();
double freq;
double gainThreshold = input.Average() + int.Parse(Settings["THOLD_FROM_AVG"][0]);
// Iterates through frequency data, storing the frequency and gain of the largest frequency bins
for (int i = (int)(scale * int.Parse(Settings["MIN_FREQ"][0])); i < Math.Min(input.Length, (int)(scale * int.Parse(Settings["MAX_FREQ"][0]))); i++)
{
if (input[i] > gainThreshold)
{
freq = (i + 1) / scale; // Frequency value of bin
output.Add(freq, input[i]);
if (output.Count > int.Parse(Settings["PEAK_BUFFER"][0])) // When the peak buffer overflows, remove the lowest-gain bin
{
output = output.OrderByDescending(x => x.Value).ToDictionary(x => x.Key, x => x.Value); // Order: Gain - high to low
double keyToRemove = GetDictKey(output, output.Count - 1);
output.Remove(keyToRemove);
}
}
}
output = output.OrderBy(x => x.Key).ToDictionary(x => x.Key, x => x.Value); // Order: Frequency - low to high
List<KeyValuePair<double, double>> cluster = null;
KeyValuePair<double, double> largestGain = new KeyValuePair<double, double>();
int peakIndex = 0;
while (peakIndex < output.Count) // Removes unwanted and redundant peaks
{
double myFreq = GetDictKey(output, peakIndex);
if (cluster == null)
{
cluster = new List<KeyValuePair<double, double>>();
largestGain = new KeyValuePair<double, double>(myFreq, output[myFreq]);
cluster.Add(largestGain);
peakIndex++;
continue;
}
else if ((myFreq - largestGain.Key) <= largestGain.Key / 100 * double.Parse(Settings["MAX_FREQ_CHANGE"][0])) // Finds clusters of points that represent the same peak
{
cluster.Add(new KeyValuePair<double, double>(myFreq, output[myFreq]));
if (output[myFreq] > largestGain.Value)
largestGain = new KeyValuePair<double, double>(myFreq, output[myFreq]);
if (peakIndex < output.Count - 1)
{
peakIndex++;
continue;
}
}
if (cluster.Count > 1) // Keeps only the largest value in the cluster
{
cluster.Remove(largestGain);
for (int j = 0; j < cluster.Count; j++)
{
output.Remove(cluster[j].Key);
}
peakIndex -= cluster.Count;
}
cluster = null;
}
output = output.OrderBy(x => x.Key).ToDictionary(x => x.Key, x => x.Value); // Order: Frequency - low to high
List<double> discardFreqs = new List<double>();
for (int i = 0; i < output.Count - 1; i++) // Removes any unwanted residual peaks after a large peak
{
double freqA = GetDictKey(output, i);
double freqB = GetDictKey(output, i + 1);
if (Math.Abs(output[freqA] - output[freqB]) >= double.Parse(Settings["MAX_GAIN_CHANGE"][0]))
{
if (output[freqA] > output[freqB]) // Discard lowest value
discardFreqs.Add(freqB);
else
discardFreqs.Add(freqA);
}
}
foreach (double frequency in discardFreqs)
output.Remove(frequency);
Output = output.OrderByDescending(x => x.Value).ToDictionary(x => x.Key, x => x.Value); // Order: Gain - high to low
}
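GetDictKey is presumably a project helper that returns the key at a given position in the dictionary's current order. For the peak-buffer trim above, the lowest-gain entry can also be found directly with LINQ, which avoids re-sorting the whole dictionary just to drop one entry; a sketch under that assumption:

// requires using System.Linq; removes the entry with the smallest gain
double keyToRemove = output.OrderBy(x => x.Value).First().Key;
output.Remove(keyToRemove);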
Source : PitchByMagnitudeDetector.cs
with GNU General Public License v3.0
from Seank23
public void Detect()
{
double[] input = null;
double scale = 0;
double tuning = 0;
if (InputData.GetType().Name == "Double[]")
input = (double[])InputData;
if (InputArgs.ContainsKey("SCALE"))
{
if (InputArgs["SCALE"].GetType().Name == "Double")
scale = (double)InputArgs["SCALE"];
}
if(InputArgs.ContainsKey("TUNING"))
{
if (InputArgs["TUNING"].GetType().Name == "Double")
tuning = (double)InputArgs["TUNING"];
}
if (input == null || scale == 0)
return;
Dictionary<double, double> output = new Dictionary<double, double>();
double spectAvg = input.Average();
double spectMax = input.Max();
int notesPerOctave = 12;
double minFreq = double.Parse(Settings["MIN_FREQ"][0]) + (tuning * double.Parse(Settings["MIN_FREQ"][0]) / 100);
double[] noteFreqs = new double[notesPerOctave * int.Parse(Settings["OCTAVES"][0])];
for (int i = 0; i < noteFreqs.Length; i++)
noteFreqs[i] = minFreq * Math.Pow(2, (double)i / notesPerOctave);
double prevMag = 0;
double prevFreq = 0;
for(int i = 0; i < noteFreqs.Length; i++)
{
int inputIndex = (int)Math.Round(noteFreqs[i] * scale) - 1;
int clusterSize = 0;
while (clusterSize / scale <= noteFreqs[i] / 100 * double.Parse(Settings["FREQ_TOLERANCE"][0]))
clusterSize++;
if (inputIndex + clusterSize > input.Length)
break;
double avgMag = 0;
double maxVal = 0, maxIndex = 0;
for (int j = -clusterSize; j < clusterSize; j++)
{
avgMag += input[inputIndex + j];
if(input[inputIndex + j] > maxVal)
{
maxVal = input[inputIndex + j];
maxIndex = inputIndex + j;
}
}
avgMag /= 2 * clusterSize + 1;
double maxFreq = maxIndex / scale;
if(Math.Abs(prevMag - maxVal) > Math.Max(prevMag, maxVal) / 4 && noteFreqs[i] < 200)
{
if (prevMag - maxVal < 0)
output.Remove(prevFreq);
else
{
prevMag = maxVal;
prevFreq = maxFreq;
continue;
}
}
if (avgMag > spectAvg + ((spectMax - spectAvg) * double.Parse(Settings["MAG_THRESHOLD"][0])))
output[maxFreq] = maxVal;
prevMag = maxVal;
prevFreq = maxFreq;
//Console.WriteLine("Note Freq: " + noteFreqs[i] + " Cluster Size: " + clusterSize + " Max Freq: " + (maxIndex / scale) + " Max Mag: " + maxVal);
}
output = output.OrderByDescending(x => x.Value).ToDictionary(x => x.Key, x => x.Value);
Output = output.Take(int.Parse(Settings["MAX_VALS"][0])).ToDictionary(pair => pair.Key, pair => pair.Value);
OutputArgs.Add("TUNING_OUT", -int.Parse(Settings["TUNING_OFFSET"][0])); // Pass tuning offset back to the app to be applied
}
Source : RemoveKickNoiseProcessor.cs
with GNU General Public License v3.0
from Seank23
public void Process()
{
Dictionary<double, double> input = null;
if (InputBuffer.GetType().Name == "Dictionary`2")
input = (Dictionary<double, double>)InputBuffer;
if (input == null)
return;
List<double> discardFreqs = new List<double>();
double prevFreq = 0;
input = input.OrderBy(x => x.Key).ToDictionary(x => x.Key, x => x.Value); // Order: Frequency - low to high
foreach (double freq in input.Keys)
{
if (freq > double.Parse(Settings["CUTOFF_FREQ"][0]))
break;
if (prevFreq == 0)
{
prevFreq = freq;
continue;
}
if ((freq - prevFreq) <= freq / 100 * (2.5 * double.Parse(Settings["MAX_FREQ_CHANGE"][0]))) // Checking for consecutive, closely packed peaks - noise
{
if (Math.Abs(input[freq] - input[prevFreq]) <= double.Parse(Settings["SIMILAR_GAIN_THRESHOLD"][0]))
{
if (!discardFreqs.Contains(prevFreq))
discardFreqs.Add(prevFreq);
discardFreqs.Add(freq);
}
}
prevFreq = freq;
}
foreach (double frequency in discardFreqs)
input.Remove(frequency);
OutputBuffer = input;
}