Here are the examples of the csharp api System.Collections.Generic.IEnumerable.Sum() taken from open source projects. By voting up you can indicate which examples are most useful and appropriate.
905 Examples
19
Source : CachingMetrics.cs
with GNU General Public License v3.0
from BRH-Media
public static long SizeOfFiles(string[] files)
{
    // Total size, in bytes, of every file in `files` that exists on disk.
    long size = 0;
    try
    {
        // Method-syntax LINQ: keep only files that exist, then total their lengths.
        size += files
            .Where(File.Exists)
            .Select(path => new FileInfo(path))
            .Sum(info => info.Length);
    }
    catch (Exception ex)
    {
        // Log the failure and carry on; whatever was accumulated (0) is returned.
        LoggingHelpers.RecordException(ex.Message, "CacheSizeCalcError");
    }
    // Final accumulated size.
    return size;
}
19
Source : SwingsPerSecond.cs
with GNU General Public License v2.0
from Caeden117
// Recomputes swings-per-second (SPS) statistics for the current map and
// publishes them to the Red, Blue and Total Stats properties (overall SPS,
// peak rolling SPS, and median per-interval SPS).
// NOTE(review): interval is hard-coded to 10 slots of the SwingCount
// histogram — confirm the slot unit against SwingCount().
public void Update()
{
var interval = 10;
var songBpm = BeatSaberSongContainer.Instance.Song.BeatsPerMinute;
// Per-slot swing counts; by local naming, [0] is the red hand, [1] the blue hand.
var swings = SwingCount(songBpm);
var red = swings[0];
var blue = swings[1];
// Combined (both hands) swing count per slot.
var swingCountList = new int[red.Length];
for (var x = 0; x < red.Length; x++) swingCountList[x] = red[x] + blue[x];
// Guard kept even though interval is currently a constant 10.
if (interval < 1)
{
Debug.LogWarning("Interval cannot be less than 1");
return;
}
// No swings at all: publish zeroed totals and bail out early.
if (swingCountList.Sum() == 0)
{
Total = new Stats(0, 0, 0);
//Debug.LogWarning("Map has no notes");
return;
}
// Used to calculate median and SPS across the set of all intervals
var redSpsPerInterval = new List<double>();
var blueSpsPerInterval = new List<double>();
var totalSpsPerInterval = new List<double>();
for (var i = 0; i < swingCountList.Length; i += interval)
{
// The last chunk may be shorter than `interval`; divide by its real length.
double intervalLength = i + interval > swingCountList.Length ? swingCountList.Length - i : interval;
var swingCountSlice = swingCountList.Skip(i).Take(interval);
var totalSps = swingCountSlice.Sum() / intervalLength;
var redSlice = red.Skip(i).Take(interval);
var redSps = redSlice.Sum() / intervalLength;
var blueSlice = blue.Skip(i).Take(interval);
var blueSps = blueSlice.Sum() / intervalLength;
/*Debug.LogFormat("{0} to {1}: R({2:0.00})|B({3:0.00})|T({4:0.00})",
ConvertTime(i),
ConvertTime(i + (int)intervalLength - 1),
red_sps, blue_sps, total_sps
);*/
blueSpsPerInterval.Add(blueSps);
redSpsPerInterval.Add(redSps);
totalSpsPerInterval.Add(totalSps);
}
// Overall SPS is normalized to the interactive span of the map
// (first to last interactive object), not the full song length.
var firstInteractionTime = FirstInteractiveObjectTime(songBpm);
var lastInteractionTime = LastInteractiveObjectTime(songBpm);
Red = new Stats(
red.Sum() / (lastInteractionTime - firstInteractionTime),
CalculateMaxRollingSps(red, interval),
Median(redSpsPerInterval)
);
Blue = new Stats(
blue.Sum() / (lastInteractionTime - firstInteractionTime),
CalculateMaxRollingSps(blue, interval),
Median(blueSpsPerInterval)
);
Total = new Stats(
swingCountList.Sum() / (lastInteractionTime - firstInteractionTime),
CalculateMaxRollingSps(swingCountList, interval),
Median(totalSpsPerInterval)
);
// The commented-out block below is leftover Python from the tool this was ported from.
/*Debug.LogFormat("Normalized Deviation: R({0})|B({})|T({})".format(
round(statistics.pstdev(red_sps_per_interval) / red_total if red_total > 0 else 1, 2),
round(statistics.pstdev(blue_sps_per_interval) / blue_total if blue_total > 0 else 1, 2),
round(statistics.pstdev(total_sps_per_interval) / total if total > 0 else 1, 2)))
if len(red_sps_per_interval) > 1 and len(blue_sps_per_interval) > 1:
print("Variance: R({})|B({})|T({})".format(
round(statistics.variance(red_sps_per_interval), 2),
round(statistics.variance(blue_sps_per_interval), 2),
round(statistics.variance(total_sps_per_interval, 2))))
return total*/
}
19
Source : SwingsPerSecond.cs
with GNU General Public License v2.0
from Caeden117
with GNU General Public License v2.0
from Caeden117
// Returns the highest swings-per-second over any window of `interval`
// consecutive slots, using an O(n) sliding-window sum.
private float CalculateMaxRollingSps(int[] spsList, int interval)
{
    if (spsList.Length == 0) return 0;
    // Shorter than one window: average over everything we have.
    // BUGFIX: the cast forces float division — int/int silently truncated
    // the average before the implicit conversion (e.g. 3/2 -> 1, not 1.5).
    if (spsList.Length < interval) return spsList.Sum() / (float)spsList.Length;
    // Seed with the first window, then slide one slot at a time:
    // drop the element leaving the window, add the one entering it.
    var currentSps = spsList.Take(interval).Sum();
    var maxSps = currentSps;
    for (var x = 0; x < spsList.Length - interval; x++)
    {
        currentSps = currentSps - spsList[x] + spsList[x + interval];
        maxSps = Mathf.Max(maxSps, currentSps);
    }
    // Convert the best window's count into a per-slot rate.
    return maxSps / (float)interval;
}
19
Source : HP.cs
with MIT License
from cabarius
with MIT License
from cabarius
// Adjusts the unit's base hit points on level-up, replacing the default
// main-class hit-die contribution with the configured multiclass HP policy.
// NOTE(review): assumes state.SelectedClreplaced is present in appliedClreplacedes;
// FindIndex would return -1 otherwise and the indexing below would throw.
public static void ApplyHPDice(UnitDescriptor unit, LevelUpState state, BlueprintCharacterClreplaced[] appliedClreplacedes) {
    // Length check on the array instead of LINQ Count(); arrays cannot be negative-sized.
    if (appliedClreplacedes.Length == 0) return;
    var newClreplacedLvls = appliedClreplacedes.Select(cl => unit.Progression.GetClreplacedLevel(cl)).ToArray();
    var clreplacedCount = newClreplacedLvls.Length;
    var hitDies = appliedClreplacedes.Select(cl => (int)cl.HitDie).ToArray();
    var mainClreplacedIndex = appliedClreplacedes.ToList().FindIndex(ch => ch == state.SelectedClreplaced);
    //Logger.ModLoggerDebug($"mainClreplacedIndex = {mainClreplacedIndex}");
    // The game already granted the main class's hit die; the policy result
    // below is applied as a delta against it.
    var currentHPIncrease = hitDies[mainClreplacedIndex];
    var newIncrease = currentHPIncrease;
    switch (Main.settings.multiclreplacedHitPointPolicy) {
        case ProgressionPolicy.Average:
            // Integer division: fractional HP is truncated — presumably intended.
            newIncrease = hitDies.Sum() / clreplacedCount;
            break;
        case ProgressionPolicy.Largest:
            newIncrease = hitDies.Max();
            break;
        case ProgressionPolicy.Sum:
            newIncrease = hitDies.Sum();
            break;
        default:
            break;
    }
    unit.Stats.GetStat(StatType.HitPoints).BaseValue += newIncrease - currentHPIncrease;
}
19
Source : Spinometer.cs
with MIT License
from Caeden117
with MIT License
from Caeden117
// Coroutine: once per real-time second, folds the rotation samples collected
// during that second into spin speeds and refreshes the spinometer text.
// NOTE(review): leftAngles/rightAngles presumably hold per-frame angle deltas,
// so their one-second sum is a speed in degrees/second — confirm at the producer.
private IEnumerator SecondTick()
{
while (true)
{
yield return new WaitForSecondsRealtime(1);
// Discard the raw orientation samples gathered this second.
leftQuaternions.Clear();
rightQuaternions.Clear();
float leftSpeed = leftAngles.Sum();
float rightSpeed = rightAngles.Sum();
leftAngles.Clear();
rightAngles.Clear();
float averageSpeed = (leftSpeed + rightSpeed) / 2;
// Track the fastest single-hand spin seen so far.
if (leftSpeed > highestSpin) highestSpin = leftSpeed;
if (rightSpeed > highestSpin) highestSpin = rightSpeed;
// Render according to the configured display mode (rich-text color tags).
switch (Settings.Mode)
{
case SpinometerMode.Average:
spinometer.text = $"<color=#{DetermineColor(averageSpeed)}>{Mathf.RoundToInt(averageSpeed)}</color>";
break;
case SpinometerMode.Highest:
spinometer.text = $"<color=#{DetermineColor(highestSpin)}>{Mathf.RoundToInt(highestSpin)}</color>";
break;
case SpinometerMode.SplitAverage:
spinometer.text = $"<color=#{DetermineColor(leftSpeed)}>{Mathf.RoundToInt(leftSpeed)}</color> | <color=#{DetermineColor(rightSpeed)}>{Mathf.RoundToInt(rightSpeed)}</color>";
break;
}
}
}
19
Source : TrackViewModel.cs
with Apache License 2.0
from Capnode
with Apache License 2.0
from Capnode
// Sharpe ratio of a trade list: total net profit divided by the standard
// deviation of per-trade net profits; 0 when the deviation is 0.
internal static decimal CalcSharpe(IList<Trade> trades)
{
    // Materialize once: the deferred LINQ projection was otherwise
    // re-enumerated by both Sum() and StandardDeviation().
    IEnumerable<decimal> range = trades.Select(m => m.ProfitLoss - m.TotalFees).ToArray();
    decimal netProfit = range.Sum();
    decimal stddev = StandardDeviation(range);
    // Guard against division by zero for constant-profit trade sets.
    decimal sharpe = stddev == 0 ? 0 : netProfit / stddev;
    return sharpe;
}
19
Source : TrackViewModel.cs
with Apache License 2.0
from Capnode
with Apache License 2.0
from Capnode
// Root-mean-square deviation of the cumulative profit curve from the ideal
// straight line (equal profit per trade). Returns 0 for an empty trade list.
private static double LinearDeviation(IList<Trade> trades)
{
    int count = trades.Count;
    if (count == 0)
    {
        return 0;
    }
    // Materialize once: the deferred projection was otherwise enumerated
    // twice (by Sum() and again by the foreach below).
    IEnumerable<decimal> range = trades.Select(m => m.ProfitLoss - m.TotalFees).ToArray();
    decimal netProfit = range.Sum();
    decimal avg = netProfit / count;
    decimal profit = 0;   // running actual cumulative profit
    decimal ideal = 0;    // running ideal (linear) cumulative profit
    decimal sum = 0;      // sum of squared deviations
    foreach (decimal trade in range)
    {
        profit += trade;
        ideal += avg;
        decimal epsilon = profit - ideal;
        sum += epsilon * epsilon;
    }
    double variance = (double)sum / count;
    return Math.Sqrt(variance);
}
19
Source : ReturnsSymbolData.cs
with Apache License 2.0
from Capnode
with Apache License 2.0
from Capnode
// Builds a returns matrix (rows = dates or sequence indices, columns = symbols)
// for portfolio optimization. Missing entries are filled with double.NaN and
// rows whose absolute sum is NaN or zero are dropped.
public static double[,] FormReturnsMatrix(this Dictionary<Symbol, ReturnsSymbolData> symbolData, IEnumerable<Symbol> symbols)
{
// One returns-dictionary per requested symbol, joined against the stored data.
var returnsByDate = (from s in symbols join sd in symbolData on s equals sd.Key select sd.Value.Returns).ToList();
// Consolidate by date
var alldates = returnsByDate.SelectMany(r => r.Keys).Distinct().ToList();
var max = symbolData.Count == 0 ? 0 : symbolData.Max(kvp => kvp.Value.Returns.Count);
// Perfect match between the dates in the ReturnsSymbolData objects
if (max == alldates.Count)
{
return Accord.Math.Matrix.Create(alldates
// if a return date isn't found for a symbol we use 'double.NaN'
.Select(d => returnsByDate.Select(s => s.GetValueOrDefault(d, double.NaN)).ToArray())
.Where(r => !r.Select(Math.Abs).Sum().IsNaNOrZero()) // remove empty rows
.ToArray());
}
// If it is not a match, we replacedume that each index correspond to the same point in time
var returnsByIndex = returnsByDate.Select((doubles, i) => doubles.Values.ToArray());
return Accord.Math.Matrix.Create(Enumerable.Range(0, max)
// there is no guarantee that all symbols have the same amount of returns so we need to check range and use 'double.NaN' if required as above
.Select(d => returnsByIndex.Select(s => s.Length < (d + 1) ? double.NaN : s[d]).ToArray())
.Where(r => !r.Select(Math.Abs).Sum().IsNaNOrZero()) // remove empty rows
.ToArray());
}
19
Source : Statistics.cs
with Apache License 2.0
from Capnode
with Apache License 2.0
from Capnode
// Builds the summary statistics table (win/loss rates, Sharpe, alpha/beta,
// drawdown, etc.) for a completed backtest. Each stage runs in its own
// try/catch so a failure in one statistic degrades gracefully (logged, zeroed)
// instead of aborting the whole report.
public static Dictionary<string, string> Generate(IEnumerable<ChartPoint> pointsEquity,
SortedDictionary<DateTime, decimal> profitLoss,
IEnumerable<ChartPoint> pointsPerformance,
Dictionary<DateTime, decimal> unsortedBenchmark,
decimal startingCash,
decimal totalFees,
decimal totalTrades,
double tradingDaysPerYear = 252
)
{
//Initialise the response:
double riskFreeRate = 0;
decimal totalClosedTrades = 0;
decimal totalWins = 0;
decimal totalLosses = 0;
decimal averageWin = 0;
decimal averageLoss = 0;
decimal averageWinRatio = 0;
decimal winRate = 0;
decimal lossRate = 0;
decimal totalNetProfit = 0;
double fractionOfYears = 1;
decimal profitLossValue = 0, runningCash = startingCash;
decimal algoCompoundingPerformance = 0;
decimal finalBenchmarkCash = 0;
decimal benchCompoundingPerformance = 0;
var years = new List<int>();
var annualTrades = new SortedDictionary<int, int>();
var annualWins = new SortedDictionary<int, int>();
var annualLosses = new SortedDictionary<int, int>();
var annualLossTotal = new SortedDictionary<int, decimal>();
var annualWinTotal = new SortedDictionary<int, decimal>();
var annualNetProfit = new SortedDictionary<int, decimal>();
var statistics = new Dictionary<string, string>();
var dtPrevious = new DateTime();
var listPerformance = new List<double>();
var listBenchmark = new List<double>();
var equity = new SortedDictionary<DateTime, decimal>();
var performance = new SortedDictionary<DateTime, decimal>();
SortedDictionary<DateTime, decimal> benchmark = null;
try
{
//Get array versions of the performance:
performance = ChartPointToDictionary(pointsPerformance);
equity = ChartPointToDictionary(pointsEquity);
// Performance points are percentages; convert to fractional returns.
performance.Values.ToList().ForEach(i => listPerformance.Add((double)(i / 100)));
benchmark = new SortedDictionary<DateTime, decimal>(unsortedBenchmark);
// to find the delta in benchmark for first day, we need to know the price at the opening
// moment of the day, but since we cannot find this, we cannot find the first benchmark's delta,
// so we pad it with Zero. If running a short backtest this will skew results, longer backtests
// will not be affected much
listBenchmark.Add(0);
//Get benchmark performance array for same period:
benchmark.Keys.ToList().ForEach(dt =>
{
// Restrict the benchmark series to the equity curve's date window.
if (dt >= equity.Keys.FirstOrDefault().AddDays(-1) && dt < equity.Keys.LastOrDefault())
{
decimal previous;
if (benchmark.TryGetValue(dtPrevious, out previous) && previous != 0)
{
// Day-over-day fractional change of the benchmark.
var deltaBenchmark = (benchmark[dt] - previous)/previous;
listBenchmark.Add((double)(deltaBenchmark));
}
else
{
listBenchmark.Add(0);
}
dtPrevious = dt;
}
});
// TODO : if these lists are required to be the same length then we should create structure to pair the values, this way, by contract it will be enforced.
//THIS SHOULD NEVER HAPPEN --> But if it does, log it and fail silently.
while (listPerformance.Count < listBenchmark.Count)
{
listPerformance.Add(0);
Log.Error("Statistics.Generate(): Padded Performance");
}
while (listPerformance.Count > listBenchmark.Count)
{
listBenchmark.Add(0);
Log.Error("Statistics.Generate(): Padded Benchmark");
}
}
catch (Exception err)
{
Log.Error(err, "Dic-Array Convert:");
}
try
{
//Number of years in this dataset:
fractionOfYears = (equity.Keys.LastOrDefault() - equity.Keys.FirstOrDefault()).TotalDays / 365;
}
catch (Exception err)
{
Log.Error(err, "Fraction of Years:");
}
try
{
if (benchmark != null)
{
// Annualized (compounding) performance for both the algorithm and the benchmark.
algoCompoundingPerformance = CompoundingAnnualPerformance(startingCash, equity.Values.LastOrDefault(), (decimal) fractionOfYears);
finalBenchmarkCash = ((benchmark.Values.Last() - benchmark.Values.First())/benchmark.Values.First())*startingCash;
benchCompoundingPerformance = CompoundingAnnualPerformance(startingCash, finalBenchmarkCash, (decimal) fractionOfYears);
}
}
catch (Exception err)
{
Log.Error(err, "Compounding:");
}
try
{
//Run over each equity day:
// Bucket each closed trade into its calendar year, splitting wins and losses.
foreach (var closedTrade in profitLoss.Keys)
{
profitLossValue = profitLoss[closedTrade];
//Check if this date is in the "years" array:
var year = closedTrade.Year;
if (!years.Contains(year))
{
//Initialise a new year holder:
years.Add(year);
annualTrades.Add(year, 0);
annualWins.Add(year, 0);
annualWinTotal.Add(year, 0);
annualLosses.Add(year, 0);
annualLossTotal.Add(year, 0);
}
//Add another trade:
annualTrades[year]++;
//Profit loss tracking:
// Totals are fractions of the running account value at trade time.
if (profitLossValue > 0)
{
annualWins[year]++;
annualWinTotal[year] += profitLossValue / runningCash;
}
else
{
annualLosses[year]++;
annualLossTotal[year] += profitLossValue / runningCash;
}
//Increment the cash:
runningCash += profitLossValue;
}
//Get the annual percentage of profit and loss:
foreach (var year in years)
{
annualNetProfit[year] = (annualWinTotal[year] + annualLossTotal[year]);
}
//Sum the totals:
try
{
// With at least one closed trade, totalClosedTrades is >= 1 here,
// so the divisions below cannot hit zero.
if (profitLoss.Keys.Count > 0)
{
totalClosedTrades = annualTrades.Values.Sum();
totalWins = annualWins.Values.Sum();
totalLosses = annualLosses.Values.Sum();
totalNetProfit = (equity.Values.LastOrDefault() / startingCash) - 1;
//-> Handle Div/0 Errors
if (totalWins == 0)
{
averageWin = 0;
}
else
{
averageWin = annualWinTotal.Values.Sum() / totalWins;
}
if (totalLosses == 0)
{
averageLoss = 0;
averageWinRatio = 0;
}
else
{
averageLoss = annualLossTotal.Values.Sum() / totalLosses;
averageWinRatio = Math.Abs(averageWin / averageLoss);
}
// NOTE(review): this guard checks the totalTrades parameter, not the
// totalClosedTrades divisor used below — confirm that is intentional.
if (totalTrades == 0)
{
winRate = 0;
lossRate = 0;
}
else
{
winRate = Math.Round(totalWins / totalClosedTrades, 5);
lossRate = Math.Round(totalLosses / totalClosedTrades, 5);
}
}
}
catch (Exception err)
{
Log.Error(err, "Second Half:");
}
var profitLossRatio = ProfitLossRatio(averageWin, averageLoss);
var profitLossRatioHuman = profitLossRatio.ToString(CultureInfo.InvariantCulture);
// -1 is the sentinel ProfitLossRatio uses for "undefined".
if (profitLossRatio == -1) profitLossRatioHuman = "0";
//Add the over all results first, break down by year later:
statistics = new Dictionary<string, string> {
{ "Total Trades", Math.Round(totalTrades, 0).ToStringInvariant() },
{ "Average Win", Math.Round(averageWin * 100, 2).ToStringInvariant() + "%" },
{ "Average Loss", Math.Round(averageLoss * 100, 2).ToStringInvariant() + "%" },
{ "Compounding Annual Return", Math.Round(algoCompoundingPerformance * 100, 3).ToStringInvariant() + "%" },
{ "Drawdown", (DrawdownPercent(equity, 3) * 100).ToStringInvariant() + "%" },
{ "Expectancy", Math.Round((winRate * averageWinRatio) - (lossRate), 3).ToStringInvariant() },
{ "Net Profit", Math.Round(totalNetProfit * 100, 3).ToStringInvariant() + "%"},
{ "Sharpe Ratio", Math.Round(SharpeRatio(listPerformance, riskFreeRate), 3).ToStringInvariant() },
{ "Loss Rate", Math.Round(lossRate * 100).ToStringInvariant() + "%" },
{ "Win Rate", Math.Round(winRate * 100).ToStringInvariant() + "%" },
{ "Profit-Loss Ratio", profitLossRatioHuman },
{ "Alpha", Math.Round(Alpha(listPerformance, listBenchmark, riskFreeRate), 3).ToStringInvariant() },
{ "Beta", Math.Round(Beta(listPerformance, listBenchmark), 3).ToStringInvariant() },
{ "Annual Standard Deviation", Math.Round(AnnualStandardDeviation(listPerformance, tradingDaysPerYear), 3).ToStringInvariant() },
{ "Annual Variance", Math.Round(AnnualVariance(listPerformance, tradingDaysPerYear), 3).ToStringInvariant() },
{ "Information Ratio", Math.Round(InformationRatio(listPerformance, listBenchmark), 3).ToStringInvariant() },
{ "Tracking Error", Math.Round(TrackingError(listPerformance, listBenchmark), 3).ToStringInvariant() },
{ "Treynor Ratio", Math.Round(TreynorRatio(listPerformance, listBenchmark, riskFreeRate), 3).ToStringInvariant() },
{ "Total Fees", "$" + totalFees.ToStringInvariant("0.00") }
};
}
catch (Exception err)
{
Log.Error(err);
}
return statistics;
}
19
Source : FCSDeepDrillerContainer.cs
with MIT License
from ccgould
with MIT License
from ccgould
// Total of all stored item amounts; 0 when the container is not initialized.
internal int GetContainerTotal()
{
    // Sum the values directly. The previous Select((t, i) => ElementAt(i).Value)
    // re-walked the collection for every element (O(n^2)) to reach the same value.
    return _container?.Sum(item => item.Value) ?? 0;
}
19
Source : CalculateDriver.cs
with Apache License 2.0
from cdy816
with Apache License 2.0
from cdy816
// Sums the numeric values of the given tags; returns 0 if any lookup or
// conversion fails (the failure is logged).
public double TagValueSum(params string[] tags)
{
    try
    {
        // Sum() enumerates inside the try, so conversion failures are still caught.
        return tags.Select(tag => Convert.ToDouble(GetTagValue(tag))).Sum();
    }
    catch (Exception ex)
    {
        LoggerService.Service.Erro("Calculate", ex.StackTrace);
    }
    return 0;
}
19
Source : MajorityManager.cs
with MIT License
from centaurus-project
with MIT License
from centaurus-project
// Registers an auditor's result envelope, tallying votes per message hash,
// and raises the consensus callback once a majority is reached. After the
// majority is decided, the entry is removed when every connected auditor
// has reported.
public void Add(IncomingMessage message)
{
    MessageEnvelope consensus = null;
    MajorityResults majorityResult = MajorityResults.Unknown;
    lock (syncRoot)
    {
        if (!storage.TryGetValue(message.MessageHash, out var envelopeVote))//first result with such hash
        {
            envelopeVote = new MessageEnvelopeVote(message.Envelope, message.MessageHash);
            storage.Add(message.MessageHash, envelopeVote);
        }
        else
            envelopeVote.AddSignature(message.Envelope.Signatures[0]);
        // (removed dead debug leftover: an empty "if (storage.Count > 1) { }" block)
        if (IsProcessed)
        {
            // Majority already decided earlier; once every connected auditor has
            // reported, drop the aggregated entry.
            if (storage.Select(e => e.Value.Signatures.Count).Sum() == ((AlphaStateManager)majorityManager.Context.AppState).ConnectedAuditorsCount) //remove if all auditors sent results
            {
                majorityManager?.Remove(Id);
            }
            return;
        }
        majorityResult = CheckMajority(out consensus);
        if (majorityResult != MajorityResults.Unknown)
        {
            majorityManager.OnResult(majorityResult, consensus);
            IsProcessed = true;
        }
    }
}
19
Source : PerformanceStatisticsManager.cs
with MIT License
from centaurus-project
with MIT License
from centaurus-project
// Records the current quantum queue length and returns the floored average
// over a rolling window of at most the 20 most recent samples.
protected int GetQuantaAvgLength()
{
    LastQuantaQueueLengths.Add(Context.QuantumHandler.QuantaQueueLenght);
    if (LastQuantaQueueLengths.Count > 20)
        LastQuantaQueueLengths.RemoveAt(0);
    // decimal.Divide keeps the exact quotient before flooring.
    var windowTotal = LastQuantaQueueLengths.Sum();
    var windowSize = LastQuantaQueueLengths.Count;
    return (int)Math.Floor(decimal.Divide(windowTotal, windowSize));
}
19
Source : FunctionalGroupVisual.cs
with Apache License 2.0
from Chem4Word
with Apache License 2.0
from Chem4Word
// Renders the functional-group label as a formatted text line, positioned so
// that the anchor glyphs are centred on `location`, then builds the convex
// hull of the drawn glyphs (stored in Hull/_sortedOutline) for later geometry work.
private void Render(Point location, string colour)
{
int textStorePosition = 0;
bool flipped;
Position = location;
// Without a parent atom there is no orientation to mirror against.
if (ParentAtom == null)
{
flipped = false;
}
else
{
flipped = Flipped;
}
var textStore = new FunctionalGroupTextSource(ParentGroup, colour, flipped)
{
SymbolSize = SymbolSize,
SubscriptSize = SubscriptSize,
SuperscriptSize = SuperscriptSize
};
//main textformatter - this does the writing of the visual
using (TextFormatter textFormatter = TextFormatter.Create())
{
//set up the default paragraph properties
var paraprops = new FunctionalGroupTextSource.GenericTextParagraphProperties(
FlowDirection.LeftToRight,
TextAlignment.Left,
true,
false,
new LabelTextRunProperties(colour, SymbolSize),
TextWrapping.NoWrap,
SymbolSize,
0d);
// Concatenate the anchor runs' text; its length locates the anchor within the line.
var anchorRuns = textStore.Runs.Where(f => f.IsAnchor);
string anchorString = string.Empty;
foreach (var run in anchorRuns)
{
anchorString += run.Text;
}
using (TextLine myTextLine =
textFormatter.FormatLine(textStore, textStorePosition, 999, paraprops, null))
{
IList<TextBounds> textBounds;
Rect firstRect = Rect.Empty;
if (!Flipped) //isolate them at the beginning
{
textBounds = myTextLine.GetTextBounds(0, anchorString.Length);
}
else
{
//isolate them at the end
var start = myTextLine.Length - 1 - anchorString.Length;
textBounds = myTextLine.GetTextBounds(start, anchorString.Length);
}
//add all the bounds together
foreach (TextBounds anchorBound in textBounds)
{
firstRect.Union(anchorBound.Rectangle);
}
//center will be position close to the origin 0,0
Point center = new Point((firstRect.Left + firstRect.Right) / 2,
(firstRect.Top + firstRect.Bottom) / 2);
//the displacement vector will be added to each relative coordinate for the glyph run
var displacementVector = location - center;
//locus is where the text line is drawn
var locus = new Point(0, 0) + displacementVector;
textBounds = myTextLine.GetTextBounds(0, 999);
var obb = textBounds[0].Rectangle;
//draw the line of text
using (DrawingContext dc = RenderOpen())
{
myTextLine.Draw(dc, locus, InvertAxes.None);
#if DEBUG
#if SHOWBOUNDS
obb.Offset(new Vector(locus.X, locus.Y));
dc.DrawRectangle(null, new Pen(new SolidColorBrush(Colors.BlueViolet), 1.0), obb);
#endif
#endif
var glyphRuns = myTextLine.GetIndexedGlyphRuns();
List<Point> outline = new List<Point>();
double advanceWidths = 0d;
//build up the convex hull from each glyph
//you need to add in the advance widths for each
//glyph run as they are traversed,
//to the outline
foreach (IndexedGlyphRun igr in glyphRuns)
{
var originalRun = textStore.GetTextRun(igr.TextSourceCharacterIndex);
var currentRun = igr.GlyphRun;
//need to work out how much the current run has been offset from the baseline
var runBounds =
myTextLine.GetTextBounds(igr.TextSourceCharacterIndex, igr.TextSourceLength);
//get the bounding rect
var rect = runBounds[0].TextRunBounds[0].Rectangle;
//it's relative to the baseline
//adjust it
rect.Offset(new Vector(locus.X, locus.Y));
var rectCopy = rect;
#if DEBUG
#if SHOWBOUNDS
dc.DrawRectangle(null, new Pen(new SolidColorBrush(Colors.DarkOrange), 1.0), rect);
#endif
#endif
var runOutline = GlyphUtils.GetOutline(currentRun);
//need to see if the run has been super or sub-scripted
var variants = originalRun.Properties.TypographyProperties.Variants;
if (variants == FontVariants.Subscript || variants == FontVariants.Superscript)
{
//simply union in the rect -it's easier!
outline.AddRange(new[]
{
rectCopy.BottomLeft, rectCopy.BottomRight, rectCopy.TopLeft,
rectCopy.TopRight
});
}
else
{
//add in the points from the convex hull
for (int i = 0; i < runOutline.Count; i++)
{
var point = runOutline[i] + displacementVector +
new Vector(0.0, myTextLine.Baseline);
point.X += advanceWidths;
runOutline[i] = point;
}
outline.AddRange(runOutline);
}
// Accumulated horizontal offset of the NEXT run's glyph coordinates.
advanceWidths += currentRun.AdvanceWidths.Sum();
}
// Sort the outline points (left-to-right, then top-down) before hull construction.
_sortedOutline = (from Point p in outline
orderby p.X ascending, p.Y descending
select p).ToList();
Hull = Geometry<Point>.GetHull(_sortedOutline, p => p);
// Diag: Show Hulls or Atom centres
#if DEBUG
#if SHOWHULLS
dc.DrawGeometry(null, new Pen(Brushes.GreenYellow, thickness: 1), HullGeometry);
#endif
#if SHOWATOMCENTRES
dc.DrawEllipse(Brushes.Red, null, ParentAtom.Position, 5, 5);
#endif
#endif
// End Diag
dc.Close();
}
}
}
}
19
Source : HiddenMarkovClassifierLearning.Continuous.cs
with MIT License
from chen0040
with MIT License
from chen0040
// Trains one continuous-observation HMM per class, in parallel, on the
// sequences labelled with that class; optionally sets empirical class priors.
// Returns the sum of the per-class training log-likelihoods.
public double Run(double[][] observations_db, int[] clreplaced_labels)
{
int clreplaced_count = mClreplacedifier.ClreplacedCount;
double[] logLikelihood = new double[clreplaced_count];
int K=clreplaced_labels.Length;
DiagnosticsHelper.replacedert(observations_db.Length==K);
int[] clreplaced_label_counts = new int[clreplaced_count];
// Each parallel iteration writes only index i of the shared arrays, so no locking is needed.
Parallel.For(0, clreplaced_count, i =>
{
IUnsupervisedLearning teacher = mAlgorithmEnreplacedy(i);
// Indices of all training records belonging to class i.
List<int> match_record_index_set = new List<int>();
for (int k = 0; k < K; ++k)
{
if (clreplaced_labels[k] == i)
{
match_record_index_set.Add(k);
}
}
int K2 = match_record_index_set.Count;
clreplaced_label_counts[i] = K2;
if (K2 != 0)
{
// Train this class's model on its own subset of the database.
double[][] observations_subdb = new double[K2][];
for (int k = 0; k < K2; ++k)
{
int record_index = match_record_index_set[k];
observations_subdb[k] = observations_db[record_index];
}
logLikelihood[i] = teacher.Run(observations_subdb);
}
});
// Empirical priors: fraction of training records seen per class.
if (mEmpirical)
{
for (int i = 0; i < clreplaced_count; i++)
{
mClreplacedifier.Priors[i] = (double)clreplaced_label_counts[i] / K;
}
}
//if (mRejection)
//{
// mClreplacedifier.Threshold = Threshold();
//}
return logLikelihood.Sum();
}
19
Source : HiddenMarkovClassifierLearning.cs
with MIT License
from chen0040
with MIT License
from chen0040
// Discrete-observation counterpart of the continuous learner above: trains one
// HMM per class, in parallel, on the sequences labelled with that class;
// optionally sets empirical class priors. Returns the summed log-likelihoods.
public double Run(int[][] observations_db, int[] clreplaced_labels)
{
// Reject symbols outside the classifier's alphabet up front.
ValidationHelper.ValidateObservationDb(observations_db, 0, mClreplacedifier.SymbolCount);
int clreplaced_count = mClreplacedifier.ClreplacedCount;
double[] logLikelihood = new double[clreplaced_count];
int K=clreplaced_labels.Length;
DiagnosticsHelper.replacedert(observations_db.Length==K);
int[] clreplaced_label_counts = new int[clreplaced_count];
// Each parallel iteration writes only index i of the shared arrays, so no locking is needed.
Parallel.For(0, clreplaced_count, i =>
{
IUnsupervisedLearning teacher = mAlgorithmEnreplacedy(i);
// Indices of all training records belonging to class i.
List<int> match_record_index_set = new List<int>();
for (int k = 0; k < K; ++k)
{
if (clreplaced_labels[k] == i)
{
match_record_index_set.Add(k);
}
}
int K2 = match_record_index_set.Count;
clreplaced_label_counts[i] = K2;
if (K2 != 0)
{
// Train this class's model on its own subset of the database.
int[][] observations_subdb = new int[K2][];
for (int k = 0; k < K2; ++k)
{
int record_index = match_record_index_set[k];
observations_subdb[k] = observations_db[record_index];
}
logLikelihood[i] = teacher.Run(observations_subdb);
}
});
// Empirical priors: fraction of training records seen per class.
if (mEmpirical)
{
for (int i = 0; i < clreplaced_count; i++)
{
mClreplacedifier.Priors[i] = (double)clreplaced_label_counts[i] / K;
}
}
//if (mRejection)
//{
// mClreplacedifier.Threshold = Threshold();
//}
return logLikelihood.Sum();
}
19
Source : MaximumLikelihoodLearning.Continuous.cs
with MIT License
from chen0040
with MIT License
from chen0040
// Maximum-likelihood estimation of a continuous-emission HMM from labelled
// state paths: counts initial states and transitions, fits each state's
// emission distribution to the symbols it emitted, optionally applies
// add-one (Laplace) smoothing, normalizes into log-space parameters, and
// returns the total log-likelihood of the training set.
public double Run(double[][] observations_db, int[][] path_db)
{
    int K = observations_db.Length;
    DiagnosticsHelper.replacedert(path_db.Length == K);
    int N = mModel.StateCount;
    // (removed unused locals: M = mModel.SymbolCount and the per-path
    //  `observations` alias — neither was referenced)
    int[] initial = new int[N];
    int[,] transition_matrix = new int[N, N];
    // 1. Count which state each path starts in.
    for (int k = 0; k < K; ++k)
    {
        initial[path_db[k][0]]++;
    }
    // 2. Count state-to-state transitions along every path.
    for (int k = 0; k < K; ++k)
    {
        int[] path = path_db[k];
        for (int t = 0; t < path.Length - 1; ++t)
        {
            transition_matrix[path[t], path[t + 1]]++;
        }
    }
    // 3. Count emissions for each state
    List<double>[] clusters = new List<double>[N];
    for (int i = 0; i < N; i++)
        clusters[i] = new List<double>();
    // Group every observed symbol by the state that emitted it.
    for (int k = 0; k < K; k++)
    {
        for (int t = 0; t < path_db[k].Length; t++)
        {
            int state = path_db[k][t];
            double symbol = observations_db[k][t];
            clusters[state].Add(symbol);
        }
    }
    // Estimate each state's emission distribution from its cluster.
    for (int i = 0; i < N; i++)
    {
        if (clusters[i].Count > 0)
        {
            mModel.EmissionModels[i].Process(clusters[i].ToArray());
        }
    }
    // Optional add-one smoothing so unseen initial states/transitions
    // don't collapse to zero probability.
    if (mUseLaplaceRule)
    {
        for (int i = 0; i < N; ++i)
        {
            initial[i]++;
            for (int j = 0; j < N; ++j)
            {
                transition_matrix[i, j]++;
            }
        }
    }
    // Normalize counts into log-probabilities.
    // NOTE(review): without smoothing, zero counts produce -Infinity and a
    // zero row sum produces NaN — confirm callers expect that.
    int initial_sum = initial.Sum();
    int[] transition_sum_vec = Sum(transition_matrix, 1);
    for (int i = 0; i < N; ++i)
    {
        mModel.LogProbabilityVector[i] = System.Math.Log(initial[i] / (double)initial_sum);
    }
    for (int i = 0; i < N; ++i)
    {
        double transition_sum = (double)transition_sum_vec[i];
        for (int j = 0; j < N; ++j)
        {
            mModel.LogTransitionMatrix[i, j] = System.Math.Log(transition_matrix[i, j] / transition_sum);
        }
    }
    // Total log-likelihood of the data under the fitted model.
    double logLikelihood = double.NegativeInfinity;
    for (int i = 0; i < observations_db.Length; i++)
        logLikelihood = LogHelper.LogSum(logLikelihood, mModel.Evaluate(observations_db[i]));
    return logLikelihood;
}
19
Source : MaximumLikelihoodLearning.cs
with MIT License
from chen0040
with MIT License
from chen0040
// Maximum-likelihood estimation of a discrete HMM from labelled state paths:
// counts initial states, transitions and emissions, optionally applies
// add-one (Laplace) smoothing, normalizes into log-space parameters, and
// returns the total log-likelihood of the training set.
public double Run(int[][] observations_db, int[][] path_db)
{
int K = observations_db.Length;
DiagnosticsHelper.replacedert(path_db.Length == K);
int N = mModel.StateCount;
int M = mModel.SymbolCount;
int[] initial=new int[N];
int[,] transition_matrix = new int[N, N];
int[,] emission_matrix = new int[N, M];
// 1. Count which state each path starts in.
for (int k = 0; k < K; ++k)
{
initial[path_db[k][0]]++;
}
int T = 0;
// 2. Count state transitions and symbol emissions along every path.
for (int k = 0; k < K; ++k)
{
int[] path = path_db[k];
int[] observations = observations_db[k];
T = path.Length;
for (int t = 0; t < T-1; ++t)
{
transition_matrix[path[t], path[t + 1]]++;
}
for (int t = 0; t < T; ++t)
{
emission_matrix[path[t], observations[t]]++;
}
}
// 3. Optional add-one smoothing so unseen events don't collapse to zero probability.
if (mUseLaplaceRule)
{
for (int i = 0; i < N; ++i)
{
initial[i]++;
for (int j = 0; j < N; ++j)
{
transition_matrix[i, j]++;
}
for (int j = 0; j < M; ++j)
{
emission_matrix[i, j]++;
}
}
}
// 4. Normalize counts into log-probabilities.
// NOTE(review): without smoothing, zero counts produce -Infinity and a zero
// row sum produces NaN — confirm callers expect that.
int initial_sum = initial.Sum();
int[] transition_sum_vec = Sum(transition_matrix, 1);
int[] emission_sum_vec = Sum(emission_matrix, 1);
for (int i = 0; i < N; ++i)
{
mModel.LogProbabilityVector[i] = System.Math.Log(initial[i] / (double)initial_sum);
}
for (int i = 0; i < N; ++i)
{
double transition_sum = (double)transition_sum_vec[i];
for (int j = 0; j < N; ++j)
{
mModel.LogTransitionMatrix[i, j] = System.Math.Log(transition_matrix[i, j] / transition_sum);
}
}
for (int i = 0; i < N; ++i)
{
double emission_sum = (double)emission_sum_vec[i];
for (int m = 0; m < M; ++m)
{
mModel.LogEmissionMatrix[i, m] = System.Math.Log(emission_matrix[i, m] / emission_sum);
}
}
// 5. Total log-likelihood of the data under the fitted model.
double logLikelihood = double.NegativeInfinity;
for (int i = 0; i < observations_db.Length; i++)
logLikelihood = LogHelper.LogSum(logLikelihood, mModel.Evaluate(observations_db[i]));
return logLikelihood;
}
19
Source : ANOVA.cs
with MIT License
from chen0040
with MIT License
from chen0040
// Mean of each group's samples, keyed by group id.
// Sum()/Count is used (rather than Average()) so an empty group yields
// NaN via 0/0 instead of throwing, matching the original arithmetic.
public static Dictionary<int, double> GetMeanWithinGroup(Dictionary<int, List<double>> groupedSample)
{
    return groupedSample.ToDictionary(
        group => group.Key,
        group => group.Value.Sum() / group.Value.Count);
}
19
Source : BenchmarkBase.cs
with MIT License
from ChilliCream
with MIT License
from ChilliCream
// Dumps benchmark results to the console on teardown: elapsed time, the
// per-key counters in `_` (sorted by key), their total, and the start count,
// then stops the stopwatch and publishes the start count to the diagnostics.
public void Dispose()
{
Console.WriteLine($"Completed in {Elapsed}");
Console.WriteLine("-----------------------------------");
// NOTE(review): `_` appears to map field keys to resolution counts — confirm
// at its producer before relying on that reading.
foreach (var field in _.OrderBy(t => t.Key.ToString()))
{
Console.WriteLine($"{field.Key}:{field.Value}");
}
Console.WriteLine("-----------------------------------");
// Grand total of all counter values.
Console.WriteLine($"Fields:{_.Select(t => t.Value).Sum()}");
Console.WriteLine($"Starts:{_starts}");
BatchExecutionDiagnostics.Starts = _starts;
Console.WriteLine("-----------------------------------");
Console.WriteLine("-----------------------------------");
_stopwatch.Stop();
}
19
Source : Fixed8Extensions.cs
with MIT License
from CityOfZion
with MIT License
from CityOfZion
// Projects each element through `selector` and totals the results via the
// Fixed8-specific Sum() overload (declared elsewhere). Mirrors the shape of
// Enumerable.Sum's selector overloads for the Fixed8 value type.
public static Fixed8 Sum<TSource>(this IEnumerable<TSource> source, Func<TSource, Fixed8> selector)
{
return source.Select(selector).Sum();
}
19
Source : Class4.cs
with MIT License
from ClaveConsulting
with MIT License
from ClaveConsulting
// Sums Foo(item) over every string in x.
// NOTE(review): [Expressionify] presumably generates an expression-tree
// version of this method and requires the single-expression body — keep it
// expression-bodied.
[Expressionify]
public static int Something(IEnumerable<string> x) => x.Select(Foo).Sum();
19
Source : EliteConsole.cs
with GNU General Public License v3.0
from cobbr
with GNU General Public License v3.0
from cobbr
// Renders the menu-style table: title, a '=' rule spanning the full table
// width, then each row with its cells padded to the column widths.
private void PrintMenuType(List<int> ColumnsMaxLengths)
{
    EliteConsole.PrintInfo(Spacer);
    EliteConsole.PrintHighlightLine(this.replacedle);
    EliteConsole.PrintInfo(Spacer);
    // Rule width = all column widths plus one separator space between columns.
    var ruleWidth = ColumnsMaxLengths.Sum() + Columns.Count - 1;
    EliteConsole.PrintInfoLine(new String('=', ruleWidth));
    foreach (List<string> row in Rows)
    {
        EliteConsole.PrintInfo(Spacer);
        for (int col = 0; col < row.Count; col++)
        {
            var cell = row[col];
            EliteConsole.PrintInfo(cell);
            // Pad to the column width plus one trailing separator space.
            EliteConsole.PrintInfo(new String(' ', ColumnsMaxLengths[col] - cell.Length + 1));
        }
        EliteConsole.PrintInfoLine();
    }
}
19
Source : EliteConsole.cs
with GNU General Public License v3.0
from cobbr
with GNU General Public License v3.0
from cobbr
// Renders the parameter-style table: title, a '=' rule spanning the full
// table width, then each row with its cells padded to the column widths.
private void PrintParameterType(List<int> ColumnsMaxLengths)
{
    EliteConsole.PrintInfo(Spacer);
    EliteConsole.PrintHighlightLine(this.replacedle);
    EliteConsole.PrintInfo(Spacer);
    // Rule width = all column widths plus one separator space between columns.
    var totalWidth = ColumnsMaxLengths.Sum() + Columns.Count - 1;
    EliteConsole.PrintInfoLine(new String('=', totalWidth));
    foreach (List<string> row in Rows)
    {
        EliteConsole.PrintInfo(Spacer);
        for (int index = 0; index < row.Count; index++)
        {
            var text = row[index];
            EliteConsole.PrintInfo(text);
            // Pad to the column width plus one trailing separator space.
            EliteConsole.PrintInfo(new String(' ', ColumnsMaxLengths[index] - text.Length + 1));
        }
        EliteConsole.PrintInfoLine();
    }
}
19
Source : MethodArgKeywordClass.cs
with MIT License
from codingseb
with MIT License
from codingseb
public int SumOf(int val1, int val2, params int[] otherVals)
{
    // Total of the two required values plus any number of extras.
    // (otherVals.Sum() uses checked arithmetic, matching LINQ's behavior.)
    var extras = otherVals.Sum();
    return val1 + val2 + extras;
}
19
Source : MethodArgKeywordClass.cs
with MIT License
from codingseb
with MIT License
from codingseb
public int SumOf2(params int[] otherVals)
{
    // Sum of all supplied values; zero when called with no arguments.
    // `checked` mirrors Enumerable.Sum, which throws OverflowException
    // on int overflow.
    int total = 0;
    checked
    {
        foreach (int value in otherVals)
        {
            total += value;
        }
    }
    return total;
}
19
Source : SentimentIntensityAnalyzer.cs
with MIT License
from codingupastorm
with MIT License
from codingupastorm
private SentimentreplacedysisResults ScoreValence(IList<double> sentiments, string text)
{
    // Aggregates per-token valence scores into compound / positive / negative /
    // neutral proportions; punctuation emphasis amplifies the raw total.
    if (sentiments.Count == 0)
        return new SentimentreplacedysisResults(); //will return with all 0
    double sum = sentiments.Sum();
    double puncAmplifier = PunctuationEmphasis(text);
    // Push the raw sum further from zero in the direction it already leans.
    sum += Math.Sign(sum) * puncAmplifier;
    double compound = SentimentUtils.Normalize(sum);
    SiftSentiments sifted = SiftSentimentScores(sentiments);
    // Credit the punctuation emphasis to whichever side (pos/neg) dominates;
    // an exact tie leaves both sums untouched.
    if (sifted.PosSum > Math.Abs(sifted.NegSum))
    {
        sifted.PosSum += puncAmplifier;
    }
    else if (sifted.PosSum < Math.Abs(sifted.NegSum))
    {
        sifted.NegSum -= puncAmplifier;
    }
    // Normalize the three components so they are reported as proportions.
    double total = sifted.PosSum + Math.Abs(sifted.NegSum) + sifted.NeuCount;
    return new SentimentreplacedysisResults
    {
        Compound = Math.Round(compound,4),
        Positive = Math.Round(Math.Abs(sifted.PosSum /total), 3),
        Negative = Math.Round(Math.Abs(sifted.NegSum/total),3),
        Neutral = Math.Round(Math.Abs(sifted.NeuCount/total), 3)
    };
}
19
Source : LinearRegression.cs
with MIT License
from coldino
with MIT License
from coldino
public static Tuple<double, double> Fit(double[] x, double[] y)
{
    // Ordinary least-squares line fit y ≈ a + b*x.
    // Returns (a, b) = (intercept, slope).
    // Throws ArgumentNullException / ArgumentException on invalid input
    // (the original silently produced NaN for empty arrays and an
    // IndexOutOfRangeException when y was shorter than x).
    if (x == null) throw new ArgumentNullException(nameof(x));
    if (y == null) throw new ArgumentNullException(nameof(y));
    if (x.Length == 0 || x.Length != y.Length)
        throw new ArgumentException("x and y must be non-empty and of equal length.");

    double mx = x.Sum() / x.Length; // mean of x
    double my = y.Sum() / y.Length; // mean of y

    // Accumulate covariance(x, y) and variance(x) around the means.
    double covariance = 0.0;
    double variance = 0.0;
    for (int i = 0; i < x.Length; i++)
    {
        double diff = x[i] - mx;
        covariance += diff * (y[i] - my);
        variance += diff * diff;
    }

    // Slope; NaN or ±Infinity when all x values are identical (variance == 0),
    // preserved from the original behavior.
    var b = covariance / variance;
    return new Tuple<double, double>(my - b * mx, b);
}
19
Source : ShardingDbAccessor.cs
with Apache License 2.0
from Coldairarrow
with Apache License 2.0
from Coldairarrow
private async Task<int> WriteTableAsync<T>(List<T> enreplacedies, Func<T, IDbAccessor, Task<int>> accessDataAsync)
{
    // Sharded write: maps each item to its target write table/connection, then
    // runs the writes grouped per IDbAccessor and returns the total row count.
    List<(T obj, IDbAccessor db)> targetDbs = enreplacedies
        .Select(x => new
        {
            Obj = x,
            Conifg = _shardingConfig.GetTheWriteTable(x) // "Conifg" typo preserved from original
        })
        .ToList()
        .Select(x => (x.Obj, GetMapDbAccessor(x.Conifg.conString, x.Conifg.dbType, x.Conifg.suffix)))
        .ToList();
    return await PackAccessDataAsync(async () =>
    {
        // The same IDbAccessor instance may only be used on one thread,
        // hence one task per distinct accessor, writing its items sequentially.
        List<Task<int>> tasks = new List<Task<int>>();
        var dbs = targetDbs.Select(x => x.db).Distinct().ToList();
        dbs.ForEach(aDb =>
        {
            tasks.Add(Task.Run(async () =>
            {
                int count = 0;
                var objs = targetDbs.Where(x => x.db == aDb).ToList();
                foreach (var aObj in objs)
                {
                    count += await accessDataAsync(aObj.obj, aObj.db);
                }
                return count;
            }));
        });
        // Total affected rows across all shards.
        return (await Task.WhenAll(tasks.ToArray())).Sum();
    });
}
19
Source : CustomVisionLocal.cs
with MIT License
from cookieofcode
with MIT License
from cookieofcode
private float[] Softmax(float[] values)
{
    // Numerically stable softmax: subtracting the max before exponentiation
    // prevents overflow without changing the result.
    var maxVal = values.Max();
    // Materialize once: the original kept `exp` as a deferred LINQ query, so
    // Math.Exp was re-evaluated for every element during both Sum() and the
    // final Select() (double work, though the same output).
    var exp = values.Select(v => Math.Exp(v - maxVal)).ToArray();
    var sumExp = exp.Sum();
    return exp.Select(v => (float)(v / sumExp)).ToArray();
}
19
Source : ProjectService.cs
with BSD 2-Clause "Simplified" License
from countincognito
with BSD 2-Clause "Simplified" License
from countincognito
private static double CalculateActivityRiskWithStdDevCorrection(IEnumerable<ActivityModel> activities)
{
    // Risk metric: slack values are capped at round(mean + stddev) before
    // being summed and compared against the capped maximum.
    if (activities == null)
    {
        throw new ArgumentNullException(nameof(activities));
    }
    double numerator = 0.0;
    double maxTotalSlack = 0.0;
    // Only activities with a defined TotalSlack participate.
    IList<double> totalSlacks = activities
        .Where(x => x.TotalSlack.HasValue)
        .Select(x => Convert.ToDouble(x.TotalSlack.GetValueOrDefault()))
        .ToList();
    double correctionValue = 0;
    if (totalSlacks.Count > 0)
    {
        // Cap = round(mean + population standard deviation), away-from-zero rounding.
        double meanAverage = totalSlacks.Average();
        double sumOfSquaresOfDifferences = totalSlacks.Select(val => (val - meanAverage) * (val - meanAverage)).Sum();
        double stdDev = Math.Sqrt(sumOfSquaresOfDifferences / totalSlacks.Count);
        correctionValue = Math.Round(meanAverage + stdDev, MidpointRounding.AwayFromZero);
    }
    // Accumulate capped slacks and track the (capped) maximum.
    foreach (double totalSlack in totalSlacks)
    {
        double localTotalSlack = totalSlack;
        if (localTotalSlack > correctionValue)
        {
            localTotalSlack = correctionValue;
        }
        if (localTotalSlack > maxTotalSlack)
        {
            maxTotalSlack = localTotalSlack;
        }
        numerator += localTotalSlack;
    }
    // NOTE(review): `activities` is enumerated a second time here (first by the
    // ToList above); and when maxTotalSlack is 0 this divides by zero, yielding
    // NaN — confirm callers guarantee at least one positive slack.
    double denominator = maxTotalSlack * activities.Count();
    return 1.0 - (numerator / denominator);
}
19
Source : DivisionFunction.cs
with MIT License
from craigbridges
with MIT License
from craigbridges
// NOTE(review): this override lives in DivisionFunction.cs yet returns the SUM
// of its inputs, not a quotient. That looks like a copy/paste slip from a
// sibling function clreplaced — confirm the intended semantics against the base
// clreplaced before changing behavior.
protected override double Compute
(
    List<double> numbers
)
{
    return numbers.Sum();
}
19
Source : AssetScriptReferenceRetargeter.cs
with MIT License
from cre8ivepark
with MIT License
from cre8ivepark
private static void RunRetargetToDLL()
{
    // Retargets Unity YAML replacedet references from loose script files to their
    // compiled-DLL equivalents: builds a GUID/FileID remap table by matching
    // script clreplacedes to compiled clreplacedes, then rewrites the YAML replacedets.
    string[] allFilesUnderreplacedets = Directory.GetFiles(Application.dataPath, "*", SearchOption.AllDirectories);
    Dictionary<string, ClreplacedInformation> scriptFilesReferences = ProcessScripts(allFilesUnderreplacedets);
    Debug.Log($"Found {scriptFilesReferences.Count} script file references.");
    // DLL name to Guid
    Dictionary<string, string> asmDefMappings = RetrieveAsmDefGuids(allFilesUnderreplacedets);
    Dictionary<string, replacedemblyInformation> compiledClreplacedReferences = ProcessCompiledDLLs("Packagedreplacedemblies", Application.dataPath.Replace("replacedets", "NuGet/Plugins/EditorPlayer"), asmDefMappings);
    Debug.Log($"Found {compiledClreplacedReferences.Select(t => t.Value.CompiledClreplacedes.Count).Sum()} compiled clreplaced references.");
    // Maps old script GUID -> (new DLL GUID, FileID inside the DLL).
    Dictionary<string, Tuple<string, long>> remapDictionary = new Dictionary<string, Tuple<string, long>>();
    foreach (KeyValuePair<string, replacedemblyInformation> pair in compiledClreplacedReferences)
    {
        foreach (KeyValuePair<string, ClreplacedInformation> compiledClreplaced in pair.Value.CompiledClreplacedes)
        {
            ClreplacedInformation compiledClreplacedInfo = compiledClreplaced.Value;
            if (scriptFilesReferences.TryGetValue(compiledClreplaced.Key, out ClreplacedInformation scriptClreplacedInfo))
            {
                // Preserve any non-default script execution order on the DLL side.
                if (scriptClreplacedInfo.ExecutionOrder != 0)
                {
                    pair.Value.ExecutionOrderEntries.Add($"{scriptClreplacedInfo.Namespace}.{scriptClreplacedInfo.Name}", scriptClreplacedInfo.ExecutionOrder);
                }
                remapDictionary.Add(scriptClreplacedInfo.Guid, new Tuple<string, long>(compiledClreplacedInfo.Guid, compiledClreplacedInfo.FileId));
                // Remove matched entries; anything left afterwards had no compiled counterpart.
                scriptFilesReferences.Remove(compiledClreplaced.Key);
            }
            else
            {
                Debug.LogWarning($"Can't find a script version of the compiled clreplaced: {compiledClreplaced.Key}; {pair.Key}.dll. This generally means the compiled clreplaced is second or later in a script file, and Unity doesn't parse it as two different replacedets.");
            }
        }
    }
    ProcessYAMLreplacedets(allFilesUnderreplacedets, Application.dataPath.Replace("replacedets", "NuGet/Content"), remapDictionary, compiledClreplacedReferences);
}
19
Source : QueryTests.cs
with MIT License
from crookookoo
with MIT License
from crookookoo
// Fold over an empty query must throw InvalidOperationException; otherwise
// folding with (a, b) => a + b must equal LINQ Sum() over the equivalent list.
[Test]
public void FoldTest([ValueSource("list0")] QueryArg arg) {
    var list = arg.ToList();
    if (list.Count == 0) {
        replacedert.That(() => arg.ToQuery().Fold((a, b) => a + b), Throws.InvalidOperationException);
    } else {
        replacedert.That(arg.ToQuery().Fold((a, b) => a + b), Is.EqualTo(
            arg.ToList().Sum()));
    }
}
19
Source : QueryTests.cs
with MIT License
from crookookoo
with MIT License
from crookookoo
// Query.Sum() must agree with LINQ Sum() over the equivalent list; empty
// inputs are skipped because Sum over an empty query is not defined here.
[Test]
public void SumTests([ValueSource("list0")] QueryArg arg) {
    if (arg.ToList().Count == 0) {
        replacedert.Ignore("Ignore empty queries for sum tests.");
        return;
    }
    replacedert.That(arg.ToQuery().Sum(), Is.EqualTo(
        arg.ToList().Sum()));
}
19
Source : BinanceBuyExecuteMarketRule.cs
with MIT License
from CryptoDevTV
with MIT License
from CryptoDevTV
public IRuleResult RuleExecuted(Solbot solbot)
{
    // Places a market BUY order when the bot signals readiness, logs each fill,
    // records the average bought price, and sends a push notification.
    var result = false;
    var message = string.Empty;
    if (solbot.Communication.Buy.IsReady)
    {
        var buyOrderResult = _binanceClient.Spot.Order.PlaceOrder(
            solbot.Strategy.AvailableStrategy.Symbol,
            OrderSide.Buy,
            OrderType.Market,
            quoteOrderQuanreplacedy: solbot.Communication.Buy.AvailableFund);
        if (!(buyOrderResult is null))
        {
            result = buyOrderResult.Success;
            if (buyOrderResult.Success)
            {
                Logger.Info(LogGenerator.TradeResultStart($"{buyOrderResult.Data.OrderId}"));
                // Collect per-fill price/quanreplacedy/commission for aggregate logging.
                var prices = new List<decimal>();
                var quanreplacedy = new List<decimal>();
                var commission = new List<decimal>();
                if (buyOrderResult.Data.Fills.AnyAndNotNull())
                {
                    foreach (var item in buyOrderResult.Data.Fills)
                    {
                        Logger.Info(LogGenerator.TradeResult(MarketOrder, item));
                        prices.Add(item.Price);
                        quanreplacedy.Add(item.Quanreplacedy);
                        commission.Add(item.Commission);
                    }
                }
                // NOTE(review): if Fills was empty, `prices` is empty and Average()
                // throws InvalidOperationException — confirm a successful market
                // order always reports at least one fill.
                solbot.Actions.BoughtPrice = prices.Average();
                Logger.Info(LogGenerator.TradeResultEnd($"{buyOrderResult.Data.OrderId}", prices.Average(), quanreplacedy.Sum(), commission.Sum()));
                _pushOverNotificationService.Send(
                    LogGenerator.Notificationreplacedle(EnvironmentType.PRODUCTION, MarketOrder, solbot.Strategy.AvailableStrategy.Symbol),
                    LogGenerator.NotificationMessage(
                        solbot.Communication.Average.Current,
                        solbot.Communication.Price.Current,
                        solbot.Communication.Buy.Change));
            }
            else
                Logger.Warn(buyOrderResult.Error.Message);
        }
    }
    return new MarketRuleResult()
    {
        Success = result,
        Message = result
            ? LogGenerator.OrderMarketSuccess(MarketOrder)
            : LogGenerator.OrderMarketError(MarketOrder, message)
    };
}
19
Source : BinanceSellExecuteMarketRule.cs
with MIT License
from CryptoDevTV
with MIT License
from CryptoDevTV
public IRuleResult RuleExecuted(Solbot solbot)
{
    // Places a market SELL order when ready: clamps the quanreplacedy to exchange
    // limits, checks the minimum-notional rule, logs fills, and notifies.
    var result = false;
    var message = string.Empty;
    if (solbot.Communication.Sell.IsReady)
    {
        // Clamp to the symbol's min/max/step constraints before ordering.
        var quanreplacedy = BinanceHelpers.ClampQuanreplacedy(solbot.Communication.Symbol.MinQuanreplacedy, solbot.Communication.Symbol.MaxQuanreplacedy, solbot.Communication.Symbol.StepSize, solbot.Communication.Availablereplacedet.Base);
        var minNotional = quanreplacedy * solbot.Communication.Price.Current;
        if (minNotional > solbot.Communication.Symbol.MinNotional)
        {
            var sellOrderResult = _binanceClient.Spot.Order.PlaceOrder(
                solbot.Strategy.AvailableStrategy.Symbol,
                OrderSide.Sell,
                OrderType.Market,
                quanreplacedy: quanreplacedy);
            if (!(sellOrderResult is null))
            {
                result = sellOrderResult.Success;
                if (sellOrderResult.Success)
                {
                    // Position closed: clear the recorded buy price.
                    solbot.Actions.BoughtPrice = 0;
                    Logger.Info(LogGenerator.TradeResultStart($"{sellOrderResult.Data.OrderId}"));
                    var prices = new List<decimal>();
                    var quanreplacedyAll = new List<decimal>();
                    var commission = new List<decimal>();
                    if (sellOrderResult.Data.Fills.AnyAndNotNull())
                    {
                        foreach (var item in sellOrderResult.Data.Fills)
                        {
                            Logger.Info(LogGenerator.TradeResult(MarketOrder, item));
                            prices.Add(item.Price);
                            quanreplacedyAll.Add(item.Quanreplacedy);
                            commission.Add(item.Commission);
                        }
                    }
                    // NOTE(review): prices.Average() throws if Fills was empty —
                    // confirm successful orders always include fills.
                    Logger.Info(LogGenerator.TradeResultEnd($"{sellOrderResult.Data.OrderId}", prices.Average(), quanreplacedyAll.Sum(), commission.Sum()));
                    _pushOverNotificationService.Send(
                        LogGenerator.Notificationreplacedle(EnvironmentType.PRODUCTION, MarketOrder, solbot.Strategy.AvailableStrategy.Symbol),
                        LogGenerator.NotificationMessage(
                            solbot.Communication.Average.Current,
                            solbot.Communication.Price.Current,
                            solbot.Communication.Sell.Change));
                }
                else
                    Logger.Warn(sellOrderResult.Error.Message);
            }
        }
        else
            message = "not enough";
    }
    return new MarketRuleResult()
    {
        Success = result,
        Message = result
            ? LogGenerator.OrderMarketSuccess(MarketOrder)
            : LogGenerator.OrderMarketError(MarketOrder, message)
    };
}
19
Source : BinanceStopLossExecuteMarketRule.cs
with MIT License
from CryptoDevTV
with MIT License
from CryptoDevTV
public IRuleResult RuleExecuted(Solbot solbot)
{
    // Emergency exit: when the stop-loss triggers, market-sells the clamped
    // base-replacedet quanreplacedy, marks the stop-loss as reached, and notifies.
    var result = false;
    var message = string.Empty;
    if (solbot.Communication.StopLoss.IsReady)
    {
        WebCallResult<BinancePlacedOrder> stopLossOrderResult = null;
        // Clamp to the symbol's min/max/step constraints before ordering.
        var quanreplacedy = BinanceHelpers.ClampQuanreplacedy(solbot.Communication.Symbol.MinQuanreplacedy, solbot.Communication.Symbol.MaxQuanreplacedy, solbot.Communication.Symbol.StepSize, solbot.Communication.Availablereplacedet.Base);
        var minNotional = quanreplacedy * solbot.Communication.Price.Current;
        if (minNotional > solbot.Communication.Symbol.MinNotional)
        {
            stopLossOrderResult = _binanceClient.Spot.Order.PlaceOrder(
                solbot.Strategy.AvailableStrategy.Symbol,
                OrderSide.Sell,
                OrderType.Market,
                quanreplacedy: quanreplacedy);
        }
        else
            message = "not enough";
        if (!(stopLossOrderResult is null))
        {
            result = stopLossOrderResult.Success;
            if (stopLossOrderResult.Success)
            {
                // Position closed via stop-loss: clear buy price and flag the event.
                solbot.Actions.BoughtPrice = 0;
                solbot.Actions.StopLossReached = true;
                Logger.Info(LogGenerator.TradeResultStart($"{stopLossOrderResult.Data.OrderId}"));
                var prices = new List<decimal>();
                var quanreplacedyAll = new List<decimal>();
                var commission = new List<decimal>();
                if (stopLossOrderResult.Data.Fills.AnyAndNotNull())
                {
                    foreach (var item in stopLossOrderResult.Data.Fills)
                    {
                        Logger.Info(LogGenerator.TradeResult(MarketOrder, item));
                        prices.Add(item.Price);
                        quanreplacedyAll.Add(item.Quanreplacedy);
                        commission.Add(item.Commission);
                    }
                }
                // NOTE(review): prices.Average() throws if Fills was empty —
                // confirm successful orders always include fills.
                Logger.Info(LogGenerator.TradeResultEnd($"{stopLossOrderResult.Data.OrderId}", prices.Average(), quanreplacedyAll.Sum(), commission.Sum()));
                _pushOverNotificationService.Send(
                    LogGenerator.Notificationreplacedle(EnvironmentType.PRODUCTION, MarketOrder, solbot.Strategy.AvailableStrategy.Symbol),
                    LogGenerator.NotificationMessage(
                        solbot.Communication.Average.Current,
                        solbot.Communication.Price.Current,
                        solbot.Communication.StopLoss.Change));
            }
            else
                Logger.Warn(stopLossOrderResult.Error.Message);
        }
    }
    return new MarketRuleResult()
    {
        Success = result,
        Message = result
            ? LogGenerator.OrderMarketSuccess(MarketOrder)
            : LogGenerator.OrderMarketError(MarketOrder, message)
    };
}
19
Source : XldContainer.cs
with MIT License
from csinkers
with MIT License
from csinkers
static byte[] Loadreplacedet(int subItem, ISerializer s)
{
    // Reads the XLD length header, seeks past all sub-items preceding
    // `subItem`, and returns that item's raw bytes.
    var lengths = HeaderSerdes(null, s);
    if (subItem >= lengths.Length)
        // Fixed: the original passed the message string as the ctor's paramName
        // argument, so it rendered as a (bogus) parameter name.
        throw new ArgumentOutOfRangeException(nameof(subItem),
            $"Tried to load subItem {subItem} from XLD, but it only contains {lengths.Length} items.");
    long offset = s.Offset;
    // Take(subItem) replaces the original Where((x, i) => i < subItem) —
    // identical result (skips the lengths of all earlier items), clearer intent.
    offset += lengths.Take(subItem).Sum();
    s.Seek(offset);
    return s.Bytes(null, null, lengths[subItem]);
}
19
Source : XldContainer.cs
with MIT License
from csinkers
with MIT License
from csinkers
static void ReadEmbedded<TContext>(
    XldCategory category,
    int firstId,
    TContext context,
    ISerializer s,
    Action<int, int, TContext, ISerializer> func)
{
    // Reads an embedded XLD container: validates its descriptor against the
    // expected category/number, then invokes `func` once per non-empty
    // sub-item with a windowed serializer scoped to that item's bytes.
    var descriptor = s.Object("XldDescriptor", (XldDescriptor)null, XldDescriptor.Serdes);
    ApiUtil.replacedert(descriptor.Category == category);
    ApiUtil.replacedert(descriptor.Number == firstId / 100);
    var preheader = s.Offset;
    var lengths = HeaderSerdes(null, s);
    // Declared size must equal the header plus the sum of all item lengths.
    ApiUtil.replacedert(preheader + HeaderSize(lengths.Length) == s.Offset);
    ApiUtil.replacedert(lengths.Sum() + HeaderSize(lengths.Length) == descriptor.Size);
    long offset = s.Offset;
    // An XLD holds at most 100 items; ids are firstId..firstId+99.
    for (int i = 0; i < 100 && i < lengths.Length; i++)
    {
        if (lengths[i] == 0)
            continue;
        using var window = new WindowingFacadeSerializer(s, null);
        func(i + firstId, lengths[i], context, window);
        offset += lengths[i];
        // The callee must consume exactly lengths[i] bytes.
        ApiUtil.replacedert(offset == s.Offset);
    }
}
19
Source : XldContainer.cs
with MIT License
from csinkers
with MIT License
from csinkers
static void WriteEmbedded<TContext>(
    XldCategory category,
    int firstId,
    int lastId,
    TContext context,
    ISerializer s,
    Action<int, int, TContext, ISerializer> func,
    IList<int> populatedIds)
{
    // Writes an embedded XLD container: emits the items first (recording each
    // one's length), then seeks back to write the descriptor and length header
    // once the total size is known.
    int count = populatedIds.Where(x => x >= firstId && x <= lastId).Max() - firstId + 1;
    var descriptorOffset = s.Offset;
    var lengths = new int[count];
    // Reserve space for the descriptor + header; item data starts after it.
    s.Seek(s.Offset + XldDescriptor.SizeInBytes + HeaderSize(count));
    for (int i = 0; i < count; i++)
    {
        using var window = new WindowingFacadeSerializer(s, null);
        func(i + firstId, 0, context, window);
        lengths[i] = (int)window.Offset; // bytes the callee actually wrote
    }
    var endOffset = s.Offset;
    // Jump back to the start and write the descriptor including the total size
    s.Seek(descriptorOffset);
    var descriptor = new XldDescriptor
    {
        Category = category,
        Number = (ushort)(firstId / 100),
        // Total = item bytes + 4 bytes per length entry + 8-byte header.
        Size = (uint)(lengths.Sum() + lengths.Length * 4 + 8)
    };
    s.Object("XldDescriptor", descriptor, XldDescriptor.Serdes);
    HeaderSerdes(lengths, s);
    // Restore the position to the end of the written data.
    s.Seek(endOffset);
}
19
Source : Breadcrumb.cs
with MIT License
from curiosity-ai
with MIT License
from curiosity-ai
private void Recompute()
{
    // Collapses middle breadcrumb entries behind an ellipsis button when the
    // children overflow the available width. keep[] holds a per-child verdict:
    // KEEP (always visible), COLLAPSE (hidden), NOTMEASURED (undecided).
    int childElementCount = (int) _childContainer.childElementCount;
    if (childElementCount == 0) return;
    if (_chevronToUseAsButton is object)
    {
        //Reset modified chevron if any
        _chevronToUseAsButton.clreplacedList.add(_chevronIcon, "tss-breadcrumb-collapse");
        _chevronToUseAsButton.clreplacedList.remove("la-ellipsis-h", "tss-breadcrumb-opencolapsed");
        _chevronToUseAsButton.onclick = null;
        _chevronToUseAsButton = null;
    }
    UpdateChildrenSizes();
    bool isChevron(HTMLElement e) => e.clreplacedList.contains("tss-breadcrumb-chevron");
    var keep = new int[childElementCount];
    const int KEEP = 2;
    const int COLLAPSE = 1;
    const int NOTMEASURED = 0;
    // NOTE(review): the entire collapse computation below sits inside this
    // check, so a negative _overflowIndex disables collapsing altogether.
    if (_overflowIndex >= 0)
    {
        // Pin the leading items (and their trailing chevrons) up to the
        // overflow index; children alternate item/chevron, hence the *2.
        for (int i = 0; i <= Math.Min(keep.Length - 1, ((_overflowIndex) * 2)); i++)
        {
            keep[i] = KEEP;
            if ((i + 1 < _overflowIndex - 2))
            {
                var child = (HTMLElement) _childContainer.children[(uint) i + 1];
                if (isChevron(child))
                {
                    keep[i + 1] = KEEP;
                }
            }
        }
        // The last crumb is always visible.
        keep[keep.Length - 1] = KEEP;
        // debt < 0 means the children overflow the container (64px reserve).
        var debt = _cachedFullWidth - _cachedSizes.Values.Sum() - 64;
        while (debt < 0)
        {
            // Collapse undecided children front-to-back until everything fits.
            var candidate = Array.IndexOf(keep, NOTMEASURED);
            if (candidate >= 0)
            {
                keep[candidate] = COLLAPSE;
                var child = (HTMLElement) _childContainer.children[(uint) candidate];
                debt += _cachedSizes[child];
            }
            else
            {
                break;
            }
        }
        // Apply the verdicts; remember hidden items for the overflow menu.
        var hidden = new List<HTMLElement>();
        for (uint i = 0; i < _childContainer.childElementCount; i++)
        {
            var child = (HTMLElement) _childContainer.children[i];
            if (keep[i] == COLLAPSE)
            {
                if (_chevronToUseAsButton is null)
                {
                    if (isChevron(child))
                    {
                        _chevronToUseAsButton = child;
                        continue; //Don't collapse this, instead keep for menu button
                    }
                    else if (i > 0)
                    {
                        //previous element is a chevron, so use it instead
                        _chevronToUseAsButton = (HTMLElement) _childContainer.children[i - 1];
                    }
                }
                if (!isChevron(child)) hidden.Add(child);
                child.clreplacedList.add("tss-breadcrumb-collapse");
            }
            else
            {
                child.clreplacedList.remove("tss-breadcrumb-collapse");
            }
        }
        // Turn the chosen chevron into an ellipsis button that opens a context
        // menu of clones of the hidden crumbs (clicking a clone forwards the
        // click to the real, hidden element).
        if (_chevronToUseAsButton is object)
        {
            _chevronToUseAsButton.clreplacedList.add("la-ellipsis-h", "tss-breadcrumb-opencolapsed");
            _chevronToUseAsButton.clreplacedList.remove(_chevronIcon, "tss-breadcrumb-collapse");
            _chevronToUseAsButton.onclick = (e) =>
            {
                StopEvent(e);
                var clones = hidden.Select(element => ContextMenuItem(Clone(element)).OnClick((s2, e2) => element.click())).ToArray();
                ContextMenu().Items(clones).ShowFor(_chevronToUseAsButton);
            };
        }
    }
}
19
Source : OverflowSet.cs
with MIT License
from curiosity-ai
with MIT License
from curiosity-ai
private void Recompute()
{
    // Collapses trailing overflow-set entries behind an expand button when the
    // children overflow the available width. keep[] holds a per-child verdict:
    // KEEP (always visible), COLLAPSE (hidden), NOTMEASURED (undecided).
    int childElementCount = (int)_childContainer.childElementCount;
    if (childElementCount <= 1) return;
    if (_chevronToUseAsButton is object)
    {
        //Reset modified chevron if any
        _chevronToUseAsButton.clreplacedList.remove("las", _expandIcon, "tss-overflowset-opencolapsed");
        _chevronToUseAsButton.onclick = null;
        _chevronToUseAsButton = null;
    }
    UpdateChildrenSizes();
    bool isChevron(HTMLElement e) => e.clreplacedList.contains("tss-overflowset-separator");
    var keep = new int[childElementCount];
    const int KEEP = 2;
    const int COLLAPSE = 1;
    const int NOTMEASURED = 0;
    if (_overflowIndex >= 0)
    {
        // Pin the leading items (and their separators) up to the overflow
        // index; children alternate item/separator, hence the *2.
        keep[0] = KEEP;
        for (int i = 0; i <= Math.Min(keep.Length - 1, ((_overflowIndex)*2)); i++)
        {
            keep[i] = KEEP;
            int nextIndex = i + 1;
            if ((nextIndex < _overflowIndex-2) && nextIndex < childElementCount)
            {
                var child = (HTMLElement)_childContainer.children[(uint)nextIndex];
                if (isChevron(child))
                {
                    keep[i + 1] = KEEP;
                }
            }
        }
    }
    // Always keep at least the first child visible.
    if(!keep.Any(k => k == KEEP))
    {
        keep[0] = KEEP;
    }
    keep[keep.Length - 1] = NOTMEASURED;
    // debt < 0 means the children overflow the container (32px reserve).
    var debt = _cachedFullWidth - _cachedSizes.Values.Sum() - 32;
    while(debt < 0)
    {
        // Collapse undecided children back-to-front until everything fits.
        var candidate = Array.LastIndexOf(keep, NOTMEASURED);
        if(candidate >= 0)
        {
            keep[candidate] = COLLAPSE;
            var child = (HTMLElement)_childContainer.children[(uint)candidate];
            debt += _cachedSizes[child];
        }
        else
        {
            break;
        }
    }
    // Apply the verdicts; remember hidden items for the overflow menu.
    var hidden = new List<HTMLElement>();
    for (uint i = 0; i < _childContainer.childElementCount; i++)
    {
        var child = (HTMLElement)_childContainer.children[i];
        if (keep[i] == COLLAPSE)
        {
            if(_chevronToUseAsButton is null)
            {
                if (isChevron(child))
                {
                    _chevronToUseAsButton = child;
                    continue; //Don't collapse this, instead keep for menu button
                }
                else if (i > 0)
                {
                    //previous element is a chevron, so use it instead
                    _chevronToUseAsButton = (HTMLElement)_childContainer.children[i - 1];
                }
            }
            if (!isChevron(child)) hidden.Add(child);
            child.clreplacedList.add("tss-overflowset-collapse");
        }
        else
        {
            child.clreplacedList.remove("tss-overflowset-collapse");
        }
    }
    // Deep-clones a hidden element, minus the collapse clreplaced, for the menu.
    IComponent clone(Node node)
    {
        var c = (HTMLElement)(node.cloneNode(true));
        c.clreplacedList.remove("tss-overflowset-collapse");
        return Raw(c);
    }
    // Turn the chosen separator into an expand button that opens a context
    // menu of clones (clicking a clone forwards the click to the real element).
    if (_chevronToUseAsButton is object)
    {
        _chevronToUseAsButton.clreplacedList.add("las", _expandIcon, "tss-overflowset-opencolapsed");
        _chevronToUseAsButton.clreplacedList.remove("tss-overflowset-collapse");
        _chevronToUseAsButton.onclick = (e) =>
        {
            StopEvent(e);
            var clones = hidden.Select(element => ContextMenuItem(clone(element)).OnClick((s2, e2) => element.click())).ToArray();
            ContextMenu().Items(clones).ShowFor(_chevronToUseAsButton);
        };
    }
}
19
Source : Solution.cs
with MIT License
from cwetanow
with MIT License
from cwetanow
public int GetSublistSum(int start, int end)
{
    // Sums the elements of `list` whose index lies in [start, end).
    // (Kept as an indexed Where rather than Skip/Take so that negative
    // `start` values behave exactly as before.)
    return list.Where((element, index) => index >= start && index < end).Sum();
}
19
Source : Tests.cs
with MIT License
from cwetanow
with MIT License
from cwetanow
// Reducing 1..10 with (a, b) => a + b and seed 0 must equal LINQ Sum().
[Test]
public void TestReduce_ShouldReturnCorrectly()
{
    // Arrange
    var input = Enumerable.Range(1, 10).ToList();
    var expectedResult = input.Sum();
    var sumFunc = new Func<int, int, int>((a, b) => a + b);
    // Act
    var actualResult = Solution.Reduce(input, sumFunc, 0);
    // replacedert
    actualResult.Should().Be(expectedResult);
}
19
Source : Solution.cs
with MIT License
from cwetanow
with MIT License
from cwetanow
public static IEnumerable<int> RoundArrayToSmallestDifferenceWithEqualSums(IList<double> input)
{
    // Public entry point: targets round(sum(input)) as the required integer
    // total, then delegates to the recursive search over floor/ceil choices
    // per element, returning the combination with the smallest total deviation.
    return RoundArrayToSmallestDifferenceWithEqualSums(input, 0, (int)Math.Round(input.Sum()), 0, new List<int>(input.Count)).result;
}
19
Source : Solution.cs
with MIT License
from cwetanow
with MIT License
from cwetanow
// Exhaustive recursion: at each index tries both ceil and floor of the input
// value, accumulating the absolute rounding deviation; a branch is valid only
// if its final integer sum equals inputSum. Returns (null, -1) for dead ends.
private static (IEnumerable<int> result, double differencesSum) RoundArrayToSmallestDifferenceWithEqualSums(IList<double> input,
    int currentIndex,
    int inputSum,
    double differencesSum,
    IList<int> currentResult)
{
    if (currentIndex == input.Count)
    {
        // Base case: reject branches whose rounded total misses the target.
        if (currentResult.Sum() != inputSum)
        {
            return (null, -1);
        }
        return (currentResult, differencesSum);
    }
    // Branch 1: round the current element up (list is copied per branch).
    var roundedUpList = currentResult.Select(r => r).ToList();
    roundedUpList.Add((int)Math.Ceiling(input[currentIndex]));
    var (roundedUpResult, roundedUpDifferencesSum) = RoundArrayToSmallestDifferenceWithEqualSums(input, currentIndex + 1, inputSum,
        differencesSum + Math.Abs(roundedUpList[currentIndex] - input[currentIndex]), roundedUpList);
    // Branch 2: round the current element down.
    var roundedDownList = currentResult.Select(r => r).ToList();
    roundedDownList.Add((int)Math.Floor(input[currentIndex]));
    var (roundedDownResult, roundedDownDifferencesSum) = RoundArrayToSmallestDifferenceWithEqualSums(input, currentIndex + 1, inputSum,
        differencesSum + Math.Abs(roundedDownList[currentIndex] - input[currentIndex]), roundedDownList);
    // Prefer whichever valid branch has the smaller accumulated deviation.
    if (roundedUpResult != null && roundedDownResult != null)
    {
        return roundedUpDifferencesSum > roundedDownDifferencesSum
            ? (roundedDownResult, roundedDownDifferencesSum)
            : (roundedUpResult, roundedUpDifferencesSum);
    }
    return roundedDownResult != null ?
        (roundedDownResult, roundedDownDifferencesSum)
        : (roundedUpResult, roundedUpDifferencesSum);
}
19
Source : TestJsonRpcService.cs
with Apache License 2.0
from CXuesong
with Apache License 2.0
from CXuesong
// JSON-RPC endpoint: returns the sum of `values`; when the caller omits the
// parameter, the attribute's default of {1, 2, 3} yields 6. Enumerable.Sum
// uses checked arithmetic, so int overflow throws OverflowException.
[JsonRpcMethod]
public int AddMany([JsonRpcParameter(DefaultValue = new[] {1, 2, 3})] int[] values)
{
    return values.Sum();
}
19
Source : TilePaint.cs
with Apache License 2.0
from CyanCode
with Apache License 2.0
from CyanCode
public void CalculateAlphamap(int[,] biomeMap, int resolution) {
    // Fills Alphamap with normalized per-splat weights at each (x, y) sample.
    //Initialize Alphamap structure
    Alphamap = new float[resolution, resolution, Splats.Length];
    //Sample weights and fill in textures
    for (int x = 0; x < resolution; x++) {
        for (int y = 0; y < resolution; y++) {
            float[] weights = GetSplatWeights(x, y, biomeMap, resolution);
            //Normalize weights before replacedigning to Alphamap
            // NOTE(review): writes use [y, x, i] while x is the outer loop —
            // presumably matches Unity's alphamap row/column convention; verify.
            // If GetSplatWeights can return all zeros, `sum` is 0 and this
            // writes NaN — confirm that cannot happen.
            float sum = weights.Sum();
            for (var i = 0; i < weights.Length; i++) {
                Alphamap[y, x, i] = weights[i] / sum;
            }
        }
    }
}
19
Source : DeathKnight.cs
with GNU General Public License v3.0
from CypherCore
with GNU General Public License v3.0
from CypherCore
void HandleCalcAmount(AuraEffect aurEff, ref int amount, ref bool canBeRecalculated)
{
    // Aura amount calculation hook.
    canBeRecalculated = true;
    // NOTE(review): this sums Enumerable.Range(1, N) — i.e. 1+2+...+N where N
    // is _damagePerSecond.Length — NOT the _damagePerSecond values themselves.
    // That looks suspicious; confirm whether _damagePerSecond.Sum() was the
    // intended computation before changing.
    amount = Enumerable.Range(1, _damagePerSecond.Length).Sum();
}
See More Examples