Here are examples of the C# API System.Collections.Generic.IEnumerable.Max(System.Func), taken from open source projects. By voting up, you can indicate which examples are most useful and appropriate.
1132 Examples
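For orientation, here is a minimal sketch of the selector overload of Max before the project examples below. The collection, names, and values are purely illustrative and not taken from any of the listed projects. Note that Max with a selector throws InvalidOperationException on an empty sequence of non-nullable values, which several examples below explicitly guard against.
using System;
using System.Collections.Generic;
using System.Linq;
class MaxSelectorSketch
{
    static void Main()
    {
        var files = new List<(string Name, long Size)>
        {
            ("a.pkg", 1024),
            ("b.pkg", 2048),
            ("c.pkg", 512),
        };
        // Project each element to a value and return the largest projected value.
        long largestSize = files.Max(f => f.Size);
        // Guard against an empty sequence before calling Max on non-nullable values.
        int longestName = files.Any() ? files.Max(f => f.Name.Length) : 0;
        Console.WriteLine($"largest size = {largestSize}, longest name = {longestName}");
    }
}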
19
View Source File : IrdTests.cs
License : MIT License
Project Creator : 13xforever
[Test, Explicit("Requires custom data")]
public async Task TocSizeTest()
{
var path = @"E:\FakeCDs\PS3 Games\ird";
var result = new List<(string filename, long size)>();
foreach (var f in Directory.EnumerateFiles(path, "*.ird", SearchOption.TopDirectoryOnly))
{
var bytes = await File.ReadAllBytesAsync(f).ConfigureAwait(false);
var ird = IrdParser.Parse(bytes);
using (var header = GetDecompressHeader(ird))
result.Add((Path.GetFileName(f), header.Length));
}
Assert.That(result.Count, Is.GreaterThan(0));
var groupedStats = (from t in result
group t by t.size into g
select new {size = g.Key, count = g.Count()}
).OrderByDescending(i => i.count)
.ThenByDescending(i => i.size)
.ToList();
var largest = groupedStats.Max(i => i.size);
var largestItem = result.First(i => i.size == largest);
Console.WriteLine($"Largest TOC: {largestItem.filename} ({largest.AsStorageUnit()})");
foreach (var s in groupedStats)
Console.WriteLine($"{s.count} items of size {s.size}");
Assert.That(groupedStats.Count, Is.EqualTo(1));
}
19
View Source File : Program.cs
License : MIT License
Project Creator : 13xforever
internal static async Task Main(string[] args)
{
try
{
if (args.Length == 0)
{
Console.WriteLine("Drag .pkg files and/or folders onto this .exe to verify the packages.");
var isFirstChar = true;
var completedPath = false;
var path = new StringBuilder();
do
{
var keyInfo = Console.ReadKey(true);
if (isFirstChar)
{
isFirstChar = false;
if (keyInfo.KeyChar != '"')
return;
}
else
{
if (keyInfo.KeyChar == '"')
{
completedPath = true;
args = new[] {path.ToString()};
}
else
path.Append(keyInfo.KeyChar);
}
} while (!completedPath);
Console.Clear();
}
Console.OutputEncoding = new UTF8Encoding(false);
Console.Title = Title;
Console.CursorVisible = false;
Console.WriteLine("Scanning for PKGs...");
var pkgList = new List<FileInfo>();
Console.ForegroundColor = ConsoleColor.Yellow;
foreach (var item in args)
{
var path = item.Trim('"');
if (File.Exists(path))
pkgList.Add(new FileInfo(path));
else if (Directory.Exists(path))
pkgList.AddRange(GetFilePaths(path, "*.pkg", SearchOption.AllDirectories).Select(p => new FileInfo(p)));
else
Console.WriteLine("Unknown path: " + path);
}
Console.ResetColor();
if (pkgList.Count == 0)
{
Console.WriteLine("No packages were found. Check paths, and try again.");
return;
}
var longestFilename = Math.Max(pkgList.Max(i => i.Name.Length), HeaderPkgName.Length);
var sigWidth = Math.Max(HeaderSignature.Length, 8);
var csumWidth = Math.Max(HeaderChecksum.Length, 5);
var csumsWidth = 1 + sigWidth + 1 + csumWidth + 1;
var idealWidth = longestFilename + csumsWidth;
try
{
if (idealWidth > Console.LargestWindowWidth)
{
longestFilename = Console.LargestWindowWidth - csumsWidth;
idealWidth = Console.LargestWindowWidth;
}
if (idealWidth > Console.WindowWidth)
{
Console.BufferWidth = Math.Max(Console.BufferWidth, idealWidth);
Console.WindowWidth = idealWidth;
}
Console.BufferHeight = Math.Max(Console.BufferHeight, Math.Min(9999, pkgList.Count + 10));
}
catch (PlatformNotSupportedException) { }
Console.WriteLine($"{HeaderPkgName.Trim(longestFilename).PadRight(longestFilename)} {HeaderSignature.PadLeft(sigWidth)} {HeaderChecksum.PadLeft(csumWidth)}");
using var cts = new CancellationTokenSource();
Console.CancelKeyPress += (sender, eventArgs) => { cts.Cancel(); };
var t = new Thread(() =>
{
try
{
var indicatorIdx = 0;
while (!cts.Token.IsCancellationRequested)
{
Task.Delay(1000, cts.Token).ConfigureAwait(false).GetAwaiter().GetResult();
if (cts.Token.IsCancellationRequested)
return;
PkgChecker.Sync.Wait(cts.Token);
try
{
var frame = Animation[(indicatorIdx++) % Animation.Length];
var currentProgress = PkgChecker.CurrentFileProcessedBytes;
Console.Title = $"{Title} [{(double)(PkgChecker.ProcessedBytes + currentProgress) / PkgChecker.TotalFileSize * 100:0.00}%] {frame}";
if (PkgChecker.CurrentPadding > 0)
{
Console.CursorVisible = false;
var (top, left) = (Console.CursorTop, Console.CursorLeft);
Console.Write($"{(double)currentProgress / PkgChecker.CurrentFileSize * 100:0}%".PadLeft(PkgChecker.CurrentPadding));
Console.CursorTop = top;
Console.CursorLeft = left;
Console.CursorVisible = false;
}
}
finally
{
PkgChecker.Sync.Release();
}
}
}
catch (TaskCanceledException)
{
}
});
t.Start();
await PkgChecker.CheckAsync(pkgList, longestFilename, sigWidth, csumWidth, csumsWidth-2, cts.Token).ConfigureAwait(false);
cts.Cancel(false);
t.Join();
}
finally
{
Console.Title = Title;
Console.WriteLine("Press any key to exit");
Console.ReadKey();
Console.WriteLine();
Console.CursorVisible = true;
}
}
19
View Source File : EntityMapperProvider.cs
License : Apache License 2.0
Project Creator : 1448376744
public ConstructorInfo FindConstructor(Type csharpType)
{
var constructor = csharpType.GetConstructor(Type.EmptyTypes);
if (constructor == null)
{
var constructors = csharpType.GetConstructors();
constructor = constructors.Where(a => a.GetParameters().Length == constructors.Max(s => s.GetParameters().Length)).FirstOrDefault();
}
return constructor;
}
19
View Source File : Program.cs
License : GNU Affero General Public License v3.0
Project Creator : 3CORESec
public static void WriteSuricataFileResult(Options o, Dictionary<string, List<string>> techniques)
{
try
{
var entries = techniques
.ToList()
.Select(entry => new
{
techniqueID = entry.Key,
score = entry.Value.Count,
comment = (o.NoComment) ? null : string.Join(Environment.NewLine, entry.Value.Select(x => x.Split("/").Last()))
});
string filename = o.OutFile.EndsWith(".json") ? "suricata-coverage.json" : $"{o.OutFile}.json";
File.WriteAllText(filename, JsonConvert.SerializeObject(new
{
domain = "mitre-enterprise",
name = "Suricata rules coverage",
gradient = new
{
colors = new[] { "#a0eab5", "#0f480f" },
maxValue = techniques
.Values
.Max(x => x.Count),
minValue = 0
},
version = "4.2",
techniques = entries
}, Formatting.Indented, new JsonSerializerSettings
{
NullValueHandling = NullValueHandling.Ignore
}));
Console.WriteLine($"[*] Layer file written in {filename} ({entries.Count()} techniques covered)");
}
catch (Exception e)
{
Console.WriteLine("Problem writing to file: " + e.Message);
}
}
19
View Source File : Fellowship.cs
License : GNU Affero General Public License v3.0
Project Creator : ACEmulator
private void CalculateXPSharing()
{
// - If all members of the fellowship are level 50 or above, all members will share XP equally
// - If all members of the fellowship are within 5 levels of the founder, XP will be shared equally
// - If members are all within ten levels of the founder, XP will be shared proportionally.
var fellows = GetFellowshipMembers();
var allEvenShareLevel = PropertyManager.GetLong("fellowship_even_share_level").Item;
var allOverEvenShareLevel = !fellows.Values.Any(f => (f.Level ?? 1) < allEvenShareLevel);
if (allOverEvenShareLevel)
{
ShareXP = DesiredShareXP;
EvenShare = true;
return;
}
var leader = PlayerManager.GetOnlinePlayer(FellowshipLeaderGuid);
if (leader == null)
return;
var maxLevelDiff = fellows.Values.Max(f => Math.Abs((leader.Level ?? 1) - (f.Level ?? 1)));
if (maxLevelDiff <= 5)
{
ShareXP = DesiredShareXP;
EvenShare = true;
}
else if (maxLevelDiff <= 10)
{
ShareXP = DesiredShareXP;
EvenShare = false;
}
else
{
ShareXP = false;
EvenShare = false;
}
}
19
View Source File : Repository.cs
License : Apache License 2.0
Project Creator : adamralph
public Version GetVersion(string tagPrefix, VersionPart autoIncrement, string defaultPreReleasePhase, ILogger log)
{
var commit = this.head;
if (commit == null)
{
var version = new Version(defaultPreReleasePhase);
log.Info($"No commits found. Using default version {version}.");
return version;
}
var tagsAndVersions = this.tags
.Select(tag => (tag, Version.ParseOrDefault(tag.Name, tagPrefix)))
.OrderBy(tagAndVersion => tagAndVersion.Item2)
.ThenBy(tagsAndVersion => tagsAndVersion.tag.Name)
.ToList();
var commitsChecked = new HashSet<string>();
var count = 0;
var height = 0;
var candidates = new List<Candidate>();
var commitsToCheck = new Stack<(Commit, int, Commit)>();
Commit previousCommit = null;
if (log.IsTraceEnabled)
{
log.Trace($"Starting at commit {commit.ShortSha} (height {height})...");
}
while (true)
{
var parentCount = 0;
if (commitsChecked.Add(commit.Sha))
{
++count;
var commitTagsAndVersions = tagsAndVersions.Where(tagAndVersion => tagAndVersion.tag.Sha == commit.Sha).ToList();
var foundVersion = false;
foreach (var (tag, commitVersion) in commitTagsAndVersions)
{
var candidate = new Candidate { Commit = commit, Height = height, Tag = tag.Name, Version = commitVersion, Index = candidates.Count };
foundVersion = foundVersion || candidate.Version != null;
if (log.IsTraceEnabled)
{
log.Trace($"Found {(candidate.Version == null ? "non-" : null)}version tag {candidate}.");
}
candidates.Add(candidate);
}
if (!foundVersion)
{
if (log.IsTraceEnabled)
{
var parentIndex = 0;
Commit firstParent = null;
foreach (var parent in commit.Parents)
{
switch (parentIndex)
{
case 0:
firstParent = parent;
break;
case 1:
log.Trace($"History diverges from {commit.ShortSha} (height {height}) to:");
log.Trace($"- {firstParent.ShortSha} (height {height + 1})");
goto default;
default:
log.Trace($"- {parent.ShortSha} (height {height + 1})");
break;
}
++parentIndex;
parentCount = parentIndex;
}
}
foreach (var parent in ((IEnumerable<Commit>)commit.Parents).Reverse())
{
commitsToCheck.Push((parent, height + 1, commit));
}
if (commitsToCheck.Count == 0 || commitsToCheck.Peek().Item2 <= height)
{
var candidate = new Candidate { Commit = commit, Height = height, Tag = null, Version = new Version(defaultPreReleasePhase), Index = candidates.Count };
if (log.IsTraceEnabled)
{
log.Trace($"Found root commit {candidate}.");
}
candidates.Add(candidate);
}
}
}
else
{
if (log.IsTraceEnabled)
{
log.Trace($"History converges from {previousCommit.ShortSha} (height {height - 1}) back to previously seen commit {commit.ShortSha} (height {height}). Abandoning path.");
}
}
if (commitsToCheck.Count == 0)
{
break;
}
if (log.IsTraceEnabled)
{
previousCommit = commit;
}
var oldHeight = height;
Commit child;
(commit, height, child) = commitsToCheck.Pop();
if (log.IsTraceEnabled)
{
if (parentCount > 1)
{
log.Trace($"Following path from {child.ShortSha} (height {height - 1}) through first parent {commit.ShortSha} (height {height})...");
}
else if (height <= oldHeight)
{
if (commitsToCheck.Any() && commitsToCheck.Peek().Item2 == height)
{
log.Trace($"Backtracking to {child.ShortSha} (height {height - 1}) and following path through next parent {commit.ShortSha} (height {height})...");
}
else
{
log.Trace($"Backtracking to {child.ShortSha} (height {height - 1}) and following path through last parent {commit.ShortSha} (height {height})...");
}
}
}
}
log.Debug($"{count:N0} commits checked.");
var orderedCandidates = candidates.OrderBy(candidate => candidate.Version).ThenByDescending(candidate => candidate.Index).ToList();
var tagWidth = log.IsDebugEnabled ? orderedCandidates.Max(candidate => candidate.Tag?.Length ?? 2) : 0;
var versionWidth = log.IsDebugEnabled ? orderedCandidates.Max(candidate => candidate.Version?.ToString().Length ?? 4) : 0;
var heightWidth = log.IsDebugEnabled ? orderedCandidates.Max(candidate => candidate.Height).ToString(CultureInfo.CurrentCulture).Length : 0;
if (log.IsDebugEnabled)
{
foreach (var candidate in orderedCandidates.Take(orderedCandidates.Count - 1))
{
log.Debug($"Ignoring {candidate.ToString(tagWidth, versionWidth, heightWidth)}.");
}
}
var selectedCandidate = orderedCandidates.Last();
if (selectedCandidate.Tag == null)
{
log.Info($"No commit found with a valid SemVer 2.0 version{(tagPrefix == null ? null : $" prefixed with '{tagPrefix}'")}. Using default version {selectedCandidate.Version}.");
}
log.Info($"Using{(log.IsDebugEnabled && orderedCandidates.Count > 1 ? " " : " ")}{selectedCandidate.ToString(tagWidth, versionWidth, heightWidth)}.");
return selectedCandidate.Version.WithHeight(selectedCandidate.Height, autoIncrement, defaultPreReleasePhase);
}
19
View Source File : Output.cs
License : Apache License 2.0
Project Creator : adamralph
private static string GetListLines(TargetCollection targets, IEnumerable<string> rootTargets, int maxDepth, int maxDepthToShowInputs, bool listInputs, string startingPrefix, Palette p)
{
var lines = new List<(string, string)>();
foreach (var rootTarget in rootTargets)
{
Append(new List<string> { rootTarget }, new Stack<string>(), true, "", 0);
}
var maxColumn1Width = lines.Max(line => Palette.StripColours(line.Item1).Length);
return string.Join("", lines.Select(line => $"{line.Item1.PadRight(maxColumn1Width + line.Item1.Length - Palette.StripColours(line.Item1).Length)} {line.Item2}{Environment.NewLine}"));
void Append(IReadOnlyCollection<string> names, Stack<string> seenTargets, bool isRoot, string previousPrefix, int depth)
{
if (depth > maxDepth)
{
return;
}
foreach (var item in names.Select((name, index) => new { name, index }))
{
var circularDependency = seenTargets.Contains(item.name);
seenTargets.Push(item.name);
try
{
var prefix = isRoot
? startingPrefix
: $"{previousPrefix.Replace(p.TreeCorner, " ", StringComparison.Ordinal).Replace(p.TreeFork, p.TreeDown, StringComparison.Ordinal)}{(item.index == names.Count - 1 ? p.TreeCorner : p.TreeFork)}";
var isMissing = !targets.Contains(item.name);
var line = $"{prefix}{p.Target}{item.name}";
if (isMissing)
{
lines.Add((line + $"{p.Reset} {p.Failed}(missing){p.Reset}", ""));
continue;
}
if (circularDependency)
{
lines.Add((line + $"{p.Reset} {p.Failed}(circular dependency){p.Reset}", targets[item.name].Description));
continue;
}
lines.Add((line + p.Reset, targets[item.name].Description));
var target = targets[item.name];
if (listInputs && depth <= maxDepthToShowInputs && target is IHaveInputs hasInputs)
{
foreach (var inputItem in hasInputs.Inputs.Select((input, index) => new { input, index }))
{
var inputPrefix = $"{prefix.Replace(p.TreeCorner, " ", StringComparison.Ordinal).Replace(p.TreeFork, p.TreeDown, StringComparison.Ordinal)}{(target.Dependencies.Count > 0 && depth + 1 <= maxDepth ? p.TreeDown : " ")}";
lines.Add(($"{inputPrefix}{p.Input}{inpureplacedem.input}{p.Reset}", ""));
}
}
Append(target.Dependencies, seenTargets, false, prefix, depth + 1);
}
finally
{
_ = seenTargets.Pop();
}
}
}
}
19
View Source File : Output.Results.cs
License : Apache License 2.0
Project Creator : adamralph
private static string GetResultLines(IEnumerable<KeyValuePair<Target, TargetResult>> results, TimeSpan? totalDuration, string prefix, Palette p)
{
// whitespace (e.g. can change to '·' for debugging)
var ws = ' ';
var rows = new List<SummaryRow> { new SummaryRow($"{p.Default}Target{p.Reset}", $"{p.Default}Outcome{p.Reset}", $"{p.Default}Duration{p.Reset}", "") };
foreach (var item in results.OrderBy(i => i.Value.Ordinal))
{
var target = $"{p.Target}{item.Key}{p.Reset}";
var outcome = item.Value.Outcome == TargetOutcome.Failed
? $"{p.Failed}{FailedMessage}{p.Reset}"
: item.Value.Outcome == TargetOutcome.NoInputs
? $"{p.Warning}{NoInputsMessage}{p.Reset}"
: $"{p.Succeeded}{SucceededMessage}{p.Reset}";
var duration = item.Value.Duration.HasValue
? $"{p.Timing}{item.Value.Duration.Humanize()}{p.Reset}"
: "";
var percentage = item.Value.Duration.HasValue && totalDuration.HasValue && totalDuration.Value > TimeSpan.Zero
? $"{p.Timing}{100 * item.Value.Duration.Value.TotalMilliseconds / totalDuration.Value.TotalMilliseconds:N1}%{p.Reset}"
: "";
rows.Add(new SummaryRow(target, outcome, duration, percentage));
var index = 0;
foreach (var result in item.Value.InputResults.Values.OrderBy(result => result.Ordinal))
{
var input = $"{ws}{ws}{p.Input}{result.Input}{p.Reset}";
var inputOutcome = result.Outcome == TargetInputOutcome.Failed ? $"{p.Failed}{FailedMessage}{p.Reset}" : $"{p.Succeeded}{SucceededMessage}{p.Reset}";
var inputDuration = result.Duration.HasValue
? $"{(index < item.Value.InputResults.Count - 1 ? p.TreeFork : p.TreeCorner)}{p.Timing}{result.Duration.Humanize()}{p.Reset}"
: "";
var inputPercentage = result.Duration.HasValue && totalDuration.HasValue && totalDuration.Value > TimeSpan.Zero
? $"{(index < item.Value.InputResults.Count - 1 ? p.TreeFork : p.TreeCorner)}{p.Timing}{100 * result.Duration.Value.TotalMilliseconds / totalDuration.Value.TotalMilliseconds:N1}%{p.Reset}"
: "";
rows.Add(new SummaryRow(input, inputOutcome, inputDuration, inputPercentage));
++index;
}
}
// target or input column width
var tarW = rows.Max(row => Palette.StripColours(row.TargetOrInput).Length);
// outcome column width
var outW = rows.Max(row => Palette.StripColours(row.Outcome).Length);
// duration column width
var durW = rows.Count > 1 ? rows.Skip(1).Max(row => Palette.StripColours(row.Duration).Length) : 0;
// percentage column width
var perW = rows.Max(row => Palette.StripColours(row.Percentage).Length);
// timing column width (duration and percentage)
var timW = Max(Palette.StripColours(rows[0].Duration).Length, durW + 2 + perW);
// expand percentage column width to ensure time and percentage are as wide as duration
perW = Max(timW - durW - 2, perW);
var builder = new StringBuilder();
// summary start separator
_ = builder.AppendLine($"{p.Prefix}{prefix}:{p.Reset} {p.Default}{Prp("", tarW + 2 + outW + 2 + timW, p.Horizontal)}{p.Reset}");
// header
_ = builder.AppendLine($"{p.Prefix}{prefix}:{p.Reset} {Prp(rows[0].TargetOrInput, tarW, ws)}{ws}{ws}{Prp(rows[0].Outcome, outW, ws)}{ws}{ws}{Prp(rows[0].Duration, timW, ws)}");
// header separator
_ = builder.AppendLine($"{p.Prefix}{prefix}:{p.Reset} {p.Default}{Prp("", tarW, p.Horizontal)}{p.Reset}{ws}{ws}{p.Default}{Prp("", outW, p.Horizontal)}{p.Reset}{ws}{ws}{p.Default}{Prp("", timW, p.Horizontal)}{p.Reset}");
// targets
foreach (var row in rows.Skip(1))
{
_ = builder.AppendLine($"{p.Prefix}{prefix}:{p.Reset} {Prp(row.TargetOrInput, tarW, ws)}{p.Reset}{ws}{ws}{Prp(row.Outcome, outW, ws)}{p.Reset}{ws}{ws}{Prp(row.Duration, durW, ws)}{p.Reset}{ws}{ws}{Prp(row.Percentage, perW, ws)}{p.Reset}");
}
// summary end separator
_ = builder.AppendLine($"{p.Prefix}{prefix}:{p.Reset} {p.Default}{Prp("", tarW + 2 + outW + 2 + timW, p.Horizontal)}{p.Reset}");
return builder.ToString();
// pad right printed
static string Prp(string text, int totalWidth, char paddingChar) =>
text.PadRight(totalWidth + (text.Length - Palette.StripColours(text).Length), paddingChar);
}
19
View Source File : PortalSolutions.cs
License : MIT License
Project Creator : Adoxio
private void TraceSolutions()
{
var stringBuilder = new StringBuilder();
var tableDifinition = new Dictionary<string, Func<SolutionInfo, string>>
{
{ "Unique name", s => s.Name },
{ "Version", s => s.SolutionVersion.ToString() },
{ "Installed on", s => s.InstalledOn.ToString() }
};
var columnFormat = new Dictionary<string, string>();
// Calculate width of each column and write header
foreach (var columnDefinition in tableDifinition)
{
var maxWidth = this.Solutions.Values.Max(solution => tableDifinition[columnDefinition.Key](solution).Length);
var format = string.Format("{{0, -{0}}}", maxWidth);
columnFormat[columnDefinition.Key] = format;
stringBuilder.AppendFormat(format, columnDefinition.Key);
stringBuilder.Append(" ");
}
stringBuilder.AppendLine();
// Render rows
foreach (var solution in this.Solutions.Values)
{
foreach (var columnDefinition in tableDifinition)
{
stringBuilder.AppendFormat(columnFormat[columnDefinition.Key], columnDefinition.Value(solution));
stringBuilder.Append(" ");
}
stringBuilder.AppendLine();
}
ADXTrace.Instance.TraceInfo(TraceCategory.Application, string.Format("Installed portal solutions on CRM {0}:{1}{2}", this.CrmVersion, Environment.NewLine, stringBuilder));
}
19
View Source File : OrganizationServiceContextExtensions.cs
License : MIT License
Project Creator : Adoxio
public static DateTime? GetOpportunityLatestStatusModifiedOn(this OrganizationServiceContext context, Entity opportunity)
{
opportunity.AssertEntityName("opportunity");
var opportunityHistories = context.GetOpportunityHistories(opportunity);
return opportunityHistories.Any()
? (DateTime?)opportunityHistories.Max(history => history.NoteCreatedOn)
: null;
}
19
View Source File : TagCloudData.cs
License : MIT License
Project Creator : Adoxio
protected virtual IEnumerable<TagCloudDataItem> AssignWeights(int numberOfWeights, IEnumerable<TagCloudDataItem> items)
{
// The call to Max on the next line will fail if there are no items,
// so return if the collection is empty
if (items.Count() == 0) return items;
// Find the highest count in our collection--items with this count will have
// the max weight (i.e., the value of the "weights" param)
var maxFrequency = items.Max(item => item.TaggedItemCount);
// Find the lowest count in our collection--items with this count will have a
// weight of 1
var minFrequency = items.Min(item => item.TaggedItemCount);
// The size of each frequency threshold
var delta = (maxFrequency - minFrequency) / (double)numberOfWeights;
return items.Select(item =>
{
for (var weight = 1; weight <= numberOfWeights; weight++)
{
// We add 2 to each threshold and adjustedFrequency, to cancel out the
// possibility of log(0) or log(1), which would have distorting effects
var threshold = 100 * Math.Log((minFrequency + weight * delta) + 2);
var adjustedFrequency = 100 * Math.Log(item.TaggedItemCount + 2);
if (adjustedFrequency <= threshold)
{
item.Weight = weight;
break;
}
}
return item;
});
}
19
View Source File : TreeModel.cs
License : MIT License
Project Creator : Adsito
void Init (IList<T> data)
{
if (data == null)
throw new ArgumentNullException("data", "Input data is null. Ensure input is a non-null list.");
m_Data = data;
if (m_Data.Count > 0)
m_Root = TreeElementUtility.ListToTree(data);
m_MaxID = m_Data.Max(e => e.id);
}
19
View Source File : BlockKeyTransform.cs
License : GNU General Public License v3.0
Project Creator : Aekras1a
private void MatchHandlers(EHMap map, ref bool updated)
{
// handler start = 0xffffffff
// finally end = next block of try end
foreach(var start in map.Starts)
{
var key = Keys[start];
if(key.Entry != 0xffffffff)
{
key.Entry = 0xffffffff;
Keys[start] = key;
updated = true;
}
}
foreach(var info in map.Finally.Values)
{
var maxEnd = info.FinallyEnds.Max(block => Keys[block].Exit);
var maxEntry = info.TryEndNexts.Max(block => Keys[block].Entry);
var maxId = Math.Max(maxEnd, maxEntry);
foreach(var block in info.FinallyEnds)
{
var key = Keys[block];
if(key.Exit != maxId)
{
key.Exit = maxId;
Keys[block] = key;
updated = true;
}
}
foreach(var block in info.TryEndNexts)
{
var key = Keys[block];
if(key.Entry != maxId)
{
key.Entry = maxId;
Keys[block] = key;
updated = true;
}
}
}
}
19
View Source File : MemoryBuffer.cs
License : MIT License
Project Creator : Aiko-IT-Systems
public void CopyTo(Stream destination)
{
if (this._isDisposed)
throw new ObjectDisposedException("This buffer is disposed.");
#if HAS_SPAN_STREAM_OVERLOADS
foreach (var seg in this._segments)
destination.Write(seg.Memory.Span);
#else
var longest = this._segments.Max(x => x.Memory.Length);
var buff = new byte[longest];
foreach (var seg in this._segments)
{
var mem = seg.Memory.Span;
var spn = buff.AsSpan(0, mem.Length);
mem.CopyTo(spn);
destination.Write(buff, 0, spn.Length);
}
#endif
}
19
View Source File : SessionService.cs
License : MIT License
Project Creator : aksoftware98
public async Task<EntityApiResponse<WorkScheduleDetail>> CreateWorkScheduleAsync(WorkScheduleDetail scheduleDetail, string currentUserId)
{
if (scheduleDetail is null)
throw new ArgumentNullException(nameof(scheduleDetail));
if (scheduleDetail.Sessions is null || scheduleDetail.Sessions.Count < 1)
return new EntityApiResponse<WorkScheduleDetail>(error: "Sessions can't be empty");
// if there exists a session with a startDate DOW different from its endDate DOW
if (scheduleDetail.Sessions.Any(s => s.StartDate.DayOfWeek != s.EndDate.DayOfWeek))
return new EntityApiResponse<WorkScheduleDetail>(error: "Sessions start and end date should be on the same day");
// Check if there is a session with end date earlier than start date
if (scheduleDetail.Sessions.Any(s => s.EndDate <= s.StartDate))
return new EntityApiResponse<WorkScheduleDetail>(error: "Schedule has incorrect time ranges");
var tomorrowDate = DateTime.UtcNow.AddDays(1).NormalizedDate();
if (scheduleDetail.Sessions.Any(s => s.StartDate < tomorrowDate))
return new EntityApiResponse<WorkScheduleDetail>(error: "Sessions can only be added for tomorrow and beyond");
if (scheduleDetail.Sessions.Any(s => s.User is null))
return new EntityApiResponse<WorkScheduleDetail>(error: "A session doesn't have a user specified for it");
var sessionsByDayOfWeek = scheduleDetail.Sessions.GroupBy(s => s.StartDate.DayOfWeek);
// Check if any session has conflicts with any other session
if (HasTimeConflicts(sessionsByDayOfWeek))
return new EntityApiResponse<WorkScheduleDetail>(error: "Schedule has some time conflicts");
var org = await _orgRepository.GetByIdAsync(scheduleDetail.OrganiztionId);
if (org is null)
return new EntityApiResponse<WorkScheduleDetail>(error: "Organization does not exist");
var usersIdsRetrieved = new List<string>();
var sessionsAdded = new Session[scheduleDetail.Sessions.Count];
int i = 0;
foreach (var sessionDetail in scheduleDetail.Sessions)
{
if (!usersIdsRetrieved.Contains(sessionDetail.User.Id))
{
var user = await _userManager.FindByIdAsync(sessionDetail.User.Id);
if (user is null)
return new EntityApiResponse<WorkScheduleDetail>
(error: $"Session with name: {sessionDetail.Name} has a selected user that does not exist");
usersIdsRetrieved.Add(user.Id);
}
var session = new Session
{
Name = sessionDetail.Name?.Trim(),
Description = sessionDetail.Description?.Trim(),
StartDate = sessionDetail.StartDate.ToUniversalTime(),
EndDate = sessionDetail.EndDate.ToUniversalTime(),
CreatedById = currentUserId,
ModifiedById = currentUserId,
OrganizationId = org.Id,
UserId = sessionDetail.User.Id
};
await _sessionRepository.InsertAsync(session);
sessionsAdded[i] = session;
i++;
}
var endDate = sessionsAdded.Max(s => s.EndDate);
return new EntityApiResponse<WorkScheduleDetail>(entity: new WorkScheduleDetail(sessionsAdded, tomorrowDate, endDate));
}
19
View Source File : FileHelpers.cs
License : MIT License
Project Creator : albyho
private static bool IsValidFileExtensionAndSignature(string fileName, Stream data, string[] permittedExtensions)
{
if (string.IsNullOrEmpty(fileName) || data == null || data.Length == 0)
{
return false;
}
var ext = Path.GetExtension(fileName).ToLowerInvariant();
if (string.IsNullOrEmpty(ext) || !permittedExtensions.Contains(ext))
{
return false;
}
data.Position = 0;
using (var reader = new BinaryReader(data))
{
if (ext.Equals(".txt") || ext.Equals(".csv") || ext.Equals(".prn"))
{
if (_allowedChars.Length == 0)
{
// Limits characters to ASCII encoding.
for (var i = 0; i < data.Length; i++)
{
if (reader.ReadByte() > sbyte.MaxValue)
{
return false;
}
}
}
else
{
// Limits characters to ASCII encoding and
// values of the _allowedChars array.
for (var i = 0; i < data.Length; i++)
{
var b = reader.ReadByte();
if (b > sbyte.MaxValue ||
!_allowedChars.Contains(b))
{
return false;
}
}
}
return true;
}
// Uncomment the following code block if you must permit
// files whose signature isn't provided in the _fileSignature
// dictionary. We recommend that you add file signatures
// for files (when possible) for all file types you intend
// to allow on the system and perform the file signature
// check.
/*
if (!_fileSignature.ContainsKey(ext))
{
return true;
}
*/
// File signature check
// --------------------
// With the file signatures provided in the _fileSignature
// dictionary, the following code tests the input content's
// file signature.
var signatures = _fileSignature[ext];
var headerBytes = reader.ReadBytes(signatures.Max(m => m.Length));
return signatures.Any(signature =>
headerBytes.Take(signature.Length).SequenceEqual(signature));
}
}
19
View Source File : BiomeSurfaceGraph.cs
License : MIT License
Project Creator : alelievr
public bool BuildGraph(List< BiomeSurfaceSwitch > surfacesSwitches)
{
var bSwitchCellMap = new Dictionary< BiomeSurfaceSwitch, BiomeSurfaceCell >();
isBuilt = false;
if (surfacesSwitches.Count == 0)
return false;
surfaceType = surfacesSwitches.First().surface.type;
Action< BiomeSurfaceCell, BiomeSurfaceSwitch > AddLink = (cell, s) => {
var link = new BiomeSurfaceLink();
link.toCell = bSwitchCellMap[s];
if (!cell.links.Any(c => c.toCell == link.toCell))
cell.links.Add(link);
};
// calculate ranges
float heightRange = surfacesSwitches.Max(s => s.maxHeight) - surfacesSwitches.Min(s => s.minHeight);
float slopeRange = surfacesSwitches.Max(s => s.maxSlope) - surfacesSwitches.Min(s => s.minSlope);
float paramRange = surfacesSwitches.Max(s => s.maxParam) - surfacesSwitches.Min(s => s.minParam);
//Generate surface switches nodes:
foreach (var bSwitch in surfacesSwitches)
bSwitchCellMap[bSwitch] = new BiomeSurfaceCell();
cells.Clear();
lastCell = null;
foreach (var bSwitch in surfacesSwitches)
{
BiomeSurfaceCell cell = bSwitchCellMap[bSwitch];
cell.surface = bSwitch.surface;
cell.surfaceSwitch = bSwitch;
cell.weight = bSwitch.GetWeight(heightRange, slopeRange, paramRange);
foreach (var biomeSwitch in surfacesSwitches)
if (biomeSwitch.Overlaps(bSwitch))
AddLink(cell, biomeSwitch);
cell.links.Sort((l1, l2) => {
float w1 = l1.toCell.weight;
float w2 = l2.toCell.weight;
//reverse sort
return w2.CompareTo(w1);
});
cells.Add(cell);
}
rootCell = cells.First();
if (!CheckValid())
return false;
isBuilt = true;
return true;
}
19
View Source File : MultiLikeControlViewModel.cs
License : GNU General Public License v3.0
Project Creator : alexdillon
private async Task DoMultiLike()
{
var range = this.GroupContentsControlViewModel.CurrentlySelectedMessages;
if (range == null)
{
return;
}
var itemList = (range as ObservableCollection<object>).Cast<MessageControlViewModelBase>().ToList();
var oldestId = itemList.Min(m => long.Parse(m.Id));
var newestId = itemList.Max(m => long.Parse(m.Id));
var loadingControl = new LoadingControlViewModel();
this.GroupContentsControlViewModel.SmallDialogManager.OpenPopup(loadingControl, Guid.Empty);
foreach (var message in this.GroupContentsControlViewModel.MessagesSorted)
{
var id = long.Parse(message.Id);
if (id >= oldestId && id <= newestId && message is MessageControlViewModel mcvm)
{
loadingControl.Message = $"Liking Message {mcvm.Message.Text}";
await mcvm.LikeMessageAsync();
await Task.Delay(this.LikeDelay);
}
}
this.DisableMultiLike();
this.GroupContentsControlViewModel.SmallDialogManager.ClosePopup();
}
19
View Source File : MultiLikeControlViewModel.cs
License : GNU General Public License v3.0
Project Creator : alexdillon
private async Task DoMultiLike()
{
var range = this.GroupContentsControlViewModel.CurrentlySelectedMessages;
if (range == null)
{
return;
}
var itemList = (range as ObservableCollection<object>).Cast<MessageControlViewModelBase>().ToList();
var oldestId = itemList.Min(m => long.Parse(m.Id));
var newestId = itemList.Max(m => long.Parse(m.Id));
var loadingControl = new LoadingControlViewModel();
this.GroupContentsControlViewModel.PopupManager.PopupDialog = loadingControl;
foreach (var message in this.GroupContentsControlViewModel.SortedMessages)
{
var id = long.Parse(message.Id);
if (id >= oldestId && id <= newestId && message is MessageControlViewModel mcvm)
{
loadingControl.Message = $"Liking Message {mcvm.Message.Text}";
await mcvm.LikeMessageAsync();
await Task.Delay(this.LikeDelay);
}
}
this.DisableMultiLike();
this.GroupContentsControlViewModel.PopupManager.PopupDialog = null;
}
19
View Source File : PlotModel.cs
License : MIT License
Project Creator : AlexGyver
private void EnforceCartesianTransforms()
{
// Set the same scaling on all axes
double sharedScale = this.Axes.Min(a => Math.Abs(a.Scale));
foreach (var a in this.Axes)
{
a.Zoom(sharedScale);
}
sharedScale = this.Axes.Max(a => Math.Abs(a.Scale));
foreach (var a in this.Axes)
{
a.Zoom(sharedScale);
}
foreach (var a in this.Axes)
{
a.UpdateTransform(this.PlotArea);
}
}
19
View Source File : NotificationViewModel.cs
License : MIT License
Project Creator : aliprogrammer69
private async void Getnotifications() {
FetchingNotifications = true;
NotificationsResponse notifications = await _userService.GetNotifications(_pagingRequest);
if (!notifications.Success) {
_messageService.Show($"Failed to notifications. {notifications.Error_message}");
return;
}
IEnumerable<NotificationModel> newNotifications = notifications.Notifications.Where(n => n.Notification_Id > _latestNnotificationId);
if (newNotifications.Any()) {
AllNotifications.AddRange(newNotifications);
NotificationsView.Refresh();
_latestNnotificationId = newNotifications.Max(n => n.Notification_Id);
}
FetchingNotifications = false;
_pagingRequest.TotalCount = notifications.Count;
_pagingRequest.NextPageCount = notifications.Next;
_pagingRequest.PreviousPageCount = notifications.Previous;
}
19
View Source File : MainViewModel.cs
License : MIT License
Project Creator : alkampfergit
private void InnerExecuteExport()
{
var baseFolder = Environment.GetFolderPath(Environment.SpecialFolder.MyDocuments);
foreach (var selectedTemplate in Templates.Where(t => t.IsSelected))
{
if (selectedTemplate.IsScriptTemplate)
{
if (ArrayParameters.Any())
{
var arrayParameters = ArrayParameters.Select(p => new
{
Name = p.Name,
Values = p.Value?.Split(',', ';').ToList() ?? new List<string>()
})
.ToList();
Int32 maxParameterCount = arrayParameters.Max(p => p.Values.Count);
for (int i = 0; i < maxParameterCount; i++)
{
StringBuilder fileSuffix = new StringBuilder();
Dictionary<string, object> parameters = PrepareUserParameters();
foreach (var arrayParameter in arrayParameters)
{
var value = arrayParameter.Values.Count > i ? arrayParameter.Values[i] : String.Empty;
parameters[arrayParameter.Name] = value;
fileSuffix.Append(arrayParameter.Name);
fileSuffix.Append("_");
fileSuffix.Append(value);
}
var fileName = Path.Combine(baseFolder, selectedTemplate.TemplateName + "_" + DateTime.Now.ToString("dd_MM_yyyy hh mm")) + "_" + fileSuffix.ToString();
GenerateFileFromScriptTemplate(fileName, selectedTemplate, parameters);
}
}
else
{
var fileName = Path.Combine(baseFolder, selectedTemplate.TemplateName + "_" + DateTime.Now.ToString("dd_MM_yyyy hh mm"));
Dictionary<string, object> parameters = PrepareUserParameters();
GenerateFileFromScriptTemplate(fileName, selectedTemplate, parameters);
}
}
else
{
var fileName = Path.Combine(baseFolder, selectedTemplate.TemplateName + "_" + DateTime.Now.ToString("dd_MM_yyyy hh mm")) + ".docx";
var selected = SelectedQuery?.Results?.Where(q => q.Selected).ToList();
if (selected == null || selected.Count == 0)
{
return;
}
var template = selectedTemplate.WordTemplateFolderManager;
using (WordManipulator manipulator = new WordManipulator(fileName, true))
{
foreach (var workItemResult in selected)
{
var workItem = workItemResult.WorkItem;
manipulator.InsertWorkItem(workItem, template.GetTemplateFor(workItem.Type.Name), true);
}
}
ManageGeneratedWordFile(fileName);
}
}
Status = $"Export Completed";
}
19
View Source File : Transformer.cs
License : MIT License
Project Creator : allisterb
protected virtual StageResult Transform(int? recordBatchSize = null, int? recordLimit = null, Dictionary<string, string> options = null)
{
if (!ParallelExecution || InputRecords.Count < 100 || ((RecordLimitSize > 0) && (RecordLimitSize < 100)))
{
using (Operation transformOp = Begin("Transforming {0} records using sequential execution", InputRecords.Count))
{
for (int i = 0; i < InputRecords.Count; i++)
{
OutputRecords.Add(TransformInputToOutput(this, WriterOptions, InputRecords[i]));
if ((i + 1) % 1000 == 0)
{
Info("Transformed range {0} to {1} of {2} records...", (i + 1) - 1000, i + 1, InputRecords.Count);
}
if ((RecordLimitSize > 0) && (i + 1 == RecordLimitSize))
{
Info("Stopping transformation at record limit {0}.", i + 1);
transformOp.Complete();
break;
}
}
transformOp.Complete();
}
}
else
{
int limit = RecordLimitSize > 0 ? RecordLimitSize <= InputRecords.Count ? RecordLimitSize : InputRecords.Count : InputRecords.Count;
using (Operation transformOp = Begin("Transforming {0} records using parallel execution", limit))
{
ConcurrentDictionary<int, TRecord> concurrentOutputDictionary = new ConcurrentDictionary<int, TRecord>();
Parallel.For(0, limit, (i, loop) =>
{
TRecord output = TransformInputToOutput(this, WriterOptions, InputRecords[i]);
concurrentOutputDictionary.TryAdd(i, output);
if ((i + 1) % 1000 == 0)
{
Info("Transformed range {0} to {1} of {2} records...", (i + 1) - 1000, i + 1, InputRecords.Count);
}
});
OutputRecords = concurrentOutputDictionary.Values.ToList();
transformOp.Complete();
}
}
Info("Transformed {0} records with maximum {1} features to {2}.", OutputRecords.Count, OutputRecords.Max(r => r.Features.Count), OutputFileName);
return StageResult.SUCCESS;
}
19
View Source File : ParquetReaderTest.cs
License : MIT License
Project Creator : aloneguid
[Fact]
public void Read_multiple_data_pages()
{
using (var reader =
new ParquetReader(OpenTestFile("/special/multi_data_page.parquet"), leaveStreamOpen: false))
{
DataColumn[] columns = reader.ReadEntireRowGroup();
string[] s = (string[]) columns[0].Data;
double?[] d = (double?[]) columns[1].Data;
// check for nulls (issue #370)
for (int i = 0; i < s.Length; i++)
{
Assert.True(s[i] != null, "found null in s at " + i);
Assert.True(d[i] != null, "found null in d at " + i);
}
// run aggregations checking row alignment (issue #371)
var seq = s.Zip(d.Cast<double>(), (w, v) => new {w, v})
.Where(p => p.w == "favorable")
.ToList();
// double matching is fuzzy, but matching strings is enough for this test
// ground truth was computed using Spark
Assert.Equal(26706.6185312147, seq.Sum(p => p.v), 5);
Assert.Equal(0.808287234987281, seq.Average(p => p.v), 5);
Assert.Equal(0.71523915461624, seq.Min(p => p.v), 5);
Assert.Equal(0.867111980015206, seq.Max(p => p.v), 5);
}
}
19
View Source File : ProcessRepository.cs
License : MIT License
Project Creator : ambleside138
public WorkProcess Regist(WorkProcess process)
{
var newId = 1;
if(_ListProcess.Any())
{
newId = _ListProcess.Max(i => i.Id.Value) + 1;
}
var newProcess = new WorkProcess(new Domain.Identity<WorkProcess>(newId), process.Title);
_ListProcess.Add(newProcess);
return newProcess;
}
19
View Source File : Metadata.cs
License : MIT License
Project Creator : AndnixSH
private void ProcessingMetadataUsage()
{
metadataUsageDic = new Dictionary<Il2CppMetadataUsage, SortedDictionary<uint, uint>>();
for (uint i = 1; i <= 6; i++)
{
metadataUsageDic[(Il2CppMetadataUsage)i] = new SortedDictionary<uint, uint>();
}
foreach (var metadataUsageList in metadataUsageLists)
{
for (int i = 0; i < metadataUsageList.count; i++)
{
var offset = metadataUsageList.start + i;
var metadataUsagePair = metadataUsagePairs[offset];
var usage = GetEncodedIndexType(metadataUsagePair.encodedSourceIndex);
var decodedIndex = GetDecodedMethodIndex(metadataUsagePair.encodedSourceIndex);
metadataUsageDic[(Il2CppMetadataUsage)usage][metadataUsagePair.destinationIndex] = decodedIndex;
}
}
maxMetadataUsages = metadataUsageDic.Max(x => x.Value.Max(y => y.Key)) + 1;
}
19
View Source File : EventTreeMerger.cs
License : GNU General Public License v3.0
Project Creator : AndreiFedarets
private IEventTree[] MergeByThreads(IEnumerable<ISingleEventTree> source)
{
IGrouping<uint, ISingleEventTree>[] groups = source.GroupBy(x => x.ThreadUid).ToArray();
IEventTree[] result = new IEventTree[groups.Length];
for (int i = 0; i < groups.Length; i++)
{
ISingleEventTree[] items = groups[i].ToArray();
uint beginLifetime = items.Min(x => x.BeginLifetime);
uint endLifetime = items.Max(x => x.EndLifetime);
uint threadOsId = items[0].ThreadOsId;
uint threadUid = items[0].ThreadUid;
List<byte[]> data = items.Select(x => x.GetBinaryData()).ToList();
uint hits = 1;
uint time = (uint)data.Sum(x => NativeEventHelper.GetTime(x));
byte[] mergedData = _agentLibrary.MergeEventTrees(data, NativeEventHelper.CreateEvent(ThreadEventTreeMessage.EventType, 0, threadUid, time, hits));
ThreadEventTree threadEventTree = new ThreadEventTree(threadUid, threadOsId, beginLifetime, endLifetime, mergedData);
result[i] = threadEventTree;
}
return result;
}
19
View Source File : EventTreeMerger.cs
License : GNU General Public License v3.0
Project Creator : AndreiFedarets
private IEventTree[] MergeByRoots(IEnumerable<ISingleEventTree> source)
{
IGrouping<ulong, ISingleEventTree>[] groups = source.GroupBy(x => x.EventHash).ToArray();
IEventTree[] result = new IEventTree[groups.Length];
for (int i = 0; i < groups.Length; i++)
{
ISingleEventTree[] items = groups[i].ToArray();
uint beginLifetime = items.Min(x => x.BeginLifetime);
uint endLifetime = items.Max(x => x.EndLifetime);
List<byte[]> data = items.Select(x => x.GetBinaryData()).ToList();
byte[] mergedData = _agentLibrary.MergeEventTrees(data);
MergedEventTree mergedEventTree = new MergedEventTree(beginLifetime, endLifetime, mergedData);
result[i] = mergedEventTree;
}
return result;
}
19
View Source File : Histogram.cs
License : MIT License
Project Creator : AndreyAkinshin
[PublicAPI, Pure]
public string ToString(Func<double, string> formatter, char binSymbol = '@', bool full = false)
{
var lower = new string[Bins.Length];
var upper = new string[Bins.Length];
for (int i = 0; i < Bins.Length; i++)
{
lower[i] = formatter(Bins[i].Lower);
upper[i] = formatter(Bins[i].Upper);
}
int lowerWidth = lower.Max(it => it.Length);
int upperWidth = upper.Max(it => it.Length);
var builder = new StringBuilder();
for (int i = 0; i < Bins.Length; i++)
{
string intervalStr = $"[{lower[i].PadLeft(lowerWidth)} ; {upper[i].PadLeft(upperWidth)})";
string barStr = full
? string.Join(", ", Bins[i].Values.Select(formatter))
: new string(binSymbol, Bins[i].Count);
builder.AppendLine($"{intervalStr} | {barStr}");
}
return builder.ToString().Trim();
}
19
View Source File : GreenwaldKhannaQuantileEstimator.cs
License : MIT License
Project Creator : AndreyAkinshin
[NotNull]
internal string DumpToString(string format = "N2")
{
if (tuples.Count == 0)
return "";
var rMaxBuilder = new StringBuilder("rMax :");
var valueBuilder = new StringBuilder("value :");
var rMinBuilder = new StringBuilder("rMin :");
var detailBuilder = new StringBuilder("Tuples:");
int indexW = (tuples.Count - 1).ToString().Length;
int valueW = tuples.Max(t => t.Value.ToStringInvariant(format).Length);
int gapW = tuples.Max(t => t.Gap.ToString().Length);
int deltaW = tuples.Max(t => t.Delta.ToString().Length);
int rMin = 0;
for (int i = 0; i < tuples.Count; i++)
{
rMin += tuples[i].Gap;
int rMax = rMin + tuples[i].Delta;
string rMaxStr = rMax.ToString();
string valueStr = tuples[i].Value.ToStringInvariant(format);
string rMinStr = rMin.ToString();
int w = new[] { rMaxStr.Length, valueStr.Length, rMinStr.Length }.Max() + 1;
rMaxBuilder.Append(rMaxStr.PadLeft(w));
valueBuilder.Append(valueStr.PadLeft(w));
rMinBuilder.Append(rMinStr.PadLeft(w));
detailBuilder.AppendLine(
$"[{i.ToString().PadLeft(indexW)}]: " +
$"v = {valueStr.PadLeft(valueW)}, " +
$"g = {tuples[i].Gap.ToString().PadLeft(gapW)}, " +
$"delta = {tuples[i].Delta.ToString().PadLeft(deltaW)}");
}
return string.Join(Environment.NewLine, rMaxBuilder, valueBuilder, rMinBuilder, "", detailBuilder);
}
19
View Source File : RqqPeltSimulation.cs
License : MIT License
Project Creator : AndreyAkinshin
public void Run(string[] args)
{
var stopwatch = Stopwatch.StartNew();
var fullDataSet = CpdReferenceDataSet.Generate(new Random(42), 2);
string dataSetArg = args.Length > 0 ? args[0] : "*";
bool printReports = args.Contains("--reports");
int limit = int.MaxValue;
int limitArgIndex = Array.IndexOf(args, "--limit");
if (limitArgIndex >= 0 && limitArgIndex < args.Length - 1)
if (int.TryParse(args[limitArgIndex + 1], out int actualLimit))
limit = actualLimit;
var dataSet = dataSetArg == "*" ? fullDataSet : fullDataSet.Where(data => data.Name.Contains(dataSetArg)).ToList();
if (limit < dataSet.Count)
{
new Shuffler(42).Shuffle(dataSet);
dataSet.RemoveRange(limit, dataSet.Count - limit);
}
if (dataSet.Count == 0)
{
PrintLine("DataSet is empty");
return;
}
dataSet.Sort((a, b) => string.Compare(a.Name, b.Name, StringComparison.Ordinal));
if (args.Contains("--tune"))
{
var heterogeneityFactors = new ArithmeticProgressionSequence(1.1, 0.05).GenerateArray(7);
var sensitivities = new ArithmeticProgressionSequence(0.4, 0.02).GenerateArray(8);
var quantileSets = new List<QuantileSet>
{
QuantileSet.Classic,
QuantileSet.ArithmeticProgression(12, 0),
QuantileSet.SteadyPlusArithmeticProgression(12, 7, -0.01),
QuantileSet.ArithmeticProgressionWithRepetitions(12, 4, -0.1)
};
int quantileSetMaxLength = quantileSets.Max(s => s.Name.Length);
var results =
new List<(double HeterogeneityFactor, double Sensitivity, string QuantileSet, double MaxPenalty, double SumPenalty)>();
foreach (double heterogeneityFactor in heterogeneityFactors)
foreach (double sensitivity in sensitivities)
foreach (var quantileSet in quantileSets)
{
double homogeneityFactor = heterogeneityFactor - 1;
PrintLine(Separator('@'));
PrintLine(
$"@ HeterogeneityFactor = {heterogeneityFactor:0.0}, Sensitivity = {sensitivity:0.00}, QuantileSet = {quantileSet.Name}");
var detector = new RqqPeltChangePointDetector(
quantileSet.Probabilities,
quantileSet.Factors,
sensitivity: sensitivity,
heterogeneityFactor: heterogeneityFactor,
homogeneityFactor: homogeneityFactor);
var penalties = RunSingle(detector, dataSet, printReports);
results.Add((heterogeneityFactor, sensitivity, quantileSet.Name, penalties.Max(), penalties.Sum()));
}
PrintLine(Separator('*'));
PrintLine(Separator('*'));
PrintLine(Separator('*'));
results.Sort((a, b) =>
Math.Sign(b.MaxPenalty.CompareTo(a.MaxPenalty)) * 10 + Math.Sign(b.SumPenalty.CompareTo(a.SumPenalty)));
foreach ((double heterogeneityFactor, double sensitivity, string quantileSet, double maxPenalty,
double sumPenalty) in results)
PrintLine(
$"{heterogeneityFactor:0.00} {sensitivity:0.00} {quantileSet.PadRight(quantileSetMaxLength)} : {maxPenalty} / {sumPenalty}");
}
else
RunSingle(RqqPeltChangePointDetector.Instance, dataSet, printReports);
stopwatch.Stop();
PrintLine();
PrintLine($"TotalTime = {stopwatch.Elapsed.TotalSeconds:0.0} sec");
}
19
View Source File : ScrappedData.cs
License : MIT License
Project Creator : andruzzzhka
public IEnumerator DownloadScrappedData(Action<List<ScrappedSong>> callback)
{
Plugin.log.Info("Downloading scrapped data...");
UnityWebRequest www;
bool timeout = false;
float time = 0f;
UnityWebRequestAsyncOperation asyncRequest;
try
{
www = UnityWebRequest.Get(scrappedDataURL);
asyncRequest = www.SendWebRequest();
}
catch (Exception e)
{
Plugin.log.Error(e);
yield break;
}
while (!asyncRequest.isDone)
{
yield return null;
time += Time.deltaTime;
if (time >= 5f && asyncRequest.progress <= float.Epsilon)
{
www.Abort();
timeout = true;
Plugin.log.Error("Connection timed out!");
}
}
if (www.isNetworkError || www.isHttpError || timeout)
{
Plugin.log.Error("Unable to download scrapped data! " + (www.isNetworkError ? $"Network error: {www.error}" : (www.isHttpError ? $"HTTP error: {www.error}" : "Unknown error")));
}
else
{
Plugin.log.Info("Received response from github.com...");
Songs = JsonConvert.DeserializeObject<List<ScrappedSong>>(www.downloadHandler.text).OrderByDescending(x => x.Diffs.Count > 0 ? x.Diffs.Max(y => y.Stars) : 0).ToList();
callback?.Invoke(Songs);
Plugin.log.Info("Scrapped data downloaded!");
}
}
19
View Source File : DnaEnvironment.cs
License : MIT License
Project Creator : angelsix
protected void ProcessCommandNewTemplate(string command)
{
// Extract values split by space
var templateArguments = command.Split(' ');
// We expect name as third argument
if (templateArguments.Length != 3)
{
// Log error
CoreLogger.Log($"new template has unknown number of commands. Expected 'new template [name]'");
// Stop
return;
}
// Get name
var name = templateArguments[2];
// Find template
var foundTemplate = LiveDataManager.FindTemplate(name);
// If we didn't find out...
if (foundTemplate == null)
{
// Log it
CoreLogger.LogInformation($"Template not found '{name}'");
// Stop
return;
}
// Make sure visual output path ends with \
var outputPath = Configuration.MonitorPath;
if (!outputPath.EndsWith("\\"))
outputPath += '\\';
// Ask for extraction folder
CoreLogger.LogInformation($"Extract to: {outputPath}", newLine: false);
var destination = Console.ReadLine();
// Resolve path based on the monitor path being the root
destination = DnaConfiguration.ResolveFullPath(Configuration.MonitorPath, destination, true, out var wasRelative);
try
{
// Disable watching for now while we unzip
DisableWatching = true;
// Now try extracting this template to the specified folder
var successful = ZipHelpers.Unzip(foundTemplate.FilePath, destination);
// If we failed...
if (!successful)
// Log it
CoreLogger.LogInformation($"Template not found '{name}'");
// If we succeeded
else
// Log it
CoreLogger.Log($"Template {foundTemplate.Name} successfully extracted to {destination}", type: LogType.Success);
}
finally
{
// Wait for any pending timeouts
Task.Delay(Engines.Max(engine => engine.ProcessDelay) + 10).ContinueWith(async (t) =>
{
// Re-enable watching
DisableWatching = false;
// Reload configurations
LoadConfigurations();
// Regenerate entire system
await PostConfigurationMethods();
});
}
}
19
View Source File : AdjecentCellGrouper.cs
License : MIT License
Project Creator : AnnoDesigner
private Rect FindLargestGroup<T>(T[][] cells)
{
var largest = new Rect();
var column = new int[cells.Max(c => c.Length)];
for (var x = 0; x < cells.Length; x++)
{
for (var y = 0; y < cells[x].Length; y++)
column[y] = !EqualityComparer<T>.Default.Equals(cells[x][y], default) ? column[y] + 1 : 0;
var area = FindLargestAreaUnderHistogram(column);
if (largest.Width * largest.Height < area.width * area.height)
largest = new Rect(x - area.width + 1, area.y, area.width, area.height);
}
return largest;
}
19
View Source File : RoadSearchHelper.cs
License : MIT License
Project Creator : AnnoDesigner
public static bool[][] BreadthFirstSearch(
IEnumerable<AnnoObject> placedObjects,
IEnumerable<AnnoObject> startObjects,
Func<AnnoObject, int> rangeGetter,
Moved2DArray<AnnoObject> gridDictionary = null,
Action<AnnoObject> inRangeAction = null)
{
if (startObjects.Count() == 0)
{
return new bool[0][];
}
gridDictionary = gridDictionary ?? PrepareGridDictionary(placedObjects);
if (gridDictionary is null)
{
return new bool[0][];
}
inRangeAction = inRangeAction ?? DoNothing;
var visitedObjects = new HashSet<AnnoObject>(placedObjects.Count() / 2); // initial capacity is half of all placed objects to avoid resizing the HashSet
var visitedCells = Enumerable.Range(0, gridDictionary.Count).Select(i => new bool[gridDictionary[0].Length]).ToArrayWithCapacity(gridDictionary.Count);
var distanceToStartObjects = startObjects.ToLookup(o => rangeGetter(o));
var remainingDistance = distanceToStartObjects.Max(g => g.Key);
var currentCells = new List<(int x, int y)>();
var nextCells = new List<(int x, int y)>();
void ProcessCell(int x, int y)
{
if (!visitedCells[x][y] && gridDictionary[x][y] is AnnoObject cellObject)
{
if (cellObject.Road)
{
if (remainingDistance > 1)
{
nextCells.Add((x, y));
}
}
else if (visitedObjects.Add(cellObject))
{
inRangeAction(cellObject);
}
}
visitedCells[x][y] = true;
}
do
{
// ILookup returns empty collection if key is not found
// queue cells adjacent to starting objects, also set cells inside of all start objects as visited, to exclude them from the search
foreach (var startObject in distanceToStartObjects[remainingDistance])
{
var initRange = rangeGetter(startObject);
var startX = (int)startObject.Position.X - gridDictionary.Offset.x;
var startY = (int)startObject.Position.Y - gridDictionary.Offset.y;
var leftX = startX - 1;
var rightX = (int)(startX + startObject.Size.Width);
var topY = startY - 1;
var bottomY = (int)(startY + startObject.Size.Height);
// queue top and bottom edges
for (var i = 0; i < startObject.Size.Width; i++)
{
var x = i + startX;
if (gridDictionary[x][topY]?.Road == true)
{
nextCells.Add((x, topY));
visitedCells[x][topY] = true;
}
if (gridDictionary[x][bottomY]?.Road == true)
{
nextCells.Add((x, bottomY));
visitedCells[x][bottomY] = true;
}
}
// queue left and right edges
for (var i = 0; i < startObject.Size.Height; i++)
{
var y = i + startY;
if (gridDictionary[leftX][y]?.Road == true)
{
nextCells.Add((leftX, y));
visitedCells[leftX][y] = true;
}
if (gridDictionary[rightX][y]?.Road == true)
{
nextCells.Add((rightX, y));
visitedCells[rightX][y] = true;
}
}
// visit all cells under start object
visitedObjects.Add(startObject);
for (var i = 0; i < startObject.Size.Width; i++)
{
for (var j = 0; j < startObject.Size.Height; j++)
{
visitedCells[startX + i][startY + j] = true;
}
}
}
var temp = nextCells;
nextCells = currentCells;
currentCells = temp;
if (remainingDistance > 1)
{
foreach (var (x, y) in currentCells)
{
ProcessCell(x + 1, y);
if (x > 0)
{
ProcessCell(x - 1, y);
}
ProcessCell(x, y + 1);
if (y > 0)
{
ProcessCell(x, y - 1);
}
}
}
currentCells.Clear();
remainingDistance--;
} while (remainingDistance > 1);
return visitedCells;
}
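The search above keys its start objects with ToLookup and takes the largest range via Max(g => g.Key). A minimal sketch of that pattern on hypothetical ranges rather than AnnoObject instances:
using System;
using System.Linq;
class RangeLookupDemo
{
    static void Main()
    {
        var ranges = new[] { 3, 8, 5 };
        var byRange = ranges.ToLookup(r => r);
        var largest = byRange.Max(g => g.Key); // 8; lookup[key] is simply empty for missing keys
        Console.WriteLine(largest);
    }
}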
19
View Source File : AdjecentCellGrouperTests.cs
License : MIT License
Project Creator : AnnoDesigner
public static bool[][] ParseGrid(params string[] gridLines)
{
var preTranspose = gridLines.Select(line => line.Select(c => c == 'X').ToArray()).ToArray();
var postTranspose = Enumerable.Range(0, gridLines.Max(i => i.Length)).Select(i => new bool[gridLines.Length]).ToArray();
for (var i = 0; i < gridLines.Length; i++)
{
for (var j = 0; j < gridLines[i].Length; j++)
{
postTranspose[j][i] = preTranspose[i][j];
}
}
return postTranspose;
}
19
View Source File : PolygonBoundaryFinderHelperTests.cs
License : MIT License
Project Creator : AnnoDesigner
public static bool[][] ParseGrid(params string[] gridLines)
{
var preTranspose = gridLines.Select(line => line.Select(c => c == 'X').ToArray()).ToArray();
var postTranspose = Enumerable.Range(0, gridLines.Max(i => i.Length)).Select(i => new bool[gridLines.Length]).ToArray();
for (var i = 0; i < gridLines.Length; i++)
for (var j = 0; j < gridLines[i].Length; j++)
postTranspose[j][i] = preTranspose[i][j];
return postTranspose;
}
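Both ParseGrid helpers size the transposed grid from the longest input line with Max. A minimal sketch of that sizing step on hypothetical input:
using System;
using System.Linq;
class GridSizeDemo
{
    static void Main()
    {
        var lines = new[] { "X.X", "XXXXX" };
        var columns = lines.Max(l => l.Length); // 5
        var grid = Enumerable.Range(0, columns).Select(_ => new bool[lines.Length]).ToArray();
        Console.WriteLine($"{grid.Length} columns x {grid[0].Length} rows");
    }
}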
19
View Source File : SpellTable.cs
License : BSD 3-Clause "New" or "Revised" License
Project Creator : anoyetta
public IList<Spell> LoadFromFile(
string file)
{
var data = default(IList<Spell>);
if (!File.Exists(file))
{
return data;
}
using (var sr = new StreamReader(file, new UTF8Encoding(false)))
{
if (sr.BaseStream.Length > 0)
{
var xs = new XmlSerializer(table.GetType());
data = xs.Deserialize(sr) as IList<Spell>;
// Reassign the IDs
if (data != null)
{
var id = this.table.Any() ?
this.table.Max(x => x.ID) + 1 :
1;
foreach (var item in data)
{
item.ID = id++;
item.Guid = Guid.NewGuid();
}
}
}
}
return data;
}
19
View Source File : TickerTable.cs
License : BSD 3-Clause "New" or "Revised" License
Project Creator : anoyetta
public IList<Ticker> LoadFromFile(
string file)
{
var data = default(IList<Ticker>);
if (!File.Exists(file))
{
return data;
}
using (var sr = new StreamReader(file, new UTF8Encoding(false)))
{
if (sr.BaseStream.Length > 0)
{
var xs = new XmlSerializer(table.GetType());
data = xs.Deserialize(sr) as IList<Ticker>;
if (data != null)
{
var id = this.table.Any() ?
this.table.Max(x => x.ID) + 1 :
1;
foreach (var item in data)
{
item.Guid = Guid.NewGuid();
item.ID = id++;
}
}
}
}
return data;
}
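Both table loaders above compute the next free ID as Max(x => x.ID) + 1, guarding with Any() because Max throws on an empty sequence. A minimal sketch of that pattern with hypothetical IDs:
using System;
using System.Collections.Generic;
using System.Linq;
class NextIdDemo
{
    static void Main()
    {
        var ids = new List<long> { 1, 2, 5 };
        var nextId = ids.Any() ? ids.Max(x => x) + 1 : 1;
        Console.WriteLine(nextId); // 6
    }
}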
19
View Source File : HistogramsModel.cs
License : BSD 3-Clause "New" or "Revised" License
Project Creator : anoyetta
private static HistogramsModel CreateDesigntimeModel()
{
var model = new HistogramsModel()
{
SpecName = "Black Mage",
Ranks = new[]
{
new HistogramModel() { SpecName = "Black Mage", Rank = 1800, RankFrom = 1800, RankPercentile = 0.000 , FrequencyPercent = 0.016 },
new HistogramModel() { SpecName = "Black Mage", Rank = 1900, RankFrom = 1900, RankPercentile = 0.016 , FrequencyPercent = 0.000 },
new HistogramModel() { SpecName = "Black Mage", Rank = 2000, RankFrom = 2000, RankPercentile = 0.016 , FrequencyPercent = 0.000 },
new HistogramModel() { SpecName = "Black Mage", Rank = 2100, RankFrom = 2100, RankPercentile = 0.016 , FrequencyPercent = 0.000 },
new HistogramModel() { SpecName = "Black Mage", Rank = 2200, RankFrom = 2200, RankPercentile = 0.016 , FrequencyPercent = 0.000 },
new HistogramModel() { SpecName = "Black Mage", Rank = 2300, RankFrom = 2300, RankPercentile = 0.016 , FrequencyPercent = 0.016 },
new HistogramModel() { SpecName = "Black Mage", Rank = 2400, RankFrom = 2400, RankPercentile = 0.032 , FrequencyPercent = 0.000 },
new HistogramModel() { SpecName = "Black Mage", Rank = 2500, RankFrom = 2500, RankPercentile = 0.032 , FrequencyPercent = 0.000 },
new HistogramModel() { SpecName = "Black Mage", Rank = 2600, RankFrom = 2600, RankPercentile = 0.032 , FrequencyPercent = 0.000 },
new HistogramModel() { SpecName = "Black Mage", Rank = 2700, RankFrom = 2700, RankPercentile = 0.032 , FrequencyPercent = 0.000 },
new HistogramModel() { SpecName = "Black Mage", Rank = 2800, RankFrom = 2800, RankPercentile = 0.032 , FrequencyPercent = 0.000 },
new HistogramModel() { SpecName = "Black Mage", Rank = 2900, RankFrom = 2900, RankPercentile = 0.032 , FrequencyPercent = 0.016 },
new HistogramModel() { SpecName = "Black Mage", Rank = 3000, RankFrom = 3000, RankPercentile = 0.047 , FrequencyPercent = 0.047 },
new HistogramModel() { SpecName = "Black Mage", Rank = 3100, RankFrom = 3100, RankPercentile = 0.095 , FrequencyPercent = 0.032 },
new HistogramModel() { SpecName = "Black Mage", Rank = 3200, RankFrom = 3200, RankPercentile = 0.126 , FrequencyPercent = 0.000 },
new HistogramModel() { SpecName = "Black Mage", Rank = 3300, RankFrom = 3300, RankPercentile = 0.126 , FrequencyPercent = 0.032 },
new HistogramModel() { SpecName = "Black Mage", Rank = 3400, RankFrom = 3400, RankPercentile = 0.158 , FrequencyPercent = 0.000 },
new HistogramModel() { SpecName = "Black Mage", Rank = 3500, RankFrom = 3500, RankPercentile = 0.158 , FrequencyPercent = 0.032 },
new HistogramModel() { SpecName = "Black Mage", Rank = 3600, RankFrom = 3600, RankPercentile = 0.190 , FrequencyPercent = 0.047 },
new HistogramModel() { SpecName = "Black Mage", Rank = 3700, RankFrom = 3700, RankPercentile = 0.237 , FrequencyPercent = 0.047 },
new HistogramModel() { SpecName = "Black Mage", Rank = 3800, RankFrom = 3800, RankPercentile = 0.284 , FrequencyPercent = 0.047 },
new HistogramModel() { SpecName = "Black Mage", Rank = 3900, RankFrom = 3900, RankPercentile = 0.332 , FrequencyPercent = 0.095 },
new HistogramModel() { SpecName = "Black Mage", Rank = 4000, RankFrom = 4000, RankPercentile = 0.427 , FrequencyPercent = 0.111 },
new HistogramModel() { SpecName = "Black Mage", Rank = 4100, RankFrom = 4100, RankPercentile = 0.537 , FrequencyPercent = 0.111 },
new HistogramModel() { SpecName = "Black Mage", Rank = 4200, RankFrom = 4200, RankPercentile = 0.648 , FrequencyPercent = 0.142 },
new HistogramModel() { SpecName = "Black Mage", Rank = 4300, RankFrom = 4300, RankPercentile = 0.790 , FrequencyPercent = 0.253 },
new HistogramModel() { SpecName = "Black Mage", Rank = 4400, RankFrom = 4400, RankPercentile = 1.043 , FrequencyPercent = 0.363 },
new HistogramModel() { SpecName = "Black Mage", Rank = 4500, RankFrom = 4500, RankPercentile = 1.406 , FrequencyPercent = 0.300 },
new HistogramModel() { SpecName = "Black Mage", Rank = 4600, RankFrom = 4600, RankPercentile = 1.706 , FrequencyPercent = 0.427 },
new HistogramModel() { SpecName = "Black Mage", Rank = 4700, RankFrom = 4700, RankPercentile = 2.133 , FrequencyPercent = 0.237 },
new HistogramModel() { SpecName = "Black Mage", Rank = 4800, RankFrom = 4800, RankPercentile = 2.370 , FrequencyPercent = 0.537 },
new HistogramModel() { SpecName = "Black Mage", Rank = 4900, RankFrom = 4900, RankPercentile = 2.907 , FrequencyPercent = 0.648 },
new HistogramModel() { SpecName = "Black Mage", Rank = 5000, RankFrom = 5000, RankPercentile = 3.555 , FrequencyPercent = 1.059 },
new HistogramModel() { SpecName = "Black Mage", Rank = 5100, RankFrom = 5100, RankPercentile = 4.614 , FrequencyPercent = 1.027 },
new HistogramModel() { SpecName = "Black Mage", Rank = 5200, RankFrom = 5200, RankPercentile = 5.641 , FrequencyPercent = 1.201 },
new HistogramModel() { SpecName = "Black Mage", Rank = 5300, RankFrom = 5300, RankPercentile = 6.842 , FrequencyPercent = 1.738 },
new HistogramModel() { SpecName = "Black Mage", Rank = 5400, RankFrom = 5400, RankPercentile = 8.580 , FrequencyPercent = 1.659 },
new HistogramModel() { SpecName = "Black Mage", Rank = 5500, RankFrom = 5500, RankPercentile = 10.239 , FrequencyPercent = 1.975 },
new HistogramModel() { SpecName = "Black Mage", Rank = 5600, RankFrom = 5600, RankPercentile = 12.214 , FrequencyPercent = 2.212 },
new HistogramModel() { SpecName = "Black Mage", Rank = 5700, RankFrom = 5700, RankPercentile = 14.426 , FrequencyPercent = 2.639 },
new HistogramModel() { SpecName = "Black Mage", Rank = 5800, RankFrom = 5800, RankPercentile = 17.064 , FrequencyPercent = 3.429 },
new HistogramModel() { SpecName = "Black Mage", Rank = 5900, RankFrom = 5900, RankPercentile = 20.493 , FrequencyPercent = 3.492 },
new HistogramModel() { SpecName = "Black Mage", Rank = 6000, RankFrom = 6000, RankPercentile = 23.985 , FrequencyPercent = 3.760 },
new HistogramModel() { SpecName = "Black Mage", Rank = 6100, RankFrom = 6100, RankPercentile = 27.745 , FrequencyPercent = 4.282 },
new HistogramModel() { SpecName = "Black Mage", Rank = 6200, RankFrom = 6200, RankPercentile = 32.027 , FrequencyPercent = 4.677 },
new HistogramModel() { SpecName = "Black Mage", Rank = 6300, RankFrom = 6300, RankPercentile = 36.704 , FrequencyPercent = 5.498 },
new HistogramModel() { SpecName = "Black Mage", Rank = 6400, RankFrom = 6400, RankPercentile = 42.203 , FrequencyPercent = 5.024 },
new HistogramModel() { SpecName = "Black Mage", Rank = 6500, RankFrom = 6500, RankPercentile = 47.227 , FrequencyPercent = 5.309, IsCurrent = true },
new HistogramModel() { SpecName = "Black Mage", Rank = 6600, RankFrom = 6600, RankPercentile = 52.536 , FrequencyPercent = 5.894 },
new HistogramModel() { SpecName = "Black Mage", Rank = 6700, RankFrom = 6700, RankPercentile = 58.429 , FrequencyPercent = 5.135 },
new HistogramModel() { SpecName = "Black Mage", Rank = 6800, RankFrom = 6800, RankPercentile = 63.565 , FrequencyPercent = 4.661 },
new HistogramModel() { SpecName = "Black Mage", Rank = 6900, RankFrom = 6900, RankPercentile = 68.226 , FrequencyPercent = 4.076 },
new HistogramModel() { SpecName = "Black Mage", Rank = 7000, RankFrom = 7000, RankPercentile = 72.302 , FrequencyPercent = 4.234 },
new HistogramModel() { SpecName = "Black Mage", Rank = 7100, RankFrom = 7100, RankPercentile = 76.537 , FrequencyPercent = 3.666 },
new HistogramModel() { SpecName = "Black Mage", Rank = 7200, RankFrom = 7200, RankPercentile = 80.202 , FrequencyPercent = 3.365 },
new HistogramModel() { SpecName = "Black Mage", Rank = 7300, RankFrom = 7300, RankPercentile = 83.568 , FrequencyPercent = 2.781 },
new HistogramModel() { SpecName = "Black Mage", Rank = 7400, RankFrom = 7400, RankPercentile = 86.349 , FrequencyPercent = 2.496 },
new HistogramModel() { SpecName = "Black Mage", Rank = 7500, RankFrom = 7500, RankPercentile = 88.845 , FrequencyPercent = 2.465 },
new HistogramModel() { SpecName = "Black Mage", Rank = 7600, RankFrom = 7600, RankPercentile = 91.310 , FrequencyPercent = 1.375 },
new HistogramModel() { SpecName = "Black Mage", Rank = 7700, RankFrom = 7700, RankPercentile = 92.684 , FrequencyPercent = 1.422 },
new HistogramModel() { SpecName = "Black Mage", Rank = 7800, RankFrom = 7800, RankPercentile = 94.106 , FrequencyPercent = 1.343 },
new HistogramModel() { SpecName = "Black Mage", Rank = 7900, RankFrom = 7900, RankPercentile = 95.450 , FrequencyPercent = 1.106 },
new HistogramModel() { SpecName = "Black Mage", Rank = 8000, RankFrom = 8000, RankPercentile = 96.556 , FrequencyPercent = 1.027 },
new HistogramModel() { SpecName = "Black Mage", Rank = 8100, RankFrom = 8100, RankPercentile = 97.583 , FrequencyPercent = 0.679 },
new HistogramModel() { SpecName = "Black Mage", Rank = 8200, RankFrom = 8200, RankPercentile = 98.262 , FrequencyPercent = 0.521 },
new HistogramModel() { SpecName = "Black Mage", Rank = 8300, RankFrom = 8300, RankPercentile = 98.783 , FrequencyPercent = 0.458 },
new HistogramModel() { SpecName = "Black Mage", Rank = 8400, RankFrom = 8400, RankPercentile = 99.242 , FrequencyPercent = 0.205 },
new HistogramModel() { SpecName = "Black Mage", Rank = 8500, RankFrom = 8500, RankPercentile = 99.447 , FrequencyPercent = 0.284 },
new HistogramModel() { SpecName = "Black Mage", Rank = 8600, RankFrom = 8600, RankPercentile = 99.731 , FrequencyPercent = 0.126 },
new HistogramModel() { SpecName = "Black Mage", Rank = 8700, RankFrom = 8700, RankPercentile = 99.858 , FrequencyPercent = 0.032 },
new HistogramModel() { SpecName = "Black Mage", Rank = 8800, RankFrom = 8800, RankPercentile = 99.889 , FrequencyPercent = 0.000 },
new HistogramModel() { SpecName = "Black Mage", Rank = 8900, RankFrom = 8900, RankPercentile = 99.889 , FrequencyPercent = 0.016 },
new HistogramModel() { SpecName = "Black Mage", Rank = 9000, RankFrom = 9000, RankPercentile = 99.905 , FrequencyPercent = 0.016 },
new HistogramModel() { SpecName = "Black Mage", Rank = 9100, RankFrom = 9100, RankPercentile = 99.921 , FrequencyPercent = 0.016 },
new HistogramModel() { SpecName = "Black Mage", Rank = 9200, RankFrom = 9200, RankPercentile = 99.937 , FrequencyPercent = 0.016 },
new HistogramModel() { SpecName = "Black Mage", Rank = 9300, RankFrom = 9300, RankPercentile = 99.953 , FrequencyPercent = 0.016 },
new HistogramModel() { SpecName = "Black Mage", Rank = 9400, RankFrom = 9400, RankPercentile = 99.968 , FrequencyPercent = 0.016 },
new HistogramModel() { SpecName = "Black Mage", Rank = 9500, RankFrom = 9500, RankPercentile = 99.984 , FrequencyPercent = 0.000 },
new HistogramModel() { SpecName = "Black Mage", Rank = 9600, RankFrom = 9600, RankPercentile = 99.984 , FrequencyPercent = 0.016 },
},
};
var i = 1;
foreach (var rank in model.Ranks)
{
rank.ID = i++;
}
model.MaxRank = model.Ranks.Max(x => x.Rank);
model.MinRank = model.Ranks.Min(x => x.Rank);
model.MaxFrequencyPercent = Math.Ceiling(model.Ranks.Max(x => x.FrequencyPercent));
foreach (var rank in model.Ranks)
{
rank.FrequencyRatioToMaximum = rank.FrequencyPercent / model.MaxFrequencyPercent;
}
return model;
}
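The design-time model above derives its axis bounds with Max, Min and a ceiling on the peak frequency. A minimal sketch of those three calls on hypothetical values:
using System;
using System.Linq;
class BoundsDemo
{
    static void Main()
    {
        var ranks = new[] { (Rank: 1800, Freq: 0.016), (Rank: 6600, Freq: 5.894) };
        var maxRank = ranks.Max(x => x.Rank); // 6600
        var minRank = ranks.Min(x => x.Rank); // 1800
        var maxFrequencyPercent = Math.Ceiling(ranks.Max(x => x.Freq)); // 6
        Console.WriteLine($"{minRank}-{maxRank}, peak {maxFrequencyPercent}%");
    }
}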
19
View Source File : StatisticsDatabase.cs
License : BSD 3-Clause "New" or "Revised" License
Project Creator : anoyetta
public async Task CreateHistogramAsync(
string rankingFileName)
{
if (!File.Exists(rankingFileName))
{
return;
}
using (var cn = this.OpenRankingDatabaseConnection(rankingFileName))
{
using (var tran = cn.BeginTransaction())
{
using (var cm = cn.CreateCommand())
{
cm.Transaction = tran;
var q = new StringBuilder();
q.AppendLine("DELETE FROM histograms;");
cm.CommandText = q.ToString();
await cm.ExecuteNonQueryAsync();
}
tran.Commit();
}
using (var db = new DataContext(cn))
using (var tran = cn.BeginTransaction())
{
db.Transaction = tran;
var rankings = db.GetTable<RankingModel>().ToArray();
var averages =
from x in rankings
group x by
x.CharacterHash
into g
select new
{
SpecName = g.First().Spec,
DPSAverage = g.Average(z => z.Total),
Rank = ((int)(g.Average(z => z.Total)) / 100) * 100,
};
var histograms =
from x in averages
group x by new
{
x.SpecName,
x.Rank
}
into g
select new
{
g.Key.SpecName,
g.Key.Rank,
RankFrom = g.Key.Rank,
Frequency = (double)g.Count(),
};
var id = 1;
var specs =
from x in histograms
orderby
x.SpecName,
x.Rank
group x by
x.SpecName;
var entities = new List<HistogramModel>(histograms.Count());
foreach (var spec in specs)
{
var totalCount = spec.Sum(x => x.Frequency);
var count = 0d;
var rankMin = spec.Min(x => x.Rank);
var rankMax = spec.Max(x => x.Rank);
for (int i = rankMin; i <= rankMax; i += 100)
{
var entry = spec.FirstOrDefault(x => x.Rank == i);
var f = entry?.Frequency ?? 0;
var hist = new HistogramModel()
{
ID = id++,
SpecName = spec.Key,
Rank = i,
RankFrom = i,
Frequency = f,
FrequencyPercent = round(f / totalCount * 100d),
RankPercentile = round(count / totalCount * 100d),
};
entities.Add(hist);
count += f;
}
}
var table = db.GetTable<HistogramModel>();
table.InsertAllOnSubmit<HistogramModel>(entities);
db.SubmitChanges();
// Clear the rankings table
using (var cm = cn.CreateCommand())
{
cm.Transaction = tran;
var q = new StringBuilder();
q.AppendLine("DELETE FROM rankings;");
cm.CommandText = q.ToString();
await cm.ExecuteNonQueryAsync();
}
tran.Commit();
}
// Optimize the database
using (var cm = cn.CreateCommand())
{
var q = new StringBuilder();
q.AppendLine("VACUUM;");
q.AppendLine("PRAGMA Optimize;");
cm.CommandText = q.ToString();
await cm.ExecuteNonQueryAsync();
}
}
double round(double value)
{
return float.Parse(value.ToString("N3"));
}
}
19
View Source File : BaseBatchMessageHandler.cs
License : MIT License
Project Creator : AntonyVorontsov
private async Task Handle(IEnumerable<BasicDeliverEventArgs> messages, CancellationToken cancellationToken)
{
var messagesCollection = messages.ToList();
await HandleMessages(messagesCollection, cancellationToken).ConfigureAwait(false);
var latestDeliveryTag = messagesCollection.Max(x => x.DeliveryTag);
Channel.EnsureIsNotNull().BasicAck(latestDeliveryTag, true);
}
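The batch handler above acknowledges everything up to the highest delivery tag in the batch. A minimal sketch of picking that tag with Max, on hypothetical tags and without the RabbitMQ channel:
using System;
using System.Linq;
class BatchAckDemo
{
    static void Main()
    {
        ulong[] deliveryTags = { 11, 12, 13 };
        var latestDeliveryTag = deliveryTags.Max(x => x); // 13
        Console.WriteLine($"BasicAck({latestDeliveryTag}, multiple: true)");
    }
}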
19
View Source File : LocalFilesTileSource.cs
License : MIT License
Project Creator : apdevelop
Task ITileSource.InitAsync()
{
// Configuration values priority:
// 1. Default values for local files source type.
// 2. Actual values (from first found tile properties).
// 3. Values from configuration file - override the values above, if provided.
// Detect the zoom level range by building a list of folders
var zoomLevels = new List<int>();
var xIndex = this.configuration.Location.IndexOf("{x}", StringComparison.InvariantCultureIgnoreCase);
var yIndex = this.configuration.Location.IndexOf("{y}", StringComparison.InvariantCultureIgnoreCase);
var zIndex = this.configuration.Location.IndexOf("{z}", StringComparison.InvariantCultureIgnoreCase);
if ((zIndex < yIndex) && (zIndex < xIndex))
{
var baseFolder = new Uri(this.configuration.Location.Substring(0, zIndex)).LocalPath;
foreach (var directory in Directory.GetDirectories(baseFolder))
{
if (Int32.TryParse(Path.GetFileName(directory), out int zoomLevel))
{
zoomLevels.Add(zoomLevel);
}
}
}
var title = String.IsNullOrEmpty(this.configuration.Title) ?
this.configuration.Id :
this.configuration.Title;
var minZoom = this.configuration.MinZoom ?? (zoomLevels.Count > 0 ? zoomLevels.Min(z => z) : 0);
var maxZoom = this.configuration.MaxZoom ?? (zoomLevels.Count > 0 ? zoomLevels.Max(z => z) : 24);
// Re-create configuration
this.configuration = new TileSourceConfiguration
{
Id = this.configuration.Id,
Type = this.configuration.Type,
Format = this.configuration.Format, // TODO: from file properties (extension)
Title = title,
Tms = this.configuration.Tms ?? false, // Default is tms=false for file storage
Srs = Utils.SrsCodes.EPSG3857, // TODO: support for EPSG:4326
Location = this.configuration.Location,
ContentType = Utils.EntitiesConverter.TileFormatToContentType(this.configuration.Format), // TODO: from file properties
MinZoom = minZoom,
MaxZoom = maxZoom,
};
return Task.CompletedTask;
}
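InitAsync above falls back to default zoom bounds when no level folders were found, otherwise it takes Min/Max of the discovered levels. A minimal sketch of that fallback with hypothetical values:
using System;
using System.Collections.Generic;
using System.Linq;
class ZoomRangeDemo
{
    static void Main()
    {
        var zoomLevels = new List<int> { 2, 5, 9 };
        int? configuredMin = null, configuredMax = null;
        var minZoom = configuredMin ?? (zoomLevels.Count > 0 ? zoomLevels.Min(z => z) : 0);
        var maxZoom = configuredMax ?? (zoomLevels.Count > 0 ? zoomLevels.Max(z => z) : 24);
        Console.WriteLine($"{minZoom}-{maxZoom}"); // 2-9
    }
}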
19
View Source File : RasterTileSource.cs
License : MIT License
Project Creator : apdevelop
private void DrawGeoTiffTilesToRasterCanvas(
Bitmap outputImage,
M.Bounds tileBounds,
IList<GeoTiff.TileCoordinates> sourceTileCoordinates,
int backgroundColor,
int sourceTileWidth,
int sourceTileHeight)
{
var tileMinX = sourceTileCoordinates.Min(t => t.X);
var tileMinY = sourceTileCoordinates.Min(t => t.Y);
var tilesCountX = sourceTileCoordinates.Max(t => t.X) - tileMinX + 1;
var tilesCountY = sourceTileCoordinates.Max(t => t.Y) - tileMinY + 1;
var canvasWidth = tilesCountX * sourceTileWidth;
var canvasHeight = tilesCountY * sourceTileHeight;
// TODO: ? scale before draw to reduce memory allocation
// TODO: check max canvas size
var canvas = U.ImageHelper.CreateEmptyPngImage(canvasWidth, canvasHeight, backgroundColor);
using (var canvasImageStream = new MemoryStream(canvas))
{
using (var canvasImage = new Bitmap(canvasImageStream))
{
using (var graphics = Graphics.FromImage(canvasImage))
{
// Draw all source tiles without scaling
foreach (var sourceTile in sourceTileCoordinates)
{
var pixelX = sourceTile.X * this.rasterProperties.TileWidth;
var pixelY = sourceTile.Y * this.rasterProperties.TileHeight;
if ((pixelX >= this.rasterProperties.ImageWidth) || (pixelY >= this.rasterProperties.ImageHeight))
{
continue;
}
var imageBuffer = ReadTiffTile(
this.configuration.Location,
this.rasterProperties.TileWidth,
this.rasterProperties.TileHeight,
this.rasterProperties.TileSize,
pixelX,
pixelY);
const int PixelDataSize = 4;
var stride = this.rasterProperties.TileWidth * PixelDataSize;
var handle = GCHandle.Alloc(imageBuffer, GCHandleType.Pinned);
Bitmap sourceImage = null;
try
{
var offsetX = (sourceTile.X - tileMinX) * sourceTileWidth;
var offsetY = (sourceTile.Y - tileMinY) * sourceTileHeight;
sourceImage = new Bitmap(this.rasterProperties.TileWidth, this.rasterProperties.TileHeight, stride, PixelFormat.Format32bppArgb, handle.AddrOfPinnedObject());
if ((sourceImage.HorizontalResolution == canvasImage.HorizontalResolution) &&
(sourceImage.VerticalResolution == canvasImage.VerticalResolution))
{
graphics.DrawImageUnscaled(sourceImage, offsetX, offsetY);
}
else
{
graphics.DrawImage(sourceImage, new Rectangle(offsetX, offsetY, sourceImage.Width, sourceImage.Height));
}
// For debug
////using var borderPen = new Pen(Color.Magenta, 5.0f);
////graphics.DrawRectangle(borderPen, new Rectangle(offsetX, offsetY, sourceImage.Width, sourceImage.Height));
////graphics.DrawString($"R = {sourceTile.Y * this.geoTiffInfo.TileHeight}", new Font("Arial", 36.0f), Brushes.Magenta, offsetX, offsetY);
}
finally
{
handle.Free();
sourceImage.Dispose();
}
}
}
// TODO: ! better image transformation / reprojection between coordinate systems
var pixelOffsetX = XToGeoTiffPixelX(this.rasterProperties, tileBounds.Left) - sourceTileWidth * tileMinX;
var pixelOffsetY = YToGeoTiffPixelY(this.rasterProperties, tileBounds.Top) - sourceTileHeight * tileMinY;
var pixelWidth = XToGeoTiffPixelX(this.rasterProperties, tileBounds.Right) - XToGeoTiffPixelX(this.rasterProperties, tileBounds.Left);
var pixelHeight = YToGeoTiffPixelY(this.rasterProperties, tileBounds.Bottom) - YToGeoTiffPixelY(this.rasterProperties, tileBounds.Top);
var sourceRectangle = new Rectangle(
(int)Math.Round(pixelOffsetX),
(int)Math.Round(pixelOffsetY),
(int)Math.Round(pixelWidth),
(int)Math.Round(pixelHeight));
// Clip and scale to requested size of output image
var destRectangle = new Rectangle(0, 0, outputImage.Width, outputImage.Height);
using (var graphics = Graphics.FromImage(outputImage))
{
graphics.InterpolationMode = System.Drawing.Drawing2D.InterpolationMode.Bicubic;
graphics.DrawImage(canvasImage, destRectangle, sourceRectangle, GraphicsUnit.Pixel);
}
}
}
}
19
View Source File : WmsHelper.cs
License : MIT License
Project Creator : apdevelop
public static void DrawWebMercatorTilesToRasterCanvas(
Bitmap outputImage,
Models.Bounds boundingBox,
IList<Models.TileDataset> sourceTiles,
int backgroundColor,
int tileSize)
{
var zoom = sourceTiles[0].Z;
var tileMinX = sourceTiles.Min(t => t.X);
var tileMinY = sourceTiles.Min(t => t.Y);
var tilesCountX = sourceTiles.Max(t => t.X) - tileMinX + 1;
var tilesCountY = sourceTiles.Max(t => t.Y) - tileMinY + 1;
var canvasWidth = tilesCountX * tileSize;
var canvasHeight = tilesCountY * tileSize;
var canvas = ImageHelper.CreateEmptyPngImage(canvasWidth, canvasHeight, backgroundColor);
using (var canvasImageStream = new MemoryStream(canvas))
{
using (var canvasImage = new Bitmap(canvasImageStream))
{
using (var graphics = Graphics.FromImage(canvasImage))
{
// Draw all tiles without scaling
foreach (var sourceTile in sourceTiles)
{
var offsetX = (sourceTile.X - tileMinX) * tileSize;
var offsetY = (sourceTile.Y - tileMinY) * tileSize;
using (var sourceStream = new MemoryStream(sourceTile.ImageData))
{
using (var sourceImage = Image.FromStream(sourceStream))
{
if ((sourceImage.HorizontalResolution == canvasImage.HorizontalResolution) &&
(sourceImage.VerticalResolution == canvasImage.VerticalResolution))
{
graphics.DrawImageUnscaled(sourceImage, offsetX, offsetY);
}
else
{
graphics.DrawImage(sourceImage, new Rectangle(offsetX, offsetY, sourceImage.Width, sourceImage.Height));
}
}
}
}
}
var geoBBox = EntitiesConverter.MapRectangleToGeographicalBounds(boundingBox);
var pixelOffsetX = WebMercator.LongitudeToPixelXAtZoom(geoBBox.MinLongitude, zoom) - tileSize * tileMinX;
var pixelOffsetY = WebMercator.LatitudeToPixelYAtZoom(geoBBox.MaxLatitude, zoom) - tileSize * tileMinY;
var pixelWidth = WebMercator.LongitudeToPixelXAtZoom(geoBBox.MaxLongitude, zoom) - WebMercator.LongitudeToPixelXAtZoom(geoBBox.MinLongitude, zoom);
var pixelHeight = WebMercator.LatitudeToPixelYAtZoom(geoBBox.MinLatitude, zoom) - WebMercator.LatitudeToPixelYAtZoom(geoBBox.MaxLatitude, zoom);
var sourceRectangle = new Rectangle(
(int)Math.Round(pixelOffsetX),
(int)Math.Round(pixelOffsetY),
(int)Math.Round(pixelWidth),
(int)Math.Round(pixelHeight));
// Clip and scale to requested size of output image
var destRectangle = new Rectangle(0, 0, outputImage.Width, outputImage.Height);
using (var graphics = Graphics.FromImage(outputImage))
{
graphics.InterpolationMode = System.Drawing.Drawing2D.InterpolationMode.Bicubic;
graphics.DrawImage(canvasImage, destRectangle, sourceRectangle, GraphicsUnit.Pixel);
}
}
}
}
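Both raster helpers above size their mosaic canvas from the Min/Max of the source tile indices. A minimal sketch of that extent calculation with hypothetical tile coordinates:
using System;
using System.Linq;
class CanvasSizeDemo
{
    static void Main()
    {
        var tiles = new[] { (X: 10, Y: 20), (X: 12, Y: 21) };
        const int tileSize = 256;
        var canvasWidth = (tiles.Max(t => t.X) - tiles.Min(t => t.X) + 1) * tileSize;  // 768
        var canvasHeight = (tiles.Max(t => t.Y) - tiles.Min(t => t.Y) + 1) * tileSize; // 512
        Console.WriteLine($"{canvasWidth}x{canvasHeight}");
    }
}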
19
View Source File : ExcelWorksheets.cs
License : Apache License 2.0
Project Creator : Appdynamics
private void GetSheetURI(ref string Name, out int sheetID, out Uri uriWorksheet, bool isChart)
{
Name = ValidateFixSheetName(Name);
sheetID = this.Any() ? this.Max(ws => ws.SheetID) + 1 : 1;
var uriId = sheetID;
// get the next available worksheet uri
do
{
if (isChart)
{
uriWorksheet = new Uri("/xl/chartsheets/chartsheet" + uriId + ".xml", UriKind.Relative);
}
else
{
uriWorksheet = new Uri("/xl/worksheets/sheet" + uriId + ".xml", UriKind.Relative);
}
uriId++;
} while (_pck.Package.PartExists(uriWorksheet));
}
19
View Source File : BindableMKMapView.cs
License : Apache License 2.0
Project Creator : AppRopio
public void ZoomToAll()
{
if (Items == null || Items.Count < 1 || Items.All(x => x.Coordinates == null))
return;
try
{
double minLatitude = Items.Min(x => x.Coordinates?.Latitude ?? double.MaxValue);
double maxLatitude = Items.Max(x => x.Coordinates?.Latitude ?? double.MinValue);
double minLongitude = Items.Min(x => x.Coordinates?.Longitude ?? double.MaxValue);
double maxLongitude = Items.Max(x => x.Coordinates?.Longitude ?? double.MinValue);
MKCoordinateRegion region;
region.Center.Latitude = (minLatitude + maxLatitude) / 2;
region.Center.Longitude = (minLongitude + maxLongitude) / 2;
double latitudeDelta = (maxLatitude - minLatitude) * MAP_PADDING;
region.Span.LatitudeDelta = (latitudeDelta < MIN_VISIBLE_LATITUDE) ? MIN_VISIBLE_LATITUDE : latitudeDelta;
region.Span.LongitudeDelta = (maxLongitude - minLongitude) * MAP_PADDING;
SetRegion(RegionThatFits(region), true);
}
catch (Exception ex)
{
Mvx.IoCProvider.Resolve<IMvxLog>().Error(ex.ToString());
}
}
19
View Source File : Operation.cs
License : MIT License
Project Creator : ar1st0crat
public static void NormalizePeak(float[] samples, double peakDb)
{
var norm = (float)Scale.FromDecibel(peakDb) / samples.Max(x => Math.Abs(x));
for (var i = 0; i < samples.Length; i++)
{
samples[i] *= norm;
}
}
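NormalizePeak above scales every sample against the largest absolute value found with Max. A minimal sketch of that peak search and scaling with hypothetical samples, using a plain target peak instead of the project's Scale.FromDecibel helper:
using System;
using System.Linq;
class PeakDemo
{
    static void Main()
    {
        float[] samples = { 0.2f, -0.8f, 0.5f };
        var peak = samples.Max(x => Math.Abs(x)); // 0.8
        var norm = 0.5f / peak;                   // scale so the peak becomes 0.5
        var scaled = samples.Select(s => s * norm).ToArray();
        Console.WriteLine(string.Join(", ", scaled));
    }
}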
19
View Source File : IFilterExtensions.cs
License : MIT License
Project Creator : ar1st0crat
public static float EstimateGain(this IOnlineFilter filter, int fftSize = 512)
{
var unit = DiscreteSignal.Unit(fftSize);
// get impulse response
var response = unit.Samples.Select(s => filter.Process(s)).ToArray();
// get frequency response
var spectrum = new float[fftSize / 2 + 1];
var fft = new RealFft(fftSize);
fft.MagnitudeSpectrum(response, spectrum);
return 1 / spectrum.Max(s => Math.Abs(s));
}