Here are examples of the C# API System.Collections.Generic.HashSet&lt;long&gt;.Contains(long), taken from open source projects. By voting up you can indicate which examples are most useful and appropriate.
77 Examples
19
Source : Program.cs
with MIT License
from Alan-FGR
with MIT License
from Alan-FGR
/// <summary>
/// Benchmarks THREADS concurrent Add workers, then verifies that every
/// produced id is unique (any duplicate fails the run). Prints the verdict,
/// the item count and the elapsed time, and records the timing.
/// </summary>
private void Bench()
{
    nums.Clear();
    current = 0;
    Thread[] threads = new Thread[THREADS];
    var sw = Stopwatch.StartNew();
    for (int i = 0; i < THREADS; i++) threads[i] = new Thread(Add);
    for (int i = 0; i < THREADS; i++) threads[i].Start();
    for (int i = 0; i < THREADS; i++) threads[i].Join();
    var elapsed = sw.ElapsedMilliseconds;
    bool passed = true;
    HashSet<long> check = new HashSet<long>();
    foreach (TS i in nums)
    {
        // HashSet<T>.Add returns false when the id was already seen,
        // replacing the original Contains + Add double lookup.
        if (!check.Add(i.id))
        {
            passed = false;
            break;
        }
    }
    Console.WriteLine($"{passed}, {nums.Count}, {elapsed}ms");
    resultList_.Add(elapsed);
}
19
Source : AdminVerifier.cs
with GNU Affero General Public License v3.0
from b11p
with GNU Affero General Public License v3.0
from b11p
/// <summary>Reports whether the given QQ id belongs to an admin.</summary>
/// <param name="qq">QQ account id to test.</param>
/// <returns>A completed task holding true when the id is in the admin collection.</returns>
public Task<bool> IsAdminAsync(long qq)
{
    bool isAdmin = AdminCollection.Contains(qq);
    return Task.FromResult(isAdmin);
}
19
Source : HardcodedProvider.cs
with GNU Affero General Public License v3.0
from b11p
with GNU Affero General Public License v3.0
from b11p
/// <summary>Reports whether messages from the given QQ id should be ignored.</summary>
/// <param name="qq">QQ account id to test.</param>
/// <returns>A completed task holding true when the id is on the ignore list.</returns>
// Fix: the method was declared async with no await (compiler warning CS1998,
// needless state-machine allocation); return Task.FromResult directly instead.
public Task<bool> ShouldIgnoreAsync(long qq) => Task.FromResult(IgnoreList.Contains(qq));
19
Source : HardcodedProvider.cs
with GNU Affero General Public License v3.0
from b11p
with GNU Affero General Public License v3.0
from b11p
/// <summary>
/// Reports whether performance output should be ignored for the given QQ id,
/// which only ever applies inside the newbie group.
/// </summary>
/// <param name="group">Group the message came from.</param>
/// <param name="qq">QQ account id to test.</param>
/// <returns>A completed task holding the verdict.</returns>
// Fixes: async with no await (CS1998) replaced by Task.FromResult, and the
// `cond ? x : false` ternary simplified to the equivalent `&&`.
public Task<bool> ShouldIgnorePerformanceAsync(long group, long qq) =>
    Task.FromResult(group == NewbieGroupId && IgnorePerformanceListBase.Contains(qq));
19
Source : LogDataDto.cs
with MIT License
from baaron4
with MIT License
from baaron4
/// <summary>
/// Collects, per spec, the personal buffs that actually had uptime on at least one
/// friendly actor of that spec, and mirrors the result into the two output maps.
/// </summary>
/// <param name="log">Parsed log to inspect.</param>
/// <param name="persBuffDict">Output: spec name -> personal buff ids kept.</param>
/// <param name="usedBuffs">Output: buff id -> buff, for every buff kept.</param>
/// <returns>Personal buffs grouped by spec.</returns>
private static Dictionary<Spec, IReadOnlyList<Buff>> BuildPersonalBuffData(ParsedEvtcLog log, Dictionary<string, List<long>> persBuffDict, Dictionary<long, Buff> usedBuffs)
{
    var boonsBySpec = new Dictionary<Spec, IReadOnlyList<Buff>>();
    // Collect all personal buffs by spec
    foreach (KeyValuePair<Spec, List<AbstractSingleActor>> pair in log.FriendliesListBySpec)
    {
        List<AbstractSingleActor> friendlies = pair.Value;
        var specBoonIds = new HashSet<long>(log.Buffs.GetPersonalBuffsList(pair.Key).Select(x => x.ID));
        var boonToUse = new HashSet<Buff>();
        foreach (AbstractSingleActor actor in friendlies)
        {
            foreach (PhaseData phase in log.FightData.GetPhases(log))
            {
                IReadOnlyDictionary<long, FinalActorBuffs> boons = actor.GetBuffs(BuffEnum.Self, log, phase.Start, phase.End);
                foreach (Buff boon in log.StatisticsHelper.GetPresentRemainingBuffsOnPlayer(actor))
                {
                    // Keep a buff only when it is spec-personal and had actual uptime in this phase.
                    if (boons.TryGetValue(boon.ID, out FinalActorBuffs uptime))
                    {
                        if (uptime.Uptime > 0 && specBoonIds.Contains(boon.ID))
                        {
                            boonToUse.Add(boon);
                        }
                    }
                }
            }
        }
        boonsBySpec[pair.Key] = boonToUse.ToList();
    }
    foreach (KeyValuePair<Spec, IReadOnlyList<Buff>> pair in boonsBySpec)
    {
        // Hoist the list out of the inner loop instead of re-doing
        // pair.Key.ToString() + a dictionary lookup for every buff id.
        var buffIDs = new List<long>();
        persBuffDict[pair.Key.ToString()] = buffIDs;
        foreach (Buff boon in pair.Value)
        {
            buffIDs.Add(boon.ID);
            usedBuffs[boon.ID] = boon;
        }
    }
    return boonsBySpec;
}
19
Source : BuffSourceFinder.cs
with MIT License
from baaron4
with MIT License
from baaron4
/// <summary>
/// Returns the cached extension-capable casts that match the given skill ids
/// and are active around <paramref name="time"/> (with server delay tolerance).
/// </summary>
private List<AbstractCastEvent> GetExtensionSkills(ParsedEvtcLog log, long time, HashSet<long> idsToKeep)
{
    // Lazily build the cache of non-interrupted extension casts, once per instance.
    if (_extensionSkills == null)
    {
        _extensionSkills = log.PlayerList
            .SelectMany(p => p.GetIntersectingCastEvents(log, 0, log.FightData.FightEnd))
            .Where(x => ExtensionIDS.Contains(x.SkillId) && x.Status != AbstractCastEvent.AnimationStatus.Interrupted)
            .ToList();
    }
    var matching = new List<AbstractCastEvent>();
    foreach (AbstractCastEvent cast in _extensionSkills)
    {
        if (idsToKeep.Contains(cast.SkillId) && cast.Time <= time && time <= cast.EndTime + ParserHelper.ServerDelayConstant)
        {
            matching.Add(cast);
        }
    }
    return matching;
}
19
Source : BuffSourceFinder.cs
with MIT License
from baaron4
with MIT License
from baaron4
/// <summary>
/// Attempts to identify the source agent of a boon extension applied to
/// <paramref name="dst"/>. Returns the unknown agent whenever several
/// candidate sources cannot be disambiguated.
/// </summary>
/// <param name="dst">Agent that received the extension.</param>
/// <param name="time">Time at which the extension happened.</param>
/// <param name="extension">Extension duration.</param>
/// <param name="log">Parsed log.</param>
/// <param name="buffID">Id of the extended buff.</param>
public Agenreplacedem TryFindSrc(Agenreplacedem dst, long time, long extension, ParsedEvtcLog log, long buffID)
{
    // Only boons are handled; anything else is treated as a self-extension.
    if (!_boonIds.Contains(buffID))
    {
        return dst;
    }
    List<Agenreplacedem> imperialImpactCheck = CouldBeImperialImpact(extension, time, log);
    // Multiple imperial impact candidates: cannot disambiguate at all.
    if (imperialImpactCheck.Count > 1)
    {
        return ParserHelper._unknownAgent;
    }
    // Tri-state: 1 = definitely essence of speed, 0 = maybe, -1 = definitely not
    // (assumed from the comparisons below — TODO confirm against CouldBeEssenceOfSpeed).
    int essenceOfSpeedCheck = CouldBeEssenceOfSpeed(dst, extension, buffID, log);
    // can only be the soulbeast
    if (essenceOfSpeedCheck == 1)
    {
        return dst;
    }
    HashSet<long> idsToCheck = GetIDs(log, buffID, extension);
    if (idsToCheck.Any())
    {
        List<AbstractCastEvent> cls = GetExtensionSkills(log, time, idsToCheck);
        // If only one cast item
        if (cls.Count == 1)
        {
            AbstractCastEvent item = cls.First();
            // If uncertainty due to essence of speed, imbued melodies or imperial impact, return unknown
            if (essenceOfSpeedCheck == 0 || CouldBeImbuedMelodies(item.Caster, time, extension, log) || imperialImpactCheck.Any())
            {
                return ParserHelper._unknownAgent;
            }
            // otherwise the src is the caster
            return item.Caster;
        }
        // If no cast item and
        else if (!cls.Any())
        {
            // If uncertainty due to imbued melodies, return unknown
            if (CouldBeImbuedMelodies(dst, time, extension, log))
            {
                return ParserHelper._unknownAgent;
            }
            // uncertainty due to essence of speed but not due to imperial impact
            if (essenceOfSpeedCheck == 0 && !imperialImpactCheck.Any())
            {
                // the soulbeast
                return dst;
            }
            // uncertainty due to imperial impact but not due to essence of speed
            if (essenceOfSpeedCheck == -1 && imperialImpactCheck.Count == 1)
            {
                // the vindicator
                return imperialImpactCheck.First();
            }
        }
    }
    // Fallback: more than one cast candidate, or nothing matched above.
    return ParserHelper._unknownAgent;
}
19
Source : ElementalistHelper.cs
with MIT License
from baaron4
with MIT License
from baaron4
/// <summary>
/// Invalidates every dual-attunement buff event aimed at the destination and
/// then drops the invalidated events (now carrying NoBuff) from the by-id map.
/// </summary>
/// <param name="buffsPerDst">Buff events targeting one destination.</param>
/// <param name="buffsByID">Buff events grouped by buff id; pruned in place.</param>
/// <param name="skillData">Skill data used for invalidation.</param>
public static void RemoveDualBuffs(IReadOnlyList<AbstractBuffEvent> buffsPerDst, Dictionary<long, List<AbstractBuffEvent>> buffsByID, SkillData skillData)
{
    var dualAttunements = new HashSet<long> { FireDual, WaterDual, AirDual, EarthDual };
    var invalidatedIDs = new HashSet<long>();
    foreach (AbstractBuffEvent evt in buffsPerDst)
    {
        if (!dualAttunements.Contains(evt.BuffID))
        {
            continue;
        }
        invalidatedIDs.Add(evt.BuffID);
        evt.Invalidate(skillData);
    }
    foreach (long buffID in invalidatedIDs)
    {
        buffsByID[buffID].RemoveAll(x => x.BuffID == NoBuff);
    }
}
19
Source : WeaverHelper.cs
with MIT License
from baaron4
with MIT License
from baaron4
/// <summary>
/// Replaces raw weaver attunement buff events with synthesized apply/remove events
/// for the effective (translated) attunement, invalidating and pruning the originals.
/// </summary>
/// <param name="buffs">Buff events for one agent.</param>
/// <param name="buffsByID">Buff events grouped by id; pruned in place.</param>
/// <param name="a">Agent the synthesized events are attributed to.</param>
/// <param name="skillData">Skill data used for invalidation and id lookup.</param>
/// <returns>The synthesized buff events.</returns>
public static List<AbstractBuffEvent> TransformWeaverAttunements(IReadOnlyList<AbstractBuffEvent> buffs, Dictionary<long, List<AbstractBuffEvent>> buffsByID, Agenreplacedem a, SkillData skillData)
{
    var res = new List<AbstractBuffEvent>();
    // Standard elementalist attunement buff ids (hard-coded game ids).
    var attunements = new HashSet<long>
    {
        5585,
        5586,
        5575,
        5580
    };
    // not useful for us
    /*const long fireAir = 45162;
    const long fireEarth = 42756;
    const long fireWater = 45502;
    const long waterAir = 46418;
    const long waterEarth = 42792;
    const long airEarth = 45683;*/
    var weaverAttunements = new HashSet<long>
    {
        FireMajor,
        FireMinor,
        WaterMajor,
        WaterMinor,
        AirMajor,
        AirMinor,
        EarthMajor,
        EarthMinor,
        FireDual,
        WaterDual,
        AirDual,
        EarthDual,
        /*fireAir,
        fireEarth,
        fireWater,
        waterAir,
        waterEarth,
        airEarth,*/
    };
    // first we get rid of standard attunements
    var toClean = new HashSet<long>();
    var attuns = buffs.Where(x => attunements.Contains(x.BuffID)).ToList();
    foreach (AbstractBuffEvent c in attuns)
    {
        toClean.Add(c.BuffID);
        c.Invalidate(skillData);
    }
    // get all weaver attunements ids and group them by time
    var weaverAttuns = buffs.Where(x => weaverAttunements.Contains(x.BuffID)).ToList();
    if (weaverAttuns.Count == 0)
    {
        return res;
    }
    Dictionary<long, List<AbstractBuffEvent>> groupByTime = GroupByTime(weaverAttuns);
    long prevID = 0;
    foreach (KeyValuePair<long, List<AbstractBuffEvent>> pair in groupByTime)
    {
        var applies = pair.Value.OfType<BuffApplyEvent>().ToList();
        // Resolve the effective attunement id for this time group.
        long curID = TranslateWeaverAttunement(applies);
        // Invalidate every raw event of the group; they will be pruned below.
        foreach (AbstractBuffEvent c in pair.Value)
        {
            toClean.Add(c.BuffID);
            c.Invalidate(skillData);
        }
        // 0 means the group could not be translated; emit nothing for it.
        if (curID == 0)
        {
            continue;
        }
        uint curInstanceID = applies.First().BuffInstance;
        // Apply the new attunement, and remove the previous one if there was any.
        res.Add(new BuffApplyEvent(a, a, pair.Key, int.MaxValue, skillData.Get(curID), curInstanceID, true));
        if (prevID != 0)
        {
            res.Add(new BuffRemoveManualEvent(a, a, pair.Key, int.MaxValue, skillData.Get(prevID)));
            res.Add(new BuffRemoveAllEvent(a, a, pair.Key, int.MaxValue, skillData.Get(prevID), 1, int.MaxValue));
        }
        prevID = curID;
    }
    // Drop the invalidated (NoBuff) events from the by-id map.
    foreach (long buffID in toClean)
    {
        buffsByID[buffID].RemoveAll(x => x.BuffID == NoBuff);
    }
    return res;
}
19
Source : MesmerHelper.cs
with MIT License
from baaron4
with MIT License
from baaron4
// True when the agent item's id is a known mesmer clone id.
internal static bool IsClone(Agenreplacedem agenreplacedem) => _cloneIDs.Contains(agenreplacedem.ID);
19
Source : MesmerHelper.cs
with MIT License
from baaron4
with MIT License
from baaron4
// True when the id is a known mesmer clone id.
internal static bool IsClone(long id) => _cloneIDs.Contains(id);
19
Source : RevenantHelper.cs
with MIT License
from baaron4
with MIT License
from baaron4
// True when the skill id is a revenant legend-swap id.
public static bool IsLegendSwap(long id) => _legendSwaps.Contains(id);
19
Source : JsonPlayerBuilder.cs
with MIT License
from baaron4
with MIT License
from baaron4
/// <summary>
/// Builds the JSON buff-uptime entries for a player across all non-dummy phases,
/// and registers graph-only profession buffs with positive uptime into
/// <paramref name="personalBuffs"/> keyed by spec name.
/// </summary>
private static List<JsonBuffsUptime> GetPlayerJsonBuffsUptime(AbstractSingleActor player, List<IReadOnlyDictionary<long, FinalActorBuffs>> buffs, ParsedEvtcLog log, RawFormatSettings settings, Dictionary<string, JsonLog.BuffDesc> buffDesc, Dictionary<string, HashSet<long>> personalBuffs)
{
    var res = new List<JsonBuffsUptime>();
    var profEnums = new HashSet<ParserHelper.Source>(SpecToSources(player.Spec));
    IReadOnlyList<PhaseData> phases = log.FightData.GetNonDummyPhases(log);
    foreach (KeyValuePair<long, FinalActorBuffs> pair in buffs[0])
    {
        Buff buff = log.Buffs.BuffsByIds[pair.Key];
        var data = new List<JsonBuffsUptimeData>();
        for (int i = 0; i < phases.Count; i++)
        {
            PhaseData phase = phases[i];
            Dictionary<long, FinalBuffsDictionary> buffsDictionary = player.GetBuffsDictionary(log, phase.Start, phase.End);
            if (buffs[i].TryGetValue(pair.Key, out FinalActorBuffs val))
            {
                JsonBuffsUptimeData value = JsonBuffsUptimeBuilder.BuildJsonBuffsUptimeData(val, buffsDictionary[pair.Key]);
                data.Add(value);
            }
            else
            {
                // Buff absent in this phase: emit an empty placeholder so indices line up.
                var value = new JsonBuffsUptimeData();
                data.Add(value);
            }
        }
        if (buff.Nature == Buff.BuffNature.GraphOnlyBuff && profEnums.Contains(buff.Source))
        {
            if (player.GetBuffDistribution(log, phases[0].Start, phases[0].End).GetUptime(pair.Key) > 0)
            {
                if (personalBuffs.TryGetValue(player.Spec.ToString(), out HashSet<long> list))
                {
                    // BUG FIX: the original combined the TryGetValue with
                    // "&& !list.Contains(pair.Key)", so an id that was already present
                    // fell through to the else branch and REPLACED the whole set with a
                    // singleton, discarding previously collected ids. HashSet.Add is a
                    // no-op on duplicates, so simply adding is both correct and cheaper.
                    list.Add(pair.Key);
                }
                else
                {
                    personalBuffs[player.Spec.ToString()] = new HashSet<long>()
                    {
                        pair.Key
                    };
                }
            }
        }
        res.Add(JsonBuffsUptimeBuilder.BuildJsonBuffsUptime(player, pair.Key, log, settings, data, buffDesc));
    }
    return res;
}
19
Source : WeaverHelper.cs
with MIT License
from baaron4
with MIT License
from baaron4
/// <summary>
/// Resolves the effective weaver attunement buff id from a group of simultaneous
/// buff applies: a dual attunement id wins outright; otherwise the result is the
/// single id in the intersection of the matched major and minor translation sets.
/// </summary>
/// <param name="buffApplies">Buff apply events that happened at the same time.</param>
/// <returns>The resolved buff id, or 0 when the major or minor half is missing.</returns>
/// <exception cref="InvalidDataException">When the major/minor intersection is not a single id.</exception>
private static long TranslateWeaverAttunement(List<BuffApplyEvent> buffApplies)
{
    // More than 3 distinct ids can appear when the in-game attunement bug happens;
    // a throw used to guard that case but was deliberately removed.
    var duals = new HashSet<long>
    {
        FireDual,
        WaterDual,
        AirDual,
        EarthDual
    };
    HashSet<long> major = null;
    HashSet<long> minor = null;
    foreach (BuffApplyEvent c in buffApplies)
    {
        if (duals.Contains(c.BuffID))
        {
            return c.BuffID;
        }
        // TryGetValue replaces the original ContainsKey + indexer double lookup.
        if (_majorsTranslation.TryGetValue(c.BuffID, out HashSet<long> majorSet))
        {
            major = majorSet;
        }
        else if (_minorsTranslation.TryGetValue(c.BuffID, out HashSet<long> minorSet))
        {
            minor = minorSet;
        }
    }
    if (major == null || minor == null)
    {
        return 0;
    }
    IEnumerable<long> inter = major.Intersect(minor);
    if (inter.Count() != 1)
    {
        throw new InvalidDataException("Intersection incorrect in TranslateWeaverAttunement");
    }
    return inter.First();
}
19
Source : SkillData.cs
with MIT License
from baaron4
with MIT License
from baaron4
// True when the skill id is flagged as having inaccurate data.
public bool IsNotAccurate(long ID) => NotAccurate.Contains(ID);
19
Source : SingleActorBuffsHelper.cs
with MIT License
from baaron4
with MIT License
from baaron4
/// <summary>
/// Builds the per-buff graph models for this actor: simulates every tracked buff,
/// converts the simulation into gap-free segment lists, and maintains the derived
/// boon/condition/minion/clone presence graphs.
/// </summary>
/// <param name="log">Parsed log providing buff metadata, fight bounds and minions.</param>
private void SetBuffGraphs(ParsedEvtcLog log)
{
    _buffGraphs = new Dictionary<long, BuffsGraphModel>();
    // Lazily compute the buff event map on first use.
    if (_buffMap == null)
    {
        ComputeBuffMap(log);
    }
    BuffDictionary buffMap = _buffMap;
    long dur = log.FightData.FightEnd;
    int fightDuration = (int)(dur) / 1000;
    var boonPresenceGraph = new BuffsGraphModel(log.Buffs.BuffsByIds[NumberOfBoonsID]);
    var activeCombatMinionsGraph = new BuffsGraphModel(log.Buffs.BuffsByIds[NumberOfActiveCombatMinionsID]);
    BuffsGraphModel numberOfClonesGraph = null;
    // Clone tracking only applies to specs that can summon clones.
    var canSummonClones = MesmerHelper.CanSummonClones(Actor.Spec);
    if (canSummonClones)
    {
        numberOfClonesGraph = new BuffsGraphModel(log.Buffs.BuffsByIds[NumberOfClonesID]);
    }
    var condiPresenceGraph = new BuffsGraphModel(log.Buffs.BuffsByIds[NumberOfConditionsID]);
    var boonIds = new HashSet<long>(log.Buffs.BuffsByNature[BuffNature.Boon].Select(x => x.ID));
    var condiIds = new HashSet<long>(log.Buffs.BuffsByNature[BuffNature.Condition].Select(x => x.ID));
    // Init status
    _buffDistribution = new CachingCollection<BuffDistribution>(log);
    _buffPresence = new CachingCollection<Dictionary<long, long>>(log);
    foreach (Buff buff in GetTrackedBuffs(log))
    {
        long buffID = buff.ID;
        if (buffMap.TryGetValue(buffID, out List<AbstractBuffEvent> buffEvents) && buffEvents.Count != 0 && !_buffGraphs.ContainsKey(buffID))
        {
            AbstractBuffSimulator simulator;
            try
            {
                simulator = buff.CreateSimulator(log, false);
                simulator.Simulate(buffEvents, dur);
            }
            catch (EIBuffSimulatorIDException)
            {
                // The id-based simulation failed; drop the non-compliant events and
                // retry with the fallback simulator mode.
                log.UpdateProgressWithCancellationCheck("Failed id based simulation on " + Actor.Character + " for " + buff.Name);
                buffEvents.RemoveAll(x => !x.IsBuffSimulatorCompliant(log.FightData.FightEnd, false));
                simulator = buff.CreateSimulator(log, true);
                simulator.Simulate(buffEvents, dur);
            }
            _buffSimulators[buffID] = simulator;
            bool updateBoonPresence = boonIds.Contains(buffID);
            bool updateCondiPresence = condiIds.Contains(buffID);
            var graphSegments = new List<Segment>();
            foreach (BuffSimulationItem simul in simulator.GenerationSimulation)
            {
                // Graph: insert zero-valued filler segments so the chart has no gaps.
                var segment = simul.ToSegment();
                if (graphSegments.Count == 0)
                {
                    graphSegments.Add(new Segment(0, segment.Start, 0));
                }
                else if (graphSegments.Last().End != segment.Start)
                {
                    graphSegments.Add(new Segment(graphSegments.Last().End, segment.Start, 0));
                }
                graphSegments.Add(segment);
            }
            // Graph object creation: pad to fight end, or produce a single empty segment.
            if (graphSegments.Count > 0)
            {
                graphSegments.Add(new Segment(graphSegments.Last().End, dur, 0));
            }
            else
            {
                graphSegments.Add(new Segment(0, dur, 0));
            }
            _buffGraphs[buffID] = new BuffsGraphModel(buff, graphSegments);
            // Fold this buff's chart into the matching aggregate presence graph.
            if (updateBoonPresence || updateCondiPresence)
            {
                (updateBoonPresence ? boonPresenceGraph : condiPresenceGraph).MergePresenceInto(_buffGraphs[buffID].BuffChart);
            }
        }
    }
    _buffGraphs[NumberOfBoonsID] = boonPresenceGraph;
    _buffGraphs[NumberOfConditionsID] = condiPresenceGraph;
    // Accumulate minion lifespans into the active-minion (and clone) graphs.
    foreach (Minions minions in Actor.GetMinions(log).Values)
    {
        IReadOnlyList<IReadOnlyList<Segment>> segments = minions.GetLifeSpanSegments(log);
        foreach (IReadOnlyList<Segment> minionsSegments in segments)
        {
            activeCombatMinionsGraph.MergePresenceInto(minionsSegments);
        }
        if (canSummonClones && MesmerHelper.IsClone(minions.ID))
        {
            foreach (IReadOnlyList<Segment> minionsSegments in segments)
            {
                numberOfClonesGraph.MergePresenceInto(minionsSegments);
            }
        }
    }
    // Only publish the aggregate graphs that actually have data.
    if (activeCombatMinionsGraph.BuffChart.Any())
    {
        _buffGraphs[NumberOfActiveCombatMinionsID] = activeCombatMinionsGraph;
    }
    if (canSummonClones && numberOfClonesGraph.BuffChart.Any())
    {
        _buffGraphs[NumberOfClonesID] = numberOfClonesGraph;
    }
}
19
Source : ElementalistHelper.cs
with MIT License
from baaron4
with MIT License
from baaron4
// True when the skill id is an elementalist attunement-swap id.
public static bool IsElementalSwap(long id) => _elementalSwaps.Contains(id);
19
Source : EXTHealingCombatData.cs
with MIT License
from baaron4
with MIT License
from baaron4
/// <summary>
/// Classifies a healing skill id, memoizing the result in EncounteredIDs.
/// Hybrid ids are known up front; otherwise a skill that ever dealt real health
/// damage (ignoring double-proc hits) is conversion based, else healing power.
/// </summary>
public EXTHealingType GetHealingType(long id, ParsedEvtcLog log)
{
    if (HybridHealIDs.Contains(id))
    {
        return EXTHealingType.Hybrid;
    }
    // Return the cached classification when this id was already seen.
    if (EncounteredIDs.TryGetValue(id, out EXTHealingType cached))
    {
        return cached;
    }
    bool dealsDamage = log.CombatData.GetDamageData(id).Any(x => x.HealthDamage > 0 && !x.DoubleProcHit);
    EXTHealingType computed = dealsDamage ? EXTHealingType.ConversionBased : EXTHealingType.HealingPower;
    EncounteredIDs[id] = computed;
    return computed;
}
19
Source : SmartScriptSolutionItem.cs
with The Unlicense
from BAndysc
with The Unlicense
from BAndysc
/// <summary>
/// Synchronizes Items with the set of used timed-action entries: existing items
/// whose entry is not used are removed, and every used entry without an item
/// gets a new timed-action-list item appended.
/// </summary>
/// <param name="usedTimed">Used entries; consumed (emptied of matched entries) by this call.</param>
public void UpdateDependants(HashSet<long> usedTimed)
{
    // Walk backwards so RemoveAt does not disturb the unvisited indices.
    for (int i = Items.Count - 1; i >= 0; --i)
    {
        long entry = ((SmartScriptSolutionItem)Items[i]).Entry;
        // Remove returns false when the entry was absent, replacing the original
        // Contains + Remove double lookup (and the duplicated cast).
        if (!usedTimed.Remove(entry))
        {
            Items.RemoveAt(i);
        }
    }
    // Whatever survived in usedTimed has no existing item yet.
    foreach (var t in usedTimed)
    {
        Items.Add(new SmartScriptSolutionItem((int)t, SmartScriptType.TimedActionList));
    }
}
19
Source : InPacketCleaner.cs
with GNU General Public License v3.0
from BlowaXD
with GNU General Public License v3.0
from BlowaXD
/// <summary>
/// Reads a packet capture, keeps only the interesting packet lines (at/in 2/in 3/eff/mv),
/// de-duplicates "mv" packets by mover id, and writes the result (tabs replaced by
/// spaces) to <paramref name="outputPath"/> as UTF-8.
/// </summary>
/// <param name="filePath">Input capture file, read as codepage 1252.</param>
/// <param name="outputPath">Destination file.</param>
public void Filter(string filePath, string outputPath)
{
    string tmp = File.ReadAllText(filePath, Encoding.GetEncoding(1252));
    string[] lines = tmp.Split(new[] { "\r\n", "\r", "\n" }, StringSplitOptions.None);
    var moverIds = new HashSet<long>();
    foreach (string line in lines.Where(s => s.StartsWith("at") || s.StartsWith("in 2") || s.StartsWith("in 3") || s.StartsWith("eff") || s.StartsWith("mv")))
    {
        if (line.StartsWith("mv"))
        {
            string[] currentPacket = line.Split('\t', ' ');
            long moverId = long.Parse(currentPacket[2]);
            // Add returns false for an already-seen mover id: keep only the first
            // "mv" packet per mover (replaces the Contains + Add double lookup).
            if (!moverIds.Add(moverId))
            {
                continue;
            }
        }
        _packetList.Add(line);
    }
    var builder = new StringBuilder();
    foreach (string packet in _packetList)
    {
        builder.AppendLine(packet);
    }
    builder.Replace('\t', ' ');
    File.WriteAllText(outputPath, builder.ToString(), Encoding.UTF8);
}
19
Source : Migrator.cs
with MIT License
from Byndyusoft
with MIT License
from Byndyusoft
/// <summary>
/// Applies every not-yet-applied migration in ascending version order,
/// each inside its own committed session.
/// </summary>
public void Migrate()
{
    HashSet<long> appliedVersions;
    using (var session = _sessionsFactory.Create())
    {
        CheckAndCreateVersionTable(session);
        appliedVersions = new HashSet<long>(GetAppliedVersions(session));
        session.Commit();
    }
    // Idiomatic '!Contains(...)' instead of '... == false'.
    var newMigrations = _migrations.Where(x => !appliedVersions.Contains(x.Version)).OrderBy(k => k.Version).ToArray();
    foreach (var newMigration in newMigrations)
    {
        using (var session = _sessionsFactory.Create())
        {
            // Scripts are split on GO separators and executed with no timeout.
            var commands = newMigration.SqlSource.Split(new[] { "\nGO\r\n" }, StringSplitOptions.RemoveEmptyEntries);
            foreach (var command in commands)
            {
                session.Execute(command, commandTimeout: 0);
            }
            AddAppliedVersion(session, newMigration.Version);
            session.Commit();
        }
    }
}
19
Source : SecurityExchangeHours.cs
with Apache License 2.0
from Capnode
with Apache License 2.0
from Capnode
/// <summary>
/// Determines whether the market is open at any point within the given local time
/// interval, walking the interval day by day so that per-day holidays, early
/// closes and late opens are respected.
/// </summary>
/// <param name="startLocalDateTime">Interval start (exchange-local time).</param>
/// <param name="endLocalDateTime">Interval end (exchange-local time).</param>
/// <param name="extendedMarket">True to include extended market hours.</param>
/// <returns>True when the market is open at some moment inside the interval.</returns>
[MethodImpl(MethodImplOptions.AggressiveInlining)]
public bool IsOpen(DateTime startLocalDateTime, DateTime endLocalDateTime, bool extendedMarket)
{
    if (startLocalDateTime == endLocalDateTime)
    {
        // if we're testing an instantaneous moment, use the other function
        return IsOpen(startLocalDateTime, extendedMarket);
    }
    // we must make intra-day requests to LocalMarketHours, so check for a day gap
    var start = startLocalDateTime;
    // Clamp the first sub-interval's end to the last tick of the start day.
    var end = new DateTime(Math.Min(endLocalDateTime.Ticks, start.Date.Ticks + Time.OneDay.Ticks - 1));
    do
    {
        if (!_holidays.Contains(start.Date.Ticks) && !IsTimeAfterEarlyClose(start) && !IsTimeBeforeLateOpen(start))
        {
            // check to see if the market is open
            var marketHours = GetMarketHours(start.DayOfWeek);
            if (marketHours.IsOpen(start.TimeOfDay, end.TimeOfDay, extendedMarket))
            {
                return true;
            }
        }
        // Advance one calendar day; re-clamp the end to the requested interval.
        start = start.Date.AddDays(1);
        end = new DateTime(Math.Min(endLocalDateTime.Ticks, end.Ticks + Time.OneDay.Ticks));
    }
    while (end > start);
    return false;
}
19
Source : SecurityExchangeHours.cs
with Apache License 2.0
from Capnode
with Apache License 2.0
from Capnode
/// <summary>
/// Determines whether the market trades at all on the given local date:
/// the day must have market hours and must not be a holiday.
/// </summary>
public bool IsDateOpen(DateTime localDateTime)
{
    // No hours defined for this weekday => closed regardless of holidays.
    if (GetMarketHours(localDateTime.DayOfWeek).IsClosedAllDay)
    {
        return false;
    }
    return !_holidays.Contains(localDateTime.Date.Ticks);
}
19
Source : SecurityExchangeHours.cs
with Apache License 2.0
from Capnode
with Apache License 2.0
from Capnode
/// <summary>
/// Finds the next market open strictly after <paramref name="localDateTime"/>,
/// honoring holidays, late opens and early closes, searching day by day.
/// </summary>
/// <param name="localDateTime">Starting point (exchange-local time).</param>
/// <param name="extendedMarket">True to consider extended market hours opens.</param>
/// <returns>The next market open.</returns>
/// <exception cref="ArgumentException">When no open is found within the search window.</exception>
public DateTime GetNextMarketOpen(DateTime localDateTime, bool extendedMarket)
{
    var time = localDateTime;
    // Renamed from the misleading 'oneWeekLater': the window is actually 15 days.
    var searchLimit = localDateTime.Date.AddDays(15);
    do
    {
        var marketHours = GetMarketHours(time.DayOfWeek);
        if (!marketHours.IsClosedAllDay && !_holidays.Contains(time.Date.Ticks))
        {
            TimeSpan lateOpenTime;
            if (_lateOpens.TryGetValue(time.Date, out lateOpenTime))
            {
                var lateOpenDateTime = time.Date.Add(lateOpenTime);
                if (time < lateOpenDateTime)
                    return lateOpenDateTime;
                // Already past the late open; try the next day.
                time = time.Date + Time.OneDay;
                continue;
            }
            TimeSpan earlyCloseTime;
            if (_earlyCloses.TryGetValue(time.Date, out earlyCloseTime))
            {
                var earlyCloseDateTime = time.Date.Add(earlyCloseTime);
                if (time > earlyCloseDateTime)
                {
                    // Past the early close; nothing more opens today.
                    time = time.Date + Time.OneDay;
                    continue;
                }
            }
            var marketOpenTimeOfDay = marketHours.GetMarketOpen(time.TimeOfDay, extendedMarket);
            if (marketOpenTimeOfDay.HasValue)
            {
                var marketOpen = time.Date + marketOpenTimeOfDay.Value;
                if (localDateTime < marketOpen)
                {
                    return marketOpen;
                }
            }
        }
        time = time.Date + Time.OneDay;
    }
    while (time < searchLimit);
    throw new ArgumentException("Unable to locate next market open within two weeks.");
}
19
Source : SecurityExchangeHours.cs
with Apache License 2.0
from Capnode
with Apache License 2.0
from Capnode
/// <summary>
/// Determines whether the market is open at the given local instant,
/// accounting for holidays, early closes and late opens.
/// </summary>
public bool IsOpen(DateTime localDateTime, bool extendedMarket)
{
    // Day-level exclusions first; short-circuits exactly like the original checks.
    bool excludedToday = _holidays.Contains(localDateTime.Date.Ticks)
        || IsTimeAfterEarlyClose(localDateTime)
        || IsTimeBeforeLateOpen(localDateTime);
    if (excludedToday)
    {
        return false;
    }
    var marketHours = GetMarketHours(localDateTime.DayOfWeek);
    return marketHours.IsOpen(localDateTime.TimeOfDay, extendedMarket);
}
19
Source : SecurityExchangeHours.cs
with Apache License 2.0
from Capnode
with Apache License 2.0
from Capnode
/// <summary>
/// Finds the next market close strictly after <paramref name="localDateTime"/>,
/// honoring holidays, early closes and late opens, searching day by day.
/// </summary>
/// <param name="localDateTime">Starting point (exchange-local time).</param>
/// <param name="extendedMarket">True to consider extended market hours closes.</param>
/// <returns>The next market close.</returns>
/// <exception cref="ArgumentException">When no close is found within the search window.</exception>
public DateTime GetNextMarketClose(DateTime localDateTime, bool extendedMarket)
{
    var time = localDateTime;
    // Renamed from the misleading 'oneWeekLater': the window is actually 15 days.
    var searchLimit = localDateTime.Date.AddDays(15);
    do
    {
        var marketHours = GetMarketHours(time.DayOfWeek);
        if (!marketHours.IsClosedAllDay && !_holidays.Contains(time.Date.Ticks))
        {
            TimeSpan earlyCloseTime;
            if (_earlyCloses.TryGetValue(time.Date, out earlyCloseTime))
            {
                var earlyCloseDateTime = time.Date.Add(earlyCloseTime);
                if (time < earlyCloseDateTime)
                    return earlyCloseDateTime;
                // Already past the early close; try the next day.
                time = time.Date + Time.OneDay;
                continue;
            }
            TimeSpan lateOpenTime;
            if (_lateOpens.TryGetValue(time.Date, out lateOpenTime))
            {
                var lateOpenDateTime = time.Date.Add(lateOpenTime);
                if (time < lateOpenDateTime)
                {
                    // Jump to the late open before evaluating the regular close.
                    time = lateOpenDateTime;
                    continue;
                }
            }
            var marketCloseTimeOfDay = marketHours.GetMarketClose(time.TimeOfDay, extendedMarket);
            if (marketCloseTimeOfDay.HasValue)
            {
                var marketClose = time.Date + marketCloseTimeOfDay.Value;
                if (localDateTime < marketClose)
                {
                    return marketClose;
                }
            }
        }
        time = time.Date + Time.OneDay;
    }
    while (time < searchLimit);
    throw new ArgumentException("Unable to locate next market close within two weeks.");
}
19
Source : ProcessController.cs
with MIT License
from CUSTIS-public
with MIT License
from CUSTIS-public
/// <summary>
/// Returns thread info for a process: detailed data from ClrMD when available,
/// supplemented by OS-level ProcessThread data for threads ClrMD did not report.
/// Each supplemental read runs under a cancellable task with a bounded wait.
/// </summary>
/// <param name="pid">Target process id.</param>
[HttpGet("{pid}/threads")]
public ThreadsResult GetThreads(int pid)
{
    var result = new ThreadsResult();
    var infos = new ConcurrentBag<ThreadInfo>();
    try
    {
        infos = GetThreadsFromClrMd(pid);
    }
    catch (Exception e)
    {
        // Typo fix in the message ("occured" -> "occurred").
        result.ErrorMessage = $"One or more errors occurred while retrieving detailed info about threads: " +
            $"{Environment.NewLine}{GetErrorMessage(e)}";
    }
    using var tokenSource = new CancellationTokenSource();
    var token = tokenSource.Token;
    var tasks = new List<Task>();
    var touched = new HashSet<long>(infos.Select(i => i.Id));
    using var proc = Process.GetProcessById(pid);
    foreach (var thread in proc.Threads.Cast<ProcessThread>())
    {
        // Add returns false when ClrMD already reported this thread id,
        // replacing the Contains + Add double lookup.
        if (!touched.Add(thread.Id))
        {
            continue;
        }
        var t = TasksHelper.DoWithInterrupt(() =>
        {
            var threadInfo = new ThreadInfo(thread);
            token.ThrowIfCancellationRequested();
            infos.Add(threadInfo);
        }, token);
        tasks.Add(t);
    }
    tokenSource.CancelAfter(TasksHelper.WaitTime);
    try
    {
        Task.WaitAll(tasks.ToArray(), TasksHelper.WaitTime);
    }
    catch { }
    result.Infos = infos;
    return result;
}
19
Source : DataFrame.Join.cs
with MIT License
from dotnet
with MIT License
from dotnet
/// <summary>
/// Joins this DataFrame (left) with <paramref name="other"/> (right) on the given
/// column pairs using the requested algorithm, cloning rows from both frames into
/// the result and suffixing duplicated column names.
/// </summary>
/// <param name="other">Right-hand DataFrame.</param>
/// <param name="leftJoinColumns">Join columns of this frame.</param>
/// <param name="rightJoinColumns">Join columns of the other frame.</param>
/// <param name="leftSuffix">Suffix for duplicated left column names.</param>
/// <param name="rightSuffix">Suffix for duplicated right column names.</param>
/// <param name="joinAlgorithm">Left, Right, Inner or FullOuter.</param>
/// <returns>The joined DataFrame.</returns>
/// <exception cref="ArgumentNullException">When <paramref name="other"/> is null.</exception>
/// <exception cref="NotImplementedException">For unsupported join algorithms.</exception>
public DataFrame Merge(DataFrame other, string[] leftJoinColumns, string[] rightJoinColumns, string leftSuffix = "_left", string rightSuffix = "_right", JoinAlgorithm joinAlgorithm = JoinAlgorithm.Left)
{
    if (other == null)
        throw new ArgumentNullException(nameof(other));
    //In Outer join the joined dataframe retains each row — even if no other matching row exists in supplementary dataframe.
    //Outer joins subdivide further into left outer joins (left dataframe is retained), right outer joins (right dataframe is retained), in full outer both are retained
    PrimitiveDataFrameColumn<long> retainedRowIndices;
    PrimitiveDataFrameColumn<long> supplementaryRowIndices;
    DataFrame supplementaryDataFrame;
    DataFrame retainedDataFrame;
    bool isLeftDataFrameRetained;
    if (joinAlgorithm == JoinAlgorithm.Left || joinAlgorithm == JoinAlgorithm.Right)
    {
        // The retained frame keeps all its rows; the other side only matches.
        isLeftDataFrameRetained = (joinAlgorithm == JoinAlgorithm.Left);
        supplementaryDataFrame = isLeftDataFrameRetained ? other : this;
        var supplementaryJoinColumns = isLeftDataFrameRetained ? rightJoinColumns : leftJoinColumns;
        retainedDataFrame = isLeftDataFrameRetained ? this : other;
        var retainedJoinColumns = isLeftDataFrameRetained ? leftJoinColumns : rightJoinColumns;
        Merge(retainedDataFrame, supplementaryDataFrame, retainedJoinColumns, supplementaryJoinColumns, out retainedRowIndices, out supplementaryRowIndices);
    }
    else if (joinAlgorithm == JoinAlgorithm.Inner)
    {
        // use as supplementary (for Hashing) the dataframe with the smaller RowCount
        isLeftDataFrameRetained = (Rows.Count > other.Rows.Count);
        supplementaryDataFrame = isLeftDataFrameRetained ? other : this;
        var supplementaryJoinColumns = isLeftDataFrameRetained ? rightJoinColumns : leftJoinColumns;
        retainedDataFrame = isLeftDataFrameRetained ? this : other;
        var retainedJoinColumns = isLeftDataFrameRetained ? leftJoinColumns : rightJoinColumns;
        Merge(retainedDataFrame, supplementaryDataFrame, retainedJoinColumns, supplementaryJoinColumns, out retainedRowIndices, out supplementaryRowIndices, true);
    }
    else if (joinAlgorithm == JoinAlgorithm.FullOuter)
    {
        //In full outer join we would like to retain data from both side, so we do it into 2 steps: one first we do LEFT JOIN and then add lost data from the RIGHT side
        //Step 1
        //Do LEFT JOIN
        isLeftDataFrameRetained = true;
        supplementaryDataFrame = isLeftDataFrameRetained ? other : this;
        var supplementaryJoinColumns = isLeftDataFrameRetained ? rightJoinColumns : leftJoinColumns;
        retainedDataFrame = isLeftDataFrameRetained ? this : other;
        var retainedJoinColumns = isLeftDataFrameRetained ? leftJoinColumns : rightJoinColumns;
        var intersection = Merge(retainedDataFrame, supplementaryDataFrame, retainedJoinColumns, supplementaryJoinColumns, out retainedRowIndices, out supplementaryRowIndices, calculateIntersection: true);
        //Step 2
        //Do RIGHT JOIN to retain all data from supplementary DataFrame too (take into account data intersection from the first step to avoid duplicates)
        for (long i = 0; i < supplementaryDataFrame.Columns.RowCount; i++)
        {
            var columns = supplementaryJoinColumns.Select(name => supplementaryDataFrame.Columns[name]).ToArray();
            if (!IsAnyNullValueInColumns(columns, i))
            {
                // Rows already matched in step 1 live in 'intersection'; skip them.
                if (!intersection.Contains(i))
                {
                    retainedRowIndices.Append(null);
                    supplementaryRowIndices.Append(i);
                }
            }
        }
    }
    else
        throw new NotImplementedException(nameof(joinAlgorithm));
    DataFrame ret = new DataFrame();
    //insert columns from left dataframe (this)
    for (int i = 0; i < this.Columns.Count; i++)
    {
        ret.Columns.Insert(i, this.Columns[i].Clone(isLeftDataFrameRetained ? retainedRowIndices : supplementaryRowIndices));
    }
    //insert columns from right dataframe (other)
    for (int i = 0; i < other.Columns.Count; i++)
    {
        DataFrameColumn column = other.Columns[i].Clone(isLeftDataFrameRetained ? supplementaryRowIndices : retainedRowIndices);
        SetSuffixForDuplicatedColumnNames(ret, column, leftSuffix, rightSuffix);
        ret.Columns.Insert(ret.Columns.Count, column);
    }
    return ret;
}
19
Source : DataFrame.Join.cs
with MIT License
from dotnet
with MIT License
from dotnet
/// <summary>
/// Core join: computes, for every retained row, the matching supplementary row
/// indices across all join-column pairs, emitting parallel index columns used to
/// clone the joined result. Optionally records which supplementary rows matched
/// (for full outer joins) and can behave as an inner join.
/// </summary>
/// <returns>The matched supplementary row indices when <paramref name="calculateIntersection"/> is true; otherwise null.</returns>
private static HashSet<long> Merge(DataFrame retainedDataFrame, DataFrame supplementaryDataFrame, string[] retainedJoinColumnNames, string[] supplemetaryJoinColumnNames, out PrimitiveDataFrameColumn<long> retainedRowIndices, out PrimitiveDataFrameColumn<long> supplementaryRowIndices, bool isInner = false, bool calculateIntersection = false)
{
    if (retainedJoinColumnNames == null)
        throw new ArgumentNullException(nameof(retainedJoinColumnNames));
    if (supplemetaryJoinColumnNames == null)
        throw new ArgumentNullException(nameof(supplemetaryJoinColumnNames));
    if (retainedJoinColumnNames.Length != supplemetaryJoinColumnNames.Length)
        throw new ArgumentException(Strings.MismatchedArrayLengths, nameof(retainedJoinColumnNames));
    HashSet<long> intersection = calculateIntersection ? new HashSet<long>() : null;
    // Get occurrences of values in columns used for join in the retained and supplementary dataframes
    Dictionary<long, ICollection<long>> occurrences = null;
    Dictionary<long, long> retainedIndicesReverseMapping = null;
    HashSet<long> supplementaryJoinColumnsNullIndices = new HashSet<long>();
    for (int colNameIndex = 0; colNameIndex < retainedJoinColumnNames.Length; colNameIndex++)
    {
        DataFrameColumn shrinkedRetainedColumn = retainedDataFrame.Columns[retainedJoinColumnNames[colNameIndex]];
        //shrink retained column by row occurrences from previous step
        if (occurrences != null)
        {
            //only rows with occurrences from the previous step should go for further processing
            var shrinkedRetainedIndices = occurrences.Keys.ToArray();
            //create reverse mapping of index of the row in the shrinked column to the index of this row in the original dataframe (new index -> original index)
            var newRetainedIndicesReverseMapping = new Dictionary<long, long>(shrinkedRetainedIndices.Length);
            for (int i = 0; i < shrinkedRetainedIndices.Length; i++)
            {
                //store reverse mapping to restore original dataframe indices from indices in shrinked row
                var originalIndex = shrinkedRetainedIndices[i];
                newRetainedIndicesReverseMapping.Add(i, originalIndex);
            }
            retainedIndicesReverseMapping = newRetainedIndicesReverseMapping;
            shrinkedRetainedColumn = shrinkedRetainedColumn.Clone(new Int64DataFrameColumn("Indices", shrinkedRetainedIndices));
        }
        DataFrameColumn supplementaryColumn = supplementaryDataFrame.Columns[supplemetaryJoinColumnNames[colNameIndex]];
        //Find occurrences on current step (join column)
        var newOccurrences = shrinkedRetainedColumn.GetGroupedOccurrences(supplementaryColumn, out HashSet<long> supplementaryColumnNullIndices);
        //Convert indices in key from local (shrinked row) to indices in original dataframe
        if (retainedIndicesReverseMapping != null)
            newOccurrences = newOccurrences.ToDictionary(kvp => retainedIndicesReverseMapping[kvp.Key], kvp => kvp.Value);
        supplementaryJoinColumnsNullIndices.UnionWith(supplementaryColumnNullIndices);
        // shrink join result on current column by previous join columns (if any)
        // (we have to remove occurrences that don't exist in previous columns, because JOIN happens only if ALL left and right columns in JOIN are matched)
        if (occurrences != null)
        {
            var shrinkedOccurences = new Dictionary<long, ICollection<long>>();
            foreach (var kvp in newOccurrences)
            {
                // Hoist the invariant lookup out of the Where predicate instead of
                // re-indexing occurrences[kvp.Key] for every candidate row index.
                ICollection<long> previousMatches = occurrences[kvp.Key];
                var newValue = kvp.Value.Where(previousMatches.Contains).ToArray();
                if (newValue.Any())
                {
                    shrinkedOccurences.Add(kvp.Key, newValue);
                }
            }
            newOccurrences = shrinkedOccurences;
        }
        occurrences = newOccurrences;
    }
    retainedRowIndices = new Int64DataFrameColumn("RetainedIndices");
    supplementaryRowIndices = new Int64DataFrameColumn("SupplementaryIndices");
    //Perform Merging
    var retainJoinColumns = retainedJoinColumnNames.Select(name => retainedDataFrame.Columns[name]).ToArray();
    for (long i = 0; i < retainedDataFrame.Columns.RowCount; i++)
    {
        if (!IsAnyNullValueInColumns(retainJoinColumns, i))
        {
            //Get all row indexes from supplementary dataframe that satisfy the JOIN condition
            if (occurrences.TryGetValue(i, out ICollection<long> rowIndices))
            {
                foreach (long supplementaryRowIndex in rowIndices)
                {
                    retainedRowIndices.Append(i);
                    supplementaryRowIndices.Append(supplementaryRowIndex);
                    //store intersection if required; HashSet.Add is a no-op on
                    //duplicates, so the original Contains + Add double lookup is gone
                    if (calculateIntersection)
                    {
                        intersection.Add(supplementaryRowIndex);
                    }
                }
            }
            else
            {
                // No match: inner join drops the row, outer joins keep it with a null partner.
                if (isInner)
                    continue;
                retainedRowIndices.Append(i);
                supplementaryRowIndices.Append(null);
            }
        }
        else
        {
            // Null in a retained join column: pair the row with every null-keyed supplementary row.
            foreach (long row in supplementaryJoinColumnsNullIndices)
            {
                retainedRowIndices.Append(i);
                supplementaryRowIndices.Append(row);
            }
        }
    }
    return intersection;
}
19
Source : EdgeSet.cs
with GNU General Public License v3.0
from freezy
with GNU General Public License v3.0
from freezy
// True when an edge between vertices i and j has already been recorded.
private bool Has(int i, int j) => _edges.Contains(GetKey(i, j));
19
Source : BepuCallbacks.cs
with MIT License
from FreneticLLC
with MIT License
from FreneticLLC
/// <summary>
/// Collision-filter callback: decides whether the engine should generate
/// contacts for the pair (a, b).
/// </summary>
public bool AllowContactGeneration(int workerIndex, CollidableReference a, CollidableReference b)
{
    // Two non-dynamic collidables never need contact generation.
    if (!(a.Mobility == CollidableMobility.Dynamic || b.Mobility == CollidableMobility.Dynamic))
    {
        return false;
    }
    EnreplacedyPhysicsProperty propA = PhysPropForCollidable(a);
    EnreplacedyPhysicsProperty propB = PhysPropForCollidable(b);
    if (propA == null || propB == null)
    {
        // At most one side is a tracked physics property: test it against
        // the world-solid collision group; with neither side tracked, no contact.
        EnreplacedyPhysicsProperty soleProp = propA ?? propB;
        return soleProp != null && soleProp.CGroup.DoesCollide(CollisionUtil.WorldSolid);
    }
    // Honor the explicit no-collide exclusion list, when present.
    HashSet<long> exclusions = propA.Internal.NoCollideIDs;
    if (exclusions != null && exclusions.Contains(propB.Enreplacedy.EID))
    {
        return false;
    }
    return propA.CGroup.DoesCollide(propB.CGroup);
}
19
Source : ThreadSynchronizationContext.cs
with MIT License
from github-for-unity
with MIT License
from github-for-unity
/// <summary>
/// Blocks until <paramref name="id"/> appears in the signaled set or the token
/// is cancelled. Returns true when the id was observed as signaled; false when
/// the wait ended due to cancellation. The membership check deliberately
/// happens before each wait, and cancellation is checked after each wait,
/// matching the original loop's ordering.
/// </summary>
public bool Wait(long id, CancellationToken token)
{
    for (;;)
    {
        if (signaledIds.Contains(id))
        {
            return true;
        }
        Wait(token);
        if (token.IsCancellationRequested)
        {
            return false;
        }
    }
}
19
Source : ObjectContainer.cs
with MIT License
from katalash
with MIT License
from katalash
/// <summary>
/// Serializes all DS2 event map enreplacedies into the given param.
/// Returns false (after notifying the user) when a duplicate event ID is found.
/// </summary>
public bool SerializeDS2Events(PARAM evs)
{
    HashSet<long> ids = new HashSet<long>();
    foreach (var o in Objects)
    {
        if (o is MapEnreplacedy m && m.Type == MapEnreplacedy.MapEnreplacedyType.DS2Event && m.WrappedObject is PARAM.Row mp)
        {
            // HashSet<long>.Add returns false when the ID is already present,
            // replacing the previous Contains-then-Add double lookup.
            if (!ids.Add(mp.ID))
            {
                MessageBox.Show($@"{mp.Name} has an ID that's already used. Please change it to something unique and save again.", "", MessageBoxButtons.OK, MessageBoxIcon.Error);
                return false;
            }
            // Copy the row so the serialized param does not share state with the editor.
            var newloc = new PARAM.Row(mp);
            evs.Rows.Add(newloc);
        }
    }
    return true;
}
19
Source : ObjectContainer.cs
with MIT License
from katalash
with MIT License
from katalash
/// <summary>
/// Serializes all DS2 generator map enreplacedies: their location rows (rebased
/// against the map offset) go into <paramref name="locations"/> and their
/// generator rows into <paramref name="generators"/>.
/// Returns false (after notifying the user) when a duplicate ID is found.
/// </summary>
public bool SerializeDS2Generators(PARAM locations, PARAM generators)
{
    HashSet<long> ids = new HashSet<long>();
    foreach (var o in Objects)
    {
        if (o is MapEnreplacedy m && m.Type == MapEnreplacedy.MapEnreplacedyType.DS2Generator && m.WrappedObject is MergedParamRow mp)
        {
            // HashSet<long>.Add returns false when the ID is already present,
            // replacing the previous Contains-then-Add double lookup.
            if (!ids.Add(mp.ID))
            {
                MessageBox.Show($@"{mp.Name} has an ID that's already used. Please change it to something unique and save again.", "", MessageBoxButtons.OK, MessageBoxIcon.Error);
                return false;
            }
            var loc = mp.GetRow("generator-loc");
            if (loc != null)
            {
                // Adjust the location to be relative to the mapoffset
                var newloc = new PARAM.Row(loc);
                newloc["PositionX"].Value = (float)loc["PositionX"].Value - MapOffset.Position.X;
                newloc["PositionY"].Value = (float)loc["PositionY"].Value - MapOffset.Position.Y;
                newloc["PositionZ"].Value = (float)loc["PositionZ"].Value - MapOffset.Position.Z;
                locations.Rows.Add(newloc);
            }
            var gen = mp.GetRow("generator");
            if (gen != null)
            {
                generators.Rows.Add(gen);
            }
        }
    }
    return true;
}
19
Source : ObjectContainer.cs
with MIT License
from katalash
with MIT License
from katalash
/// <summary>
/// Serializes all DS2 generator-registration map enreplacedies into the given param.
/// Returns false (after notifying the user) when a duplicate ID is found.
/// </summary>
public bool SerializeDS2Regist(PARAM regist)
{
    HashSet<long> ids = new HashSet<long>();
    foreach (var o in Objects)
    {
        if (o is MapEnreplacedy m && m.Type == MapEnreplacedy.MapEnreplacedyType.DS2GeneratorRegist && m.WrappedObject is PARAM.Row mp)
        {
            // HashSet<long>.Add returns false when the ID is already present,
            // replacing the previous Contains-then-Add double lookup.
            if (!ids.Add(mp.ID))
            {
                MessageBox.Show($@"{mp.Name} has an ID that's already used. Please change it to something unique and save again.", "", MessageBoxButtons.OK, MessageBoxIcon.Error);
                return false;
            }
            // NOTE(review): unlike the other serializers, the row is added directly
            // (no copy) — preserved as-is.
            regist.Rows.Add(mp);
        }
    }
    return true;
}
19
Source : ObjectContainer.cs
with MIT License
from katalash
with MIT License
from katalash
/// <summary>
/// Serializes all DS2 event-location map enreplacedies into the given param,
/// rebasing positions against the map offset.
/// Returns false (after notifying the user) when a duplicate ID is found.
/// </summary>
public bool SerializeDS2EventLocations(PARAM locs)
{
    HashSet<long> ids = new HashSet<long>();
    foreach (var o in Objects)
    {
        if (o is MapEnreplacedy m && m.Type == MapEnreplacedy.MapEnreplacedyType.DS2EventLocation && m.WrappedObject is PARAM.Row mp)
        {
            // HashSet<long>.Add returns false when the ID is already present,
            // replacing the previous Contains-then-Add double lookup.
            if (!ids.Add(mp.ID))
            {
                MessageBox.Show($@"{mp.Name} has an ID that's already used. Please change it to something unique and save again.", "", MessageBoxButtons.OK, MessageBoxIcon.Error);
                return false;
            }
            // Adjust the location to be relative to the mapoffset
            var newloc = new PARAM.Row(mp);
            newloc["PositionX"].Value = (float)mp["PositionX"].Value - MapOffset.Position.X;
            newloc["PositionY"].Value = (float)mp["PositionY"].Value - MapOffset.Position.Y;
            newloc["PositionZ"].Value = (float)mp["PositionZ"].Value - MapOffset.Position.Z;
            locs.Rows.Add(newloc);
        }
    }
    return true;
}
19
Source : ObjectContainer.cs
with MIT License
from katalash
with MIT License
from katalash
/// <summary>
/// Serializes all DS2 object-instance map enreplacedies into the given param.
/// Returns false (after notifying the user) when a duplicate ID is found.
/// </summary>
public bool SerializeDS2ObjInstances(PARAM objs)
{
    HashSet<long> ids = new HashSet<long>();
    foreach (var o in Objects)
    {
        if (o is MapEnreplacedy m && m.Type == MapEnreplacedy.MapEnreplacedyType.DS2ObjectInstance && m.WrappedObject is PARAM.Row mp)
        {
            // HashSet<long>.Add returns false when the ID is already present,
            // replacing the previous Contains-then-Add double lookup.
            if (!ids.Add(mp.ID))
            {
                MessageBox.Show($@"{mp.Name} has an ID that's already used. Please change it to something unique and save again.", "", MessageBoxButtons.OK, MessageBoxIcon.Error);
                return false;
            }
            // Copy the row so the serialized param does not share state with the editor.
            var newobj = new PARAM.Row(mp);
            objs.Rows.Add(newobj);
        }
    }
    return true;
}
19
Source : DuplicateSearch_MinHashing.cs
with Creative Commons Zero v1.0 Universal
from Koziev
with Creative Commons Zero v1.0 Universal
from Koziev
// Entry point: finds groups of near-duplicate sentences in a text file using
// 3-character shingles, MinHash signatures and LSH candidate search, then
// writes the duplicate groups (separated by blank lines) to the result file.
// args: [0] input path, [1] output path, [2] Jaccard threshold, [3] max sentences.
static void Main(string[] args)
{
    // Path to the source text
    string SENTx_path = args[0];
    // Path to the file where the duplicates will be written
    string result_path = args[1];
    // Similarity thresholds; they depend on sentence length.
    // threshold1 (looser) gates the LSH candidate search; threshold2 gates the exact check.
    double threshold2 = double.Parse(args[2], System.Globalization.CultureInfo.InvariantCulture);
    double threshold1 = threshold2 * 0.8;
    Console.WriteLine("threshold1={0} threshold2={1}", threshold1, threshold2);
    // Maximum number of sentences to load
    int max_sent = int.Parse(args[3]);
    HashSet<long> sample_hashes = new HashSet<long>(); // to suppress repeats in the results
    MD5 md5 = MD5.Create();
    DateTime started = DateTime.Now;
    int n_groups = 0;
    using (System.IO.StreamWriter wrt = new System.IO.StreamWriter(result_path))
    {
        // Load the sentences into vals
        Console.WriteLine("Processing {0}...", SENTx_path);
        List<string> vals = new List<string>();
        using (System.IO.StreamReader rdr = new System.IO.StreamReader(SENTx_path))
        {
            while (!rdr.EndOfStream && vals.Count <= max_sent)
            {
                string line = rdr.ReadLine();
                if (line == null)
                {
                    break;
                }
                // NOTE(review): Replace(" ", " ") is a no-op as written — this likely
                // intended to collapse repeated spaces; confirm against upstream source.
                line = NormalizeSent( line.Trim().Replace(" ", " ").Replace(" ", " ") );
                vals.Add(line);
            }
        }
        Console.WriteLine("{0} lines in {1}", vals.Count, SENTx_path);
        // -----------------------------------------------------------------------
        // Build a 3-character shingle vocabulary and one shingle-id set per sentence.
        Dictionary<string, int> shingle2id = new Dictionary<string, int>();
        List<HashSet<int>> val_sets = new List<HashSet<int>>();
        foreach (string v in vals)
        {
            string uv = SetSimilarity2.Tools.NormalizeStr(v);
            HashSet<int> v_set = new HashSet<int>();
            for (int i0 = 0; i0 < uv.Length - 3; ++i0)
            {
                string shingle = uv.Substring(i0, 3);
                int id = -1;
                if (!shingle2id.TryGetValue(shingle, out id))
                {
                    id = shingle2id.Count;
                    shingle2id.Add(shingle, id);
                }
                v_set.Add(id);
            }
            val_sets.Add(v_set);
        }
        /*
        // --- debugging ---
        {
            int iset1 = vals.IndexOf("Ты меня слышишь?");
            int iset2 = vals.IndexOf("Ты слышишь меня?");
            double sim0 = SetSimilarity2.Tools.CalcJackardSim(val_sets[iset1], val_sets[iset2]);
            Console.WriteLine("iset1={0} iset2={1} sim={2}", iset1, iset2, sim0);
        }
        // ---------------
        */
        Console.WriteLine("Hashing...");
        // ------------------------------------------------------------------
        SetSimilarity2.MinHash minhash = new SetSimilarity2.MinHash(shingle2id.Count);
        // --- for debugging
        //string abc = minhash.GetABC();
        // ---------------
        int[,] SIG = minhash.GetSignatureMatrix(val_sets);
        // NOTE(review): sim12 is computed but never used.
        double sim12 = minhash.ComputeSimilarity(SIG, 0, 1);
        // Candidate search using Locality Sensitive Hashing
        SetSimilarity2.LSH lsh = new SetSimilarity2.LSH(SIG, val_sets);
        Console.WriteLine("Searching duplicates...");
        // NOTE(review): sim_pairs is declared but never populated.
        List<Tuple<int, int>> sim_pairs = new List<Tuple<int, int>>();
        for (int iset1 = 0; iset1 < val_sets.Count; ++iset1)
        {
            // LSH candidates at the looser threshold1; verified with exact Jaccard below.
            List<int> sets2 = lsh.FindClosest(iset1, minhash, threshold1);
            List<int> isets_bucket = new List<int>();
            isets_bucket.Add(iset1);
            List<string> toks1 = StringToWords(vals[iset1]);
            foreach (int iset2 in sets2)
            {
                // Only consider iset2 > iset1 so each pair is examined once.
                if (iset2 > iset1)
                {
                    double sim0 = SetSimilarity2.Tools.CalcJackardSim(val_sets[iset1], val_sets[iset2]);
                    if (sim0 > threshold2)
                    {
                        // Additional check on the strings; in particular, make sure
                        // the particle НЕ/НИ stayed in front of the same word.
                        List<string> toks2 = StringToWords(vals[iset2]);
                        if (SetsAreEqual(toks1, toks2))
                        {
                            isets_bucket.Add(iset2);
                            //Console.WriteLine("\nistr1={0}\nistr2={1}\nstr1={2}\nstr2={3}\nsim={4:N5}", iset1, iset2, vals[iset1], vals[iset2], sim0);
                        }
                    }
                }
            }
            if (isets_bucket.Count > 1)
            {
                // Emit the group, skipping lines already printed in an earlier group
                // (tracked globally via an XOR of the two MD5 halves).
                List<string> printed = new List<string>();
                foreach (int isetx in isets_bucket)
                {
                    string line = NormalizeSent(vals[isetx]);
                    if (!printed.Contains(line))
                    {
                        byte[] hash = md5.ComputeHash(System.Text.Encoding.UTF8.GetBytes(line));
                        Int64 ihash1 = BitConverter.ToInt64(hash, 0);
                        Int64 ihash2 = BitConverter.ToInt64(hash, 8);
                        Int64 ihash = ihash1 ^ ihash2;
                        if (!sample_hashes.Contains(ihash))
                        {
                            sample_hashes.Add(ihash);
                            printed.Add(line);
                        }
                    }
                }
                if (printed.Count > 1)
                {
                    foreach (string l in printed)
                    {
                        wrt.WriteLine("{0}", l);
                    }
                    wrt.WriteLine("\n");
                    n_groups++;
                }
            }
            // Lightweight progress indicator (carriage return keeps it on one line).
            if ((iset1 % 1000) == 0)
            {
                Console.Write("iset1={0}/{1} n_groups={2}\r", iset1, vals.Count, n_groups);
            }
        }
    }
    DateTime finished = DateTime.Now;
    Console.WriteLine("Done via LSH, elapsed time={0} sec", (finished - started).TotalSeconds);
    return;
}
19
Source : GetAvailableForReviewProject.cs
with GNU Affero General Public License v3.0
from kysect
with GNU Affero General Public License v3.0
from kysect
/// <summary>
/// Lists the authorized user's projects that do not yet have a review request.
/// </summary>
public async Task<Response> Handle(Query request, CancellationToken cancellationToken)
{
    // Ids of projects the user has already submitted for review.
    HashSet<long> userProjects = _context
        .ProjectReviewRequests
        .Where(k => k.AuthorId == request.AuthorizedUser.Id)
        .SelectToHashSet(k => k.ProjectId);
    List<GithubRepositoryInfoDto> result = await _context
        .StudentProjects
        .Where(p => p.OwnerUserId == request.AuthorizedUser.Id && !userProjects.Contains(p.Id))
        .Select(GithubRepositoryInfoDto.FromEnreplacedy)
        .ToListAsync(cancellationToken); // was ignoring the handler's cancellation token
    return new Response(result);
}
19
Source : RequiredDependencyAnalyzer.cs
with MIT License
from microsoft
with MIT License
from microsoft
// Analyzes one pip's fingerprint computation: reconciles its statically declared
// inputs with the inputs actually observed during execution, decides which file
// and directory dependencies are truly required, rewrites the pip's directory
// dependencies, and adds the required edges to the mutable graph. Optionally
// dumps a per-pip CSV when the pip's hash is in the requested set.
public void ProcessFingerprintComputed(ProcessFingerprintComputationEventData data)
{
    // Only actual executions carry observed-input data; skip other computation kinds.
    if (data.Kind != FingerprintComputationKind.Execution)
    {
        return;
    }
    m_consumer = m_replacedyzer.GetEntry(data.PipId);
    // Reset all per-pip accumulators before processing this pip.
    m_consumedFilesByPath.Clear();
    m_dependencyConsumedFileIndex.Clear();
    m_dependencyConsumedFileEndIndex.Clear();
    m_dependencies.Clear();
    m_builder.Clear();
    m_directoryDependenciesFilterMap.Clear();
    m_directoryHreplacedources.Clear();
    var computation = data.StrongFingerprintComputations[0];
    var pip = (Process)m_replacedyzer.GetPip(data.PipId);
    // Record statically declared inputs: direct file inputs, plus the contents
    // of each directory dependency (noting directories that contain source files).
    PipArtifacts.ForEachInput(pip, input =>
    {
        if (input.IsFile)
        {
            AddConsumedFile(input.FileArtifact, DirectoryArtifact.Invalid, ContentFlag.Static | ContentFlag.Consumed);
        }
        else
        {
            foreach (var file in m_replacedyzer.GetContents(input.DirectoryArtifact))
            {
                if (file.IsSourceFile)
                {
                    m_directoryHreplacedources.Add(input.DirectoryArtifact);
                }
                AddConsumedFile(file, input.DirectoryArtifact, ContentFlag.Dynamic);
            }
        }
        return true;
    }, includeLazyInputs: false);
    // Merge in the inputs observed at execution time, marking matching declared
    // files as consumed, or recording unknown accesses.
    foreach (var input in computation.ObservedInputs)
    {
        // Derive a per-observation-type flag by shifting the base probe flag.
        var flag = (ContentFlag)((int)ContentFlag.AbsentPathProbe << (int)input.Type) | ContentFlag.Consumed;
        if (input.Type == ObservedInputType.FileContentRead || input.Type == ObservedInputType.ExistingFileProbe)
        {
            if (m_consumedFilesByPath.TryGetValue(input.Path, out var file))
            {
                file.AddFlag(ContentFlag.Consumed);
                if (file.SourceFile != null)
                {
                    file.SourceFile.AddFlag(ContentFlag.Consumed);
                }
                if (file.FinalFile != null)
                {
                    file.FinalFile.AddFlag(ContentFlag.Consumed);
                }
            }
            else
            {
                // Observed read of a file that was not declared: track it as unknown.
                AddConsumedFile(FileArtifact.CreateSourceFile(input.Path), m_replacedyzer.PipGraph.TryGetSealSourceAncestor(input.Path), flag | ContentFlag.Unknown);
            }
        }
        else if (m_replacedyzer.AllAccesses)
        {
            AddConsumedFile(FileArtifact.CreateSourceFile(input.Path), m_replacedyzer.PipGraph.TryGetSealSourceAncestor(input.Path), flag);
        }
    }
    var entry = m_consumer;
    // Sort file dependencies for consistent output
    entry.FileDependencies.Sort(s_fileReferenceComparer);
    // Aggregate consumed-file flags onto each producing pip.
    foreach (var fileDependency in entry.FileDependencies)
    {
        if (fileDependency.Producer != null)
        {
            var reference = entry.PipDependencies.GetOrAdd(fileDependency.Producer.PipId, p => new PipReference());
            if (reference.Pip == null)
            {
                reference.Pip = m_replacedyzer.GetEntry(fileDependency.Producer.PipId);
            }
            reference.Flags |= fileDependency.ConsumedFile.Flags;
        }
    }
    // Human-readable label for a pip entry, used in the log lines below.
    string describe(PipEntry pe)
    {
        return $"{pe.SpecFileName}-{m_replacedyzer.GetDescription(m_replacedyzer.GetPip(pe.PipId))}";
    }
    m_builder.AppendLine(describe(entry));
    // Decide, per (producer pip, directory) pair, whether the dependency must be
    // retained (some file from it was consumed) or can be removed.
    foreach (var fileDependency in entry.FileDependencies)
    {
        if (fileDependency.Producer != null
            && fileDependency.ConsumedFile.File.Artifact.IsOutputFile)
        {
            var pipId = fileDependency.Producer.PipId;
            var pipReference = entry.PipDependencies[pipId];
            var directory = fileDependency.Directory?.Directory ?? DirectoryArtifact.Invalid;
            if (m_dependencies.Add((pipId, directory)))
            {
                if (pipReference.HasFlag(ContentFlag.Consumed))
                {
                    m_directoryDependenciesFilterMap[directory] = true;
                    m_builder.AppendLine($"{entry.Identifier} -> Retaining pip dependency on '{describe(pipReference.Pip)}' (declared via directory '{ToString(fileDependency.Directory)}') (consumes '{ToString(fileDependency.ConsumedFile.File.Artifact)}')");
                }
                else
                {
                    // TryAdd keeps an earlier 'true' verdict for the same directory.
                    m_directoryDependenciesFilterMap.TryAdd(directory, false);
                    m_builder.AppendLine($"{entry.Identifier} -> Removing pip dependency on '{describe(pipReference.Pip)}' (declared via directory '{ToString(fileDependency.Directory)}')");
                }
            }
        }
    }
    // Filter the pip's declared directory dependencies down to those that are
    // used, contain sources, or are source-sealed.
    var trimmedDirectoryDependencies = new List<DirectoryArtifact>();
    foreach (var d in entry.Process.DirectoryDependencies)
    {
        if (m_directoryDependenciesFilterMap.TryGetValue(d, out var shouldInclude))
        {
            if (shouldInclude)
            {
                m_builder.AppendLine($"{entry.Identifier} -> Retaining directory dependency on '{ToString(d)}' (used)");
            }
            else if (m_directoryHreplacedources.Contains(d))
            {
                m_builder.AppendLine($"{entry.Identifier} -> Retaining directory dependency on '{ToString(d)}' (has sources)");
            }
            else
            {
                m_builder.AppendLine($"{entry.Identifier} -> Removing directory dependency on '{ToString(d)}'");
                continue;
            }
        }
        else
        {
            var sealId = m_replacedyzer.PipGraph.GetSealedDirectoryNode(d).ToPipId();
            if (!m_directoryHreplacedources.Contains(d) && !m_replacedyzer.PipTable.GetSealDirectoryKind(sealId).IsSourceSeal())
            {
                m_builder.AppendLine($"{entry.Identifier} -> Removing directory dependency on '{ToString(d)}' (unused output directory)");
                continue;
            }
        }
        entry.PipDependencies.TryAdd(m_replacedyzer.PipGraph.GetSealedDirectoryNode(d).ToPipId(), default);
        trimmedDirectoryDependencies.Add(d);
    }
    // Update directory dependencies which trimmed directory dependencies to allow writing
    // a pip into the serialized pip table that can run without the unnecessary dependencies
    entry.Process.UnsafeUpdateDirectoryDependencies(trimmedDirectoryDependencies.ToReadOnlyArray());
    m_builder.AppendLine();
    // Update the graph: add an incoming edge for each retained dependency.
    var modifiedGraph = m_replacedyzer.m_mutableGraph;
    using (var scope = modifiedGraph.AcquireExclusiveIncomingEdgeScope(entry.PipId.ToNodeId()))
    {
        foreach (var dependency in entry.PipDependencies)
        {
            // A null value means the dependency came from the directory pass above.
            if (dependency.Value == null || dependency.Value.HasFlag(ContentFlag.Consumed))
            {
                scope.AddEdge(dependency.Key.ToNodeId());
            }
        }
        entry.AddedEdges = true;
    }
    // Optional per-pip CSV dump for pips explicitly requested by hash.
    if (m_replacedyzer.SemiStableHashes.Contains(entry.SemistableHash))
    {
        using (var writer = new StreamWriter(Path.Combine(m_replacedyzer.OutputFilePath,
            $"{GetFileName(entry.SpecFile)}_Pip{pip.FormattedSemiStableHash}.csv")))
        {
            var table = new DisplayTable<Columns>(" , ");
            foreach (var dependency in entry.FileDependencies)
            {
                table.NextRow();
                table.Set(Columns.Path, ToString(dependency.ConsumedFile.File.Artifact.Path));
                table.Set(Columns.RwCount, dependency.ConsumedFile.File.Artifact.RewriteCount.ToString());
                table.Set(Columns.Flags, dependency.ConsumedFile.Flags.ToString());
                table.Set(Columns.Producer, dependency.Producer?.Identifier);
                table.Set(Columns.ProducerSpec, GetFileName(dependency.Producer?.SpecFile ?? AbsolutePath.Invalid));
                table.Set(Columns.Dir, ToString(dependency.Directory));
                table.Set(Columns.DirId, dependency.Directory?.Id);
                table.Set(Columns.DirSsh, dependency.Directory?.SemistableHash);
            }
            table.Write(writer);
        }
    }
    // Flush accumulated log lines, if any.
    if (m_builder.Length != 0)
    {
        m_replacedyzer.Write(m_builder);
    }
}
19
Source : TestLoader.cs
with MIT License
from microsoft
with MIT License
from microsoft
// Loads YCSB initialization and transaction keys from the raw benchmark data
// files into the preallocated arrays. In small-data mode both key sets are
// derived from the txn file alone; otherwise each file is read fully.
// Files are read in pinned fixed-size chunks; each record is an 8-byte key.
private unsafe void LoadDataFromFile<TKey, TKeySetter>(string filePath, string distribution, TKey[] init_keys, TKey[] txn_keys, TKeySetter keySetter)
    where TKeySetter : IKeySetter<TKey>
{
    string init_filename = filePath + "/load_" + distribution + "_250M_raw.dat";
    string txn_filename = filePath + "/run_" + distribution + "_250M_1000M_raw.dat";
    var sw = Stopwatch.StartNew();
    if (this.Options.UseSmallData)
    {
        Console.WriteLine($"loading subset of keys and txns from {txn_filename} into memory...");
        using FileStream stream = File.Open(txn_filename, FileMode.Open, FileAccess.Read, FileShare.Read);
        byte[] chunk = new byte[YcsbConstants.kFileChunkSize];
        // Pin the buffer so a raw pointer can be used to read 8-byte keys.
        GCHandle chunk_handle = GCHandle.Alloc(chunk, GCHandleType.Pinned);
        byte* chunk_ptr = (byte*)chunk_handle.AddrOfPinnedObject();
        var initValueSet = new HashSet<long>(init_keys.Length);
        long init_count = 0;
        long txn_count = 0;
        long offset = 0;
        while (true)
        {
            stream.Position = offset;
            int size = stream.Read(chunk, 0, YcsbConstants.kFileChunkSize);
            for (int idx = 0; idx < size && txn_count < txn_keys.Length; idx += 8)
            {
                var value = *(long*)(chunk_ptr + idx);
                // First time we see a key, it also becomes an init key (until full).
                if (!initValueSet.Contains(value))
                {
                    if (init_count >= init_keys.Length)
                    {
                        if (distribution == YcsbConstants.ZipfDist)
                            continue;
                        // Uniform distribution at current small-data counts is about a 1% hit rate, which is too slow here, so just modulo.
                        value %= init_keys.Length;
                    }
                    else
                    {
                        initValueSet.Add(value);
                        keySetter.Set(init_keys, init_count, value);
                        ++init_count;
                    }
                }
                keySetter.Set(txn_keys, txn_count, value);
                ++txn_count;
            }
            // A short read means end of file.
            if (size == YcsbConstants.kFileChunkSize)
                offset += YcsbConstants.kFileChunkSize;
            else
                break;
            if (txn_count == txn_keys.Length)
                break;
        }
        sw.Stop();
        chunk_handle.Free();
        if (init_count != init_keys.Length)
            throw new InvalidDataException($"Init file subset load fail! Expected {init_keys.Length} keys; found {init_count}");
        if (txn_count != txn_keys.Length)
            throw new InvalidDataException($"Txn file subset load fail! Expected {txn_keys.Length} keys; found {txn_count}");
        Console.WriteLine($"loaded {init_keys.Length:N0} keys and {txn_keys.Length:N0} txns in {(double)sw.ElapsedMilliseconds / 1000:N3} seconds");
        return;
    }
    // Full-data mode: read every init key from the load file...
    Console.WriteLine($"loading all keys from {init_filename} into memory...");
    long count = 0;
    using (FileStream stream = File.Open(init_filename, FileMode.Open, FileAccess.Read, FileShare.Read))
    {
        byte[] chunk = new byte[YcsbConstants.kFileChunkSize];
        GCHandle chunk_handle = GCHandle.Alloc(chunk, GCHandleType.Pinned);
        byte* chunk_ptr = (byte*)chunk_handle.AddrOfPinnedObject();
        long offset = 0;
        while (true)
        {
            stream.Position = offset;
            int size = stream.Read(chunk, 0, YcsbConstants.kFileChunkSize);
            for (int idx = 0; idx < size; idx += 8)
            {
                keySetter.Set(init_keys, count, *(long*)(chunk_ptr + idx));
                ++count;
                if (count == init_keys.Length)
                    break;
            }
            if (size == YcsbConstants.kFileChunkSize)
                offset += YcsbConstants.kFileChunkSize;
            else
                break;
            if (count == init_keys.Length)
                break;
        }
        chunk_handle.Free();
        if (count != init_keys.Length)
            throw new InvalidDataException($"Init file load fail! Expected {init_keys.Length} keys; found {count}");
    }
    sw.Stop();
    Console.WriteLine($"loaded {init_keys.Length:N0} keys in {(double)sw.ElapsedMilliseconds / 1000:N3} seconds");
    // ...then read every transaction key from the run file.
    Console.WriteLine($"loading all txns from {txn_filename} into memory...");
    sw.Restart();
    using (FileStream stream = File.Open(txn_filename, FileMode.Open, FileAccess.Read, FileShare.Read))
    {
        byte[] chunk = new byte[YcsbConstants.kFileChunkSize];
        GCHandle chunk_handle = GCHandle.Alloc(chunk, GCHandleType.Pinned);
        byte* chunk_ptr = (byte*)chunk_handle.AddrOfPinnedObject();
        count = 0;
        long offset = 0;
        while (true)
        {
            stream.Position = offset;
            int size = stream.Read(chunk, 0, YcsbConstants.kFileChunkSize);
            for (int idx = 0; idx < size; idx += 8)
            {
                keySetter.Set(txn_keys, count, *(long*)(chunk_ptr + idx));
                ++count;
                if (count == txn_keys.Length)
                    break;
            }
            if (size == YcsbConstants.kFileChunkSize)
                offset += YcsbConstants.kFileChunkSize;
            else
                break;
            if (count == txn_keys.Length)
                break;
        }
        chunk_handle.Free();
        if (count != txn_keys.Length)
            throw new InvalidDataException($"Txn file load fail! Expected {txn_keys.Length} keys; found {count}");
    }
    sw.Stop();
    Console.WriteLine($"loaded {txn_keys.Length:N0} txns in {(double)sw.ElapsedMilliseconds / 1000:N3} seconds");
}
19
Source : EditDiffer.cs
with MIT License
from nesrak1
with MIT License
from nesrak1
/// <summary>
/// Returns the smallest positive path id not yet in use, recording it as
/// used and remembering it as the most recently issued id.
/// </summary>
public long NextPathID()
{
    // Scan upward from 1 until a free id is found.
    long candidate = 1;
    while (usedIds.Contains(candidate))
    {
        candidate++;
    }
    usedIds.Add(candidate);
    lastId = candidate;
    return candidate;
}
19
Source : LeftRight.cs
with MIT License
from nicknash
with MIT License
from nicknash
// Marks the start of a read on slot `which`: asserts no write is in progress
// on that slot, then records this reader. RE.MaybeSwitch() is a scheduling
// point for the checker; statement order is significant.
public void BeginRead(long which)
{
    RE.MaybeSwitch();
    RE.replacedert(!_writing.Contains(which), $"Write in progress during read at {which}");
    _reading.Add(which);
}
19
Source : StarvationLeftRight.cs
with MIT License
from nicknash
with MIT License
from nicknash
// Marks the end of a read on slot `which`: removes this reader, then asserts
// no write overlapped the read. Statement order (remove before assert) is
// significant for the checker.
public void EndRead(long which)
{
    RE.MaybeSwitch();
    _reading.Remove(which);
    RE.replacedert(!_writing.Contains(which), $"Write in progress during read at {which}");
}
19
Source : StarvationLeftRight.cs
with MIT License
from nicknash
with MIT License
from nicknash
// Marks the start of a write on slot `which`: asserts neither a read nor
// another write is in progress on that slot, then records this writer.
public void BeginWrite(long which)
{
    RE.MaybeSwitch();
    RE.replacedert(!_reading.Contains(which), $"Read in progress during write at {which}");
    RE.replacedert(!_writing.Contains(which), $"Write in progress during write at {which}");
    _writing.Add(which);
}
19
Source : StarvationLeftRight.cs
with MIT License
from nicknash
with MIT License
from nicknash
// Marks the end of a write on slot `which`: asserts no read overlapped the
// write, then removes this writer.
public void EndWrite(long which)
{
    RE.MaybeSwitch();
    // Fixed message: this assertion guards against a concurrent READ during a
    // WRITE (it checks _reading); the old text said the opposite.
    RE.replacedert(!_reading.Contains(which), $"Read in progress during write at {which}");
    _writing.Remove(which);
}
19
Source : DomainRestorer.cs
with MIT License
from NtreevSoft
with MIT License
from NtreevSoft
/// <summary>
/// Reads the posted-action log and collects, in order, every action whose ID
/// also appears in the completed-action set. The last ID seen is always
/// recorded, even for actions that are skipped.
/// </summary>
private void CollectPostedActions()
{
    var postedPath = Path.Combine(this.workingPath, DomainLogger.PostedFileName);
    using (var reader = XmlReader.Create(postedPath, readerSettings))
    {
        reader.Read();
        while (!reader.EOF)
        {
            // Dispatch on the element name; nameof(X) equals typeof(X).Name
            // for these non-generic action types.
            DomainActionBase actionObject;
            switch (reader.Name)
            {
                case nameof(NewRowAction):
                    actionObject = DataContractSerializerUtility.Read<NewRowAction>(reader);
                    break;
                case nameof(RemoveRowAction):
                    actionObject = DataContractSerializerUtility.Read<RemoveRowAction>(reader);
                    break;
                case nameof(SetRowAction):
                    actionObject = DataContractSerializerUtility.Read<SetRowAction>(reader);
                    break;
                case nameof(SetPropertyAction):
                    actionObject = DataContractSerializerUtility.Read<SetPropertyAction>(reader);
                    break;
                case nameof(JoinAction):
                    actionObject = DataContractSerializerUtility.Read<JoinAction>(reader);
                    break;
                case nameof(DisjoinAction):
                    actionObject = DataContractSerializerUtility.Read<DisjoinAction>(reader);
                    break;
                case nameof(KickAction):
                    actionObject = DataContractSerializerUtility.Read<KickAction>(reader);
                    break;
                case nameof(SetOwnerAction):
                    actionObject = DataContractSerializerUtility.Read<SetOwnerAction>(reader);
                    break;
                default:
                    throw new NotImplementedException();
            }
            this.lastID = actionObject.ID;
            // Only keep actions that were actually completed.
            if (!this.completedActions.Contains(actionObject.ID))
                continue;
            this.postedActions.Add(actionObject);
        }
    }
}
19
Source : JsonUtils.cs
with MIT License
from OndrejNepozitek
with MIT License
from OndrejNepozitek
/// <summary>
/// Removes every "$id" property from the JSON doreplacedent that no "$ref"
/// property refers to, returning the cleaned doreplacedent indented.
/// </summary>
private static string RemoveUnusedIds(string json)
{
    var root = JToken.Parse(json);
    // Pass 1: breadth-first walk gathering all declared "$id" and referenced "$ref" values.
    var declaredIds = new HashSet<long>();
    var referencedIds = new HashSet<long>();
    var pending = new Queue<JToken>();
    pending.Enqueue(root);
    while (pending.Count > 0)
    {
        var current = pending.Dequeue();
        if (current is JProperty prop)
        {
            if (prop.Name == "$id")
            {
                declaredIds.Add(prop.Value.Value<long>());
            }
            else if (prop.Name == "$ref")
            {
                referencedIds.Add(prop.Value.Value<long>());
            }
        }
        foreach (var child in current.Children())
        {
            pending.Enqueue(child);
        }
    }
    // Pass 2: same walk again, dropping "$id" properties nothing refers to.
    pending.Enqueue(root);
    while (pending.Count > 0)
    {
        var current = pending.Dequeue();
        if (current is JProperty prop && prop.Name == "$id" && !referencedIds.Contains(prop.Value.Value<long>()))
        {
            current.Remove();
        }
        foreach (var child in current.Children())
        {
            pending.Enqueue(child);
        }
    }
    return root.ToString(Formatting.Indented);
}
19
Source : HexViewModel.cs
with MIT License
from parezj
with MIT License
from parezj
/// <summary>
/// Writes the firmware held in <paramref name="stream"/> to FwPath in the
/// current format, first stripping padding nodes (0xFF data at addresses that
/// were filled in during load). Any failure is reported via a message box.
/// </summary>
private void SaveFile(MemoryStream stream)
{
    try
    {
        var firmware = new BSL430_NET.FirmwareTools.FwTools.Firmware(stream, this.FwInfo.Format, this.FwInfo.AddrFirst);
        if (this.FwInfo.FilledFFAddr != null && this.FwInfo.FilledFFAddr.Count > 0)
        {
            // Iterate backwards so RemoveAt does not disturb unvisited indices.
            var filledAddresses = new HashSet<long>(this.FwInfo.FilledFFAddr);
            for (int i = firmware.Nodes.Count - 1; i >= 0; i--)
            {
                if (firmware.Nodes[i].Data == 0xFF && filledAddresses.Contains(firmware.Nodes[i].Addr))
                {
                    firmware.Nodes.RemoveAt(i);
                }
            }
        }
        using (StreamWriter writer = new StreamWriter(this.FwPath, false))
        {
            writer.Write(BSL430_NET.FirmwareTools.FwTools.Create(firmware, this.FwInfo.Format, BslSettings.Instance.FwWriteLineLength));
        }
    }
    catch (Exception ex)
    {
        MessageBox.Show(ex.Message, "BSL430.NET", MessageBoxButton.OK, MessageBoxImage.Error);
    }
}
19
Source : ZoneMissionTargetObjects.cs
with MIT License
from PerpetuumOnline
with MIT License
from PerpetuumOnline
/// <summary>
/// Decides whether this target reacts to a lock event: the locked NPC must
/// belong to this mission instance and must not already be recorded as locked.
/// </summary>
protected override bool CanHandleMissionEvent(LockUnitEventInfo e)
{
    var lockedNpc = e.LockedNpc;
    // Only react to NPCs tagged with this mission's guid.
    if (MyZoneMissionInProgress.missionGuid != lockedNpc.GetMissionGuid())
    {
        return false;
    }
    // unit already locked
    if (_lockedUnits.Contains(lockedNpc.Eid))
    {
        return false;
    }
    Log("marked npc was locked " + this);
    return true;
}
See More Examples