Here are examples of the C# API System.Collections.Generic.HashSet.Add(string), taken from open source projects. By voting up, you can indicate which examples are most useful and appropriate.
3552 Examples
19
Source : Watchdog.cs
with MIT License
from 0ffffffffh
with MIT License
from 0ffffffffh
/// <summary>
/// Starts the watchdog: seeds the watch list, prepares the worker callback,
/// and arms the periodic check timer (first check after 10s, then every 30s).
/// </summary>
public void Start()
{
    Log.Info("Watchdog starting...");

    WatchdogState state = WatchdogState.Create();

    // Names of the handlers the watchdog keeps an eye on.
    watchList = new HashSet<string> { "view", "index", "add" };
    watchdogWorkerCb = new WaitCallback(WatchdogWorkerRoutine);

    WatchdogTimerState timerState = new WatchdogTimerState(state);
    this.wts = timerState;

    workTimer =
        new Timer(new TimerCallback(Check), timerState, 10 * 1000, 30 * 1000);

    Log.Info("Watchdog started");
}
19
Source : CacheManager.cs
with MIT License
from 0ffffffffh
with MIT License
from 0ffffffffh
/// <summary>
/// Adds <paramref name="key"/> to the cached key set identified by
/// <paramref name="setId"/>, creating and caching the set on first use.
/// Always returns true.
/// </summary>
internal static bool AddKey(KeysetId setId, string key)
{
    CacheKeySetContext cskc = GetKeysetContext(setId);
    HashSet<string> set;

    if (!CacheManager.TryGetCachedResult<HashSet<string>>(cskc.CacheSetKey, out set))
    {
        // Cache miss: publish a fresh set containing only this key.
        // NOTE(review): this path runs outside cskc.lck, so two concurrent
        // first-time callers can race and one key may be lost — confirm
        // whether callers tolerate that.
        set = new HashSet<string> { key };
        CacheManager.CacheObject(cskc.CacheSetKey, set);
    }
    else
    {
        lock (cskc.lck)
        {
            if (setId.ValidateKeys)
                CheckSetKeys(set);

            // HashSet<string>.Add is a no-op for duplicates, so the previous
            // Contains() pre-check was a redundant second hash lookup.
            set.Add(key);
        }
        CacheManager.CacheObject(cskc.CacheSetKey, set);
    }

    return true;
}
19
Source : EdisFace.cs
with MIT License
from 0ffffffffh
with MIT License
from 0ffffffffh
/// <summary>
/// Records <paramref name="clientIp"/> in the cached deny list
/// ("EDI_DENYLIST"), creating the list with a 3-hour lifetime on first use.
/// </summary>
private void CacheDeniedClient(string clientIp)
{
    string key = Edi.MakeUniqueCacheKey("EDI_DENYLIST");
    HashSet<string> denyHashlist = null;

    lock (lck)
    {
        if (!CacheManager.TryGetCachedResult<HashSet<string>>(
                key,
                out denyHashlist)
            )
        {
            denyHashlist = new HashSet<string>();
        }

        // Add() returns false for duplicates, so only re-cache on a new entry.
        if (denyHashlist.Add(clientIp))
        {
            CacheManager.CacheObject(key, denyHashlist, TimeSpan.FromHours(3));
        }
    }

    // BUG FIX: the original called denyHashlist.Clear() here. The cache holds
    // this same HashSet instance by reference, so clearing it immediately
    // emptied the deny list that was just cached.
}
19
Source : CelesteNetEmojiComponent.cs
with MIT License
from 0x0ade
with MIT License
from 0x0ade
/// <summary>
/// Writes a received emoji's PNG data to the temp cache directory, then
/// registers it with the Emoji system on the main thread. On registration
/// failure the cached file is deleted again.
/// </summary>
public void Handle(CelesteNetConnection con, DataNetEmoji netemoji) {
    Logger.Log(LogLevel.VVV, "netemoji", $"Received {netemoji.ID}");

    string dir = Path.Combine(Path.GetTempPath(), "CelesteNetClientEmojiCache");
    if (!Directory.Exists(dir))
        Directory.CreateDirectory(dir);

    string path = Path.Combine(dir, $"{netemoji.ID}-{netemoji.GetHashCode():X8}.png");

    // BUG FIX: File.OpenWrite does not truncate an existing file; if a longer
    // file was already at this path, stale trailing bytes would remain and
    // corrupt the PNG. File.Create truncates.
    using (FileStream fs = File.Create(path))
    using (MemoryStream ms = new(netemoji.Data))
        ms.CopyTo(fs);

    RunOnMainThread(() => {
        Logger.Log(LogLevel.VVV, "netemoji", $"Registering {netemoji.ID}");
        bool registered = false;
        try {
            VirtualTexture vt = VirtualContent.CreateTexture(path);
            MTexture mt = new(vt);
            if (vt.Texture_Safe == null) // Needed to trigger lazy loading.
                throw new Exception($"Couldn't load emoji {netemoji.ID}");
            Registered.Add(netemoji.ID);
            RegisteredFiles.Add(path);
            Emoji.Register(netemoji.ID, mt);
            Emoji.Fill(CelesteNetClientFont.Font);
            registered = true;
        } finally {
            // Don't leave a cached file behind for an emoji that failed to load.
            if (!registered)
                File.Delete(path);
        }
    });
}
19
Source : CelesteNetEmojiComponent.cs
with MIT License
from 0x0ade
with MIT License
from 0x0ade
/// <summary>
/// Writes a received emoji's PNG data to the temp cache directory, then
/// registers it with the Emoji system on the main thread. On registration
/// failure the cached file is deleted again.
/// </summary>
public void Handle(CelesteNetConnection con, DataNetEmoji netemoji) {
    Logger.Log(LogLevel.VVV, "netemoji", $"Received {netemoji.ID}");

    string dir = Path.Combine(Path.GetTempPath(), "CelesteNetClientEmojiCache");
    if (!Directory.Exists(dir))
        Directory.CreateDirectory(dir);

    string path = Path.Combine(dir, $"{netemoji.ID}-{netemoji.GetHashCode():X8}.png");

    // BUG FIX: File.OpenWrite does not truncate an existing file; if a longer
    // file was already at this path, stale trailing bytes would remain and
    // corrupt the PNG. File.Create truncates.
    using (FileStream fs = File.Create(path))
    using (MemoryStream ms = new(netemoji.Data))
        ms.CopyTo(fs);

    RunOnMainThread(() => {
        Logger.Log(LogLevel.VVV, "netemoji", $"Registering {netemoji.ID}");
        bool registered = false;
        try {
            VirtualTexture vt = VirtualContent.CreateTexture(path);
            MTexture mt = new(vt);
            if (vt.Texture_Safe == null) // Needed to trigger lazy loading.
                throw new Exception($"Couldn't load emoji {netemoji.ID}");
            Registered.Add(netemoji.ID);
            RegisteredFiles.Add(path);
            Emoji.Register(netemoji.ID, mt);
            Emoji.Fill(CelesteNetClientFont.Font);
            registered = true;
        } finally {
            // Don't leave a cached file behind for an emoji that failed to load.
            if (!registered)
                File.Delete(path);
        }
    });
}
19
Source : StringMap.cs
with MIT License
from 0x0ade
with MIT License
from 0x0ade
/// <summary>
/// Counts an observed read of <paramref name="value"/>; once it has been seen
/// PromotionCount times it is queued in Pending for ID mapping.
/// </summary>
public void CountRead(string value) {
    // Strings at or below MinLength are never worth mapping.
    if (value.Length <= MinLength)
        return;

    lock (Pending) {
        // Already mapped, or already queued for promotion — nothing to count.
        if (MappedRead.Contains(value))
            return;
        if (Pending.Contains(value))
            return;

        Counting.TryGetValue(value, out int seen);
        seen++;

        if (seen < PromotionCount) {
            Counting[value] = seen;
        } else {
            // Crossed the promotion threshold: stop counting, queue for mapping.
            Counting.Remove(value);
            Pending.Add(value);
        }

        if (Counting.Count >= MaxCounting)
            Cleanup();
    }
}
19
Source : StringMap.cs
with MIT License
from 0x0ade
with MIT License
from 0x0ade
/// <summary>
/// Assigns an ID to every pending string, records the mapping, and returns
/// the newly promoted (value, id) pairs. Pending is emptied afterwards.
/// </summary>
public List<Tuple<string, int>> PromoteRead() {
    lock (Pending) {
        // Nothing queued — hand back the shared empty list to avoid allocating.
        if (Pending.Count == 0)
            return Dummy<Tuple<string, int>>.EmptyList;

        var promoted = new List<Tuple<string, int>>();
        foreach (string value in Pending) {
            int id = NextID++;
            MapRead[id] = value;
            MappedRead.Add(value);
            promoted.Add(Tuple.Create(value, id));
        }

        Pending.Clear();
        return promoted;
    }
}
19
Source : StringMap.cs
with MIT License
from 0x0ade
with MIT License
from 0x0ade
/// <summary>
/// Ages every counted string by DemotionScore: entries that drop to zero or
/// below are removed, entries that still exceed PromotionTreshold are queued
/// in Pending, everything else keeps its reduced score.
/// </summary>
public void Cleanup() {
    lock (Pending) {
        // Pass 1: decide each entry's fate. Counting cannot be mutated while
        // enumerating, so stage the updates in the reusable key/value lists.
        // A staged value of 0 means "remove from Counting".
        foreach (KeyValuePair<string, int> entry in Counting) {
            int score = entry.Value - DemotionScore;
            int staged;

            if (score <= 0) {
                // Demoted out of existence.
                staged = 0;
            } else if (score >= PromotionTreshold) {
                // Survived long enough: promote and drop from the table.
                staged = 0;
                Pending.Add(entry.Key);
            } else {
                staged = score;
            }

            CountingUpdateKeys.Add(entry.Key);
            CountingUpdateValues.Add(staged);
        }

        // Pass 2: apply the staged updates.
        for (int i = 0; i < CountingUpdateKeys.Count; i++) {
            string key = CountingUpdateKeys[i];
            int staged = CountingUpdateValues[i];
            if (staged == 0)
                Counting.Remove(key);
            else
                Counting[key] = staged;
        }

        CountingUpdateKeys.Clear();
        CountingUpdateValues.Clear();
    }
}
19
Source : XnaToFnaUtil.cs
with zlib License
from 0x0ade
with zlib License
from 0x0ade
// Scans a path for CLR assemblies to process. A directory is registered as a
// dependency search dir and recursed into; a ".xex" container is unpacked to
// its inner file; ".dll"/".exe" files are loaded with Cecil and either queued
// in Modules for relinking, used as XNA->FNA mapping targets, or disposed.
public void ScanPath(string path) {
    if (Directory.Exists(path)) {
        // Use the directory as "dependency directory" and scan in it.
        if (Directories.Contains(path))
            // No need to scan the dir if the dir is scanned...
            return;
        RestoreBackup(path);
        Log($"[ScanPath] Scanning directory {path}");
        Directories.Add(path);
        replacedemblyResolver.AddSearchDirectory(path); // Needs to be added manually as DependencyDirs was already added
        // Most probably the actual game directory - let's just copy XnaToFna.exe to there to be referenced properly.
        string xtfPath = Path.Combine(path, Path.GetFileName(Thisreplacedembly.Location));
        if (Path.GetDirectoryName(Thisreplacedembly.Location) != path) {
            Log($"[ScanPath] Found separate game directory - copying XnaToFna.exe and FNA.dll");
            File.Copy(Thisreplacedembly.Location, xtfPath, true);
            // Bring the debug symbols (pdb or mdb) along when present; mdb wins
            // if both exist.
            string dbExt = null;
            if (File.Exists(Path.ChangeExtension(Thisreplacedembly.Location, "pdb")))
                dbExt = "pdb";
            if (File.Exists(Path.ChangeExtension(Thisreplacedembly.Location, "mdb")))
                dbExt = "mdb";
            if (dbExt != null)
                File.Copy(Path.ChangeExtension(Thisreplacedembly.Location, dbExt), Path.ChangeExtension(xtfPath, dbExt), true);
            // FNA.dll may ship renamed as FNA.dll.tmp next to the tool.
            if (File.Exists(Path.Combine(Path.GetDirectoryName(Thisreplacedembly.Location), "FNA.dll")))
                File.Copy(Path.Combine(Path.GetDirectoryName(Thisreplacedembly.Location), "FNA.dll"), Path.Combine(path, "FNA.dll"), true);
            else if (File.Exists(Path.Combine(Path.GetDirectoryName(Thisreplacedembly.Location), "FNA.dll.tmp")))
                File.Copy(Path.Combine(Path.GetDirectoryName(Thisreplacedembly.Location), "FNA.dll.tmp"), Path.Combine(path, "FNA.dll"), true);
        }
        // Recurse into every file of the directory.
        ScanPaths(Directory.GetFiles(path));
        return;
    }
    // A sibling "<path>.xex" exists: defer to the .xex handling instead of
    // processing this file directly.
    if (File.Exists(path + ".xex")) {
        if (!ExtractedXEX.Contains(path)) {
            // Remove the original file - let XnaToFna unpack and handle it later.
            File.Delete(path);
        } else {
            // XnaToFna will handle the .xex instead.
        }
        return;
    }
    if (path.EndsWith(".xex")) {
        // Unpack "foo.exe.xex" to "foo.exe"; a bare "foo.xex" (no inner
        // extension) is skipped.
        string pathTarget = path.Substring(0, path.Length - 4);
        if (string.IsNullOrEmpty(Path.GetExtension(pathTarget)))
            return;
        using (Stream streamXEX = File.OpenRead(path))
        using (BinaryReader reader = new BinaryReader(streamXEX))
        using (Stream streamRAW = File.OpenWrite(pathTarget)) {
            XEXImageData data = new XEXImageData(reader);
            int offset = 0;
            int size = data.m_memorySize;
            // Check if this file is a PE containing an embedded PE.
            if (data.m_memorySize > 0x10000) { // One default segment alignment.
                using (MemoryStream streamMEM = new MemoryStream(data.m_memoryData))
                using (BinaryReader mem = new BinaryReader(streamMEM)) {
                    if (mem.ReadUInt32() != 0x00905A4D) // MZ
                        goto WriteRaw;
                    // This is horrible.
                    streamMEM.Seek(0x00000280, SeekOrigin.Begin);
                    if (mem.ReadUInt64() != 0x000061746164692E) // ".idata\0\0"
                        goto WriteRaw;
                    streamMEM.Seek(0x00000288, SeekOrigin.Begin);
                    mem.ReadInt32(); // Virtual size; It's somewhat incorrect?
                    offset = mem.ReadInt32(); // Virtual offset.
                    // mem.ReadInt32(); // Raw size; Still incorrect.
                    // Let's just write everything...
                    size = data.m_memorySize - offset;
                }
            }
            WriteRaw:
            streamRAW.Write(data.m_memoryData, offset, size);
        }
        path = pathTarget;
        ExtractedXEX.Add(pathTarget);
    } else if (!path.EndsWith(".dll") && !path.EndsWith(".exe"))
        return;
    // Check if .dll is CLR replacedembly
    replacedemblyName name;
    try {
        name = replacedemblyName.GetreplacedemblyName(path);
    } catch {
        // Not a managed assembly (native dll or junk) — ignore it.
        return;
    }
    ReaderParameters modReaderParams = Modder.GenReaderParameters(false);
    // Don't ReadWrite if the module being read is XnaToFna or a relink target.
    bool isReadWrite =
#if !CECIL0_9
        modReaderParams.ReadWrite =
#endif
        path != Thisreplacedembly.Location &&
        !Mappings.Exists(mappings => name.Name == mappings.Target);
    // Only read debug info if it exists
    if (!File.Exists(path + ".mdb") && !File.Exists(Path.ChangeExtension(path, "pdb")))
        modReaderParams.ReadSymbols = false;
    Log($"[ScanPath] Checking replacedembly {name.Name} ({(isReadWrite ? "rw" : "r-")})");
    ModuleDefinition mod;
    try {
        mod = MonoModExt.ReadModule(path, modReaderParams);
    } catch (Exception e) {
        Log($"[ScanPath] WARNING: Cannot load replacedembly: {e}");
        return;
    }
    // Read-only modules (relink targets / the tool itself) are kept by default.
    bool add = !isReadWrite || name.Name == ThisreplacedemblyName;
    if ((mod.Attributes & ModuleAttributes.ILOnly) != ModuleAttributes.ILOnly) {
        // Mono.Cecil can't handle mixed mode replacedemblies.
        Log($"[ScanPath] WARNING: Cannot handle mixed mode replacedembly {name.Name}");
        if (MixedDeps == MixedDepAction.Stub) {
            ModulesToStub.Add(mod);
            add = true;
        } else {
            if (MixedDeps == MixedDepAction.Remove) {
                RemoveDeps.Add(name.Name);
            }
#if !CECIL0_9
            mod.Dispose();
#endif
            return;
        }
    }
    if (add && !isReadWrite) { // XNA replacement
        // This module is a mapping target: activate the mapping and relink all
        // of its source names to this module.
        foreach (XnaToFnaMapping mapping in Mappings)
            if (name.Name == mapping.Target) {
                mapping.IsActive = true;
                mapping.Module = mod;
                foreach (string from in mapping.Sources) {
                    Log($"[ScanPath] Mapping {from} -> {name.Name}");
                    Modder.RelinkModuleMap[from] = mod;
                }
            }
    } else if (!add) {
        // Not a target, not the tool: keep it only if it references any mapped
        // source assembly (i.e. it actually needs XNA->FNA processing).
        foreach (XnaToFnaMapping mapping in Mappings)
            if (mod.replacedemblyReferences.Any(dep => mapping.Sources.Contains(dep.Name))) {
                add = true;
                Log($"[ScanPath] XnaToFna-ing {name.Name}");
                goto BreakMappings;
            }
    }
    BreakMappings:
    if (add) {
        Modules.Add(mod);
        ModulePaths[mod] = path;
    } else {
#if !CECIL0_9
        mod.Dispose();
#endif
    }
}
19
Source : FilenameProvider.cs
with MIT License
from 0xd4d
with MIT License
from 0xd4d
/// <summary>
/// Builds a unique, filesystem-safe output path for a member, formatted per
/// <c>filenameFormat</c> and de-duplicated against previously issued names.
/// </summary>
public string GetFilename(uint token, string name) {
    string fileName;
    switch (filenameFormat) {
        case FilenameFormat.MemberName:
            fileName = name;
            break;
        case FilenameFormat.TokenMemberName:
            fileName = token.ToString("X8") + "_" + name;
            break;
        case FilenameFormat.Token:
            fileName = token.ToString("X8");
            break;
        default:
            throw new ArgumentOutOfRangeException(nameof(filenameFormat));
    }

    if (fileName == string.Empty)
        fileName = "<UNKNOWN>";

    fileName = ReplaceInvalidFilenameChars(fileName);

    // Keep names within the filesystem limit; a trailing "-" marks truncation.
    if (fileName.Length > MAX_NAME_LEN)
        fileName = fileName.Substring(0, MAX_NAME_LEN) + "-";

    // Add() returns false when the name is taken; disambiguate with an
    // increasing numeric suffix until a free name is claimed.
    if (!usedFilenames.Add(fileName)) {
        int suffix = 0;
        string unique;
        do {
            suffix++;
            unique = fileName + "_" + suffix.ToString();
        } while (!usedFilenames.Add(unique));
        fileName = unique;
    }

    return Path.Combine(outputDir, fileName + extension);
}
19
Source : IrdProvider.cs
with MIT License
from 13xforever
with MIT License
from 13xforever
/// <summary>
/// Enumerates disc keys for <paramref name="ProductCode"/>: first from locally
/// cached IRD files in <paramref name="discKeyCachePath"/>, then by querying
/// the IRD Library for matching files not already cached.
/// </summary>
public async Task<HashSet<DiscKeyInfo>> EnumerateAsync(string discKeyCachePath, string ProductCode, CancellationToken cancellationToken)
{
    ProductCode = ProductCode?.ToUpperInvariant();
    var result = new HashSet<DiscKeyInfo>();
    // Filenames are compared case-insensitively against library results.
    var knownFilenames = new HashSet<string>(StringComparer.InvariantCultureIgnoreCase);

    Log.Trace("Searching local cache for a match...");
    if (Directory.Exists(discKeyCachePath))
    {
        var matchingIrdFiles = Directory.GetFiles(discKeyCachePath, "*.ird", SearchOption.TopDirectoryOnly);
        foreach (var irdFile in matchingIrdFiles)
        {
            try
            {
                try
                {
                    var ird = IrdParser.Parse(File.ReadAllBytes(irdFile));
                    result.Add(new DiscKeyInfo(ird.Data1, null, irdFile, KeyType.Ird, ird.Crc32.ToString("x8")));
                    knownFilenames.Add(Path.GetFileName(irdFile));
                }
                catch (InvalidDataException)
                {
                    // Corrupt cache entry — delete it so it can be re-downloaded.
                    File.Delete(irdFile);
                    continue;
                }
                catch (Exception e)
                {
                    Log.Warn(e);
                    continue;
                }
            }
            catch (Exception e)
            {
                // Outer guard covers failures in the cleanup path itself.
                Log.Warn(e, e.Message);
            }
        }
    }

    Log.Trace("Searching IRD Library for match...");
    var irdInfoList = await Client.SearchAsync(ProductCode, cancellationToken).ConfigureAwait(false);
    // BUG FIX: guard Substring(0, 9) with a length check — a filename shorter
    // than 9 characters made the original predicate throw
    // ArgumentOutOfRangeException mid-enumeration.
    var irdList = irdInfoList?.Data?.Where(
            i => !knownFilenames.Contains(i.Filename)
                 && i.Filename.Length >= 9
                 && i.Filename.Substring(0, 9).ToUpperInvariant() == ProductCode
        ).ToList() ?? new List<SearchResulreplacedem>(0);

    if (irdList.Count == 0)
        Log.Debug("No matching IRD file was found in the Library");
    else
    {
        Log.Info($"Found {irdList.Count} new match{(irdList.Count == 1 ? "" : "es")} in the IRD Library");
        foreach (var irdInfo in irdList)
        {
            var ird = await Client.DownloadAsync(irdInfo, discKeyCachePath, cancellationToken).ConfigureAwait(false);
            result.Add(new DiscKeyInfo(ird.Data1, null, Path.Combine(discKeyCachePath, irdInfo.Filename), KeyType.Ird, ird.Crc32.ToString("x8")));
            knownFilenames.Add(irdInfo.Filename);
        }
    }

    if (knownFilenames.Count == 0)
    {
        Log.Warn("No valid matching IRD file could be found");
        Log.Info($"If you have matching IRD file, you can put it in '{discKeyCachePath}' and try dumping the disc again");
    }

    Log.Info($"Found {result.Count} IRD files");
    return result;
}
19
Source : IrdProvider.cs
with MIT License
from 13xforever
with MIT License
from 13xforever
/// <summary>
/// Enumerates disc keys for <paramref name="ProductCode"/>: first from locally
/// cached IRD files in <paramref name="discKeyCachePath"/>, then by querying
/// the IRD Library for matching files not already cached.
/// </summary>
public async Task<HashSet<DiscKeyInfo>> EnumerateAsync(string discKeyCachePath, string ProductCode, CancellationToken cancellationToken)
{
    ProductCode = ProductCode?.ToUpperInvariant();
    var result = new HashSet<DiscKeyInfo>();
    // Filenames are compared case-insensitively against library results.
    var knownFilenames = new HashSet<string>(StringComparer.InvariantCultureIgnoreCase);

    Log.Trace("Searching local cache for a match...");
    if (Directory.Exists(discKeyCachePath))
    {
        var matchingIrdFiles = Directory.GetFiles(discKeyCachePath, "*.ird", SearchOption.TopDirectoryOnly);
        foreach (var irdFile in matchingIrdFiles)
        {
            try
            {
                try
                {
                    var ird = IrdParser.Parse(File.ReadAllBytes(irdFile));
                    result.Add(new DiscKeyInfo(ird.Data1, null, irdFile, KeyType.Ird, ird.Crc32.ToString("x8")));
                    knownFilenames.Add(Path.GetFileName(irdFile));
                }
                catch (InvalidDataException)
                {
                    // Corrupt cache entry — delete it so it can be re-downloaded.
                    File.Delete(irdFile);
                    continue;
                }
                catch (Exception e)
                {
                    Log.Warn(e);
                    continue;
                }
            }
            catch (Exception e)
            {
                // Outer guard covers failures in the cleanup path itself.
                Log.Warn(e, e.Message);
            }
        }
    }

    Log.Trace("Searching IRD Library for match...");
    var irdInfoList = await Client.SearchAsync(ProductCode, cancellationToken).ConfigureAwait(false);
    // BUG FIX: guard Substring(0, 9) with a length check — a filename shorter
    // than 9 characters made the original predicate throw
    // ArgumentOutOfRangeException mid-enumeration.
    var irdList = irdInfoList?.Data?.Where(
            i => !knownFilenames.Contains(i.Filename)
                 && i.Filename.Length >= 9
                 && i.Filename.Substring(0, 9).ToUpperInvariant() == ProductCode
        ).ToList() ?? new List<SearchResulreplacedem>(0);

    if (irdList.Count == 0)
        Log.Debug("No matching IRD file was found in the Library");
    else
    {
        Log.Info($"Found {irdList.Count} new match{(irdList.Count == 1 ? "" : "es")} in the IRD Library");
        foreach (var irdInfo in irdList)
        {
            var ird = await Client.DownloadAsync(irdInfo, discKeyCachePath, cancellationToken).ConfigureAwait(false);
            result.Add(new DiscKeyInfo(ird.Data1, null, Path.Combine(discKeyCachePath, irdInfo.Filename), KeyType.Ird, ird.Crc32.ToString("x8")));
            knownFilenames.Add(irdInfo.Filename);
        }
    }

    if (knownFilenames.Count == 0)
    {
        Log.Warn("No valid matching IRD file could be found");
        Log.Info($"If you have matching IRD file, you can put it in '{discKeyCachePath}' and try dumping the disc again");
    }

    Log.Info($"Found {result.Count} IRD files");
    return result;
}
19
Source : LProjectDependencies.cs
with MIT License
from 3F
with MIT License
from 3F
// Parses a "Project(...)" entry from an .sln file together with its optional
// ProjectSection(ProjectDependencies) block, recording the project and its
// dependency GUIDs into `map`. Returns false when the current line is not a
// valid project entry.
public override bool Positioned(ISvc svc, RawText line)
{
    var pItem = GetProjecreplacedem(line.trimmed, svc.Sln.SolutionDir);
    if(pItem.pGuid == null) {
        return false;
    }

    Projects[pItem.pGuid] = pItem;
    map[pItem.pGuid] = new HashSet<string>();

    // Consume lines until "EndProject", looking for the dependencies section.
    while((line = svc.ReadLine(this)) != null && (line != "EndProject"))
    {
        if(!line.trimmed.StartsWith("ProjectSection(ProjectDependencies)", StringComparison.Ordinal)) {
            continue;
        }

        // Inner loop: collect each "{guid} = {guid}" property line until
        // "EndProjectSection" (or EOF).
        for(line = svc.ReadLine(this); line != null; line = svc.ReadLine(this))
        {
            if(line.trimmed.StartsWith("EndProjectSection", StringComparison.Ordinal)) {
                break;
            }

            map[pItem.pGuid].Add(
                FormatGuid(RPatterns.PropertyLine.Match(line).Groups["PName"].Value)
            );
        }
    }

    svc.Sln.SetProjectDependencies(this);
    return true;
}
19
Source : FuzzerShould.cs
with Apache License 2.0
from 42skillz
with Apache License 2.0
from 42skillz
/// <summary>
/// Registers a hotel location for the group being built. Duplicate cities are
/// silently ignored by the underlying set. Returns this builder for chaining.
/// </summary>
public HotelGroupBuilder WithHotelIn(string city)
{
    _ = _cities.Add(city);
    return this;
}
19
Source : ExcelExportUtil.cs
with MIT License
from 404Lcc
with MIT License
from 404Lcc
/// <summary>
/// Reads column metadata from a sheet into <paramref name="cellList"/>.
/// Row layout: row 0 = attribute, row 1 = field name, row 2 = type,
/// row 3 = description. <paramref name="uniqes"/> de-duplicates field names
/// across sheets.
/// </summary>
public static void ExportSheetClreplaced(ISheet sheet, List<Cell> cellList, HashSet<string> uniqes)
{
    if (sheet.GetRow(1) == null)
        return;

    for (int col = 0; col < sheet.GetRow(1).LastCellNum; col++)
    {
        string fieldName = GetCell(sheet, 1, col);

        // Skip blank headers, and names already exported (Add returns false).
        if (string.IsNullOrEmpty(fieldName) || !uniqes.Add(fieldName))
            continue;

        string fieldAttribute = GetCell(sheet, 0, col);
        string fieldType = GetCell(sheet, 2, col);
        string fieldDesc = GetCell(sheet, 3, col);
        cellList.Add(new Cell(fieldAttribute, fieldName, fieldType, fieldDesc));
    }
}
19
Source : MultiTenantMigrateExecuter.cs
with MIT License
from 52ABP
with MIT License
from 52ABP
// Migrates the host database and then every tenant database (optionally after
// an interactive Y/N confirmation). Tenants sharing a connection string are
// migrated only once. Returns false if the host-side steps fail or the user
// cancels; tenant failures are logged and skipped.
public bool Run(bool skipConnVerification)
{
    var hostConnStr = CensorConnectionString(_connectionStringResolver.GetNameOrConnectionString(new ConnectionStringResolveArgs(MulreplacedenancySides.Host)));
    if (hostConnStr.IsNullOrWhiteSpace())
    {
        _log.Write("Configuration file should contain a connection string named 'Default'");
        return false;
    }

    _log.Write("Host database: " + ConnectionStringHelper.GetConnectionString(hostConnStr));

    // Interactive safety check unless the caller explicitly skips it.
    if (!skipConnVerification)
    {
        _log.Write("Continue to migration for this host database and all tenants..? (Y/N): ");
        var command = Console.ReadLine();
        if (!command.IsIn("Y", "y"))
        {
            _log.Write("Migration canceled.");
            return false;
        }
    }

    _log.Write("HOST database migration started...");
    try
    {
        _migrator.CreateOrMigrateForHost(SeedHelper.SeedHostDb);
    }
    catch (Exception ex)
    {
        // A broken host DB makes tenant migrations pointless — abort entirely.
        _log.Write("An error occured during migration of host database:");
        _log.Write(ex.ToString());
        _log.Write("Canceled migrations.");
        return false;
    }
    _log.Write("HOST database migration completed.");
    _log.Write("--------------------------------------------------------");

    // Tracks connection strings already migrated, so tenants sharing one
    // database are not migrated twice.
    var migratedDatabases = new HashSet<string>();
    var tenants = _tenantRepository.GetAllList(t => t.ConnectionString != null && t.ConnectionString != "");
    for (var i = 0; i < tenants.Count; i++)
    {
        var tenant = tenants[i];
        _log.Write(string.Format("Tenant database migration started... ({0} / {1})", (i + 1), tenants.Count));
        _log.Write("Name : " + tenant.Name);
        _log.Write("TenancyName : " + tenant.TenancyName);
        _log.Write("Tenant Id : " + tenant.Id);
        _log.Write("Connection string : " + SimpleStringCipher.Instance.Decrypt(tenant.ConnectionString));

        if (!migratedDatabases.Contains(tenant.ConnectionString))
        {
            try
            {
                _migrator.CreateOrMigrateForTenant(tenant);
            }
            catch (Exception ex)
            {
                // Tenant failures are non-fatal: log and continue with the rest.
                _log.Write("An error occured during migration of tenant database:");
                _log.Write(ex.ToString());
                _log.Write("Skipped this tenant and will continue for others...");
            }
            // NOTE(review): the connection string is marked migrated even when
            // the migration above threw, so a failed database is not retried
            // for other tenants sharing it — confirm this is intentional.
            migratedDatabases.Add(tenant.ConnectionString);
        }
        else
        {
            _log.Write("This database has already migrated before (you have more than one tenant in same database). Skipping it....");
        }

        _log.Write(string.Format("Tenant database migration completed. ({0} / {1})", (i + 1), tenants.Count));
        _log.Write("--------------------------------------------------------");
    }

    _log.Write("All databases have been migrated.");
    return true;
}
19
Source : Menu_PackageExporter.cs
with MIT License
from 5minlab
with MIT License
from 5minlab
/// <summary>
/// Returns the paths of all ".cs" assets under "replacedets/Minamo",
/// de-duplicated.
/// </summary>
public string[] GetList() {
    var dirs = new string[] {
        "replacedets/Minamo"
    };

    // A set de-duplicates paths in case the asset database reports one twice.
    var scripts = new HashSet<string>();
    foreach (var guid in replacedetDatabase.Findreplacedets("", dirs)) {
        var replacedetPath = replacedetDatabase.GUIDToreplacedetPath(guid);
        if (replacedetPath.EndsWith(".cs"))
            scripts.Add(replacedetPath);
    }

    var result = new string[scripts.Count];
    scripts.CopyTo(result);
    return result;
}
19
Source : UnityARUserAnchorExample.cs
with MIT License
from 734843327
with MIT License
from 734843327
// Unity frame callback: spawns the anchor prefab in front of the camera on
// each new touch, and every ~4 seconds asks ARKit to remove one tracked
// anchor.
void Update () {
    if (Input.touchCount > 0 && Input.GetTouch(0).phase == TouchPhase.Began)
    {
        GameObject clone = Instantiate(prefabObject, Camera.main.transform.position + (this.distanceFromCamera * Camera.main.transform.forward), Quaternion.idenreplacedy);
        UnityARUserAnchorComponent component = clone.GetComponent<UnityARUserAnchorComponent>();
        m_Clones.Add(component.AnchorId);
        // Spawning resets the removal countdown.
        m_TimeUntilRemove = 4.0f;
    }

    // Just remove anchors after a certain amount of time, for example's sake.
    m_TimeUntilRemove -= Time.deltaTime;
    if (m_TimeUntilRemove <= 0.0f)
    {
        // Only the first anchor in the set is removed per cycle; `break`
        // leaves the loop before any further enumeration.
        // NOTE(review): m_Clones is not shrunk here — removal from the set
        // presumably happens in an anchor-removed callback; confirm, or the
        // same ID is requested for removal every cycle.
        foreach (string id in m_Clones)
        {
            Console.WriteLine("Removing anchor with id: " + id);
            UnityARSessionNativeInterface.GetARSessionNativeInterface().RemoveUserAnchor(id);
            break;
        }
        m_TimeUntilRemove = 4.0f;
    }
}
19
Source : SkinnedMeshCombiner.cs
with Apache License 2.0
from A7ocin
with Apache License 2.0
from A7ocin
// Pre-pass over the combine sources: accumulates vertex/bind-pose/bone totals,
// per-target-submesh triangle counts, the union of mesh component flags, and
// the number of unique blend-shape names across all sources.
private static void replacedyzeSources(CombineInstance[] sources, int[] subMeshTriangleLength, ref int vertexCount, ref int bindPoseCount, ref int transformHierarchyCount, ref MeshComponents meshComponents, ref int blendShapeCount)
{
    HashSet<string> blendShapeNames = new HashSet<string> (); //Hash to find all the unique blendshape names

    // Reset the caller-supplied per-submesh counters before accumulating.
    for (int i = 0; i < subMeshTriangleLength.Length; i++)
    {
        subMeshTriangleLength[i] = 0;
    }

    foreach (var source in sources)
    {
        vertexCount += source.meshData.vertices.Length;
        bindPoseCount += source.meshData.bindPoses.Length;
        transformHierarchyCount += source.meshData.umaBones.Length;

        // A component flag is set as soon as ANY source carries that data.
        if (source.meshData.normals != null && source.meshData.normals.Length != 0) meshComponents |= MeshComponents.has_normals;
        if (source.meshData.tangents != null && source.meshData.tangents.Length != 0) meshComponents |= MeshComponents.has_tangents;
        if (source.meshData.uv != null && source.meshData.uv.Length != 0) meshComponents |= MeshComponents.has_uv;
        if (source.meshData.uv2 != null && source.meshData.uv2.Length != 0) meshComponents |= MeshComponents.has_uv2;
        if (source.meshData.uv3 != null && source.meshData.uv3.Length != 0) meshComponents |= MeshComponents.has_uv3;
        if (source.meshData.uv4 != null && source.meshData.uv4.Length != 0) meshComponents |= MeshComponents.has_uv4;
        if (source.meshData.colors32 != null && source.meshData.colors32.Length != 0) meshComponents |= MeshComponents.has_colors32;
        if (source.meshData.clothSkinningSerialized != null && source.meshData.clothSkinningSerialized.Length != 0) meshComponents |= MeshComponents.has_clothSkinning;

        //If we find a blendshape on this mesh then lets add it to the blendShapeNames hash to get all the unique names
        if (source.meshData.blendShapes != null && source.meshData.blendShapes.Length != 0)
        {
            for (int shapeIndex = 0; shapeIndex < source.meshData.blendShapes.Length; shapeIndex++)
                blendShapeNames.Add (source.meshData.blendShapes [shapeIndex].shapeName);
        }

        // Triangles masked out by triangleMask are subtracted from the total
        // (each masked bit removes one triangle = 3 indices).
        for (int i = 0; i < source.meshData.subMeshCount; i++)
        {
            if (source.targetSubmeshIndices[i] >= 0)
            {
                int triangleLength = (source.triangleMask == null) ? source.meshData.submeshes[i].triangles.Length :
                    (source.meshData.submeshes[i].triangles.Length - (UMAUtils.GetCardinality(source.triangleMask[i]) * 3));
                subMeshTriangleLength[source.targetSubmeshIndices[i]] += triangleLength;
            }
        }
    }

    //If our blendshape hash has at least 1 name, then we have a blendshape!
    if (blendShapeNames.Count > 0)
    {
        blendShapeCount = blendShapeNames.Count;
        meshComponents |= MeshComponents.has_blendShapes;
    }
}
19
Source : TfsLogWriter.cs
with MIT License
from aabiryukov
with MIT License
from aabiryukov
// Exports TFS history for `serverPath` (within the settings' date range) into
// a Gource-format log file at `outputFile`. Display names of committers whose
// changesets survived filtering are appended to `outputCommiters` when it is
// non-null. Returns true when at least one log line was written; `cancel` is
// polled between expensive steps.
private static bool CreateGourceLogFile(
    string outputFile,
    HashSet<string> outputCommiters,
    VersionControlServer vcs,
    string serverPath,
    VisualizationSettings settings,
    ref bool cancel,
    Action<int> progressReporter)
{
    // int latestChangesetId = vcs.GetLatestChangesetId();
    if (cancel) return false;

    var versionFrom = new DateVersionSpec(settings.DateFrom);
    var versionTo = new DateVersionSpec(settings.DateTo);

    int latestChangesetId;

    // Getting latest changeset ID for current search criteria
    {
        // maxCount = 1 and no change details: we only need the newest ID to
        // scale the progress bar.
        var latestChanges = vcs.QueryHistory(
            serverPath,
            VersionSpec.Latest,
            0,
            RecursionType.Full,
            null, //any user
            versionFrom, // from first changeset
            versionTo, // to last changeset
            1,
            false, // with changes
            false,
            false,
            false); // sorted

        var latestChangeset = latestChanges.Cast<Changeset>().FirstOrDefault();
        if (latestChangeset == null)
        {
            // History not found
            return false;
        }

        latestChangesetId = latestChangeset.ChangesetId;
        if (cancel) return false; //-V3022
    }

    var firstChangesetId = 0;
    var changesetConverter = new ChangesetConverter(settings.UsersFilter, settings.FilesFilter);

    using (var writer = new StreamWriter(outputFile))
    {
        // Full history this time, with change details, sorted.
        var csList = vcs.QueryHistory(
            serverPath,
            VersionSpec.Latest,
            0,
            RecursionType.Full,
            null, //any user
            versionFrom, // from first changeset
            versionTo, // to last changeset
            int.MaxValue,
            true, // with changes
            false,
            false,
            true); // sorted

        var hasLines = false;
        foreach (var changeset in csList.Cast<Changeset>())
        {
            if (cancel) return false; //-V3022

            // Progress is estimated from the changeset ID range, not the count.
            if (firstChangesetId == 0) firstChangesetId = changeset.ChangesetId;
            if (progressReporter != null)
            {
                var progressValue = changeset.ChangesetId - firstChangesetId;
                var progressTotal = latestChangesetId - firstChangesetId;
                progressReporter(progressTotal > 0 ? progressValue * 100 / progressTotal : 100);
            }

            // A changeset is "useful" when the converter emits at least one line
            // after applying the user/file filters.
            var usefulChangeset = false;
            foreach (var line in changesetConverter.GetLogLines(changeset))
            {
                usefulChangeset = true;
                writer.WriteLine(line);
            }

            if (usefulChangeset)
            {
                hasLines = true;
                if (outputCommiters != null)
                    outputCommiters.Add(changeset.OwnerDisplayName);
            }
        }

        return hasLines;
    }
}
19
Source : UMACrowdRandomSet.cs
with Apache License 2.0
from A7ocin
with Apache License 2.0
from A7ocin
// Builds umaData's slot list for a randomized crowd character: for each slot
// element of the race it rolls a random slot (honoring requirement/condition
// keywords), instantiates it, then rolls and colors each of its overlays per
// the overlay's type (fixed color, texture, hair, skin, or randomized).
public static void Apply(UMA.UMAData umaData, CrowdRaceData race, Color skinColor, Color HairColor, Color Shine, HashSet<string> Keywords, SlotLibraryBase slotLibrary, OverlayLibraryBase overlayLibrary)
{
    // Records every chosen slot/overlay ID so later elements can depend on
    // earlier picks via `requirement`.
    var slotParts = new HashSet<string>();
    umaData.umaRecipe.slotDataList = new SlotData[race.slotElements.Length];

    for (int i = 0; i < race.slotElements.Length; i++)
    {
        var currentElement = race.slotElements[i];

        // Skip when a required earlier part was not selected.
        if (!string.IsNullOrEmpty(currentElement.requirement) && !slotParts.Contains(currentElement.requirement)) continue;

        // Keyword condition; a leading "!" negates it.
        if (!string.IsNullOrEmpty(currentElement.condition))
        {
            if (currentElement.condition.StartsWith("!"))
            {
                if (Keywords.Contains(currentElement.condition.Substring(1))) continue;
            }
            else
            {
                if (!Keywords.Contains(currentElement.condition)) continue;
            }
        }

        if (currentElement.possibleSlots.Length == 0) continue;

        // Roll one of the possible slots for this element.
        int randomResult = Random.Range(0, currentElement.possibleSlots.Length);
        var slot = currentElement.possibleSlots[randomResult];
        if (string.IsNullOrEmpty(slot.slotID)) continue;
        slotParts.Add(slot.slotID);

        SlotData slotData;
        if (slot.useSharedOverlayList && slot.overlayListSource >= 0 && slot.overlayListSource < i)
        {
            // Share the overlay list of an earlier slot instead of creating one.
            slotData = slotLibrary.InstantiateSlot(slot.slotID, umaData.umaRecipe.slotDataList[slot.overlayListSource].GetOverlayList());
        }
        else
        {
            if (slot.useSharedOverlayList)
            {
                Debug.LogError("UMA Crowd: Invalid overlayListSource for " + slot.slotID);
            }
            slotData = slotLibrary.InstantiateSlot(slot.slotID);
        }
        umaData.umaRecipe.slotDataList[i] = slotData;

        // Roll and color one overlay per overlay element of the chosen slot.
        for (int overlayIdx = 0; overlayIdx < slot.overlayElements.Length; overlayIdx++)
        {
            var currentOverlayElement = slot.overlayElements[overlayIdx];
            randomResult = Random.Range(0, currentOverlayElement.possibleOverlays.Length);
            var overlay = currentOverlayElement.possibleOverlays[randomResult];
            if (string.IsNullOrEmpty(overlay.overlayID)) continue;
            overlay.UpdateVersion();
            slotParts.Add(overlay.overlayID);

            Color overlayColor = Color.black;
            var overlayData = overlayLibrary.InstantiateOverlay(overlay.overlayID, overlayColor);

            // The overlay type decides how the final color is produced.
            switch (overlay.overlayType)
            {
                case UMACrowdRandomSet.OverlayType.Color:
                    // Fixed color from the overlay definition.
                    overlayColor = overlay.minRGB;
                    overlayData.colorData.color = overlayColor;
                    break;
                case UMACrowdRandomSet.OverlayType.Texture:
                    // Untinted texture.
                    overlayColor = Color.white;
                    overlayData.colorData.color = overlayColor;
                    break;
                case UMACrowdRandomSet.OverlayType.Hair:
                    // Shared hair color scaled per overlay, forced opaque.
                    overlayColor = HairColor * overlay.hairColorMultiplier;
                    overlayColor.a = 1.0f;
                    overlayData.colorData.color = overlayColor;
                    break;
                case UMACrowdRandomSet.OverlayType.Skin:
                    overlayColor = skinColor;// + new Color(Random.Range(overlay.minRGB.r, overlay.maxRGB.r), Random.Range(overlay.minRGB.g, overlay.maxRGB.g), Random.Range(overlay.minRGB.b, overlay.maxRGB.b), 1);
                    overlayData.colorData.color = overlayColor;
                    // Channel 2 of the additive mask carries the shine tint.
                    if (overlayData.colorData.channelAdditiveMask.Length > 2)
                    {
                        overlayData.colorData.channelAdditiveMask[2] = Shine;
                    }
                    else
                    {
                        break;
                    }
                    break;
                case UMACrowdRandomSet.OverlayType.Random:
                    {
                        // Random color within the overlay's min/max RGBA range,
                        // plus random metal/shine in additive channel 2.
                        float randomShine = Random.Range(0.05f, 0.25f);
                        float randomMetal = Random.Range(0.1f, 0.3f);
                        overlayColor = new Color(Random.Range(overlay.minRGB.r, overlay.maxRGB.r), Random.Range(overlay.minRGB.g, overlay.maxRGB.g), Random.Range(overlay.minRGB.b, overlay.maxRGB.b), Random.Range(overlay.minRGB.a, overlay.maxRGB.a));
                        overlayData.colorData.color = overlayColor;
                        if (overlayData.colorData.channelAdditiveMask.Length > 2)
                        {
                            overlayData.colorData.channelAdditiveMask[2] = new Color(randomMetal, randomMetal, randomMetal, randomShine);
                        }
                    }
                    break;
                default:
                    Debug.LogError("Unknown RandomSet overlayType: "+((int)overlay.overlayType));
                    overlayColor = overlay.minRGB;
                    overlayData.colorData.color = overlayColor;
                    break;
            }
            slotData.AddOverlay(overlayData);

            // Optional secondary color channel, possibly with inverted RGB.
            if (overlay.colorChannelUse != ChannelUse.None)
            {
                overlayColor.a *= overlay.minRGB.a;
                if (overlay.colorChannelUse == ChannelUse.InverseColor)
                {
                    Vector3 color = new Vector3(overlayColor.r, overlayColor.g, overlayColor.b);
                    var len = color.magnitude;
                    if (len < 1f) len = 1f;
                    color = new Vector3(1.001f, 1.001f, 1.001f) - color;
                    color = color.normalized* len;
                    overlayColor = new Color(color.x, color.y, color.z, overlayColor.a);
                }
                overlayData.SetColor(overlay.colorChannel, overlayColor);
            }
        }
    }
}
19
Source : MixedRealityToolkitFiles.cs
with Apache License 2.0
from abist-co-ltd
with Apache License 2.0
from abist-co-ltd
/// <summary>
/// Tracks <paramref name="folderPath"/> (with normalized separators) under the
/// given module, creating the module's folder set on first use.
/// </summary>
private static void RegisterFolderToModule(string folderPath, MixedRealityToolkitModuleType module)
{
    string normalizedFolder = NormalizeSeparators(folderPath);

    HashSet<string> modFolders;
    if (!mrtkFolders.TryGetValue(module, out modFolders))
    {
        modFolders = new HashSet<string>();
        mrtkFolders.Add(module, modFolders);
    }

    modFolders.Add(normalizedFolder);
}
19
Source : ONSPPropagationSerializationManager.cs
with MIT License
from absurd-joy
with MIT License
from absurd-joy
/// <summary>
/// Writes the geometry file for every file-enabled ONSPPropagationGeometry in
/// the scene, logging write failures and duplicate relative file paths.
/// </summary>
private static void BuildAudioGeometryForScene(Scene scene)
{
    Debug.Log("Building audio geometry for scene " + scene.name);

    List<GameObject> rootObjects = new List<GameObject>();
    scene.GetRootGameObjects(rootObjects);

    // Every file written so far; Add() returning false flags a name clash.
    HashSet<string> fileNames = new HashSet<string>();

    foreach (GameObject root in rootObjects)
    {
        foreach (ONSPPropagationGeometry geo in root.GetComponentsInChildren<ONSPPropagationGeometry>())
        {
            if (!geo.fileEnabled)
                continue;

            if (!geo.WriteFile())
            {
                Debug.LogError("Failed writing geometry for " + geo.gameObject.name);
            }
            else if (!fileNames.Add(geo.filePathRelative))
            {
                Debug.LogWarning("Duplicate file name detected: " + geo.filePathRelative);
            }
        }
    }

    Debug.Log("Successfully built " + fileNames.Count + " geometry objects");
}
19
Source : ExampleDescriptionFormattingConverter.cs
with MIT License
from ABTSoftware
/// <summary>
/// Builds a search-highlighted snippet from an example description:
/// sentences matching the current search terms are highlighted, otherwise
/// the first two sentences are returned verbatim.
/// </summary>
/// <param name="value">The example description string.</param>
/// <returns>Snippet text, or an empty string when no search is active.</returns>
public object Convert(object value, Type targetType, object parameter, CultureInfo culture)
{
    if (MainWindowViewModel.SearchText.IsNullOrEmpty())
    {
        return string.Empty;
    }

    var description = (string)value;
    var terms = MainWindowViewModel.SearchText
        .Split(' ')
        .Where(word => word != "")
        .Select(word => word.ToLower())
        .ToArray();

    // ". " is treated as the sentence boundary.
    var lines = description.Split(new[] { ". " }, StringSplitOptions.None).ToArray();

    // Up to two matching sentences per term; the set suppresses duplicates.
    var sentences = new HashSet<string>();
    foreach (var term in terms)
    {
        foreach (var line in lines.Where(l => l != "" && l.ToLower().Contains(term)).Take(2))
        {
            sentences.Add(line);
        }
    }

    if (sentences.Any())
    {
        return HighlightText(sentences.Select(s => s.Trim()).ToArray(), terms);
    }

    // No term matched: fall back to the first two sentences, unhighlighted.
    var result = string.Empty;
    foreach (string sentence in lines.Take(2).Select(l => l.Trim()))
    {
        result = result + (sentence + ". ");
    }
    return result;
}
19
Source : ExampleSourceCodeFormattingConverter.cs
with MIT License
from ABTSoftware
/// <summary>
/// Builds a search-highlighted snippet from an example's XAML source files.
/// Lines matching the current search terms are highlighted; otherwise the
/// first two lines are shown wrapped in ellipses.
/// </summary>
/// <param name="value">A Dictionary&lt;string, string&gt; of file name to file contents.</param>
/// <returns>Snippet text, or an empty string when no search is active.</returns>
public object Convert(object value, Type targetType, object parameter, CultureInfo culture)
{
    if (MainWindowViewModel.SearchText.IsNullOrEmpty())
    {
        return string.Empty;
    }

    var terms = MainWindowViewModel.SearchText.Split(' ').Where(word => word != "").Select(x => x.ToLower()).ToArray();
    var codeFiles = (Dictionary<string, string>)value;

    // Only XAML files are scanned for matches.
    var uiCodeFiles = codeFiles.Where(x => x.Key.EndsWith(".xaml"));
    var lines = new List<string>();
    foreach (var file in uiCodeFiles)
    {
        lines.AddRange(file.Value.Split(new[] { "\r\n" }, StringSplitOptions.None));
    }

    // Collect up to two matching lines per term (set suppresses duplicates).
    var toHighlight = new HashSet<string>();
    foreach (var term in terms)
    {
        var containsTerm = lines.Where(x => x != "" && x.ToLower().Contains(term));
        containsTerm.Take(2).Select(x => x.Trim()).ForEachDo(x => toHighlight.Add(x));
    }

    string result;
    if (toHighlight.Any())
    {
        // Angle brackets are stripped so the XAML reads as plain text.
        lines = toHighlight.Take(2).Select(x => x.Trim().Replace('<', ' ').Replace('>', ' ') + '.').ToList();
        result = HighlightText(lines, terms);
    }
    else
    {
        // Bug fix: the original called .ToList() on the cleaned string, so
        // string.Format received a List<char> and printed
        // "System.Collections.Generic.List`1[System.Char]" instead of the line.
        var sentences = lines.Take(2).Select(x => string.Format("... {0} ...", x.Trim().Replace('<', ' ').Replace('>', ' ')));
        result = string.Join("\n", sentences);
    }
    return result;
}
19
Source : Module.cs
with MIT License
from ABTSoftware
with MIT License
from ABTSoftware
/// <summary>
/// Registers an app page and an Example for every definition, derives the
/// top-level category list and the per-category group lookup, then adds the
/// static Home / Everything / ExamplesHost pages.
/// </summary>
private void InitializeExamplesAndPages(IEnumerable<ExampleDefinition> exampleDefinitions)
{
    AppPage appPage;
    var categories = new HashSet<string>();

    foreach (var definition in exampleDefinitions)
    {
        appPage = new ExampleAppPage(definition.replacedle, definition.ViewModel, definition.View);
        ChartingPages.Add(appPage.PageId, appPage);

        var example = new Example(appPage, definition) { SelectCommand = NavigateToExample };
        _examples.Add(appPage.PageId, example);
        categories.Add(example.TopLevelCategory);
    }

    _allCategories = new ReadOnlyCollection<string>(categories.ToList());

    _groupsByCategory = new Dictionary<string, ReadOnlyCollection<string>>();
    foreach (var category in _allCategories)
    {
        var groups = _examples
            .Where(pair => pair.Value.TopLevelCategory == category)
            .Select(pair => pair.Value.Group)
            .Distinct()
            .ToList();
        _groupsByCategory.Add(category, new ReadOnlyCollection<string>(groups));
    }

    // Static pages that exist regardless of the example definitions.
    appPage = new HomeAppPage();
    ChartingPages.Add(appPage.PageId, appPage);
    appPage = new EverythingAppPage();
    ChartingPages.Add(appPage.PageId, appPage);
    appPage = new ExamplesHostAppPage();
    ChartingPages.Add(appPage.PageId, appPage);
}
19
Source : AudioDeviceSource.cs
with MIT License
from ABTSoftware
with MIT License
from ABTSoftware
/// <summary>
/// Synchronizes the Devices collection with the currently active capture
/// endpoints: updates known devices, adds new ones, removes vanished ones,
/// then raises DevicesChanged.
/// </summary>
private void RefreshDevices()
{
    // Marshal onto the dispatcher thread; the device list is UI-bound.
    if (!_dispatcher.CheckAccess())
    {
        _dispatcher.BeginInvoke((Action)RefreshDevices);
        return;
    }

    DefaultDevice = GetDefaultDevice();

    var knownDevices = Devices.ToDictionary(d => d.ID, d => d);
    var seenIds = new HashSet<string>();

    foreach (var endpoint in _enumerator.EnumerateAudioEndPoints(DataFlow.Capture, DeviceState.Active))
    {
        seenIds.Add(endpoint.ID);
        if (knownDevices.TryGetValue(endpoint.ID, out var existing))
        {
            existing.Update(endpoint);
        }
        else
        {
            Devices.Add(new AudioDeviceInfo(endpoint));
        }
        endpoint.Dispose();
    }

    // Walk backwards so RemoveAt doesn't shift indices still to be visited.
    for (int i = Devices.Count - 1; i >= 0; i--)
    {
        if (!seenIds.Contains(Devices[i].ID))
        {
            Devices.RemoveAt(i);
        }
    }

    DevicesChanged?.Invoke(this, EventArgs.Empty);
}
19
Source : FeatureBuilder.cs
with Apache License 2.0
from acblog
with Apache License 2.0
from acblog
/// <summary>
/// Adds each name to the feature set. Names containing a comma are rejected
/// — presumably because ',' acts as a separator when the feature list is
/// serialized (TODO confirm against the serializer).
/// </summary>
/// <param name="names">Feature names to add.</param>
/// <returns>This builder, for chaining.</returns>
/// <exception cref="ArgumentException">A name contains ','.</exception>
public FeatureBuilder AddFeature(params string[] names)
{
    foreach (var name in names)
    {
        if (name.Contains(','))
        {
            // ArgumentException is more specific than the base Exception the
            // original threw, and remains catchable by catch (Exception).
            throw new ArgumentException($"Invalid feature name: {name}.", nameof(names));
        }
        Inner.Add(name);
    }
    return this;
}
19
Source : KeywordBuilder.cs
with Apache License 2.0
from acblog
with Apache License 2.0
from acblog
/// <summary>
/// Adds each name to the keyword set. Names containing a semicolon are
/// rejected — presumably because ';' acts as a separator when the keyword
/// list is serialized (TODO confirm against the serializer).
/// </summary>
/// <param name="names">Keyword names to add.</param>
/// <returns>This builder, for chaining.</returns>
/// <exception cref="ArgumentException">A name contains ';'.</exception>
public KeywordBuilder AddKeyword(params string[] names)
{
    foreach (var name in names)
    {
        if (name.Contains(';'))
        {
            // ArgumentException is more specific than the base Exception the
            // original threw, and remains catchable by catch (Exception).
            throw new ArgumentException($"Invalid keyword name: {name}.", nameof(names));
        }
        Inner.Add(name);
    }
    return this;
}
19
Source : KeywordCollectionBuilder.cs
with Apache License 2.0
from acblog
with Apache License 2.0
from acblog
/// <summary>
/// Flattens every keyword item from the async stream into a distinct set,
/// then wraps each unique item back into a single-item Keyword.
/// </summary>
/// <param name="data">Stream of keywords to merge.</param>
/// <param name="cancellationToken">Checked once per streamed element.</param>
/// <returns>A collection with one Keyword per distinct item.</returns>
public static async Task<KeywordCollection> Build(IAsyncEnumerable<Keyword> data, CancellationToken cancellationToken = default)
{
    HashSet<string> res = new HashSet<string>();
    await foreach (var v in data)
    {
        cancellationToken.ThrowIfCancellationRequested();
        // Fix: the original allocated an unused List<string> here on every
        // iteration; it was never read and has been removed.
        foreach (var k in v.Items)
        {
            res.Add(k);
        }
    }
    return new KeywordCollection(res.Select(item => new Keyword { Items = new string[] { item } }).ToList());
}
19
Source : ContractManager.cs
with GNU Affero General Public License v3.0
from ACEmulator
with GNU Affero General Public License v3.0
from ACEmulator
/// <summary>
/// Rebuilds the per-contract sets of lowercased quest flags being monitored
/// for the player's current contracts.
/// </summary>
private void RefreshMonitoredQuestFlags()
{
    MonitoredQuestFlags.Clear();

    var contracts = Player.Character.GetContracts(Player.CharacterDatabaseLock);
    foreach (var contract in contracts)
    {
        var datContract = GetContractFromDat(contract.ContractId);

        // Fix: the original keyed ContainsKey/Add on contract.ContractId but
        // indexed with datContract.ContractId — a latent KeyNotFoundException
        // if the two ids ever diverged — and re-looked-up the dictionary for
        // every flag. A single TryGetValue on one key does both jobs.
        if (!MonitoredQuestFlags.TryGetValue(contract.ContractId, out var flags))
        {
            flags = new HashSet<string>();
            MonitoredQuestFlags.Add(contract.ContractId, flags);
        }

        AddFlagIfSet(flags, datContract.QuestflagFinished);
        AddFlagIfSet(flags, datContract.QuestflagProgress);
        AddFlagIfSet(flags, datContract.QuestflagRepeatTime);
        AddFlagIfSet(flags, datContract.QuestflagStamped);
        AddFlagIfSet(flags, datContract.QuestflagStarted);
        AddFlagIfSet(flags, datContract.QuestflagTimer);
    }
}

// Adds the lowercased flag to the set when it is non-empty/non-whitespace.
private static void AddFlagIfSet(HashSet<string> flags, string questFlag)
{
    if (!string.IsNullOrWhiteSpace(questFlag))
    {
        flags.Add(questFlag.ToLower());
    }
}
19
Source : CommandLineParser.cs
with MIT License
from actions
with MIT License
from actions
/// <summary>
/// Splits raw command-line arguments into Commands (bare tokens before the
/// first "--" token), Flags ("--name" with no following value) and Args
/// ("--name value" pairs, first value wins). Values whose argument name is
/// in SecretArgNames are registered with the secret masker before storing.
/// </summary>
/// <param name="args">Raw process arguments; must not be null.</param>
public void Parse(string[] args)
{
_trace.Info(nameof(Parse));
ArgUtil.NotNull(args, nameof(args));
_trace.Info("Parsing {0} args", args.Length);
// Name of the "--arg" currently awaiting a value; null when none is open.
string argScope = null;
foreach (string arg in args)
{
_trace.Info("parsing argument");
// Once any "--" token is seen, all later bare tokens are values or flags,
// never commands.
HasArgs = HasArgs || arg.StartsWith("--");
_trace.Info("HasArgs: {0}", HasArgs);
if (string.Equals(arg, "/?", StringComparison.Ordinal))
{
Flags.Add("help");
}
else if (!HasArgs)
{
_trace.Info("Adding Command: {0}", arg);
Commands.Add(arg.Trim());
}
else
{
// it's either an arg, an arg value or a flag
if (arg.StartsWith("--") && arg.Length > 2)
{
string argVal = arg.Substring(2);
_trace.Info("arg: {0}", argVal);
// this means two --args in a row which means previous was a flag
if (argScope != null)
{
_trace.Info("Adding flag: {0}", argScope);
Flags.Add(argScope.Trim());
}
argScope = argVal;
}
else if (!arg.StartsWith("-"))
{
// we found a value - check if we're in scope of an arg
// (note: argScope is trimmed in-place by the embedded assignment below)
if (argScope != null && !Args.ContainsKey(argScope = argScope.Trim()))
{
if (SecretArgNames.Contains(argScope))
{
_secretMasker.AddValue(arg);
}
_trace.Info("Adding option '{0}': '{1}'", argScope, arg);
// ignore duplicates - first wins - below will be val1
// --arg1 val1 --arg1 val1
Args.Add(argScope, arg);
argScope = null;
}
}
else
{
//
// ignoring the second value for an arg (val2 below)
// --arg val1 val2
// ignoring invalid things like empty - and --
// --arg val1 -- --flag
_trace.Info("Ignoring arg");
}
}
}
_trace.Verbose("done parsing arguments");
// handle last arg being a flag
if (argScope != null)
{
Flags.Add(argScope);
}
_trace.Verbose("Exiting parse");
}
19
Source : ProjectRootFolder.cs
with MIT License
from action-bi-toolkit
with MIT License
from action-bi-toolkit
/// <summary>
/// Records that a file was added or updated under the project root,
/// and logs the path at verbose level.
/// </summary>
/// <param name="fullPath">Absolute path of the file that was written.</param>
internal void FileWritten(string fullPath)
{
    _filesWritten.Add(fullPath);
    Log.Verbose("File written: {Path}", fullPath);
}
19
Source : ActionRunner.cs
with MIT License
from actions
with MIT License
from actions
/// <summary>
/// Executes this action step for the current stage: loads the action
/// definition, registers post-job cleanup when declared, prepares inputs
/// and environment, creates the matching handler and runs it, always
/// flushing file commands afterwards.
/// </summary>
public async Task RunAsync()
{
// Validate args.
Trace.Entering();
ArgUtil.NotNull(ExecutionContext, nameof(ExecutionContext));
ArgUtil.NotNull(Action, nameof(Action));
var taskManager = HostContext.GetService<IActionManager>();
var handlerFactory = HostContext.GetService<IHandlerFactory>();
// Load the task definition and choose the handler.
Definition definition = taskManager.LoadAction(ExecutionContext, Action);
ArgUtil.NotNull(definition, nameof(definition));
ActionExecutionData handlerData = definition.Data?.Execution;
ArgUtil.NotNull(handlerData, nameof(handlerData));
List<JobExtensionRunner> localActionContainerSetupSteps = null;
// Handle Composite Local Actions
// Need to download and expand the tree of referenced actions
if (handlerData.ExecutionType == ActionExecutionType.Composite &&
handlerData is CompositeActionExecutionData compositeHandlerData &&
Stage == ActionRunStage.Main &&
Action.Reference is Pipelines.RepositoryPathReference localAction &&
string.Equals(localAction.RepositoryType, Pipelines.PipelineConstants.SelfAlias, StringComparison.OrdinalIgnoreCase))
{
var actionManager = HostContext.GetService<IActionManager>();
var prepareResult = await actionManager.PrepareActionsAsync(ExecutionContext, compositeHandlerData.Steps, ExecutionContext.Id);
// Reload definition since post may exist now (from embedded steps that were JIT downloaded)
definition = taskManager.LoadAction(ExecutionContext, Action);
ArgUtil.NotNull(definition, nameof(definition));
handlerData = definition.Data?.Execution;
ArgUtil.NotNull(handlerData, nameof(handlerData));
// Save container setup steps so we can reference them later
localActionContainerSetupSteps = prepareResult.ContainerSetupSteps;
}
if (handlerData.HasPre &&
Action.Reference is Pipelines.RepositoryPathReference repoAction &&
string.Equals(repoAction.RepositoryType, Pipelines.PipelineConstants.SelfAlias, StringComparison.OrdinalIgnoreCase))
{
ExecutionContext.Warning($"`pre` execution is not supported for local action from '{repoAction.Path}'");
}
// The action has post cleanup defined.
// we need to create timeline record for them and add them to the step list that StepRunner is using
if (handlerData.HasPost && (Stage == ActionRunStage.Pre || Stage == ActionRunStage.Main))
{
string postDisplayName = $"Post {this.DisplayName}";
if (Stage == ActionRunStage.Pre &&
this.DisplayName.StartsWith("Pre ", StringComparison.OrdinalIgnoreCase))
{
// Trim the leading `Pre ` from the display name.
// Otherwise, we will get `Post Pre xxx` as DisplayName for the Post step.
postDisplayName = $"Post {this.DisplayName.Substring("Pre ".Length)}";
}
var repositoryReference = Action.Reference as RepositoryPathReference;
var pathString = string.IsNullOrEmpty(repositoryReference.Path) ? string.Empty : $"/{repositoryReference.Path}";
var repoString = string.IsNullOrEmpty(repositoryReference.Ref) ? $"{repositoryReference.Name}{pathString}" :
$"{repositoryReference.Name}{pathString}@{repositoryReference.Ref}";
ExecutionContext.Debug($"Register post job cleanup for action: {repoString}");
var actionRunner = HostContext.CreateService<IActionRunner>();
actionRunner.Action = Action;
actionRunner.Stage = ActionRunStage.Post;
actionRunner.Condition = handlerData.CleanupCondition;
actionRunner.DisplayName = postDisplayName;
ExecutionContext.RegisterPostJobStep(actionRunner);
}
IStepHost stepHost = HostContext.CreateService<IDefaultStepHost>();
// Makes directory for event_path data
var tempDirectory = HostContext.GetDirectory(WellKnownDirectory.Temp);
var workflowDirectory = Path.Combine(tempDirectory, "_github_workflow");
Directory.CreateDirectory(workflowDirectory);
var gitHubEvent = ExecutionContext.GetGitHubContext("event");
// adds the GitHub event path/file if the event exists
if (gitHubEvent != null)
{
var workflowFile = Path.Combine(workflowDirectory, "event.json");
Trace.Info($"Write event payload to {workflowFile}");
File.WriteAllText(workflowFile, gitHubEvent, new UTF8Encoding(false));
ExecutionContext.SetGitHubContext("event_path", workflowFile);
}
// Set GITHUB_ACTION_REPOSITORY if this Action is from a repository
if (Action.Reference is Pipelines.RepositoryPathReference repoPathReferenceAction &&
!string.Equals(repoPathReferenceAction.RepositoryType, Pipelines.PipelineConstants.SelfAlias, StringComparison.OrdinalIgnoreCase))
{
ExecutionContext.SetGitHubContext("action_repository", repoPathReferenceAction.Name);
ExecutionContext.SetGitHubContext("action_ref", repoPathReferenceAction.Ref);
}
else
{
// Local ("self") actions get the repository context cleared.
ExecutionContext.SetGitHubContext("action_repository", null);
ExecutionContext.SetGitHubContext("action_ref", null);
}
// Setup container stephost for running inside the container.
if (ExecutionContext.Global.Container != null)
{
// Make sure required container is already created.
ArgUtil.NotNullOrEmpty(ExecutionContext.Global.Container.ContainerId, nameof(ExecutionContext.Global.Container.ContainerId));
var containerStepHost = HostContext.CreateService<IContainerStepHost>();
containerStepHost.Container = ExecutionContext.Global.Container;
stepHost = containerStepHost;
}
// Setup File Command Manager
var fileCommandManager = HostContext.CreateService<IFileCommandManager>();
fileCommandManager.InitializeFiles(ExecutionContext, null);
// Load the inputs.
ExecutionContext.Debug("Loading inputs");
var templateEvaluator = ExecutionContext.ToPipelineTemplateEvaluator();
var inputs = templateEvaluator.EvaluateStepInputs(Action.Inputs, ExecutionContext.ExpressionValues, ExecutionContext.ExpressionFunctions);
// Track the names the user explicitly supplied so unexpected ones can be
// reported after defaults are merged in.
var userInputs = new HashSet<string>(StringComparer.OrdinalIgnoreCase);
foreach (KeyValuePair<string, string> input in inputs)
{
userInputs.Add(input.Key);
string message = "";
if (definition.Data?.Deprecated?.TryGetValue(input.Key, out message) == true)
{
ExecutionContext.Warning(String.Format("Input '{0}' has been deprecated with message: {1}", input.Key, message));
}
}
var validInputs = new HashSet<string>(StringComparer.OrdinalIgnoreCase);
if (handlerData.ExecutionType == ActionExecutionType.Container)
{
// container action always accept 'entryPoint' and 'args' as inputs
// https://help.github.com/en/actions/reference/workflow-syntax-for-github-actions#jobsjob_idstepswithargs
validInputs.Add("entryPoint");
validInputs.Add("args");
}
// Merge the default inputs from the definition
if (definition.Data?.Inputs != null)
{
var manifestManager = HostContext.GetService<IActionManifestManager>();
foreach (var input in definition.Data.Inputs)
{
string key = input.Key.replacedertString("action input name").Value;
validInputs.Add(key);
if (!inputs.ContainsKey(key))
{
inputs[key] = manifestManager.EvaluateDefaultInput(ExecutionContext, key, input.Value);
}
}
}
// Validate inputs only for actions with action.yml
if (Action.Reference.Type == Pipelines.ActionSourceType.Repository)
{
var unexpectedInputs = new List<string>();
foreach (var input in userInputs)
{
if (!validInputs.Contains(input))
{
unexpectedInputs.Add(input);
}
}
if (unexpectedInputs.Count > 0)
{
ExecutionContext.Warning($"Unexpected input(s) '{string.Join("', '", unexpectedInputs)}', valid inputs are ['{string.Join("', '", validInputs)}']");
}
}
// Load the action environment.
ExecutionContext.Debug("Loading env");
var environment = new Dictionary<String, String>(VarUtil.EnvironmentVariableKeyComparer);
#if OS_WINDOWS
var envContext = ExecutionContext.ExpressionValues["env"] as DictionaryContextData;
#else
var envContext = ExecutionContext.ExpressionValues["env"] as CaseSensitiveDictionaryContextData;
#endif
// Apply environment from env context, env context contains job level env and action's evn block
foreach (var env in envContext)
{
environment[env.Key] = env.Value.ToString();
}
// Apply action's intra-action state at last
foreach (var state in ExecutionContext.IntraActionState)
{
environment[$"STATE_{state.Key}"] = state.Value ?? string.Empty;
}
// Create the handler.
IHandler handler = handlerFactory.Create(
ExecutionContext,
Action.Reference,
stepHost,
handlerData,
inputs,
environment,
ExecutionContext.Global.Variables,
actionDirectory: definition.Directory,
localActionContainerSetupSteps: localActionContainerSetupSteps);
// Print out action details
handler.PrintActionDetails(Stage);
// Run the task.
try
{
await handler.RunAsync(Stage);
}
finally
{
// Always flush file commands, even when the handler throws.
fileCommandManager.ProcessFiles(ExecutionContext, ExecutionContext.Global.Container);
}
}
19
Source : JobExtension.cs
with MIT License
from actions
with MIT License
from actions
/// <summary>
/// Runs the "Set up job" phase: prints runner/machine/proxy diagnostics,
/// prepares the workflow directory and GitHub contexts, evaluates job-level
/// env/containers/defaults, downloads actions, and builds the ordered
/// pre-job and job step lists consumed by the StepRunner.
/// </summary>
/// <param name="jobContext">The job's root execution context.</param>
/// <param name="message">The job request message from the service.</param>
/// <returns>Pre-job steps followed by job steps, in execution order.</returns>
public async Task<List<IStep>> InitializeJob(IExecutionContext jobContext, Pipelines.AgentJobRequestMessage message)
{
Trace.Entering();
ArgUtil.NotNull(jobContext, nameof(jobContext));
ArgUtil.NotNull(message, nameof(message));
// Create a new timeline record for 'Set up job'
IExecutionContext context = jobContext.CreateChild(Guid.NewGuid(), "Set up job", $"{nameof(JobExtension)}_Init", null, null, ActionRunStage.Pre);
List<IStep> preJobSteps = new List<IStep>();
List<IStep> jobSteps = new List<IStep>();
// Propagate job cancellation into the setup context.
using (var register = jobContext.CancellationToken.Register(() => { context.CancelToken(); }))
{
try
{
context.Start();
context.Debug($"Starting: Set up job");
context.Output($"Current runner version: '{BuildConstants.RunnerPackage.Version}'");
var setting = HostContext.GetService<IConfigurationStore>().GetSettings();
var credFile = HostContext.GetConfigFile(WellKnownConfigFile.Credentials);
if (File.Exists(credFile))
{
var credData = IOUtil.LoadObject<CredentialData>(credFile);
if (credData != null &&
credData.Data.TryGetValue("clientId", out var clientId))
{
// print out HostName for self-hosted runner
context.Output($"Runner name: '{setting.AgentName}'");
if (message.Variables.TryGetValue("system.runnerGroupName", out VariableValue runnerGroupName))
{
context.Output($"Runner group name: '{runnerGroupName.Value}'");
}
context.Output($"Machine name: '{Environment.MachineName}'");
}
}
var setupInfoFile = HostContext.GetConfigFile(WellKnownConfigFile.SetupInfo);
if (File.Exists(setupInfoFile))
{
Trace.Info($"Load machine setup info from {setupInfoFile}");
try
{
var setupInfo = IOUtil.LoadObject<List<SetupInfo>>(setupInfoFile);
if (setupInfo?.Count > 0)
{
foreach (var info in setupInfo)
{
if (!string.IsNullOrEmpty(info?.Detail))
{
var groupName = info.Group;
if (string.IsNullOrEmpty(groupName))
{
groupName = "Machine Setup Info";
}
context.Output($"##[group]{groupName}");
var multiLines = info.Detail.Replace("\r\n", "\n").TrimEnd('\n').Split('\n');
foreach (var line in multiLines)
{
context.Output(line);
}
context.Output("##[endgroup]");
}
}
}
}
catch (Exception ex)
{
// Best effort: setup info is diagnostic only, so log and continue.
context.Output($"Fail to load and print machine setup info: {ex.Message}");
Trace.Error(ex);
}
}
try
{
var tokenPermissions = jobContext.Global.Variables.Get("system.github.token.permissions") ?? "";
if (!string.IsNullOrEmpty(tokenPermissions))
{
context.Output($"##[group]GITHUB_TOKEN Permissions");
var permissions = StringUtil.ConvertFromJson<Dictionary<string, string>>(tokenPermissions);
foreach(KeyValuePair<string, string> entry in permissions)
{
context.Output($"{entry.Key}: {entry.Value}");
}
context.Output("##[endgroup]");
}
}
catch (Exception ex)
{
// Best effort: the permissions listing is informational only.
context.Output($"Fail to parse and display GITHUB_TOKEN permissions list: {ex.Message}");
Trace.Error(ex);
}
var repoFullName = context.GetGitHubContext("repository");
ArgUtil.NotNull(repoFullName, nameof(repoFullName));
context.Debug($"Primary repository: {repoFullName}");
// Print proxy setting information for better diagnostic experience
if (!string.IsNullOrEmpty(HostContext.WebProxy.HttpProxyAddress))
{
context.Output($"Runner is running behind proxy server '{HostContext.WebProxy.HttpProxyAddress}' for all HTTP requests.");
}
if (!string.IsNullOrEmpty(HostContext.WebProxy.HttpsProxyAddress))
{
context.Output($"Runner is running behind proxy server '{HostContext.WebProxy.HttpsProxyAddress}' for all HTTPS requests.");
}
// Prepare the workflow directory
context.Output("Prepare workflow directory");
var directoryManager = HostContext.GetService<IPipelineDirectoryManager>();
TrackingConfig trackingConfig = directoryManager.PrepareDirectory(
context,
message.Workspace);
// Set the directory variables
context.Debug("Update context data");
string _workDirectory = HostContext.GetDirectory(WellKnownDirectory.Work);
context.SetRunnerContext("workspace", Path.Combine(_workDirectory, trackingConfig.PipelineDirectory));
context.SetGitHubContext("workspace", Path.Combine(_workDirectory, trackingConfig.WorkspaceDirectory));
// Temporary hack for GHES alpha
var configurationStore = HostContext.GetService<IConfigurationStore>();
var runnerSettings = configurationStore.GetSettings();
if (string.IsNullOrEmpty(context.GetGitHubContext("server_url")) && !runnerSettings.IsHostedServer && !string.IsNullOrEmpty(runnerSettings.GitHubUrl))
{
var url = new Uri(runnerSettings.GitHubUrl);
var portInfo = url.IsDefaultPort ? string.Empty : $":{url.Port.ToString(CultureInfo.InvariantCulture)}";
context.SetGitHubContext("server_url", $"{url.Scheme}://{url.Host}{portInfo}");
context.SetGitHubContext("api_url", $"{url.Scheme}://{url.Host}{portInfo}/api/v3");
context.SetGitHubContext("graphql_url", $"{url.Scheme}://{url.Host}{portInfo}/api/graphql");
}
// Evaluate the job-level environment variables
context.Debug("Evaluating job-level environment variables");
var templateEvaluator = context.ToPipelineTemplateEvaluator();
foreach (var token in message.EnvironmentVariables)
{
var environmentVariables = templateEvaluator.EvaluateStepEnvironment(token, jobContext.ExpressionValues, jobContext.ExpressionFunctions, VarUtil.EnvironmentVariableKeyComparer);
foreach (var pair in environmentVariables)
{
context.Global.EnvironmentVariables[pair.Key] = pair.Value ?? string.Empty;
context.SetEnvContext(pair.Key, pair.Value ?? string.Empty);
}
}
// Evaluate the job container
context.Debug("Evaluating job container");
var container = templateEvaluator.EvaluateJobContainer(message.JobContainer, jobContext.ExpressionValues, jobContext.ExpressionFunctions);
if (container != null)
{
jobContext.Global.Container = new Container.ContainerInfo(HostContext, container);
}
// Evaluate the job service containers
context.Debug("Evaluating job service containers");
var serviceContainers = templateEvaluator.EvaluateJobServiceContainers(message.JobServiceContainers, jobContext.ExpressionValues, jobContext.ExpressionFunctions);
if (serviceContainers?.Count > 0)
{
foreach (var pair in serviceContainers)
{
var networkAlias = pair.Key;
var serviceContainer = pair.Value;
jobContext.Global.ServiceContainers.Add(new Container.ContainerInfo(HostContext, serviceContainer, false, networkAlias));
}
}
// Evaluate the job defaults
context.Debug("Evaluating job defaults");
foreach (var token in message.Defaults)
{
var defaults = token.replacedertMapping("defaults");
if (defaults.Any(x => string.Equals(x.Key.replacedertString("defaults key").Value, "run", StringComparison.OrdinalIgnoreCase)))
{
context.Global.JobDefaults["run"] = new Dictionary<string, string>(StringComparer.OrdinalIgnoreCase);
var defaultsRun = defaults.First(x => string.Equals(x.Key.replacedertString("defaults key").Value, "run", StringComparison.OrdinalIgnoreCase));
var jobDefaults = templateEvaluator.EvaluateJobDefaultsRun(defaultsRun.Value, jobContext.ExpressionValues, jobContext.ExpressionFunctions);
foreach (var pair in jobDefaults)
{
if (!string.IsNullOrEmpty(pair.Value))
{
context.Global.JobDefaults["run"][pair.Key] = pair.Value;
}
}
}
}
// Build up 2 lists of steps, pre-job, job
// Download actions not already in the cache
Trace.Info("Downloading actions");
var actionManager = HostContext.GetService<IActionManager>();
var prepareResult = await actionManager.PrepareActionsAsync(context, message.Steps);
preJobSteps.AddRange(prepareResult.ContainerSetupSteps);
// Add start-container steps, record and stop-container steps
if (jobContext.Global.Container != null || jobContext.Global.ServiceContainers.Count > 0)
{
var containerProvider = HostContext.GetService<IContainerOperationProvider>();
var containers = new List<Container.ContainerInfo>();
if (jobContext.Global.Container != null)
{
containers.Add(jobContext.Global.Container);
}
containers.AddRange(jobContext.Global.ServiceContainers);
preJobSteps.Add(new JobExtensionRunner(runAsync: containerProvider.StartContainersAsync,
condition: $"{PipelineTemplateConstants.Success}()",
displayName: "Initialize containers",
data: (object)containers));
}
// Add action steps
foreach (var step in message.Steps)
{
if (step.Type == Pipelines.StepType.Action)
{
var action = step as Pipelines.ActionStep;
Trace.Info($"Adding {action.DisplayName}.");
var actionRunner = HostContext.CreateService<IActionRunner>();
actionRunner.Action = action;
actionRunner.Stage = ActionRunStage.Main;
actionRunner.Condition = step.Condition;
var contextData = new Pipelines.ContextData.DictionaryContextData();
if (message.ContextData?.Count > 0)
{
foreach (var pair in message.ContextData)
{
contextData[pair.Key] = pair.Value;
}
}
actionRunner.TryEvaluateDisplayName(contextData, context);
jobSteps.Add(actionRunner);
if (prepareResult.PreStepTracker.TryGetValue(step.Id, out var preStep))
{
Trace.Info($"Adding pre-{action.DisplayName}.");
preStep.TryEvaluateDisplayName(contextData, context);
preStep.DisplayName = $"Pre {preStep.DisplayName}";
preJobSteps.Add(preStep);
}
}
}
// Intra-action state dictionaries are shared between an action's
// pre step and its main step.
var intraActionStates = new Dictionary<Guid, Dictionary<string, string>>();
foreach (var preStep in prepareResult.PreStepTracker)
{
intraActionStates[preStep.Key] = new Dictionary<string, string>(StringComparer.OrdinalIgnoreCase);
}
// Create execution context for pre-job steps
foreach (var step in preJobSteps)
{
if (step is JobExtensionRunner)
{
JobExtensionRunner extensionStep = step as JobExtensionRunner;
ArgUtil.NotNull(extensionStep, extensionStep.DisplayName);
Guid stepId = Guid.NewGuid();
extensionStep.ExecutionContext = jobContext.CreateChild(stepId, extensionStep.DisplayName, null, null, stepId.ToString("N"), ActionRunStage.Pre);
}
else if (step is IActionRunner actionStep)
{
ArgUtil.NotNull(actionStep, step.DisplayName);
Guid stepId = Guid.NewGuid();
actionStep.ExecutionContext = jobContext.CreateChild(stepId, actionStep.DisplayName, stepId.ToString("N"), null, null, ActionRunStage.Pre, intraActionStates[actionStep.Action.Id]);
}
}
// Create execution context for job steps
foreach (var step in jobSteps)
{
if (step is IActionRunner actionStep)
{
ArgUtil.NotNull(actionStep, step.DisplayName);
intraActionStates.TryGetValue(actionStep.Action.Id, out var intraActionState);
actionStep.ExecutionContext = jobContext.CreateChild(actionStep.Action.Id, actionStep.DisplayName, actionStep.Action.Name, null, actionStep.Action.ContextName, ActionRunStage.Main, intraActionState);
}
}
List<IStep> steps = new List<IStep>();
steps.AddRange(preJobSteps);
steps.AddRange(jobSteps);
// Prepare for orphan process cleanup
_processCleanup = jobContext.Global.Variables.GetBoolean("process.clean") ?? true;
if (_processCleanup)
{
// Set the RUNNER_TRACKING_ID env variable.
Environment.SetEnvironmentVariable(Constants.ProcessTrackingId, _processLookupId);
context.Debug("Collect running processes for tracking orphan processes.");
// Take a snapshot of current running processes
Dictionary<int, Process> processes = SnapshotProcesses();
foreach (var proc in processes)
{
// Pid_ProcessName
_existingProcesses.Add($"{proc.Key}_{proc.Value.ProcessName}");
}
}
jobContext.Global.EnvironmentVariables.TryGetValue(Constants.Runner.Features.DiskSpaceWarning, out var enableWarning);
if (StringUtil.ConvertToBoolean(enableWarning, defaultValue: true))
{
_diskSpaceCheckTask = CheckDiskSpaceAsync(context, _diskSpaceCheckToken.Token);
}
return steps;
}
catch (OperationCanceledException ex) when (jobContext.CancellationToken.IsCancellationRequested)
{
// Log the exception and cancel the JobExtension Initialization.
Trace.Error($"Caught cancellation exception from JobExtension Initialization: {ex}");
context.Error(ex);
context.Result = TaskResult.Canceled;
throw;
}
catch (FailedToResolveActionDownloadInfoException ex)
{
// Log the error and fail the JobExtension Initialization.
Trace.Error($"Caught exception from JobExtenion Initialization: {ex}");
context.InfrastructureError(ex.Message);
context.Result = TaskResult.Failed;
throw;
}
catch (Exception ex)
{
// Log the error and fail the JobExtension Initialization.
Trace.Error($"Caught exception from JobExtension Initialization: {ex}");
context.Error(ex);
context.Result = TaskResult.Failed;
throw;
}
finally
{
context.Debug("Finishing: Set up job");
context.Complete();
}
}
}
19
Source : ExecutionContext.cs
with MIT License
from actions
with MIT License
from actions
/// <summary>
/// Prepends the given problem matchers to the root context's matcher list.
/// New matchers take precedence: any existing matcher with the same owner
/// (case-insensitive) is dropped. Change events fire per added matcher.
/// </summary>
public void AddMatchers(IssueMatchersConfig config)
{
    // Matchers live on the root context so they apply job-wide.
    var root = Root;

    lock (root._matchersLock)
    {
        var merged = new List<IssueMatcherConfig>();
        var incomingOwners = new HashSet<string>(StringComparer.OrdinalIgnoreCase);

        // New matchers go first.
        foreach (var matcher in config.Matchers)
        {
            incomingOwners.Add(matcher.Owner);
            merged.Add(matcher);
        }

        // Keep only the existing matchers whose owner was not replaced.
        var existingMatchers = root._matchers ?? Array.Empty<IssueMatcherConfig>();
        merged.AddRange(existingMatchers.Where(m => !incomingOwners.Contains(m.Owner)));

        root._matchers = merged.ToArray();

        // Notify listeners about each newly added matcher.
        foreach (var matcher in config.Matchers)
        {
            root._onMatcherChanged(null, new MatcherChangedEventArgs(matcher));
        }

        // todo: loc
        var joinedOwners = string.Join(", ", config.Matchers.Select(m => $"'{m.Owner}'"));
        this.Debug($"Added matchers: {joinedOwners}. Problem matchers scan action output for known warning or error strings and report these inline.");
    }
}
19
Source : TemplateEvaluator.cs
with MIT License
from actions
with MIT License
from actions
/// <summary>
/// Evaluates a mapping whose schema declares well-known properties:
/// matches each key against the candidate definitions, honors loose
/// (wildcard) key/value types when allowed, and reports duplicate keys,
/// unknown keys, ambiguous schemas and missing required properties.
/// </summary>
private void HandleMappingWithWellKnownProperties(
DefinitionInfo definition,
List<MappingDefinition> mappingDefinitions,
MappingToken mapping)
{
// Check if loose properties are allowed
String looseKeyType = null;
String looseValueType = null;
DefinitionInfo? looseKeyDefinition = null;
DefinitionInfo? looseValueDefinition = null;
if (!String.IsNullOrEmpty(mappingDefinitions[0].LooseKeyType))
{
looseKeyType = mappingDefinitions[0].LooseKeyType;
looseValueType = mappingDefinitions[0].LooseValueType;
}
// Keys seen so far (case-insensitive), used for both the duplicate check
// and the required-property check at the end.
var keys = new HashSet<String>(StringComparer.OrdinalIgnoreCase);
var hasExpressionKey = false;
while (m_unraveler.AllowScalar(definition.Expand, out ScalarToken nextKeyScalar))
{
// Expression
// An expression key cannot be matched against the schema, so its value
// is evaluated with the "any" definition.
if (nextKeyScalar is ExpressionToken)
{
hasExpressionKey = true;
var anyDefinition = new DefinitionInfo(definition, TemplateConstants.Any);
mapping.Add(nextKeyScalar, Evaluate(anyDefinition));
continue;
}
// Not a string, convert
if (!(nextKeyScalar is StringToken nextKey))
{
nextKey = new StringToken(nextKeyScalar.FileId, nextKeyScalar.Line, nextKeyScalar.Column, nextKeyScalar.ToString());
}
// Duplicate
if (!keys.Add(nextKey.Value))
{
m_context.Error(nextKey, TemplateStrings.ValueAlreadyDefined(nextKey.Value));
m_unraveler.SkipMappingValue();
continue;
}
// Well known
if (m_schema.TryMatchKey(mappingDefinitions, nextKey.Value, out String nextValueType))
{
var nextValueDefinition = new DefinitionInfo(definition, nextValueType);
var nextValue = Evaluate(nextValueDefinition);
mapping.Add(nextKey, nextValue);
continue;
}
// Loose
if (looseKeyType != null)
{
// Loose definitions are created lazily, on the first loose key.
if (looseKeyDefinition == null)
{
looseKeyDefinition = new DefinitionInfo(definition, looseKeyType);
looseValueDefinition = new DefinitionInfo(definition, looseValueType);
}
Validate(nextKey, looseKeyDefinition.Value);
var nextValue = Evaluate(looseValueDefinition.Value);
mapping.Add(nextKey, nextValue);
continue;
}
// Error
m_context.Error(nextKey, TemplateStrings.UnexpectedValue(nextKey.Value));
m_unraveler.SkipMappingValue();
}
// Only one
// Ambiguity: the keys seen did not narrow the candidates to a single
// definition. Report the property names unique to one candidate, since
// those are the keys that would disambiguate.
if (mappingDefinitions.Count > 1)
{
var hitCount = new Dictionary<String, Int32>();
foreach (MappingDefinition mapdef in mappingDefinitions)
{
foreach (String key in mapdef.Properties.Keys)
{
if (!hitCount.TryGetValue(key, out Int32 value))
{
hitCount.Add(key, 1);
}
else
{
hitCount[key] = value + 1;
}
}
}
List<String> nonDuplicates = new List<String>();
foreach (String key in hitCount.Keys)
{
if (hitCount[key] == 1)
{
nonDuplicates.Add(key);
}
}
nonDuplicates.Sort();
String listToDeDuplicate = String.Join(", ", nonDuplicates);
m_context.Error(mapping, TemplateStrings.UnableToDetermineOneOf(listToDeDuplicate));
}
else if (mappingDefinitions.Count == 1 && !hasExpressionKey)
{
// Required-property check is skipped when an expression key is present,
// since the expression may supply the missing property at runtime.
foreach (var property in mappingDefinitions[0].Properties)
{
if (property.Value.Required)
{
if (!keys.Contains(property.Key))
{
m_context.Error(mapping, $"Required property is missing: {property.Key}");
}
}
}
}
m_unraveler.ReadMappingEnd();
}
19
Source : PipelineTemplateEvaluator.cs
with MIT License
from actions
// Builds a TemplateContext for one evaluation pass: configures error/memory limits from
// this instance's Max* settings, seeds the file table, copies the supplied named values,
// registers the supplied expression functions, and back-fills any well-known expression
// values/functions that were not supplied (nulls and no-op functions) so references to
// them do not fail. Optional expressionState entries are copied into context State.
private TemplateContext CreateContext(
    DictionaryContextData contextData,
    IList<IFunctionInfo> expressionFunctions,
    IEnumerable<KeyValuePair<String, Object>> expressionState = null)
{
    var result = new TemplateContext
    {
        CancellationToken = CancellationToken.None,
        Errors = new TemplateValidationErrors(MaxErrors, MaxErrorMessageLength),
        Memory = new TemplateMemory(
            maxDepth: MaxDepth,
            maxEvents: MaxEvents,
            maxBytes: MaxResultSize),
        Schema = m_schema,
        TraceWriter = m_trace,
    };
    // Add the file table (GetFileId registers each file name with the context)
    if (m_fileTable?.Count > 0)
    {
        foreach (var file in m_fileTable)
        {
            result.GetFileId(file);
        }
    }
    // Add named values
    if (contextData != null)
    {
        foreach (var pair in contextData)
        {
            result.ExpressionValues[pair.Key] = pair.Value;
        }
    }
    // Add functions; track names (case-insensitive) so we can tell below which
    // well-known functions still need a no-op placeholder
    var functionNames = new HashSet<String>(StringComparer.OrdinalIgnoreCase);
    if (expressionFunctions?.Count > 0)
    {
        foreach (var function in expressionFunctions)
        {
            result.ExpressionFunctions.Add(function);
            functionNames.Add(function.Name);
        }
    }
    // Add missing expression values and expression functions.
    // This solves the following problems:
    // - Compat for new agent against old server (new contexts not sent down in job message)
    // - Evaluating early when all referenced contexts are available, even though all allowed
    //   contexts may not yet be available. For example, evaluating step display name can often
    //   be performed early.
    foreach (var name in s_expressionValueNames)
    {
        if (!result.ExpressionValues.ContainsKey(name))
        {
            result.ExpressionValues[name] = null;
        }
    }
    foreach (var name in s_expressionFunctionNames)
    {
        if (!functionNames.Contains(name))
        {
            // No-op accepts any arity so references parse without being invocable work
            result.ExpressionFunctions.Add(new FunctionInfo<NoOperation>(name, 0, Int32.MaxValue));
        }
    }
    // Add state
    if (expressionState != null)
    {
        foreach (var pair in expressionState)
        {
            result.State[pair.Key] = pair.Value;
        }
    }
    return result;
}
19
Source : ReferenceNameBuilder.cs
with MIT License
from actions
// Produces a unique reference name from the accumulated characters in m_name
// (falling back to "job" when empty). On collision with a previously issued name,
// a numeric suffix "_2", "_3", ... is appended; the base is truncated so the
// candidate never exceeds PipelineConstants.MaxNodeNameLength. Throws once 1000
// attempts are exhausted. Clears m_name before returning the winning candidate.
internal String Build()
{
    var baseName = m_name.Length > 0 ? m_name.ToString() : "job";
    for (var attemptNumber = 1; ; attemptNumber++)
    {
        String numericSuffix;
        if (attemptNumber == 1)
        {
            // First try: no suffix at all
            numericSuffix = String.Empty;
        }
        else if (attemptNumber < 1000)
        {
            numericSuffix = String.Format(CultureInfo.InvariantCulture, "_{0}", attemptNumber);
        }
        else
        {
            throw new InvalidOperationException("Unable to create a unique name");
        }
        // Truncate the base so base + suffix fits within the node-name limit
        var keepLength = Math.Min(baseName.Length, PipelineConstants.MaxNodeNameLength - numericSuffix.Length);
        var uniqueCandidate = baseName.Substring(0, keepLength) + numericSuffix;
        if (m_distinctNames.Add(uniqueCandidate))
        {
            m_name.Clear();
            return uniqueCandidate;
        }
    }
}
19
Source : ReferenceNameBuilder.cs
with MIT License
from actions
// Registers a caller-supplied reference name (e.g. a job or step ID) so later
// generated names will not collide with it.
//
// Returns true on success. Returns false with a descriptive error when the name is
// syntactically invalid, not shorter than PipelineConstants.MaxNodeNameLength, or
// already registered within the same scope.
internal Boolean TryAddKnownName(
    String value,
    out String error)
{
    // BUGFIX: the original condition was
    //   !NameValidation.IsValid(...) && value.Length < PipelineConstants.MaxNodeNameLength
    // so an invalid identifier that was ALSO too long skipped this branch entirely and
    // was silently accepted. An identifier must be rejected when it is invalid OR when
    // it is not shorter than the maximum (the message states IDs "must be less than"
    // the maximum length). Also fixed the doubled "and and" in the message.
    if (!NameValidation.IsValid(value, allowHyphens: true) || value.Length >= PipelineConstants.MaxNodeNameLength)
    {
        error = $"The identifier '{value}' is invalid. IDs may only contain alphanumeric characters, '_', and '-'. IDs must start with a letter or '_' and must be less than {PipelineConstants.MaxNodeNameLength} characters.";
        return false;
    }
    else if (!m_distinctNames.Add(value))
    {
        error = $"The identifier '{value}' may not be used more than once within the same scope.";
        return false;
    }
    else
    {
        error = null;
        return true;
    }
}
19
Source : TemplateEvaluator.cs
with MIT License
from actions
// Evaluates a mapping whose schema declares no well-known properties — every key is
// "loose". Each key is validated against keyDefinition and each value evaluated
// against valueDefinition; pairs are appended to "mapping". Expression keys are kept
// as-is: a basic expression's value is evaluated against the value type, any other
// expression token's value is evaluated as "any". Duplicate keys (case-insensitive)
// are reported and their values skipped.
private void HandleMappingWithAllLooseProperties(
    DefinitionInfo mappingDefinition,
    DefinitionInfo keyDefinition,
    DefinitionInfo valueDefinition,
    MappingToken mapping)
{
    // Case-insensitive duplicate detection
    var keys = new HashSet<String>(StringComparer.OrdinalIgnoreCase);
    while (m_unraveler.AllowScalar(mappingDefinition.Expand, out ScalarToken nextKeyScalar))
    {
        // Expression key
        if (nextKeyScalar is ExpressionToken)
        {
            if (nextKeyScalar is BasicExpressionToken)
            {
                // A basic expression yields a plain key, so the value keeps its declared type
                mapping.Add(nextKeyScalar, Evaluate(valueDefinition));
            }
            else
            {
                // Other expression forms (e.g. insert-style) may expand arbitrarily — use "any"
                var anyDefinition = new DefinitionInfo(mappingDefinition, TemplateConstants.Any);
                mapping.Add(nextKeyScalar, Evaluate(anyDefinition));
            }
            continue;
        }
        // Not a string, convert
        if (!(nextKeyScalar is StringToken nextKey))
        {
            nextKey = new StringToken(nextKeyScalar.FileId, nextKeyScalar.Line, nextKeyScalar.Column, nextKeyScalar.ToString());
        }
        // Duplicate: report and skip the associated value
        if (!keys.Add(nextKey.Value))
        {
            m_context.Error(nextKey, TemplateStrings.ValueAlreadyDefined(nextKey.Value));
            m_unraveler.SkipMappingValue();
            continue;
        }
        // Validate the key against the loose key type
        Validate(nextKey, keyDefinition);
        // Add the pair
        var nextValue = Evaluate(valueDefinition);
        mapping.Add(nextKey, nextValue);
    }
    m_unraveler.ReadMappingEnd();
}
19
Source : TemplateReader.cs
with MIT License
from actions
// Reader-side counterpart of the evaluator's method of the same name: parses a mapping
// whose schema declares well-known (named) properties, appending each key/value pair
// to "mapping" while charging token sizes to m_memory.
//
// For each literal key read from m_objectReader:
//   - an expression key is legal only when the definition allows expression contexts;
//     a legal one is read as "any" (and required-property checks are skipped at the end),
//     an illegal one is reported and its value skipped;
//   - a duplicate key (case-insensitive) is reported and its value skipped;
//   - a key matching a well-known property is read against that property's type;
//   - otherwise, when loose keys are allowed, the key is validated against the loose
//     key type and the value read against the loose value type;
//   - otherwise an "unexpected value" error is reported.
//
// Afterwards: more than one remaining candidate definition → "unable to determine one
// of" error listing the non-shared property names; exactly one (and no expression key)
// → missing required properties are reported.
private void HandleMappingWithWellKnownProperties(
    DefinitionInfo definition,
    List<MappingDefinition> mappingDefinitions,
    MappingToken mapping)
{
    // Check if loose properties are allowed.
    // NOTE(review): loose types come from mappingDefinitions[0] only — presumably all
    // candidates agree; confirm against the schema.
    String looseKeyType = null;
    String looseValueType = null;
    DefinitionInfo? looseKeyDefinition = null;
    DefinitionInfo? looseValueDefinition = null;
    if (!String.IsNullOrEmpty(mappingDefinitions[0].LooseKeyType))
    {
        looseKeyType = mappingDefinitions[0].LooseKeyType;
        looseValueType = mappingDefinitions[0].LooseValueType;
    }
    // Case-insensitive duplicate detection
    var keys = new HashSet<String>(StringComparer.OrdinalIgnoreCase);
    var hasExpressionKey = false;
    while (m_objectReader.AllowLiteral(out LiteralToken rawLiteral))
    {
        var nextKeyScalar = ParseScalar(rawLiteral, definition.AllowedContext);
        // Expression key
        if (nextKeyScalar is ExpressionToken)
        {
            hasExpressionKey = true;
            // Legal: expressions are allowed here, read the value as "any"
            if (definition.AllowedContext.Length > 0)
            {
                m_memory.AddBytes(nextKeyScalar);
                var anyDefinition = new DefinitionInfo(definition, TemplateConstants.Any);
                mapping.Add(nextKeyScalar, ReadValue(anyDefinition));
            }
            // Illegal: no expression context allowed for this definition
            else
            {
                m_context.Error(nextKeyScalar, TemplateStrings.ExpressionNotAllowed());
                SkipValue();
            }
            continue;
        }
        // Not a string, convert
        if (!(nextKeyScalar is StringToken nextKey))
        {
            nextKey = new StringToken(nextKeyScalar.FileId, nextKeyScalar.Line, nextKeyScalar.Column, nextKeyScalar.ToString());
        }
        // Duplicate: report and skip the associated value
        if (!keys.Add(nextKey.Value))
        {
            m_context.Error(nextKey, TemplateStrings.ValueAlreadyDefined(nextKey.Value));
            SkipValue();
            continue;
        }
        // Well known property: read the value against its declared type.
        // TryMatchKey also narrows mappingDefinitions to candidates containing the key.
        if (m_schema.TryMatchKey(mappingDefinitions, nextKey.Value, out String nextValueType))
        {
            m_memory.AddBytes(nextKey);
            var nextValueDefinition = new DefinitionInfo(definition, nextValueType);
            var nextValue = ReadValue(nextValueDefinition);
            mapping.Add(nextKey, nextValue);
            continue;
        }
        // Loose key/value: definitions are created lazily on first use
        if (looseKeyType != null)
        {
            if (looseKeyDefinition == null)
            {
                looseKeyDefinition = new DefinitionInfo(definition, looseKeyType);
                looseValueDefinition = new DefinitionInfo(definition, looseValueType);
            }
            Validate(nextKey, looseKeyDefinition.Value);
            m_memory.AddBytes(nextKey);
            var nextValue = ReadValue(looseValueDefinition.Value);
            mapping.Add(nextKey, nextValue);
            continue;
        }
        // Error: key matched nothing and loose keys are not allowed
        m_context.Error(nextKey, TemplateStrings.UnexpectedValue(nextKey.Value));
        SkipValue();
    }
    // Ambiguous: report the property names appearing in exactly one candidate —
    // adding one of those keys would disambiguate.
    if (mappingDefinitions.Count > 1)
    {
        var hitCount = new Dictionary<String, Int32>();
        foreach (MappingDefinition mapdef in mappingDefinitions)
        {
            foreach (String key in mapdef.Properties.Keys)
            {
                if (!hitCount.TryGetValue(key, out Int32 value))
                {
                    hitCount.Add(key, 1);
                }
                else
                {
                    hitCount[key] = value + 1;
                }
            }
        }
        List<String> nonDuplicates = new List<String>();
        foreach (String key in hitCount.Keys)
        {
            if(hitCount[key] == 1)
            {
                nonDuplicates.Add(key);
            }
        }
        nonDuplicates.Sort();
        String listToDeDuplicate = String.Join(", ", nonDuplicates);
        m_context.Error(mapping, TemplateStrings.UnableToDetermineOneOf(listToDeDuplicate));
    }
    // Single candidate, no expression key: all required properties must be present.
    // (An expression key may expand to the missing properties, so the check is skipped then.)
    else if (mappingDefinitions.Count == 1 && !hasExpressionKey)
    {
        foreach (var property in mappingDefinitions[0].Properties)
        {
            if (property.Value.Required)
            {
                if (!keys.Contains(property.Key))
                {
                    m_context.Error(mapping, $"Required property is missing: {property.Key}");
                }
            }
        }
    }
    ExpectMappingEnd();
}
19
Source : TemplateReader.cs
with MIT License
from actions
// Reader-side counterpart for a mapping with no well-known properties — every key is
// "loose". Keys are validated against keyDefinition and values read against
// valueDefinition; pairs are appended to "mapping" and token sizes charged to m_memory.
// Expression keys are legal only when the mapping definition allows expression contexts;
// duplicate keys (case-insensitive) are reported and their values skipped.
private void HandleMappingWithAllLooseProperties(
    DefinitionInfo mappingDefinition,
    DefinitionInfo keyDefinition,
    DefinitionInfo valueDefinition,
    MappingToken mapping)
{
    TemplateToken nextValue;
    // Case-insensitive duplicate detection
    var keys = new HashSet<String>(StringComparer.OrdinalIgnoreCase);
    while (m_objectReader.AllowLiteral(out LiteralToken rawLiteral))
    {
        var nextKeyScalar = ParseScalar(rawLiteral, mappingDefinition.AllowedContext);
        // Expression key
        if (nextKeyScalar is ExpressionToken)
        {
            // Legal: expressions are allowed for this definition
            if (mappingDefinition.AllowedContext.Length > 0)
            {
                m_memory.AddBytes(nextKeyScalar);
                nextValue = ReadValue(valueDefinition);
                mapping.Add(nextKeyScalar, nextValue);
            }
            // Illegal: report and skip the value
            else
            {
                m_context.Error(nextKeyScalar, TemplateStrings.ExpressionNotAllowed());
                SkipValue();
            }
            continue;
        }
        // Not a string, convert
        if (!(nextKeyScalar is StringToken nextKey))
        {
            nextKey = new StringToken(nextKeyScalar.FileId, nextKeyScalar.Line, nextKeyScalar.Column, nextKeyScalar.ToString());
        }
        // Duplicate: report and skip the associated value
        if (!keys.Add(nextKey.Value))
        {
            m_context.Error(nextKey, TemplateStrings.ValueAlreadyDefined(nextKey.Value));
            SkipValue();
            continue;
        }
        // Validate the key against the loose key type, then charge its size
        Validate(nextKey, keyDefinition);
        m_memory.AddBytes(nextKey);
        // Add the pair
        nextValue = ReadValue(valueDefinition);
        mapping.Add(nextKey, nextValue);
    }
    ExpectMappingEnd();
}
19
Source : ActionCommandManager.cs
with MIT License
from actions
// Registers the stop command plus every discovered action-command extension.
// The "internal-set-repo-path" command is intentionally NOT registered here;
// it is enabled separately via EnablePluginInternalCommand.
public override void Initialize(IHostContext hostContext)
{
    base.Initialize(hostContext);
    _registeredCommands.Add(_stopCommand);
    // Register all command extensions discovered through the extension manager;
    // fall back to an empty list when none are available.
    var extensionManager = hostContext.GetService<IExtensionManager>();
    var discoveredExtensions = extensionManager.GetExtensions<IActionCommandExtension>() ?? new List<IActionCommandExtension>();
    foreach (var extension in discoveredExtensions)
    {
        Trace.Info($"Register action command extension for command {extension.Command}");
        _commandExtensions[extension.Command] = extension;
        if (extension.Command != "internal-set-repo-path")
        {
            _registeredCommands.Add(extension.Command);
        }
    }
}
19
Source : ActionCommandManager.cs
with MIT License
from actions
// Sets up command processing: the stop command is always recognized, and each
// extension found by the extension manager is mapped by its command name.
// "internal-set-repo-path" stays unregistered until explicitly enabled.
public override void Initialize(IHostContext hostContext)
{
    base.Initialize(hostContext);
    _registeredCommands.Add(_stopCommand);
    // Register all command extensions
    var extensionManager = hostContext.GetService<IExtensionManager>();
    foreach (var commandExtension in extensionManager.GetExtensions<IActionCommandExtension>() ?? new List<IActionCommandExtension>())
    {
        Trace.Info($"Register action command extension for command {commandExtension.Command}");
        _commandExtensions[commandExtension.Command] = commandExtension;
        // The internal repo-path command is opt-in only (see EnablePluginInternalCommand)
        if (commandExtension.Command == "internal-set-repo-path")
        {
            continue;
        }
        _registeredCommands.Add(commandExtension.Command);
    }
}
19
Source : ActionCommandManager.cs
with MIT License
from actions
// Opts in the "internal-set-repo-path" command, which Initialize deliberately
// leaves unregistered. Intended for runner-plugin scenarios only.
public void EnablePluginInternalCommand()
{
    Trace.Info($"Enable plugin internal command extension.");
    _registeredCommands.Add("internal-set-repo-path");
}
19
Source : ActionCommandManager.cs
with MIT License
from actions
// Attempts to interpret one line of step output as an action command (##[...] / ::...::
// syntax) and dispatch it to the matching extension.
//
// Returns false when the line is not a recognized command (so the caller treats it as
// plain output); returns true when the line was consumed as a command — including the
// stop/resume tokens and commands whose extension threw (the failure is recorded on the
// context instead of propagating).
//
// Stop/resume protocol: the stop command suspends command processing and registers a
// caller-chosen resume token; until that exact token arrives as a command, every other
// command line is ignored. All state transitions happen under _commandSerializeLock so
// concurrent output lines are processed one at a time.
public bool TryProcessCommand(IExecutionContext context, string input, ContainerInfo container)
{
    if (string.IsNullOrEmpty(input))
    {
        return false;
    }
    // TryParse input to Command (newer ::name:: syntax first, then legacy ##[name])
    ActionCommand actionCommand;
    if (!ActionCommand.TryParseV2(input, _registeredCommands, out actionCommand) &&
        !ActionCommand.TryParse(input, _registeredCommands, out actionCommand))
    {
        return false;
    }
    // 'notice' requires server-side support for enhanced annotations
    if (!ActionCommandManager.EnhancedAnnotationsEnabled(context) && actionCommand.Command == "notice")
    {
        context.Debug($"Enhanced Annotations not enabled on the server: 'notice' command will not be processed.");
        return false;
    }
    // Serialize order
    lock (_commandSerializeLock)
    {
        // Currently stopped: only the resume token is honored
        if (_stopProcessCommand)
        {
            // Resume token
            if (!string.IsNullOrEmpty(_stopToken) &&
                string.Equals(actionCommand.Command, _stopToken, StringComparison.OrdinalIgnoreCase))
            {
                context.Output(input);
                context.Debug("Resume processing commands");
                _registeredCommands.Remove(_stopToken);
                _stopProcessCommand = false;
                _stopToken = null;
                return true;
            }
            else
            {
                context.Debug($"Process commands has been stopped and waiting for '##[{_stopToken}]' to resume.");
                return false;
            }
        }
        // Currently processing
        else
        {
            // Stop command: suspend processing until the chosen token is seen again
            if (string.Equals(actionCommand.Command, _stopCommand, StringComparison.OrdinalIgnoreCase))
            {
                ValidateStopToken(context, actionCommand.Data);
                _stopToken = actionCommand.Data;
                _stopProcessCommand = true;
                _registeredCommands.Add(_stopToken);
                // Mask longer tokens in logs so the resume token cannot be leaked.
                // NOTE(review): the >6 threshold presumably avoids masking short common
                // words — confirm intent with the secret-masker owners.
                if (_stopToken.Length > 6)
                {
                    HostContext.SecretMasker.AddValue(_stopToken);
                }
                context.Output(input);
                context.Debug("Paused processing commands until the token you called ::stopCommands:: with is received");
                return true;
            }
            // Found command: optionally echo, then dispatch to the extension
            else if (_commandExtensions.TryGetValue(actionCommand.Command, out IActionCommandExtension extension))
            {
                if (context.EchoOnActionCommand && !extension.OmitEcho)
                {
                    context.Output(input);
                }
                try
                {
                    extension.ProcessCommand(context, input, actionCommand, container);
                }
                catch (Exception ex)
                {
                    // Extension failures fail the command, not the runner; when echo is
                    // omitted the raw input may hold sensitive data, so log the command
                    // name instead of the input.
                    var commandInformation = extension.OmitEcho ? extension.Command : input;
                    context.Error($"Unable to process command '{commandInformation}' successfully.");
                    context.Error(ex);
                    context.CommandResult = TaskResult.Failed;
                }
            }
            // Command not found
            else
            {
                context.Warning($"Can't find command extension for ##[{actionCommand.Command}.command].");
            }
        }
    }
    return true;
}
19
Source : IssueMatcher.cs
with MIT License
from actions
// Validates every registered matcher and enforces that owner names are unique
// (case-insensitive). Throws ArgumentException on a duplicate owner.
public void Validate()
{
    // Nothing to validate when there are no matchers
    if (!(_matchers?.Count > 0))
    {
        return;
    }
    var seenOwners = new HashSet<string>(StringComparer.OrdinalIgnoreCase);
    foreach (var matcher in _matchers)
    {
        matcher.Validate();
        if (!seenOwners.Add(matcher.Owner))
        {
            // Not localized since this is a programming contract
            throw new ArgumentException($"Duplicate owner name '{matcher.Owner}'");
        }
    }
}
19
Source : TemplateEvaluator.cs
with MIT License
from actions
// Evaluates "template" against the schema definition named by "type" and returns the
// resulting token, or null when evaluation failed (the exception is recorded on the
// context via context.Error rather than thrown). Begin/finish trace headers are
// written unless omitHeader is set; fileId, when present, names the file in those
// headers and in error reporting. removeBytes is passed through to the evaluator.
// NOTE(review): removeBytes presumably offsets memory accounting for bytes the caller
// already charged — confirm against TemplateMemory.
internal static TemplateToken Evaluate(
    TemplateContext context,
    String type,
    TemplateToken template,
    Int32 removeBytes,
    Int32? fileId,
    Boolean omitHeader = false)
{
    TemplateToken result;
    if (!omitHeader)
    {
        if (fileId != null)
        {
            context.TraceWriter.Info("{0}", $"Begin evaluating template '{context.GetFileName(fileId.Value)}'");
        }
        else
        {
            context.TraceWriter.Info("{0}", "Begin evaluating template");
        }
    }
    var evaluator = new TemplateEvaluator(context, template, removeBytes);
    try
    {
        // Build the set of context names available to expressions: named values plus
        // function names marked with a trailing "()" to distinguish them from values.
        var availableContext = new HashSet<String>(StringComparer.OrdinalIgnoreCase);
        foreach (var key in context.ExpressionValues.Keys)
        {
            availableContext.Add(key);
        }
        foreach (var function in context.ExpressionFunctions)
        {
            availableContext.Add($"{function.Name}()");
        }
        var definitionInfo = new DefinitionInfo(context.Schema, type, availableContext);
        result = evaluator.Evaluate(definitionInfo);
        if (result != null)
        {
            // Ensure the whole template was consumed
            evaluator.m_unraveler.ReadEnd();
        }
    }
    catch (Exception ex)
    {
        // Evaluation failures are reported on the context, not thrown to the caller
        context.Error(fileId, null, null, ex);
        result = null;
    }
    if (!omitHeader)
    {
        if (fileId != null)
        {
            context.TraceWriter.Info("{0}", $"Finished evaluating template '{context.GetFileName(fileId.Value)}'");
        }
        else
        {
            context.TraceWriter.Info("{0}", "Finished evaluating template");
        }
    }
    return result;
}
19
Source : TemplateEvaluator.cs
with MIT License
from actions
// Entry point for template evaluation: resolves the schema definition for "type",
// evaluates "template" with the context's expression values and functions in scope,
// and returns the evaluated token. Any exception is converted into a context error
// (attributed to fileId when provided) and null is returned. Trace headers bracket
// the evaluation unless omitHeader is true.
internal static TemplateToken Evaluate(
    TemplateContext context,
    String type,
    TemplateToken template,
    Int32 removeBytes,
    Int32? fileId,
    Boolean omitHeader = false)
{
    TemplateToken result;
    if (!omitHeader)
    {
        if (fileId != null)
        {
            context.TraceWriter.Info("{0}", $"Begin evaluating template '{context.GetFileName(fileId.Value)}'");
        }
        else
        {
            context.TraceWriter.Info("{0}", "Begin evaluating template");
        }
    }
    var evaluator = new TemplateEvaluator(context, template, removeBytes);
    try
    {
        // Expressions may reference any named value, or any function (suffixed "()"
        // to keep function names distinct from value names, case-insensitively).
        var availableContext = new HashSet<String>(StringComparer.OrdinalIgnoreCase);
        foreach (var key in context.ExpressionValues.Keys)
        {
            availableContext.Add(key);
        }
        foreach (var function in context.ExpressionFunctions)
        {
            availableContext.Add($"{function.Name}()");
        }
        var definitionInfo = new DefinitionInfo(context.Schema, type, availableContext);
        result = evaluator.Evaluate(definitionInfo);
        if (result != null)
        {
            // A non-null result must have consumed the entire template
            evaluator.m_unraveler.ReadEnd();
        }
    }
    catch (Exception ex)
    {
        // Report instead of rethrowing; the caller checks for a null result
        context.Error(fileId, null, null, ex);
        result = null;
    }
    if (!omitHeader)
    {
        if (fileId != null)
        {
            context.TraceWriter.Info("{0}", $"Finished evaluating template '{context.GetFileName(fileId.Value)}'");
        }
        else
        {
            context.TraceWriter.Info("{0}", "Finished evaluating template");
        }
    }
    return result;
}
See More Examples