Here are examples of the C# API System.Threading.Interlocked.Increment(ref int), taken from open source projects. By voting up, you can indicate which examples are most useful and appropriate.
2753 Examples
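Before the project examples below, here is a minimal sketch of the call itself: Interlocked.Increment(ref int) atomically adds one to the variable and returns the incremented value, which is what makes it safe for counters shared across threads. The class and variable names are illustrative only and are not taken from any of the listed projects.
using System;
using System.Threading;
using System.Threading.Tasks;
public static class InterlockedIncrementSketch
{
    public static void Main()
    {
        int counter = 0;
        // Each call atomically adds 1 and returns the new value, so no
        // increments are lost under parallel execution (a plain counter++
        // would race here).
        Parallel.For(0, 10_000, _ => Interlocked.Increment(ref counter));
        Console.WriteLine(counter); // prints 10000
    }
}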
19
View Source File : CelesteNetConnection.cs
License : MIT License
Project Creator : 0x0ade
public void Enqueue(DataType data) {
lock (QueueLock) {
int count;
if (MaxCount > 0 && Interlocked.CompareExchange(ref QueueCount, 0, 0) == MaxCount) {
QueueSendNext = (QueueSendNext + 1) % Queue.Length;
} else if ((count = Interlocked.Increment(ref QueueCount)) > Queue.Length) {
count--;
int next = QueueSendNext;
DataType?[] old = Queue;
DataType?[] resized = new DataType?[old.Length * 2];
if (next + count <= old.Length) {
Array.Copy(old, next, resized, 0, count);
} else {
Array.Copy(old, next, resized, 0, old.Length - next);
Array.Copy(old, 0, resized, old.Length - next, count - next);
}
Queue = resized;
QueueSendNext = 0;
QueueAddNext = count;
}
Queue[QueueAddNext] = data;
QueueAddNext = (QueueAddNext + 1) % Queue.Length;
try {
Event.Set();
} catch (ObjectDisposedException) {
}
}
}
19
View Source File : SqDatabase.cs
License : MIT License
Project Creator : 0x1000000
public void Dispose()
{
if (Interlocked.Increment(ref this._isDisposed) != 1)
{
return;
}
try
{
lock (this._tranSync)
{
if (this._currentTransaction != null)
{
this._currentTransaction.DbTransaction?.Dispose();
this._currentTransaction = null;
}
}
}
finally
{
if (!this._disposeConnection)
{
if (this._wasClosed && this._connection.State == ConnectionState.Open)
{
this._connection.Close();
}
}
else
{
this._connection.Dispose();
}
}
}
19
View Source File : AsyncLock.cs
License : MIT License
Project Creator : 1100100
private bool TryEnter()
{
lock (_parent._reentrancy)
{
Debug.Assert((_parent._owningId == UnlockedThreadId) == (_parent._reentrances == 0));
if (_parent._owningId != UnlockedThreadId && _parent._owningId != AsyncLock.ThreadId)
{
//another thread currently owns the lock
return false;
}
//we can go in
Interlocked.Increment(ref _parent._reentrances);
_parent._owningId = AsyncLock.ThreadId;
return true;
}
}
19
View Source File : DynamicAssembly.cs
License : MIT License
Project Creator : 1996v
public TypeBuilder DefineCollectionFormatterType(Type type, Type elementType)
{
VerifyTypeIsPublic(type);
string pre = "Array2.";
if (Array1FormatterHelper.IsArray1Type(elementType))
{
pre = "Array1.";
}
Type formatterType = typeof(IBssomFormatter<>).MakeGenericType(type);
TypeBuilder typeBuilder = DefineType("Bssom.Formatters." + pre + SubtractFullNameRegex.Replace(type.FullName, string.Empty).Replace(".", "_") + "Formatter" + Interlocked.Increment(ref nameSequence), TypeAttributes.NotPublic | TypeAttributes.Sealed, null, new[] { formatterType });
return typeBuilder;
}
19
View Source File : DynamicAssembly.cs
License : MIT License
Project Creator : 1996v
public TypeBuilder DefineFormatterType(Type type)
{
VerifyTypeIsPublic(type);
Type formatterType = typeof(IBssomFormatter<>).MakeGenericType(type);
TypeBuilder typeBuilder = DefineType("Bssom.Formatters." + SubtractFullNameRegex.Replace(type.FullName, string.Empty).Replace(".", "_") + "Formatter" + Interlocked.Increment(ref nameSequence), TypeAttributes.NotPublic | TypeAttributes.Sealed, null, new[] { formatterType });
return typeBuilder;
}
19
View Source File : DynamicAssembly.cs
License : MIT License
Project Creator : 1996v
public TypeBuilder DefineFormatterDelegateType(Type type)
{
TypeBuilder typeBuilder = DefineType("Bssom.Formatters." + SubtractFullNameRegex.Replace(type.FullName, string.Empty).Replace(".", "_") + "FormatterDelegate" + Interlocked.Increment(ref nameSequence), TypeAttributes.NotPublic | TypeAttributes.Sealed, null, null);
return typeBuilder;
}
19
View Source File : DynamicAssembly.cs
License : MIT License
Project Creator : 1996v
public TypeBuilder DefineInterfaceImpType(Type interfaceType)
{
VerifyTypeIsPublic(interfaceType);
TypeBuilder typeBuilder = DefineType("Bssom.DynamicInterfaceImp." + SubtractFullNameRegex.Replace(interfaceType.FullName, string.Empty).Replace(".", "_") + Interlocked.Increment(ref nameSequence), TypeAttributes.Public | TypeAttributes.Sealed, null, new[] { interfaceType });
return typeBuilder;
}
19
View Source File : SentinelAdapter.cs
License : MIT License
Project Creator : 2881099
internal void ResetSentinel()
{
if (ResetSentinelFlag != 0) return;
if (Interlocked.Increment(ref ResetSentinelFlag) != 1)
{
Interlocked.Decrement(ref ResetSentinelFlag);
return;
}
string masterhostEnd = _masterHost;
var allkeys = _ib.GetKeys().ToList();
for (int i = 0; i < _sentinels.Count; i++)
{
if (i > 0)
{
var first = _sentinels.First;
_sentinels.RemoveFirst();
_sentinels.AddLast(first.Value);
}
try
{
using (var sentinelcli = new RedisSentinelClient(_sentinels.First.Value))
{
var masterhost = sentinelcli.GetMasterAddrByName(_connectionString.Host);
var masterConnectionString = localTestHost(masterhost, RoleType.Master);
if (masterConnectionString == null) continue;
masterhostEnd = masterhost;
if (_rw_splitting)
{
foreach (var slave in sentinelcli.Salves(_connectionString.Host))
{
ConnectionStringBuilder slaveConnectionString = localTestHost($"{slave.ip}:{slave.port}", RoleType.Slave);
if (slaveConnectionString == null) continue;
}
}
foreach (var sentinel in sentinelcli.Sentinels(_connectionString.Host))
{
var remoteSentinelHost = $"{sentinel.ip}:{sentinel.port}";
if (_sentinels.Contains(remoteSentinelHost)) continue;
_sentinels.AddLast(remoteSentinelHost);
}
}
break;
}
catch { }
}
foreach (var spkey in allkeys) _ib.TryRemove(spkey, true);
Interlocked.Exchange(ref _masterHost, masterhostEnd);
Interlocked.Decrement(ref ResetSentinelFlag);
ConnectionStringBuilder localTestHost(string host, RoleType role)
{
ConnectionStringBuilder connectionString = _connectionString.ToString();
connectionString.Host = host;
connectionString.MinPoolSize = 1;
connectionString.MaxPoolSize = 1;
using (var cli = new RedisClient(connectionString))
{
if (cli.Role().role != role)
return null;
if (role == RoleType.Master)
{
//test set/get
}
}
connectionString.MinPoolSize = _connectionString.MinPoolSize;
connectionString.MaxPoolSize = _connectionString.MaxPoolSize;
_ib.TryRegister(host, () => new RedisClientPool(connectionString, null, TopOwner));
allkeys.Remove(host);
return connectionString;
}
}
19
View Source File : RedisClient.cs
License : MIT License
Project Creator : 2881099
public void Dispose()
{
if (Interlocked.Increment(ref _disposeCounter) != 1) return;
Adapter.Dispose();
_pubsubPriv?.Dispose();
}
19
View Source File : TemplateEngin.cs
License : MIT License
Project Creator : 2881099
private static ITemplateOutput Parser(string tplcode, string[] usings, IDictionary options) {
int view = Interlocked.Increment(ref _view);
StringBuilder sb = new StringBuilder();
IDictionary options_copy = new Hashtable();
foreach (DictionaryEntry options_de in options) options_copy[options_de.Key] = options_de.Value;
sb.AppendFormat(@"
using System;
using System.Collections;
using System.Collections.Generic;
using System.Linq;
using System.Text;
using System.Text.RegularExpressions;{1}
//namespace TplDynamicCodeGenerate {{
public class TplDynamicCodeGenerate_view{0} : FreeSql.Template.TemplateEngin.ITemplateOutput {{
public FreeSql.Template.TemplateEngin.TemplateReturnInfo OuTpUt(StringBuilder tOuTpUt, IDictionary oPtIoNs, string rEfErErFiLeNaMe, FreeSql.Template.TemplateEngin tEmPlAtEsEnDeR) {{
FreeSql.Template.TemplateEngin.TemplateReturnInfo rTn = tOuTpUt == null ?
new FreeSql.Template.TemplateEngin.TemplateReturnInfo {{ Sb = (tOuTpUt = new StringBuilder()), Blocks = new Dictionary<string, int[]>() }} :
new FreeSql.Template.TemplateEngin.TemplateReturnInfo {{ Sb = tOuTpUt, Blocks = new Dictionary<string, int[]>() }};
Dictionary<string, int[]> TPL__blocks = rTn.Blocks;
Stack<int[]> TPL__blocks_stack = new Stack<int[]>();
int[] TPL__blocks_stack_peek;
List<IDictionary> TPL__forc = new List<IDictionary>();
Func<IDictionary> pRoCeSsOpTiOnS = new Func<IDictionary>(delegate () {{
IDictionary nEwoPtIoNs = new Hashtable();
foreach (DictionaryEntry oPtIoNs_dE in oPtIoNs)
nEwoPtIoNs[oPtIoNs_dE.Key] = oPtIoNs_dE.Value;
foreach (IDictionary TPL__forc_dIc in TPL__forc)
foreach (DictionaryEntry TPL__forc_dIc_dE in TPL__forc_dIc)
nEwoPtIoNs[TPL__forc_dIc_dE.Key] = TPL__forc_dIc_dE.Value;
return nEwoPtIoNs;
}});
FreeSql.Template.TemplateEngin.TemplateIf tPlIf = delegate(object exp) {{
if (exp is bool) return (bool)exp;
if (exp == null) return false;
if (exp is int && (int)exp == 0) return false;
if (exp is string && (string)exp == string.Empty) return false;
if (exp is long && (long)exp == 0) return false;
if (exp is short && (short)exp == 0) return false;
if (exp is byte && (byte)exp == 0) return false;
if (exp is double && (double)exp == 0) return false;
if (exp is float && (float)exp == 0) return false;
if (exp is decimal && (decimal)exp == 0) return false;
return true;
}};
FreeSql.Template.TemplateEngin.TemplatePrint print = delegate(object[] pArMs) {{
if (pArMs == null || pArMs.Length == 0) return;
foreach (object pArMs_A in pArMs) if (pArMs_A != null) tOuTpUt.Append(pArMs_A);
}};
FreeSql.Template.TemplateEngin.TemplatePrint Print = print;", view, usings?.Any() == true ? $"\r\nusing {string.Join(";\r\nusing ", usings)};" : "");
#region Content inside {miss}...{/miss} blocks is not parsed
string[] tmp_content_arr = _reg_miss.Split(tplcode);
if (tmp_content_arr.Length > 1) {
sb.AppendFormat(@"
string[] TPL__MISS = new string[{0}];", Math.Ceiling(1.0 * (tmp_content_arr.Length - 1) / 2));
int miss_len = -1;
for (int a = 1; a < tmp_content_arr.Length; a += 2) {
sb.Append(string.Concat(@"
TPL__MISS[", ++miss_len, @"] = """, Utils.GetConstString(tmp_content_arr[a]), @""";"));
tmp_content_arr[a] = string.Concat("{#TPL__MISS[", miss_len, "]}");
}
tplcode = string.Join("", tmp_content_arr);
}
#endregion
#region Extended syntax, e.g. <div @if="expression"></div>
tplcode = htmlSyntax(tplcode, 3); //<div @if="C# expression" @for="index 1,100"></div>
//Process {% %} blocks of C# code
tmp_content_arr = _reg_code.Split(tplcode);
if (tmp_content_arr.Length == 1) {
tplcode = Utils.GetConstString(tplcode)
.Replace("{%", "{$TEMPLATE__CODE}")
.Replace("%}", "{/$TEMPLATE__CODE}");
} else {
tmp_content_arr[0] = Utils.GetConstString(tmp_content_arr[0]);
for (int a = 1; a < tmp_content_arr.Length; a += 4) {
tmp_content_arr[a] = "{$TEMPLATE__CODE}";
tmp_content_arr[a + 2] = "{/$TEMPLATE__CODE}";
tmp_content_arr[a + 3] = Utils.GetConstString(tmp_content_arr[a + 3]);
}
tplcode = string.Join("", tmp_content_arr);
}
#endregion
sb.Append(@"
tOuTpUt.Append(""");
string error = null;
int tpl_tmpid = 0;
int forc_i = 0;
string extends = null;
Stack<string> codeTree = new Stack<string>();
Stack<string> forEndRepl = new Stack<string>();
sb.Append(_reg.Replace(tplcode, delegate (Match m) {
string _0 = m.Groups[0].Value;
if (!string.IsNullOrEmpty(error)) return _0;
string _1 = m.Groups[1].Value.Trim(' ', '\t');
string _2 = m.Groups[2].Value
.Replace("\\\\", "\\")
.Replace("\\\"", "\"");
_2 = Utils.ReplaceSingleQuote(_2);
switch (_1) {
#region $TEMPLATE__CODE--------------------------------------------------
case "$TEMPLATE__CODE":
codeTree.Push(_1);
return @""");
";
case "/$TEMPLATE__CODE":
string pop = codeTree.Pop();
if (pop != "$TEMPLATE__CODE") {
codeTree.Push(pop);
error = "编译出错,{% 与 %} 并没有配对";
return _0;
}
return @"
tOuTpUt.Append(""";
#endregion
case "include":
return string.Format(@""");
tEmPlAtEsEnDeR.RenderFile2(tOuTpUt, pRoCeSsOpTiOnS(), ""{0}"", rEfErErFiLeNaMe);
tOuTpUt.Append(""", _2);
case "import":
return _0;
case "module":
return _0;
case "/module":
return _0;
case "extends":
//{extends ../inc/layout.html}
if (string.IsNullOrEmpty(extends) == false) return _0;
extends = _2;
return string.Empty;
case "block":
codeTree.Push("block");
return string.Format(@""");
TPL__blocks_stack_peek = new int[] {{ tOuTpUt.Length, 0 }};
TPL__blocks_stack.Push(TPL__blocks_stack_peek);
TPL__blocks.Add(""{0}"", TPL__blocks_stack_peek);
tOuTpUt.Append(""", _2.Trim(' ', '\t'));
case "/block":
codeTreeEnd(codeTree, "block");
return @""");
TPL__blocks_stack_peek = TPL__blocks_stack.Pop();
TPL__blocks_stack_peek[1] = tOuTpUt.Length - TPL__blocks_stack_peek[0];
tOuTpUt.Append(""";
#region ##---------------------------------------------------------
case "#":
if (_2[0] == '#')
return string.Format(@""");
try {{ Print({0}); }} catch {{ }}
tOuTpUt.Append(""", _2.Substring(1));
return string.Format(@""");
Print({0});
tOuTpUt.Append(""", _2);
#endregion
#region for--------------------------------------------------------
case "for":
forc_i++;
int cur_tpl_tmpid = tpl_tmpid;
string sb_endRepl = string.Empty;
StringBuilder sbfor = new StringBuilder();
sbfor.Append(@""");");
Match mfor = _reg_forin.Match(_2);
if (mfor.Success) {
string mfor1 = mfor.Groups[1].Value.Trim(' ', '\t');
string mfor2 = mfor.Groups[2].Value.Trim(' ', '\t');
sbfor.AppendFormat(@"
//new Action(delegate () {{
IDictionary TPL__tmp{0} = new Hashtable();
TPL__forc.Add(TPL__tmp{0});
var TPL__tmp{1} = {3};
var TPL__tmp{2} = {4};", ++tpl_tmpid, ++tpl_tmpid, ++tpl_tmpid, mfor.Groups[3].Value, mfor1);
sb_endRepl = string.Concat(sb_endRepl, string.Format(@"
{0} = TPL__tmp{1};", mfor1, cur_tpl_tmpid + 3));
if (options_copy.Contains(mfor1) == false) options_copy[mfor1] = null;
if (!string.IsNullOrEmpty(mfor2)) {
sbfor.AppendFormat(@"
var TPL__tmp{1} = {0};
{0} = 0;", mfor2, ++tpl_tmpid);
sb_endRepl = string.Concat(sb_endRepl, string.Format(@"
{0} = TPL__tmp{1};", mfor2, tpl_tmpid));
if (options_copy.Contains(mfor2) == false) options_copy[mfor2] = null;
}
sbfor.AppendFormat(@"
if (TPL__tmp{1} != null)
foreach (var TPL__tmp{0} in TPL__tmp{1}) {{", ++tpl_tmpid, cur_tpl_tmpid + 2);
if (!string.IsNullOrEmpty(mfor2))
sbfor.AppendFormat(@"
TPL__tmp{1}[""{0}""] = ++ {0};", mfor2, cur_tpl_tmpid + 1);
sbfor.AppendFormat(@"
TPL__tmp{1}[""{0}""] = TPL__tmp{2};
{0} = TPL__tmp{2};
tOuTpUt.Append(""", mfor1, cur_tpl_tmpid + 1, tpl_tmpid);
codeTree.Push("for");
forEndRepl.Push(sb_endRepl);
return sbfor.ToString();
}
mfor = _reg_foron.Match(_2);
if (mfor.Success) {
string mfor1 = mfor.Groups[1].Value.Trim(' ', '\t');
string mfor2 = mfor.Groups[2].Value.Trim(' ', '\t');
string mfor3 = mfor.Groups[3].Value.Trim(' ', '\t');
sbfor.AppendFormat(@"
//new Action(delegate () {{
IDictionary TPL__tmp{0} = new Hashtable();
TPL__forc.Add(TPL__tmp{0});
var TPL__tmp{1} = {3};
var TPL__tmp{2} = {4};", ++tpl_tmpid, ++tpl_tmpid, ++tpl_tmpid, mfor.Groups[4].Value, mfor1);
sb_endRepl = string.Concat(sb_endRepl, string.Format(@"
{0} = TPL__tmp{1};", mfor1, cur_tpl_tmpid + 3));
if (options_copy.Contains(mfor1) == false) options_copy[mfor1] = null;
if (!string.IsNullOrEmpty(mfor2)) {
sbfor.AppendFormat(@"
var TPL__tmp{1} = {0};", mfor2, ++tpl_tmpid);
sb_endRepl = string.Concat(sb_endRepl, string.Format(@"
{0} = TPL__tmp{1};", mfor2, tpl_tmpid));
if (options_copy.Contains(mfor2) == false) options_copy[mfor2] = null;
}
if (!string.IsNullOrEmpty(mfor3)) {
sbfor.AppendFormat(@"
var TPL__tmp{1} = {0};
{0} = 0;", mfor3, ++tpl_tmpid);
sb_endRepl = string.Concat(sb_endRepl, string.Format(@"
{0} = TPL__tmp{1};", mfor3, tpl_tmpid));
if (options_copy.Contains(mfor3) == false) options_copy[mfor3] = null;
}
sbfor.AppendFormat(@"
if (TPL__tmp{2} != null)
foreach (DictionaryEntry TPL__tmp{1} in TPL__tmp{2}) {{
{0} = TPL__tmp{1}.Key;
TPL__tmp{3}[""{0}""] = {0};", mfor1, ++tpl_tmpid, cur_tpl_tmpid + 2, cur_tpl_tmpid + 1);
if (!string.IsNullOrEmpty(mfor2))
sbfor.AppendFormat(@"
{0} = TPL__tmp{1}.Value;
TPL__tmp{2}[""{0}""] = {0};", mfor2, tpl_tmpid, cur_tpl_tmpid + 1);
if (!string.IsNullOrEmpty(mfor3))
sbfor.AppendFormat(@"
TPL__tmp{1}[""{0}""] = ++ {0};", mfor3, cur_tpl_tmpid + 1);
sbfor.AppendFormat(@"
tOuTpUt.Append(""");
codeTree.Push("for");
forEndRepl.Push(sb_endRepl);
return sbfor.ToString();
}
mfor = _reg_forab.Match(_2);
if (mfor.Success) {
string mfor1 = mfor.Groups[1].Value.Trim(' ', '\t');
sbfor.AppendFormat(@"
//new Action(delegate () {{
IDictionary TPL__tmp{0} = new Hashtable();
TPL__forc.Add(TPL__tmp{0});
var TPL__tmp{1} = {5};
{5} = {3} - 1;
if ({5} == null) {5} = 0;
var TPL__tmp{2} = {4} + 1;
while (++{5} < TPL__tmp{2}) {{
TPL__tmp{0}[""{5}""] = {5};
tOuTpUt.Append(""", ++tpl_tmpid, ++tpl_tmpid, ++tpl_tmpid, mfor.Groups[2].Value, mfor.Groups[3].Value, mfor1);
sb_endRepl = string.Concat(sb_endRepl, string.Format(@"
{0} = TPL__tmp{1};", mfor1, cur_tpl_tmpid + 1));
if (options_copy.Contains(mfor1) == false) options_copy[mfor1] = null;
codeTree.Push("for");
forEndRepl.Push(sb_endRepl);
return sbfor.ToString();
}
return _0;
case "/for":
if (--forc_i < 0) return _0;
codeTreeEnd(codeTree, "for");
return string.Format(@""");
}}{0}
TPL__forc.RemoveAt(TPL__forc.Count - 1);
//}})();
tOuTpUt.Append(""", forEndRepl.Pop());
#endregion
#region if---------------------------------------------------------
case "if":
codeTree.Push("if");
return string.Format(@""");
if ({1}tPlIf({0})) {{
tOuTpUt.Append(""", _2[0] == '!' ? _2.Substring(1) : _2, _2[0] == '!' ? '!' : ' ');
case "elseif":
codeTreeEnd(codeTree, "if");
codeTree.Push("if");
return string.Format(@""");
}} else if ({1}tPlIf({0})) {{
tOuTpUt.Append(""", _2[0] == '!' ? _2.Substring(1) : _2, _2[0] == '!' ? '!' : ' ');
case "else":
codeTreeEnd(codeTree, "if");
codeTree.Push("if");
return @""");
} else {
tOuTpUt.Append(""";
case "/if":
codeTreeEnd(codeTree, "if");
return @""");
}
tOuTpUt.Append(""";
#endregion
}
return _0;
}));
sb.Append(@""");");
if (string.IsNullOrEmpty(extends) == false) {
sb.AppendFormat(@"
FreeSql.Template.TemplateEngin.TemplateReturnInfo eXtEnDs_ReT = tEmPlAtEsEnDeR.RenderFile2(null, pRoCeSsOpTiOnS(), ""{0}"", rEfErErFiLeNaMe);
string rTn_Sb_string = rTn.Sb.ToString();
foreach(string eXtEnDs_ReT_blocks_key in eXtEnDs_ReT.Blocks.Keys) {{
if (rTn.Blocks.ContainsKey(eXtEnDs_ReT_blocks_key)) {{
int[] eXtEnDs_ReT_blocks_value = eXtEnDs_ReT.Blocks[eXtEnDs_ReT_blocks_key];
eXtEnDs_ReT.Sb.Remove(eXtEnDs_ReT_blocks_value[0], eXtEnDs_ReT_blocks_value[1]);
int[] rTn_blocks_value = rTn.Blocks[eXtEnDs_ReT_blocks_key];
eXtEnDs_ReT.Sb.Insert(eXtEnDs_ReT_blocks_value[0], rTn_Sb_string.Substring(rTn_blocks_value[0], rTn_blocks_value[1]));
foreach(string eXtEnDs_ReT_blocks_keyb in eXtEnDs_ReT.Blocks.Keys) {{
if (eXtEnDs_ReT_blocks_keyb == eXtEnDs_ReT_blocks_key) continue;
int[] eXtEnDs_ReT_blocks_valueb = eXtEnDs_ReT.Blocks[eXtEnDs_ReT_blocks_keyb];
if (eXtEnDs_ReT_blocks_valueb[0] >= eXtEnDs_ReT_blocks_value[0])
eXtEnDs_ReT_blocks_valueb[0] = eXtEnDs_ReT_blocks_valueb[0] - eXtEnDs_ReT_blocks_value[1] + rTn_blocks_value[1];
}}
eXtEnDs_ReT_blocks_value[1] = rTn_blocks_value[1];
}}
}}
return eXtEnDs_ReT;
", extends);
} else {
sb.Append(@"
return rTn;");
}
sb.Append(@"
}
}
//}
");
var str = "FreeSql.Template.TemplateEngin.TemplatePrint Print = print;";
int dim_idx = sb.ToString().IndexOf(str) + str.Length;
foreach (string dic_name in options_copy.Keys) {
sb.Insert(dim_idx, string.Format(@"
dynamic {0} = oPtIoNs[""{0}""];", dic_name));
}
//Console.WriteLine(sb.ToString());
return Complie(sb.ToString(), @"TplDynamicCodeGenerate_view" + view);
}
19
View Source File : ChatManager.cs
License : Apache License 2.0
Project Creator : AantCoder
public Chat CreateChat()
{
var chat = new Chat()
{
PartyLogin = new List<string>() { "system" },
LastChanged = DateTime.UtcNow,
Id = Interlocked.Increment(ref _maxChatId),
};
return chat;
}
19
View Source File : ChatManager.cs
License : Apache License 2.0
Project Creator : AantCoder
public int GetChatId()
{
return Interlocked.Increment(ref _maxChatId);
}
19
View Source File : ServerManager.cs
License : Apache License 2.0
Project Creator : AantCoder
private void ConnectionAccepted(ConnectClient client)
{
if (ActiveClientCount > MaxActiveClientCount)
{
client.Dispose();
return;
}
Interlocked.Increment(ref _ActiveClientCount);
var thread = new Thread(() => DoClient(client));
thread.IsBackground = true;
thread.Start();
}
19
View Source File : BufferSegmentProvider.cs
License : MIT License
Project Creator : Abc-Arbitrage
public BufferSegment GetSegment()
{
var nextSegmentIndex = Interlocked.Increment(ref _segmentIndex);
var largeBuffer = AllocateLargeBufferIfNeeded(nextSegmentIndex);
var bufferSegmentIndex = nextSegmentIndex - largeBuffer.FirstSegmentGlobalIndex;
// ReSharper disable once InconsistentlySynchronizedField
var bufferSegment = largeBuffer.GetSegment(bufferSegmentIndex * _bufferSegmentSize, _bufferSegmentSize);
return bufferSegment;
}
19
View Source File : BiotaGuidConsolidator.cs
License : GNU Affero General Public License v3.0
Project Creator : ACEmulator
public static void ConsolidateBiotaGuids(uint startingGuid, out int numberOfBiotasConsolidated, out int numberOfErrors)
{
log.Info($"Consolidating biotas, starting at guid 0x{startingGuid:X8}...");
Thread.Sleep(1000); // Give the logger time to flush to the client so that our output lines up in order
Console.WriteLine("!!! Do not proceed unless you have backed up your shard database first !!!");
Console.WriteLine("In the event of any failure, you may be asked to rollback your shard database.");
Console.WriteLine("Press any key to proceed, or abort the process to quit.");
Console.ReadLine();
Console.WriteLine(".... hold on to your butts...");
if (startingGuid < ObjectGuid.DynamicMin)
throw new Exception($"startingGuid cannot be lower than ObjectGuid.DynamicMin (0x{ObjectGuid.DynamicMin:X8})");
int numOfBiotasConsolidated = 0;
int numOfErrors = 0;
var shardDatabase = new ShardDatabase();
var sequenceGaps = shardDatabase.GetSequenceGaps(ObjectGuid.DynamicMin, 10000000);
var availableIDs = new LinkedList<(uint start, uint end)>(sequenceGaps);
List<Biota> partialBiotas;
using (var context = new ShardDbContext())
partialBiotas = context.Biota.Where(r => r.Id >= startingGuid).OrderByDescending(r => r.Id).ToList();
var idConversions = new ConcurrentDictionary<uint, uint>();
// Process ConsolidatableBasicWeenieTypes first
Parallel.ForEach(partialBiotas, partialBiota =>
{
if (numOfErrors > 0)
return;
if (!ConsolidatableBasicWeenieTypes.Contains((WeenieType)partialBiota.WeenieType))
return;
// Get the original biota
var fullBiota = shardDatabase.GetBiota(partialBiota.Id, true);
if (fullBiota == null)
{
Interlocked.Increment(ref numOfErrors);
log.Warn($"Failed to get biota with id 0x{partialBiota.Id:X8} from the database. This shouldn't happen. It also shouldn't require a rollback.");
return;
}
// Get the next available id
uint newId = 0;
lock (availableIDs)
{
if (availableIDs.First != null)
{
var id = availableIDs.First.Value.start;
if (availableIDs.First.Value.start == availableIDs.First.Value.end)
availableIDs.RemoveFirst();
else
availableIDs.First.Value = (availableIDs.First.Value.start + 1, availableIDs.First.Value.end);
newId = id;
}
}
if (newId == 0)
{
Interlocked.Increment(ref numOfErrors);
log.Fatal("Failed to generate new id. No more id's available for consolidation. This shouldn't require a rollback.");
return;
}
idConversions[fullBiota.Id] = newId;
// Copy our original biota into a new biota and set the new id
var converted = BiotaConverter.ConvertToEntityBiota(fullBiota);
converted.Id = newId;
// Save the new biota
if (!shardDatabase.SaveBiota(converted, new ReaderWriterLockSlim()))
{
Interlocked.Increment(ref numOfErrors);
log.Fatal($"Failed to save new biota with id 0x{fullBiota.Id:X8} to the database. Please rollback your shard.");
return;
}
// Finally, remove the original biota
if (!shardDatabase.RemoveBiota(fullBiota.Id))
{
Interlocked.Increment(ref numOfErrors);
log.Fatal($"Failed to remove original biota with id 0x{fullBiota.Id:X8} from database. Please rollback your shard.");
return;
}
var tempNumOfBiotasConsolidated = Interlocked.Increment(ref numOfBiotasConsolidated);
if ((tempNumOfBiotasConsolidated + numOfErrors) % 1000 == 0)
Console.WriteLine($"{tempNumOfBiotasConsolidated:N0} biotas successfully processed out of {partialBiotas.Count:N0}...");
});
// Process ConsolidatableContainerWeenieTypes second
foreach (var partialBiota in partialBiotas)
{
if (numOfErrors > 0)
break;
if (!ConsolidatableContainerWeenieTypes.Contains((WeenieType)partialBiota.WeenieType))
continue;
// Get the original biota
var fullBiota = shardDatabase.GetBiota(partialBiota.Id, true);
if (fullBiota == null)
{
Interlocked.Increment(ref numOfErrors);
log.Warn($"Failed to get biota with id 0x{partialBiota.Id:X8} from the database. This shouldn't happen. It also shouldn't require a rollback.");
break;
}
// Get the next available id
uint newId = 0;
lock (availableIDs)
{
if (availableIDs.First != null)
{
var id = availableIDs.First.Value.start;
if (availableIDs.First.Value.start == availableIDs.First.Value.end)
availableIDs.RemoveFirst();
else
availableIDs.First.Value = (availableIDs.First.Value.start + 1, availableIDs.First.Value.end);
newId = id;
}
}
if (newId == 0)
{
Interlocked.Increment(ref numOfErrors);
log.Fatal("Failed to generate new id. No more id's available for consolidation. This shouldn't require a rollback.");
break;
}
idConversions[fullBiota.Id] = newId;
// Copy our original biota into a new biota and set the new id
var converted = BiotaConverter.ConvertToEntityBiota(fullBiota);
converted.Id = newId;
// Save the new biota
if (!shardDatabase.SaveBiota(converted, new ReaderWriterLockSlim()))
{
Interlocked.Increment(ref numOfErrors);
log.Fatal($"Failed to save new biota with id 0x{fullBiota.Id:X8} to the database. Please rollback your shard.");
break;
}
// update contained items to point to the new container
using (var context = new ShardDbContext())
{
var ownedItems = context.BiotaPropertiesIID.Where(r => r.Type == (ushort)PropertyInstanceId.Owner && r.Value == fullBiota.Id);
foreach (var item in ownedItems)
item.Value = converted.Id;
var containedItems = context.BiotaPropertiesIID.Where(r => r.Type == (ushort)PropertyInstanceId.Container && r.Value == fullBiota.Id);
foreach (var item in containedItems)
item.Value = converted.Id;
context.SaveChanges();
}
// Finally, remove the original biota
if (!shardDatabase.RemoveBiota(fullBiota.Id))
{
Interlocked.Increment(ref numOfErrors);
log.Fatal($"Failed to remove original biota with id 0x{fullBiota.Id:X8} from database. Please rollback your shard.");
break;
}
var tempNumOfBiotasConsolidated = Interlocked.Increment(ref numOfBiotasConsolidated);
if ((tempNumOfBiotasConsolidated + numOfErrors) % 1000 == 0)
Console.WriteLine($"{tempNumOfBiotasConsolidated:N0} biotas successfully processed out of {partialBiotas.Count:N0}...");
}
// Update enchantment tables for equipped items
using (var context = new ShardDbContext())
{
var enchantments = context.BiotaPropertiesEnchantmentRegistry.Where(r => r.CasterObjectId >= startingGuid).ToList();
// First, remove the enchantments from the database
foreach (var enchantment in enchantments)
{
if (idConversions.TryGetValue(enchantment.CasterObjectId, out var newId))
context.BiotaPropertiesEnchantmentRegistry.Remove(enchantment);
}
context.SaveChanges();
// Second, re-id them and add them back
foreach (var enchantment in enchantments)
{
if (idConversions.TryGetValue(enchantment.CasterObjectId, out var newId))
{
enchantment.CasterObjectId = newId;
context.BiotaPropertiesEnchantmentRegistry.Add(enchantment);
}
}
var shortcuts = context.CharacterPropertiesShortcutBar.Where(r => r.ShortcutObjectId >= startingGuid).ToList();
foreach (var shortcut in shortcuts)
{
if (idConversions.TryGetValue(shortcut.ShortcutObjectId, out var newId))
shortcut.ShortcutObjectId = newId;
}
context.SaveChanges();
}
// Finished
numberOfBiotasConsolidated = numOfBiotasConsolidated;
numberOfErrors = numOfErrors;
log.Info($"Consolidated {numberOfBiotasConsolidated:N0} biotas with {numberOfErrors:N0} errors out of {partialBiotas.Count:N0} total.");
}
19
View Source File : Player_Move.cs
License : GNU Affero General Public License v3.0
Project Creator : ACEmulator
private int GetNextMoveToChainNumber()
{
return Interlocked.Increment(ref moveToChainCounter);
}
19
View Source File : Player_Move.cs
License : GNU Affero General Public License v3.0
Project Creator : ACEmulator
public void StopExistingMoveToChains()
{
Interlocked.Increment(ref moveToChainCounter);
lastCompletedMove = moveToChainCounter;
}
19
View Source File : FileContainerServer.cs
License : MIT License
Project Creator : actions
private void DrainUploadQueue(RunnerActionPluginExecutionContext context)
{
while (_fileUploadQueue.TryDequeue(out string fileToUpload))
{
context.Debug($"Clearing upload queue: '{fileToUpload}'");
Interlocked.Increment(ref _uploadFilesProcessed);
}
}
19
View Source File : FileContainerServer.cs
License : MIT License
Project Creator : actions
private async Task<UploadResult> UploadAsync(RunnerActionPluginExecutionContext context, int uploaderId, CancellationToken token)
{
List<string> failedFiles = new List<string>();
long uploadedSize = 0;
string fileToUpload;
Stopwatch uploadTimer = new Stopwatch();
while (_fileUploadQueue.TryDequeue(out fileToUpload))
{
token.ThrowIfCancellationRequested();
try
{
using (FileStream fs = File.Open(fileToUpload, FileMode.Open, FileAccess.Read, FileShare.Read))
{
string itemPath = (_containerPath.TrimEnd('/') + "/" + fileToUpload.Remove(0, _sourceParentDirectory.Length + 1)).Replace('\\', '/');
bool failAndExit = false;
try
{
uploadTimer.Restart();
using (HttpResponseMessage response = await _fileContainerHttpClient.UploadFileAsync(_containerId, itemPath, fs, _projectId, cancellationToken: token, chunkSize: 4 * 1024 * 1024))
{
if (response == null || response.StatusCode != HttpStatusCode.Created)
{
context.Output($"Unable to copy file to server StatusCode={response?.StatusCode}: {response?.ReasonPhrase}. Source file path: {fileToUpload}. Target server path: {itemPath}");
if (response?.StatusCode == HttpStatusCode.Conflict)
{
// fail upload task but continue with any other files
context.Error($"Error '{fileToUpload}' has already been uploaded.");
}
else if (_fileContainerHttpClient.IsFastFailResponse(response))
{
// Fast fail: we received an http status code where we should abandon our efforts
context.Output($"Cannot continue uploading files, so draining upload queue of {_fileUploadQueue.Count} items.");
DrainUploadQueue(context);
failedFiles.Clear();
failAndExit = true;
throw new UploadFailedException($"Critical failure uploading '{fileToUpload}'");
}
else
{
context.Debug($"Adding '{fileToUpload}' to retry list.");
failedFiles.Add(fileToUpload);
}
throw new UploadFailedException($"Http failure response '{response?.StatusCode}': '{response?.ReasonPhrase}' while uploading '{fileToUpload}'");
}
uploadTimer.Stop();
context.Debug($"File: '{fileToUpload}' took {uploadTimer.ElapsedMilliseconds} milliseconds to finish upload");
uploadedSize += fs.Length;
OutputLogForFile(context, fileToUpload, $"Detail upload trace for file: {itemPath}", context.Debug);
}
}
catch (OperationCanceledException) when (token.IsCancellationRequested)
{
context.Output($"File upload has been cancelled during upload file: '{fileToUpload}'.");
throw;
}
catch (Exception ex)
{
context.Output($"Fail to upload '{fileToUpload}' due to '{ex.Message}'.");
context.Output(ex.ToString());
OutputLogForFile(context, fileToUpload, $"Detail upload trace for file that fail to upload: {itemPath}", context.Output);
if (failAndExit)
{
context.Debug("Exiting upload.");
throw;
}
}
}
Interlocked.Increment(ref _uploadFilesProcessed);
}
catch (Exception ex)
{
context.Output($"File error '{ex.Message}' when uploading file '{fileToUpload}'.");
throw ex;
}
}
return new UploadResult(failedFiles, uploadedSize);
}
19
View Source File : FileContainerServer.cs
License : MIT License
Project Creator : actions
private async Task<DownloadResult> DownloadAsync(RunnerActionPluginExecutionContext context, int downloaderId, CancellationToken token)
{
List<DownloadInfo> failedFiles = new List<DownloadInfo>();
Stopwatch downloadTimer = new Stopwatch();
while (_fileDownloadQueue.TryDequeue(out DownloadInfo fileToDownload))
{
token.ThrowIfCancellationRequested();
try
{
int retryCount = 0;
bool downloadFailed = false;
while (true)
{
try
{
context.Debug($"Start downloading file: '{fileToDownload.ItemPath}' (Downloader {downloaderId})");
downloadTimer.Restart();
using (FileStream fs = new FileStream(fileToDownload.LocalPath, FileMode.Create, FileAccess.Write, FileShare.None, bufferSize: _defaultFileStreamBufferSize, useAsync: true))
using (var downloadStream = await _fileContainerHttpClient.DownloadFileAsync(_containerId, fileToDownload.ItemPath, token, _projectId))
{
await downloadStream.CopyToAsync(fs, _defaultCopyBufferSize, token);
await fs.FlushAsync(token);
downloadTimer.Stop();
context.Debug($"File: '{fileToDownload.LocalPath}' took {downloadTimer.ElapsedMilliseconds} milliseconds to finish download (Downloader {downloaderId})");
break;
}
}
catch (OperationCanceledException) when (token.IsCancellationRequested)
{
context.Debug($"Download has been cancelled while downloading {fileToDownload.ItemPath}. (Downloader {downloaderId})");
throw;
}
catch (Exception ex)
{
retryCount++;
context.Warning($"Fail to download '{fileToDownload.ItemPath}', error: {ex.Message} (Downloader {downloaderId})");
context.Debug(ex.ToString());
}
if (retryCount < 3)
{
var backOff = BackoffTimerHelper.GetRandomBackoff(TimeSpan.FromSeconds(10), TimeSpan.FromSeconds(30));
context.Warning($"Back off {backOff.TotalSeconds} seconds before retry. (Downloader {downloaderId})");
await Task.Delay(backOff);
}
else
{
// upload still failed after 3 tries.
downloadFailed = true;
break;
}
}
if (downloadFailed)
{
// tracking file that failed to download.
failedFiles.Add(fileToDownload);
}
Interlocked.Increment(ref _downloadFilesProcessed);
}
catch (Exception ex)
{
// We should never
context.Error($"Error '{ex.Message}' when downloading file '{fileToDownload}'. (Downloader {downloaderId})");
throw ex;
}
}
return new DownloadResult(failedFiles);
}
19
View Source File : ProcessInvoker.cs
License : MIT License
Project Creator : actions
public async Task<int> ExecuteAsync(
string workingDirectory,
string fileName,
string arguments,
IDictionary<string, string> environment,
bool requireExitCodeZero,
Encoding outputEncoding,
bool killProcessOnCancel,
Channel<string> redirectStandardIn,
bool inheritConsoleHandler,
bool keepStandardInOpen,
bool highPriorityProcess,
CancellationToken cancellationToken)
{
ArgUtil.Null(_proc, nameof(_proc));
ArgUtil.NotNullOrEmpty(fileName, nameof(fileName));
Trace.Info("Starting process:");
Trace.Info($" File name: '{fileName}'");
Trace.Info($" Arguments: '{arguments}'");
Trace.Info($" Working directory: '{workingDirectory}'");
Trace.Info($" Require exit code zero: '{requireExitCodeZero}'");
Trace.Info($" Encoding web name: {outputEncoding?.WebName} ; code page: '{outputEncoding?.CodePage}'");
Trace.Info($" Force kill process on cancellation: '{killProcessOnCancel}'");
Trace.Info($" Redirected STDIN: '{redirectStandardIn != null}'");
Trace.Info($" Persist current code page: '{inheritConsoleHandler}'");
Trace.Info($" Keep redirected STDIN open: '{keepStandardInOpen}'");
Trace.Info($" High priority process: '{highPriorityProcess}'");
_proc = new Process();
_proc.StartInfo.FileName = fileName;
_proc.StartInfo.Arguments = arguments;
_proc.StartInfo.WorkingDirectory = workingDirectory;
_proc.StartInfo.UseShellExecute = false;
_proc.StartInfo.CreateNoWindow = !inheritConsoleHandler;
_proc.StartInfo.RedirectStandardInput = true;
_proc.StartInfo.RedirectStandardError = true;
_proc.StartInfo.RedirectStandardOutput = true;
// Ensure we process STDERR even the process exit event happen before we start read STDERR stream.
if (_proc.StartInfo.RedirectStandardError)
{
Interlocked.Increment(ref _asyncStreamReaderCount);
}
// Ensure we process STDOUT even the process exit event happen before we start read STDOUT stream.
if (_proc.StartInfo.RedirectStandardOutput)
{
Interlocked.Increment(ref _asyncStreamReaderCount);
}
#if OS_WINDOWS
// If StandardErrorEncoding or StandardOutputEncoding is not specified the on the
// ProcessStartInfo object, then .NET PInvokes to resolve the default console output
// code page:
// [DllImport("api-ms-win-core-console-l1-1-0.dll", SetLastError = true)]
// public extern static uint GetConsoleOutputCP();
StringUtil.EnsureRegisterEncodings();
#endif
if (outputEncoding != null)
{
_proc.StartInfo.StandardErrorEncoding = outputEncoding;
_proc.StartInfo.StandardOutputEncoding = outputEncoding;
}
// Copy the environment variables.
if (environment != null && environment.Count > 0)
{
foreach (KeyValuePair<string, string> kvp in environment)
{
_proc.StartInfo.Environment[kvp.Key] = kvp.Value;
}
}
// Indicate GitHub Actions process.
_proc.StartInfo.Environment["GITHUB_ACTIONS"] = "true";
// Set CI=true when no one else already set it.
// CI=true is common set in most CI provider in GitHub
if (!_proc.StartInfo.Environment.ContainsKey("CI") &&
Environment.GetEnvironmentVariable("CI") == null)
{
_proc.StartInfo.Environment["CI"] = "true";
}
// Hook up the events.
_proc.EnableRaisingEvents = true;
_proc.Exited += ProcessExitedHandler;
// Start the process.
_stopWatch = Stopwatch.StartNew();
_proc.Start();
// Decrease invoked process priority, in platform specifc way, relative to parent
if (!highPriorityProcess)
{
DecreaseProcessPriority(_proc);
}
// Start the standard error notifications, if appropriate.
if (_proc.StartInfo.RedirectStandardError)
{
StartReadStream(_proc.StandardError, _errorData);
}
// Start the standard output notifications, if appropriate.
if (_proc.StartInfo.RedirectStandardOutput)
{
StartReadStream(_proc.StandardOutput, _outputData);
}
if (_proc.StartInfo.RedirectStandardInput)
{
if (redirectStandardIn != null)
{
StartWriteStream(redirectStandardIn, _proc.StandardInput, keepStandardInOpen);
}
else
{
// Close the input stream. This is done to prevent commands from blocking the build waiting for input from the user.
_proc.StandardInput.Close();
}
}
var cancellationFinished = new TaskCompletionSource<bool>();
using (var registration = cancellationToken.Register(async () =>
{
await CancelAndKillProcessTree(killProcessOnCancel);
cancellationFinished.TrySetResult(true);
}))
{
Trace.Info($"Process started with process id {_proc.Id}, waiting for process exit.");
while (true)
{
Task outputSignal = _outputProcessEvent.WaitAsync();
var signaled = await Task.WhenAny(outputSignal, _processExitedCompletionSource.Task);
if (signaled == outputSignal)
{
ProcessOutput();
}
else
{
_stopWatch.Stop();
break;
}
}
// Just in case there was some pending output when the process shut down go ahead and check the
// data buffers one last time before returning
ProcessOutput();
if (cancellationToken.IsCancellationRequested)
{
// Ensure cancellation also finish on the cancellationToken.Register thread.
await cancellationFinished.Task;
Trace.Info($"Process Cancellation finished.");
}
Trace.Info($"Finished process {_proc.Id} with exit code {_proc.ExitCode}, and elapsed time {_stopWatch.Elapsed}.");
}
cancellationToken.ThrowIfCancellationRequested();
// Wait for process to finish.
if (_proc.ExitCode != 0 && requireExitCodeZero)
{
throw new ProcessExitCodeException(exitCode: _proc.ExitCode, fileName: fileName, arguments: arguments);
}
return _proc.ExitCode;
}
19
View Source File : Dependencies.cs
License : Apache License 2.0
Project Creator : adamralph
[Fact]
public static async Task DependencyOrderWhenParallelAndSkipping()
{
// arrange
var clock = 0;
var (buildStartTime, test1StartTime, test2StartTime) = (0, 0, 0);
var targets = new TargetCollection
{
CreateTarget(
"build",
() =>
{
Thread.Sleep(TimeSpan.FromSeconds(1)); // a weak way to encourage the tests to run first
buildStartTime = Interlocked.Increment(ref clock);
}),
CreateTarget("test1", new[] { "build" }, () => test1StartTime = Interlocked.Increment(ref clock)),
CreateTarget("test2", new[] { "build" }, () => test2StartTime = Interlocked.Increment(ref clock)),
};
// act
await targets.RunAsync(new List<string> { "--parallel", "--skip-dependencies", "test1", "test2", "build" }, _ => false, default, Console.Out, Console.Error, false);
// assert
Assert.Equal(1, buildStartTime);
Assert.Equal(5, test1StartTime + test2StartTime);
}
19
View Source File : CommandEx.cs
License : Apache License 2.0
Project Creator : adamralph
public static async Task<Result> ReadLoggedAsync(string name, string args = null, string workingDirectory = null, IEnumerable<KeyValuePair<string, string>> envVars = null)
{
envVars = (envVars ?? Enumerable.Empty<KeyValuePair<string, string>>()).ToList();
var result = await Command.ReadAsync(
name,
args,
workingDirectory,
configureEnvironment: env =>
{
foreach (var pair in envVars)
{
env[pair.Key] = pair.Value;
}
}).ConfigureAwait(false);
var index = Interlocked.Increment(ref CommandEx.index);
var markdown =
$@"
# Command read {index}
## Command
### Name
`{name}`
### Args
`{args}`
### Working directory
`{workingDirectory}`
### Environment variables
```text
{string.Join(Environment.NewLine, envVars.Select(pair => $"{pair.Key}={pair.Value}"))}
```
## Result
### StandardOutput (stdout)
```text
{result.StandardOutput}
```
### StandardError (stderr)
```text
{result.StandardError}
```
";
var markdownFileName = Path.Combine(workingDirectory, $"command-read-{index:D2}.md");
await File.WriteAllTextAsync(markdownFileName, markdown).ConfigureAwait(false);
return result;
}
19
View Source File : Output.Results.cs
License : Apache License 2.0
Project Creator : adamralph
private TargetResult InternResult(Target target) => this.results.GetOrAdd(target, key => new TargetResult(Interlocked.Increment(ref this.resultOrdinal)));
19
View Source File : Output.Results.cs
License : Apache License 2.0
Project Creator : adamralph
private (TargetResult, TargetInputResult) Intern(Target target, Guid inputId)
{
var targetResult = this.InternResult(target);
var targetInputResult = targetResult.InputResults.GetOrAdd(inputId, key => new TargetInputResult(Interlocked.Increment(ref this.resultOrdinal)));
return (targetResult, targetInputResult);
}
19
View Source File : JobDispatcher_HandleNextJobAsync_Test.cs
License : MIT License
Project Creator : AdemCatamak
[Theory]
[InlineData(1, 100)]
[InlineData(4, 100)]
[InlineData(8, 100)]
[InlineData(1, 300)]
[InlineData(4, 300)]
[InlineData(8, 300)]
[InlineData(12, 300)]
[InlineData(16, 300)]
[InlineData(1, 1000)]
[InlineData(4, 1000)]
[InlineData(8, 1000)]
[InlineData(12, 1000)]
[InlineData(16, 1000)]
public void When_JobHandledCalledWithParallel__InitialJobCountAndJobExecutionCountShouldBeEqual(int parallelJobCount, int initialJobCount)
{
List<Task> addTasks = new List<Task>();
for (var i = 0; i < initialJobCount; i++)
{
DummyMessage dummyMessage = new DummyMessage
{
Guid = Guid.NewGuid()
};
var task = _messageStorageClient.AddMessageAsync(dummyMessage);
addTasks.Add(task);
}
Task.WaitAll(addTasks.ToArray());
List<Task> jobProcessorTasks = new List<Task>();
var actualExecutedJobCount = 0;
for (var i = 0; i < parallelJobCount; i++)
{
Task task = Task.Run(async () =>
{
bool jobHandled;
do
{
jobHandled = await _jobDispatcher.HandleNextJobAsync();
if (jobHandled)
{
Interlocked.Increment(ref actualExecutedJobCount);
}
} while (jobHandled);
}
);
jobProcessorTasks.Add(task);
}
Task.WaitAll(jobProcessorTasks.ToArray());
string message = $"Parallel Job Count : {parallelJobCount}{Environment.NewLine}" +
$"Expected Executed Job Count : {initialJobCount}{Environment.NewLine}" +
$"Actual Executed Job Count : {actualExecutedJobCount}";
Assert.Equal(initialJobCount, actualExecutedJobCount);
_output.WriteLine(message);
}
19
View Source File : JobDispatcher_HandleNextJobAsync_Test.cs
License : MIT License
Project Creator : AdemCatamak
[Theory]
[InlineData(1, 100)]
[InlineData(4, 100)]
[InlineData(8, 100)]
[InlineData(1, 300)]
[InlineData(4, 300)]
[InlineData(8, 300)]
[InlineData(12, 300)]
[InlineData(16, 300)]
[InlineData(1, 1000)]
[InlineData(4, 1000)]
[InlineData(8, 1000)]
[InlineData(12, 1000)]
[InlineData(16, 1000)]
public void When_JobHandledCalledWithParallel__InitialJobCountAndJobExecutionCountShouldBeEqual(int parallelJobCount, int initialJobCount)
{
List<Task> addTasks = new List<Task>();
for (var i = 0; i < initialJobCount; i++)
{
DummyMessage dummyMessage = new DummyMessage
{
Guid = Guid.NewGuid()
};
var task = _messageStorageClient.AddMessageAsync(dummyMessage);
addTasks.Add(task);
}
Task.WaitAll(addTasks.ToArray());
List<Task> jobProcessorTasks = new List<Task>();
int actualExecutedJobCount = 0;
for (var i = 0; i < parallelJobCount; i++)
{
Task task = Task.Run(async () =>
{
bool jobHandled;
do
{
jobHandled = await _sut.HandleNextJobAsync();
if (jobHandled)
{
Interlocked.Increment(ref actualExecutedJobCount);
}
} while (jobHandled);
}
);
jobProcessorTasks.Add(task);
}
Task.WaitAll(jobProcessorTasks.ToArray());
string message = $"Parallel Job Count : {parallelJobCount}{Environment.NewLine}" +
$"Expected Executed Job Count : {initialJobCount}{Environment.NewLine}" +
$"Actual Executed Job Count : {actualExecutedJobCount}";
Assert.Equal(initialJobCount, actualExecutedJobCount);
_output.WriteLine(message);
}
19
View Source File : NamedLock.cs
License : MIT License
Project Creator : Adoxio
public int AddRef()
{
return Interlocked.Increment(ref _count);
}
19
View Source File : Parallel.cs
License : MIT License
Project Creator : adrenak
private void RunWorkerThread(object threadIndex) {
WorkerThread workerThread = workerThreads[(int)threadIndex];
int localJobIndex = 0;
while (true) {
// Wait for a task.
workerThread.TaskWaiting.WaitOne();
// Exit if task is empty.
if (LoopFunction == null) {
return;
}
localJobIndex = Interlocked.Increment(ref currentJobIndex);
while (localJobIndex < stopIndex) {
////Console.WriteLine("Thread " + threadIndex + " of " + workerThreads.Count + " running task " + localJobIndex);
LoopFunction(localJobIndex);
localJobIndex = Interlocked.Increment(ref currentJobIndex);
}
// Signal that thread is idle.
workerThread.ThreadIdle.Set();
}
}
19
View Source File : ThreadPool.cs
License : MIT License
Project Creator : adrenak
private void EnqueueTask(WaitCallback callback, object state) {
while (m_numTasks == m_taskQueue.Length) {
m_getNotification.WaitOne();
}
m_taskQueue[m_nPutPointer].callback = callback;
m_taskQueue[m_nPutPointer].args = state;
++m_nPutPointer;
if (m_nPutPointer == m_taskQueue.Length) {
m_nPutPointer = 0;
}
#if !UNITY_WEBGL
if (m_threadPool.Length == 1) {
#endif
if (Interlocked.Increment(ref m_numTasks) == 1) {
m_putNotification.Set();
}
#if !UNITY_WEBGL
}
else {
Interlocked.Increment(ref m_numTasks);
m_semaphore.Release();
}
#endif
}
19
View Source File : Parallel.cs
License : MIT License
Project Creator : adrenak
private void RunWorkerThread(object threadIndex) {
WorkerThread workerThread = workerThreads[(int)threadIndex];
int localJobIndex = 0;
while(true) {
// Wait for a task.
workerThread.TaskWaiting.WaitOne();
// Exit if task is empty.
if(LoopFunction == null) {
return;
}
localJobIndex = Interlocked.Increment(ref currentJobIndex);
while(localJobIndex < stopIndex) {
////Console.WriteLine("Thread " + threadIndex + " of " + workerThreads.Count + " running task " + localJobIndex);
LoopFunction(localJobIndex);
localJobIndex = Interlocked.Increment(ref currentJobIndex);
}
// Signal that thread is idle.
workerThread.ThreadIdle.Set();
}
}
19
View Source File : Parallel.cs
License : MIT License
Project Creator : adrenak
private void RunWorkerThread(object threadIndex)
{
WorkerThread workerThread = workerThreads[(int)threadIndex];
int localJobIndex = 0;
while (true)
{
// Wait for a task.
workerThread.TaskWaiting.WaitOne();
// Exit if task is empty.
if (LoopFunction == null)
{
return;
}
localJobIndex = Interlocked.Increment(ref currentJobIndex);
while (localJobIndex < stopIndex)
{
////Console.WriteLine("Thread " + threadIndex + " of " + workerThreads.Count + " running task " + localJobIndex);
LoopFunction(localJobIndex);
localJobIndex = Interlocked.Increment(ref currentJobIndex);
}
// Signal that thread is idle.
workerThread.ThreadIdle.Set();
}
}
19
View Source File : ThreadTaskManager.cs
License : The Unlicense
Project Creator : aeroson
internal void EnqueueTask(Action<object> task, object taskInformation)
{
Interlocked.Increment(ref manager.tasksRemaining);
taskData.Enqueue(new TaskEntry(task, taskInformation));
resetEvent.Set();
}
19
View Source File : SimulationIsland.cs
License : The Unlicense
Project Creator : aeroson
void BecameDeactivationCandidate(SimulationIslandMember member)
{
Interlocked.Increment(ref deactivationCandidateCount);
//The reason why this does not deactivate when count == members.count is that deactivation candidate count will go up and down in parallel.
//The actual deactivation process is not designed to be thread safe. Perhaps doable, but perhaps not worth the effort.
}
19
View Source File : SimpleLooper.cs
License : The Unlicense
Project Creator : aeroson
internal void EnqueueTask(Action<object> task, object taskInformation)
{
lock (taskQueue)
{
Interlocked.Increment(ref manager.tasksRemaining);
taskQueue.Enqueue(task);
taskInformationQueue.Enqueue(taskInformation);
resetEvent.Set();
}
}
19
View Source File : SmallFlakes.cs
License : Mozilla Public License 2.0
Project Creator : agebullhu
public static long Oxidize()
{
var ct = CurrentTimeCounter();
var counter = Interlocked.Increment(ref _counter);
if ((ushort)_counter == 0)
{
ct = WaitForNextTimeCounter((uint)ct);
counter = Interlocked.Increment(ref _counter);
}
var result = ((ct << SmallFlakes.TimestampShift) + ((long)Identifier << SmallFlakes.IdentifierShift) + (uint)counter);
return result & SmallFlakes.Mask;
}
19
View Source File : LogRecorder.cs
License : Mozilla Public License 2.0
Project Creator : agebullhu
private static void WriteRecordLoop()
{
bool success = true;
try
{
if (Interlocked.Increment(ref BackIsRuning) > 1)
{
return;
}
SystemTrace(LogLevel.System, "日志开始");
int cnt = 0;
while (State != LogRecorderStatus.Shutdown)
{
//Thread.Sleep(10);//let it run for a moment
if (State < LogRecorderStatus.Initialized || !BaseRecorder.IsInitialized || !Recorder.IsInitialized)
{
Thread.Sleep(50);
continue;
}
var array = RecordInfos.Switch();
if (array.Count == 0)
{
Thread.Sleep(50);
continue;
}
foreach (var info in array)
{
if (info == null)
continue;
try
{
info.Index = ++_id;
if (_id == ulong.MaxValue)
_id = 1;
if (!_isTextRecorder && (info.Type >= LogType.System || info.Local))
BaseRecorder.RecordLog(info);
if (Listener != null || TraceToConsole)
DoTrace(info);
}
catch (Exception ex)
{
SystemTrace(LogLevel.Error, "An error occurred while writing the log", ex);
}
}
try
{
Recorder.RecordLog(array.ToList());
}
catch (Exception ex)
{
SystemTrace(LogLevel.Error, "An error occurred while writing the log", ex);
}
if (++cnt < 1024)
continue;
GC.Collect();
cnt = 0;
}
_syncSlim.Release();
}
catch (Exception e)
{
success = false;
Console.WriteLine(e);
}
finally
{
Interlocked.Decrement(ref BackIsRuning);
SystemTrace(LogLevel.System, "日志结束");
}
if (!success)
NewRecorderThread();
}
19
View Source File : ApiStation.cs
License : Mozilla Public License 2.0
Project Creator : agebullhu
private void ApiCall(ref ZSocket socket, ApiCallItem item)
{
using (IocScope.CreateScope())
{
Interlocked.Increment(ref CallCount);
try
{
if (LogRecorder.LogMonitor)
ApiCallByMonitor(ref socket, item);
else
ApiCallNoMonitor(ref socket, item);
}
catch (Exception ex)
{
ZeroTrace.WriteException(StationName, ex, "ApiCall", item.ApiName);
item.Result = ApiResult.InnerErrorJson;
SendResult(ref socket, item, ZeroOperatorStateType.Error);
}
finally
{
Interlocked.Decrement(ref waitCount);
}
}
}
19
View Source File : ApiStation.cs
License : Mozilla Public License 2.0
Project Creator : agebullhu
private void ApiCallNoMonitor(ref ZSocket socket, ApiCallItem item)
{
ZeroOperatorStateType state = RestoryContext(item);
if (state == ZeroOperatorStateType.Ok)
{
Prepare(item);
state = ExecCommand(item);
if (state != ZeroOperatorStateType.Ok)
Interlocked.Increment(ref ErrorCount);
else
Interlocked.Increment(ref SuccessCount);
}
else
{
Interlocked.Increment(ref ErrorCount);
}
if (!SendResult(ref socket, item, state))
{
Interlocked.Increment(ref SendError);
}
End(item);
}
19
View Source File : ApiStation.cs
License : Mozilla Public License 2.0
Project Creator : agebullhu
private void RunWait()
{
ZeroTrace.SystemLog(StationName, "run", Config.WorkerCallAddress, Name, RealName);
var socket = ZSocket.CreateClientSocket(Config.WorkerResultAddress, ZSocketType.DEALER);
{
using (var pool = ZmqPool.CreateZmqPool())
{
pool.Prepare(ZPollEvent.In, ZSocket.CreateClientSocket(Config.WorkerCallAddress, ZSocketType.PULL, Identity));
State = StationState.Run;
while (CanLoop)
{
if (!pool.Poll() || !pool.CheckIn(0, out var message))
{
continue;
}
Interlocked.Increment(ref RecvCount);
using (message)
{
if (!Unpack(message, out var item))
{
SendLayoutErrorResult(ref socket, item.Caller);
continue;
}
Interlocked.Increment(ref waitCount);
if (waitCount > ZeroApplication.Config.MaxWait)
{
item.Result = ApiResult.UnavailableJson;
SendResult(ref socket, item, ZeroOperatorStateType.Unavailable);
}
else
{
Task.Factory.StartNew(ApiCallTask, item);
}
}
}
}
}
ZeroTrace.SystemLog(StationName, "end", Config.WorkerCallAddress, Name, RealName);
socket.Dispose();
}
19
View Source File : ApiStation.cs
License : Mozilla Public License 2.0
Project Creator : agebullhu
private void RunSignle()
{
var socket = ZSocket.CreateClientSocket(Config.WorkerResultAddress, ZSocketType.DEALER);
{
using (var pool = ZmqPool.CreateZmqPool())
{
pool.Prepare(new[] { ZSocket.CreateClientSocket(Config.WorkerCallAddress, ZSocketType.PULL, Identity) }, ZPollEvent.In);
State = StationState.Run;
while (CanLoop)
{
if (!pool.Poll() || !pool.CheckIn(0, out var message))
{
continue;
}
Interlocked.Increment(ref RecvCount);
using (message)
{
if (!Unpack(message, out var item))
{
SendLayoutErrorResult(ref socket, item.Caller);
continue;
}
ApiCall(ref socket, item);
}
}
}
}
socket.Dispose();
_processSemaphore?.Release();
}
19
View Source File : ApiStation.cs
License : Mozilla Public License 2.0
Project Creator : agebullhu
private bool SendResult(ref ZSocket socket, ZMessage message)
{
try
{
ZError error;
using (message)
{
if (socket.Send(message, out error))
return true;
socket.TryClose();
socket = ZSocket.CreateClientSocket(Config.WorkerResultAddress, ZSocketType.DEALER, Identity);
}
ZeroTrace.WriteError(StationName, error.Text, error.Name);
Interlocked.Increment(ref SendError);
return false;
}
catch (Exception e)
{
LogRecorder.Exception(e, "ApiStation.SendResult");
return false;
}
}
19
View Source File : Tester.cs
License : Mozilla Public License 2.0
Project Creator : agebullhu
void Async()
{
Interlocked.Increment(ref WaitCount);
GlobalContext.SetRequestContext(ZeroApplication.Config.ServiceName, Guid.NewGuid().ToString("N"));
DateTime s = DateTime.Now;
DoAsync();
Interlocked.Decrement(ref WaitCount);
var sp = (DateTime.Now - s);
Interlocked.Add(ref RunTime, sp.Ticks);
if (sp.TotalMilliseconds > 500)
Interlocked.Increment(ref TmError);
if ((Interlocked.Increment(ref ExCount) % 100) == 0)
Count();
}
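Besides Increment, Decrement and Add, this tester uses the value returned by Interlocked.Increment(ref ExCount) to run a summary exactly once per hundred calls. The same "every Nth call" idiom in isolation (names are illustrative):
using System;
using System.Threading;
static class Progress
{
    private static int _calls;
    public static void OnRequestCompleted()
    {
        // The returned value is unique to this call, so exactly one
        // thread observes each multiple of 100 and prints the summary.
        int n = Interlocked.Increment(ref _calls);
        if (n % 100 == 0)
            Console.WriteLine($"processed {n} requests");
    }
}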
19
View Source File : ApiStation.cs
License : Mozilla Public License 2.0
Project Creator : agebullhu
private void ApiCallByMonitor(ref ZSocket socket, ApiCallItem item)
{
using (MonitorScope.CreateScope(item.ApiName))
{
LogRecorder.MonitorTrace($"Caller:{item.Caller}");
LogRecorder.MonitorTrace($"GlobalId:{item.GlobalId}");
LogRecorder.MonitorTrace(JsonConvert.SerializeObject(item));
ZeroOperatorStateType state = RestoryContext(item);
if (state == ZeroOperatorStateType.Ok)
{
Prepare(item);
using (MonitorScope.CreateScope("Do"))
{
state = ExecCommand(item);
}
if (state != ZeroOperatorStateType.Ok)
Interlocked.Increment(ref ErrorCount);
else
Interlocked.Increment(ref SuccessCount);
}
else
Interlocked.Increment(ref ErrorCount);
LogRecorder.MonitorTrace(item.Result);
if (!SendResult(ref socket, item, state))
{
ZeroTrace.WriteError(item.ApiName, "SendResult");
Interlocked.Increment(ref SendError);
}
End(item);
}
}
19
View Source File : ApiStation.cs
License : Mozilla Public License 2.0
Project Creator : agebullhu
License : Mozilla Public License 2.0
Project Creator : agebullhu
private void RunThread()
{
var realName = ZeroIdentityHelper.CreateRealName(IsService, Name == Config.StationName ? null : Name);
var identity = realName.ToAsciiBytes();
ZeroTrace.SystemLog(StationName, "run", Config.WorkerCallAddress, Name, realName);
var socket = ZSocket.CreateClientSocket(Config.WorkerResultAddress, ZSocketType.DEALER);
using (var pool = ZmqPool.CreateZmqPool())
{
pool.Prepare(new[] {ZSocket.CreateClientSocket(Config.WorkerCallAddress, ZSocketType.PULL, identity)},
ZPollEvent.In);
State = StationState.Run;
while (CanLoop)
{
if (!pool.Poll() || !pool.CheckIn(0, out var message))
{
continue;
}
Interlocked.Increment(ref RecvCount);
using (message)
{
if (!Unpack(message, out var item))
{
SendLayoutErrorResult(ref socket, item.Caller);
continue;
}
ApiCall(ref socket, item);
}
}
}
ZeroTrace.SystemLog(StationName, "end", Config.WorkerCallAddress, Name, realName);
socket.Dispose();
_processSemaphore?.Release();
}
19
View Source File : Tester.cs
License : Mozilla Public License 2.0
Project Creator : agebullhu
void OnTestStar()
{
Interlocked.Increment(ref testerCount);
}
19
View Source File : DiscordClient.WebSocket.cs
License : MIT License
Project Creator : Aiko-IT-Systems
internal async Task SendHeartbeatAsync(long seq)
{
var more_than_5 = Volatile.Read(ref this._skippedHeartbeats) > 5;
var guilds_comp = Volatile.Read(ref this._guildDownloadCompleted);
if (guilds_comp && more_than_5)
{
this.Logger.LogCritical(LoggerEvents.HeartbeatFailure, "Server failed to acknowledge more than 5 heartbeats - connection is zombie");
var args = new ZombiedEventArgs(this.ServiceProvider)
{
Failures = Volatile.Read(ref this._skippedHeartbeats),
GuildDownloadCompleted = true
};
await this._zombied.InvokeAsync(this, args).ConfigureAwait(false);
await this.InternalReconnectAsync(code: 4001, message: "Too many heartbeats missed").ConfigureAwait(false);
return;
}
else if (!guilds_comp && more_than_5)
{
var args = new ZombiedEventArgs(this.ServiceProvider)
{
Failures = Volatile.Read(ref this._skippedHeartbeats),
GuildDownloadCompleted = false
};
await this._zombied.InvokeAsync(this, args).ConfigureAwait(false);
this.Logger.LogWarning(LoggerEvents.HeartbeatFailure, "Server failed to acknowledge more than 5 heartbeats, but the guild download is still running - check your connection speed");
}
Volatile.Write(ref this._lastSequence, seq);
this.Logger.LogTrace(LoggerEvents.Heartbeat, "Sending heartbeat");
var heartbeat = new GatewayPayload
{
OpCode = GatewayOpCode.Heartbeat,
Data = seq
};
var heartbeat_str = JsonConvert.SerializeObject(heartbeat);
await this.WsSendAsync(heartbeat_str).ConfigureAwait(false);
this._lastHeartbeat = DateTimeOffset.Now;
Interlocked.Increment(ref this._skippedHeartbeats);
}
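Here _skippedHeartbeats is incremented each time a heartbeat goes out; the Volatile.Read checks at the top then treat more than five unacknowledged heartbeats as a zombied connection. A reduced sketch of that counter (the acknowledgement handler shown is an assumption, since the reset is not part of this snippet):
using System.Threading;
class HeartbeatMonitor
{
    private int _skippedHeartbeats;
    // Called every time a heartbeat is sent without waiting for the ACK.
    public void OnHeartbeatSent() => Interlocked.Increment(ref _skippedHeartbeats);
    // Assumed acknowledgement path (not shown in the snippet above):
    // clear the counter once the server answers.
    public void OnHeartbeatAck() => Volatile.Write(ref _skippedHeartbeats, 0);
    // More than five unacknowledged heartbeats is treated as a dead connection.
    public bool LooksZombied() => Volatile.Read(ref _skippedHeartbeats) > 5;
}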
19
View Source File : RestClient.cs
License : MIT License
Project Creator : Aiko-IT-Systems
public RateLimitBucket GetBucket(RestRequestMethod method, string route, object route_params, out string url)
{
var rparams_props = route_params.GetType()
.GetTypeInfo()
.DeclaredProperties;
var rparams = new Dictionary<string, string>();
foreach (var xp in rparams_props)
{
var val = xp.GetValue(route_params);
rparams[xp.Name] = val is string xs
? xs
: val is DateTime dt
? dt.ToString("yyyy-MM-ddTHH:mm:sszzz", CultureInfo.InvariantCulture)
: val is DateTimeOffset dto
? dto.ToString("yyyy-MM-ddTHH:mm:sszzz", CultureInfo.InvariantCulture)
: val is IFormattable xf ? xf.ToString(null, CultureInfo.InvariantCulture) : val.ToString();
}
var guild_id = rparams.ContainsKey("guild_id") ? rparams["guild_id"] : "";
var channel_id = rparams.ContainsKey("channel_id") ? rparams["channel_id"] : "";
var webhook_id = rparams.ContainsKey("webhook_id") ? rparams["webhook_id"] : "";
// Create a generic route (minus major params) key
// ex: POST:/channels/channel_id/messages
var hashKey = RateLimitBucket.GenerateHashKey(method, route);
// We check if the hash is present, using our generic route (without major params)
// ex: in POST:/channels/channel_id/messages, out 80c17d2f203122d936070c88c8d10f33
// If it doesn't exist, we create an unlimited hash as our initial key in the form of the hash key + the unlimited constant
// and assign this to the route-to-hash cache
// ex: this.RoutesToHashes[POST:/channels/channel_id/messages] = POST:/channels/channel_id/messages:unlimited
var hash = this.RoutesToHashes.GetOrAdd(hashKey, RateLimitBucket.GenerateUnlimitedHash(method, route));
// Next we use the hash to generate the key to obtain the bucket.
// ex: 80c17d2f203122d936070c88c8d10f33:guild_id:506128773926879242:webhook_id
// or if unlimited: POST:/channels/channel_id/messages:unlimited:guild_id:506128773926879242:webhook_id
var bucketId = RateLimitBucket.GenerateBucketId(hash, guild_id, channel_id, webhook_id);
// If it's not in cache, create a new bucket and index it by its bucket id.
var bucket = this.HashesToBuckets.GetOrAdd(bucketId, new RateLimitBucket(hash, guild_id, channel_id, webhook_id));
bucket.LastAttemptAt = DateTimeOffset.UtcNow;
// Cache the routes for each bucket so it can be used for GC later.
if (!bucket.RouteHashes.Contains(bucketId))
bucket.RouteHashes.Add(bucketId);
// Add the current route to the request queue, which indexes the amount
// of requests occurring to the bucket id.
_ = this.RequestQueue.TryGetValue(bucketId, out var count);
// Increment by one atomically due to concurrency
this.RequestQueue[bucketId] = Interlocked.Increment(ref count);
// Start bucket cleaner if not already running.
if (!this._cleanerRunning)
{
this._cleanerRunning = true;
this._bucketCleanerTokenSource = new CancellationTokenSource();
this._cleanerTask = Task.Run(this.CleanupBucketsAsync, this._bucketCleanerTokenSource.Token);
this.Logger.LogDebug(LoggerEvents.RestCleaner, "Bucket cleaner task started.");
}
url = RouteArgumentRegex.Replace(route, xm => rparams[xm.Groups[1].Value]);
return bucket;
}
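Note that count is a local copy obtained from TryGetValue, so Interlocked.Increment(ref count) is atomic on that local but the overall read-increment-store against RequestQueue is not a single atomic step. If strict per-bucket counting were required, ConcurrentDictionary.AddOrUpdate would be the usual alternative; a one-line sketch, assuming RequestQueue is a ConcurrentDictionary<string, int>:
// Atomically create the entry at 1 or bump an existing entry by 1.
this.RequestQueue.AddOrUpdate(bucketId, 1, (_, current) => current + 1);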
19
View Source File : Counter.cs
License : MIT License
Project Creator : AiursoftWeb
public int GetUniqueNo()
{
return Interlocked.Increment(ref this._current);
}
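This is the textbook use of Interlocked.Increment: handing out unique, monotonically increasing numbers without a lock. A small usage sketch (the Counter type below is a stand-alone copy, not the project's class):
using System;
using System.Linq;
using System.Threading;
using System.Threading.Tasks;
var counter = new Counter();
// Fire 1000 concurrent calls; every call receives a distinct number.
var numbers = await Task.WhenAll(Enumerable.Range(0, 1000)
    .Select(_ => Task.Run(() => counter.GetUniqueNo())));
Console.WriteLine(numbers.Distinct().Count()); // prints 1000
class Counter
{
    private int _current;
    public int GetUniqueNo() => Interlocked.Increment(ref _current);
}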
19
View Source File : Amb.cs
License : Apache License 2.0
Project Creator : akarnokd
internal void Dispose()
{
_cts.Cancel();
if (Interlocked.Increment(ref _disposeWip) == 1)
{
_parent.Dispose(Source);
}
}
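The last example uses Interlocked.Increment as a dispose-once guard: only the caller that moves _disposeWip from 0 to 1 performs the cleanup, and every later call returns immediately. The same idea in isolation (a sketch, not the library's own type):
using System;
using System.Threading;
sealed class DisposeOnce : IDisposable
{
    private int _disposed;
    public void Dispose()
    {
        // The first caller sees 1; every later caller sees a larger value
        // and returns without touching the resources again.
        if (Interlocked.Increment(ref _disposed) != 1)
            return;
        // release resources exactly once here
    }
}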