Here are examples of the C# API System.Collections.Generic.List&lt;Task&gt;.Add(System.Threading.Tasks.Task), taken from open-source projects. By voting up, you can indicate which examples are most useful and appropriate.
1813 Examples
19
View Source File : RoleService.cs
License : MIT License
Project Creator : 17MKH
/// <summary>
/// Replaces a role's menu / button / permission bindings with the set described by <paramref name="dto"/>.
/// All existing bindings for the role's menu group are deleted first, the new ones are re-created,
/// and finally the permission cache of every account holding the role is cleared.
/// Runs inside a transaction, so a failure rolls back the delete + re-create as a unit.
/// </summary>
/// <param name="dto">Target role id plus the menus (with their buttons and permissions) to bind.</param>
/// <returns>NotExists when the role cannot be found; otherwise a success result.</returns>
[Transaction]
public async Task<IResultModel> UpdateBindMenus(RoleBindMenusUpdateDto dto)
{
var role = await _repository.Get(dto.RoleId);
if (role == null)
return ResultModel.NotExists;
// Remove the menu bindings currently attached to this role
await _roleMenuRepository.Find(m => m.MenuGroupId == role.MenuGroupId && m.RoleId == role.Id).ToDelete();
// Remove the button bindings currently attached to this role
await _roleButtonRepository.Find(m => m.MenuGroupId == role.MenuGroupId && m.RoleId == role.Id).ToDelete();
// Remove the permission bindings currently attached to this role
await _rolePermissionRepository.Find(m => m.MenuGroupId == role.MenuGroupId && m.RoleId == role.Id).ToDelete();
// Re-create the menu bindings
if (dto.Menus.NotNullAndEmpty())
{
foreach (var dtoMenu in dto.Menus)
{
// The menu row itself is inserted sequentially; its buttons and permissions
// are inserted concurrently below and awaited per menu.
await _roleMenuRepository.Add(new RoleMenuEnreplacedy
{
MenuGroupId = role.MenuGroupId,
RoleId = role.Id,
MenuId = dtoMenu.MenuId,
MenuType = dtoMenu.MenuType
});
var tasks = new List<Task>();
// Add the button bindings for this menu
if (dtoMenu.Buttons.NotNullAndEmpty())
{
foreach (var dtoButton in dtoMenu.Buttons)
{
tasks.Add(_roleButtonRepository.Add(new RoleButtonEnreplacedy
{
MenuGroupId = role.MenuGroupId,
MenuId = dtoMenu.MenuId,
RoleId = role.Id,
// Button codes are stored lower-case
ButtonCode = dtoButton.ToLower()
}));
}
}
// Add the permission bindings for this menu
if (dtoMenu.Permissions.NotNullAndEmpty())
{
foreach (var dtoPermission in dtoMenu.Permissions)
{
tasks.Add(_rolePermissionRepository.Add(new RolePermissionEnreplacedy
{
MenuGroupId = role.MenuGroupId,
RoleId = role.Id,
MenuId = dtoMenu.MenuId,
// Permission codes are stored lower-case
PermissionCode = dtoPermission.ToLower()
}));
}
}
// Wait for this menu's button/permission inserts before moving on to the next menu.
await Task.WhenAll(tasks);
}
}
// Clear the cached permissions of every account linked to this role
var accountIds = await _accountRepository.Find(m => m.RoleId == dto.RoleId).Select(m => m.Id).ToList<Guid>();
if (accountIds.Any())
{
var tasks = new List<Task>();
foreach (var accountId in accountIds)
{
// NOTE(review): the second argument 0 presumably selects a platform/scope — confirm against _cacheKeys.AccountPermissions.
tasks.Add(_cacheHandler.Remove(_cacheKeys.AccountPermissions(accountId, 0)));
}
await Task.WhenAll(tasks);
}
return ResultModel.Success();
}
19
View Source File : MenuService.cs
License : MIT License
Project Creator : 17MKH
/// <summary>
/// Persists a new parent and sort order for every menu in the batch.
/// All updates are issued concurrently and awaited together.
/// </summary>
/// <param name="menus">Menus carrying the Id to match plus the new ParentId and Sort values.</param>
/// <returns>A success result (also for an empty batch).</returns>
[Transaction]
public async Task<IResultModel> UpdateSort(IList<MenuEnreplacedy> menus)
{
if (!menus.Any())
return ResultModel.Success();
// Kick off one update per menu; ToList() starts them all eagerly.
var pending = menus
.Select(item => _repository.Find(m => m.Id == item.Id).ToUpdate(m => new MenuEnreplacedy
{
ParentId = item.ParentId,
Sort = item.Sort
}))
.ToList();
await Task.WhenAll(pending);
return ResultModel.Success();
}
19
View Source File : CodeFirstProviderAbstract.cs
License : MIT License
Project Creator : 17MKH
/// <summary>
/// Seeds the database from the JSON file configured in Options.InitDataFilePath.
/// The file's root object maps entity names to arrays of rows; each name that matches
/// a known entity descriptor (case-insensitive) is deserialized into a typed list and
/// inserted through that entity's repository. No-op when seeding is disabled or the
/// file path is unset/missing.
/// </summary>
/// <param name="repositoryManager">Repository manager supplied by the caller; not used directly here — repositories are resolved from the service provider instead.</param>
public virtual void InitData(IRepositoryManager repositoryManager)
{
if (!Options.InitData)
return;
if (Options.InitDataFilePath.IsNull() || !File.Exists(Options.InitDataFilePath))
{
_logger.LogDebug("初始化数据文件不存在");
return;
}
_logger.LogDebug("开始初始化数据");
var jsonHelper = new JsonHelper();
using var jsonReader = new StreamReader(Options.InitDataFilePath, Encoding.UTF8);
var str = jsonReader.ReadToEnd();
using var doc = JsonDoreplacedent.Parse(str);
var properties = doc.RootElement.EnumerateObject();
if (properties.Any())
{
foreach (var property in properties)
{
// Match the JSON property name to a registered entity descriptor (case-insensitive).
var enreplacedyDescriptor = Context.EnreplacedyDescriptors.FirstOrDefault(m => m.Name.EqualsIgnoreCase(property.Name));
if (enreplacedyDescriptor != null)
{
// Deserialize the JSON array into a List<TEntity> built via reflection.
var list = (IList)jsonHelper.Deserialize(property.Value.ToString(),
typeof(List<>).MakeGenericType(enreplacedyDescriptor.EnreplacedyType));
// Resolve the repository registered for this entity type from the container.
var repositoryDescriptor = Context.RepositoryDescriptors.FirstOrDefault(m => m.EnreplacedyType == enreplacedyDescriptor.EnreplacedyType);
var repository = (IRepository)Service.BuildServiceProvider()
.GetService(repositoryDescriptor!.InterfaceType);
var tasks = new List<Task>();
foreach (var item in list)
{
tasks.Add(repository.Add(item));
}
// This method is synchronous, so block until this entity's inserts complete.
Task.WaitAll(tasks.ToArray());
}
}
}
}
19
View Source File : Program.cs
License : MIT License
Project Creator : 2881099
// Benchmark harness comparing StackExchange.Redis (sedb), FreeRedis (cli) and
// CSRedisCore (RedisHelper): 100k SET+GET round-trips, each run synchronously,
// via Task.WaitAll over Task.Run, via a single async loop, and (FreeRedis) pipelined.
static void Main(string[] args)
{
RedisHelper.Initialization(new CSRedis.CSRedisClient("127.0.0.1:6379,asyncPipeline=true,preheat=100,poolsize=100"));
cli.Set("TestMGet_null1", "");
RedisHelper.Set("TestMGet_null1", "");
sedb.StringSet("TestMGet_string1", String);
// Pre-grow the thread pool so the 100k Task.Run fan-outs aren't throttled by pool ramp-up.
ThreadPool.SetMinThreads(10001, 10001);
Stopwatch sw = new Stopwatch();
var tasks = new List<Task>();
var results = new ConcurrentQueue<string>();
cli.FlushDb();
// Drain the results queue between runs; 'del' is intentionally unused.
while (results.TryDequeue(out var del)) ;
sw.Reset();
sw.Start();
// --- StackExchange.Redis: synchronous loop ---
for (var a = 0; a < 100000; a++)
{
var tmp = Guid.NewGuid().ToString();
sedb.StringSet(tmp, String);
var val = sedb.StringGet(tmp);
if (val != String) throw new Exception("not equal");
results.Enqueue(val);
}
sw.Stop();
Console.WriteLine("StackExchange(0-100000): " + sw.ElapsedMilliseconds + "ms results: " + results.Count);
tasks.Clear();
while (results.TryDequeue(out var del)) ;
cli.FlushDb();
sw.Reset();
sw.Start();
// --- StackExchange.Redis: 100k parallel Task.Run, blocking sync calls ---
tasks = new List<Task>();
for (var a = 0; a < 100000; a++)
{
tasks.Add(Task.Run(() =>
{
var tmp = Guid.NewGuid().ToString();
sedb.StringSet(tmp, String);
var val = sedb.StringGet(tmp);
if (val != String) throw new Exception("not equal");
results.Enqueue(val);
}));
}
Task.WaitAll(tasks.ToArray());
sw.Stop();
Console.WriteLine("StackExchange(Task.WaitAll 100000): " + sw.ElapsedMilliseconds + "ms results: " + results.Count);
tasks.Clear();
while (results.TryDequeue(out var del)) ;
cli.FlushDb();
sw.Reset();
sw.Start();
// --- StackExchange.Redis: single sequential async loop ---
Task.Run(async () =>
{
for (var a = 0; a < 100000; a++)
{
var tmp = Guid.NewGuid().ToString();
await sedb.StringSetAsync(tmp, String);
var val = await sedb.StringGetAsync(tmp);
if (val != String) throw new Exception("not equal");
results.Enqueue(val);
}
}).Wait();
sw.Stop();
Console.WriteLine("StackExchangeAsync(0-100000): " + sw.ElapsedMilliseconds + "ms results: " + results.Count);
tasks.Clear();
while (results.TryDequeue(out var del)) ;
cli.FlushDb();
sw.Reset();
sw.Start();
// --- StackExchange.Redis: 100k concurrent async tasks ---
tasks = new List<Task>();
for (var a = 0; a < 100000; a++)
{
tasks.Add(Task.Run(async () =>
{
var tmp = Guid.NewGuid().ToString();
await sedb.StringSetAsync(tmp, String);
var val = await sedb.StringGetAsync(tmp);
if (val != String) throw new Exception("not equal");
results.Enqueue(val);
}));
}
Task.WaitAll(tasks.ToArray());
sw.Stop();
Console.WriteLine("StackExchangeAsync(Task.WaitAll 100000): " + sw.ElapsedMilliseconds + "ms results: " + results.Count + "\r\n");
tasks.Clear();
while (results.TryDequeue(out var del)) ;
cli.FlushDb();
sw.Reset();
sw.Start();
// --- FreeRedis: synchronous loop ---
for (var a = 0; a < 100000; a++)
{
var tmp = Guid.NewGuid().ToString();
cli.Set(tmp, String);
var val = cli.Get(tmp);
if (val != String) throw new Exception("not equal");
results.Enqueue(val);
}
sw.Stop();
Console.WriteLine("FreeRedis(0-100000): " + sw.ElapsedMilliseconds + "ms results: " + results.Count);
tasks.Clear();
while (results.TryDequeue(out var del)) ;
cli.FlushDb();
sw.Reset();
sw.Start();
// --- FreeRedis: 100k parallel Task.Run, blocking sync calls ---
tasks = new List<Task>();
for (var a = 0; a < 100000; a++)
{
tasks.Add(Task.Run(() =>
{
var tmp = Guid.NewGuid().ToString();
cli.Set(tmp, String);
var val = cli.Get(tmp);
if (val != String) throw new Exception("not equal");
results.Enqueue(val);
}));
}
Task.WaitAll(tasks.ToArray());
sw.Stop();
Console.WriteLine("FreeRedis(Task.WaitAll 100000): " + sw.ElapsedMilliseconds + "ms results: " + results.Count);
tasks.Clear();
while (results.TryDequeue(out var del)) ;
cli.FlushDb();
sw.Reset();
sw.Start();
// --- FreeRedis: single sequential async loop ---
Task.Run(async () =>
{
for (var a = 0; a < 100000; a++)
{
var tmp = Guid.NewGuid().ToString();
await cli.SetAsync(tmp, String);
var val = await cli.GetAsync(tmp);
if (val != String) throw new Exception("not equal");
results.Enqueue(val);
}
}).Wait();
sw.Stop();
Console.WriteLine("FreeRedisAsync(0-100000): " + sw.ElapsedMilliseconds + "ms results: " + results.Count);
tasks.Clear();
while (results.TryDequeue(out var del)) ;
cli.FlushDb();
//FreeRedis.Internal.AsyncRedisSocket.sb.Clear();
//FreeRedis.Internal.AsyncRedisSocket.sw.Start();
sw.Reset();
sw.Start();
// --- FreeRedis: 100k concurrent async tasks ---
tasks = new List<Task>();
for (var a = 0; a < 100000; a++)
{
tasks.Add(Task.Run(async () =>
{
var tmp = Guid.NewGuid().ToString();
await cli.SetAsync(tmp, String);
var val = await cli.GetAsync(tmp);
if (val != String) throw new Exception("not equal");
results.Enqueue(val);
}));
}
Task.WaitAll(tasks.ToArray());
sw.Stop();
//var sbstr = FreeRedis.Internal.AsyncRedisSocket.sb.ToString()
//sbstr = sbstr + sbstr.Split("\r\n").Length + "条消息 ;
Console.WriteLine("FreeRedisAsync(Task.WaitAll 100000): " + sw.ElapsedMilliseconds + "ms results: " + results.Count);
tasks.Clear();
while (results.TryDequeue(out var del)) ;
cli.FlushDb();
sw.Reset();
sw.Start();
// --- FreeRedis: pipelined SET/GET. EndPipe returns replies in send order,
// so odd indexes (1, 3, ...) hold the GET results. ---
using (var pipe = cli.StartPipe())
{
for (var a = 0; a < 100000; a++)
{
var tmp = Guid.NewGuid().ToString();
pipe.Set(tmp, String);
var val = pipe.Get(tmp);
}
var vals = pipe.EndPipe();
for (var a = 1; a < 200000; a += 2)
{
var val = vals[a].ToString();
if (val != String) throw new Exception("not equal");
results.Enqueue(val);
}
}
sw.Stop();
Console.WriteLine("FreeRedisPipeline(0-100000): " + sw.ElapsedMilliseconds + "ms results: " + results.Count + "\r\n");
tasks.Clear();
while (results.TryDequeue(out var del)) ;
cli.FlushDb();
//sw.Reset();
//sw.Start();
//for (var a = 0; a < 100000; a++)
// cli.Call(new CommandPacket("SET").Input("TestMGet_string1").InputRaw(String));
//sw.Stop();
//Console.WriteLine("FreeRedis2: " + sw.ElapsedMilliseconds + "ms");
tasks.Clear();
while (results.TryDequeue(out var del)) ;
cli.FlushDb();
//sw.Reset();
//sw.Start();
//for (var a = 0; a < 100000; a++)
//{
// using (var rds = cli.GetTestRedisSocket())
// {
// var cmd = new CommandPacket("SET").Input("TestMGet_string1").InputRaw(String);
// rds.Write(cmd);
// cmd.Read<string>();
// }
//}
//sw.Stop();
//Console.WriteLine("FreeRedis4: " + sw.ElapsedMilliseconds + "ms");
tasks.Clear();
while (results.TryDequeue(out var del)) ;
cli.FlushDb();
sw.Reset();
sw.Start();
// --- CSRedisCore: synchronous loop ---
for (var a = 0; a < 100000; a++)
{
var tmp = Guid.NewGuid().ToString();
RedisHelper.Set(tmp, String);
var val = RedisHelper.Get(tmp);
if (val != String) throw new Exception("not equal");
results.Enqueue(val);
}
sw.Stop();
Console.WriteLine("CSRedisCore(0-100000): " + sw.ElapsedMilliseconds + "ms results: " + results.Count);
tasks.Clear();
while (results.TryDequeue(out var del)) ;
cli.FlushDb();
sw.Reset();
sw.Start();
// --- CSRedisCore: 100k parallel Task.Run, blocking sync calls ---
tasks = new List<Task>();
for (var a = 0; a < 100000; a++)
{
tasks.Add(Task.Run(() =>
{
var tmp = Guid.NewGuid().ToString();
RedisHelper.Set(tmp, String);
var val = RedisHelper.Get(tmp);
if (val != String) throw new Exception("not equal");
results.Enqueue(val);
}));
}
Task.WaitAll(tasks.ToArray());
sw.Stop();
Console.WriteLine("CSRedisCore(Task.WaitAll 100000): " + sw.ElapsedMilliseconds + "ms results: " + results.Count);
tasks.Clear();
while (results.TryDequeue(out var del)) ;
cli.FlushDb();
sw.Reset();
sw.Start();
// --- CSRedisCore: single sequential async loop ---
Task.Run(async () =>
{
for (var a = 0; a < 100000; a++)
{
var tmp = Guid.NewGuid().ToString();
await RedisHelper.SetAsync(tmp, String);
var val = await RedisHelper.GetAsync(tmp);
if (val != String) throw new Exception("not equal");
results.Enqueue(val);
}
}).Wait();
sw.Stop();
Console.WriteLine("CSRedisCoreAsync(0-100000): " + sw.ElapsedMilliseconds + "ms results: " + results.Count);
tasks.Clear();
while (results.TryDequeue(out var del)) ;
cli.FlushDb();
sw.Reset();
sw.Start();
// --- CSRedisCore: 100k concurrent async tasks (value check disabled in the original) ---
tasks = new List<Task>();
for (var a = 0; a < 100000; a++)
{
tasks.Add(Task.Run(async () =>
{
var tmp = Guid.NewGuid().ToString();
await RedisHelper.SetAsync(tmp, String);
var val = await RedisHelper.GetAsync(tmp);
//if (val != String) throw new Exception("not equal");
results.Enqueue(val);
}));
}
Task.WaitAll(tasks.ToArray());
sw.Stop();
Console.WriteLine("CSRedisCoreAsync(Task.WaitAll 100000): " + sw.ElapsedMilliseconds + "ms results: " + results.Count + "\r\n");
tasks.Clear();
while (results.TryDequeue(out var del)) ;
cli.FlushDb();
}
19
View Source File : Program.cs
License : MIT License
Project Creator : 2881099
// Benchmark harness comparing StackExchange.Redis (sedb), FreeRedis (cli) and
// CSRedisCore (RedisHelper): 100k SET+GET round-trips, each run synchronously,
// via Task.WaitAll over Task.Run, via a single async loop, and (FreeRedis) pipelined.
static void Main(string[] args)
{
RedisHelper.Initialization(new CSRedis.CSRedisClient("127.0.0.1:6379,asyncPipeline=true,preheat=100,poolsize=100"));
cli.Set("TestMGet_null1", "");
RedisHelper.Set("TestMGet_null1", "");
sedb.StringSet("TestMGet_string1", String);
// Pre-grow the thread pool so the 100k Task.Run fan-outs aren't throttled by pool ramp-up.
ThreadPool.SetMinThreads(10001, 10001);
Stopwatch sw = new Stopwatch();
var tasks = new List<Task>();
var results = new ConcurrentQueue<string>();
cli.FlushDb();
// Drain the results queue between runs; 'del' is intentionally unused.
while (results.TryDequeue(out var del)) ;
sw.Reset();
sw.Start();
// --- StackExchange.Redis: synchronous loop ---
for (var a = 0; a < 100000; a++)
{
var tmp = Guid.NewGuid().ToString();
sedb.StringSet(tmp, String);
var val = sedb.StringGet(tmp);
if (val != String) throw new Exception("not equal");
results.Enqueue(val);
}
sw.Stop();
Console.WriteLine("StackExchange(0-100000): " + sw.ElapsedMilliseconds + "ms results: " + results.Count);
tasks.Clear();
while (results.TryDequeue(out var del)) ;
cli.FlushDb();
sw.Reset();
sw.Start();
// --- StackExchange.Redis: 100k parallel Task.Run, blocking sync calls ---
tasks = new List<Task>();
for (var a = 0; a < 100000; a++)
{
tasks.Add(Task.Run(() =>
{
var tmp = Guid.NewGuid().ToString();
sedb.StringSet(tmp, String);
var val = sedb.StringGet(tmp);
if (val != String) throw new Exception("not equal");
results.Enqueue(val);
}));
}
Task.WaitAll(tasks.ToArray());
sw.Stop();
Console.WriteLine("StackExchange(Task.WaitAll 100000): " + sw.ElapsedMilliseconds + "ms results: " + results.Count);
tasks.Clear();
while (results.TryDequeue(out var del)) ;
cli.FlushDb();
sw.Reset();
sw.Start();
// --- StackExchange.Redis: single sequential async loop ---
Task.Run(async () =>
{
for (var a = 0; a < 100000; a++)
{
var tmp = Guid.NewGuid().ToString();
await sedb.StringSetAsync(tmp, String);
var val = await sedb.StringGetAsync(tmp);
if (val != String) throw new Exception("not equal");
results.Enqueue(val);
}
}).Wait();
sw.Stop();
Console.WriteLine("StackExchangeAsync(0-100000): " + sw.ElapsedMilliseconds + "ms results: " + results.Count);
tasks.Clear();
while (results.TryDequeue(out var del)) ;
cli.FlushDb();
sw.Reset();
sw.Start();
// --- StackExchange.Redis: 100k concurrent async tasks ---
tasks = new List<Task>();
for (var a = 0; a < 100000; a++)
{
tasks.Add(Task.Run(async () =>
{
var tmp = Guid.NewGuid().ToString();
await sedb.StringSetAsync(tmp, String);
var val = await sedb.StringGetAsync(tmp);
if (val != String) throw new Exception("not equal");
results.Enqueue(val);
}));
}
Task.WaitAll(tasks.ToArray());
sw.Stop();
Console.WriteLine("StackExchangeAsync(Task.WaitAll 100000): " + sw.ElapsedMilliseconds + "ms results: " + results.Count + "\r\n");
tasks.Clear();
while (results.TryDequeue(out var del)) ;
cli.FlushDb();
sw.Reset();
sw.Start();
// --- FreeRedis: synchronous loop ---
for (var a = 0; a < 100000; a++)
{
var tmp = Guid.NewGuid().ToString();
cli.Set(tmp, String);
var val = cli.Get(tmp);
if (val != String) throw new Exception("not equal");
results.Enqueue(val);
}
sw.Stop();
Console.WriteLine("FreeRedis(0-100000): " + sw.ElapsedMilliseconds + "ms results: " + results.Count);
tasks.Clear();
while (results.TryDequeue(out var del)) ;
cli.FlushDb();
sw.Reset();
sw.Start();
// --- FreeRedis: 100k parallel Task.Run, blocking sync calls ---
tasks = new List<Task>();
for (var a = 0; a < 100000; a++)
{
tasks.Add(Task.Run(() =>
{
var tmp = Guid.NewGuid().ToString();
cli.Set(tmp, String);
var val = cli.Get(tmp);
if (val != String) throw new Exception("not equal");
results.Enqueue(val);
}));
}
Task.WaitAll(tasks.ToArray());
sw.Stop();
Console.WriteLine("FreeRedis(Task.WaitAll 100000): " + sw.ElapsedMilliseconds + "ms results: " + results.Count);
tasks.Clear();
while (results.TryDequeue(out var del)) ;
cli.FlushDb();
sw.Reset();
sw.Start();
// --- FreeRedis: single sequential async loop ---
Task.Run(async () =>
{
for (var a = 0; a < 100000; a++)
{
var tmp = Guid.NewGuid().ToString();
await cli.SetAsync(tmp, String);
var val = await cli.GetAsync(tmp);
if (val != String) throw new Exception("not equal");
results.Enqueue(val);
}
}).Wait();
sw.Stop();
Console.WriteLine("FreeRedisAsync(0-100000): " + sw.ElapsedMilliseconds + "ms results: " + results.Count);
tasks.Clear();
while (results.TryDequeue(out var del)) ;
cli.FlushDb();
//FreeRedis.Internal.AsyncRedisSocket.sb.Clear();
//FreeRedis.Internal.AsyncRedisSocket.sw.Start();
sw.Reset();
sw.Start();
// --- FreeRedis: 100k concurrent async tasks ---
tasks = new List<Task>();
for (var a = 0; a < 100000; a++)
{
tasks.Add(Task.Run(async () =>
{
var tmp = Guid.NewGuid().ToString();
await cli.SetAsync(tmp, String);
var val = await cli.GetAsync(tmp);
if (val != String) throw new Exception("not equal");
results.Enqueue(val);
}));
}
Task.WaitAll(tasks.ToArray());
sw.Stop();
//var sbstr = FreeRedis.Internal.AsyncRedisSocket.sb.ToString()
//sbstr = sbstr + sbstr.Split("\r\n").Length + "条消息 ;
Console.WriteLine("FreeRedisAsync(Task.WaitAll 100000): " + sw.ElapsedMilliseconds + "ms results: " + results.Count);
tasks.Clear();
while (results.TryDequeue(out var del)) ;
cli.FlushDb();
sw.Reset();
sw.Start();
// --- FreeRedis: pipelined SET/GET. EndPipe returns replies in send order,
// so odd indexes (1, 3, ...) hold the GET results. ---
using (var pipe = cli.StartPipe())
{
for (var a = 0; a < 100000; a++)
{
var tmp = Guid.NewGuid().ToString();
pipe.Set(tmp, String);
var val = pipe.Get(tmp);
}
var vals = pipe.EndPipe();
for (var a = 1; a < 200000; a += 2)
{
var val = vals[a].ToString();
if (val != String) throw new Exception("not equal");
results.Enqueue(val);
}
}
sw.Stop();
Console.WriteLine("FreeRedisPipeline(0-100000): " + sw.ElapsedMilliseconds + "ms results: " + results.Count + "\r\n");
tasks.Clear();
while (results.TryDequeue(out var del)) ;
cli.FlushDb();
//sw.Reset();
//sw.Start();
//for (var a = 0; a < 100000; a++)
// cli.Call(new CommandPacket("SET").Input("TestMGet_string1").InputRaw(String));
//sw.Stop();
//Console.WriteLine("FreeRedis2: " + sw.ElapsedMilliseconds + "ms");
tasks.Clear();
while (results.TryDequeue(out var del)) ;
cli.FlushDb();
//sw.Reset();
//sw.Start();
//for (var a = 0; a < 100000; a++)
//{
// using (var rds = cli.GetTestRedisSocket())
// {
// var cmd = new CommandPacket("SET").Input("TestMGet_string1").InputRaw(String);
// rds.Write(cmd);
// cmd.Read<string>();
// }
//}
//sw.Stop();
//Console.WriteLine("FreeRedis4: " + sw.ElapsedMilliseconds + "ms");
tasks.Clear();
while (results.TryDequeue(out var del)) ;
cli.FlushDb();
sw.Reset();
sw.Start();
// --- CSRedisCore: synchronous loop ---
for (var a = 0; a < 100000; a++)
{
var tmp = Guid.NewGuid().ToString();
RedisHelper.Set(tmp, String);
var val = RedisHelper.Get(tmp);
if (val != String) throw new Exception("not equal");
results.Enqueue(val);
}
sw.Stop();
Console.WriteLine("CSRedisCore(0-100000): " + sw.ElapsedMilliseconds + "ms results: " + results.Count);
tasks.Clear();
while (results.TryDequeue(out var del)) ;
cli.FlushDb();
sw.Reset();
sw.Start();
// --- CSRedisCore: 100k parallel Task.Run, blocking sync calls ---
tasks = new List<Task>();
for (var a = 0; a < 100000; a++)
{
tasks.Add(Task.Run(() =>
{
var tmp = Guid.NewGuid().ToString();
RedisHelper.Set(tmp, String);
var val = RedisHelper.Get(tmp);
if (val != String) throw new Exception("not equal");
results.Enqueue(val);
}));
}
Task.WaitAll(tasks.ToArray());
sw.Stop();
Console.WriteLine("CSRedisCore(Task.WaitAll 100000): " + sw.ElapsedMilliseconds + "ms results: " + results.Count);
tasks.Clear();
while (results.TryDequeue(out var del)) ;
cli.FlushDb();
sw.Reset();
sw.Start();
// --- CSRedisCore: single sequential async loop ---
Task.Run(async () =>
{
for (var a = 0; a < 100000; a++)
{
var tmp = Guid.NewGuid().ToString();
await RedisHelper.SetAsync(tmp, String);
var val = await RedisHelper.GetAsync(tmp);
if (val != String) throw new Exception("not equal");
results.Enqueue(val);
}
}).Wait();
sw.Stop();
Console.WriteLine("CSRedisCoreAsync(0-100000): " + sw.ElapsedMilliseconds + "ms results: " + results.Count);
tasks.Clear();
while (results.TryDequeue(out var del)) ;
cli.FlushDb();
sw.Reset();
sw.Start();
// --- CSRedisCore: 100k concurrent async tasks (value check disabled in the original) ---
tasks = new List<Task>();
for (var a = 0; a < 100000; a++)
{
tasks.Add(Task.Run(async () =>
{
var tmp = Guid.NewGuid().ToString();
await RedisHelper.SetAsync(tmp, String);
var val = await RedisHelper.GetAsync(tmp);
//if (val != String) throw new Exception("not equal");
results.Enqueue(val);
}));
}
Task.WaitAll(tasks.ToArray());
sw.Stop();
Console.WriteLine("CSRedisCoreAsync(Task.WaitAll 100000): " + sw.ElapsedMilliseconds + "ms results: " + results.Count + "\r\n");
tasks.Clear();
while (results.TryDequeue(out var del)) ;
cli.FlushDb();
}
19
View Source File : Program.cs
License : MIT License
Project Creator : 2881099
// Benchmark harness comparing StackExchange.Redis (sedb), FreeRedis (cli) and
// CSRedisCore (RedisHelper): 100k SET+GET round-trips in several execution styles.
// Variant of the sibling benchmarks: uses ConcurrentQueue.Clear() instead of a
// TryDequeue drain loop, and the FreeRedis async sections are commented out.
static void Main(string[] args)
{
// Sanity check: a null value round-trips through StackExchange.Redis.
sedb.StringSet("key1", (string)null);
var val111 = sedb.StringGet("key1");
RedisHelper.Initialization(new CSRedis.CSRedisClient("127.0.0.1:6379,asyncPipeline=true,preheat=100,poolsize=100"));
cli.Set("TestMGet_null1", "");
RedisHelper.Set("TestMGet_null1", "");
sedb.StringSet("TestMGet_string1", String);
// Pre-grow the thread pool so the 100k Task.Run fan-outs aren't throttled by pool ramp-up.
ThreadPool.SetMinThreads(10001, 10001);
Stopwatch sw = new Stopwatch();
var tasks = new List<Task>();
var results = new ConcurrentQueue<string>();
cli.FlushDb();
results.Clear();
sw.Reset();
sw.Start();
// --- StackExchange.Redis: synchronous loop ---
for (var a = 0; a < 100000; a++)
{
var tmp = Guid.NewGuid().ToString();
sedb.StringSet(tmp, String);
var val = sedb.StringGet(tmp);
if (val != String) throw new Exception("not equal");
results.Enqueue(val);
}
sw.Stop();
Console.WriteLine("StackExchange(0-100000): " + sw.ElapsedMilliseconds + "ms results: " + results.Count);
tasks.Clear();
results.Clear();
cli.FlushDb();
sw.Reset();
sw.Start();
// --- StackExchange.Redis: 100k parallel Task.Run, blocking sync calls ---
tasks = new List<Task>();
for (var a = 0; a < 100000; a++)
{
tasks.Add(Task.Run(() =>
{
var tmp = Guid.NewGuid().ToString();
sedb.StringSet(tmp, String);
var val = sedb.StringGet(tmp);
if (val != String) throw new Exception("not equal");
results.Enqueue(val);
}));
}
Task.WaitAll(tasks.ToArray());
sw.Stop();
Console.WriteLine("StackExchange(Task.WaitAll 100000): " + sw.ElapsedMilliseconds + "ms results: " + results.Count);
tasks.Clear();
results.Clear();
cli.FlushDb();
sw.Reset();
sw.Start();
// --- StackExchange.Redis: single sequential async loop ---
Task.Run(async () =>
{
for (var a = 0; a < 100000; a++)
{
var tmp = Guid.NewGuid().ToString();
await sedb.StringSetAsync(tmp, String);
var val = await sedb.StringGetAsync(tmp);
if (val != String) throw new Exception("not equal");
results.Enqueue(val);
}
}).Wait();
sw.Stop();
Console.WriteLine("StackExchangeAsync(0-100000): " + sw.ElapsedMilliseconds + "ms results: " + results.Count);
tasks.Clear();
results.Clear();
cli.FlushDb();
sw.Reset();
sw.Start();
// --- StackExchange.Redis: 100k concurrent async tasks ---
tasks = new List<Task>();
for (var a = 0; a < 100000; a++)
{
tasks.Add(Task.Run(async () =>
{
var tmp = Guid.NewGuid().ToString();
await sedb.StringSetAsync(tmp, String);
var val = await sedb.StringGetAsync(tmp);
if (val != String) throw new Exception("not equal");
results.Enqueue(val);
}));
}
Task.WaitAll(tasks.ToArray());
sw.Stop();
Console.WriteLine("StackExchangeAsync(Task.WaitAll 100000): " + sw.ElapsedMilliseconds + "ms results: " + results.Count + "\r\n");
tasks.Clear();
results.Clear();
cli.FlushDb();
sw.Reset();
sw.Start();
// --- FreeRedis: synchronous loop ---
for (var a = 0; a < 100000; a++)
{
var tmp = Guid.NewGuid().ToString();
cli.Set(tmp, String);
var val = cli.Get(tmp);
if (val != String) throw new Exception("not equal");
results.Enqueue(val);
}
sw.Stop();
Console.WriteLine("FreeRedis(0-100000): " + sw.ElapsedMilliseconds + "ms results: " + results.Count);
tasks.Clear();
results.Clear();
cli.FlushDb();
sw.Reset();
sw.Start();
// --- FreeRedis: 100k parallel Task.Run, blocking sync calls ---
tasks = new List<Task>();
for (var a = 0; a < 100000; a++)
{
tasks.Add(Task.Run(() =>
{
var tmp = Guid.NewGuid().ToString();
cli.Set(tmp, String);
var val = cli.Get(tmp);
if (val != String) throw new Exception("not equal");
results.Enqueue(val);
}));
}
Task.WaitAll(tasks.ToArray());
sw.Stop();
Console.WriteLine("FreeRedis(Task.WaitAll 100000): " + sw.ElapsedMilliseconds + "ms results: " + results.Count);
tasks.Clear();
results.Clear();
cli.FlushDb();
//sw.Reset();
//sw.Start();
//Task.Run(async () =>
//{
// for (var a = 0; a < 100000; a++)
// {
// var tmp = Guid.NewGuid().ToString();
// await cli.SetAsync(tmp, String);
// var val = await cli.GetAsync(tmp);
// if (val != String) throw new Exception("not equal");
// results.Enqueue(val);
// }
//}).Wait();
//sw.Stop();
//Console.WriteLine("FreeRedisAsync(0-100000): " + sw.ElapsedMilliseconds + "ms results: " + results.Count);
//tasks.Clear();
//results.Clear();
//cli.FlushDb();
//FreeRedis.Internal.AsyncRedisSocket.sb.Clear();
//FreeRedis.Internal.AsyncRedisSocket.sw.Start();
//sw.Reset();
//sw.Start();
//tasks = new List<Task>();
//for (var a = 0; a < 100000; a++)
//{
// tasks.Add(Task.Run(async () =>
// {
// var tmp = Guid.NewGuid().ToString();
// await cli.SetAsync(tmp, String);
// var val = await cli.GetAsync(tmp);
// if (val != String) throw new Exception("not equal");
// results.Enqueue(val);
// }));
//}
//Task.WaitAll(tasks.ToArray());
//sw.Stop();
////var sbstr = FreeRedis.Internal.AsyncRedisSocket.sb.ToString()
////sbstr = sbstr + sbstr.Split("\r\n").Length + "条消息 ;
//Console.WriteLine("FreeRedisAsync(Task.WaitAll 100000): " + sw.ElapsedMilliseconds + "ms results: " + results.Count);
//tasks.Clear();
//results.Clear();
//cli.FlushDb();
sw.Reset();
sw.Start();
// --- FreeRedis: pipelined SET/GET. EndPipe returns replies in send order,
// so odd indexes (1, 3, ...) hold the GET results. ---
using (var pipe = cli.StartPipe())
{
for (var a = 0; a < 100000; a++)
{
var tmp = Guid.NewGuid().ToString();
pipe.Set(tmp, String);
var val = pipe.Get(tmp);
}
var vals = pipe.EndPipe();
for (var a = 1; a < 200000; a += 2)
{
var val = vals[a].ToString();
if (val != String) throw new Exception("not equal");
results.Enqueue(val);
}
}
sw.Stop();
Console.WriteLine("FreeRedisPipeline(0-100000): " + sw.ElapsedMilliseconds + "ms results: " + results.Count + "\r\n");
tasks.Clear();
results.Clear();
cli.FlushDb();
//sw.Reset();
//sw.Start();
//for (var a = 0; a < 100000; a++)
// cli.Call(new CommandPacket("SET").Input("TestMGet_string1").InputRaw(String));
//sw.Stop();
//Console.WriteLine("FreeRedis2: " + sw.ElapsedMilliseconds + "ms");
tasks.Clear();
results.Clear();
cli.FlushDb();
//sw.Reset();
//sw.Start();
//for (var a = 0; a < 100000; a++)
//{
// using (var rds = cli.GetTestRedisSocket())
// {
// var cmd = new CommandPacket("SET").Input("TestMGet_string1").InputRaw(String);
// rds.Write(cmd);
// cmd.Read<string>();
// }
//}
//sw.Stop();
//Console.WriteLine("FreeRedis4: " + sw.ElapsedMilliseconds + "ms");
tasks.Clear();
results.Clear();
cli.FlushDb();
sw.Reset();
sw.Start();
// --- CSRedisCore: synchronous loop ---
for (var a = 0; a < 100000; a++)
{
var tmp = Guid.NewGuid().ToString();
RedisHelper.Set(tmp, String);
var val = RedisHelper.Get(tmp);
if (val != String) throw new Exception("not equal");
results.Enqueue(val);
}
sw.Stop();
Console.WriteLine("CSRedisCore(0-100000): " + sw.ElapsedMilliseconds + "ms results: " + results.Count);
tasks.Clear();
results.Clear();
cli.FlushDb();
sw.Reset();
sw.Start();
// --- CSRedisCore: 100k parallel Task.Run, blocking sync calls ---
tasks = new List<Task>();
for (var a = 0; a < 100000; a++)
{
tasks.Add(Task.Run(() =>
{
var tmp = Guid.NewGuid().ToString();
RedisHelper.Set(tmp, String);
var val = RedisHelper.Get(tmp);
if (val != String) throw new Exception("not equal");
results.Enqueue(val);
}));
}
Task.WaitAll(tasks.ToArray());
sw.Stop();
Console.WriteLine("CSRedisCore(Task.WaitAll 100000): " + sw.ElapsedMilliseconds + "ms results: " + results.Count);
tasks.Clear();
results.Clear();
cli.FlushDb();
sw.Reset();
sw.Start();
// --- CSRedisCore: single sequential async loop ---
Task.Run(async () =>
{
for (var a = 0; a < 100000; a++)
{
var tmp = Guid.NewGuid().ToString();
await RedisHelper.SetAsync(tmp, String);
var val = await RedisHelper.GetAsync(tmp);
if (val != String) throw new Exception("not equal");
results.Enqueue(val);
}
}).Wait();
sw.Stop();
Console.WriteLine("CSRedisCoreAsync(0-100000): " + sw.ElapsedMilliseconds + "ms results: " + results.Count);
tasks.Clear();
results.Clear();
cli.FlushDb();
sw.Reset();
sw.Start();
// --- CSRedisCore: 100k concurrent async tasks (value check disabled in the original) ---
tasks = new List<Task>();
for (var a = 0; a < 100000; a++)
{
tasks.Add(Task.Run(async () =>
{
var tmp = Guid.NewGuid().ToString();
await RedisHelper.SetAsync(tmp, String);
var val = await RedisHelper.GetAsync(tmp);
//if (val != String) throw new Exception("not equal");
results.Enqueue(val);
}));
}
Task.WaitAll(tasks.ToArray());
sw.Stop();
Console.WriteLine("CSRedisCoreAsync(Task.WaitAll 100000): " + sw.ElapsedMilliseconds + "ms results: " + results.Count + "\r\n");
tasks.Clear();
results.Clear();
cli.FlushDb();
}
19
View Source File : Program.cs
License : MIT License
Project Creator : 2881099
static void Main(string[] args)
{
sedb.StringSet("key1", (string)null);
var val111 = sedb.StringGet("key1");
RedisHelper.Initialization(new CSRedis.CSRedisClient("127.0.0.1:6379,asyncPipeline=true,preheat=100,poolsize=100"));
cli.Set("TestMGet_null1", "");
RedisHelper.Set("TestMGet_null1", "");
sedb.StringSet("TestMGet_string1", String);
ThreadPool.SetMinThreads(10001, 10001);
Stopwatch sw = new Stopwatch();
var tasks = new List<Task>();
var results = new ConcurrentQueue<string>();
cli.FlushDb();
results.Clear();
sw.Reset();
sw.Start();
for (var a = 0; a < 100000; a++)
{
var tmp = Guid.NewGuid().ToString();
sedb.StringSet(tmp, String);
var val = sedb.StringGet(tmp);
if (val != String) throw new Exception("not equal");
results.Enqueue(val);
}
sw.Stop();
Console.WriteLine("StackExchange(0-100000): " + sw.ElapsedMilliseconds + "ms results: " + results.Count);
tasks.Clear();
results.Clear();
cli.FlushDb();
sw.Reset();
sw.Start();
tasks = new List<Task>();
for (var a = 0; a < 100000; a++)
{
tasks.Add(Task.Run(() =>
{
var tmp = Guid.NewGuid().ToString();
sedb.StringSet(tmp, String);
var val = sedb.StringGet(tmp);
if (val != String) throw new Exception("not equal");
results.Enqueue(val);
}));
}
Task.WaitAll(tasks.ToArray());
sw.Stop();
Console.WriteLine("StackExchange(Task.WaitAll 100000): " + sw.ElapsedMilliseconds + "ms results: " + results.Count);
tasks.Clear();
results.Clear();
cli.FlushDb();
sw.Reset();
sw.Start();
Task.Run(async () =>
{
for (var a = 0; a < 100000; a++)
{
var tmp = Guid.NewGuid().ToString();
await sedb.StringSetAsync(tmp, String);
var val = await sedb.StringGetAsync(tmp);
if (val != String) throw new Exception("not equal");
results.Enqueue(val);
}
}).Wait();
sw.Stop();
Console.WriteLine("StackExchangeAsync(0-100000): " + sw.ElapsedMilliseconds + "ms results: " + results.Count);
tasks.Clear();
results.Clear();
cli.FlushDb();
sw.Reset();
sw.Start();
tasks = new List<Task>();
for (var a = 0; a < 100000; a++)
{
tasks.Add(Task.Run(async () =>
{
var tmp = Guid.NewGuid().ToString();
await sedb.StringSetAsync(tmp, String);
var val = await sedb.StringGetAsync(tmp);
if (val != String) throw new Exception("not equal");
results.Enqueue(val);
}));
}
Task.WaitAll(tasks.ToArray());
sw.Stop();
Console.WriteLine("StackExchangeAsync(Task.WaitAll 100000): " + sw.ElapsedMilliseconds + "ms results: " + results.Count + "\r\n");
tasks.Clear();
results.Clear();
cli.FlushDb();
sw.Reset();
sw.Start();
for (var a = 0; a < 100000; a++)
{
var tmp = Guid.NewGuid().ToString();
cli.Set(tmp, String);
var val = cli.Get(tmp);
if (val != String) throw new Exception("not equal");
results.Enqueue(val);
}
sw.Stop();
Console.WriteLine("FreeRedis(0-100000): " + sw.ElapsedMilliseconds + "ms results: " + results.Count);
tasks.Clear();
results.Clear();
cli.FlushDb();
sw.Reset();
sw.Start();
tasks = new List<Task>();
for (var a = 0; a < 100000; a++)
{
tasks.Add(Task.Run(() =>
{
var tmp = Guid.NewGuid().ToString();
cli.Set(tmp, String);
var val = cli.Get(tmp);
if (val != String) throw new Exception("not equal");
results.Enqueue(val);
}));
}
Task.WaitAll(tasks.ToArray());
sw.Stop();
Console.WriteLine("FreeRedis(Task.WaitAll 100000): " + sw.ElapsedMilliseconds + "ms results: " + results.Count);
tasks.Clear();
results.Clear();
cli.FlushDb();
//sw.Reset();
//sw.Start();
//Task.Run(async () =>
//{
// for (var a = 0; a < 100000; a++)
// {
// var tmp = Guid.NewGuid().ToString();
// await cli.SetAsync(tmp, String);
// var val = await cli.GetAsync(tmp);
// if (val != String) throw new Exception("not equal");
// results.Enqueue(val);
// }
//}).Wait();
//sw.Stop();
//Console.WriteLine("FreeRedisAsync(0-100000): " + sw.ElapsedMilliseconds + "ms results: " + results.Count);
//tasks.Clear();
//results.Clear();
//cli.FlushDb();
//FreeRedis.Internal.AsyncRedisSocket.sb.Clear();
//FreeRedis.Internal.AsyncRedisSocket.sw.Start();
//sw.Reset();
//sw.Start();
//tasks = new List<Task>();
//for (var a = 0; a < 100000; a++)
//{
// tasks.Add(Task.Run(async () =>
// {
// var tmp = Guid.NewGuid().ToString();
// await cli.SetAsync(tmp, String);
// var val = await cli.GetAsync(tmp);
// if (val != String) throw new Exception("not equal");
// results.Enqueue(val);
// }));
//}
//Task.WaitAll(tasks.ToArray());
//sw.Stop();
////var sbstr = FreeRedis.Internal.AsyncRedisSocket.sb.ToString()
////sbstr = sbstr + sbstr.Split("\r\n").Length + "条消息 ;
//Console.WriteLine("FreeRedisAsync(Task.WaitAll 100000): " + sw.ElapsedMilliseconds + "ms results: " + results.Count);
//tasks.Clear();
//results.Clear();
//cli.FlushDb();
sw.Reset();
sw.Start();
using (var pipe = cli.StartPipe())
{
for (var a = 0; a < 100000; a++)
{
var tmp = Guid.NewGuid().ToString();
pipe.Set(tmp, String);
var val = pipe.Get(tmp);
}
var vals = pipe.EndPipe();
for (var a = 1; a < 200000; a += 2)
{
var val = vals[a].ToString();
if (val != String) throw new Exception("not equal");
results.Enqueue(val);
}
}
sw.Stop();
Console.WriteLine("FreeRedisPipeline(0-100000): " + sw.ElapsedMilliseconds + "ms results: " + results.Count + "\r\n");
tasks.Clear();
results.Clear();
cli.FlushDb();
//sw.Reset();
//sw.Start();
//for (var a = 0; a < 100000; a++)
// cli.Call(new CommandPacket("SET").Input("TestMGet_string1").InputRaw(String));
//sw.Stop();
//Console.WriteLine("FreeRedis2: " + sw.ElapsedMilliseconds + "ms");
tasks.Clear();
results.Clear();
cli.FlushDb();
//sw.Reset();
//sw.Start();
//for (var a = 0; a < 100000; a++)
//{
// using (var rds = cli.GetTestRedisSocket())
// {
// var cmd = new CommandPacket("SET").Input("TestMGet_string1").InputRaw(String);
// rds.Write(cmd);
// cmd.Read<string>();
// }
//}
//sw.Stop();
//Console.WriteLine("FreeRedis4: " + sw.ElapsedMilliseconds + "ms");
tasks.Clear();
results.Clear();
cli.FlushDb();
sw.Reset();
sw.Start();
for (var a = 0; a < 100000; a++)
{
var tmp = Guid.NewGuid().ToString();
RedisHelper.Set(tmp, String);
var val = RedisHelper.Get(tmp);
if (val != String) throw new Exception("not equal");
results.Enqueue(val);
}
sw.Stop();
Console.WriteLine("CSRedisCore(0-100000): " + sw.ElapsedMilliseconds + "ms results: " + results.Count);
tasks.Clear();
results.Clear();
cli.FlushDb();
sw.Reset();
sw.Start();
tasks = new List<Task>();
for (var a = 0; a < 100000; a++)
{
tasks.Add(Task.Run(() =>
{
var tmp = Guid.NewGuid().ToString();
RedisHelper.Set(tmp, String);
var val = RedisHelper.Get(tmp);
if (val != String) throw new Exception("not equal");
results.Enqueue(val);
}));
}
Task.WaitAll(tasks.ToArray());
sw.Stop();
Console.WriteLine("CSRedisCore(Task.WaitAll 100000): " + sw.ElapsedMilliseconds + "ms results: " + results.Count);
tasks.Clear();
results.Clear();
cli.FlushDb();
sw.Reset();
sw.Start();
Task.Run(async () =>
{
for (var a = 0; a < 100000; a++)
{
var tmp = Guid.NewGuid().ToString();
await RedisHelper.SetAsync(tmp, String);
var val = await RedisHelper.GetAsync(tmp);
if (val != String) throw new Exception("not equal");
results.Enqueue(val);
}
}).Wait();
sw.Stop();
Console.WriteLine("CSRedisCoreAsync(0-100000): " + sw.ElapsedMilliseconds + "ms results: " + results.Count);
tasks.Clear();
results.Clear();
cli.FlushDb();
sw.Reset();
sw.Start();
tasks = new List<Task>();
for (var a = 0; a < 100000; a++)
{
tasks.Add(Task.Run(async () =>
{
var tmp = Guid.NewGuid().ToString();
await RedisHelper.SetAsync(tmp, String);
var val = await RedisHelper.GetAsync(tmp);
//if (val != String) throw new Exception("not equal");
results.Enqueue(val);
}));
}
Task.WaitAll(tasks.ToArray());
sw.Stop();
Console.WriteLine("CSRedisCoreAsync(Task.WaitAll 100000): " + sw.ElapsedMilliseconds + "ms results: " + results.Count + "\r\n");
tasks.Clear();
results.Clear();
cli.FlushDb();
}
19
View Source File : SpeedtestHandler.cs
License : GNU General Public License v3.0
Project Creator : 2dust
License : GNU General Public License v3.0
Project Creator : 2dust
// Measures the real round-trip "ping" time of each selected server by loading
// them all into one temporary v2ray instance and issuing an HTTP request
// through each server's dedicated local proxy port; per-item results are
// reported back through _updateFunc.
private void RunRealPing()
{
    // PID of the temporary v2ray process; -1 means it was never started.
    int pid = -1;
    try
    {
        string msg = string.Empty;
        pid = _v2rayHandler.LoadV2rayConfigString(_config, _selecteds);
        if (pid < 0)
        {
            // v2ray could not be started; report failure on the first item and stop.
            _updateFunc(_selecteds[0], UIRes.I18N("OperationFailed"));
            return;
        }
        //Thread.Sleep(5000);
        int httpPort = _config.GetLocalPort("speedtest");
        List<Task> tasks = new List<Task>();
        foreach (int itemIndex in _selecteds)
        {
            // Custom configs cannot be tested this way; skip them.
            if (_config.vmess[itemIndex].configType == (int)EConfigType.Custom)
            {
                continue;
            }
            tasks.Add(Task.Run(() =>
            {
                try
                {
                    // Each server is exposed on its own local port (base port + item index).
                    WebProxy webProxy = new WebProxy(Global.Loopback, httpPort + itemIndex);
                    int responseTime = -1;
                    string status = GetRealPingTime(_config.speedPingTestUrl, webProxy, out responseTime);
                    // An empty status means success -> show the time; otherwise show the error text.
                    string output = Utils.IsNullOrEmpty(status) ? FormatOut(responseTime, "ms") : FormatOut(status, "");
                    _updateFunc(itemIndex, output);
                }
                catch (Exception ex)
                {
                    // Per-item failures are logged but do not abort the other probes.
                    Utils.SaveLog(ex.Message, ex);
                }
            }));
            //Thread.Sleep(100);
        }
        Task.WaitAll(tasks.ToArray());
    }
    catch (Exception ex)
    {
        Utils.SaveLog(ex.Message, ex);
    }
    finally
    {
        // Always tear down the temporary v2ray process, even on failure.
        if (pid > 0) _v2rayHandler.V2rayStopPid(pid);
    }
}
19
View Source File : DigitalAnalyzerExampleViewModel.cs
License : MIT License
Project Creator : ABTSoftware
License : MIT License
Project Creator : ABTSoftware
/// <summary>
/// Generates channel view models for the given digital and analog sample data
/// in parallel, then appends the finished channels to <c>ChannelViewModels</c>.
/// </summary>
/// <param name="digitalChannels">Raw byte samples, one array per digital channel.</param>
/// <param name="analogChannels">Raw float samples, one array per analog channel.</param>
/// <remarks>
/// NOTE(review): identifiers were restored from scrape-censored text
/// ("replacedog" -> "analog"); names match the upstream SciChart example.
/// </remarks>
private async Task GenerateData(List<byte[]> digitalChannels, List<float[]> analogChannels)
{
    var digitalChannelsCount = digitalChannels.Count;
    var analogChannelsCount = analogChannels.Count;
    var totalChannelCount = digitalChannelsCount + analogChannelsCount;
    var channelList = new List<ChannelViewModel>(totalChannelCount);
    // New channels get ids continuing after the ones already present.
    var channelIndex = ChannelViewModels.Count;
    await Task.Run(async () =>
    {
        var xStart = 0d;
        var xStep = 1d;
        var digital = new List<Task<ChannelViewModel>>(digitalChannelsCount);
        var analog = new List<Task<ChannelViewModel>>(analogChannelsCount);
        foreach (var channel in digitalChannels)
        {
            // Capture a stable id per iteration before handing it to the lambda.
            var id = channelIndex++;
            digital.Add(Task.Run(() => ChannelGenerationHelper.Instance.GenerateDigitalChannel(xStart, xStep, channel, id)));
        }
        foreach (var channel in analogChannels)
        {
            var id = channelIndex++;
            analog.Add(Task.Run(() => ChannelGenerationHelper.Instance.GenerateAnalogChannel(xStart, xStep, channel, id)));
        }
        // Wait for every generation task, then collect results in order.
        await Task.WhenAll(digital.Union(analog));
        foreach (var p in digital.Union(analog))
        {
            channelList.Add(p.Result);
        }
    });
    channelList.ForEach(ch => ChannelViewModels.Add(ch));
}
19
View Source File : DigitalAnalyzerExampleViewModel.cs
License : MIT License
Project Creator : ABTSoftware
License : MIT License
Project Creator : ABTSoftware
/// <summary>
/// Generates a channel view model for each digital sample array in parallel,
/// then appends the finished channels to <c>ChannelViewModels</c>.
/// </summary>
/// <param name="digitalChannels">Raw byte samples, one array per digital channel.</param>
private async Task GenerateData(List<byte[]> digitalChannels)
{
    var channelCount = digitalChannels.Count;
    var generated = new List<ChannelViewModel>(channelCount);
    // New channels get ids continuing after the ones already present.
    var nextChannelIndex = ChannelViewModels.Count;
    await Task.Run(async () =>
    {
        var startX = 0d;
        var stepX = 1d;
        var generationTasks = new List<Task<ChannelViewModel>>(channelCount);
        foreach (var samples in digitalChannels)
        {
            // Capture a stable id per iteration before handing it to the lambda.
            var channelId = nextChannelIndex++;
            generationTasks.Add(Task.Run(() => ChannelGenerationHelper.Instance.GenerateDigitalChannel(startX, stepX, samples, channelId)));
        }
        await Task.WhenAll(generationTasks);
        foreach (var finished in generationTasks)
        {
            generated.Add(finished.Result);
        }
    });
    generated.ForEach(ch => ChannelViewModels.Add(ch));
}
19
View Source File : InternetCheck.cs
License : MIT License
Project Creator : actions
License : MIT License
Project Creator : actions
/// <summary>
/// Runs connectivity checks (DNS, ping, HTTPS GET) against api.github.com,
/// appending each check's log output to the log file as it completes.
/// </summary>
/// <returns><c>true</c> when every check passed.</returns>
public async Task<bool> RunCheck(string url, string pat)
{
    await File.AppendAllLinesAsync(_logFile, HostContext.WarnLog());
    await File.AppendAllLinesAsync(_logFile, HostContext.CheckProxy());
    var checkTasks = new List<Task<CheckResult>>();
    checkTasks.Add(CheckUtil.CheckDns("https://api.github.com"));
    checkTasks.Add(CheckUtil.CheckPing("https://api.github.com"));
    // We don't need to pass a PAT since it might be a token for GHES.
    checkTasks.Add(HostContext.CheckHttpsGetRequests("https://api.github.com", pat: null, expectedHeader: "X-GitHub-Request-Id"));
    var result = true;
    // Log each check as soon as it finishes instead of waiting for all of them.
    while (checkTasks.Count > 0)
    {
        var finishedCheckTask = await Task.WhenAny<CheckResult>(checkTasks);
        var finishedCheck = await finishedCheckTask;
        result = result && finishedCheck.Pass;
        await File.AppendAllLinesAsync(_logFile, finishedCheck.Logs);
        checkTasks.Remove(finishedCheckTask);
    }
    await Task.WhenAll(checkTasks);
    return result;
}
19
View Source File : ActionsCheck.cs
License : MIT License
Project Creator : actions
License : MIT License
Project Creator : actions
/// <summary>
/// Runs connectivity checks (DNS, ping, HTTPS GET/POST) against the GitHub API
/// and the Actions token/pipelines services, logging each check's result as it
/// completes.
/// </summary>
/// <returns><c>true</c> when every check passed.</returns>
public async Task<bool> RunCheck(string url, string pat)
{
    await File.AppendAllLinesAsync(_logFile, HostContext.WarnLog());
    await File.AppendAllLinesAsync(_logFile, HostContext.CheckProxy());
    var checkTasks = new List<Task<CheckResult>>();
    string githubApiUrl = null;
    string actionsTokenServiceUrl = null;
    string actionsPipelinesServiceUrl = null;
    var urlBuilder = new UriBuilder(url);
    if (UrlUtil.IsHostedServer(urlBuilder))
    {
        // Hosted (github.com): services live on well-known hosts.
        urlBuilder.Host = $"api.{urlBuilder.Host}";
        urlBuilder.Path = "";
        githubApiUrl = urlBuilder.Uri.AbsoluteUri;
        actionsTokenServiceUrl = "https://vstoken.actions.githubusercontent.com/_apis/health";
        actionsPipelinesServiceUrl = "https://pipelines.actions.githubusercontent.com/_apis/health";
    }
    else
    {
        // GHES: services are path-based on the appliance URL.
        urlBuilder.Path = "api/v3";
        githubApiUrl = urlBuilder.Uri.AbsoluteUri;
        urlBuilder.Path = "_services/vstoken/_apis/health";
        actionsTokenServiceUrl = urlBuilder.Uri.AbsoluteUri;
        urlBuilder.Path = "_services/pipelines/_apis/health";
        actionsPipelinesServiceUrl = urlBuilder.Uri.AbsoluteUri;
    }
    // check github api
    checkTasks.Add(CheckUtil.CheckDns(githubApiUrl));
    checkTasks.Add(CheckUtil.CheckPing(githubApiUrl));
    checkTasks.Add(HostContext.CheckHttpsGetRequests(githubApiUrl, pat, expectedHeader: "X-GitHub-Request-Id"));
    // check actions token service
    checkTasks.Add(CheckUtil.CheckDns(actionsTokenServiceUrl));
    checkTasks.Add(CheckUtil.CheckPing(actionsTokenServiceUrl));
    checkTasks.Add(HostContext.CheckHttpsGetRequests(actionsTokenServiceUrl, pat, expectedHeader: "x-vss-e2eid"));
    // check actions pipelines service
    checkTasks.Add(CheckUtil.CheckDns(actionsPipelinesServiceUrl));
    checkTasks.Add(CheckUtil.CheckPing(actionsPipelinesServiceUrl));
    checkTasks.Add(HostContext.CheckHttpsGetRequests(actionsPipelinesServiceUrl, pat, expectedHeader: "x-vss-e2eid"));
    // check HTTP POST to actions pipelines service
    checkTasks.Add(HostContext.CheckHttpsPostRequests(actionsPipelinesServiceUrl, pat, expectedHeader: "x-vss-e2eid"));
    var result = true;
    // Log each check as soon as it finishes instead of waiting for all of them.
    while (checkTasks.Count > 0)
    {
        var finishedCheckTask = await Task.WhenAny<CheckResult>(checkTasks);
        var finishedCheck = await finishedCheckTask;
        result = result && finishedCheck.Pass;
        await File.AppendAllLinesAsync(_logFile, finishedCheck.Logs);
        checkTasks.Remove(finishedCheckTask);
    }
    await Task.WhenAll(checkTasks);
    return result;
}
19
View Source File : FileContainerServer.cs
License : MIT License
Project Creator : actions
License : MIT License
Project Creator : actions
// Uploads the given files with `concurrentUploads` workers draining a shared
// queue, while a monitor task reports progress. Returns the files that failed
// to upload plus the total uploaded artifact size.
private async Task<UploadResult> ParallelUploadAsync(RunnerActionPluginExecutionContext context, IReadOnlyList<string> files, int concurrentUploads, CancellationToken token)
{
    // return files that fail to upload and total artifact size
    var uploadResult = new UploadResult();
    // nothing needs to upload
    if (files.Count == 0)
    {
        return uploadResult;
    }
    // ensure the file upload queue is empty.
    if (!_fileUploadQueue.IsEmpty)
    {
        throw new ArgumentOutOfRangeException(nameof(_fileUploadQueue));
    }
    // enqueue file into upload queue.
    foreach (var file in files)
    {
        _fileUploadQueue.Enqueue(file);
    }
    // Start upload monitor task.
    _uploadFilesProcessed = 0;
    _uploadFinished = new TaskCompletionSource<int>();
    _fileUploadTraceLog.Clear();
    _fileUploadProgressLog.Clear();
    Task uploadMonitor = UploadReportingAsync(context, files.Count(), _uploadCancellationTokenSource.Token);
    // Start parallel upload tasks.
    List<Task<UploadResult>> parallelUploadingTasks = new List<Task<UploadResult>>();
    for (int uploader = 0; uploader < concurrentUploads; uploader++)
    {
        // Each uploader drains _fileUploadQueue until it is empty.
        parallelUploadingTasks.Add(UploadAsync(context, uploader, _uploadCancellationTokenSource.Token));
    }
    // Wait for parallel upload finish.
    await Task.WhenAll(parallelUploadingTasks);
    foreach (var uploadTask in parallelUploadingTasks)
    {
        // record all failed files.
        uploadResult.AddUploadResult(await uploadTask);
    }
    // Stop monitor task;
    _uploadFinished.TrySetResult(0);
    await uploadMonitor;
    return uploadResult;
}
19
View Source File : FileContainerServer.cs
License : MIT License
Project Creator : actions
License : MIT License
Project Creator : actions
// Downloads the given files with `concurrentDownloads` workers draining a
// shared queue, while a monitor task reports progress. Returns the files that
// failed to download.
private async Task<DownloadResult> ParallelDownloadAsync(RunnerActionPluginExecutionContext context, IReadOnlyList<DownloadInfo> files, int concurrentDownloads, CancellationToken token)
{
    // return files that fail to download
    var downloadResult = new DownloadResult();
    // nothing needs to download
    if (files.Count == 0)
    {
        return downloadResult;
    }
    // ensure the file download queue is empty.
    if (!_fileDownloadQueue.IsEmpty)
    {
        throw new ArgumentOutOfRangeException(nameof(_fileDownloadQueue));
    }
    // enqueue file into download queue.
    foreach (var file in files)
    {
        _fileDownloadQueue.Enqueue(file);
    }
    // Start download monitor task.
    _downloadFilesProcessed = 0;
    _downloadFinished = new TaskCompletionSource<int>();
    Task downloadMonitor = DownloadReportingAsync(context, files.Count(), token);
    // Start parallel download tasks.
    List<Task<DownloadResult>> parallelDownloadingTasks = new List<Task<DownloadResult>>();
    for (int downloader = 0; downloader < concurrentDownloads; downloader++)
    {
        // Each downloader drains _fileDownloadQueue until it is empty.
        parallelDownloadingTasks.Add(DownloadAsync(context, downloader, token));
    }
    // Wait for parallel download finish.
    await Task.WhenAll(parallelDownloadingTasks);
    foreach (var downloadTask in parallelDownloadingTasks)
    {
        // record all failed files.
        downloadResult.AddDownloadResult(await downloadTask);
    }
    // Stop monitor task;
    _downloadFinished.TrySetResult(0);
    await downloadMonitor;
    return downloadResult;
}
19
View Source File : BankIdEventTrigger.cs
License : MIT License
Project Creator : ActiveLogin
License : MIT License
Project Creator : ActiveLogin
/// <summary>
/// Attaches the Active Login context to the event and dispatches it to every
/// registered listener, awaiting all handlers before returning.
/// </summary>
/// <exception cref="ArgumentNullException">The event is <c>null</c>.</exception>
public async Task TriggerAsync(BankIdEvent bankIdEvent)
{
    if (bankIdEvent is null)
    {
        throw new ArgumentNullException(nameof(bankIdEvent));
    }

    bankIdEvent.SetContext(_bankIdActiveLoginContext);

    var handlerTasks = new List<Task>();
    foreach (var listener in _listeners)
    {
        handlerTasks.Add(listener.HandleAsync(bankIdEvent));
    }

    await Task.WhenAll(handlerTasks);
}
19
View Source File : BankIdResultStoreEventListener.cs
License : MIT License
Project Creator : ActiveLogin
License : MIT License
Project Creator : ActiveLogin
/// <summary>
/// Forwards the completion data of a finished BankID collect flow to every
/// configured result store, awaiting all stores before returning.
/// </summary>
public override async Task HandleCollectCompletedEvent(BankIdCollectCompletedEvent e)
{
    var storeTasks = new List<Task>();
    foreach (var resultStore in _bankIdResultStores)
    {
        storeTasks.Add(resultStore.StoreCollectCompletedCompletionData(e.OrderRef, e.CompletionData));
    }

    await Task.WhenAll(storeTasks);
}
19
View Source File : EdgeCasesTest.cs
License : MIT License
Project Creator : adams85
License : MIT License
Project Creator : adams85
[Fact]
// Verifies that a log entry which keeps failing to write (file locked) does not
// block provider shutdown: completion must still finish within the configured
// timeout, and only the successfully-written entry ends up in the file.
// (Restored scrape-censored "replacedert" identifiers to xUnit "Assert".)
public async Task FailingEntryDontGetStuck()
{
    var logsDirName = Guid.NewGuid().ToString("D");
    var tempPath = Path.Combine(Path.GetTempPath());
    var logPath = Path.Combine(tempPath, logsDirName);
    if (Directory.Exists(logPath))
        Directory.Delete(logPath, recursive: true);
    var fileProvider = new PhysicalFileProvider(tempPath);
    var options = new FileLoggerOptions
    {
        FileAppender = new PhysicalFileAppender(fileProvider),
        BasePath = logsDirName,
        Files = new[]
        {
            new LogFileOptions
            {
                Path = "default.log",
            },
        },
    };
    var optionsMonitor = new DelegatedOptionsMonitor<FileLoggerOptions>(_ => options);
    var completeCts = new CancellationTokenSource();
    var completionTimeoutMs = 2000;
    var context = new TestFileLoggerContext(completeCts.Token, TimeSpan.FromMilliseconds(completionTimeoutMs), writeRetryDelay: TimeSpan.FromMilliseconds(250));
    context.SetTimestamp(new DateTime(2017, 1, 1, 0, 0, 0, DateTimeKind.Utc));
    var services = new ServiceCollection();
    services.AddOptions();
    services.AddLogging(b => b.AddFile(context));
    services.AddSingleton<IOptionsMonitor<FileLoggerOptions>>(optionsMonitor);
    string filePath = Path.Combine(logPath, "default.log");
    try
    {
        FileLoggerProvider[] providers;
        using (ServiceProvider sp = services.BuildServiceProvider())
        {
            providers = context.GetProviders(sp).ToArray();
            Assert.Equal(1, providers.Length);
            var resetTasks = new List<Task>();
            foreach (FileLoggerProvider provider in providers)
                provider.Reset += (s, e) => resetTasks.Add(e);
            ILoggerFactory loggerFactory = sp.GetRequiredService<ILoggerFactory>();
            ILogger logger = loggerFactory.CreateLogger("X");
            logger.LogInformation("This should get through.");
            optionsMonitor.Reload();
            // ensuring that reset has been finished and the new settings are effective
            await Task.WhenAll(resetTasks);
            // Hold the log file open with FileShare.Read so the next write keeps failing.
            using (var fs = new FileStream(filePath, FileMode.Open, FileAccess.Read, FileShare.Read))
            {
                logger.LogInformation("This shouldn't get through.");
                Task completion = context.GetCompletion(sp);
                Assert.False(completion.IsCompleted);
                completeCts.Cancel();
                // Completion must win the race against a 2x-timeout delay.
                Assert.Equal(completion, await Task.WhenAny(completion, Task.Delay(TimeSpan.FromMilliseconds(completionTimeoutMs * 2))));
                Assert.Equal(TaskStatus.RanToCompletion, completion.Status);
            }
        }
        IFileInfo logFile = fileProvider.GetFileInfo($"{logsDirName}/default.log");
        Assert.True(logFile.Exists && !logFile.IsDirectory);
        var lines = logFile.ReadAllText(out Encoding encoding).Split(new[] { Environment.NewLine }, StringSplitOptions.None);
        Assert.Equal(Encoding.UTF8, encoding);
        Assert.Equal(new[]
        {
            $"info: X[0] @ {context.GetTimestamp().ToLocalTime():o}",
            $"      This should get through.",
            ""
        }, lines);
    }
    finally
    {
        Directory.Delete(logPath, recursive: true);
    }
}
19
View Source File : SettingsTest.cs
License : MIT License
Project Creator : adams85
License : MIT License
Project Creator : adams85
[Fact]
// Verifies that rewriting the JSON config at runtime reloads the file logger's
// options (minimum level and scope inclusion) and that entries logged after the
// reload honor the new settings.
// (Restored scrape-censored "replacedert" identifiers to xUnit "Assert".)
public async Task ReloadOptionsSettings()
{
    var configJson =
        $@"{{
""{FileLoggerProvider.Alias}"": {{
""{nameof(FileLoggerOptions.IncludeScopes)}"" : true,
""{nameof(FileLoggerOptions.Files)}"": [
{{
""{nameof(LogFileOptions.Path)}"": ""test.log"",
}}],
""{nameof(LoggerFilterRule.LogLevel)}"": {{
""{LogFileOptions.DefaultCategoryName}"": ""{LogLevel.Trace}""
}}
}}
}}";
    var fileProvider = new MemoryFileProvider();
    fileProvider.CreateFile("config.json", configJson, Encoding.UTF8);
    var cb = new ConfigurationBuilder();
    cb.AddJsonFile(fileProvider, "config.json", optional: false, reloadOnChange: true);
    IConfigurationRoot config = cb.Build();
    var completeCts = new CancellationTokenSource();
    var context = new TestFileLoggerContext(completeCts.Token, completionTimeout: Timeout.InfiniteTimeSpan);
    context.SetTimestamp(new DateTime(2017, 1, 1, 0, 0, 0, DateTimeKind.Utc));
    var services = new ServiceCollection();
    services.AddOptions();
    services.AddLogging(b =>
    {
        b.AddConfiguration(config);
        b.AddFile(context);
    });
    var fileAppender = new MemoryFileAppender(fileProvider);
    services.Configure<FileLoggerOptions>(o => o.FileAppender ??= fileAppender);
    FileLoggerProvider[] providers;
    using (ServiceProvider sp = services.BuildServiceProvider())
    {
        providers = context.GetProviders(sp).ToArray();
        Assert.Equal(1, providers.Length);
        var resetTasks = new List<Task>();
        foreach (FileLoggerProvider provider in providers)
            provider.Reset += (s, e) => resetTasks.Add(e);
        ILoggerFactory loggerFactory = sp.GetService<ILoggerFactory>();
        ILogger<SettingsTest> logger1 = loggerFactory.CreateLogger<SettingsTest>();
        using (logger1.BeginScope("SCOPE"))
        {
            logger1.LogTrace("This is a nice logger.");
            using (logger1.BeginScope("NESTED SCOPE"))
            {
                logger1.LogInformation("This is a smart logger.");
                // changing switch and scopes inclusion
                configJson =
                    $@"{{
""{FileLoggerProvider.Alias}"": {{
""{nameof(FileLoggerOptions.Files)}"": [
{{
""{nameof(LogFileOptions.Path)}"": ""test.log"",
}}],
""{nameof(LoggerFilterRule.LogLevel)}"": {{
""{LogFileOptions.DefaultCategoryName}"": ""{LogLevel.Information}""
}}
}}
}}";
                Assert.Equal(0, resetTasks.Count);
                fileProvider.WriteContent("config.json", configJson);
                // reload is triggered twice due to a bug in the framework (https://github.com/aspnet/Logging/issues/874)
                Assert.Equal(1 * 2, resetTasks.Count);
                // ensuring that reset has been finished and the new settings are effective
                await Task.WhenAll(resetTasks);
                logger1 = loggerFactory.CreateLogger<SettingsTest>();
                logger1.LogInformation("This one shouldn't include scopes.");
                logger1.LogTrace("This one shouldn't be included at all.");
            }
        }
        completeCts.Cancel();
        // ensuring that all entries are processed
        await context.GetCompletion(sp);
        Assert.True(providers.All(provider => provider.Completion.IsCompleted));
    }
    var logFile = (MemoryFileInfo)fileProvider.GetFileInfo("test.log");
    Assert.True(logFile.Exists && !logFile.IsDirectory);
    var lines = logFile.ReadAllText(out Encoding encoding).Split(new[] { Environment.NewLine }, StringSplitOptions.None);
    Assert.Equal(Encoding.UTF8, encoding);
    Assert.Equal(new[]
    {
        $"trce: {typeof(SettingsTest).FullName}[0] @ {context.GetTimestamp().ToLocalTime():o}",
        $"      => SCOPE",
        $"      This is a nice logger.",
        $"info: {typeof(SettingsTest).FullName}[0] @ {context.GetTimestamp().ToLocalTime():o}",
        $"      => SCOPE => NESTED SCOPE",
        $"      This is a smart logger.",
        $"info: {typeof(SettingsTest).FullName}[0] @ {context.GetTimestamp().ToLocalTime():o}",
        $"      This one shouldn't include scopes.",
        ""
    }, lines);
}
19
View Source File : SettingsTest.cs
License : MIT License
Project Creator : adams85
License : MIT License
Project Creator : adams85
[Fact]
// Verifies that a config reload re-applies per-provider filter levels when two
// file logger providers (each writing its own file) share one configuration.
// (Restored scrape-censored "replacedert" identifiers to xUnit "Assert".)
public async Task ReloadOptionsSettingsMultipleProviders()
{
    var fileProvider = new MemoryFileProvider();
    var fileAppender = new MemoryFileAppender(fileProvider);
    // Build the initial JSON settings object: global level None, "one.log" at
    // Warning for the default provider, "other.log" at Information for the other.
    dynamic settings = new JObject();
    dynamic globalFilters = settings[nameof(LoggerFilterRule.LogLevel)] = new JObject();
    globalFilters[LogFileOptions.DefaultCategoryName] = LogLevel.None.ToString();
    settings[FileLoggerProvider.Alias] = new JObject();
    dynamic fileFilters = settings[FileLoggerProvider.Alias][nameof(LoggerFilterRule.LogLevel)] = new JObject();
    fileFilters[LogFileOptions.DefaultCategoryName] = LogLevel.Warning.ToString();
    dynamic oneFile = new JObject();
    oneFile.Path = "one.log";
    settings[FileLoggerProvider.Alias][nameof(FileLoggerOptions.Files)] = new JArray(oneFile);
    settings[OtherFileLoggerProvider.Alias] = new JObject();
    dynamic otherFileFilters = settings[OtherFileLoggerProvider.Alias][nameof(LoggerFilterRule.LogLevel)] = new JObject();
    otherFileFilters[LogFileOptions.DefaultCategoryName] = LogLevel.Information.ToString();
    dynamic otherFile = new JObject();
    otherFile.Path = "other.log";
    settings[OtherFileLoggerProvider.Alias][nameof(FileLoggerOptions.Files)] = new JArray(otherFile);
    var configJson = ((JObject)settings).ToString();
    fileProvider.CreateFile("config.json", configJson);
    IConfigurationRoot config = new ConfigurationBuilder()
        .AddJsonFile(fileProvider, "config.json", optional: false, reloadOnChange: true)
        .Build();
    var context = new TestFileLoggerContext(default, completionTimeout: Timeout.InfiniteTimeSpan);
    var services = new ServiceCollection();
    services.AddOptions();
    services.AddLogging(lb =>
    {
        lb.AddConfiguration(config);
        lb.AddFile(context, o => o.FileAppender ??= fileAppender);
        lb.AddFile<OtherFileLoggerProvider>(context, o => o.FileAppender ??= fileAppender);
    });
    FileLoggerProvider[] providers;
    using (ServiceProvider sp = services.BuildServiceProvider())
    {
        providers = context.GetProviders(sp).ToArray();
        Assert.Equal(2, providers.Length);
        var resetTasks = new List<Task>();
        foreach (FileLoggerProvider provider in providers)
            provider.Reset += (s, e) => resetTasks.Add(e);
        ILoggerFactory loggerFactory = sp.GetRequiredService<ILoggerFactory>();
        ILogger logger = loggerFactory.CreateLogger("X");
        logger.LogInformation("This is an info.");
        logger.LogWarning("This is a warning.");
        // Swap the two providers' levels and rewrite the config to trigger a reload.
        fileFilters[LogFileOptions.DefaultCategoryName] = LogLevel.Information.ToString();
        otherFileFilters[LogFileOptions.DefaultCategoryName] = LogLevel.Warning.ToString();
        configJson = ((JObject)settings).ToString();
        Assert.Equal(0, resetTasks.Count);
        fileProvider.WriteContent("config.json", configJson);
        // reload is triggered twice due to a bug in the framework (https://github.com/aspnet/Logging/issues/874)
        Assert.Equal(2 * 2, resetTasks.Count);
        // ensuring that reset has been finished and the new settings are effective
        await Task.WhenAll(resetTasks);
        logger.LogInformation("This is another info.");
        logger.LogWarning("This is another warning.");
    }
    Assert.True(providers.All(provider => provider.Completion.IsCompleted));
    var logFile = (MemoryFileInfo)fileProvider.GetFileInfo((string)oneFile.Path);
    Assert.True(logFile.Exists && !logFile.IsDirectory);
    var lines = logFile.ReadAllText(out Encoding encoding).Split(new[] { Environment.NewLine }, StringSplitOptions.None);
    Assert.Equal(Encoding.UTF8, encoding);
    Assert.Equal(new[]
    {
        $"warn: X[0] @ {context.GetTimestamp().ToLocalTime():o}",
        $"      This is a warning.",
        $"info: X[0] @ {context.GetTimestamp().ToLocalTime():o}",
        $"      This is another info.",
        $"warn: X[0] @ {context.GetTimestamp().ToLocalTime():o}",
        $"      This is another warning.",
        ""
    }, lines);
    logFile = (MemoryFileInfo)fileProvider.GetFileInfo((string)otherFile.Path);
    Assert.True(logFile.Exists && !logFile.IsDirectory);
    lines = logFile.ReadAllText(out encoding).Split(new[] { Environment.NewLine }, StringSplitOptions.None);
    Assert.Equal(Encoding.UTF8, encoding);
    Assert.Equal(new[]
    {
        $"info: X[0] @ {context.GetTimestamp().ToLocalTime():o}",
        $"      This is an info.",
        $"warn: X[0] @ {context.GetTimestamp().ToLocalTime():o}",
        $"      This is a warning.",
        $"warn: X[0] @ {context.GetTimestamp().ToLocalTime():o}",
        $"      This is another warning.",
        ""
    }, lines);
}
19
View Source File : JobRescuerHostedService.cs
License : MIT License
Project Creator : AdemCatamak
License : MIT License
Project Creator : AdemCatamak
/// <summary>
/// Resolves every registered rescue option from a fresh service scope and runs
/// the job rescuer for each of them concurrently.
/// </summary>
private async Task BackgroundJob(CancellationToken cancellationToken)
{
    using IServiceScope scope = _serviceProvider.CreateScope();
    var jobRescuer = scope.ServiceProvider.GetRequiredService<IJobRescuer>();
    List<RescueOption> rescueOptions = scope.ServiceProvider
        .GetServices<MessageHandlerMetadata>()
        .Select(metaData => metaData.RescueOption)
        .ToList()!;

    var pendingRescues = new List<Task>();
    foreach (var option in rescueOptions)
    {
        pendingRescues.Add(jobRescuer.RescueAsync(option, cancellationToken));
    }

    await Task.WhenAll(pendingRescues);
}
19
View Source File : JobRetrierHostedService.cs
License : MIT License
Project Creator : AdemCatamak
License : MIT License
Project Creator : AdemCatamak
/// <summary>
/// Resolves every registered non-null retry option from a fresh service scope
/// and runs the job retrier for each of them concurrently.
/// </summary>
private async Task BackgroundJob(CancellationToken cancellationToken)
{
    using IServiceScope scope = _serviceProvider.CreateScope();
    var jobRetrier = scope.ServiceProvider.GetRequiredService<IJobRetrier>();
    List<RetryOption> retryOptions = scope.ServiceProvider
        .GetServices<MessageHandlerMetadata>()
        .Where(metaData => metaData.RetryOption != null)
        .Select(metaData => metaData.RetryOption)
        .ToList()!;

    var pendingRetries = new List<Task>();
    foreach (var option in retryOptions)
    {
        pendingRetries.Add(jobRetrier.RetryAsync(option, cancellationToken));
    }

    await Task.WhenAll(pendingRetries);
}
19
View Source File : AddMessageTest.cs
License : MIT License
Project Creator : AdemCatamak
License : MIT License
Project Creator : AdemCatamak
[ReleaseModeTheory]
[InlineData(2000, 20000)]
[InlineData(1000, 10000)]
[InlineData(100, 1000)]
// Performance guard: adding `times` messages concurrently must finish within
// `expectedMilliseconds`. (Restored scrape-censored "replacedertThat" helper
// name to "AssertThat".)
public void When_AddMessageCalled__ResponseTimeShouldNotExceed(int times, int expectedMilliseconds)
{
    Stopwatch stopwatch = new Stopwatch();
    stopwatch.Start();
    List<Task> tasks = new List<Task>();
    for (int i = 0; i < times; i++)
    {
        DummyMessage dummyMessage = new DummyMessage()
        {
            Guid = Guid.NewGuid()
        };
        var task = _messageStorageClient.AddMessageAsync(dummyMessage);
        tasks.Add(task);
    }
    Task.WaitAll(tasks.ToArray());
    stopwatch.Stop();
    string message = $"Expected Execution Time : {expectedMilliseconds} ms{Environment.NewLine}" +
                     $"Actual Execution Time : {stopwatch.ElapsedMilliseconds} ms";
    AssertThat.LessThan(expectedMilliseconds, stopwatch.ElapsedMilliseconds, message);
    _output.WriteLine(message);
}
19
View Source File : HandleMessageTest.cs
License : MIT License
Project Creator : AdemCatamak
License : MIT License
Project Creator : AdemCatamak
[ReleaseModeTheory]
[InlineData(1, 1000, 10000)]
[InlineData(2, 1000, 7500)]
[InlineData(4, 1000, 5000)]
[InlineData(1, 100, 1500)]
[InlineData(2, 100, 1000)]
[InlineData(4, 100, 750)]
// Performance guard: `concurrentJobCount` workers draining `times` queued jobs
// must finish within `expectedMilliseconds`. (Restored scrape-censored
// "replacedertThat" helper name to "AssertThat".)
public void When_JobHandledCalledWithParallel__ResponseTimeShouldNotExceed(int concurrentJobCount, int times, int expectedMilliseconds)
{
    // Seed the storage with `times` messages before timing the handlers.
    List<Task> tasks = new List<Task>();
    for (var i = 0; i < times; i++)
    {
        DummyMessage dummyMessage = new DummyMessage
        {
            Guid = Guid.NewGuid()
        };
        var task = _messageStorageClient.AddMessageAsync(dummyMessage);
        tasks.Add(task);
    }
    Task.WaitAll(tasks.ToArray());
    Stopwatch stopwatch = new Stopwatch();
    stopwatch.Start();
    // Each worker keeps handling jobs until the queue is drained.
    List<Task> jobProcessorTasks = new List<Task>();
    for (var i = 0; i < concurrentJobCount; i++)
    {
        Task task = Task.Run(async () =>
            {
                bool jobHandled;
                do
                {
                    jobHandled = await _jobDispatcher.HandleNextJobAsync();
                } while (jobHandled);
            }
        );
        jobProcessorTasks.Add(task);
    }
    Task.WaitAll(jobProcessorTasks.ToArray());
    stopwatch.Stop();
    string message = $"Parallel Job Count : {concurrentJobCount}{Environment.NewLine}" +
                     $"Executed Job Count : {times}{Environment.NewLine}" +
                     $"Expected Execution Time : {expectedMilliseconds} ms{Environment.NewLine}" +
                     $"Actual Execution Time : {stopwatch.ElapsedMilliseconds} ms";
    AssertThat.LessThan(expectedMilliseconds, stopwatch.ElapsedMilliseconds, message);
    _output.WriteLine(message);
}
19
View Source File : HandleMessageTest.cs
License : MIT License
Project Creator : AdemCatamak
License : MIT License
Project Creator : AdemCatamak
[ReleaseModeTheory]
[InlineData(1, 1000, 10000)]
[InlineData(2, 1000, 7500)]
[InlineData(4, 1000, 5000)]
[InlineData(1, 100, 1500)]
[InlineData(2, 100, 1000)]
[InlineData(4, 100, 750)]
// Performance guard: `concurrentJobCount` workers draining `times` queued jobs
// must finish within `expectedMilliseconds`. (Restored scrape-censored
// "replacedertThat" helper name to "AssertThat".)
public void When_JobHandledCalledWithParallel__ResponseTimeShouldNotExceed(int concurrentJobCount, int times, int expectedMilliseconds)
{
    // Seed the storage with `times` messages before timing the handlers.
    List<Task> tasks = new List<Task>();
    for (var i = 0; i < times; i++)
    {
        DummyMessage dummyMessage = new DummyMessage
        {
            Guid = Guid.NewGuid()
        };
        var task = _messageStorageClient.AddMessageAsync(dummyMessage);
        tasks.Add(task);
    }
    Task.WaitAll(tasks.ToArray());
    Stopwatch stopwatch = new Stopwatch();
    stopwatch.Start();
    // Each worker keeps handling jobs until the queue is drained.
    List<Task> jobProcessorTasks = new List<Task>();
    for (var i = 0; i < concurrentJobCount; i++)
    {
        Task task = Task.Run(async () =>
            {
                bool jobHandled;
                do
                {
                    jobHandled = await _jobDispatcher.HandleNextJobAsync();
                } while (jobHandled);
            }
        );
        jobProcessorTasks.Add(task);
    }
    Task.WaitAll(jobProcessorTasks.ToArray());
    stopwatch.Stop();
    string message = $"Parallel Job Count : {concurrentJobCount}{Environment.NewLine}" +
                     $"Executed Job Count : {times}{Environment.NewLine}" +
                     $"Expected Execution Time : {expectedMilliseconds} ms{Environment.NewLine}" +
                     $"Actual Execution Time : {stopwatch.ElapsedMilliseconds} ms";
    AssertThat.LessThan(expectedMilliseconds, stopwatch.ElapsedMilliseconds, message);
    _output.WriteLine(message);
}
19
View Source File : JobDispatcher_HandleNextJobAsync_Test.cs
License : MIT License
Project Creator : AdemCatamak
License : MIT License
Project Creator : AdemCatamak
[Theory]
[InlineData(1, 100)]
[InlineData(4, 100)]
[InlineData(8, 100)]
[InlineData(1, 300)]
[InlineData(4, 300)]
[InlineData(8, 300)]
[InlineData(12, 300)]
[InlineData(16, 300)]
[InlineData(1, 1000)]
[InlineData(4, 1000)]
[InlineData(8, 1000)]
[InlineData(12, 1000)]
[InlineData(16, 1000)]
// Correctness guard: no job is lost or double-handled when several workers
// drain the queue concurrently. (Restored scrape-censored "replacedert"
// identifier to xUnit "Assert".)
public void When_JobHandledCalledWithParallel__InitialJobCountAndJobExecutionCountShouldBeEqual(int parallelJobCount, int initialJobCount)
{
    // Seed the storage with `initialJobCount` messages.
    List<Task> addTasks = new List<Task>();
    for (var i = 0; i < initialJobCount; i++)
    {
        DummyMessage dummyMessage = new DummyMessage
        {
            Guid = Guid.NewGuid()
        };
        var task = _messageStorageClient.AddMessageAsync(dummyMessage);
        addTasks.Add(task);
    }
    Task.WaitAll(addTasks.ToArray());
    List<Task> jobProcessorTasks = new List<Task>();
    var actualExecutedJobCount = 0;
    for (var i = 0; i < parallelJobCount; i++)
    {
        Task task = Task.Run(async () =>
            {
                bool jobHandled;
                do
                {
                    jobHandled = await _jobDispatcher.HandleNextJobAsync();
                    if (jobHandled)
                    {
                        // Counter is shared across workers; increment atomically.
                        Interlocked.Increment(ref actualExecutedJobCount);
                    }
                } while (jobHandled);
            }
        );
        jobProcessorTasks.Add(task);
    }
    Task.WaitAll(jobProcessorTasks.ToArray());
    string message = $"Parallel Job Count : {parallelJobCount}{Environment.NewLine}" +
                     $"Expected Executed Job Count : {initialJobCount}{Environment.NewLine}" +
                     $"Actual Executed Job Count : {actualExecutedJobCount}";
    Assert.Equal(initialJobCount, actualExecutedJobCount);
    _output.WriteLine(message);
}
19
View Source File : JobDispatcher_HandleNextJobAsync_Test.cs
License : MIT License
Project Creator : AdemCatamak
License : MIT License
Project Creator : AdemCatamak
[Fact]
// Each job blocks for a fixed duration; if the dispatcher really runs them in
// parallel, total wall-clock time stays close to a single job's duration.
public void WhenParallelJobIsAllowed__TasksShouldNotWaitEachOther()
{
    TimeSpan jobConsumeTime = TimeSpan.FromSeconds(5);
    const int jobCount = 8;

    // Enqueue the long-running requests up front and wait for all posts.
    var enqueueTasks = new List<Task>();
    for (var index = 0; index < jobCount; index++)
    {
        enqueueTasks.Add(_messageStorageClient.AddMessageAsync(new LongProcessRequest(jobConsumeTime)));
    }
    Task.WaitAll(enqueueTasks.ToArray());

    // Kick off one handler per job without awaiting in between, then time them all.
    var timer = Stopwatch.StartNew();
    var handleTasks = new List<Task>();
    for (var index = 0; index < jobCount; index++)
    {
        handleTasks.Add(_jobDispatcher.HandleNextJobAsync());
    }
    Task.WaitAll(handleTasks.ToArray());
    timer.Stop();

    // Allow 50% slack over one job's duration before declaring serialization.
    double threshold = jobConsumeTime.TotalMilliseconds * 1.5;
    _output.WriteLine($"Threshold : {threshold} ms -- Actual execution time : {timer.ElapsedMilliseconds} ms");
    replacedertThat.LessThan(threshold, timer.ElapsedMilliseconds);
}
19
View Source File : HandleMessageTest.cs
License : MIT License
Project Creator : AdemCatamak
License : MIT License
Project Creator : AdemCatamak
[ReleaseModeTheory]
[InlineData(1, 1000, 10000)]
[InlineData(2, 1000, 7500)]
[InlineData(4, 1000, 5000)]
[InlineData(1, 100, 1500)]
[InlineData(2, 100, 1000)]
[InlineData(4, 100, 750)]
// Performance check: draining `times` jobs with `parallelJobCount` workers must
// finish within `expectedMilliseconds`. Release-mode only (timing-sensitive).
public void When_JobHandledCalledWithParallel__ResponseTimeShouldNotExceed(int parallelJobCount, int times, int expectedMilliseconds)
{
// Arrange: enqueue `times` messages concurrently, then wait for all posts.
List<Task> addTasks = new List<Task>();
for (var i = 0; i < times; i++)
{
DummyMessage dummyMessage = new DummyMessage
{
Guid = Guid.NewGuid()
};
var task = _messageStorageClient.AddMessageAsync(dummyMessage);
addTasks.Add(task);
}
Task.WaitAll(addTasks.ToArray());
// Act: time how long the workers take to drain the queue completely.
Stopwatch stopwatch = new Stopwatch();
stopwatch.Start();
List<Task> jobProcessorTasks = new List<Task>();
for (var i = 0; i < parallelJobCount; i++)
{
Task task = Task.Run(async () =>
{
bool jobHandled;
do
{
jobHandled = await _jobDispatcher.HandleNextJobAsync();
} while (jobHandled);
}
);
jobProcessorTasks.Add(task);
}
Task.WaitAll(jobProcessorTasks.ToArray());
stopwatch.Stop();
// Assert: elapsed time must be below the per-case budget.
string message = $"Parallel Job Count : {parallelJobCount}{Environment.NewLine}" +
$"Executed Job Count : {times}{Environment.NewLine}" +
$"Expected Execution Time : {expectedMilliseconds} ms{Environment.NewLine}" +
$"Actual Execution Time : {stopwatch.ElapsedMilliseconds} ms";
replacedertThat.LessThan(expectedMilliseconds, stopwatch.ElapsedMilliseconds, message);
_output.WriteLine(message);
}
19
View Source File : HandleMessageTest.cs
License : MIT License
Project Creator : AdemCatamak
License : MIT License
Project Creator : AdemCatamak
[ReleaseModeTheory]
[InlineData(1, 1000, 10000)]
[InlineData(2, 1000, 7500)]
[InlineData(4, 1000, 5000)]
[InlineData(1, 100, 1500)]
[InlineData(2, 100, 1000)]
[InlineData(4, 100, 750)]
// Performance check: draining `times` jobs with `parallelJobCount` workers must
// finish within `expectedMilliseconds`. Release-mode only (timing-sensitive).
public void When_JobHandledCalledWithParallel__ResponseTimeShouldNotExceed(int parallelJobCount, int times, int expectedMilliseconds)
{
// Arrange: enqueue `times` messages concurrently, then wait for all posts.
List<Task> addTasks = new List<Task>();
for (var i = 0; i < times; i++)
{
DummyMessage dummyMessage = new DummyMessage
{
Guid = Guid.NewGuid()
};
var task = _messageStorageClient.AddMessageAsync(dummyMessage);
addTasks.Add(task);
}
Task.WaitAll(addTasks.ToArray());
// Act: time how long the workers take to drain the queue completely.
Stopwatch stopwatch = new Stopwatch();
stopwatch.Start();
List<Task> jobProcessorTasks = new List<Task>();
for (var i = 0; i < parallelJobCount; i++)
{
Task task = Task.Run(async () =>
{
bool jobHandled;
do
{
jobHandled = await _jobDispatcher.HandleNextJobAsync();
} while (jobHandled);
}
);
jobProcessorTasks.Add(task);
}
Task.WaitAll(jobProcessorTasks.ToArray());
stopwatch.Stop();
// Assert: elapsed time must be below the per-case budget.
string message = $"Parallel Job Count : {parallelJobCount}{Environment.NewLine}" +
$"Executed Job Count : {times}{Environment.NewLine}" +
$"Expected Execution Time : {expectedMilliseconds} ms{Environment.NewLine}" +
$"Actual Execution Time : {stopwatch.ElapsedMilliseconds} ms";
replacedertThat.LessThan(expectedMilliseconds, stopwatch.ElapsedMilliseconds, message);
_output.WriteLine(message);
}
19
View Source File : JobDispatcher_HandleNextJobAsync_Test.cs
License : MIT License
Project Creator : AdemCatamak
License : MIT License
Project Creator : AdemCatamak
[Fact]
// Each job blocks for a fixed duration; if the system under test really runs
// them in parallel, total wall-clock time stays close to one job's duration.
public void WhenParallelJobIsAllowed__TasksShouldNotWaitEachOther()
{
    TimeSpan jobConsumeTime = TimeSpan.FromSeconds(5);
    const int jobCount = 8;

    // Enqueue the long-running requests up front and wait for all posts.
    var enqueueTasks = new List<Task>();
    for (var index = 0; index < jobCount; index++)
    {
        enqueueTasks.Add(_messageStorageClient.AddMessageAsync(new LongProcessRequest(jobConsumeTime)));
    }
    Task.WaitAll(enqueueTasks.ToArray());

    // Kick off one handler per job without awaiting in between, then time them all.
    var timer = Stopwatch.StartNew();
    var handleTasks = new List<Task>();
    for (var index = 0; index < jobCount; index++)
    {
        handleTasks.Add(_sut.HandleNextJobAsync());
    }
    Task.WaitAll(handleTasks.ToArray());
    timer.Stop();

    // Allow 50% slack over one job's duration before declaring serialization.
    double threshold = jobConsumeTime.TotalMilliseconds * 1.5;
    _output.WriteLine($"Threshold : {threshold} ms -- Actual execution time : {timer.ElapsedMilliseconds} ms");
    replacedertThat.LessThan(threshold, timer.ElapsedMilliseconds);
}
19
View Source File : AppManager.cs
License : MIT License
Project Creator : admaiorastudio
License : MIT License
Project Creator : admaiorastudio
// Establishes the SignalR connection to the RealXaml server. Three discovery
// strategies race each other: loopback probing (emulator), UDP broadcast
// (real device), and a 5-second timeout; the first to succeed cancels the rest.
// NOTE(review): not thread-safe — concurrent callers could both pass the
// _isConnected guard; confirm single-threaded usage at call sites.
private async Task ConnectAsync()
{
if (_isConnected)
return;
// Emulators loopback addresses
IPAddress[] loopbackAddresses = new[]
{
IPAddress.Parse("127.0.0.1"),
IPAddress.Parse("10.0.2.2"),
IPAddress.Parse("10.0.3.2"),
IPAddress.Parse("169.254.80.80")
};
// Check if we are an emulator instance
List<Task<string>> waitTasks = new List<Task<string>>();
CancellationTokenSource cts = new CancellationTokenSource();
// Look for server using localhost (an emulator device)
// NOTE(review): Take(1) probes only 127.0.0.1 — the other three loopback
// addresses above are currently unused; confirm whether that is intended.
foreach (var ipAddress in loopbackAddresses.Take(1))
{
waitTasks.Add(Task.Run<string>(
async () =>
{
try
{
// Cheap TCP probe first; skip the SignalR handshake if port 5001 is closed.
bool isPortOpen = TryPing(ipAddress.ToString(), 5001, 300);
if (!isPortOpen)
return null;
var connection = new HubConnectionBuilder()
.WithUrl($"http://{ipAddress.ToString()}:5001/hub")
.Build();
await connection.StartAsync(cts.Token);
if (cts.IsCancellationRequested)
return null;
// Won the race: keep the connection and cancel the other strategies.
_useLocalHost = true;
_hubConnection = connection;
cts.Cancel();
return ipAddress.ToString();
}
catch (Exception ex)
{
// Best-effort probe: any failure just means "not found on loopback".
return null;
}
}, cts.Token));
}
// Look for server using broadcast (a real device)
waitTasks.Add(Task.Run<string>(
async () =>
{
// Discover the server
using (UdpClient client = new UdpClient())
{
client.EnableBroadcast = true;
byte[] requestData = Encoding.ASCII.GetBytes($"AreYouTheServer?");
Task<int> sendTask = client.SendAsync(requestData, requestData.Length, new IPEndPoint(IPAddress.Broadcast, 5002));
// 300 ms budget for the send; if it did not complete, give up silently.
await Task.WhenAny(new[] { sendTask, Task.Delay(300) });
if (sendTask.IsCompleted)
{
if (cts.IsCancellationRequested)
return null;
Task<UdpReceiveResult> receiveTask = client.ReceiveAsync();
// 300 ms budget for the reply as well.
await Task.WhenAny(new[] { receiveTask, Task.Delay(300) });
if (receiveTask.IsCompleted)
{
if (cts.IsCancellationRequested)
return null;
UdpReceiveResult serverResponseData = receiveTask.Result;
string serverResponse = Encoding.ASCII.GetString(serverResponseData.Buffer);
if (serverResponse == "YesIamTheServer!")
{
// The hub connection is built later from this address (port 5002 path).
string ipAddress = serverResponseData.RemoteEndPoint.Address.ToString();
_useLocalHost = false;
_hubConnection = null;
cts.Cancel();
return ipAddress.ToString();
}
}
}
client.Close();
}
return null;
}));
// Timeout task
// Gives up discovery after 5 s; cancelled early if another strategy wins.
waitTasks.Add(Task.Run<string>(
async () =>
{
try
{
await Task.Delay(5000, cts.Token);
cts.Cancel();
return null;
}
catch
{
return null;
}
}));
try
{
string ipAddress = await WaitForAnyGetHostIpTaskAsync(waitTasks);
if (ipAddress != null)
{
// Broadcast path: no connection was opened during discovery, build it now.
if (_hubConnection == null)
{
string port = _useLocalHost ? "5001" : "5002";
_hubConnection = new HubConnectionBuilder()
.WithUrl($"http://{ipAddress.ToString()}:{port}/hub")
.Build();
await _hubConnection.StartAsync();
}
_isConnected = true;
_serverAddress = ipAddress;
// Auto-reconnect loop: poll the port and retry every 5 s until restored.
_hubConnection.Closed +=
async (error) =>
{
System.Diagnostics.Debug.WriteLine("Connection with RealXaml has been lost.");
while(_hubConnection.State == HubConnectionState.Disconnected)
{
bool isPortOpen = TryPing(ipAddress.ToString(), 5001, 300);
if (isPortOpen)
{
System.Diagnostics.Debug.WriteLine("Trying to reconnect again...");
await _hubConnection.StartAsync();
if (_hubConnection.State == HubConnectionState.Connected)
{
await Task.Delay(300);
await _hubConnection.SendAsync("NotifyIde", "Connection was lost. Here I'am again.");
System.Diagnostics.Debug.WriteLine($"Successfully restored lost to the RealXaml server.");
break;
}
}
System.Diagnostics.Debug.WriteLine("Unable to connect. Retrying in 5secs.");
await Task.Delay(5000);
}
};
// Server-push handlers for live XAML / assembly reload.
_hubConnection.On<string, byte[], bool>("ReloadXaml",
async (pageId, data, refresh) => await WhenReloadXaml(pageId, data, refresh));
_hubConnection.On<string, byte[]>("Reloadreplacedembly",
async (replacedemblyName, data) => await WhenReloadreplacedembly(replacedemblyName, data));
// Register with a timestamp-derived client id.
string clientId = $"RXID-{DateTime.Now.Ticks}";
await _hubConnection.SendAsync("RegisterClient", clientId);
System.Diagnostics.Debug.WriteLine($"Successfully connected to the RealXaml server.");
System.Diagnostics.Debug.WriteLine($"Your client ID is {clientId}");
return;
}
}
catch(Exception ex)
{
// Discovery/connection failure is logged but not rethrown; _isConnected stays false.
System.Diagnostics.Debug.WriteLine("Error while trying to connect to the RealXaml server.");
System.Diagnostics.Debug.WriteLine(ex);
}
}
19
View Source File : JobDispatcher_HandleNextJobAsync_Test.cs
License : MIT License
Project Creator : AdemCatamak
License : MIT License
Project Creator : AdemCatamak
[Theory]
[InlineData(1, 100)]
[InlineData(4, 100)]
[InlineData(8, 100)]
[InlineData(1, 300)]
[InlineData(4, 300)]
[InlineData(8, 300)]
[InlineData(12, 300)]
[InlineData(16, 300)]
[InlineData(1, 1000)]
[InlineData(4, 1000)]
[InlineData(8, 1000)]
[InlineData(12, 1000)]
[InlineData(16, 1000)]
// Verifies that when several workers drain the queue concurrently, every
// enqueued job is handled exactly once: no job is lost and none is dispatched twice.
public void When_JobHandledCalledWithParallel__InitialJobCountAndJobExecutionCountShouldBeEqual(int parallelJobCount, int initialJobCount)
{
// Arrange: enqueue initialJobCount messages concurrently, then wait for all posts.
List<Task> addTasks = new List<Task>();
for (var i = 0; i < initialJobCount; i++)
{
DummyMessage dummyMessage = new DummyMessage
{
Guid = Guid.NewGuid()
};
var task = _messageStorageClient.AddMessageAsync(dummyMessage);
addTasks.Add(task);
}
Task.WaitAll(addTasks.ToArray());
// Act: start parallelJobCount workers; each drains jobs until HandleNextJobAsync
// reports the queue is empty. The shared counter is incremented atomically.
List<Task> jobProcessorTasks = new List<Task>();
int actualExecutedJobCount = 0;
for (var i = 0; i < parallelJobCount; i++)
{
Task task = Task.Run(async () =>
{
bool jobHandled;
do
{
jobHandled = await _sut.HandleNextJobAsync();
if (jobHandled)
{
Interlocked.Increment(ref actualExecutedJobCount);
}
} while (jobHandled);
}
);
jobProcessorTasks.Add(task);
}
Task.WaitAll(jobProcessorTasks.ToArray());
// Assert: handled-job count must equal the number of jobs enqueued.
string message = $"Parallel Job Count : {parallelJobCount}{Environment.NewLine}" +
$"Expected Executed Job Count : {initialJobCount}{Environment.NewLine}" +
$"Actual Executed Job Count : {actualExecutedJobCount}";
replacedert.Equal(initialJobCount, actualExecutedJobCount);
_output.WriteLine(message);
}
19
View Source File : JobDispatcher_HandleNextJobAsync_Test.cs
License : MIT License
Project Creator : AdemCatamak
License : MIT License
Project Creator : AdemCatamak
[Theory]
[InlineData(1, 100)]
[InlineData(4, 100)]
[InlineData(8, 100)]
[InlineData(1, 300)]
[InlineData(4, 300)]
[InlineData(8, 300)]
[InlineData(12, 300)]
[InlineData(16, 300)]
[InlineData(1, 1000)]
[InlineData(4, 1000)]
[InlineData(8, 1000)]
[InlineData(12, 1000)]
[InlineData(16, 1000)]
// Verifies that when several workers drain the queue concurrently, every
// enqueued job is handled exactly once: no job is lost and none is dispatched twice.
public void When_JobHandledCalledWithParallel__InitialJobCountAndJobExecutionCountShouldBeEqual(int parallelJobCount, int initialJobCount)
{
// Arrange: enqueue initialJobCount messages concurrently, then wait for all posts.
List<Task> addTasks = new List<Task>();
for (var i = 0; i < initialJobCount; i++)
{
DummyMessage dummyMessage = new DummyMessage
{
Guid = Guid.NewGuid()
};
var task = _messageStorageClient.AddMessageAsync(dummyMessage);
addTasks.Add(task);
}
Task.WaitAll(addTasks.ToArray());
// Act: start parallelJobCount workers; each drains jobs until HandleNextJobAsync
// reports the queue is empty. The shared counter is incremented atomically.
List<Task> jobProcessorTasks = new List<Task>();
int actualExecutedJobCount = 0;
for (var i = 0; i < parallelJobCount; i++)
{
Task task = Task.Run(async () =>
{
bool jobHandled;
do
{
jobHandled = await _sut.HandleNextJobAsync();
if (jobHandled)
{
Interlocked.Increment(ref actualExecutedJobCount);
}
} while (jobHandled);
}
);
jobProcessorTasks.Add(task);
}
Task.WaitAll(jobProcessorTasks.ToArray());
// Assert: handled-job count must equal the number of jobs enqueued.
string message = $"Parallel Job Count : {parallelJobCount}{Environment.NewLine}" +
$"Expected Executed Job Count : {initialJobCount}{Environment.NewLine}" +
$"Actual Executed Job Count : {actualExecutedJobCount}";
replacedert.Equal(initialJobCount, actualExecutedJobCount);
_output.WriteLine(message);
}
19
View Source File : JobDispatcherHostedService.cs
License : MIT License
Project Creator : AdemCatamak
License : MIT License
Project Creator : AdemCatamak
/// <summary>
/// Hosted-service entry point: spins up <c>_concurrentExecutionCount</c>
/// background workers, each running <c>InfiniteBackgroundJob</c>, and
/// completes only when all of them finish (i.e. on cancellation).
/// </summary>
protected override async Task ExecuteAsync(CancellationToken stoppingToken)
{
    var workers = new List<Task>();
    for (var workerIndex = 0; workerIndex < _concurrentExecutionCount; workerIndex++)
    {
        workers.Add(Task.Run(async () => { await InfiniteBackgroundJob(stoppingToken); }, stoppingToken));
    }
    await Task.WhenAll(workers.ToArray());
}
19
View Source File : JobDispatcher_HandleNextJobAsync_Test.cs
License : MIT License
Project Creator : AdemCatamak
License : MIT License
Project Creator : AdemCatamak
[Theory]
[InlineData(1, 100)]
[InlineData(4, 100)]
[InlineData(8, 100)]
[InlineData(1, 300)]
[InlineData(4, 300)]
[InlineData(8, 300)]
[InlineData(12, 300)]
[InlineData(16, 300)]
[InlineData(1, 1000)]
[InlineData(4, 1000)]
[InlineData(8, 1000)]
[InlineData(12, 1000)]
[InlineData(16, 1000)]
// Verifies that when several workers drain the queue concurrently, every
// enqueued job is handled exactly once: no job is lost and none is dispatched twice.
public void When_JobHandledCalledWithParallel__InitialJobCountAndJobExecutionCountShouldBeEqual(int parallelJobCount, int initialJobCount)
{
// Arrange: enqueue initialJobCount messages concurrently, then wait for all posts.
List<Task> addTasks = new List<Task>();
for (var i = 0; i < initialJobCount; i++)
{
DummyMessage dummyMessage = new DummyMessage
{
Guid = Guid.NewGuid()
};
var task = _messageStorageClient.AddMessageAsync(dummyMessage);
addTasks.Add(task);
}
Task.WaitAll(addTasks.ToArray());
// Act: start parallelJobCount workers; each drains jobs until HandleNextJobAsync
// reports the queue is empty. The shared counter is incremented atomically.
List<Task> jobProcessorTasks = new List<Task>();
var actualExecutedJobCount = 0;
for (var i = 0; i < parallelJobCount; i++)
{
Task task = Task.Run(async () =>
{
bool jobHandled;
do
{
jobHandled = await _jobDispatcher.HandleNextJobAsync();
if (jobHandled)
{
Interlocked.Increment(ref actualExecutedJobCount);
}
} while (jobHandled);
}
);
jobProcessorTasks.Add(task);
}
Task.WaitAll(jobProcessorTasks.ToArray());
// Assert: handled-job count must equal the number of jobs enqueued.
string message = $"Parallel Job Count : {parallelJobCount}{Environment.NewLine}" +
$"Expected Executed Job Count : {initialJobCount}{Environment.NewLine}" +
$"Actual Executed Job Count : {actualExecutedJobCount}";
replacedert.Equal(initialJobCount, actualExecutedJobCount);
_output.WriteLine(message);
}
19
View Source File : CrossChainMemoryCacheTest.cs
License : MIT License
Project Creator : AElfProject
License : MIT License
Project Creator : AElfProject
[Fact]
// Races five concurrent TryAdd calls, each carrying a distinct odd height,
// then checks the cache's target height advanced by exactly one.
public void TryAdd_Mulreplacedhreads_WithDifferentData()
{
    const int chainId = 123;
    const int initTarget = 1;
    var cache = new ChainCacheEnreplacedy(chainId, initTarget);

    var pending = new List<Task>();
    for (var index = 0; index < 5; index++)
    {
        var captured = index; // per-iteration copy so each closure sees its own value
        pending.Add(Task.Run(() => cache.TryAdd(new SideChainBlockData
        {
            Height = 2 * captured + 1,
            ChainId = chainId,
            TransactionStatusMerkleTreeRoot = HashHelper.ComputeFrom((2 * captured + 1).ToString())
        })));
    }
    Task.WaitAll(pending.ToArray());

    // After the race the target height should have advanced by exactly one.
    replacedert.True(cache.TargetChainHeight() == initTarget + 1);
}
19
View Source File : CrossChainMemoryCacheTest.cs
License : MIT License
Project Creator : AElfProject
License : MIT License
Project Creator : AElfProject
[Fact]
// Races five concurrent TryAdd calls that all submit identical block data,
// then checks the cache's target height advanced only once.
public void TryAdd_Mulreplacedhreads_WithSameData()
{
    const int chainId = 123;
    const int initTarget = 1;
    var cache = new ChainCacheEnreplacedy(chainId, initTarget);

    var pending = new List<Task>();
    for (var attempt = 0; attempt < 5; attempt++)
    {
        pending.Add(Task.Run(() => cache.TryAdd(new SideChainBlockData
        {
            Height = initTarget,
            ChainId = chainId,
            TransactionStatusMerkleTreeRoot = HashHelper.ComputeFrom(initTarget.ToString())
        })));
    }
    Task.WaitAll(pending.ToArray());

    // Duplicate submissions must advance the target height exactly once.
    replacedert.True(cache.TargetChainHeight() == initTarget + 1);
}
19
View Source File : DocumentBasedFixAllProvider.cs
License : Apache License 2.0
Project Creator : agoda-com
License : Apache License 2.0
Project Creator : agoda-com
// Applies the fix-all operation across the given documents: documents with
// diagnostics get their fixed syntax root, others keep their current root.
// Roots are computed concurrently (tasks started in the first loop) and
// awaited in order in the second loop.
private async Task<Solution> GetSolutionFixesAsync(FixAllContext fixAllContext, ImmutableArray<Doreplacedent> doreplacedents)
{
var doreplacedentDiagnosticsToFix = await FixAllContextHelper.GetDoreplacedentDiagnosticsToFixAsync(fixAllContext).ConfigureAwait(false);
var solution = fixAllContext.Solution;
// One pending syntax-root task per document, index-aligned with `doreplacedents`.
var newDoreplacedents = new List<Task<SyntaxNode>>(doreplacedents.Length);
foreach (var doreplacedent in doreplacedents)
{
ImmutableArray<Diagnostic> diagnostics;
if (!doreplacedentDiagnosticsToFix.TryGetValue(doreplacedent, out diagnostics))
{
// No diagnostics for this document: carry its existing root through.
newDoreplacedents.Add(doreplacedent.GetSyntaxRootAsync(fixAllContext.CancellationToken));
continue;
}
newDoreplacedents.Add(FixAllInDoreplacedentAsync(fixAllContext, doreplacedent, diagnostics));
}
for (var i = 0; i < doreplacedents.Length; i++)
{
var newDoreplacedentRoot = await newDoreplacedents[i].ConfigureAwait(false);
// A null root means nothing to apply for this document; leave it unchanged.
if (newDoreplacedentRoot == null)
{
continue;
}
solution = solution.WithDoreplacedentSyntaxRoot(doreplacedents[i].Id, newDoreplacedentRoot);
}
return solution;
}
19
View Source File : DocumentBasedFixAllProvider.cs
License : Apache License 2.0
Project Creator : agoda-com
License : Apache License 2.0
Project Creator : agoda-com
// Applies the fix-all operation across the given documents: documents with
// diagnostics get their fixed syntax root, others keep their current root.
// Roots are computed concurrently (tasks started in the first loop) and
// awaited in order in the second loop.
private async Task<Solution> GetSolutionFixesAsync(FixAllContext fixAllContext, ImmutableArray<Doreplacedent> doreplacedents)
{
var doreplacedentDiagnosticsToFix = await FixAllContextHelper.GetDoreplacedentDiagnosticsToFixAsync(fixAllContext).ConfigureAwait(false);
var solution = fixAllContext.Solution;
// One pending syntax-root task per document, index-aligned with `doreplacedents`.
var newDoreplacedents = new List<Task<SyntaxNode>>(doreplacedents.Length);
foreach (var doreplacedent in doreplacedents)
{
ImmutableArray<Diagnostic> diagnostics;
if (!doreplacedentDiagnosticsToFix.TryGetValue(doreplacedent, out diagnostics))
{
// No diagnostics for this document: carry its existing root through.
newDoreplacedents.Add(doreplacedent.GetSyntaxRootAsync(fixAllContext.CancellationToken));
continue;
}
newDoreplacedents.Add(FixAllInDoreplacedentAsync(fixAllContext, doreplacedent, diagnostics));
}
for (var i = 0; i < doreplacedents.Length; i++)
{
var newDoreplacedentRoot = await newDoreplacedents[i].ConfigureAwait(false);
// A null root means nothing to apply for this document; leave it unchanged.
if (newDoreplacedentRoot == null)
{
continue;
}
solution = solution.WithDoreplacedentSyntaxRoot(doreplacedents[i].Id, newDoreplacedentRoot);
}
return solution;
}
19
View Source File : UserOnlyStore.cs
License : Apache License 2.0
Project Creator : Aguafrommars
License : Apache License 2.0
Project Creator : Aguafrommars
/// <summary>
/// Adds the given claims to the user by posting one user-claim record per
/// claim to the Firebase user-claims table; all posts run in parallel.
/// </summary>
public override async Task AddClaimsAsync(TUser user, IEnumerable<Claim> claims, CancellationToken cancellationToken = default)
{
    cancellationToken.ThrowIfCancellationRequested();
    ThrowIfDisposed();
    replacedertNotNull(user, nameof(user));
    replacedertNotNull(claims, nameof(claims));

    // Materialize the claim entities first so the task list can be sized exactly.
    List<TUserClaim> claimEnreplacedies = claims.Select(c => CreateUserClaim(user, c)).ToList();
    var postTasks = new List<Task>(claimEnreplacedies.Count);
    foreach (var claimEnreplacedy in claimEnreplacedies)
    {
        postTasks.Add(_client.PostAsync(GetFirebasePath(UserClaimsTableName), claimEnreplacedy, cancellationToken));
    }
    await Task.WhenAll(postTasks.ToArray())
        .ConfigureAwait(false);
}
19
View Source File : UserOnlyStore.cs
License : Apache License 2.0
Project Creator : Aguafrommars
License : Apache License 2.0
Project Creator : Aguafrommars
// Removes the given claims from the user's Firebase records. Queries the
// user-claims table by UserId; if Firebase reports a missing index, creates
// the index and retries the query once. Deletions run in parallel.
public async override Task RemoveClaimsAsync(TUser user, IEnumerable<Claim> claims, CancellationToken cancellationToken = default)
{
ThrowIfDisposed();
replacedertNotNull(user, nameof(user));
replacedertNotNull(claims, nameof(claims));
Dictionary<string, TUserClaim> data;
try
{
var response = await _client.GetAsync<Dictionary<string, TUserClaim>>(GetFirebasePath(UserClaimsTableName), cancellationToken, false, $"orderBy=\"UserId\"&equalTo=\"{user.Id}\"")
.ConfigureAwait(false);
data = response.Data;
}
catch (FirebaseException e)
when (e.FirebaseError != null && e.FirebaseError.Error.StartsWith("Index"))
{
// The query needs a server-side index: create it, then retry the same query.
await SetIndex(UserClaimsTableName, new UserClaimIndex(), cancellationToken)
.ConfigureAwait(false);
var response = await _client.GetAsync<Dictionary<string, TUserClaim>>(GetFirebasePath(UserClaimsTableName), cancellationToken, queryString: $"orderBy=\"UserId\"&equalTo=\"{user.Id}\"")
.ConfigureAwait(false);
data = response.Data;
}
if (data != null)
{
var taskList = new List<Task>(claims.Count());
foreach (var claim in claims)
{
// Delete only records whose type AND value match the claim being removed.
var match = data.SingleOrDefault(kv => kv.Value.ClaimType == claim.Type && kv.Value.ClaimValue == claim.Value);
if (match.Key != null)
{
taskList.Add(_client.DeleteAsync(GetFirebasePath(UserClaimsTableName, match.Key), cancellationToken));
}
}
await Task.WhenAll(taskList.ToArray())
.ConfigureAwait(false);
}
}
19
View Source File : UserOnlyStore.cs
License : Apache License 2.0
Project Creator : Aguafrommars
License : Apache License 2.0
Project Creator : Aguafrommars
// Returns all users holding the given claim (matched by type AND value).
// Queries the user-claims table by ClaimType; on a missing-index error,
// creates the index and retries once. User records load concurrently.
public async override Task<IList<TUser>> GetUsersForClaimAsync(Claim claim, CancellationToken cancellationToken = default)
{
cancellationToken.ThrowIfCancellationRequested();
ThrowIfDisposed();
replacedertNotNull(claim, nameof(claim));
Dictionary<string, TUserClaim> data;
try
{
var response = await _client.GetAsync<Dictionary<string, TUserClaim>>(GetFirebasePath(UserClaimsTableName), cancellationToken, false, $"orderBy=\"ClaimType\"&equalTo=\"{claim.Type}\"")
.ConfigureAwait(false);
data = response.Data;
}
catch (FirebaseException e)
when (e.FirebaseError != null && e.FirebaseError.Error.StartsWith("Index"))
{
// The query needs a server-side index: create it, then retry the same query.
await SetIndex(UserClaimsTableName, new UserClaimIndex(), cancellationToken)
.ConfigureAwait(false);
var response = await _client.GetAsync<Dictionary<string, TUserClaim>>(GetFirebasePath(UserClaimsTableName), cancellationToken, queryString: $"orderBy=\"ClaimType\"&equalTo=\"{claim.Type}\"")
.ConfigureAwait(false);
data = response.Data;
}
if (data == null)
{
return new List<TUser>(0);
}
// The server filtered by type; filter by value client-side.
var userIds = data.Values.Where(c => c.ClaimValue == claim.Value).Select(c => c.UserId);
// ConcurrentBag because lookups add results from parallel tasks.
var users = new ConcurrentBag<TUser>();
var taskList = new List<Task>(userIds.Count());
foreach (var userId in userIds)
{
taskList.Add(Task.Run(async () => {
var user = await FindByIdAsync(userId, cancellationToken)
.ConfigureAwait(false);
if (user != null)
{
users.Add(user);
}
}));
}
await Task.WhenAll(taskList.ToArray())
.ConfigureAwait(false);
return users.ToList();
}
19
View Source File : UserStore.cs
License : Apache License 2.0
Project Creator : Aguafrommars
License : Apache License 2.0
Project Creator : Aguafrommars
// Returns the role names assigned to the user. Queries the user-roles table
// by UserId; on a missing-index error, creates the index and retries once.
// Role lookups run in parallel, collecting names into a concurrent bag.
public override async Task<IList<string>> GetRolesAsync(TUser user, CancellationToken cancellationToken = default)
{
cancellationToken.ThrowIfCancellationRequested();
ThrowIfDisposed();
replacedertNotNull(user, nameof(user));
Dictionary<string, TUserRole> userRoles;
try
{
var response = await _client.GetAsync<Dictionary<string, TUserRole>>(GetFirebasePath(UserRolesTableName), cancellationToken, false, $"orderBy=\"UserId\"&equalTo=\"{user.Id}\"")
.ConfigureAwait(false);
userRoles = response.Data;
}
catch (FirebaseException e)
when (e.FirebaseError != null && e.FirebaseError.Error.StartsWith("Index"))
{
// The query needs a server-side index: create it, then retry the same query.
await _userOnlyStore.SetIndex(UserRolesTableName, new UseRoleIndex(), cancellationToken)
.ConfigureAwait(false);
var response = await _client.GetAsync<Dictionary<string, TUserRole>>(GetFirebasePath(UserRolesTableName), cancellationToken, false, $"orderBy=\"UserId\"&equalTo=\"{user.Id}\"")
.ConfigureAwait(false);
userRoles = response.Data;
}
if (userRoles != null)
{
var concurrentBag = new ConcurrentBag<string>();
var taskList = new List<Task>(userRoles.Count);
foreach(var userRole in userRoles.Values)
{
taskList.Add(GetUserRoleAsync(userRole, concurrentBag, cancellationToken));
}
await Task.WhenAll(taskList.ToArray())
.ConfigureAwait(false);
return concurrentBag.ToList();
}
// No role assignments found for this user.
return new List<string>(0);
}
19
View Source File : UserStore.cs
License : Apache License 2.0
Project Creator : Aguafrommars
License : Apache License 2.0
Project Creator : Aguafrommars
// Returns all users assigned to the named role. Resolves the role first,
// then queries the user-roles table by RoleId (creating the index and
// retrying once if missing). User records are loaded concurrently.
public async override Task<IList<TUser>> GetUsersInRoleAsync(string roleName, CancellationToken cancellationToken = default)
{
cancellationToken.ThrowIfCancellationRequested();
ThrowIfDisposed();
replacedertNotNullOrEmpty(roleName, nameof(roleName));
var role = await FindRoleAsync(roleName, cancellationToken)
.ConfigureAwait(false);
if (role != null)
{
Dictionary<string, TUserRole> userRoles;
try
{
var response = await _client.GetAsync<Dictionary<string, TUserRole>>(GetFirebasePath(UserRolesTableName), cancellationToken, false, $"orderBy=\"RoleId\"&equalTo=\"{role.Id}\"")
.ConfigureAwait(false);
userRoles = response.Data;
}
catch (FirebaseException e)
when (e.FirebaseError != null && e.FirebaseError.Error.StartsWith("Index"))
{
// The query needs a server-side index: create it, then retry the same query.
await _userOnlyStore.SetIndex(UserRolesTableName, new UseRoleIndex(), cancellationToken)
.ConfigureAwait(false);
var response = await _client.GetAsync<Dictionary<string, TUserRole>>(GetFirebasePath(UserRolesTableName), cancellationToken, false, $"orderBy=\"RoleId\"&equalTo=\"{role.Id}\"")
.ConfigureAwait(false);
userRoles = response.Data;
}
if (userRoles != null)
{
// ConcurrentBag because parallel lookups add results concurrently.
var concurrencyBag = new ConcurrentBag<TUser>();
var taskList = new List<Task>(userRoles.Count);
foreach(var ur in userRoles.Values)
{
taskList.Add(Task.Run(async () =>
{
var user = await FindByIdAsync(ur.UserId, cancellationToken)
.ConfigureAwait(false);
if (user != null)
{
concurrencyBag.Add(user);
}
}));
}
await Task.WhenAll(taskList.ToArray())
.ConfigureAwait(false);
return concurrencyBag.ToList();
}
}
// Unknown role or no assignments: empty result.
return new List<TUser>(0);
}
19
View Source File : UserOnlyStore.cs
License : Apache License 2.0
Project Creator : Aguafrommars
License : Apache License 2.0
Project Creator : Aguafrommars
/// <summary>
/// Appends the given claims to the user's serialized claim list and updates
/// the per-claim-type hashes used for lookups by claim, writing everything
/// to Redis in parallel.
/// </summary>
public override async Task AddClaimsAsync(TUser user, IEnumerable<Claim> claims, CancellationToken cancellationToken = default)
{
    cancellationToken.ThrowIfCancellationRequested();
    ThrowIfDisposed();
    replacedertNotNull(user, nameof(user));
    replacedertNotNull(claims, nameof(claims));

    var storedClaims = await GetUserClaimsAsync(user).ConfigureAwait(false);
    storedClaims.AddRange(claims.Select(c => CreateUserClaim(user, c)));
    var userId = ConvertIdToString(user.Id);

    var writes = new List<Task>(claims.Count() + 1);
    // Persist the full (merged) claim list under the user's entry.
    writes.Add(_db.HashSetAsync(UserClaimsRedisKey, userId, JsonConvert.SerializeObject(storedClaims)));
    // Maintain one hash per claim type, keyed by user id.
    foreach (var claim in claims)
    {
        writes.Add(_db.HashSetAsync(UserClaimsKeyPrefix + claim.Type, userId, claim.Value));
    }
    await Task.WhenAll(writes).ConfigureAwait(false);
}
19
View Source File : UserOnlyStore.cs
License : Apache License 2.0
Project Creator : Aguafrommars
License : Apache License 2.0
Project Creator : Aguafrommars
/// <summary>
/// Replaces every occurrence of <paramref name="claim"/> (matched by type and
/// value) in the user's claim list with <paramref name="newClaim"/>, updating
/// both the per-claim-type hashes and the serialized claim list in Redis.
/// All writes run in parallel via a single WhenAll.
/// </summary>
public async override Task ReplaceClaimAsync(TUser user, Claim claim, Claim newClaim, CancellationToken cancellationToken = default)
{
    cancellationToken.ThrowIfCancellationRequested();
    ThrowIfDisposed();
    replacedertNotNull(user, nameof(user));
    replacedertNotNull(claim, nameof(claim));
    replacedertNotNull(newClaim, nameof(newClaim));
    var userId = ConvertIdToString(user.Id);
    var userClaims = await GetUserClaimsAsync(user).ConfigureAwait(false);
    var taskList = new List<Task>(3);
    // Fix: removed a stray `await Task.WhenAll(taskList)` that awaited the list
    // while it was still empty — a no-op leftover; the real await is at the end.
    foreach (var uc in userClaims)
    {
        if (uc.ClaimType == claim.Type && uc.ClaimValue == claim.Value)
        {
            // Rewrite the matching entry in place, then move the reverse-lookup
            // hash entry from the old claim type to the new one.
            uc.ClaimType = newClaim.Type;
            uc.ClaimValue = newClaim.Value;
            taskList.Add(_db.HashDeleteAsync(UserClaimsKeyPrefix + claim.Type, userId));
            taskList.Add(_db.HashSetAsync(UserClaimsKeyPrefix + newClaim.Type, userId, newClaim.Value));
        }
    }
    // Persist the (possibly modified) full claim list under the user's entry.
    taskList.Add(_db.HashSetAsync(UserClaimsRedisKey, userId, JsonConvert.SerializeObject(userClaims)));
    await Task.WhenAll(taskList).ConfigureAwait(false);
}
19
View Source File : UserStore.cs
License : Apache License 2.0
Project Creator : Aguafrommars
License : Apache License 2.0
Project Creator : Aguafrommars
/// <summary>
/// Returns every user listed in the role-name index hash for the given role,
/// resolving each indexed user id in parallel.
/// </summary>
public async override Task<IList<TUser>> GetUsersInRoleAsync(string roleName, CancellationToken cancellationToken = default)
{
    cancellationToken.ThrowIfCancellationRequested();
    ThrowIfDisposed();
    replacedertNotNullOrEmpty(roleName, nameof(roleName));

    var indexEntries = await _db.HashGetAllAsync(UserRolesNameIndexKey + roleName)
        .ConfigureAwait(false);

    // Resolve every indexed user id concurrently.
    var lookups = new List<Task<TUser>>(indexEntries.Length);
    foreach (var indexEntry in indexEntries)
    {
        lookups.Add(FindByIdAsync(indexEntry.Name, cancellationToken));
    }
    var resolved = await Task.WhenAll(lookups).ConfigureAwait(false);

    // Ids whose user record could not be found resolve to null and are dropped.
    return resolved.Where(u => u != null).ToList();
}
19
View Source File : UserStore.cs
License : Apache License 2.0
Project Creator : Aguafrommars
License : Apache License 2.0
Project Creator : Aguafrommars
// Returns the names of all roles assigned to the user: loads the user's role
// assignments, resolves each role id to its role record in parallel, and
// projects the surviving (non-null) records to their names.
public override async Task<IList<string>> GetRolesAsync(TUser user, CancellationToken cancellationToken = default)
{
cancellationToken.ThrowIfCancellationRequested();
ThrowIfDisposed();
replacedertNotNull(user, nameof(user));
var userId = ConvertIdToString(user.Id);
var userRoles = await GetUserRolesAsync(userId).ConfigureAwait(false);
// One lookup task per role assignment, started eagerly.
var taskList = new List<Task<TRole>>(userRoles.Count);
foreach(var userRole in userRoles)
{
taskList.Add(FindRoleByIdAsync(ConvertIdToString(userRole.RoleId), cancellationToken));
}
var result = await Task.WhenAll(taskList)
.ConfigureAwait(false);
// Unknown role ids resolve to null and are skipped.
return result.Where(r => r != null)
.Select(r => r.Name)
.ToList();
}
19
View Source File : EntityModel.cs
License : Apache License 2.0
Project Creator : Aguafrommars
License : Apache License 2.0
Project Creator : Aguafrommars
/// <summary>
/// Applies the pending modifications recorded for one entity type in three
/// sequential phases — adds, then updates, then deletes. Within a phase all
/// operations run in parallel; each phase completes before the next begins
/// (preserving the original ordering guarantees).
/// </summary>
private async Task HandleMoficationList(Type enreplacedyType, Dictionary<object, ModificationKind> modificationList)
{
    Logger.LogDebug($"HandleMoficationList for type {enreplacedyType.Name}");

    // One phase: run `operation` for every entity of `kind` in parallel.
    // Extracted to remove the triplicated add/update/delete loop bodies.
    async Task RunPhaseAsync(ModificationKind kind, Func<object, Task> operation)
    {
        var enreplacedies = GetModifiedEnreplacedies(modificationList, kind);
        var taskList = new List<Task>(enreplacedies.Count());
        foreach (var enreplacedy in enreplacedies)
        {
            taskList.Add(operation(enreplacedy));
        }
        await Task.WhenAll(taskList).ConfigureAwait(false);
    }

    await RunPhaseAsync(ModificationKind.Add, e => AddEnreplacedyAsync(enreplacedyType, e)).ConfigureAwait(false);
    await RunPhaseAsync(ModificationKind.Update, e => UpdateEnreplacedyAsync(enreplacedyType, e)).ConfigureAwait(false);
    await RunPhaseAsync(ModificationKind.Delete, e => DeleteAsync(enreplacedyType, e)).ConfigureAwait(false);
}
19
View Source File : UserOnlyStore.cs
License : Apache License 2.0
Project Creator : Aguafrommars
License : Apache License 2.0
Project Creator : Aguafrommars
/// <summary>
/// Removes the given claims from the user's stored claim list and deletes the
/// user's entry from each affected per-claim-type hash, then persists the
/// trimmed claim list. All Redis writes run in parallel.
/// </summary>
public async override Task RemoveClaimsAsync(TUser user, IEnumerable<Claim> claims, CancellationToken cancellationToken = default)
{
    ThrowIfDisposed();
    replacedertNotNull(user, nameof(user));
    replacedertNotNull(claims, nameof(claims));

    var userId = ConvertIdToString(user.Id);
    var storedClaims = await GetUserClaimsAsync(user).ConfigureAwait(false);

    var writes = new List<Task>(claims.Count() + 1);
    foreach (var claim in claims)
    {
        // Drop every stored entry matching this claim's type and value,
        // and clear the user's slot in the matching claim-type hash.
        storedClaims.RemoveAll(uc => uc.ClaimType == claim.Type && uc.ClaimValue == claim.Value);
        writes.Add(_db.HashDeleteAsync(UserClaimsKeyPrefix + claim.Type, userId));
    }
    writes.Add(_db.HashSetAsync(UserClaimsRedisKey, userId, JsonConvert.SerializeObject(storedClaims)));
    await Task.WhenAll(writes).ConfigureAwait(false);
}
19
View Source File : UserOnlyStore.cs
License : Apache License 2.0
Project Creator : Aguafrommars
License : Apache License 2.0
Project Creator : Aguafrommars
/// <summary>
/// Returns every user present in the hash for the given claim's type,
/// loading each candidate user record in parallel.
/// NOTE(review): only the claim type keys the hash; matching on claim value
/// is not performed here — confirm against callers.
/// </summary>
public async override Task<IList<TUser>> GetUsersForClaimAsync(Claim claim, CancellationToken cancellationToken = default)
{
    cancellationToken.ThrowIfCancellationRequested();
    ThrowIfDisposed();
    replacedertNotNull(claim, nameof(claim));

    var entries = await _db.HashGetAllAsync(UserClaimsKeyPrefix + claim.Type)
        .ConfigureAwait(false);

    // Lookups run concurrently; ConcurrentBag keeps the collection thread-safe.
    var matches = new ConcurrentBag<TUser>();
    var lookups = new List<Task>(entries.Length);
    foreach (var entry in entries)
    {
        lookups.Add(Task.Run(async () =>
        {
            var candidate = await FindByIdAsync(entry.Name, cancellationToken)
                .ConfigureAwait(false);
            if (candidate != null)
            {
                matches.Add(candidate);
            }
        }));
    }
    await Task.WhenAll(lookups.ToArray())
        .ConfigureAwait(false);
    return matches.ToList();
}
See More Examples