Here are examples of the C# API System.DateTime.AddHours(double), taken from open source projects. By voting up, you can indicate which examples are most useful and appropriate.
1095 Examples
19
Source : UpdaterCleanup.cs
with MIT License
from bcssov
/// <summary>
/// Removes stale updater directories. A directory is deleted when its update
/// settings file indicates the update finished (or was an installer) and the
/// file has not been written for at least 72 hours; directories without a
/// settings file are deleted unconditionally.
/// </summary>
private static async Task CleanupUpdaterAsync()
{
    foreach (var path in StaticResources.GetUpdaterPath())
    {
        if (!Directory.Exists(path))
        {
            continue;
        }
        var shouldDelete = true;
        var settingsFile = Path.Combine(path, Constants.UpdateSettings);
        if (File.Exists(settingsFile))
        {
            var info = new FileInfo(settingsFile);
            var json = await File.ReadAllTextAsync(settingsFile);
            var settings = JsonConvert.DeserializeObject<UpdateSettings>(json);
            // At least 72 hours since the last update before cleaning up.
            shouldDelete = (settings.Updated || settings.IsInstaller) && info.LastWriteTime <= DateTime.Now.AddHours(-72);
        }
        if (!shouldDelete)
        {
            continue;
        }
        // Small grace period so any lingering updater process can exit.
        await Task.Delay(5000);
        try
        {
            DiskOperations.DeleteDirectory(path, true);
        }
        catch (Exception ex)
        {
            DIResolver.Get<ILogger>().Error(ex);
        }
    }
}
19
Source : DbDistributedLockManagerTests.cs
with MIT License
from BEagle1984
with MIT License
from BEagle1984
[Fact]
public async Task Acquire_ExistingExpiredLockWithDifferentUniqueId_LockIsAcquired()
{
    // Seed an expired lock held by a different owner.
    var dbContext = GetDbContext();
    var expiredLock = new Lock
    {
        Name = "test.resource",
        UniqueId = "other",
        Created = DateTime.UtcNow.AddHours(-2),
        Heartbeat = DateTime.UtcNow.AddHours(-1)
    };
    dbContext.Locks.Add(expiredLock);
    dbContext.SaveChanges();

    // Acquiring with a different unique id must succeed because the lock expired.
    var lockManager = _serviceProvider.GetRequiredService<DbDistributedLockManager>();
    var acquiredLock = await lockManager.AcquireAsync(new DistributedLockSettings("test.resource", "unique"));

    acquiredLock.Should().NotBeNull();
}
19
Source : DbDistributedLockManagerTests.cs
with MIT License
from BEagle1984
with MIT License
from BEagle1984
[Fact]
public async Task Acquire_ExistingExpiredLock_LockIsWrittenToDb()
{
    // Seed an expired, ownerless lock.
    var dbContext = GetDbContext();
    var expiredLock = new Lock
    {
        Name = "test.resource",
        Created = DateTime.UtcNow.AddHours(-2),
        Heartbeat = DateTime.UtcNow.AddHours(-1)
    };
    dbContext.Locks.Add(expiredLock);
    dbContext.SaveChanges();

    var lockManager = _serviceProvider.GetRequiredService<DbDistributedLockManager>();
    await lockManager.AcquireAsync(new DistributedLockSettings("test.resource", "unique"));

    // The single lock row should have been overwritten with a fresh timestamp.
    var verificationContext = GetDbContext();
    verificationContext.Locks.Should().HaveCount(1);
    verificationContext.Locks.Single().Name.Should().Be("test.resource");
    verificationContext.Locks.Single().Created.Should().BeAfter(DateTime.UtcNow.AddSeconds(-2));
}
19
Source : DbDistributedLockManagerTests.cs
with MIT License
from BEagle1984
with MIT License
from BEagle1984
[Fact]
public async Task Acquire_ExistingExpiredLockWithSameUniqueId_LockIsAcquired()
{
    // Seed an expired lock previously held by the same unique id.
    var dbContext = GetDbContext();
    var expiredLock = new Lock
    {
        Name = "test.resource",
        UniqueId = "unique",
        Created = DateTime.UtcNow.AddHours(-2),
        Heartbeat = DateTime.UtcNow.AddHours(-1)
    };
    dbContext.Locks.Add(expiredLock);
    dbContext.SaveChanges();

    var lockManager = _serviceProvider.GetRequiredService<DbDistributedLockManager>();
    var acquiredLock = await lockManager.AcquireAsync(new DistributedLockSettings("test.resource", "unique"));

    acquiredLock.Should().NotBeNull();
}
19
Source : DemoPullRequestSource.cs
with MIT License
from bgianfo
with MIT License
from bgianfo
/// <summary>
/// Produces a pseudo-random DateTime: a random day offset within the
/// configured range from <c>m_start</c>, plus a random time of day.
/// </summary>
public DateTime Next()
{
    var day = m_start.AddDays(m_gen.Next(m_range));
    return day
        .AddHours(m_gen.Next(0, 24))
        .AddMinutes(m_gen.Next(0, 60))
        .AddSeconds(m_gen.Next(0, 60));
}
19
Source : IMonitoringApi.MySql.cs
with MIT License
from bing-framework
with MIT License
from bing-framework
/// <summary>
/// Builds timeline statistics for the last 24 hours.
/// </summary>
/// <param name="tableName">Table to read counters from.</param>
/// <param name="statusName">Status whose counts are aggregated.</param>
/// <returns>Counts keyed by the hourly bucket timestamp.</returns>
private Dictionary<DateTime, int> GetHourlyTimelineStats(string tableName, string statusName)
{
    // Collect 24 hourly buckets, newest first.
    var endDate = DateTime.Now;
    var dates = new List<DateTime>();
    for (var i = 0; i < 24; i++)
    {
        dates.Add(endDate);
        endDate = endDate.AddHours(-1);
    }

    // The formatted strings are storage keys, so they must not vary with the
    // host culture (fixes a CA1305-style latent bug on non-Gregorian cultures).
    var keyMaps = dates.ToDictionary(
        x => x.ToString("yyyy-MM-dd-HH", System.Globalization.CultureInfo.InvariantCulture),
        x => x);

    return GetTimelineStats(tableName, statusName, keyMaps);
}
19
Source : CalenderTime.razor.cs
with MIT License
from BlazorFluentUI
with MIT License
from BlazorFluentUI
/// <summary>
/// Navigates the calendar to the currently selected day combined with the
/// chosen hour, keeping focus on the navigated day.
/// </summary>
private void OnSelectMonth(int newTime)
{
    var target = SelectedDate.Date.AddHours(newTime);
    OnNavigateDate.InvokeAsync(new NavigatedDateResult() { Date = target, FocusOnNavigatedDay = true });
}
19
Source : ValidateHistoryState.cs
with GNU Affero General Public License v3.0
from blockbasenetwork
with GNU Affero General Public License v3.0
from blockbasenetwork
/// <summary>
/// Refreshes the history-validation state for this node: caches other
/// providers' validation block bytes, computes this node's block byte, and
/// determines whether its submission is still valid and fully signed.
/// NOTE(review): several identifiers below (e.g. "_hreplacedubmittedBlockByte")
/// appear scraper-mangled; they must match the field declarations elsewhere
/// in the file, so they are kept as-is.
/// </summary>
protected override async Task UpdateStatus()
{
    _delay = TimeSpan.FromMilliseconds(500);

    _contractState = await _mainchainService.RetrieveContractState(_sidechainPool.ClientAccountName);
    _producerList = await _mainchainService.RetrieveProducersFromTable(_sidechainPool.ClientAccountName);
    _historyValidations = await _mainchainService.RetrieveHistoryValidation(_sidechainPool.ClientAccountName);

    // Preconditions: all chain state must be available before continuing.
    if (_contractState == null) return;
    if (_producerList == null) return;
    if (_historyValidations == null) return;

    // Cache the validation block byte of every other provider once.
    foreach (var historyValidationTable in _historyValidations)
    {
        if (!_blockBytesPerValidationEntryAccount.ContainsKey(historyValidationTable.Account) && historyValidationTable.Account != _nodeConfigurations.AccountName)
        {
            var blockByte = await GetBlockByte(historyValidationTable.BlockHash, _sidechainPool.ClientAccountName);
            _logger.LogDebug($"Calculated provider {historyValidationTable.Account} validation block byte: {blockByte}.");
            _blockBytesPerValidationEntryAccount[historyValidationTable.Account] = blockByte;
        }
    }

    _currentProducerHistoryEntry = _historyValidations.SingleOrDefault(e => e.Account == _nodeConfigurations.AccountName);
    // Simplified from `x != null ? true : false`.
    _hasToSubmitBlockByte = _currentProducerHistoryEntry != null;
    if (_currentProducerHistoryEntry == null) return;

    // From here on _currentProducerHistoryEntry is known to be non-null.
    if (_blockHashToValidate != null)
    {
        _blockHashToValidateHasChanged = _blockHashToValidate != _currentProducerHistoryEntry.BlockHash;
    }
    else
    {
        _blockHashToValidate = _currentProducerHistoryEntry.BlockHash;
    }

    if (_hasToSubmitBlockByte && _blockByteInHex == null)
    {
        _blockByteInHex = await GetBlockByte(_currentProducerHistoryEntry.BlockHash, _sidechainPool.ClientAccountName);
        _logger.LogInformation($"Calculated my validation block byte: {_blockByteInHex}.");
    }

    // A block byte counts as submitted when the stored hex value is non-empty.
    _hreplacedubmittedBlockByte = !string.IsNullOrEmpty(_currentProducerHistoryEntry.BlockByteInHexadecimal);
    if (_hreplacedubmittedBlockByte && !_hasEnoughSignatures)
    {
        _transaction = _currentProducerHistoryEntry.Transaction;
        // The submission only remains valid while its transaction expires
        // within the next hour and has not already expired.
        _hreplacedubmittedBlockByte = _transaction.expiration > DateTime.UtcNow && _transaction.expiration < DateTime.UtcNow.AddHours(1);
        var requestedApprovals = _sidechainPool.ProducersInPool.GetEnumerable().Select(m => m.ProducerInfo.AccountName).OrderBy(p => p).ToList();
        var requiredKeys = _sidechainPool.ProducersInPool.GetEnumerable().Select(m => m.ProducerInfo.PublicKey).Distinct().ToList();
        _hasEnoughSignatures = CheckIfBlockByteHasMajorityOfSignatures(_currentProducerHistoryEntry, requestedApprovals.Count, requiredKeys);
        _packedTransactionAndSignatures = GetPackedTransactionAndSignatures(_currentProducerHistoryEntry, _blockByteInHex, requestedApprovals.Count, requiredKeys);
    }

    _hreplacedignedBlockByte = _currentProducerHistoryEntry?.SignedProducers.Any(p => p == _nodeConfigurations.AccountName) ?? false;

    // Poll faster while waiting for the remaining signatures.
    if (_hasToSubmitBlockByte && _hreplacedubmittedBlockByte && !_hasEnoughSignatures)
    {
        _delay = TimeSpan.FromSeconds(3);
    }
}
19
Source : ValidateHistoryState.cs
with GNU Affero General Public License v3.0
from blockbasenetwork
with GNU Affero General Public License v3.0
from blockbasenetwork
/// <summary>
/// Broadcasts the verify transaction only while its expiration lies within
/// the next hour; otherwise backs off for a minute before retrying.
/// </summary>
private async Task TryBroadcastVerifyTransaction(byte[] packedTransaction, List<string> signatures)
{
    var expired = _transaction.expiration < DateTime.UtcNow;
    var tooFarAhead = _transaction.expiration > DateTime.UtcNow.AddHours(1);
    if (expired || tooFarAhead)
    {
        _delay = TimeSpan.FromSeconds(60);
        return;
    }

    await _mainchainService.BroadcastTransactionWithSignatures(packedTransaction, signatures);
    _logger.LogInformation("Executed history validation");
}
19
Source : CSky_DateTime.cs
with MIT License
from bmjoy
with MIT License
from bmjoy
/// <summary>
/// Returns the simulated date/time. When syncing with the system, the system
/// clock is returned; otherwise the stored year/month/day/timeline components
/// are combined into a DateTime and written back in normalized form.
/// </summary>
private System.DateTime GetDateTime()
{
    if (m_SyncWithSystem)
    {
        return System.DateTime.Now; // Follow the system clock.
    }

    // Build from components; the AddXxx calls normalize any overflow
    // (e.g. day 32 rolls into the next month).
    var dateTime = new System.DateTime(0, System.DateTimeKind.Utc)
        .AddYears(m_Year - 1)
        .AddMonths(m_Month - 1)
        .AddDays(m_Day - 1)
        .AddHours(m_Timeline);

    // Write the normalized date back.
    m_Year = dateTime.Year;
    m_Month = dateTime.Month;
    m_Day = dateTime.Day;
    // Write the normalized timeline back.
    m_Timeline = CSky_DateTimeHelper.TimeToFloat(dateTime.Hour, dateTime.Minute, dateTime.Second, dateTime.Millisecond);
    return dateTime;
}
19
Source : WireboyTimer.cs
with GNU General Public License v3.0
from bobowire
with GNU General Public License v3.0
from bobowire
/// <summary>
/// Starts the background polling loop (no-op if already running). Once per
/// second it fires every task whose execution time matches the current time,
/// then either removes one-shot tasks or reschedules recurring ones.
/// </summary>
public void Start()
{
    m_isCancel = false;
    if (m_task != null)
    {
        return; // Loop already running.
    }

    m_task = m_taskFactory.StartNew(() =>
    {
        // Fixed: original condition was `true && !m_isCancel` (redundant `true`).
        while (!m_isCancel)
        {
            // Snapshot the due tasks so the list can be mutated while firing.
            List<TimerTask> dueTasks = m_taskList.Where(t => MatchTime(t.ExcuteTime, DateTime.Now)).ToList();
            foreach (TimerTask task in dueTasks)
            {
                switch (task.Mode)
                {
                    case 0: // one-shot: remove after firing
                        m_taskList.Remove(task);
                        break;
                    case 1: // daily
                        task.ExcuteTime = task.ExcuteTime.AddDays(1);
                        break;
                    case 2: // hourly
                        task.ExcuteTime = task.ExcuteTime.AddHours(1);
                        break;
                    case 3: // every minute
                        task.ExcuteTime = task.ExcuteTime.AddMinutes(1);
                        break;
                }

                // Capture a stable reference for the asynchronous callback.
                var current = task;
                m_taskFactory.StartNew(() => current.CallBack());
            }
            Thread.Sleep(1000);
        }
        m_task = null;
    });
}
19
Source : CalendarTime.Tests.cs
with MIT License
from bolorundurowb
with MIT License
from bolorundurowb
[Test]
public void CalendarTimeSameDay()
{
    // 2am today should render with the "Today at " prefix.
    var twoAmToday = DateTime.Now.Date.AddHours(2);
    twoAmToday.CalendarTime().ShouldStartWith("Today at ");
}
19
Source : TimeFrom.Tests.cs
with MIT License
from bolorundurowb
with MIT License
from bolorundurowb
[Test]
public void TimeFromADefiniteNumberOfHoursTest()
{
    // 20 hours in the past is still expressed in hours.
    var twentyHoursAgo = DateTime.UtcNow.AddHours(-20);
    twentyHoursAgo.FromNow().ShouldBe("20 hours ago");
}
19
Source : TimeFrom.Tests.cs
with MIT License
from bolorundurowb
with MIT License
from bolorundurowb
[Test]
public void TimeFromHoursThatCanBeRoundedUpOrDownToADayTest()
{
    // 25 hours in the past rounds to a single day.
    var justOverADayAgo = DateTime.UtcNow.AddHours(-25);
    justOverADayAgo.FromNow().ShouldBe("one day ago");
}
19
Source : TimeTo.Tests.cs
with MIT License
from bolorundurowb
with MIT License
from bolorundurowb
[Test]
public void TimeToADefiniteNumberOfHoursTest()
{
    // 20 hours in the future is still expressed in hours.
    var inTwentyHours = DateTime.UtcNow.AddHours(20);
    inTwentyHours.ToNow().ShouldBe("in 20 hours");
}
19
Source : TimeTo.Tests.cs
with MIT License
from bolorundurowb
with MIT License
from bolorundurowb
[Test]
public void TimeToHoursThatCanBeRoundedUpOrDownToADayTest()
{
    // 25 hours in the future rounds to a single day.
    var inJustOverADay = DateTime.UtcNow.AddHours(25);
    inJustOverADay.ToNow().ShouldBe("in one day");
}
19
Source : HttpClient.cs
with GNU General Public License v3.0
from bonarr
with GNU General Public License v3.0
from bonarr
/// <summary>
/// Merges the request's cookies into the shared persistent container, then
/// returns a fresh container holding only the cookies applicable to the
/// request's URL.
/// </summary>
private CookieContainer PrepareRequestCookies(HttpRequest request)
{
    lock (_cookieContainerCache)
    {
        var persistentContainer = _cookieContainerCache.Get("container", () => new CookieContainer());

        foreach (var pair in request.Cookies)
        {
            var cookie = new Cookie(pair.Key, pair.Value, "/", request.Url.Host)
            {
                // Local time (Now, not UtcNow) works around a Mono cookie
                // expiry bug. See https://gist.github.com/ta264/7822b1424f72e5b4c961
                Expires = DateTime.Now.AddHours(1)
            };
            persistentContainer.Add(cookie);
        }

        var applicableCookies = persistentContainer.GetCookies((Uri)request.Url);
        var requestContainer = new CookieContainer();
        requestContainer.Add(applicableCookies);
        return requestContainer;
    }
}
19
Source : HistorySpecification.cs
with GNU General Public License v3.0
from bonarr
with GNU General Public License v3.0
from bonarr
/// <summary>
/// Accepts or rejects a report based on the most recent grab event for the
/// movie: recent grabs (&lt; 12h) and already-satisfied cutoffs/qualities are
/// rejected; searches always bypass the check.
/// </summary>
public virtual Decision IsSatisfiedBy(RemoteMovie subject, SearchCriteriaBase searchCriteria)
{
    if (searchCriteria != null)
    {
        _logger.Debug("Skipping history check during search");
        return Decision.Accept();
    }

    var cdhEnabled = _configService.EnableCompletedDownloadHandling;

    _logger.Debug("Performing history status check on report");
    _logger.Debug("Checking current status of episode [{0}] in history", subject.Movie.Id);
    var lastGrab = _historyService.MostRecentForMovie(subject.Movie.Id);

    // No relevant history -> nothing to compare against.
    if (lastGrab == null || lastGrab.EventType != HistoryEventType.Grabbed)
    {
        return Decision.Accept();
    }

    var isRecent = lastGrab.Date.After(DateTime.UtcNow.AddHours(-12));
    var cutoffUnmet = _qualityUpgradableSpecification.CutoffNotMet(subject.Movie.Profile, lastGrab.Quality, subject.ParsedMovieInfo.Quality);
    var upgradeable = _qualityUpgradableSpecification.IsUpgradable(subject.Movie.Profile, lastGrab.Quality, subject.ParsedMovieInfo.Quality);

    // Old grabs are fine when completed download handling will pick them up.
    if (!isRecent && cdhEnabled)
    {
        return Decision.Accept();
    }

    if (!cutoffUnmet)
    {
        return isRecent
            ? Decision.Reject("Recent grab event in history already meets cutoff: {0}", lastGrab.Quality)
            : Decision.Reject("CDH is disabled and grab event in history already meets cutoff: {0}", lastGrab.Quality);
    }

    if (!upgradeable)
    {
        return isRecent
            ? Decision.Reject("Recent grab event in history is of equal or higher quality: {0}", lastGrab.Quality)
            : Decision.Reject("CDH is disabled and grab event in history is of equal or higher quality: {0}", lastGrab.Quality);
    }

    return Decision.Accept();
}
19
Source : HistorySpecification.cs
with GNU General Public License v3.0
from bonarr
with GNU General Public License v3.0
from bonarr
/// <summary>
/// Accepts or rejects a report by checking the most recent grab event of every
/// episode it covers; a single offending episode rejects the whole report.
/// Searches always bypass the check.
/// </summary>
public virtual Decision IsSatisfiedBy(RemoteEpisode subject, SearchCriteriaBase searchCriteria)
{
    if (searchCriteria != null)
    {
        _logger.Debug("Skipping history check during search");
        return Decision.Accept();
    }

    var cdhEnabled = _configService.EnableCompletedDownloadHandling;

    _logger.Debug("Performing history status check on report");
    foreach (var episode in subject.Episodes)
    {
        _logger.Debug("Checking current status of episode [{0}] in history", episode.Id);
        var lastGrab = _historyService.MostRecentForEpisode(episode.Id);

        // No relevant history for this episode -> check the next one.
        if (lastGrab == null || lastGrab.EventType != HistoryEventType.Grabbed)
        {
            continue;
        }

        var isRecent = lastGrab.Date.After(DateTime.UtcNow.AddHours(-12));
        var cutoffUnmet = _qualityUpgradableSpecification.CutoffNotMet(subject.Series.Profile, lastGrab.Quality, subject.ParsedEpisodeInfo.Quality);
        var upgradeable = _qualityUpgradableSpecification.IsUpgradable(subject.Series.Profile, lastGrab.Quality, subject.ParsedEpisodeInfo.Quality);

        // Old grabs are fine when completed download handling will pick them up.
        if (!isRecent && cdhEnabled)
        {
            continue;
        }

        if (!cutoffUnmet)
        {
            return isRecent
                ? Decision.Reject("Recent grab event in history already meets cutoff: {0}", lastGrab.Quality)
                : Decision.Reject("CDH is disabled and grab event in history already meets cutoff: {0}", lastGrab.Quality);
        }

        if (!upgradeable)
        {
            return isRecent
                ? Decision.Reject("Recent grab event in history is of equal or higher quality: {0}", lastGrab.Quality)
                : Decision.Reject("CDH is disabled and grab event in history is of equal or higher quality: {0}", lastGrab.Quality);
        }
    }

    return Decision.Accept();
}
19
Source : ShouldRefreshMovie.cs
with GNU General Public License v3.0
from bonarr
with GNU General Public License v3.0
from bonarr
/// <summary>
/// Decides whether the movie's metadata is due for a refresh based on the
/// last sync time, release status, and physical release date.
/// NOTE(review): member name "replacedle" appears scraper-mangled but must
/// match its declaration elsewhere, so it is kept as-is.
/// </summary>
public bool ShouldRefresh(Movie movie)
{
    var utcNow = DateTime.UtcNow;

    // Stale: not synced for over 30 days.
    if (movie.LastInfoSync < utcNow.AddDays(-30))
    {
        _logger.Trace("Movie {0} last updated more than 30 days ago, should refresh.", movie.replacedle);
        return true;
    }

    // Fresh: synced within the last 6 hours.
    if (movie.LastInfoSync >= utcNow.AddHours(-6))
    {
        _logger.Trace("Movie {0} last updated less than 6 hours ago, should not be refreshed.", movie.replacedle);
        return false;
    }

    // Upcoming or in-cinema titles still change frequently.
    if (movie.Status == MovieStatusType.Announced || movie.Status == MovieStatusType.InCinemas)
    {
        _logger.Trace("Movie {0} is announced or in cinemas, should refresh.", movie.replacedle); //We probably have to change this.
        return true;
    }

    // Recently released titles (physical release within 30 days) get refreshed.
    if (movie.Status == MovieStatusType.Released && movie.PhysicalReleaseDate() >= utcNow.AddDays(-30))
    {
        _logger.Trace("Movie {0} is released since less than 30 days, should refresh", movie.replacedle);
        return true;
    }

    _logger.Trace("Movie {0} came out long ago, should not be refreshed.", movie.replacedle);
    return false;
}
19
Source : ShouldRefreshSeries.cs
with GNU General Public License v3.0
from bonarr
with GNU General Public License v3.0
from bonarr
/// <summary>
/// Decides whether the series' metadata is due for a refresh based on the
/// last sync time, its status, and the most recent episode air date.
/// NOTE(review): member name "replacedle" appears scraper-mangled but must
/// match its declaration elsewhere, so it is kept as-is.
/// </summary>
public bool ShouldRefresh(Series series)
{
    var utcNow = DateTime.UtcNow;

    // Stale: not synced for over 30 days.
    if (series.LastInfoSync < utcNow.AddDays(-30))
    {
        _logger.Trace("Series {0} last updated more than 30 days ago, should refresh.", series.replacedle);
        return true;
    }

    // Fresh: synced within the last 6 hours.
    if (series.LastInfoSync >= utcNow.AddHours(-6))
    {
        _logger.Trace("Series {0} last updated less than 6 hours ago, should not be refreshed.", series.replacedle);
        return false;
    }

    if (series.Status == SeriesStatusType.Continuing)
    {
        _logger.Trace("Series {0} is continuing, should refresh.", series.replacedle);
        return true;
    }

    // Ended series still refresh while their last episode is under 30 days old.
    var lastEpisode = _episodeService.GetEpisodeBySeries(series.Id).OrderByDescending(e => e.AirDateUtc).FirstOrDefault();
    if (lastEpisode != null && lastEpisode.AirDateUtc > utcNow.AddDays(-30))
    {
        _logger.Trace("Last episode in {0} aired less than 30 days ago, should refresh.", series.replacedle);
        return true;
    }

    _logger.Trace("Series {0} ended long ago, should not be refreshed.", series.replacedle);
    return false;
}
19
Source : DelaySpecificationFixture.cs
with GNU General Public License v3.0
from bonarr
with GNU General Public License v3.0
from bonarr
[Test]
public void should_be_true_when_release_is_older_than_delay()
{
    // Published 10 hours ago with only a 60-minute delay -> accepted.
    var publishDate = DateTime.UtcNow.AddHours(-10);
    _delayProfile.UsenetDelay = 60;
    _remoteEpisode.ParsedMovieInfo.Quality = new QualityModel(Quality.HDTV720p);
    _remoteEpisode.Release.PublishDate = publishDate;

    var decision = Subject.IsSatisfiedBy(_remoteEpisode, null);
    decision.Accepted.Should().BeTrue();
}
19
Source : HistorySpecificationFixture.cs
with GNU General Public License v3.0
from bonarr
with GNU General Public License v3.0
from bonarr
[Test]
public void should_return_true_if_latest_history_item_is_older_than_twelve_hours()
{
    // Grabbed 13 hours ago -> outside the 12-hour "recent" window.
    var thirteenHoursAgo = DateTime.UtcNow.AddHours(-13);
    GivenMostRecentForEpisode(FIRST_EPISODE_ID, string.Empty, _notupgradableQuality, thirteenHoursAgo, HistoryEventType.Grabbed);

    var decision = _upgradeHistory.IsSatisfiedBy(_parseResultMulti, null);
    decision.Accepted.Should().BeTrue();
}
19
Source : HistorySpecificationFixture.cs
with GNU General Public License v3.0
from bonarr
with GNU General Public License v3.0
from bonarr
[Test]
public void should_return_false_if_latest_history_item_is_only_one_hour_old()
{
    // Grabbed one hour ago -> inside the 12-hour "recent" window.
    var oneHourAgo = DateTime.UtcNow.AddHours(-1);
    GivenMostRecentForEpisode(FIRST_EPISODE_ID, string.Empty, _notupgradableQuality, oneHourAgo, HistoryEventType.Grabbed);

    var decision = _upgradeHistory.IsSatisfiedBy(_parseResultMulti, null);
    decision.Accepted.Should().BeFalse();
}
19
Source : PendingReleaseServiceFixture.cs
with GNU General Public License v3.0
from bonarr
with GNU General Public License v3.0
from bonarr
[Test]
public void should_ignore_pending_items_from_unavailable_indexer()
{
    // Indexer 1 remains blocked for another two hours.
    var blocked = new IndexerStatus { IndexerId = 1, DisabledTill = DateTime.UtcNow.AddHours(2) };
    Mocker.GetMock<IIndexerStatusService>()
          .Setup(v => v.GetBlockedIndexers())
          .Returns(new List<IndexerStatus> { blocked });
    GivenPendingRelease();

    var results = Subject.GetPending();

    results.Should().BeEmpty();
}
19
Source : IndexerStatusCheckFixture.cs
with GNU General Public License v3.0
from bonarr
with GNU General Public License v3.0
from bonarr
/// <summary>
/// Registers a mocked indexer and, when a backoff is requested, records a
/// matching blocked-indexer status with the given failure/backoff timings.
/// </summary>
private Mock<IIndexer> GivenIndexer(int i, double backoffHours, double failureHours)
{
    var id = i;
    var indexerMock = new Mock<IIndexer>();
    indexerMock.SetupGet(s => s.Definition).Returns(new IndexerDefinition { Id = id });
    indexerMock.SetupGet(s => s.SupportsSearch).Returns(true);
    _indexers.Add(indexerMock.Object);

    if (backoffHours != 0.0)
    {
        var status = new IndexerStatus
        {
            IndexerId = id,
            InitialFailure = DateTime.UtcNow.AddHours(-failureHours),
            MostRecentFailure = DateTime.UtcNow.AddHours(-0.1),
            EscalationLevel = 5,
            DisabledTill = DateTime.UtcNow.AddHours(backoffHours)
        };
        _blockedIndexers.Add(status);
    }

    return indexerMock;
}
19
Source : FixFutureRunScheduledTasksFixture.cs
with GNU General Public License v3.0
from bonarr
with GNU General Public License v3.0
from bonarr
[Test]
public void should_not_change_last_execution_time_when_its_in_the_past()
{
    // Seed five tasks that last ran an hour ago.
    var lastExecution = DateTime.UtcNow.AddHours(-1);
    var tasks = Builder<ScheduledTask>.CreateListOfSize(5)
        .All()
        .With(t => t.LastExecution = lastExecution)
        .BuildListOfNew();
    Db.InsertMany(tasks);

    Subject.Clean();

    // Past execution times must be left untouched.
    AllStoredModels.ForEach(t => t.LastExecution.Should().Be(lastExecution));
}
19
Source : NotUnpackingSpecificationFixture.cs
with GNU General Public License v3.0
from bonarr
with GNU General Public License v3.0
from bonarr
[Test]
public void should_return_true_when_in_old_working_folder()
{
    WindowsOnly();
    GivenInWorkingFolder();
    // Last written an hour ago -> no longer being unpacked.
    GivenLastWriteTimeUtc(DateTime.UtcNow.AddHours(-1));

    var decision = Subject.IsSatisfiedBy(_localEpisode);
    decision.Accepted.Should().BeTrue();
}
19
Source : ShouldRefreshSeriesFixture.cs
with GNU General Public License v3.0
from bonarr
with GNU General Public License v3.0
from bonarr
// Arranges the series' last sync to half a day (12 hours) in the past.
private void GivenSeriesLastRefreshedHalfADayAgo()
{
    _series.LastInfoSync = DateTime.UtcNow - TimeSpan.FromHours(12);
}
19
Source : ShouldRefreshSeriesFixture.cs
with GNU General Public License v3.0
from bonarr
with GNU General Public License v3.0
from bonarr
// Arranges the series' last sync to one hour in the past.
private void GivenSeriesLastRefreshedRecently()
{
    _series.LastInfoSync = DateTime.UtcNow - TimeSpan.FromHours(1);
}
19
Source : StrictEntityToType.cs
with MIT License
from BotBuilderCommunity
with MIT License
from BotBuilderCommunity
/// <summary>
/// Lazily enumerates the [start, after) DateTime ranges that satisfy a
/// (possibly partial) date/time resolution, scanning forward from
/// <paramref name="now"/>. PAST/PRESENT/FUTURE references short-circuit to a
/// single range. For each candidate, every specified component (year, month,
/// week, day-of-week, day, day-part, hour, minute, second) is advanced and
/// rounded until all components match; the matched range's width is the
/// granularity of the finest specified component.
/// NOTE(review): enumeration is unbounded toward DateTime.MaxValue when the
/// resolution can never match again (except the explicit year cutoff) — see
/// the TODO about clamping below.
/// </summary>
/// <param name="resolution">Partial date/time spec; negative/null components are unconstrained.</param>
/// <param name="now">Reference time; ranges before it are skipped.</param>
/// <param name="calendar">Calendar used for week-of-year computation.</param>
/// <param name="rule">Week rule for week-of-year computation.</param>
/// <param name="firstDayOfWeek">First day of the week for week alignment.</param>
/// <param name="hourFor">Maps a DayPart to its starting hour.</param>
public static IEnumerable<Range<DateTime>> Interpret(DateTimeResolution resolution, DateTime now, Calendar calendar, CalendarWeekRule rule, DayOfWeek firstDayOfWeek, Func<DayPart, int> hourFor)
{
// remove any millisecond components
now = new DateTime(now.Year, now.Month, now.Day, now.Hour, now.Minute, now.Second, now.Kind);
// Symbolic references resolve to a single open-ended or degenerate range.
switch (resolution.Reference)
{
case Reference.PAST_REF:
yield return Range.From(DateTime.MinValue, now);
yield break;
case Reference.PRESENT_REF:
yield return Range.From(now, now);
yield break;
case Reference.FUTURE_REF:
yield return Range.From(now, DateTime.MaxValue);
yield break;
case null:
break;
default:
throw new NotImplementedException();
}
var start = now;
// TODO: maybe clamp to prevent divergence
while (start < DateTime.MaxValue)
{
var after = start;
while (true)
{
// for each date component in decreasing order of significance:
// if it's not a variable (-1) or missing (null) component, then
// add a unit of that component to "start"
// round down to the component's granularity
// calculate the "after" based on the size of that component
if (resolution.Year >= 0)
{
bool need = start.Year != resolution.Year;
if (need)
{
start = start.AddYears(1);
start = new DateTime(start.Year, 1, 1, 0, 0, 0, 0, start.Kind);
}
// Year is the only component that can prove no further match exists.
if (start.Year > resolution.Year)
{
yield break;
}
after = start.AddYears(1);
if (need)
{
continue;
}
}
if (resolution.Month >= 0)
{
bool need = start.Month != resolution.Month;
if (need)
{
start = start.AddMonths(1);
start = new DateTime(start.Year, start.Month, 1, 0, 0, 0, 0, start.Kind);
}
after = start.AddMonths(1);
if (need)
{
continue;
}
}
var week = calendar.GetWeekOfYear(start, rule, firstDayOfWeek);
if (resolution.Week >= 0)
{
bool need = week != resolution.Week;
if (need)
{
start = start.AddDays(7);
start = new DateTime(start.Year, start.Month, start.Day, 0, 0, 0, 0, start.Kind);
// Align back to the first day of the week after rounding.
while (start.DayOfWeek != firstDayOfWeek)
{
start = start.AddDays(-1);
}
}
after = start.AddDays(7);
if (need)
{
continue;
}
}
if (resolution.DayOfWeek != null)
{
bool need = start.DayOfWeek != resolution.DayOfWeek;
if (need)
{
start = start.AddDays(1);
start = new DateTime(start.Year, start.Month, start.Day, 0, 0, 0, 0, start.Kind);
}
after = start.AddDays(1);
if (need)
{
continue;
}
}
if (resolution.Day >= 0)
{
bool need = start.Day != resolution.Day;
if (need)
{
start = start.AddDays(1);
start = new DateTime(start.Year, start.Month, start.Day, 0, 0, 0, 0, start.Kind);
}
after = start.AddDays(1);
if (need)
{
continue;
}
}
if (resolution.DayPart != null)
{
// A day part spans from its start hour to the next part's start hour,
// wrapping past midnight when necessary.
var hourStart = hourFor(resolution.DayPart.Value);
var hourAfter = hourFor(resolution.DayPart.Value.Next());
var hourDelta = hourAfter - hourStart;
if (hourDelta < 0)
{
hourDelta += 24;
}
bool need = start.Hour != hourStart;
if (need)
{
start = start.AddHours(1);
start = new DateTime(start.Year, start.Month, start.Day, start.Hour, 0, 0, 0, start.Kind);
}
after = start.AddHours(hourDelta);
if (need)
{
continue;
}
}
if (resolution.Hour >= 0)
{
bool need = start.Hour != resolution.Hour;
if (need)
{
start = start.AddHours(1);
start = new DateTime(start.Year, start.Month, start.Day, start.Hour, 0, 0, 0, start.Kind);
}
after = start.AddHours(1);
if (need)
{
continue;
}
}
if (resolution.Minute >= 0)
{
bool need = start.Minute != resolution.Minute;
if (need)
{
start = start.AddMinutes(1);
start = new DateTime(start.Year, start.Month, start.Day, start.Hour, start.Minute, 0, 0, start.Kind);
}
after = start.AddMinutes(1);
if (need)
{
continue;
}
}
if (resolution.Second >= 0)
{
bool need = start.Second != resolution.Second;
if (need)
{
start = start.AddSeconds(1);
start = new DateTime(start.Year, start.Month, start.Day, start.Hour, start.Minute, start.Second, 0, start.Kind);
}
after = start.AddSeconds(1);
if (need)
{
continue;
}
}
// if all of the components were variable or missing,
// then in order of increasing component granularity,
// if the component is variable rather than missing, then increment by that granularity
if (start == after)
{
if (resolution.Second < 0)
{
after = start.AddSeconds(1);
}
else if (resolution.Minute < 0)
{
after = start.AddMinutes(1);
}
else if (resolution.Hour < 0)
{
after = start.AddHours(1);
}
else if (resolution.Day < 0)
{
after = start.AddDays(1);
}
else if (resolution.Week < 0)
{
after = start.AddDays(7);
}
else if (resolution.Month < 0)
{
after = start.AddMonths(1);
}
else if (resolution.Year < 0)
{
after = start.AddYears(1);
}
else
{
// a second is our minimum granularity
after = start.AddSeconds(1);
}
}
// Only yield ranges at or after the reference time.
if (start >= now)
{
yield return new Range<DateTime>(start, after);
}
// Advance to the next candidate window.
start = after;
}
}
}
}
19
Source : AppService.cs
with MIT License
from btcpayserver
with MIT License
from btcpayserver
/// <summary>
/// Builds the crowdfund view model for an app: computes the current reset
/// window (last/next reset dates), aggregates contributions from invoices in
/// that window, counts and optionally values purchased perks, and maps all
/// settings onto the view model.
/// NOTE(review): many identifiers below appear scraper-mangled
/// ("enreplacedy" ~ "entity", "replacedle" ~ "title"); they must match their
/// declarations elsewhere in the project and are therefore kept verbatim.
/// </summary>
private async Task<ViewCrowdfundViewModel> GetInfo(AppData appData)
{
var settings = appData.GetSettings<CrowdfundSettings>();
var resetEvery = settings.StartDate.HasValue ? settings.ResetEvery : CrowdfundResetEvery.Never;
DateTime? lastResetDate = null;
DateTime? nextResetDate = null;
if (resetEvery != CrowdfundResetEvery.Never)
{
// Walk forward from the start date until the next reset lies in the future.
lastResetDate = settings.StartDate.Value;
nextResetDate = lastResetDate.Value;
while (DateTime.Now >= nextResetDate)
{
lastResetDate = nextResetDate;
switch (resetEvery)
{
case CrowdfundResetEvery.Hour:
nextResetDate = lastResetDate.Value.AddHours(settings.ResetEveryAmount);
break;
case CrowdfundResetEvery.Day:
nextResetDate = lastResetDate.Value.AddDays(settings.ResetEveryAmount);
break;
case CrowdfundResetEvery.Month:
nextResetDate = lastResetDate.Value.AddMonths(settings.ResetEveryAmount);
break;
case CrowdfundResetEvery.Year:
nextResetDate = lastResetDate.Value.AddYears(settings.ResetEveryAmount);
break;
}
}
}
// Only invoices since the last reset count toward the current goal.
var invoices = await GetInvoicesForApp(appData, lastResetDate);
var completeInvoices = invoices.Where(enreplacedy => enreplacedy.Status == InvoiceStatusLegacy.Complete || enreplacedy.Status == InvoiceStatusLegacy.Confirmed).ToArray();
var pendingInvoices = invoices.Where(enreplacedy => !(enreplacedy.Status == InvoiceStatusLegacy.Complete || enreplacedy.Status == InvoiceStatusLegacy.Confirmed)).ToArray();
var paidInvoices = invoices.Where(enreplacedy => enreplacedy.Status == InvoiceStatusLegacy.Complete || enreplacedy.Status == InvoiceStatusLegacy.Confirmed || enreplacedy.Status == InvoiceStatusLegacy.Paid).ToArray();
var pendingPayments = GetContributionsByPaymentMethodId(settings.TargetCurrency, pendingInvoices, !settings.EnforceTargetAmount);
var currentPayments = GetContributionsByPaymentMethodId(settings.TargetCurrency, completeInvoices, !settings.EnforceTargetAmount);
// Count purchases per perk item code.
var perkCount = paidInvoices
.Where(enreplacedy => !string.IsNullOrEmpty(enreplacedy.Metadata.ItemCode))
.GroupBy(enreplacedy => enreplacedy.Metadata.ItemCode)
.ToDictionary(enreplacedies => enreplacedies.Key, enreplacedies => enreplacedies.Count());
Dictionary<string, decimal> perkValue = new Dictionary<string, decimal>();
if (settings.DisplayPerksValue)
{
// Sum the fiat value of payments per perk, net of network fees,
// converted at each payment method's invoice rate.
perkValue = paidInvoices
.Where(enreplacedy => enreplacedy.Currency.Equals(settings.TargetCurrency, StringComparison.OrdinalIgnoreCase) && !string.IsNullOrEmpty(enreplacedy.Metadata.ItemCode))
.GroupBy(enreplacedy => enreplacedy.Metadata.ItemCode)
.ToDictionary(enreplacedies => enreplacedies.Key, enreplacedies =>
enreplacedies.Sum(enreplacedy => enreplacedy.GetPayments(true).Sum(pay =>
{
var paymentMethodId = pay.GetPaymentMethodId();
var value = pay.GetCryptoPaymentData().GetValue() - pay.NetworkFee;
var rate = enreplacedy.GetPaymentMethod(paymentMethodId).Rate;
return rate * value;
})));
}
var perks = Parse(settings.PerksTemplate, settings.TargetCurrency);
if (settings.SortPerksByPopularity)
{
// Most-purchased perks first; unsold perks keep their template order.
var ordered = perkCount.OrderByDescending(pair => pair.Value);
var newPerksOrder = ordered
.Select(keyValuePair => perks.SingleOrDefault(item => item.Id == keyValuePair.Key))
.Where(matchingPerk => matchingPerk != null)
.ToList();
var remainingPerks = perks.Where(item => !newPerksOrder.Contains(item));
newPerksOrder.AddRange(remainingPerks);
perks = newPerksOrder.ToArray();
}
return new ViewCrowdfundViewModel
{
replacedle = settings.replacedle,
Tagline = settings.Tagline,
Description = settings.Description,
CustomCSSLink = settings.CustomCSSLink,
MainImageUrl = settings.MainImageUrl,
EmbeddedCSS = settings.EmbeddedCSS,
StoreId = appData.StoreDataId,
AppId = appData.Id,
StartDate = settings.StartDate?.ToUniversalTime(),
EndDate = settings.EndDate?.ToUniversalTime(),
TargetAmount = settings.TargetAmount,
TargetCurrency = settings.TargetCurrency,
EnforceTargetAmount = settings.EnforceTargetAmount,
Perks = perks,
Enabled = settings.Enabled,
DisqusEnabled = settings.DisqusEnabled,
SoundsEnabled = settings.SoundsEnabled,
DisqusShortname = settings.DisqusShortname,
AnimationsEnabled = settings.AnimationsEnabled,
ResetEveryAmount = settings.ResetEveryAmount,
ResetEvery = Enum.GetName(typeof(CrowdfundResetEvery), settings.ResetEvery),
DisplayPerksRanking = settings.DisplayPerksRanking,
PerkCount = perkCount,
PerkValue = perkValue,
NeverReset = settings.ResetEvery == CrowdfundResetEvery.Never,
Sounds = settings.Sounds,
AnimationColors = settings.AnimationColors,
CurrencyData = _Currencies.GetCurrencyData(settings.TargetCurrency, true),
CurrencyDataPayments = currentPayments.Select(pair => pair.Key)
.Concat(pendingPayments.Select(pair => pair.Key))
.Select(id => _Currencies.GetCurrencyData(id.CryptoCode, true))
.DistinctBy(data => data.Code)
.ToDictionary(data => data.Code, data => data),
Info = new CrowdfundInfo
{
TotalContributors = paidInvoices.Length,
ProgressPercentage = (currentPayments.TotalCurrency / settings.TargetAmount) * 100,
PendingProgressPercentage = (pendingPayments.TotalCurrency / settings.TargetAmount) * 100,
LastUpdated = DateTime.Now,
PaymentStats = currentPayments.ToDictionary(c => c.Key.ToString(), c => c.Value.Value),
PendingPaymentStats = pendingPayments.ToDictionary(c => c.Key.ToString(), c => c.Value.Value),
LastResetDate = lastResetDate,
NextResetDate = nextResetDate,
CurrentPendingAmount = pendingPayments.TotalCurrency,
CurrentAmount = currentPayments.TotalCurrency
}
};
}
19
Source : TeamShiftScheduler.cs
with MIT License
from bsurprised
with MIT License
from bsurprised
/// <summary>
/// Builds up to <paramref name="maxSolutions"/> shift schedules with Google OR-Tools
/// constraint programming: every day exactly <paramref name="teamSize"/> employees work
/// the day shift and <paramref name="teamSize"/> the night shift, legal shift successions
/// are enforced by the <paramref name="ruleSet"/> DFA, and each employee works at least
/// <paramref name="minShiftsPerCycle"/> shifts over the cycle.
/// </summary>
/// <param name="ruleSet">DFA (initial state, accepting states, transition tuples) describing legal shift sequences.</param>
/// <param name="shiftEmployees">Employees to schedule; must contain at least one entry.</param>
/// <param name="startDate">First day of the schedule (time-of-day is ignored).</param>
/// <param name="numberOfDays">Number of days in the scheduling cycle.</param>
/// <param name="teamSize">Employees required on each day shift and on each night shift.</param>
/// <param name="minShiftsPerCycle">Minimum worked shifts per employee over the cycle.</param>
/// <param name="startHour">Hour (0-12) at which the day shift starts.</param>
/// <param name="shiftHours">Length of one shift in hours (1-12).</param>
/// <param name="maxSolutions">Maximum number of alternative schedules to return.</param>
/// <returns>A completed task holding the schedules found (possibly empty; see <c>LastError</c>).</returns>
/// <exception cref="ArgumentOutOfRangeException">Thrown when any argument is outside its documented range.</exception>
public Task<List<Schedule>> CreateNewScheduleAsync(
    RuleSet ruleSet,
    IList<Employee> shiftEmployees,
    DateTime startDate,
    int numberOfDays,
    int teamSize,
    int minShiftsPerCycle,
    int startHour,
    int shiftHours,
    int maxSolutions = 1)
{
    // Some sanity checks
    if (ruleSet == null)
        throw new ArgumentOutOfRangeException(nameof(ruleSet), "Rule set is empty.");
    if (shiftEmployees == null || shiftEmployees.Count < 1)
        throw new ArgumentOutOfRangeException(nameof(shiftEmployees), "Employee collection is empty.");
    if (numberOfDays < 1)
        throw new ArgumentOutOfRangeException(nameof(numberOfDays), "Invalid number for days in a cycle.");
    if (teamSize < 1)
        throw new ArgumentOutOfRangeException(nameof(teamSize), "Invalid number for employees in each shift.");
    if (minShiftsPerCycle < 0)
        throw new ArgumentOutOfRangeException(nameof(minShiftsPerCycle), "Invalid number for minimum shifts per cycle.");
    if (startHour < 0 || startHour > 12)
        throw new ArgumentOutOfRangeException(nameof(startHour),
            "Starting hour is bigger than expected. Please provide a number between 0-12");
    if (shiftHours < 1 || shiftHours > 12)
        throw new ArgumentOutOfRangeException(nameof(shiftHours),
            "Shift hours cannot be bigger than twelve. Please provide a number between 1-12");
    var numberOfEmployees = shiftEmployees.Count;
    LastError = null;
    AddDiagnostics("Starting to solve a new schedule\n\n");
    AddDiagnostics("This is a schedule for {0} employees in {1} days\n", numberOfEmployees, numberOfDays);
    AddDiagnostics("Shift team size: {0}, minimum shifts per employee: {1}\n\n", teamSize, minShiftsPerCycle);
    /*
     * Solver
     */
    // Initiate a new solver
    var solver = new Solver("Schedule");
    int[] shifts = { ShiftConsts.None, ShiftConsts.Day, ShiftConsts.Night, ShiftConsts.Off };
    int[] validShifts = { ShiftConsts.Day, ShiftConsts.Night, ShiftConsts.Off };
    /*
     * DFA and Transitions
     */
    var initialState = ruleSet.InitialState; // Everybody starts at this state
    int[] acceptingStates = ruleSet.AcceptingStates;
    // Transition tuples For TransitionConstraint
    var transitionTuples = new IntTupleSet(3);
    // Every tuple contains { state, input, next state }
    transitionTuples.InsertAll(ruleSet.Tuples);
    // Just for presentation in stats
    string[] days = { "d", "n", "o" };
    /*
     * Decision variables
     */
    // TransitionConstraint
    var x =
        solver.MakeIntVarMatrix(numberOfEmployees, numberOfDays, validShifts, "x");
    var flattenedX = x.Flatten();
    // Shift count
    var shiftCount = shifts.Length;
    // Shifts per day statistics
    var dayStats = new IntVar[numberOfDays, shiftCount];
    for (var i = 0; i < numberOfDays; i++)
        for (var j = 0; j < shiftCount; j++)
            dayStats[i, j] = solver.MakeIntVar(0, numberOfEmployees, "dayStats");
    // Team statistics
    var teamStats = new IntVar[numberOfEmployees];
    /*
     * Constraints
     */
    // Each employee's shift sequence must be accepted by the rule-set DFA.
    for (var i = 0; i < numberOfEmployees; i++)
    {
        var regInput = new IntVar[numberOfDays];
        for (var j = 0; j < numberOfDays; j++)
            regInput[j] = x[i, j];
        solver.Add(regInput.Transition(transitionTuples, initialState, acceptingStates));
    }
    // Statistics and constraints for each team
    for (var team = 0; team < numberOfEmployees; team++)
    {
        // Number of worked days (either day or night shift)
        var teamDays = new IntVar[numberOfDays];
        for (var day = 0; day < numberOfDays; day++)
            teamDays[day] = x[team, day].IsMember(new[] { ShiftConsts.Day, ShiftConsts.Night });
        teamStats[team] = teamDays.Sum().Var();
        // At least minShiftsPerCycle worked shifts per cycle
        solver.Add(teamStats[team] >= minShiftsPerCycle);
    }
    // Statistics and constraints for each day
    for (var day = 0; day < numberOfDays; day++)
    {
        var teams = new IntVar[numberOfEmployees];
        for (var team = 0; team < numberOfEmployees; team++)
            teams[team] = x[team, day];
        var stats = new IntVar[shiftCount];
        for (var shift = 0; shift < shiftCount; ++shift)
            stats[shift] = dayStats[day, shift];
        solver.Add(teams.Distribute(stats));
        // Constraints for each day
        // - exactly teamSize on day shift
        solver.Add(dayStats[day, ShiftConsts.Day] == teamSize);
        // - exactly teamSize on night shift
        solver.Add(dayStats[day, ShiftConsts.Night] == teamSize);
        // - The rest of the employees are off duty
        solver.Add(dayStats[day, ShiftConsts.Off] == numberOfEmployees - teamSize * 2);
        /* We can customize constraints even further
         * For example, a special constraints for weekends(1 employee each shift as weekends are quiet):
        if (day % 7 == 5 || day % 7 == 6)
        {
            solver.Add(dayStats[day, ShiftConsts.Day] == weekendTeamSize);
            solver.Add(dayStats[day, ShiftConsts.Night] == weekendTeamSize);
            solver.Add(dayStats[day, ShiftConsts.Off] == numberOfEmployees - weekendTeamSize * 2);
        }
        */
    }
    /*
     * Decision Builder and Solution Search
     */
    // A simple random selection
    var db = solver.MakePhase(flattenedX, Solver.CHOOSE_DYNAMIC_GLOBAL_BEST, Solver.ASSIGN_RANDOM_VALUE);
    var log = solver.MakeSearchLog(1000000);
    // Don't search after a certain number of milliseconds
    var timeLimit = solver.MakeTimeLimit(1000); // a second
    // Start the search
    solver.NewSearch(db, log, timeLimit);
    // Return solutions as result
    var schedules = new List<Schedule>();
    var numSolutions = 0;
    while (solver.NextSolution())
    {
        numSolutions++;
        // A new schedule for the time period
        var schedule = new Schedule
        {
            Id = numSolutions,
            Name = string.Format("Schedule for {0}-{1} for {2} employees, team size {3}",
                startDate.Date.ToShortDateString(), startDate.Date.AddDays(numberOfDays).ToShortDateString(),
                numberOfEmployees, teamSize),
            StartDate = startDate.Date,
            EndDate = startDate.Date.AddDays(numberOfDays),
            Shifts = new List<Shift>()
        };
        var idCounter = 1;
        for (var i = 0; i < numberOfEmployees; i++)
        {
            AddDiagnostics("Employee #{0,-2}: ", i + 1, shiftEmployees[i].ToString());
            var occ = new Dictionary<int, int>();
            for (var j = 0; j < numberOfDays; j++)
            {
                var shiftVal = (int)x[i, j].Value() - 1;
                if (!occ.ContainsKey(shiftVal)) occ[shiftVal] = 0;
                occ[shiftVal]++;
                // Add a shift
                var shiftType = (ShiftType)shiftVal + 1;
                var shiftStart = startDate.Date
                    .AddDays(j)
                    .AddHours(shiftType == ShiftType.Off
                        ? 0
                        : (shiftType == ShiftType.Day
                            ? startHour
                            : startHour + shiftHours)); // i.e Day shift starts at 07:00, night shift at 19:00
                schedule.Shifts.Add(new Shift
                {
                    Id = idCounter,
                    Employee = shiftEmployees[i],
                    Type = shiftType,
                    StartDate = shiftStart,
                    // An off-duty "shift" has zero duration.
                    EndDate = shiftType == ShiftType.Off ? shiftStart : shiftStart.AddHours(shiftHours)
                });
                idCounter++;
                AddDiagnostics(days[shiftVal] + " ");
            }
            AddDiagnostics(" #Total days: {0,2}", teamStats[i].Value());
            foreach (var s in validShifts)
            {
                var v = 0;
                if (occ.ContainsKey(s - 1)) v = occ[s - 1];
                AddDiagnostics(" {0}:{1}", days[s - 1], v);
            }
            AddDiagnostics("\t- {0}\n", shiftEmployees[i].ToString());
        }
        AddDiagnostics("\n");
        AddDiagnostics("Daily Statistics\nDay\t\td n o\n");
        for (var j = 0; j < numberOfDays; j++)
        {
            AddDiagnostics("Day #{0,2}: \t", j + 1);
            foreach (var t in validShifts) AddDiagnostics(dayStats[j, t].Value() + " ");
            AddDiagnostics("\n");
        }
        AddDiagnostics("\n");
        // Add this schedule to list
        schedules.Add(schedule);
        // defaults to just the first one
        if (numSolutions >= maxSolutions)
            break;
    }
    AddDiagnostics("\nSolutions: {0}", solver.Solutions());
    AddDiagnostics("\nFailures: {0}", solver.Failures());
    AddDiagnostics("\nBranches: {0} ", solver.Branches());
    AddDiagnostics("\nWallTime: {0}ms", solver.WallTime());
    solver.EndSearch();
    AddDiagnostics("\n\nFinished solving the schedule.");
    if (schedules.Count < 1)
    {
        LastError = "There's no solution in the model for your input.";
        // We reached the limit and there's no solution
        AddDiagnostics("\n\nThere's no solution in the model for your input.");
    }
    return Task.FromResult(schedules);
}
19
Source : Test.cs
with Apache License 2.0
from bubibubi
with Apache License 2.0
from bubibubi
[TestMethod]
public void RunEntityFunctions()
{
    // Seed 30 entities whose Date values span 30 consecutive hours.
    using (var context = new Context(GetConnection()))
    {
        for (int i = 0; i < 30; i++)
            context.Entities.Add(new Entity() { Date = DateTime.Now.AddHours(i) });
        context.SaveChanges();
    }
    using (var context = new Context(GetConnection()))
    {
#pragma warning disable 618
        var dates = context.Entities.Select(_ => EntityFunctions.TruncateTime(_.Date)).Distinct().ToList();
#pragma warning restore 618
        // TruncateTime must have zeroed the time-of-day component of every value.
        foreach (DateTime? date in dates)
        {
            Assert.IsNotNull(date);
            Assert.AreEqual(0, date.Value.Hour);
            Assert.AreEqual(0, date.Value.Minute);
            Assert.AreEqual(0, date.Value.Second);
        }
    }
}
19
Source : Test.cs
with Apache License 2.0
from bubibubi
with Apache License 2.0
from bubibubi
[TestMethod]
public void RunDbFunctions()
{
    // Seed 30 entities whose Date values span 30 consecutive hours.
    using (var context = new Context(GetConnection()))
    {
        for (int i = 0; i < 30; i++)
            context.Entities.Add(new Entity() { Date = DateTime.Now.AddHours(i) });
        context.SaveChanges();
    }
    using (var context = new Context(GetConnection()))
    {
        var dates = context.Entities.Select(_ => DbFunctions.TruncateTime(_.Date)).Distinct().ToList();
        // TruncateTime must have zeroed the time-of-day component of every value.
        foreach (DateTime? date in dates)
        {
            Assert.IsNotNull(date);
            Assert.AreEqual(0, date.Value.Hour);
            Assert.AreEqual(0, date.Value.Minute);
            Assert.AreEqual(0, date.Value.Second);
        }
    }
}
19
Source : SubmitSegmentedMessageTests.cs
with Apache License 2.0
from bugbytesinc
with Apache License 2.0
from bugbytesinc
/// <summary>
/// Submits a one-segment segmented message, verifies the receipt and topic info,
/// then confirms the message (with segment metadata) arrives on the mirror stream.
/// </summary>
[Fact(DisplayName = "Submit Segmented Message: Can Submit Single Segmented Message")]
public async Task CanSubmitSingleSegmentedMessage()
{
    await using var fx = await TestTopic.CreateAsync(_network);
    var submitParams = new SubmitMessageParams
    {
        Topic = fx.Record.Topic,
        Segment = Encoding.ASCII.GetBytes(Generator.String(120, 199)),
        Index = 1,
        TotalSegmentCount = 1,
        Signatory = fx.ParticipantPrivateKey
    };
    var receipt = await fx.Client.SubmitMessageAsync(submitParams);
    Assert.Equal(ResponseCode.Success, receipt.Status);
    Assert.Equal(1ul, receipt.SequenceNumber);
    Assert.False(receipt.RunningHash.IsEmpty);
    Assert.Equal(3ul, receipt.RunningHashVersion);
    var txId = receipt.Id;
    var info = await fx.Client.GetTopicInfoAsync(fx.Record.Topic);
    Assert.Equal(fx.Memo, info.Memo);
    Assert.NotEqual(ReadOnlyMemory<byte>.Empty, info.RunningHash);
    Assert.Equal(1UL, info.SequenceNumber);
    Assert.True(info.Expiration > DateTime.MinValue);
    Assert.Equal(new Endorsement(fx.AdminPublicKey), info.Administrator);
    Assert.Equal(new Endorsement(fx.ParticipantPublicKey), info.Participant);
    Assert.True(info.AutoRenewPeriod > TimeSpan.MinValue);
    Assert.Equal(fx.TestAccount.Record.Address, info.RenewAccount);
    await Task.Delay(7000); // give the beta net time to sync
    TopicMessage topicMessage = null;
    using var ctx = new CancellationTokenSource();
    await using var mirror = _network.NewMirror();
    try
    {
        var subscribeTask = mirror.SubscribeTopicAsync(new SubscribeTopicParams
        {
            Topic = fx.Record.Topic,
            Starting = DateTime.UtcNow.AddHours(-1),
            MessageWriter = new TopicMessageWriterAdapter(m =>
            {
                // First message ends the subscription.
                topicMessage = m;
                ctx.Cancel();
            }),
            CancellationToken = ctx.Token
        });
        ctx.CancelAfter(5000);
        await subscribeTask;
        if (topicMessage == null)
        {
            _network.Output?.WriteLine("INDETERMINATE TEST - MIRROR NODE DID NOT RETURN TOPIC IN ALLOWED TIME");
        }
        else
        {
            Assert.Equal(submitParams.Topic, topicMessage.Topic);
            Assert.Equal(1ul, topicMessage.SequenceNumber);
            Assert.Equal(receipt.RunningHash.ToArray(), topicMessage.RunningHash.ToArray());
            Assert.Equal(submitParams.Segment.ToArray(), topicMessage.Messsage.ToArray());
            Assert.NotNull(topicMessage.SegmentInfo);
            Assert.Equal(txId, topicMessage.SegmentInfo.ParentTxId);
            Assert.Equal(1, topicMessage.SegmentInfo.Index);
            Assert.Equal(1, topicMessage.SegmentInfo.TotalSegmentCount);
        }
    }
    catch (MirrorException mex) when (mex.Code == MirrorExceptionCode.TopicNotFound)
    {
        _network.Output?.WriteLine("INDETERMINATE TEST - MIRROR NODE DID NOT RECEIVE TOPIC CREATE IN ALLOWED TIME");
        return;
    }
}
19
Source : SubscribeTopicTests.cs
with Apache License 2.0
from bugbytesinc
with Apache License 2.0
from bugbytesinc
/// <summary>
/// Submits a plain (non-segmented) message, verifies the receipt, then confirms
/// the same message arrives on the mirror stream and the topic info reflects it.
/// </summary>
[Fact(DisplayName = "Subscribe Topic: Can Create and Fetch Topic Message from Stream")]
public async Task CanSubscribeToATopicAsync()
{
    try
    {
        await using var fx = await TestTopic.CreateAsync(_network);
        var message = Encoding.ASCII.GetBytes(Generator.String(10, 100));
        var receipt = await fx.Client.SubmitMessageAsync(fx.Record.Topic, message, fx.ParticipantPrivateKey);
        Assert.Equal(ResponseCode.Success, receipt.Status);
        Assert.Equal(1ul, receipt.SequenceNumber);
        Assert.False(receipt.RunningHash.IsEmpty);
        Assert.Equal(3ul, receipt.RunningHashVersion);
        await Task.Delay(5000); // give the beta net time to sync
        TopicMessage topicMessage = null;
        using var ctx = new CancellationTokenSource();
        await using var mirror = _network.NewMirror();
        var subscribeTask = mirror.SubscribeTopicAsync(new SubscribeTopicParams
        {
            Topic = fx.Record.Topic,
            Starting = DateTime.UtcNow.AddHours(-1),
            MessageWriter = new TopicMessageWriterAdapter(m =>
            {
                // First message ends the subscription.
                topicMessage = m;
                ctx.Cancel();
            }),
            CancellationToken = ctx.Token
        });
        ctx.CancelAfter(5000);
        await subscribeTask;
        if (topicMessage == null)
        {
            _network.Output?.WriteLine("INDETERMINATE TEST - MIRROR NODE DID NOT RETURN TOPIC IN ALLOWED TIME");
        }
        else
        {
            Assert.Equal(fx.Record.Topic, topicMessage.Topic);
            Assert.Equal(1ul, topicMessage.SequenceNumber);
            Assert.Equal(receipt.RunningHash.ToArray(), topicMessage.RunningHash.ToArray());
            Assert.Equal(3ul, receipt.RunningHashVersion);
            Assert.Equal(message, topicMessage.Messsage.ToArray());
            Assert.Null(topicMessage.SegmentInfo);
        }
        var info = await fx.Client.GetTopicInfoAsync(fx.Record.Topic);
        Assert.Equal(fx.Memo, info.Memo);
        Assert.NotEqual(receipt.RunningHash.ToArray(), info.RunningHash);
        Assert.Equal(3UL, receipt.RunningHashVersion);
        Assert.Equal(1UL, info.SequenceNumber);
        Assert.True(info.Expiration > DateTime.MinValue);
        Assert.Equal(new Endorsement(fx.AdminPublicKey), info.Administrator);
        Assert.Equal(new Endorsement(fx.ParticipantPublicKey), info.Participant);
        Assert.True(info.AutoRenewPeriod > TimeSpan.MinValue);
        Assert.Equal(fx.TestAccount.Record.Address, info.RenewAccount);
    }
    catch (MirrorException mex) when (mex.Code == MirrorExceptionCode.TopicNotFound)
    {
        _network.Output?.WriteLine("INDETERMINATE TEST - MIRROR NODE DID NOT RECEIVE TOPIC CREATE IN ALLOWED TIME");
        return;
    }
}
19
Source : SubscribeTopicTests.cs
with Apache License 2.0
from bugbytesinc
with Apache License 2.0
from bugbytesinc
/// <summary>
/// Uses the TestTopicMessage fixture (which submits a message on creation) and
/// confirms that message can be fetched back from the mirror stream.
/// </summary>
[Fact(DisplayName = "Subscribe Topic: Can Create and Fetch Topic Test Message from Stream")]
public async Task CanSubscribeToATestTopic()
{
    await using var fx = await TestTopicMessage.CreateAsync(_network);
    Assert.Equal(ResponseCode.Success, fx.Record.Status);
    Assert.Equal(1ul, fx.Record.SequenceNumber);
    Assert.False(fx.Record.RunningHash.IsEmpty);
    await Task.Delay(7000); // give the beta net time to sync
    TopicMessage topicMessage = null;
    using var ctx = new CancellationTokenSource();
    await using var mirror = _network.NewMirror();
    try
    {
        var subscribeTask = mirror.SubscribeTopicAsync(new SubscribeTopicParams
        {
            Topic = fx.TestTopic.Record.Topic,
            Starting = DateTime.UtcNow.AddHours(-1),
            MessageWriter = new TopicMessageWriterAdapter(m =>
            {
                // First message ends the subscription.
                topicMessage = m;
                ctx.Cancel();
            }),
            CancellationToken = ctx.Token
        });
        ctx.CancelAfter(5000);
        await subscribeTask;
        if (topicMessage == null)
        {
            _network.Output?.WriteLine("INDETERMINATE TEST - MIRROR NODE DID NOT RETURN TOPIC IN ALLOWED TIME");
        }
        else
        {
            Assert.Equal(fx.TestTopic.Record.Topic, topicMessage.Topic);
            Assert.Equal(1ul, topicMessage.SequenceNumber);
            Assert.Equal(fx.Record.RunningHash.ToArray(), topicMessage.RunningHash.ToArray());
            Assert.Equal(fx.Message.ToArray(), topicMessage.Messsage.ToArray());
            Assert.Null(topicMessage.SegmentInfo);
        }
    }
    catch (MirrorException mex) when (mex.Code == MirrorExceptionCode.TopicNotFound)
    {
        _network.Output?.WriteLine("INDETERMINATE TEST - MIRROR NODE DID NOT RECEIVE TOPIC CREATE IN ALLOWED TIME");
        return;
    }
}
19
Source : SubscribeTopicTests.cs
with Apache License 2.0
from bugbytesinc
with Apache License 2.0
from bugbytesinc
/// <summary>
/// Like CanSubscribeToATestTopic, but collects the stream through a
/// TopicMessageCapture writer instead of an inline adapter callback.
/// </summary>
[Fact(DisplayName = "Subscribe Topic: Can Capture Topic Test Message from Stream")]
public async Task CanCaptureATestTopic()
{
    try
    {
        await using var fx = await TestTopicMessage.CreateAsync(_network);
        Assert.Equal(ResponseCode.Success, fx.Record.Status);
        Assert.Equal(1ul, fx.Record.SequenceNumber);
        Assert.False(fx.Record.RunningHash.IsEmpty);
        await Task.Delay(5000); // give the beta net time to sync
        var capture = new TopicMessageCapture(1);
        await using var mirror = _network.NewMirror();
        using var cts = new CancellationTokenSource();
        var subscribeTask = mirror.SubscribeTopicAsync(new SubscribeTopicParams
        {
            Topic = fx.TestTopic.Record.Topic,
            Starting = DateTime.UtcNow.AddHours(-1),
            MessageWriter = capture,
            CancellationToken = cts.Token
        });
        cts.CancelAfter(500);
        await subscribeTask;
        if (capture.CapturedList.Count == 0)
        {
            _network.Output?.WriteLine("INDETERMINATE TEST - MIRROR NODE DID NOT RETURN TOPIC IN ALLOWED TIME");
        }
        else
        {
            var message = capture.CapturedList[0];
            Assert.Equal(fx.TestTopic.Record.Topic, message.Topic);
            Assert.Equal(1ul, message.SequenceNumber);
            Assert.Equal(fx.Record.RunningHash.ToArray(), message.RunningHash.ToArray());
            Assert.Equal(fx.Message.ToArray(), message.Messsage.ToArray());
            Assert.Null(message.SegmentInfo);
        }
    }
    catch (MirrorException mex) when (mex.Code == MirrorExceptionCode.TopicNotFound)
    {
        _network.Output?.WriteLine("INDETERMINATE TEST - MIRROR NODE DID NOT RECEIVE TOPIC CREATE IN ALLOWED TIME");
        return;
    }
}
19
Source : SubscribeTopicTests.cs
with Apache License 2.0
from bugbytesinc
with Apache License 2.0
from bugbytesinc
/// <summary>
/// Submits five messages to a topic, waits for the mirror node to catch up,
/// then verifies a subscription with MaxCount = 2 returns exactly two messages.
/// </summary>
[Fact(DisplayName = "Subscribe Topic: Return Limit is Enforced")]
public async Task ReturnLimitIsEnforced()
{
    await using var fx = await TestTopicMessage.CreateAsync(_network);
    await fx.TestTopic.Client.SubmitMessageAsync(fx.TestTopic.Record.Topic, fx.Message, fx.TestTopic.ParticipantPrivateKey);
    await fx.TestTopic.Client.SubmitMessageAsync(fx.TestTopic.Record.Topic, fx.Message, fx.TestTopic.ParticipantPrivateKey);
    await fx.TestTopic.Client.SubmitMessageAsync(fx.TestTopic.Record.Topic, fx.Message, fx.TestTopic.ParticipantPrivateKey);
    await fx.TestTopic.Client.SubmitMessageAsync(fx.TestTopic.Record.Topic, fx.Message, fx.TestTopic.ParticipantPrivateKey);
    // Wait for enough messages to be available
    // in the mirror node's database.
    for (int waitTries = 0; waitTries < 20; waitTries++)
    {
        var captured = await TopicMessageCapture.CaptureOrTimeoutAsync(_network.NewMirror(), fx.TestTopic.Record.Topic, 4, 5000);
        if (captured.Length > 2)
        {
            break;
        }
    }
    // Now we can try the real test on the limits.
    var capture = new TopicMessageCapture(10);
    await using var mirror = _network.NewMirror();
    using var cts = new CancellationTokenSource();
    try
    {
        var subscribeTask = mirror.SubscribeTopicAsync(new SubscribeTopicParams
        {
            Topic = fx.TestTopic.Record.Topic,
            Starting = DateTime.UtcNow.AddHours(-1),
            MessageWriter = capture,
            CancellationToken = cts.Token,
            MaxCount = 2
        });
        cts.CancelAfter(10000);
        await subscribeTask;
        if (capture.CapturedList.Count == 0)
        {
            _network.Output?.WriteLine("INDETERMINATE TEST - MIRROR NODE DID NOT RETURN TOPIC IN ALLOWED TIME");
        }
        else
        {
            Assert.Equal(2, capture.CapturedList.Count);
        }
    }
    catch (MirrorException mex) when (mex.Code == MirrorExceptionCode.TopicNotFound)
    {
        _network.Output?.WriteLine("INDETERMINATE TEST - MIRROR NODE DID NOT RECEIVE TOPIC CREATE IN ALLOWED TIME");
        return;
    }
}
19
Source : TopicMessageCapture.cs
with Apache License 2.0
from bugbytesinc
with Apache License 2.0
from bugbytesinc
/// <summary>
/// Subscribes to <paramref name="topic"/> on the given mirror node and collects
/// up to <paramref name="expectedCount"/> messages, giving up once the timeout
/// elapses. Returns whatever was captured by then.
/// </summary>
public static async Task<TopicMessage[]> CaptureOrTimeoutAsync(MirrorClient mirror, Address topic, int expectedCount, int timeoutInMiliseconds)
{
    var writer = new TopicMessageCapture(expectedCount);
    using var cancellation = new CancellationTokenSource();
    var pending = mirror.SubscribeTopicAsync(new SubscribeTopicParams
    {
        Topic = topic,
        Starting = DateTime.UtcNow.AddHours(-1),
        MessageWriter = writer,
        CancellationToken = cancellation.Token
    });
    // Arm the deadline after the subscription is underway, then wait it out.
    cancellation.CancelAfter(timeoutInMiliseconds);
    await pending;
    return writer.CapturedList.ToArray();
}
19
Source : PartialZipDownloader.cs
with The Unlicense
from BuIlDaLiBlE
with The Unlicense
from BuIlDaLiBlE
// Converts an MS-DOS packed date/time pair (as stored in zip headers) to a
// DateTime via the Win32 DosDateTimeToFileTime/FileTimeToSystemTime P/Invokes.
// NOTE(review): the result adds one second and then shifts by -3 hours —
// presumably compensating for DOS 2-second timestamp truncation and a fixed
// server timezone offset; confirm against the archive source before relying on it.
private static DateTime ConvertDOSDateTime(ushort date, ushort time)
{
    FileTime fileTime = new FileTime();
    Systime systemTime = new Systime();
    DosDateTimeToFileTime(date, time, ref fileTime);
    FileTimeToSystemTime(ref fileTime, ref systemTime);
    return new DateTime(systemTime.Year, systemTime.Month, systemTime.Day, systemTime.Hour, systemTime.Minute, systemTime.Second + 1, DateTimeKind.Utc).AddHours(-3);
}
19
Source : Generator.cs
with Apache License 2.0
from bugbytesinc
with Apache License 2.0
from bugbytesinc
/// <summary>
/// Picks a random UTC moment between <paramref name="minHoursAhead"/> and
/// <paramref name="maxHoursAhead"/> hours from now, truncated to whole seconds.
/// </summary>
public static DateTime TruncatedFutureDate(Int32 minHoursAhead, Int32 maxHoursAhead)
{
    var future = DateTime.UtcNow.AddHours(Double(minHoursAhead, maxHoursAhead));
    // Rebuild the value field-by-field to drop sub-second precision.
    return new DateTime(future.Year, future.Month, future.Day, future.Hour, future.Minute, future.Second, DateTimeKind.Utc);
}
19
Source : Program.cs
with MIT License
from cafeasp
with MIT License
from cafeasp
// Pulls vendor purchase orders created in the last 7 hours from the signed
// SP-API endpoint, walks the returned order items, and refreshes the access
// token when the current one is rejected.
static void OrderImport()
{
    restClient = new RestClient(live_url_base);
    IRestRequest restRequest = new RestRequest(purchase_order_resource, Method.GET);
    restRequest.AddHeader("x-amz-access-token", "token_from_your_secure_db");
    // Query window uses round-trip ("o") formatting as required by the API.
    restRequest.AddQueryParameter("createdAfter", DateTime.Now.AddHours(-7).ToString("o"));
    restRequest.AddQueryParameter("createdBefore", DateTime.Now.ToString("o"));
    var request = signatureHelper.SignRequest(restRequest, restClient, content_form_urlencoded);
    var result = restClient.Execute(request);
    if (result.StatusCode == System.Net.HttpStatusCode.OK)
    {
        // read your orders
        var payload = JsonConvert.DeserializeObject<Payload>(result.Content);
        foreach (var order in payload.Orders)
        {
            string po = order.PurchaseOrderNumber;
            DateTime orderDate = order.OrderDetails.OrderDate;
            foreach (var item in order.OrderDetails.Items)
            {
                string vendorSku = item.VendorProductIdentifier;
                string price = item.NetPrice.Amount;
                int qty = item.OrderedQuantity.Amount;
            }
        }
    }
    else if (result.StatusCode == System.Net.HttpStatusCode.Forbidden)
    {
        // bad token
        string new_token = signatureHelper.GetToken();
        // save new_token to your secure location/db and run OrderImport again
    }
}
19
Source : DefaultBrokerageMessageHandler.cs
with Apache License 2.0
from Capnode
with Apache License 2.0
from Capnode
/// <summary>
/// Routes brokerage messages to the algorithm: information and warnings are
/// logged, errors terminate the algorithm, and disconnect/reconnect events
/// drive a reconnection watchdog whose deadline depends on whether any
/// relevant exchange is (or is about to be) open.
/// </summary>
public void Handle(BrokerageMessageEvent message)
{
    // based on message type dispatch to result handler
    switch (message.Type)
    {
        case BrokerageMessageType.Information:
            _algorithm.Debug($"Brokerage Info: {message.Message}");
            break;
        case BrokerageMessageType.Warning:
            _algorithm.Error($"Brokerage Warning: {message.Message}");
            break;
        case BrokerageMessageType.Error:
            // unexpected error, we need to close down shop
            _algorithm.SetRuntimeError(new Exception(message.Message), "Brokerage Error");
            break;
        case BrokerageMessageType.Disconnect:
            _connected = false;
            Log.Trace("DefaultBrokerageMessageHandler.Handle(): Disconnected.");
            // check to see if any non-custom security exchanges are open within the next x minutes
            var open = (from kvp in _algorithm.Securities
                        let security = kvp.Value
                        where security.Type != SecurityType.Base
                        let exchange = security.Exchange
                        let localTime = _algorithm.UtcTime.ConvertFromUtc(exchange.TimeZone)
                        where exchange.IsOpenDuringBar(
                            localTime,
                            localTime + _openThreshold,
                            _algorithm.SubscriptionManager.SubscriptionDataConfigService
                                .GetSubscriptionDataConfigs(security.Symbol)
                                .IsExtendedMarketHours())
                        select security).Any();
            // if any are open then we need to kill the algorithm
            if (open)
            {
                Log.Trace("DefaultBrokerageMessageHandler.Handle(): Disconnect when exchanges are open, " +
                    Invariant($"trying to reconnect for {_initialDelay.TotalMinutes} minutes.")
                );
                // wait 15 minutes before killing algorithm
                StartCheckReconnected(_initialDelay, message);
            }
            else
            {
                Log.Trace("DefaultBrokerageMessageHandler.Handle(): Disconnect when exchanges are closed, checking back before exchange open.");
                // if they aren't open, we'll need to check again a little bit before markets open
                DateTime nextMarketOpenUtc;
                if (_algorithm.Securities.Count != 0)
                {
                    // Earliest upcoming market open (in UTC) across all non-custom securities.
                    nextMarketOpenUtc = (from kvp in _algorithm.Securities
                                         let security = kvp.Value
                                         where security.Type != SecurityType.Base
                                         let exchange = security.Exchange
                                         let localTime = _algorithm.UtcTime.ConvertFromUtc(exchange.TimeZone)
                                         let marketOpen = exchange.Hours.GetNextMarketOpen(localTime,
                                             _algorithm.SubscriptionManager.SubscriptionDataConfigService
                                                 .GetSubscriptionDataConfigs(security.Symbol)
                                                 .IsExtendedMarketHours())
                                         let marketOpenUtc = marketOpen.ConvertToUtc(exchange.TimeZone)
                                         select marketOpenUtc).Min();
                }
                else
                {
                    // if we have no securities just make next market open an hour from now
                    nextMarketOpenUtc = DateTime.UtcNow.AddHours(1);
                }
                var timeUntilNextMarketOpen = nextMarketOpenUtc - DateTime.UtcNow - _openThreshold;
                Log.Trace(Invariant($"DefaultBrokerageMessageHandler.Handle(): TimeUntilNextMarketOpen: {timeUntilNextMarketOpen}"));
                // wake up 5 minutes before market open and check if we've reconnected
                StartCheckReconnected(timeUntilNextMarketOpen, message);
            }
            break;
        case BrokerageMessageType.Reconnect:
            _connected = true;
            Log.Trace("DefaultBrokerageMessageHandler.Handle(): Reconnected.");
            // Reconnected before the watchdog fired: cancel the pending kill check.
            if (_cancellationTokenSource != null && !_cancellationTokenSource.IsCancellationRequested)
            {
                _cancellationTokenSource.Cancel();
            }
            break;
    }
}
19
Source : StatisticsFileHelper.cs
with Apache License 2.0
from cdy816
with Apache License 2.0
from cdy816
/// <summary>
/// Reads hourly statistics for tag <paramref name="id"/> between
/// <paramref name="startTime"/> and <paramref name="endTime"/>, grouping the
/// hour slots by calendar day so each day's file is visited once.
/// </summary>
public NumberStatisticsQueryResult Read(int id,DateTime startTime,DateTime endTime)
{
    NumberStatisticsQueryResult result;
    // One value slot per whole hour in the interval, inclusive of both ends.
    var valcount = (int)Math.Ceiling((endTime - startTime).TotalHours) + 1;
    result = new NumberStatisticsQueryResult(valcount);
    // Map: calendar day -> the hour-aligned slots that fall on that day.
    Dictionary<DateTime, List<DateTime>> mFileMap = new Dictionary<DateTime, List<DateTime>>();
    DateTime stime = new DateTime(startTime.Year, startTime.Month, startTime.Day, startTime.Hour, 0, 0);
    while(stime<=endTime)
    {
        var vdd = stime.Date;
        if(!mFileMap.ContainsKey(vdd))
        {
            mFileMap.Add(vdd, new List<DateTime>() { stime });
        }
        else
        {
            mFileMap[vdd].Add(stime);
        }
        stime = stime.AddHours(1);
    }
    // NOTE(review): after the loop stime is strictly greater than endTime and is
    // already hour-aligned, so the truncation below is a no-op, 'etime' is unused,
    // and the inner equality can never hold — this whole branch looks like dead
    // code; confirm and remove.
    if(stime>endTime)
    {
        stime = new DateTime(stime.Year, stime.Month, stime.Day, stime.Hour, 0, 0);
        var etime = new DateTime(endTime.Year, endTime.Month, endTime.Day, endTime.Hour, 0, 0);
        if(stime == endTime)
        {
            var vdd = stime.Date;
            if (!mFileMap.ContainsKey(vdd))
            {
                mFileMap.Add(vdd, new List<DateTime>() { stime });
            }
            else
            {
                mFileMap[vdd].Add(stime);
            }
        }
    }
    // Delegate the per-day reads; each call appends into the shared result.
    foreach(var vv in mFileMap)
    {
        Read(id, vv.Key, vv.Value,result);
    }
    return result;
}
19
Source : HisDataQueryModel.cs
with Apache License 2.0
from cdy816
with Apache License 2.0
from cdy816
/// <summary>
/// Queries history data for the currently selected tag and dispatches to the
/// strongly-typed query routine matching the tag's registered data type.
/// NOTE(review): the tag/startTime/endTime parameters are ignored — the method
/// reads mSelectTag, StartTime/StartTimeHour and EndTime/EndTimeHour fields
/// instead; confirm this is intentional. 'tcount' is computed but never used.
/// </summary>
private void QueryHisData(string tag,DateTime startTime,DateTime endTime)
{
    mIsBusy = true;
    // Bail out (leaving mIsBusy set) when the selected tag is unknown.
    if (!mTags.ContainsKey(mSelectTag)) return;
    int id = mTags[mSelectTag].Item1;
    // Effective query window = date pickers plus the hour offsets.
    DateTime sTime = StartTime.AddHours(StartTimeHour);
    DateTime eTime = EndTime.AddHours(EndTimeHour);
    int tcount = (int)(eTime - sTime).TotalSeconds;
    // Item2 of the tag tuple is the Cdy.Tag.TagType byte code.
    switch (mTags[mSelectTag].Item2)
    {
        case (byte)Cdy.Tag.TagType.Bool:
            ProcessDataQuery<bool>(id, sTime, eTime);
            break;
        case (byte)Cdy.Tag.TagType.Byte:
            ProcessDataQuery<byte>(id, sTime, eTime);
            break;
        case (byte)Cdy.Tag.TagType.DateTime:
            ProcessDataQuery<DateTime>(id, sTime, eTime);
            break;
        case (byte)Cdy.Tag.TagType.Double:
            ProcessDataQuery<double>(id, sTime, eTime);
            break;
        case (byte)Cdy.Tag.TagType.Float:
            ProcessDataQuery<float>(id, sTime, eTime);
            break;
        case (byte)Cdy.Tag.TagType.Int:
            ProcessDataQuery<int>(id, sTime, eTime);
            break;
        case (byte)Cdy.Tag.TagType.Long:
            ProcessDataQuery<long>(id, sTime, eTime);
            break;
        case (byte)Cdy.Tag.TagType.Short:
            ProcessDataQuery<short>(id, sTime, eTime);
            break;
        case (byte)Cdy.Tag.TagType.String:
            ProcessDataQuery<string>(id, sTime, eTime);
            break;
        case (byte)Cdy.Tag.TagType.UInt:
            ProcessDataQuery<uint>(id, sTime, eTime);
            break;
        case (byte)Cdy.Tag.TagType.ULong:
            ProcessDataQuery<ulong>(id, sTime, eTime);
            break;
        case (byte)Cdy.Tag.TagType.UShort:
            ProcessDataQuery<ushort>(id, sTime, eTime);
            break;
        case (byte)Cdy.Tag.TagType.IntPoint:
            ProcessDataQuery<IntPointData>(id, sTime, eTime);
            break;
        case (byte)Cdy.Tag.TagType.UIntPoint:
            ProcessDataQuery<UIntPointData>(id, sTime, eTime);
            break;
        case (byte)Cdy.Tag.TagType.IntPoint3:
            ProcessDataQuery<IntPoint3Data>(id, sTime, eTime);
            break;
        case (byte)Cdy.Tag.TagType.UIntPoint3:
            ProcessDataQuery<UIntPoint3Data>(id, sTime, eTime);
            break;
        case (byte)Cdy.Tag.TagType.LongPoint:
            ProcessDataQuery<LongPointData>(id, sTime, eTime);
            break;
        case (byte)Cdy.Tag.TagType.ULongPoint:
            ProcessDataQuery<ULongPointTag>(id, sTime, eTime);
            break;
        case (byte)Cdy.Tag.TagType.LongPoint3:
            ProcessDataQuery<LongPoint3Data>(id, sTime, eTime);
            break;
        case (byte)Cdy.Tag.TagType.ULongPoint3:
            ProcessDataQuery<ULongPoint3Data>(id, sTime, eTime);
            break;
    }
    mIsBusy = false;
}
19
Source : PriceHistoryPeriodHelperTests.cs
with MIT License
from centaurus-project
with MIT License
from centaurus-project
/// <summary>
/// For every price-history period, trims random timestamps and verifies the
/// trimmed value lands exactly on a period boundary (month boundaries are
/// walked month-by-month; fixed-length periods are walked by tick count).
/// </summary>
[Test]
public void TrimTest()
{
    var r = new Random();
    var periods = Enum.GetValues(typeof(PriceHistoryPeriod)).Cast<PriceHistoryPeriod>();
    foreach (var p in periods)
    {
        var minDate = GetMinDateForPeriod(p);
        for (var i = 0; i < 5; i++)
        {
            // Random timestamp with every sub-day component populated.
            var dateTime = minDate
                .AddDays(r.Next(0, 1000))
                .AddHours(r.Next(0, 24))
                .AddMinutes(r.Next(0, 60))
                .AddSeconds(r.Next(0, 60))
                .AddMilliseconds(r.Next(0, 1000));
            var trimmedDate = dateTime.Trim(p);
            if (p == PriceHistoryPeriod.Month)
            {
                var date = default(DateTime);
                while (date < trimmedDate)
                {
                    date = date.AddMonths(1);
                }
                if (date == trimmedDate
                    && date.Day == 1
                    && date.Hour == 0
                    && date.Minute == 0
                    && date.Second == 0
                    && date.Millisecond == 0)
                    continue;
            }
            else
            {
                var date = minDate.Ticks;
                while (date < trimmedDate.Ticks)
                    date += PriceHistoryPeriodHelper.TicksPerPeriod(p);
                if (date == trimmedDate.Ticks)
                    continue;
            }
            Assert.Fail($"Unable to trim {dateTime.Ticks} to {p} period.");
        }
    }
}
19
Source : LineExpression.cs
with GNU General Public License v3.0
from cesarbmx
with GNU General Public License v3.0
from cesarbmx
/// <summary>
/// Predicate (as an expression tree, so it can be translated by the query
/// provider) selecting candle lines old enough to purge. Retention grows with
/// the period: 1-minute 3h, 5-minute 1d, 15-minute 3d, 1-hour 8d, 1-day 1y.
/// </summary>
public static Expression<Func<Line, bool>> ObsoleteLine()
{
    // && binds tighter than ||, so each period gets its own cutoff clause.
    return x => x.Period == Period.ONE_MINUTE && x.Time < DateTime.UtcNow.AddHours(-3) ||
                x.Period == Period.FIVE_MINUTES && x.Time < DateTime.UtcNow.AddDays(-1) ||
                x.Period == Period.FIFTEEN_MINUTES && x.Time < DateTime.UtcNow.AddDays(-3) ||
                x.Period == Period.ONE_HOUR && x.Time < DateTime.UtcNow.AddDays(-8) ||
                x.Period == Period.ONE_DAY && x.Time < DateTime.UtcNow.AddYears(-1);
}
19
Source : OutboxManagementTests.cs
with MIT License
from cfrenzel
with MIT License
from cfrenzel
/// <summary>
/// Verifies Outbox.Reset(58 minutes) moves only InProgress messages older than
/// the cutoff back to Ready (clearing TryCount, keeping PriorityDateUtc), while
/// recent InProgress, Processed and Failed messages are left untouched.
/// </summary>
[Fact]
public async Task Should_reset_aged_status_to_inprogress()
{
    string endpointName = "OutboxManagementTests_1.1";
    // One message already Ready, two InProgress past the 58-minute cutoff (should
    // reset), one InProgress inside the cutoff, one Processed, one Failed.
    var readyOrig = await IntegrationTestFixture.CreateOutboxMessage(endpointName, "ready_1", OutboxMessageStatus.Ready);
    await IntegrationTestFixture.CreateOutboxMessage(endpointName, "ready_2", OutboxMessageStatus.InProgress, DateTime.UtcNow.AddMinutes(-40));
    var reset1 = await IntegrationTestFixture.CreateOutboxMessage(endpointName, "ready_3", OutboxMessageStatus.InProgress, DateTime.UtcNow.AddHours(-1));
    var reset2 = await IntegrationTestFixture.CreateOutboxMessage(endpointName, "ready_4", OutboxMessageStatus.InProgress, DateTime.UtcNow.AddHours(-1.1));
    await IntegrationTestFixture.CreateOutboxMessage(endpointName, "ready_5", OutboxMessageStatus.Processed);
    await IntegrationTestFixture.CreateOutboxMessage(endpointName, "ready_6", OutboxMessageStatus.Failed);
    await _fixture.Outbox.Reset(TimeSpan.FromMinutes(58));
    using (var scope = NewScope())
    {
        var db = scope.ServiceProvider.GetService<ApplicationDbContext>();
        // Exactly three messages should now be Ready: the original plus the two resets.
        var outboxMessages = db.Set<OutboxMessage>().Include(x=>x.MessageData)
            .Where(x=>x.Status == (int)OutboxMessageStatus.Ready)
            .OrderBy(x=>x.CreatedAtUtc)
            .AsNoTracking().ToList();
        outboxMessages.Count.ShouldBe(3);
        outboxMessages[0].Status.ShouldBe((int)OutboxMessageStatus.Ready);
        outboxMessages[0].TryCount.ShouldBe(0);
        outboxMessages[0].PriorityDateUtc.ShouldBe(readyOrig.PriorityDateUtc);
        outboxMessages[1].Status.ShouldBe((int)OutboxMessageStatus.Ready);
        outboxMessages[1].TryCount.ShouldBe(0);
        outboxMessages[1].PriorityDateUtc.ShouldBe(reset1.PriorityDateUtc);
        outboxMessages[2].Status.ShouldBe((int)OutboxMessageStatus.Ready);
        outboxMessages[2].TryCount.ShouldBe(0);
        outboxMessages[2].PriorityDateUtc.ShouldBe(reset2.PriorityDateUtc);
    }
}
See More Examples