Here are examples of the C# API System.Math.Min(long, long), taken from open source projects. By voting up, you can indicate which examples are most useful and appropriate.
660 Examples
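Before the project samples, a minimal sketch of the pattern that dominates the examples below: Math.Min(long, long) returns the smaller of two 64-bit integers, and it is most often used to clamp a remaining byte count to a fixed buffer size when copying a stream in chunks. The class name, method name, and buffer size here are illustrative assumptions, not taken from any of the listed projects.
using System;
using System.IO;
static class MathMinUsageSketch
{
    // Illustrative chunk size; any positive value works.
    const int MaxBufferSize = 81920;
    // Copies 'count' bytes from source to destination in fixed-size chunks.
    // Math.Min(long, long) clamps each read so it never exceeds the bytes
    // that remain -- the same pattern used throughout the examples below.
    public static void CopyChunked(Stream source, Stream destination, long count)
    {
        var buffer = new byte[MaxBufferSize];
        long remaining = count;
        while (remaining > 0)
        {
            int toRead = (int)Math.Min(MaxBufferSize, remaining);
            int read = source.Read(buffer, 0, toRead);
            if (read == 0)
                break; // source ended early
            destination.Write(buffer, 0, read);
            remaining -= read;
        }
    }
}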
19
View Source File : HeifStreamWriter.cs
License : GNU Lesser General Public License v3.0
Project Creator : 0xC0000054
protected override void WriteCore(IntPtr data, long count)
{
long offset = 0;
long remaining = count;
while (remaining > 0)
{
int copySize = (int)Math.Min(MaxWriteBufferSize, remaining);
Marshal.Copy(new IntPtr(data.ToInt64() + offset), this.streamBuffer, 0, copySize);
this.stream.Write(this.streamBuffer, 0, copySize);
offset += copySize;
remaining -= copySize;
}
}
19
View Source File : HeifStreamReader.cs
License : GNU Lesser General Public License v3.0
Project Creator : 0xC0000054
protected override bool ReadCore(IntPtr data, long count)
{
long totalBytesRead = 0;
long remaining = count;
while (remaining > 0)
{
int streamBytesRead = this.stream.Read(this.streamBuffer, 0, (int)Math.Min(MaxReadBufferSize, remaining));
if (streamBytesRead == 0)
{
break;
}
Marshal.Copy(this.streamBuffer, 0, new IntPtr(data.ToInt64() + totalBytesRead), streamBytesRead);
totalBytesRead += streamBytesRead;
remaining -= streamBytesRead;
}
return remaining == 0;
}
19
View Source File : StreamIOCallbacks.cs
License : MIT License
Project Creator : 0xC0000054
public unsafe int Read(IntPtr buffer, uint count, uint* bytesRead)
{
if (bytesRead != null)
{
*bytesRead = 0;
}
if (count == 0)
{
return HResult.S_OK;
}
try
{
long totalBytesRead = 0;
long remaining = count;
do
{
int streamBytesRead = this.stream.Read(this.streamBuffer, 0, (int)Math.Min(MaxBufferSize, remaining));
if (streamBytesRead == 0)
{
break;
}
Marshal.Copy(this.streamBuffer, 0, new IntPtr(buffer.ToInt64() + totalBytesRead), streamBytesRead);
totalBytesRead += streamBytesRead;
remaining -= streamBytesRead;
} while (remaining > 0);
if (bytesRead != null)
{
*bytesRead = (uint)totalBytesRead;
}
return HResult.S_OK;
}
catch (Exception ex)
{
this.CallbackExceptionInfo = ExceptionDispatchInfo.Capture(ex);
return ex.HResult;
}
}
19
View Source File : StreamIOCallbacks.cs
License : MIT License
Project Creator : 0xC0000054
public unsafe int Write(IntPtr buffer, uint count, uint* bytesWritten)
{
if (bytesWritten != null)
{
*bytesWritten = 0;
}
if (count == 0)
{
return HResult.S_OK;
}
try
{
long offset = 0;
long remaining = count;
do
{
int copySize = (int)Math.Min(MaxBufferSize, remaining);
Marshal.Copy(new IntPtr(buffer.ToInt64() + offset), this.streamBuffer, 0, copySize);
this.stream.Write(this.streamBuffer, 0, copySize);
offset += copySize;
remaining -= copySize;
} while (remaining > 0);
if (bytesWritten != null)
{
*bytesWritten = count;
}
return HResult.S_OK;
}
catch (Exception ex)
{
this.CallbackExceptionInfo = ExceptionDispatchInfo.Capture(ex);
return ex.HResult;
}
}
19
View Source File : WebPNative.cs
License : MIT License
Project Creator : 0xC0000054
[System.Diagnostics.CodeAnalysis.SuppressMessage(
"Microsoft.Design",
"CA1031:DoNotCatchGeneralExceptionTypes",
Justification = "The exception will be re-thrown after WebPSave returns the error code.")]
public WebPEncodingError WriteImageCallback(IntPtr image, UIntPtr imageSize)
{
if (image == IntPtr.Zero)
{
return WebPEncodingError.NullParameter;
}
if (imageSize == UIntPtr.Zero)
{
// Ignore zero-length images.
return WebPEncodingError.Ok;
}
// 81920 is the largest multiple of 4096 that is below the large object heap threshold.
const int MaxBufferSize = 81920;
try
{
long size = checked((long)imageSize.ToUInt64());
int bufferSize = (int)Math.Min(size, MaxBufferSize);
byte[] streamBuffer = new byte[bufferSize];
output.SetLength(size);
long offset = 0;
long remaining = size;
while (remaining > 0)
{
int copySize = (int)Math.Min(MaxBufferSize, remaining);
Marshal.Copy(new IntPtr(image.ToInt64() + offset), streamBuffer, 0, copySize);
output.Write(streamBuffer, 0, copySize);
offset += copySize;
remaining -= copySize;
}
}
catch (OperationCanceledException)
{
return WebPEncodingError.UserAbort;
}
catch (Exception ex)
{
WriteException = ex;
return WebPEncodingError.BadWrite;
}
return WebPEncodingError.Ok;
}
19
View Source File : Decrypter.cs
License : MIT License
Project Creator : 13xforever
public override int Read( byte[] buffer, int offset, int count)
{
if (Position == inputStream.Length)
return 0;
var positionInSector = Position % sectorSize;
var resultCount = 0;
if (positionInSector > 0)
{
var len = (int)Math.Min(Math.Min(count, sectorSize - positionInSector), inputStream.Position - Position);
md5.TransformBlock(bufferedSector, (int)positionInSector, len, buffer, offset);
sha1.TransformBlock(bufferedSector, (int)positionInSector, len, buffer, offset);
sha256.TransformBlock(bufferedSector, (int)positionInSector, len, buffer, offset);
offset += len;
count -= len;
resultCount += len;
Position += len;
if (Position % sectorSize == 0)
SectorPosition++;
}
if (Position == inputStream.Length)
return resultCount;
int readCount;
do
{
readCount = inputStream.ReadExact(tmpSector, 0, sectorSize);
if (readCount < sectorSize)
Array.Clear(tmpSector, readCount, sectorSize - readCount);
var decryptedSector = tmpSector;
if (IsEncrypted(SectorPosition))
{
WasEncrypted = true;
if (readCount % 16 != 0)
{
Log.Debug($"Block has only {(readCount % 16) * 8} bits of data, reading raw sector...");
discStream.Seek(SectorPosition * sectorSize, SeekOrigin.Begin);
var newTmpSector = new byte[sectorSize];
discStream.ReadExact(newTmpSector, 0, sectorSize);
if (!newTmpSector.Take(readCount).SequenceEqual(tmpSector.Take(readCount)))
Log.Warn($"Filesystem data and raw data do not match for sector 0x{SectorPosition:x8}");
tmpSector = newTmpSector;
}
using var aesTransform = aes.CreateDecryptor(decryptionKey, GetSectorIV(SectorPosition));
decryptedSector = aesTransform.TransformFinalBlock(tmpSector, 0, sectorSize);
}
else
WasUnprotected = true;
if (count >= readCount)
{
md5.TransformBlock(decryptedSector, 0, readCount, buffer, offset);
sha1.TransformBlock(decryptedSector, 0, readCount, buffer, offset);
sha256.TransformBlock(decryptedSector, 0, readCount, buffer, offset);
offset += readCount;
count -= readCount;
resultCount += readCount;
Position += readCount;
SectorPosition++;
}
else // partial sector read
{
Buffer.BlockCopy(decryptedSector, 0, bufferedSector, 0, sectorSize);
md5.TransformBlock(decryptedSector, 0, count, buffer, offset);
sha1.TransformBlock(decryptedSector, 0, count, buffer, offset);
sha256.TransformBlock(decryptedSector, 0, count, buffer, offset);
offset += count;
count = 0;
resultCount += count;
Position += count;
}
} while (count > 0 && readCount == sectorSize);
return resultCount;
}
19
View Source File : PkgChecker.cs
License : MIT License
Project Creator : 13xforever
internal static async Task CheckAsync(List<FileInfo> pkgList, int fnameWidth, int sigWidth, int csumWidth, int allCsumsWidth, CancellationToken cancellationToken)
{
TotalFileSize = pkgList.Sum(i => i.Length);
var buf = new byte[1024 * 1024]; // 1 MB
foreach (var item in pkgList)
{
Write($"{item.Name.Trim(fnameWidth).PadRight(fnameWidth)} ");
try
{
CurrentPadding = sigWidth;
CurrentFileSize = item.Length;
if (item.Length < 0xC0 + 0x20) // header + csum at the end
{
Write("invalid pkg".PadLeft(allCsumsWidth) + Environment.NewLine, ConsoleColor.Red);
continue;
}
using var file = File.Open(item.FullName, FileMode.Open, FileAccess.Read, FileShare.Read);
var header = new byte[0xc0];
file.ReadExact(header);
byte[] sha1Sum = null;
using (var sha1 = SHA1.Create())
sha1Sum = sha1.ComputeHash(header, 0, 0x80);
if (!ValidateCmac(header))
Write("cmac".PadLeft(sigWidth) + " ", ConsoleColor.Red);
else if (!ValidateHash(header, sha1Sum))
Write("sha1".PadLeft(sigWidth) + " ", ConsoleColor.Yellow);
else if (!ValidateSigNew(header, sha1Sum))
{
if (!ValidateSigOld(header, sha1Sum))
Write("ecdsa".PadLeft(sigWidth) + " ", ConsoleColor.Red);
else
Write("ok (old)".PadLeft(sigWidth) + " ", ConsoleColor.Yellow);
}
else
Write("ok".PadLeft(sigWidth) + " ", ConsoleColor.Green);
CurrentPadding = csumWidth;
file.Seek(0, SeekOrigin.Begin);
byte[] hash;
using (var sha1 = SHA1.Create())
{
var dataLengthToHash = CurrentFileSize - 0x20;
int read;
do
{
read = await file.ReadAsync(buf, 0, (int)Math.Min(buf.Length, dataLengthToHash - CurrentFileProcessedBytes), cancellationToken).ConfigureAwait(false);
CurrentFileProcessedBytes += read;
sha1.TransformBlock(buf, 0, read, null, 0);
} while (read > 0 && CurrentFileProcessedBytes < dataLengthToHash && !cancellationToken.IsCancellationRequested);
sha1.TransformFinalBlock(buf, 0, 0);
hash = sha1.Hash;
}
if (cancellationToken.IsCancellationRequested)
return;
var expectedHash = new byte[0x14];
file.ReadExact(expectedHash);
CurrentFileProcessedBytes += 0x20;
if (!expectedHash.SequenceEqual(hash))
Write("fail".PadLeft(csumWidth) + Environment.NewLine, ConsoleColor.Red);
else
Write("ok".PadLeft(csumWidth) + Environment.NewLine, ConsoleColor.Green);
}
catch (Exception e)
{
Write("Error" + Environment.NewLine + e.Message + Environment.NewLine, ConsoleColor.Red);
}
finally
{
ProcessedBytes += CurrentFileSize;
CurrentFileProcessedBytes = 0;
CurrentPadding = 0;
}
if (cancellationToken.IsCancellationRequested)
return;
}
}
19
View Source File : WaveFileReader.cs
License : MIT License
Project Creator : 3wz
public static void ReadWaveHeader(Stream stream, out WaveFormat format, out long dataChunkPosition, out int dataChunkLength, List<RiffChunk> chunks)
{
dataChunkPosition = -1;
format = null;
BinaryReader br = new BinaryReader(stream);
if (Encoding.ASCII.GetString(br.ReadBytes(4)) != "RIFF")//WaveInterop.mmioStringToFOURCC("RIFF", 0)
{
throw new FormatException("Not a WAVE file - no RIFF header");
}
uint fileSize = br.ReadUInt32(); // read the file size (minus 8 bytes)
if (Encoding.ASCII.GetString(br.ReadBytes(4)) != "WAVE")//WaveInterop.mmioStringToFOURCC("WAVE", 0)
{
throw new FormatException("Not a WAVE file - no WAVE header");
}
int dataChunkID = BitConverter.ToInt32(Encoding.UTF8.GetBytes("data"), 0); ;//WaveInterop.mmioStringToFOURCC("data", 0)
int formatChunkId = BitConverter.ToInt32(Encoding.UTF8.GetBytes("fmt "), 0); ;//WaveInterop.mmioStringToFOURCC("fmt ", 0)
dataChunkLength = 0;
// sometimes a file has more data than is specified after the RIFF header
long stopPosition = Math.Min(fileSize + 8, stream.Length);
// this -8 is so we can be sure that there are at least 8 bytes for a chunk id and length
while (stream.Position <= stopPosition - 8)
{
Int32 chunkIdentifier = br.ReadInt32();
Int32 chunkLength = br.ReadInt32();
if (chunkIdentifier == dataChunkID)
{
dataChunkPosition = stream.Position;
dataChunkLength = chunkLength;
stream.Position += chunkLength;
}
else if (chunkIdentifier == formatChunkId)
{
format = WaveFormat.FromFormatChunk(br, chunkLength);
}
else
{
// check for invalid chunk length
if (chunkLength < 0 || chunkLength > stream.Length - stream.Position)
{
Debug.Assert(false, String.Format("Invalid chunk length {0}, pos: {1}. length: {2}",
chunkLength, stream.Position, stream.Length));
// an exception will be thrown further down if we haven't got a format and data chunk yet,
// otherwise we will tolerate this file despite it having corrupt data at the end
break;
}
if (chunks != null)
{
chunks.Add(new RiffChunk(chunkIdentifier, chunkLength, stream.Position));
}
stream.Position += chunkLength;
}
}
if (format == null)
{
throw new FormatException("Invalid WAV file - No fmt chunk found");
}
if (dataChunkPosition == -1)
{
throw new FormatException("Invalid WAV file - No data chunk found");
}
}
19
View Source File : UnityPointCloudExample.cs
License : MIT License
Project Creator : 734843327
public void Update()
{
if (PointCloudPrefab != null && m_PointCloudData != null)
{
for (int count = 0; count < Math.Min (m_PointCloudData.Length, numPointsToShow); count++)
{
Vector4 vert = m_PointCloudData [count];
GameObject point = pointCloudObjects [count];
point.transform.position = new Vector3(vert.x, vert.y, vert.z);
}
}
}
19
View Source File : ChunkStreamContext.cs
License : MIT License
Project Creator : a1q123456
internal async Task MultiplexMessageAsync(uint chunkStreamId, Message message)
{
if (!message.MessageHeader.MessageStreamId.HasValue)
{
throw new InvalidOperationException("cannot send message that has not attached to a message stream");
}
byte[] buffer = null;
uint length = 0;
using (var writeBuffer = new ByteBuffer())
{
var context = new Serialization.SerializationContext()
{
Amf0Reader = _amf0Reader,
Amf0Writer = _amf0Writer,
Amf3Reader = _amf3Reader,
Amf3Writer = _amf3Writer,
WriteBuffer = writeBuffer
};
message.Serialize(context);
length = (uint)writeBuffer.Length;
Debug.Assert(length != 0);
buffer = _arrayPool.Rent((int)length);
writeBuffer.TakeOutMemory(buffer);
}
try
{
message.MessageHeader.MessageLength = length;
Debug.Assert(message.MessageHeader.MessageLength != 0);
if (message.MessageHeader.MessageType == 0)
{
message.MessageHeader.MessageType = message.GetType().GetCustomAttribute<RtmpMessageAttribute>().MessageTypes.First();
}
Debug.Assert(message.MessageHeader.MessageType != 0);
Task ret = null;
// chunking
bool isFirstChunk = true;
_rtmpSession.AssertStreamId(message.MessageHeader.MessageStreamId.Value);
for (int i = 0; i < message.MessageHeader.MessageLength;)
{
_previousWriteMessageHeader.TryGetValue(chunkStreamId, out var prevHeader);
var chunkHeaderType = SelectChunkType(message.MessageHeader, prevHeader, isFirstChunk);
isFirstChunk = false;
GenerateBasicHeader(chunkHeaderType, chunkStreamId, out var basicHeader, out var basicHeaderLength);
GenerateMesesageHeader(chunkHeaderType, message.MessageHeader, prevHeader, out var messageHeader, out var messageHeaderLength);
_previousWriteMessageHeader[chunkStreamId] = (MessageHeader)message.MessageHeader.Clone();
var headerLength = basicHeaderLength + messageHeaderLength;
var bodySize = (int)(length - i >= _writeChunkSize ? _writeChunkSize : length - i);
var chunkBuffer = _arrayPool.Rent(headerLength + bodySize);
await _sync.WaitAsync();
try
{
basicHeader.AsSpan(0, basicHeaderLength).CopyTo(chunkBuffer);
messageHeader.AsSpan(0, messageHeaderLength).CopyTo(chunkBuffer.AsSpan(basicHeaderLength));
_arrayPool.Return(basicHeader);
_arrayPool.Return(messageHeader);
buffer.AsSpan(i, bodySize).CopyTo(chunkBuffer.AsSpan(headerLength));
i += bodySize;
var isLastChunk = message.MessageHeader.MessageLength - i == 0;
long offset = 0;
long totalLength = headerLength + bodySize;
long currentSendSize = totalLength;
while (offset != (headerLength + bodySize))
{
if (WriteWindowAcknowledgementSize.HasValue && Interlocked.Read(ref WriteUnAcknowledgedSize) + headerLength + bodySize > WriteWindowAcknowledgementSize.Value)
{
currentSendSize = Math.Min(WriteWindowAcknowledgementSize.Value, currentSendSize);
//var delayCount = 0;
while (currentSendSize + Interlocked.Read(ref WriteUnAcknowledgedSize) >= WriteWindowAcknowledgementSize.Value)
{
await Task.Delay(1);
}
}
var tsk = _ioPipeline.SendRawData(chunkBuffer.AsMemory((int)offset, (int)currentSendSize));
offset += currentSendSize;
totalLength -= currentSendSize;
if (WriteWindowAcknowledgementSize.HasValue)
{
Interlocked.Add(ref WriteUnAcknowledgedSize, currentSendSize);
}
if (isLastChunk)
{
ret = tsk;
}
}
if (isLastChunk)
{
if (message.MessageHeader.MessageType == MessageType.SetChunkSize)
{
var setChunkSize = message as SetChunkSizeMessage;
_writeChunkSize = setChunkSize.ChunkSize;
}
else if (message.MessageHeader.MessageType == MessageType.SetPeerBandwidth)
{
var m = message as SetPeerBandwidthMessage;
ReadWindowAcknowledgementSize = m.WindowSize;
}
else if (message.MessageHeader.MessageType == MessageType.WindowAcknowledgementSize)
{
var m = message as WindowAcknowledgementSizeMessage;
WriteWindowAcknowledgementSize = m.WindowSize;
}
}
}
finally
{
_sync.Release();
_arrayPool.Return(chunkBuffer);
}
}
Debug.Assert(ret != null);
await ret;
}
finally
{
_arrayPool.Return(buffer);
}
}
19
View Source File : FileReader.cs
License : MIT License
Project Creator : Abdesol
static StreamReader AutoDetect(Stream fs, byte firstByte, byte secondByte, Encoding defaultEncoding)
{
int max = (int)Math.Min(fs.Length, 500000); // look at max. 500 KB
const int ASCII = 0;
const int Error = 1;
const int UTF8 = 2;
const int UTF8Sequence = 3;
int state = ASCII;
int sequenceLength = 0;
byte b;
for (int i = 0; i < max; i++) {
if (i == 0) {
b = firstByte;
} else if (i == 1) {
b = secondByte;
} else {
b = (byte)fs.ReadByte();
}
if (b < 0x80) {
// normal ASCII character
if (state == UTF8Sequence) {
state = Error;
break;
}
} else if (b < 0xc0) {
// 10xxxxxx : continues UTF8 byte sequence
if (state == UTF8Sequence) {
--sequenceLength;
if (sequenceLength < 0) {
state = Error;
break;
} else if (sequenceLength == 0) {
state = UTF8;
}
} else {
state = Error;
break;
}
} else if (b >= 0xc2 && b < 0xf5) {
// beginning of byte sequence
if (state == UTF8 || state == ASCII) {
state = UTF8Sequence;
if (b < 0xe0) {
sequenceLength = 1; // one more byte following
} else if (b < 0xf0) {
sequenceLength = 2; // two more bytes following
} else {
sequenceLength = 3; // three more bytes following
}
} else {
state = Error;
break;
}
} else {
// 0xc0, 0xc1, 0xf5 to 0xff are invalid in UTF-8 (see RFC 3629)
state = Error;
break;
}
}
fs.Position = 0;
switch (state) {
case ASCII:
return new StreamReader(fs, IsASCIICompatible(defaultEncoding) ? RemoveBOM(defaultEncoding) : Encoding.ASCII);
case Error:
// When the file seems to be non-UTF8,
// we read it using the user-specified encoding so it is saved again
// using that encoding.
if (IsUnicode(defaultEncoding)) {
// the file is not Unicode, so don't read it using Unicode even if the
// user has chosen Unicode as the default encoding.
defaultEncoding = Encoding.Default; // use system encoding instead
}
return new StreamReader(fs, RemoveBOM(defaultEncoding));
default:
return new StreamReader(fs, UTF8NoBOM);
}
}
19
View Source File : MoviePlayerSample.cs
License : MIT License
Project Creator : absurd-joy
public void SeekTo(long position)
{
long seekPos = Math.Max(0, Math.Min(Duration, position));
if (overlay.isExternalSurface)
{
NativeVideoPlayer.PlaybackPosition = seekPos;
}
else
{
videoPlayer.time = seekPos / 1000.0;
}
}
19
View Source File : Player_Xp.cs
License : GNU Affero General Public License v3.0
Project Creator : ACEmulator
public void GrantLevelProportionalXp(double percent, long min, long max)
{
var nextLevelXP = GetXPBetweenLevels(Level.Value, Level.Value + 1);
var scaledXP = (long)Math.Round(nextLevelXP * percent);
if (max > 0)
scaledXP = Math.Min(scaledXP, max);
if (min > 0)
scaledXP = Math.Max(scaledXP, min);
// apply xp modifiers?
EarnXP(scaledXP, XpType.Quest, ShareType.Allegiance);
}
19
View Source File : StreamExtent.cs
License : MIT License
Project Creator : ADeltaX
public static IEnumerable<StreamExtent> Intersect(params IEnumerable<StreamExtent>[] streams)
{
long extentStart = long.MinValue;
long extentEnd = long.MaxValue;
IEnumerator<StreamExtent>[] enums = new IEnumerator<StreamExtent>[streams.Length];
for (int i = 0; i < streams.Length; ++i)
{
enums[i] = streams[i].GetEnumerator();
if (!enums[i].MoveNext())
{
// Gone past end of one stream (in practice was empty), so no intersections
yield break;
}
}
int overlapsFound = 0;
while (true)
{
// We keep cycling round the streams, until we get streams.Length continuous overlaps
for (int i = 0; i < streams.Length; ++i)
{
// Move stream on past all extents that are earlier than our candidate start point
while (enums[i].Current.Length == 0
|| enums[i].Current.Start + enums[i].Current.Length <= extentStart)
{
if (!enums[i].MoveNext())
{
// Gone past end of this stream, no more intersections possible
yield break;
}
}
// If this stream has an extent that spans over the candidate start point
if (enums[i].Current.Start <= extentStart)
{
extentEnd = Math.Min(extentEnd, enums[i].Current.Start + enums[i].Current.Length);
overlapsFound++;
}
else
{
extentStart = enums[i].Current.Start;
extentEnd = extentStart + enums[i].Current.Length;
overlapsFound = 1;
}
// We've just done a complete loop of all streams, they overlapped this start position
// and we've cut the extent's end down to the shortest run.
if (overlapsFound == streams.Length)
{
yield return new StreamExtent(extentStart, extentEnd - extentStart);
extentStart = extentEnd;
extentEnd = long.MaxValue;
overlapsFound = 0;
}
}
}
}
19
View Source File : AudioPlaybackEngine.cs
License : GNU General Public License v3.0
Project Creator : aenemenate
public int Read(float[] buffer, int offset, int count)
{
var availableSamples = cachedSound.AudioData.Length - position;
var samplesToCopy = Math.Min(availableSamples, count);
Array.Copy(cachedSound.AudioData, position, buffer, offset, samplesToCopy);
position += samplesToCopy;
return (int)samplesToCopy;
}
19
View Source File : Decompress.cs
License : GNU General Public License v3.0
Project Creator : Aeroblast
public void Add(byte[] raw)
{
string ident = Encoding.ASCII.GetString(raw, 0, 4);
if (ident != "CDIC") { throw new UnpackKindleSException("Unexpect Section Header at CDIC"); }
UInt32 phases = Util.GetUInt32(raw, 8);
UInt32 bits = Util.GetUInt32(raw, 12);
long n = Math.Min(1 << (int)bits, phases - slice.Count);
for (int i = 0; i < n; i++)
{
UInt16 off = Util.GetUInt16(raw, (ulong)(16 + i * 2));
UInt16 length = Util.GetUInt16(raw, (ulong)(16 + off));
slice_flag.Add((length & 0x8000) > 0);
slice.Add(Util.SubArray(raw, (ulong)(18 + off), (ulong)(length & 0x7fff)));
}
}
19
View Source File : DataMap.cs
License : MIT License
Project Creator : aerosoul94
protected override void OnPaint(PaintEventArgs e)
{
base.OnPaint(e);
var G = e.Graphics;
var cellPad = 5;
_visibleRows = (_rowInfo.Height / CellSize + cellPad) + 1;
_visibleColumns = _totalColumns;
_startCell = vScrollPos * _totalColumns;
_endCell = _startCell + Math.Min(
_visibleRows * _totalColumns,
CellCount - _startCell);
_visibleCells = _endCell - _startCell;
#if DEBUG
G.DrawRectangle(_frameBorderPen, _columnInfo);
G.DrawRectangle(_frameBorderPen, _rowInfo);
G.DrawRectangle(_frameBorderPen, _grid);
#endif
// Draw column info
for (int c = 0; c < _visibleColumns; c++)
{
G.DrawString((c + 1).ToString().PadLeft(2),
_font,
_fontBrush,
_columnInfo.X + (c * (CellSize + cellPad)),
_columnInfo.Y);
}
// Draw row info
for (int r = 0; r < _visibleRows; r++)
{
var offset = (vScrollPos + r) * (Increment * _visibleColumns);
var row = (vScrollPos + r) + 1;
// Draw row number
G.DrawString(row.ToString().PadLeft(6),
_font,
_fontBrush,
_rowInfo.X,
_rowInfo.Y + (r * (CellSize + cellPad)) + 5);
// Draw offset
G.DrawString(offset.ToString("X16"),
_font,
_fontBrush,
_rowInfo.X + 60,
_rowInfo.Y + (r * (CellSize + cellPad)) + 5);
}
// Draw cells
for (int i = 0; i < _visibleCells; i++)
{
// Get column and row for current cell
int x = (i % (int)_totalColumns); // column
int y = (i / (int)_totalColumns); // row
// Calculate coordinates for this cell
var xPos = (x * (CellSize + cellPad));
var yPos = (y * (CellSize + cellPad));
var cellIndex = (int)(_startCell + (y * _visibleColumns) + x);
var rect = new Rectangle(_grid.X + xPos,
_grid.Y + yPos,
CellSize,
CellSize);
Cells[cellIndex].Rect = rect;
if (Cells[cellIndex].Selected)
{
G.FillRectangle(_highlightBrush,
new Rectangle(
_grid.X + xPos - 4,
_grid.Y + yPos - 4,
CellSize + 9,
CellSize + 9));
}
// Draw filled rectangle
G.FillRectangle(
new SolidBrush(Cells[cellIndex].Color),
rect);
G.DrawRectangle(
_cellBorderPen,
rect);
}
}
19
View Source File : DataMap.cs
License : MIT License
Project Creator : aerosoul94
private void VScrollBar_Scroll(object sender, ScrollEventArgs e)
{
switch (e.Type)
{
case ScrollEventType.SmallDecrement:
if (vScrollPos != vScrollBar.Minimum)
{
vScrollPos--;
e.NewValue = (int)vScrollPos;
Invalidate();
}
break;
case ScrollEventType.SmallIncrement:
if (vScrollPos != vScrollBar.Maximum)
{
vScrollPos++;
e.NewValue = (int)vScrollPos;
Invalidate();
}
break;
case ScrollEventType.LargeDecrement:
vScrollPos -= Math.Min(vScrollPos, this._visibleRows - 2);
vScrollBar.Minimum = 0;
vScrollBar.Maximum = (int)_totalRows;
vScrollBar.Value = (int)vScrollPos;
e.NewValue = (int)vScrollPos;
Invalidate();
break;
case ScrollEventType.LargeIncrement:
vScrollPos += this._visibleRows - 2;
vScrollBar.Minimum = 0;
vScrollBar.Maximum = (int)_totalRows;
vScrollBar.Value = (int)vScrollPos;
e.NewValue = (int)vScrollPos;
Invalidate();
break;
case ScrollEventType.ThumbPosition:
vScrollPos = e.NewValue;
vScrollBar.Minimum = 0;
vScrollBar.Maximum = (int)_totalRows;
Invalidate();
break;
case ScrollEventType.ThumbTrack:
vScrollPos = e.NewValue;
vScrollBar.Minimum = 0;
vScrollBar.Maximum = (int)_totalRows;
Invalidate();
break;
case ScrollEventType.EndScroll:
break;
case ScrollEventType.Last:
break;
case ScrollEventType.First:
break;
default:
throw new NotImplementedException();
}
}
19
View Source File : UploadClient.cs
License : MIT License
Project Creator : agc93
public async Task<UploadedFile> UploadFile(GameRef game, int modId, FileInfo file)
{
int chunckSize = 5242880;
int GetChunkSize(int i)
{
long position = (i * (long)chunckSize);
int toRead = (int)Math.Min(file.Length - position + 1, chunckSize);
return toRead;
}
string GetIdentifier()
{
return $"{file.Length}{file.Name.Replace(".", "")}";
}
int totalChunks = (int)(file.Length / chunckSize);
if (file.Length % chunckSize != 0)
{
totalChunks++;
}
using (var str = file.Open(FileMode.Open, FileAccess.Read, FileShare.Read))
{
for (int i = 0; i < totalChunks; i++)
{
var toRead = GetChunkSize(i);
var path = "/uploads/chunk"
.SetQueryParams(new
{
resumableChunkNumber = totalChunks,
resumableChunkSize = chunckSize,
resumableCurrentChunkSize = GetChunkSize(i),
resumableTotalSize = file.Length,
resumableType = _zipContentType,
resumableIdentifier = GetIdentifier(),
resumableFilename = file.Name,
resumableRelativePath = file.Name,
resumableTotalChunks = totalChunks
});
var getResp = await _httpClient.GetAsync(path.ToString());
if (getResp.StatusCode != HttpStatusCode.NoContent)
{
throw new Exception("I don't even know what this means");
}
byte[] buffer = new byte[toRead];
await str.ReadAsync(buffer, 0, buffer.Length);
using (MultipartFormDataContent form = new MultipartFormDataContent())
{
form.Add(new StringContent((i + 1).ToString()), "resumableChunkNumber");
form.Add(chunckSize.ToContent(), "resumableChunkSize");
form.Add(toRead.ToContent(), "resumableCurrentChunkSize");
form.Add(file.Length.ToContent(), "resumableTotalSize");
form.Add(_zipContentType.ToContent(), "resumableType");
form.Add(GetIdentifier().ToContent(), "resumableIdentifier");
form.Add(file.Name.ToContent(), "resumableFilename");
form.Add(file.Name.ToContent(), "resumableRelativePath");
form.Add(totalChunks.ToContent(), "resumableTotalChunks");
form.Add(new ByteArrayContent(buffer), "file", "blob");
// new StreamContent(str, toRead)
// form.Add(new ByteArrayContent(buffer), "file", "blob");
var response = await _httpClient.PostAsync("/uploads/chunk", form).ConfigureAwait(false);
if (!response.IsSuccessStatusCode)
{
throw new Exception("I don't know what this means either");
}
if (response.Content.Headers.Contains("Content-Type"))
{
var resp = System.Text.Json.JsonSerializer.Deserialize<UploadedFile>(await response.Content.ReadAsStringAsync());
if (!string.IsNullOrWhiteSpace(resp.Id))
{
resp.FileSize = (int)file.Length;
resp.OriginalFile = file.Name;
return resp;
}
}
}
}
}
return null;
}
19
View Source File : BundleContainer.cs
License : GNU Affero General Public License v3.0
Project Creator : aianlinb
public virtual byte[] AppendAndSave(Stream newData, Stream originalData) {
originalData.Seek(offset + 60, SeekOrigin.Begin);
var OldChunkCompressedSizes = new byte[(entry_count - 1) * 4];
originalData.Read(OldChunkCompressedSizes, 0, OldChunkCompressedSizes.Length);
var lastCunkCompressedSize = originalData.ReadByte() | originalData.ReadByte() << 8 | originalData.ReadByte() << 16 | originalData.ReadByte() << 24; //ReadInt32
var lastCunkDecompressedSize = uncompressed_size - (chunk_size * (entry_count - 1));
uncompressed_size = (int)(size_decompressed += newData.Length);
entry_count = uncompressed_size / chunk_size;
if (uncompressed_size % chunk_size != 0) entry_count++;
head_size = entry_count * 4 + 48;
var msToSave = new MemoryStream();
var bw = new BinaryWriter(msToSave);
msToSave.Seek(60 + (entry_count * 4), SeekOrigin.Begin);
var o = new byte[compressed_size - lastCunkCompressedSize];
originalData.Read(o, 0, o.Length);
bw.Write(o);
var lastChunkCompressedData = new byte[lastCunkCompressedSize];
originalData.Read(lastChunkCompressedData, 0, lastCunkCompressedSize);
var lastCunkDecompressedData = new byte[lastCunkDecompressedSize + 64];
_ = OodleLZ_Decompress(lastChunkCompressedData, lastCunkCompressedSize, lastCunkDecompressedData, lastCunkDecompressedSize, 0, 0, 0, IntPtr.Zero, 0, IntPtr.Zero, IntPtr.Zero, IntPtr.Zero, 0, 3);
newData.Seek(0, SeekOrigin.Begin);
compressed_size -= lastCunkCompressedSize;
var NewChunkCompressedSizes = new int[entry_count - (OldChunkCompressedSizes.Length / 4)];
var FirstNewDataChunk = new byte[Math.Min(chunk_size - lastCunkDecompressedSize, newData.Length)];
newData.Read(FirstNewDataChunk, 0, FirstNewDataChunk.Length);
FirstNewDataChunk = lastCunkDecompressedData.Take(lastCunkDecompressedSize).Concat(FirstNewDataChunk).ToArray();
var CompressedChunk = new byte[FirstNewDataChunk.Length + 548];
var CompressedLength = OodleLZ_Compress(encoder, FirstNewDataChunk, FirstNewDataChunk.Length, CompressedChunk, Compression_Level, IntPtr.Zero, 0, 0, IntPtr.Zero, 0);
compressed_size += NewChunkCompressedSizes[0] = CompressedLength;
bw.Write(CompressedChunk, 0, CompressedLength);
for (var i = 1; i < NewChunkCompressedSizes.Length; i++) {
var size = (i + 1 == NewChunkCompressedSizes.Length) ? uncompressed_size - (chunk_size * (entry_count - 1)) : chunk_size;
var b = new byte[size];
newData.Read(b, 0, size);
var by = new byte[b.Length + 548];
var l = OodleLZ_Compress(encoder, b, size, by, Compression_Level, IntPtr.Zero, 0, 0, IntPtr.Zero, 0);
compressed_size += NewChunkCompressedSizes[i] = l;
bw.Write(by, 0, l);
}
size_compressed = compressed_size;
msToSave.Seek(60, SeekOrigin.Begin);
bw.Write(OldChunkCompressedSizes);
for (var i = 0; i < NewChunkCompressedSizes.Length; i++)
bw.Write(NewChunkCompressedSizes[i]);
msToSave.Seek(0, SeekOrigin.Begin);
bw.Write(uncompressed_size);
bw.Write(compressed_size);
bw.Write(head_size);
bw.Write((uint)encoder);
bw.Write(unknown);
bw.Write(size_decompressed);
bw.Write(size_compressed);
bw.Write(entry_count);
bw.Write(chunk_size);
bw.Write(unknown3);
bw.Write(unknown4);
bw.Write(unknown5);
bw.Write(unknown6);
bw.Flush();
var result = msToSave.ToArray();
bw.Close();
return result;
}
19
View Source File : TimeCostRecorder.cs
License : MIT License
Project Creator : aillieo
public void Stop()
{
stopwatch.Stop();
long now = Stopwatch.GetTimestamp();
long lastCost = (now - lastStart) / kTicksPerMillisecond;
timeCostMax = Math.Max(lastCost, timeCostMax);
timeCostMin = Math.Min(lastCost, timeCostMin);
testTimes++;
}
19
View Source File : PublishProcessor.cs
License : Apache License 2.0
Project Creator : akarnokd
void Drain()
{
if (Interlocked.Increment(ref wip) != 1)
{
return;
}
int missed = 1;
int f = consumed;
int lim = limit;
var q = Volatile.Read(ref queue);
for (;;)
{
if (q != null)
{
var s = Volatile.Read(ref subscribers);
int n = s.Length;
if (n != 0)
{
long r = -1L;
for (int i = 0; i < n; i++)
{
var ra = s[i].Requested();
if (ra >= 0L)
{
if (r == -1L)
{
r = ra;
}
else
{
r = Math.Min(r, ra);
}
}
}
bool changed = false;
while (r > 0L)
{
if (Volatile.Read(ref subscribers) != s)
{
changed = true;
break;
}
bool d = Volatile.Read(ref done);
bool empty = !q.Poll(out T v);
if (d && empty)
{
Exception ex = error;
if (ex != null && ex != ExceptionHelper.Terminated)
{
foreach (var inner in Interlocked.Exchange(ref subscribers, Terminated))
{
inner.OnError(ex);
}
} else
{
foreach (var inner in Interlocked.Exchange(ref subscribers, Terminated))
{
inner.OnComplete();
}
}
return;
}
if (empty)
{
break;
}
foreach (var inner in s)
{
inner.OnNext(v);
}
r--;
if (fusionMode != FusionSupport.SYNC)
{
if (++f == lim)
{
f = 0;
upstream.Request(lim);
}
}
}
if (changed)
{
continue;
}
if (r == 0)
{
if (Volatile.Read(ref subscribers) != s)
{
continue;
}
if (Volatile.Read(ref done) && q.IsEmpty())
{
Exception ex = error;
if (ex != null && ex != ExceptionHelper.Terminated)
{
foreach (var inner in Interlocked.Exchange(ref subscribers, Terminated))
{
inner.OnError(ex);
}
}
else
{
foreach (var inner in Interlocked.Exchange(ref subscribers, Terminated))
{
inner.OnComplete();
}
}
return;
}
}
}
}
int w = Volatile.Read(ref wip);
if (w == missed)
{
consumed = f;
missed = Interlocked.Add(ref wip, -missed);
if (missed == 0)
{
break;
}
}
else
{
missed = w;
}
if (q == null)
{
q = Volatile.Read(ref queue);
}
}
}
19
View Source File : HMACBase.cs
License : MIT License
Project Creator : alecgn
internal HMACHashResult ComputeFileHMAC(Enums.HMACAlgorithm hmacAlgorithm, string filePathToComputeHMAC, byte[] key = null,
long offset = 0, long count = 0)
{
if (!File.Exists(filePathToComputeHMAC))
{
return new HMACHashResult()
{
Success = false,
Message = $"{MessageStrings.Common_FileNotFound} \"{filePathToComputeHMAC}\"."
};
}
if (key == null || key.Length == 0)
{
key = CommonMethods.GenerateRandomBytes(HMACOutputLengthDictionary.Instance[hmacAlgorithm] / 8);
}
HMACHashResult result = null;
try
{
byte[] hash = null;
using (var fStream = new FileStream(filePathToComputeHMAC, FileMode.Open, FileAccess.Read, FileShare.None))
{
//offset = (offset == 0 ? 0 : offset);
count = (count == 0 ? fStream.Length : count);
fStream.Position = offset;
var buffer = new byte[(1024 * 4)];
var amount = (count - offset);
using (var hmac = (HMAC)CryptoConfig.CreateFromName(hmacAlgorithm.ToString()))
{
hmac.Key = key;
var percentageDone = 0;
while (amount > 0)
{
var bytesRead = fStream.Read(buffer, 0, (int)Math.Min(buffer.Length, amount));
if (bytesRead > 0)
{
amount -= bytesRead;
if (amount > 0)
{
hmac.TransformBlock(buffer, 0, bytesRead, buffer, 0);
}
else
{
hmac.TransformFinalBlock(buffer, 0, bytesRead);
}
var tmpPercentageDone = (int)(fStream.Position * 100 / count);
if (tmpPercentageDone != percentageDone)
{
percentageDone = tmpPercentageDone;
RaiseOnHashProgress(percentageDone, (percentageDone != 100 ? $"Computing HMAC ({percentageDone}%)..." : $"HMAC computed ({percentageDone}%)."));
}
}
else
{
throw new InvalidOperationException();
}
}
hash = hmac.Hash;
}
}
result = new HMACHashResult()
{
Success = true,
Message = MessageStrings.HMAC_ComputeSuccess,
HashString = Encoding.HighPerformanceHexadecimal.ToHexString(hash),
HashBytes = hash,
Key = key
};
}
catch (Exception ex)
{
result = new HMACHashResult()
{
Success = false,
Message = ex.ToString()
};
}
return result;
}
19
View Source File : AesBase.cs
License : MIT License
Project Creator : alecgn
internal AesDecryptionResult DecryptWithFileStream(string encryptedFilePath, string decryptedFilePath, byte[] key, byte[] IV, CipherMode cipherMode = CipherMode.CBC,
PaddingMode paddingMode = PaddingMode.PKCS7, bool deleteEncryptedFile = false, int kBbufferSize = 4, long startPosition = 0, long endPosition = 0)
{
if (!File.Exists(encryptedFilePath))
{
return new AesDecryptionResult()
{
Success = false,
Message = $"{MessageStrings.Decryption_EncryptedFileNotFound} \"{encryptedFilePath}\"."
};
}
if (string.IsNullOrWhiteSpace(decryptedFilePath))
{
return new AesDecryptionResult()
{
Success = false,
Message = MessageStrings.Decryption_DecryptedFilePathError
};
}
var destinationDirectory = Path.GetDirectoryName(decryptedFilePath);
if (!Directory.Exists(destinationDirectory))
{
return new AesDecryptionResult()
{
Success = false,
Message = $"{MessageStrings.Encryption_DestinationDirectoryNotFound} \"{destinationDirectory}\"."
};
}
_key = key ?? _key;
_IV = IV ?? _IV;
if (_key == null)
{
return new AesDecryptionResult()
{
Success = false,
Message = MessageStrings.Decryption_NullKeyError
};
}
if (_IV == null)
{
return new AesDecryptionResult()
{
Success = false,
Message = MessageStrings.Decryption_NullIVError
};
}
if (endPosition < startPosition)
{
return new AesDecryptionResult()
{
Success = false,
Message = string.Format(MessageStrings.Decryption_EndPositionLessThanStartError, endPosition, startPosition)
};
}
var pathsEqual = decryptedFilePath.Equals(encryptedFilePath, StringComparison.InvariantCultureIgnoreCase);
try
{
using (var aesManaged = new AesManaged())
{
aesManaged.Key = _key;
aesManaged.IV = _IV;
aesManaged.Mode = cipherMode;
aesManaged.Padding = paddingMode;
using (var decryptedFs = File.Open((pathsEqual ? decryptedFilePath + "_tmpdecrypt" : decryptedFilePath), FileMode.Create, FileAccess.Write, FileShare.None))
{
using (var encryptedFs = File.Open(encryptedFilePath, FileMode.Open, FileAccess.Read, FileShare.Read))
{
encryptedFs.Position = startPosition;
using (var decryptor = aesManaged.CreateDecryptor(_key, _IV))
{
using (var cs = new CryptoStream(decryptedFs, decryptor, CryptoStreamMode.Write))
{
//encrypted.CopyTo(cs);
var buffer = new byte[kBbufferSize * 1024];
var totalBytesToRead = ((endPosition == 0 ? encryptedFs.Length : endPosition) - startPosition);
var totalBytesNotRead = totalBytesToRead;
long totalBytesRead = 0;
var percentageDone = 0;
while (totalBytesNotRead > 0)
{
var bytesRead = encryptedFs.Read(buffer, 0, (int)Math.Min(buffer.Length, totalBytesNotRead));
if (bytesRead > 0)
{
cs.Write(buffer, 0, bytesRead);
totalBytesRead += bytesRead;
totalBytesNotRead -= bytesRead;
var tmpPercentageDone = (int)(totalBytesRead * 100 / totalBytesToRead);
if (tmpPercentageDone != percentageDone)
{
percentageDone = tmpPercentageDone;
RaiseOnDecryptionProgress(percentageDone, (percentageDone != 100 ? $"Decrypting ({percentageDone}%)..." : $"Decrypted ({percentageDone}%)."));
}
}
}
}
}
}
}
}
if (pathsEqual)
{
CommonMethods.ClearFileAttributes(encryptedFilePath); // set "Normal" FileAttributes to avoid errors while trying to delete the file below
File.Delete(encryptedFilePath);
File.Move(decryptedFilePath + "_tmpdecrypt", decryptedFilePath);
}
if (deleteEncryptedFile && !pathsEqual)
{
CommonMethods.ClearFileAttributes(encryptedFilePath); // set "Normal" FileAttributes to avoid errors while trying to delete the file below
File.Delete(encryptedFilePath);
}
var message = string.Format(MessageStrings.Decryption_FileDecryptSuccess, encryptedFilePath, decryptedFilePath);
message += (deleteEncryptedFile && !pathsEqual ? $"\n{string.Format(MessageStrings.Encryption_FileDeleted, encryptedFilePath)}" : "");
return new AesDecryptionResult()
{
Success = true,
Message = message,
Key = _key,
IV = _IV,
AesCipherMode = (AesCipherMode)cipherMode,
PaddingMode = paddingMode
};
}
catch (Exception ex)
{
return new AesDecryptionResult()
{
Success = false,
Message = $"{MessageStrings.Decryption_ExceptionError}\n{ex.ToString()}"
};
}
}
19
View Source File : HashBase.cs
License : MIT License
Project Creator : alecgn
internal GenericHashResult ComputeFileHash(Enums.HashAlgorithm hashAlgorithm, string filePathToComputeHash,
long offset = 0, long count = 0)
{
if (!File.Exists(filePathToComputeHash))
{
return new GenericHashResult()
{
Success = false,
Message = $"{MessageStrings.Common_FileNotFound} \"{filePathToComputeHash}\"."
};
}
GenericHashResult result = null;
HashAlgorithm hashAlg = null;
#if CORERT
switch (hashAlgorithm)
{
case Enums.HashAlgorithm.MD5:
hashAlg = MD5.Create();
break;
case Enums.HashAlgorithm.SHA1:
hashAlg = SHA1.Create();
break;
case Enums.HashAlgorithm.SHA256:
hashAlg = SHA256.Create();
break;
case Enums.HashAlgorithm.SHA384:
hashAlg = SHA384.Create();
break;
case Enums.HashAlgorithm.SHA512:
hashAlg = SHA512.Create();
break;
case Enums.HashAlgorithm.BCrypt:
default:
break;
}
#else
hashAlg = (HashAlgorithm)CryptoConfig.CreateFromName(hashAlgorithm.ToString());
#endif
try
{
byte[] hash = null;
using (var fStream = new FileStream(filePathToComputeHash, FileMode.Open, FileAccess.Read, FileShare.None))
{
//offset = (offset == 0 ? 0 : offset);
count = (count == 0 ? fStream.Length : count);
fStream.Position = offset;
var buffer = new byte[(1024 * 4)];
var amount = (count - offset);
using (hashAlg)
{
var percentageDone = 0;
while (amount > 0)
{
var bytesRead = fStream.Read(buffer, 0, (int)Math.Min(buffer.Length, amount));
if (bytesRead > 0)
{
amount -= bytesRead;
if (amount > 0)
{
hashAlg.TransformBlock(buffer, 0, bytesRead, buffer, 0);
}
else
{
hashAlg.TransformFinalBlock(buffer, 0, bytesRead);
}
var tmpPercentageDone = (int)(fStream.Position * 100 / count);
if (tmpPercentageDone != percentageDone)
{
percentageDone = tmpPercentageDone;
RaiseOnHashProgress(percentageDone, (percentageDone != 100 ? $"Computing hash ({percentageDone}%)..." : $"Hash computed ({percentageDone}%)."));
}
}
else
{
throw new InvalidOperationException();
}
}
hash = hashAlg.Hash;
}
}
result = new GenericHashResult()
{
Success = true,
Message = MessageStrings.Hash_ComputeSuccess,
HashString = Encoding.HighPerformanceHexadecimal.ToHexString(hash),
HashBytes = hash
};
}
catch (Exception ex)
{
result = new GenericHashResult()
{
Success = false,
Message = ex.ToString()
};
}
return result;
}
19
View Source File : Chat.cs
License : MIT License
Project Creator : alexyakunin
public static string GetP2PChatId(long user1Id, long user2Id)
{
if (user1Id == user2Id)
throw new ArgumentOutOfRangeException(nameof(user2Id));
var lowId = Math.Min(user1Id, user2Id);
var highId = Math.Max(user1Id, user2Id);
return $"p2p/{lowId}/{highId}";
}
19
View Source File : Piece.cs
License : MIT License
Project Creator : aljazsim
public long GetBlockLength(long blockOffset)
{
return Math.Min(this.BlockLength, this.PieceLength - blockOffset);
}
19
View Source File : MessagePackSerializer.cs
License : Apache License 2.0
Project Creator : allenai
public static T Deserialize<T>(Stream stream, MessagePackSerializerOptions options = null, CancellationToken cancellationToken = default)
{
if (TryDeserializeFromMemoryStream(stream, options, cancellationToken, out T result))
{
return result;
}
using (var sequenceRental = SequencePool.Shared.Rent())
{
var sequence = sequenceRental.Value;
try
{
int bytesRead;
do
{
cancellationToken.ThrowIfCancellationRequested();
Span<byte> span = sequence.GetSpan(stream.CanSeek ? (int)Math.Min(MaxHintSize, stream.Length - stream.Position) : 0);
bytesRead = stream.Read(span);
sequence.Advance(bytesRead);
}
while (bytesRead > 0);
}
catch (Exception ex)
{
throw new MessagePackSerializationException("Error occurred while reading from the stream.", ex);
}
return DeserializeFromSequenceAndRewindStreamIfPossible<T>(stream, options, sequence, cancellationToken);
}
}
19
View Source File : MessagePackSerializer.cs
License : Apache License 2.0
Project Creator : allenai
public static async ValueTask<T> DeserializeAsync<T>(Stream stream, MessagePackSerializerOptions options = null, CancellationToken cancellationToken = default)
{
if (TryDeserializeFromMemoryStream(stream, options, cancellationToken, out T result))
{
return result;
}
using (var sequenceRental = SequencePool.Shared.Rent())
{
var sequence = sequenceRental.Value;
try
{
int bytesRead;
do
{
Memory<byte> memory = sequence.GetMemory(stream.CanSeek ? (int)Math.Min(MaxHintSize, stream.Length - stream.Position) : 0);
bytesRead = await stream.ReadAsync(memory, cancellationToken).ConfigureAwait(false);
sequence.Advance(bytesRead);
}
while (bytesRead > 0);
}
catch (Exception ex)
{
throw new MessagePackSerializationException("Error occurred while reading from the stream.", ex);
}
return DeserializeFromSequenceAndRewindStreamIfPossible<T>(stream, options, sequence, cancellationToken);
}
}
19
View Source File : ChunkingReadStream.cs
License : Apache License 2.0
Project Creator : aloneguid
public override async Task<int> ReadAsync(byte[] buffer, int offset, int count, CancellationToken cancellationToken)
{
if(Files.HasLogger)
{
Files.Log($"read: offset: {offset}, count: {count}, length: {_length}");
}
int readTotal = 0;
do
{
int toRead = (int)Math.Min(_maxChunkLength, count - readTotal);
if(Files.HasLogger)
{
Files.Log($"to read: {toRead}");
}
if(toRead <= 0)
break;
int read = await SmallReadAsync(_pos + readTotal, buffer, offset + readTotal, toRead);
readTotal += read;
if(read < toRead)
break;
} while(true);
_pos += readTotal;
return readTotal;
}
19
View Source File : ChunkingReadStream.cs
License : Apache License 2.0
Project Creator : aloneguid
public override int Read(byte[] buffer, int offset, int count)
{
if(Files.HasLogger)
{
Files.Log($"read: offset: {offset}, count: {count}");
}
int readTotal = 0;
do
{
int toRead = (int)Math.Min(_maxChunkLength, count - readTotal);
if(Files.HasLogger)
{
Files.Log($"to read: {toRead}");
}
if(toRead <= 0)
return readTotal;
int read = SmallRead(_pos + readTotal, buffer, offset + readTotal, toRead);
readTotal += read;
if(read < toRead)
break;
} while(true);
_pos += readTotal;
if(Files.HasLogger)
{
Files.Log($"read {readTotal}");
}
return readTotal;
}
19
View Source File : DataStream.cs
License : MIT License
Project Creator : amerkoleci
public override int Read(byte[] buffer, int offset, int count)
{
int minCount = (int)Math.Min(RemainingLength, count);
return ReadRange(buffer, offset, minCount);
}
19
View Source File : StreamIStream.cs
License : GNU General Public License v3.0
Project Creator : anydream
public void CopyTo(IStream pstm, long cb, IntPtr pcbRead, IntPtr pcbWritten) {
if (cb > int.MaxValue)
cb = int.MaxValue;
else if (cb < 0)
cb = 0;
int sizeToRead = (int)cb;
if (stream.Position + sizeToRead < sizeToRead || stream.Position + sizeToRead > stream.Length)
sizeToRead = (int)(stream.Length - Math.Min(stream.Position, stream.Length));
var buffer = new byte[sizeToRead];
Read(buffer, sizeToRead, pcbRead);
if (pcbRead != IntPtr.Zero)
Marshal.WriteInt64(pcbRead, Marshal.ReadInt32(pcbRead));
pstm.Write(buffer, buffer.Length, pcbWritten);
if (pcbWritten != IntPtr.Zero)
Marshal.WriteInt64(pcbWritten, Marshal.ReadInt32(pcbWritten));
}
19
View Source File : StrongNameSigner.cs
License : GNU General Public License v3.0
Project Creator : anydream
byte[] StrongNameHashData(AssemblyHashAlgorithm hashAlg, long snSigOffset, uint snSigSize) {
var reader = new BinaryReader(stream);
snSigOffset += baseOffset;
long snSigOffsetEnd = snSigOffset + snSigSize;
using (var hasher = new AssemblyHash(hashAlg)) {
byte[] buffer = new byte[0x8000];
// Hash the DOS header. It's defined to be all data from the start of
// the file up to the NT headers.
stream.Position = baseOffset + 0x3C;
uint ntHeadersOffs = reader.ReadUInt32();
stream.Position = baseOffset;
hasher.Hash(stream, ntHeadersOffs, buffer);
// Hash NT headers, but hash authenticode + checksum as 0s
stream.Position += 6;
int numSections = reader.ReadUInt16();
stream.Position -= 8;
hasher.Hash(stream, 0x18, buffer); // magic + FileHeader
bool is32bit = reader.ReadUInt16() == 0x010B;
stream.Position -= 2;
int optHeaderSize = is32bit ? 0x60 : 0x70;
if (stream.Read(buffer, 0, optHeaderSize) != optHeaderSize)
throw new IOException("Could not read data");
// Clear checksum
for (int i = 0; i < 4; i++)
buffer[0x40 + i] = 0;
hasher.Hash(buffer, 0, optHeaderSize);
const int imageDirsSize = 16 * 8;
if (stream.Read(buffer, 0, imageDirsSize) != imageDirsSize)
throw new IOException("Could not read data");
// Clear authenticode data dir
for (int i = 0; i < 8; i++)
buffer[4 * 8 + i] = 0;
hasher.Hash(buffer, 0, imageDirsSize);
// Hash section headers
long sectHeadersOffs = stream.Position;
hasher.Hash(stream, (uint)numSections * 0x28, buffer);
// Hash all raw section data but make sure we don't hash the location
// where the strong name signature will be stored.
for (int i = 0; i < numSections; i++) {
stream.Position = sectHeadersOffs + i * 0x28 + 0x10;
uint sizeOfRawData = reader.ReadUInt32();
uint pointerToRawData = reader.ReadUInt32();
stream.Position = baseOffset + pointerToRawData;
while (sizeOfRawData > 0) {
var pos = stream.Position;
if (snSigOffset <= pos && pos < snSigOffsetEnd) {
uint skipSize = (uint)(snSigOffsetEnd - pos);
if (skipSize >= sizeOfRawData)
break;
sizeOfRawData -= skipSize;
stream.Position += skipSize;
continue;
}
if (pos >= snSigOffsetEnd) {
hasher.Hash(stream, sizeOfRawData, buffer);
break;
}
uint maxLen = (uint)Math.Min(snSigOffset - pos, sizeOfRawData);
hasher.Hash(stream, maxLen, buffer);
sizeOfRawData -= maxLen;
}
}
return hasher.ComputeHash();
}
}
19
View Source File : MemoryStreamCreator.cs
License : GNU General Public License v3.0
Project Creator : anydream
public IImageStream Create(FileOffset offset, long length) {
if (offset < 0 || length < 0)
return MemoryImageStream.CreateEmpty();
int offs = (int)Math.Min((long)dataLength, (long)offset);
int len = (int)Math.Min((long)dataLength - offs, length);
return new MemoryImageStream(offset, data, dataOffset + offs, len);
}
19
View Source File : UnmanagedMemoryImageStream.cs
License : GNU General Public License v3.0
Project Creator : anydream
public unsafe IImageStream Create(FileOffset offset, long length) {
if ((long)offset < 0 || length < 0)
return MemoryImageStream.CreateEmpty();
long offs = Math.Min(Length, (long)offset);
long len = Math.Min(Length - offs, length);
return new UnmanagedMemoryImageStream(owner, (FileOffset)((long)fileOffset + (long)offset), startAddr + (long)offs, len);
}
19
View Source File : UnmanagedMemoryImageStream.cs
License : GNU General Public License v3.0
Project Creator : anydream
public unsafe byte[] ReadBytes(int size) {
if (size < 0)
throw new IOException("Invalid size");
size = (int)Math.Min(size, Length - Math.Min(Length, Position));
var newData = new byte[size];
Marshal.Copy(new IntPtr((byte*)owner.Address + currentAddr), newData, 0, size);
currentAddr += size;
return newData;
}
19
View Source File : UnmanagedMemoryImageStream.cs
License : GNU General Public License v3.0
Project Creator : anydream
public int Read(byte[] buffer, int offset, int length) {
if (length < 0)
throw new IOException("Invalid size");
length = (int)Math.Min(length, Length - Math.Min(Length, Position));
Marshal.Copy(new IntPtr((byte*)owner.Address + currentAddr), buffer, offset, length);
currentAddr += length;
return length;
}
19
View Source File : UnmanagedMemoryStreamCreator.cs
License : GNU General Public License v3.0
Project Creator : anydream
public unsafe IImageStream Create(FileOffset offset, long length) {
if (offset < 0 || length < 0)
return MemoryImageStream.CreateEmpty();
long offs = Math.Min((long)dataLength, (long)offset);
long len = Math.Min((long)dataLength - offs, length);
return new UnmanagedMemoryImageStream(this, offset, offs, len);
}
19
View Source File : MemoryImageStream.cs
License : GNU General Public License v3.0
Project Creator : anydream
public IImageStream Create(FileOffset offset, long length) {
if ((long)offset < 0 || length < 0)
return MemoryImageStream.CreateEmpty();
int offs = (int)Math.Min((long)Length, (long)offset);
int len = (int)Math.Min((long)Length - offs, length);
return new MemoryImageStream((FileOffset)((long)fileOffset + (long)offset), data, dataOffset + offs, len);
}
19
View Source File : FailoverProvider.cs
License : Apache License 2.0
Project Creator : apache
private long NextReconnectDelay()
{
if (nextReconnectDelay == -1)
{
nextReconnectDelay = failoverProvider.ReconnectDelay;
}
if (failoverProvider.UseReconnectBackOff && ReconnectAttempts > 1)
{
// Exponential increment of reconnect delay.
nextReconnectDelay = (long) Math.Round(nextReconnectDelay * failoverProvider.ReconnectBackOffMultiplier);
if (nextReconnectDelay > failoverProvider.MaxReconnectDelay)
{
nextReconnectDelay = failoverProvider.MaxReconnectDelay;
}
}
long randomFactor = (long)((1 - 2 * GetRandomDouble()) * failoverProvider.ReconnectDelayRandomFactor * nextReconnectDelay);
return Math.Max(0, Math.Min(failoverProvider.MaxReconnectDelay, nextReconnectDelay + randomFactor));
}
19
View Source File : MathImplementation.cs
License : MIT License
Project Creator : apexsharp
public long min(long longValue1, long longValue2) => Min(longValue1, longValue2);
19
View Source File : SlotStream.cs
License : GNU Lesser General Public License v3.0
Project Creator : Apollo3zehn
public override int Read(byte[] buffer, int offset, int count)
{
var length = (int)Math.Min(this.Length - this.Position, count);
_stream = this.EnsureStream();
var actualLength = _stream.Read(buffer, offset, length);
// If file is shorter than slot: fill remaining buffer with zeros.
buffer
.AsSpan()
.Slice(offset + actualLength, length - actualLength)
.Fill(0);
_position += length;
return length;
}
19
View Source File : Socks5Bytestreams.cs
License : MIT License
Project Creator : araditc
void SendData(SISession session, Stream stream) {
long left = session.Size;
try {
while (left > 0) {
byte[] buffer = new byte[4096];
int read = session.Stream.Read(buffer, 0,
(int) Math.Min(left, buffer.Length));
if (read > 0)
stream.Write(buffer, 0, read);
else
break;
left = left - read;
// Update the byte count and raise the 'BytesTransferred' event.
session.Count = session.Count + read;
BytesTransferred.Raise(this, new BytesTransferredEventArgs(session));
}
} catch (ObjectDisposedException) {
// This means the IO-stream has been disposed because we cancelled
// the transfer. Just fall through.
} finally {
// Tear down the SI session.
siFileTransfer.InvalidateSession(session.Sid);
// If not all bytes have been transferred, the data-transfer must have
// been aborted prematurely.
if (session.Count < session.Size)
TransferAborted.Raise(this, new TransferAbortedEventArgs(session));
}
}
19
View Source File : StreamCopyOperation.cs
License : Apache License 2.0
Project Creator : aspnet
[SuppressMessage("Microsoft.Design", "CA1031:DoNotCatchGeneralExceptionTypes", Justification = "Redirecting")]
private void ReadNextSegment()
{
// The natural end of the range.
if (_bytesRemaining.HasValue && _bytesRemaining.Value <= 0)
{
Complete();
return;
}
if (CheckCancelled())
{
return;
}
try
{
int readLength = _buffer.Length;
if (_bytesRemaining.HasValue)
{
readLength = (int)Math.Min(_bytesRemaining.Value, (long)readLength);
}
IAsyncResult async = _source.BeginRead(_buffer, 0, readLength, _readCallback, null);
if (async.CompletedSynchronously)
{
int read = _source.EndRead(async);
WriteToOutputStream(read);
}
}
catch (Exception ex)
{
Fail(ex);
}
}
19
View Source File : RangeHelpers.cs
License : Apache License 2.0
Project Creator : aspnet
internal static IList<Tuple<long, long>> NormalizeRanges(IList<Tuple<long?, long?>> ranges, long length)
{
IList<Tuple<long, long>> normalizedRanges = new List<Tuple<long, long>>(ranges.Count);
for (int i = 0; i < ranges.Count; i++)
{
Tuple<long?, long?> range = ranges[i];
long? start = range.Item1, end = range.Item2;
// X-[Y]
if (start.HasValue)
{
if (start.Value >= length)
{
// Not satisfiable, skip/discard.
continue;
}
if (!end.HasValue || end.Value >= length)
{
end = length - 1;
}
}
else
{
// suffix range "-X" e.g. the last X bytes, resolve
if (end.Value == 0)
{
// Not satisfiable, skip/discard.
continue;
}
long bytes = Math.Min(end.Value, length);
start = length - bytes;
end = start + bytes - 1;
}
normalizedRanges.Add(new Tuple<long, long>(start.Value, end.Value));
}
return normalizedRanges;
}
19
View Source File : FileSystemCache.cs
License : MIT License
Project Creator : ASStoredProcedures
private static void ClearStandbyFileSystemCacheByConsumingAvailableMemory()
{
//consume all available memory then free it, which will wipe out the standby cache
Context.TraceEvent(1, 1, "Clearing standby cache by consuming all available memory");
//get the page size. will need to write at least one byte per page to make sure that page is committed to this process' working set: http://blogs.msdn.com/b/ntdebugging/archive/2007/11/27/too-much-cache.aspx?CommentPosted=true&PageIndex=2#comments
SYSTEM_INFO sysinfo = new SYSTEM_INFO();
GetSystemInfo(ref sysinfo);
Context.TraceEvent(1, 2, "Page size on this server is " + sysinfo.dwPageSize + " bytes");
System.Diagnostics.PerformanceCounter pcAvailableBytes = null;
long lngAvailableBytes = 0;
pcAvailableBytes = new System.Diagnostics.PerformanceCounter("Memory", "Available Bytes", true);
lngAvailableBytes = (long)pcAvailableBytes.NextValue();
Context.TraceEvent(1, 3, "Available Bytes after clearing active cache: " + lngAvailableBytes);
long lngRemainingBytes = lngAvailableBytes - (1024 * 1024); //take up all available memory minus 1MB
System.Collections.Generic.List<IntPtr> listPtrMem = new System.Collections.Generic.List<IntPtr>();
try
{
Context.TraceEvent(1, 4, "Preparing to consume " + lngRemainingBytes + " bytes of memory");
while (lngRemainingBytes > 0)
{
//figure out the next allocation size
int iAllocLen = (int)Math.Min((long)(sysinfo.dwPageSize * 1024), lngRemainingBytes);
lngRemainingBytes -= iAllocLen;
//allocate this memory
listPtrMem.Add(Marshal.AllocHGlobal(iAllocLen));
//write one byte per page which is the minimum necessary to make sure this page gets committed to this process' working set
for (int j = 0; j < iAllocLen; j += (int)sysinfo.dwPageSize)
{
Marshal.WriteByte(listPtrMem[listPtrMem.Count - 1], j, (byte)1);
}
}
lngAvailableBytes = (long)pcAvailableBytes.NextValue();
Context.TraceEvent(1, 5, "Available Bytes after consuming memory: " + lngAvailableBytes);
}
catch (OutOfMemoryException ex)
{
Context.TraceEvent(1, 5, "Received OutOfMemoryException: " + ex.Message);
Context.TraceEvent(1, 10, "Was able to consume desired memory except for the following number of bytes: " + lngRemainingBytes);
}
finally
{
// dont forget to free up the memory.
foreach (IntPtr ptrMem in listPtrMem)
{
if (ptrMem != IntPtr.Zero)
Marshal.FreeHGlobal(ptrMem);
}
}
lngAvailableBytes = (long)pcAvailableBytes.NextValue();
Context.TraceEvent(1, 6, "Available Bytes after freeing consumed memory: " + lngAvailableBytes);
}
19
View Source File : FileReader.cs
License : MIT License
Project Creator : AvaloniaUI
private static StreamReader AutoDetect(Stream fs, byte firstByte, byte secondByte, Encoding defaultEncoding)
{
var max = (int)Math.Min(fs.Length, 500000); // look at max. 500 KB
// ReSharper disable InconsistentNaming
const int ASCII = 0;
const int Error = 1;
const int UTF8 = 2;
const int UTF8Sequence = 3;
// ReSharper restore InconsistentNaming
var state = ASCII;
var sequenceLength = 0;
for (var i = 0; i < max; i++)
{
byte b;
if (i == 0)
{
b = firstByte;
}
else if (i == 1)
{
b = secondByte;
}
else
{
b = (byte)fs.ReadByte();
}
if (b < 0x80)
{
// normal ASCII character
if (state == UTF8Sequence)
{
state = Error;
break;
}
}
else if (b < 0xc0)
{
// 10xxxxxx : continues UTF8 byte sequence
if (state == UTF8Sequence)
{
--sequenceLength;
if (sequenceLength < 0)
{
state = Error;
break;
}
else if (sequenceLength == 0)
{
state = UTF8;
}
}
else
{
state = Error;
break;
}
}
else if (b >= 0xc2 && b < 0xf5)
{
// beginning of byte sequence
if (state == UTF8 || state == ASCII)
{
state = UTF8Sequence;
if (b < 0xe0)
{
sequenceLength = 1; // one more byte following
}
else if (b < 0xf0)
{
sequenceLength = 2; // two more bytes following
}
else
{
sequenceLength = 3; // three more bytes following
}
}
else
{
state = Error;
break;
}
}
else
{
// 0xc0, 0xc1, 0xf5 to 0xff are invalid in UTF-8 (see RFC 3629)
state = Error;
break;
}
}
fs.Position = 0;
switch (state)
{
case ASCII:
// TODO: Encoding.ASCII
return new StreamReader(fs, IsAsciiCompatible(defaultEncoding) ? RemoveBom(defaultEncoding) : Encoding.UTF8);
case Error:
// When the file seems to be non-UTF8,
// we read it using the user-specified encoding so it is saved again
// using that encoding.
if (IsUnicode(defaultEncoding))
{
// the file is not Unicode, so don't read it using Unicode even if the
// user has chosen Unicode as the default encoding.
defaultEncoding = Encoding.UTF8; // use system encoding instead
}
return new StreamReader(fs, RemoveBom(defaultEncoding));
default:
return new StreamReader(fs, UTF8NoBOM);
}
}
19
View Source File : Rectangle.cs
License : GNU Lesser General Public License v2.1
Project Creator : axiom3d
[OgreVersion(1, 7, 2)]
public Rectangle Merge(Rectangle rhs)
{
if (IsNull)
{
this = rhs;
}
else if (!rhs.IsNull)
{
Left = System.Math.Min(Left, rhs.Left);
Right = System.Math.Max(Right, rhs.Right);
Top = System.Math.Min(Top, rhs.Top);
Bottom = System.Math.Max(Bottom, rhs.Bottom);
}
return this;
}
19
View Source File : Rectangle.cs
License : GNU Lesser General Public License v2.1
Project Creator : axiom3d
[OgreVersion(1, 7, 2)]
internal static Rectangle Intersect(Rectangle lhs, Rectangle rhs)
{
var ret = new Rectangle();
if (lhs.IsNull || rhs.IsNull)
{
//empty
return ret;
}
else
{
ret.Left = System.Math.Min(lhs.Left, rhs.Left);
ret.Right = System.Math.Max(lhs.Right, rhs.Right);
ret.Top = System.Math.Min(lhs.Top, rhs.Top);
ret.Bottom = System.Math.Max(lhs.Bottom, rhs.Bottom);
}
if (ret.Left > ret.Right || ret.Top > ret.Bottom)
{
// no intersection, return empty
ret.IsNull = true;
}
return ret;
}