Here are examples of the C# API System.IO.Stream.Seek(long, System.IO.SeekOrigin) taken from open source projects. By voting up, you can indicate which examples are the most useful and appropriate.
2902 Examples
19
Source : CelesteNetBinaryWriter.cs
with MIT License
from 0x0ade
with MIT License
from 0x0ade
/// <summary>
/// Back-patches the previously reserved size dummy with the number of bytes
/// written after it, then restores the stream to its original position.
/// </summary>
public virtual void UpdateSizeDummy() {
    // Nothing was reserved, nothing to patch.
    if (SizeDummySize == 0)
        return;
    Flush();
    long endPos = BaseStream.Position;
    long payload = endPos - (SizeDummyIndex + SizeDummySize);
    BaseStream.Seek(SizeDummyIndex, SeekOrigin.Begin);
    switch (SizeDummySize) {
        case 1:
            if (payload > byte.MaxValue)
                payload = byte.MaxValue;
            Write((byte) payload);
            break;
        case 4:
            if (payload > uint.MaxValue)
                payload = uint.MaxValue;
            Write((uint) payload);
            break;
        default:
            // Any other reserved size is written as a 16 bit field.
            if (payload > ushort.MaxValue)
                payload = ushort.MaxValue;
            Write((ushort) payload);
            break;
    }
    Flush();
    BaseStream.Seek(endPos, SeekOrigin.Begin);
}
19
Source : DisposeActionStream.cs
with MIT License
from 0x0ade
with MIT License
from 0x0ade
/// <summary>Delegates seeking straight to the wrapped stream.</summary>
public override long Seek(long offset, SeekOrigin origin)
{
    return Inner.Seek(offset, origin);
}
19
Source : PositionAwareStream.cs
with MIT License
from 0x0ade
with MIT License
from 0x0ade
/// <summary>
/// Seeks the wrapped stream while keeping the locally tracked position in sync.
/// </summary>
public override long Seek(long offset, SeekOrigin origin) {
    // Mirror the effect of the seek on the cached position.
    // NOTE(review): SeekOrigin.End is intentionally not tracked here —
    // confirm callers never seek relative to the end.
    if (origin == SeekOrigin.Begin)
        _Position = offset;
    else if (origin == SeekOrigin.Current)
        _Position += offset;
    return Inner.Seek(offset, origin);
}
19
Source : StreamSegment.cs
with MIT License
from 0xC0000054
with MIT License
from 0xC0000054
/// <summary>
/// Seeks within the segment, translating the requested offset into an absolute
/// position on the underlying stream and validating that it stays inside the
/// segment's [origin, length] window.
/// </summary>
/// <param name="offset">Offset relative to <paramref name="origin"/>.</param>
/// <param name="origin">Reference point for the seek.</param>
/// <returns>The new absolute position of the underlying stream.</returns>
public override long Seek(long offset, SeekOrigin origin)
{
    VerifyNotDisposed();
    long tempPosition;
    switch (origin)
    {
        case SeekOrigin.Begin:
            tempPosition = unchecked(this.origin + offset);
            // BUG FIX: previously this branch validated the raw 'offset'
            // against this.origin, which is inconsistent with the Current/End
            // branches; validate the translated absolute position instead.
            if (tempPosition < this.origin || tempPosition > this.length)
            {
                ExceptionUtil.ThrowArgumentOutOfRangeException(nameof(offset), "The offset is not within the stream segment.");
            }
            break;
        case SeekOrigin.Current:
            tempPosition = unchecked(this.stream.Position + offset);
            if (tempPosition < this.origin || tempPosition > this.length)
            {
                ExceptionUtil.ThrowArgumentOutOfRangeException(nameof(offset), "The offset is not within the stream segment.");
            }
            break;
        case SeekOrigin.End:
            tempPosition = unchecked(this.length + offset);
            if (tempPosition < this.origin || tempPosition > this.length)
            {
                ExceptionUtil.ThrowArgumentOutOfRangeException(nameof(offset), "The offset is not within the stream segment.");
            }
            break;
        default:
            throw new ArgumentException("Unknown SeekOrigin value.");
    }
    return this.stream.Seek(tempPosition, origin);
}
19
Source : ResponseExtensions.cs
with Apache License 2.0
from 0xFireball
with Apache License 2.0
from 0xFireball
/// <summary>
/// Builds a 206 Partial Content response for a Range request, positioning the
/// stream at the requested start offset and emitting range headers.
/// </summary>
public static Response FromPartialStream(this IResponseFormatter response, Request request, Stream stream,
    string contentType)
{
    // Total number of bytes available in the source stream.
    var totalLength = stream.Length;
    // Base streamed response with keep-alive and range-support headers.
    var partialResponse = response.FromStream(stream, contentType)
        .WithHeader("Connection", "Keep-alive")
        .WithHeader("Accept-ranges", "Bytes");
    partialResponse.StatusCode = HttpStatusCode.PartialContent;
    long rangeStart = 0;
    foreach (var rangeHeader in request.Headers["Range"])
    {
        // Header shape: "bytes=<start>-<end?>".
        var rangeSpec = rangeHeader.Split('=')[1];
        var match = Regex.Match(rangeSpec, @"(\d+)-(\d+)?");
        var startText = match.Groups[1].Value;
        var rangeEnd = totalLength - 1;
        if (!string.IsNullOrWhiteSpace(match.Groups[2]?.Value))
        {
            rangeEnd = Convert.ToInt64(match.Groups[2].Value);
        }
        rangeStart = Convert.ToInt64(startText);
        var contentLength = totalLength - rangeStart;
        partialResponse.WithHeader("Content-range", "Bytes " + startText + "-" + rangeEnd + "/" + totalLength);
        partialResponse.WithHeader("Content-length", contentLength.ToString(CultureInfo.InvariantCulture));
    }
    // Position the stream at the requested start before it is written out.
    stream.Seek(rangeStart, SeekOrigin.Begin);
    return partialResponse;
}
19
Source : StreamIOCallbacks.cs
with MIT License
from 0xC0000054
with MIT License
from 0xC0000054
// COM-style seek callback: adapts Stream.Seek to an HRESULT-returning API.
// 'origin' is the raw integer form of a SeekOrigin value.
public int Seek(long offset, int origin)
{
    int hr = HResult.S_OK;
    try
    {
        long newPosition = this.stream.Seek(offset, (SeekOrigin)origin);
        // NOTE(review): this equality only holds for SeekOrigin.Begin; for
        // Current/End the resulting absolute position will normally differ
        // from 'offset'. Presumably callers only seek from Begin — confirm.
        if (newPosition != offset)
        {
            hr = HResult.SeekError;
        }
    }
    catch (Exception ex)
    {
        // Preserve the exception for the managed caller and surface its HRESULT.
        this.CallbackExceptionInfo = ExceptionDispatchInfo.Capture(ex);
        hr = ex.HResult;
    }
    return hr;
}
19
Source : ParamSfo.cs
with MIT License
from 13xforever
with MIT License
from 13xforever
/// <summary>Parses a PARAM.SFO structure from the given seekable stream.</summary>
/// <exception cref="ArgumentException">The stream cannot seek.</exception>
/// <exception cref="FormatException">The magic bytes do not match "\0PSF".</exception>
public static ParamSfo ReadFrom(Stream stream)
{
    if (!stream.CanSeek)
        throw new ArgumentException("Stream must be seekable", nameof(stream));
    stream.Seek(0, SeekOrigin.Begin);
    var sfo = new ParamSfo();
    using var reader = new BinaryReader(stream, new UTF8Encoding(false), true);
    sfo.Magic = new string(reader.ReadChars(4));
    if (sfo.Magic != "\0PSF")
        throw new FormatException("Not a valid SFO file");
    sfo.MajorVersion = reader.ReadByte();
    sfo.MinorVersion = reader.ReadByte();
    sfo.Reserved1 = reader.ReadInt16();
    sfo.KeysOffset = reader.ReadInt32();
    sfo.ValuesOffset = reader.ReadInt32();
    sfo.ItemCount = reader.ReadInt32();
    sfo.Items = new List<ParamSfoEntry>(sfo.ItemCount);
    // Each entry seeks to its own index record, so order of reads is safe.
    for (var itemIndex = 0; itemIndex < sfo.ItemCount; itemIndex++)
        sfo.Items.Add(ParamSfoEntry.Read(reader, sfo, itemIndex));
    return sfo;
}
19
Source : ParamSfoEntry.cs
with MIT License
from 13xforever
with MIT License
from 13xforever
/// <summary>
/// Reads a single SFO table entry: its fixed-size index record, the
/// NUL-terminated key string, and the raw value blob.
/// </summary>
public static ParamSfoEntry Read(BinaryReader reader, ParamSfo paramSfo, int itemNumber)
{
    // Layout constants of the SFO index table.
    const int indexOffset = 0x14;
    const int indexEntryLength = 0x10;
    var entry = new ParamSfoEntry();
    // Jump to this item's index record.
    reader.BaseStream.Seek(indexOffset + indexEntryLength * itemNumber, SeekOrigin.Begin);
    entry.KeyOffset = reader.ReadUInt16();
    entry.ValueFormat = (EntryFormat)reader.ReadUInt16();
    entry.ValueLength = reader.ReadInt32();
    entry.ValueMaxLength = reader.ReadInt32();
    entry.ValueOffset = reader.ReadInt32();
    // The key is a NUL-terminated string inside the key table.
    reader.BaseStream.Seek(paramSfo.KeysOffset + entry.KeyOffset, SeekOrigin.Begin);
    var keyBuilder = new StringBuilder(32);
    for (byte b = reader.ReadByte(); b != 0; b = reader.ReadByte())
        keyBuilder.Append((char)b);
    entry.Key = keyBuilder.ToString();
    // The value is read as a raw blob of its maximum length.
    reader.BaseStream.Seek(paramSfo.ValuesOffset + entry.ValueOffset, SeekOrigin.Begin);
    entry.BinaryValue = reader.ReadBytes(entry.ValueMaxLength);
    return entry;
}
19
Source : ParamSfoEntry.cs
with MIT License
from 13xforever
with MIT License
from 13xforever
/// <summary>Deserializes one PARAM.SFO entry (index record, key, value blob).</summary>
public static ParamSfoEntry Read(BinaryReader reader, ParamSfo paramSfo, int itemNumber)
{
    const int indexOffset = 0x14;      // start of the index table
    const int indexEntryLength = 0x10; // size of one index record
    var stream = reader.BaseStream;
    stream.Seek(indexOffset + indexEntryLength * itemNumber, SeekOrigin.Begin);
    // Fields of the index record, in file order.
    var result = new ParamSfoEntry
    {
        KeyOffset = reader.ReadUInt16(),
        ValueFormat = (EntryFormat)reader.ReadUInt16(),
        ValueLength = reader.ReadInt32(),
        ValueMaxLength = reader.ReadInt32(),
        ValueOffset = reader.ReadInt32(),
    };
    // Key: NUL-terminated string inside the key table.
    stream.Seek(paramSfo.KeysOffset + result.KeyOffset, SeekOrigin.Begin);
    var sb = new StringBuilder(32);
    byte ch;
    while ((ch = reader.ReadByte()) != 0)
        sb.Append((char)ch);
    result.Key = sb.ToString();
    // Value: raw bytes, padded up to ValueMaxLength.
    stream.Seek(paramSfo.ValuesOffset + result.ValueOffset, SeekOrigin.Begin);
    result.BinaryValue = reader.ReadBytes(result.ValueMaxLength);
    return result;
}
19
Source : Decrypter.cs
with MIT License
from 13xforever
with MIT License
from 13xforever
/// <summary>
/// Reads decrypted sector data into <paramref name="buffer"/>, transparently
/// AES-decrypting protected sectors and feeding every produced byte into the
/// running MD5/SHA1/SHA256 hashes.
/// </summary>
/// <returns>The number of bytes written to <paramref name="buffer"/>.</returns>
public override int Read( byte[] buffer, int offset, int count)
{
    if (Position == inputStream.Length)
        return 0;
    var positionInSector = Position % sectorSize;
    var resultCount = 0;
    // First, drain any remainder of the previously buffered (already
    // decrypted) sector.
    if (positionInSector > 0)
    {
        var len = (int)Math.Min(Math.Min(count, sectorSize - positionInSector), inputStream.Position - Position);
        md5.TransformBlock(bufferedSector, (int)positionInSector, len, buffer, offset);
        sha1.TransformBlock(bufferedSector, (int)positionInSector, len, buffer, offset);
        sha256.TransformBlock(bufferedSector, (int)positionInSector, len, buffer, offset);
        offset += len;
        count -= len;
        resultCount += len;
        Position += len;
        if (Position % sectorSize == 0)
            SectorPosition++;
    }
    if (Position == inputStream.Length)
        return resultCount;
    int readCount;
    do
    {
        readCount = inputStream.ReadExact(tmpSector, 0, sectorSize);
        if (readCount < sectorSize)
            Array.Clear(tmpSector, readCount, sectorSize - readCount);
        var decryptedSector = tmpSector;
        if (IsEncrypted(SectorPosition))
        {
            WasEncrypted = true;
            // A short trailing block cannot be AES-decrypted; re-read the
            // whole raw sector from the disc instead.
            if (readCount % 16 != 0)
            {
                Log.Debug($"Block has only {(readCount % 16) * 8} bits of data, reading raw sector...");
                discStream.Seek(SectorPosition * sectorSize, SeekOrigin.Begin);
                var newTmpSector = new byte[sectorSize];
                discStream.ReadExact(newTmpSector, 0, sectorSize);
                if (!newTmpSector.Take(readCount).SequenceEqual(tmpSector.Take(readCount)))
                    Log.Warn($"Filesystem data and raw data do not match for sector 0x{SectorPosition:x8}");
                tmpSector = newTmpSector;
            }
            using var aesTransform = aes.CreateDecryptor(decryptionKey, GetSectorIV(SectorPosition));
            decryptedSector = aesTransform.TransformFinalBlock(tmpSector, 0, sectorSize);
        }
        else
            WasUnprotected = true;
        if (count >= readCount)
        {
            // Whole sector consumed by the caller.
            md5.TransformBlock(decryptedSector, 0, readCount, buffer, offset);
            sha1.TransformBlock(decryptedSector, 0, readCount, buffer, offset);
            sha256.TransformBlock(decryptedSector, 0, readCount, buffer, offset);
            offset += readCount;
            count -= readCount;
            resultCount += readCount;
            Position += readCount;
            SectorPosition++;
        }
        else // partial sector read
        {
            // Stash the whole decrypted sector so the next Read can resume
            // from the middle of it.
            Buffer.BlockCopy(decryptedSector, 0, bufferedSector, 0, sectorSize);
            md5.TransformBlock(decryptedSector, 0, count, buffer, offset);
            sha1.TransformBlock(decryptedSector, 0, count, buffer, offset);
            sha256.TransformBlock(decryptedSector, 0, count, buffer, offset);
            offset += count;
            // BUG FIX: account for the partially consumed bytes BEFORE zeroing
            // 'count'. Previously 'count = 0' ran first, so resultCount and
            // Position were advanced by zero — the read was under-reported and
            // Position desynchronized from the buffered sector.
            resultCount += count;
            Position += count;
            count = 0;
        }
    } while (count > 0 && readCount == sectorSize);
    return resultCount;
}
19
Source : Dumper.cs
with MIT License
from 13xforever
with MIT License
from 13xforever
/// <summary>
/// Buffers the first 64 MiB of the drive and parses the filesystem structure
/// from that in-memory snapshot, falling back to the live disc reader on failure.
/// </summary>
private (List<FileRecord> files, List<string> dirs) GetFilesystemStructure()
{
    var savedPosition = driveStream.Position;
    var tocBuffer = new byte[64 * 1024 * 1024];
    driveStream.Seek(0, SeekOrigin.Begin);
    driveStream.ReadExact(tocBuffer, 0, tocBuffer.Length);
    // Restore the drive position for whoever was reading before us.
    driveStream.Seek(savedPosition, SeekOrigin.Begin);
    try
    {
        using var memStream = new MemoryStream(tocBuffer, false);
        var reader = new CDReader(memStream, true, true);
        return reader.GetFilesystemStructure();
    }
    catch (Exception e)
    {
        Log.Error(e, "Failed to buffer TOC");
    }
    // Fall back to reading directly from the disc.
    return discReader.GetFilesystemStructure();
}
19
Source : IsoHeaderParser.cs
with MIT License
from 13xforever
with MIT License
from 13xforever
/// <summary>
/// Reads the unprotected (plain) region table from the start of a disc image.
/// </summary>
public static List<(int start, int end)> GetUnprotectedRegions(this Stream discStream)
{
    discStream.Seek(0, SeekOrigin.Begin);
    var reader = new BigEndianDataReader(discStream);
    var regionCount = reader.ReadInt32();
    Log.Trace($"Found {regionCount} encrypted regions");
    // Next dword is expected to always be zero; log when it isn't.
    var unk = reader.ReadUInt32();
    if (unk != 0)
        Log.Debug($"Unk in sector description was {unk:x16}");
    var regions = new List<(int start, int end)>();
    for (var i = 0; i < regionCount; i++)
    {
        var start = reader.ReadInt32();
        var end = reader.ReadInt32();
        Log.Trace($"Unprotected region: {start:x8}-{end:x8}");
        regions.Add((start, end));
    }
    return regions;
}
19
Source : Dumper.cs
with MIT License
from 13xforever
with MIT License
from 13xforever
/// <summary>
/// Locates a valid disc decryption key: refreshes known keys from all key
/// providers, identifies the physical drive holding the disc, reads a known
/// detection sector, and tests every untested key against it.
/// </summary>
/// <param name="discKeyCachePath">Path handed to the key providers for cached keys.</param>
/// <exception cref="KeyNotFoundException">No untested key, or no key validated.</exception>
public async Task FindDiscKeyAsync(string discKeyCachePath)
{
    // reload disc keys
    try
    {
        foreach (var keyProvider in DiscKeyProviders)
        {
            Log.Trace($"Getting keys from {keyProvider.GetType().Name}...");
            var newKeys = await keyProvider.EnumerateAsync(discKeyCachePath, ProductCode, Cts.Token).ConfigureAwait(false);
            Log.Trace($"Got {newKeys.Count} keys");
            lock (AllKnownDiscKeys)
            {
                foreach (var keyInfo in newKeys)
                {
                    try
                    {
                        // Bucket keys by decrypted id so duplicates from
                        // different providers share one entry.
                        if (!AllKnownDiscKeys.TryGetValue(keyInfo.DecryptedKeyId, out var duplicates))
                            AllKnownDiscKeys[keyInfo.DecryptedKeyId] = duplicates = new HashSet<DiscKeyInfo>();
                        duplicates.Add(keyInfo);
                    }
                    catch (Exception e)
                    {
                        Log.Error(e);
                    }
                }
            }
        }
    }
    catch (Exception ex)
    {
        Log.Error(ex, "Failed to load disc keys");
    }
    // check if user provided something new since the last attempt
    var untestedKeys = new HashSet<string>();
    lock (AllKnownDiscKeys)
        untestedKeys.UnionWith(AllKnownDiscKeys.Keys);
    untestedKeys.ExceptWith(TestedDiscKeys);
    if (untestedKeys.Count == 0)
        throw new KeyNotFoundException("No valid disc decryption key was found");
    // select physical device
    string physicalDevice = null;
    List<string> physicalDrives = new List<string>();
    Log.Trace("Trying to enumerate physical drives...");
    try
    {
        if (RuntimeInformation.IsOSPlatform(OSPlatform.Windows))
            physicalDrives = EnumeratePhysicalDrivesWindows();
        else if (RuntimeInformation.IsOSPlatform(OSPlatform.Linux))
            physicalDrives = EnumeratePhysicalDrivesLinux();
        else
            throw new NotImplementedException("Current OS is not supported");
    }
    catch (Exception e)
    {
        Log.Error(e);
        throw;
    }
    Log.Debug($"Found {physicalDrives.Count} physical drives");
    if (physicalDrives.Count == 0)
        throw new InvalidOperationException("No optical drives were found");
    // Identify the drive with this disc by matching PS3_DISC.SFB content
    // against the copy read earlier (discSfbData).
    foreach (var drive in physicalDrives)
    {
        try
        {
            Log.Trace($"Checking physical drive {drive}...");
            using var discStream = File.Open(drive, FileMode.Open, FileAccess.Read, FileShare.Read);
            var tmpDiscReader = new CDReader(discStream, true, true);
            if (tmpDiscReader.FileExists("PS3_DISC.SFB"))
            {
                Log.Trace("Found PS3_DISC.SFB, getting sector data...");
                var discSfbInfo = tmpDiscReader.GetFileInfo("PS3_DISC.SFB");
                if (discSfbInfo.Length == discSfbData.Length)
                {
                    var buf = new byte[discSfbData.Length];
                    var sector = tmpDiscReader.PathToClusters(discSfbInfo.FullName).First().Offset;
                    Log.Trace($"PS3_DISC.SFB sector number is {sector}, reading content...");
                    discStream.Seek(sector * tmpDiscReader.ClusterSize, SeekOrigin.Begin);
                    discStream.ReadExact(buf, 0, buf.Length);
                    if (buf.SequenceEqual(discSfbData))
                    {
                        physicalDevice = drive;
                        break;
                    }
                    Log.Trace("SFB content check failed, skipping the drive");
                }
            }
        }
        catch (Exception e)
        {
            Log.Debug($"Skipping drive {drive}: {e.Message}");
        }
    }
    if (physicalDevice == null)
        throw new AccessViolationException("Couldn't get physical access to the drive");
    Log.Debug($"Selected physical drive {physicalDevice}");
    driveStream = File.Open(physicalDevice, FileMode.Open, FileAccess.Read, FileShare.Read);
    // find disc license file
    discReader = new CDReader(driveStream, true, true);
    FileRecord detectionRecord = null;
    byte[] expectedBytes = null;
    try
    {
        // Pick the first non-empty known detection file present on the disc.
        foreach (var path in Detectors.Keys)
            if (discReader.FileExists(path))
            {
                var clusterRange = discReader.PathToClusters(path);
                detectionRecord = new FileRecord(path, clusterRange.Min(r => r.Offset), discReader.GetFileLength(path));
                expectedBytes = Detectors[path];
                if (detectionRecord.Length == 0)
                    continue;
                Log.Debug($"Using {path} for disc key detection");
                break;
            }
    }
    catch (Exception e)
    {
        Log.Error(e);
    }
    if (detectionRecord == null)
        throw new FileNotFoundException("Couldn't find a single disc key detection file, please report");
    if (Cts.IsCancellationRequested)
        return;
    SectorSize = discReader.ClusterSize;
    // select decryption key
    driveStream.Seek(detectionRecord.StartSector * discReader.ClusterSize, SeekOrigin.Begin);
    detectionSector = new byte[discReader.ClusterSize];
    detectionBytesExpected = expectedBytes;
    sectorIV = Decrypter.GetSectorIV(detectionRecord.StartSector);
    Log.Debug($"Initialized {nameof(sectorIV)} ({sectorIV?.Length * 8} bit) for sector {detectionRecord.StartSector}: {sectorIV?.ToHexString()}");
    driveStream.ReadExact(detectionSector, 0, detectionSector.Length);
    string discKey = null;
    try
    {
        // Try all untested keys in parallel until one decrypts the detection
        // sector to the expected bytes.
        discKey = untestedKeys.AsParallel().FirstOrDefault(k => !Cts.IsCancellationRequested && IsValidDiscKey(k));
    }
    catch (Exception e)
    {
        Log.Error(e);
    }
    if (discKey == null)
        throw new KeyNotFoundException("No valid disc decryption key was found");
    if (Cts.IsCancellationRequested)
        return;
    lock (AllKnownDiscKeys)
        AllKnownDiscKeys.TryGetValue(discKey, out allMatchingKeys);
    var discKeyInfo = allMatchingKeys?.First();
    DiscKeyFilename = Path.GetFileName(discKeyInfo?.FullPath);
    DiscKeyType = discKeyInfo?.KeyType ?? default;
}
19
Source : StreamDeserializeAux.cs
with MIT License
from 1996v
with MIT License
from 1996v
// Skips 'size' bytes of the stream: seeks when supported, otherwise consumes
// the bytes via StreamSkip in chunks no larger than 'buffer'.
private static void AdvanceStream(Stream stream, byte[] buffer, int size)
{
    if (stream.CanSeek)
    {
        stream.Seek(size, SeekOrigin.Current);
        return;
    }
    while (size > 0)
    {
        var chunk = buffer.Length > size ? size : buffer.Length;
        StreamSkip(stream, buffer, chunk);
        size -= chunk;
    }
}
19
Source : HttpUtil.cs
with Apache License 2.0
from 214175590
with Apache License 2.0
from 214175590
/// <summary>
/// Reads the whole response content from the beginning and decodes it as text.
/// </summary>
/// <param name="coding">Encoding used to decode the raw bytes.</param>
/// <returns>The decoded response body.</returns>
public string GetString(Encoding coding)
{
    StringBuilder str = new StringBuilder();
    Stream sr = ResponseContent;
    sr.Seek(0, SeekOrigin.Begin);
    byte[] data = new byte[1024 * 1024];
    // BUG FIX: decode with a stateful Decoder instead of Encoding.GetString
    // per chunk — otherwise a multi-byte character split across two chunk
    // boundaries is decoded as garbage.
    Decoder decoder = coding.GetDecoder();
    char[] chars = new char[coding.GetMaxCharCount(data.Length)];
    int readcount = sr.Read(data, 0, data.Length);
    while (readcount > 0)
    {
        int charCount = decoder.GetChars(data, 0, readcount, chars, 0);
        str.Append(chars, 0, charCount);
        readcount = sr.Read(data, 0, data.Length);
    }
    return str.ToString();
}
19
Source : Svc.cs
with MIT License
from 3F
with MIT License
from 3F
/// <summary>Rewinds the underlying stream (when open) and resets the line counter.</summary>
public void ResetStream()
{
    if (stream == null) {
        nline = 0;
        return;
    }
    // Seek returns the new absolute position (0), which becomes the counter.
    nline = stream.BaseStream.Seek(0, SeekOrigin.Begin);
}
19
Source : FlvDemuxer.cs
with MIT License
from a1q123456
with MIT License
from a1q123456
/// <summary>
/// Seeks the FLV stream to the key frame at or after the given time, using the
/// keyframe index ("times"/"filepositions") from the onMetaData object.
/// </summary>
/// <param name="milliseconds">Target playback time in milliseconds.</param>
/// <param name="metaData">Parsed FLV metadata containing the "keyframes" index; may be null.</param>
/// <param name="ct">Unused here; kept for signature compatibility.</param>
public void SeekNoLock(double milliseconds, Dictionary<string, object> metaData, CancellationToken ct = default)
{
    if (metaData == null)
    {
        return;
    }
    var seconds = milliseconds / 1000;
    var keyframes = metaData["keyframes"] as AmfObject;
    var times = keyframes.Fields["times"] as List<object>;
    // First keyframe at or after the requested time.
    var idx = times.FindIndex(t => ((double)t) >= seconds);
    if (idx == -1)
    {
        return;
    }
    var filePositions = keyframes.Fields["filepositions"] as List<object>;
    var pos = (double)filePositions[idx];
    // BUG FIX: cast to long, not int — an int cast truncates file positions
    // beyond 2 GiB. The -4 rewinds over the PreviousTagSize field.
    _stream.Seek((long)(pos - 4), SeekOrigin.Begin);
}
19
Source : ItemDef.cs
with GNU General Public License v3.0
from a2659802
with GNU General Public License v3.0
from a2659802
/// <summary>Creates a deep copy of this instance via a binary serialization round-trip.</summary>
// SECURITY NOTE(review): BinaryFormatter is obsolete and unsafe on untrusted
// data (removed in .NET 9); consider replacing with a manual copy or
// System.Text.Json if this type's graph allows it.
public object Clone()
{
    IFormatter formatter = new BinaryFormatter();
    using (Stream stream = new MemoryStream())
    {
        formatter.Serialize(stream, this);
        // Rewind before deserializing the copy back out.
        stream.Seek(0, SeekOrigin.Begin);
        return formatter.Deserialize(stream);
    }
}
19
Source : HttpClientHelpers.cs
with MIT License
from Abdulrhman5
with MIT License
from Abdulrhman5
/// <summary>
/// Builds an outgoing HttpRequestMessage that mirrors the incoming request,
/// optionally forwarding the query string and headers, and transforming the
/// body via <paramref name="changeBody"/>.
/// </summary>
public async Task<HttpRequestMessage> CreateAsync(
    HttpContext context,
    HttpMethod method,
    string url,
    bool forwardUrlPars,
    bool forwardHeaders,
    // add new headers,
    Func<string, (string Content, string ContentType)> changeBody = null
)
{
    var targetUrl = forwardUrlPars
        ? url + context.Request.QueryString.ToUriComponent()
        : url;
    var request = new HttpRequestMessage(method, targetUrl);
    // NOTE(review): the result of this call is discarded — looks redundant; confirm.
    context.Request.QueryString.ToUriComponent();
    if (forwardHeaders)
    {
        foreach (var header in context.Request.Headers)
        {
            // The Host header must match the destination, so never forward it.
            if (header.Key.EqualsIC("Host")) continue;
            request.Headers.TryAddWithoutValidation(header.Key, header.Value.AsEnumerable());
        }
    }
    if (changeBody is null)
    {
        return request;
    }
    var bodyReader = new StreamReader(context.Request.Body);
    // The model binder has already consumed the stream; rewind before re-reading.
    bodyReader.BaseStream.Seek(0, SeekOrigin.Begin);
    var originalBody = await bodyReader.ReadToEndAsync();
    var (content, contentType) = changeBody(originalBody);
    request.Content = new StringContent(content);
    request.Content.Headers.ContentType.MediaType = contentType;
    return request;
}
19
Source : HttpClientHelpers.cs
with MIT License
from Abdulrhman5
with MIT License
from Abdulrhman5
/// <summary>
/// Builds an outgoing <see cref="HttpRequestMessage"/> mirroring the current
/// HTTP context's request, optionally forwarding the query string and headers
/// and transforming the body via <paramref name="changeBody"/>.
/// </summary>
public async Task<HttpRequestMessage> CreateAsync(
    HttpMethod method,
    string url,
    bool forwardUrlPars,
    bool forwardHeaders,
    // add new headers,
    Func<string, (string Content, string ContentType)> changeBody = null)
{
    string finalUrl = forwardUrlPars ? url + _httpContext.Request.QueryString.ToUriComponent() : url;
    var request = new HttpRequestMessage(method, finalUrl);
    // NOTE(review): the result of this call is discarded — looks redundant; confirm.
    _httpContext.Request.QueryString.ToUriComponent();
    if (forwardHeaders)
    {
        foreach (var requestHeader in _httpContext.Request.Headers)
        {
            // Never forward Host: it must match the destination server.
            if (requestHeader.Key.EqualsIC("Host")) continue;
            request.Headers.TryAddWithoutValidation(requestHeader.Key, requestHeader.Value.AsEnumerable());
        }
    }
    if (changeBody is null)
    {
        return request;
    }
    var body = new StreamReader(_httpContext.Request.Body);
    //The modelbinder has already read the stream and need to reset the stream index
    body.BaseStream.Seek(0, SeekOrigin.Begin);
    var requestBody = await body.ReadToEndAsync();
    var (content, type) = changeBody(requestBody);
    request.Content = new StringContent(content);
    // Overwrite the media type with the one chosen by the body transformer.
    request.Content.Headers.ContentType.MediaType = type;
    return request;
}
19
Source : StreamParser.cs
with MIT License
from actions
with MIT License
from actions
/// <summary>
/// Seeks within the sub-stream window of the outer stream, translating
/// window-relative offsets to outer-stream offsets.
/// </summary>
/// <exception cref="ArgumentException">
/// Thrown when the requested position falls outside the window.
/// </exception>
public override long Seek(long offset, SeekOrigin origin)
{
    // NOTE(review): the guards use strict '<' against m_length, so seeking
    // exactly to the end of the window is rejected — confirm that is intended.
    if (origin == SeekOrigin.Begin && 0 <= offset && offset < m_length)
    {
        // Translate a window-relative offset to an outer-stream offset.
        return m_stream.Seek(offset + m_startingPosition, origin);
    }
    else if (origin == SeekOrigin.End && 0 >= offset && offset > -m_length)
    {
        // Re-base the end-relative offset from the outer stream's end to the
        // window's end.
        return m_stream.Seek(offset - ((m_stream.Length-1) - this.EndingPostionOnOuterStream), origin);
    }
    else if (origin == SeekOrigin.Current && (offset + m_stream.Position) >= this.StartingPostionOnOuterStream && (offset + m_stream.Position) < this.EndingPostionOnOuterStream)
    {
        return m_stream.Seek(offset, origin);
    }
    throw new ArgumentException();
}
19
Source : FileContainerHttpClient.cs
with MIT License
from actions
with MIT License
from actions
/// <summary>
/// Uploads a file stream to a file container, optionally gzip-compressing it
/// first when the negotiated API version supports compressed uploads.
/// </summary>
/// <exception cref="ArgumentException">containerId is less than 1.</exception>
public async Task<HttpResponseMessage> UploadFileAsync(
    Int64 containerId,
    String itemPath,
    Stream fileStream,
    Guid scopeIdentifier,
    CancellationToken cancellationToken = default(CancellationToken),
    int chunkSize = c_defaultChunkSize,
    bool uploadFirstChunk = false,
    Object userState = null,
    Boolean compressStream = true)
{
    if (containerId < 1)
    {
        throw new ArgumentException(WebApiResources.ContainerIdMustBeGreaterThanZero(), "containerId");
    }
    ArgumentUtility.CheckForNull(fileStream, "fileStream");
    if (fileStream.Length == 0)
    {
        HttpRequestMessage requestMessage;
        List<KeyValuePair<String, String>> query = AppendItemQueryString(itemPath, scopeIdentifier);
        // zero byte upload
        requestMessage = await CreateRequestMessageAsync(HttpMethod.Put, FileContainerResourceIds.FileContainer, routeValues: new { containerId = containerId }, version: s_currentApiVersion, queryParameters: query, userState: userState, cancellationToken: cancellationToken).ConfigureAwait(false);
        return await SendAsync(requestMessage, userState, cancellationToken).ConfigureAwait(false);
    }
    // Gzip uploads are only supported from API version 1.0 / resource 2 onward.
    ApiResourceVersion gzipSupportedVersion = new ApiResourceVersion(new Version(1, 0), 2);
    ApiResourceVersion requestVersion = await NegotiateRequestVersionAsync(FileContainerResourceIds.FileContainer, s_currentApiVersion, userState, cancellationToken: cancellationToken).ConfigureAwait(false);
    if (compressStream
        && (requestVersion.ApiVersion < gzipSupportedVersion.ApiVersion
        || (requestVersion.ApiVersion == gzipSupportedVersion.ApiVersion && requestVersion.ResourceVersion < gzipSupportedVersion.ResourceVersion)))
    {
        compressStream = false;
    }
    Stream streamToUpload = fileStream;
    Boolean gzipped = false;
    long filelength = fileStream.Length;
    try
    {
        if (compressStream)
        {
            if (filelength > 65535) // if file greater than 64K use a file
            {
                String tempFile = Path.GetTempFileName();
                streamToUpload = File.Create(tempFile, 32768, FileOptions.DeleteOnClose | FileOptions.Asynchronous);
            }
            else
            {
                streamToUpload = new MemoryStream((int)filelength + 8);
            }
            using (GZipStream zippedStream = new GZipStream(streamToUpload, CompressionMode.Compress, true))
            {
                await fileStream.CopyToAsync(zippedStream).ConfigureAwait(false);
            }
            if (streamToUpload.Length >= filelength)
            {
                // compression did not help
                streamToUpload.Dispose();
                streamToUpload = fileStream;
            }
            else
            {
                gzipped = true;
            }
            // Rewind whichever stream will be uploaded (temp file, memory
            // stream, or the original fileStream when compression lost).
            streamToUpload.Seek(0, SeekOrigin.Begin);
        }
        return await UploadFileAsync(containerId, itemPath, streamToUpload, null, filelength, gzipped, scopeIdentifier, cancellationToken, chunkSize, uploadFirstChunk: uploadFirstChunk, userState: userState);
    }
    finally
    {
        // Only the temporary compressed stream is owned here; the caller's
        // fileStream must stay open.
        if (gzipped && streamToUpload != null)
        {
            streamToUpload.Dispose();
        }
    }
}
19
Source : MediaPlaybackDataSource.cs
with MIT License
from ADeltaX
with MIT License
from ADeltaX
/// <summary>
/// Retrieves the media thumbnail by copying it out of the COM property store's
/// IStream into a managed MemoryStream; returns null when no thumbnail exists.
/// </summary>
public Stream GetThumbnailStream()
{
    Stream outStream = null;
    IPropertyStore propStore;
    // Two interface layouts exist depending on the Windows build number.
    if (numSelectInterface == 20279)
        playbackDataSource_20279.GetMediaObjectInfo(out propStore);
    else
        playbackDataSource_10586.GetMediaObjectInfo(out propStore);
    if (propStore != null && propStore.GetValue(ref PKEY_ThumbnailStream, out PROPVARIANT pVariant) == 0 && pVariant.vt == VARTYPE.VT_STREAM)
    {
        var inStream = (IStream)Marshal.GetObjectForIUnknown(pVariant.union.pStream);
        outStream = new MemoryStream();
        int cb = 4096;
        byte[] buffer = new byte[cb];
        int read = 0;
        // Copy the COM stream in 4 KiB chunks; IStream.Read reports the byte
        // count through an out pointer, hence the unmanaged int allocation.
        do
        {
            IntPtr bytesRead = Marshal.AllocCoTaskMem(Marshal.SizeOf(typeof(int)));
            try
            {
                inStream.Read(buffer, cb, bytesRead);
                read = Marshal.ReadInt32(bytesRead);
            }
            finally
            {
                Marshal.FreeCoTaskMem(bytesRead);
            }
            outStream.Write(buffer, 0, read);
        } while (read > 0);
        // Rewind so callers can consume the stream from the start.
        outStream.Seek(0, SeekOrigin.Begin);
        NativeMethods.PropVariantClear(ref pVariant);
    }
    return outStream;
}
19
Source : FsBufferedReaderWriter.cs
with MIT License
from Adoxio
with MIT License
from Adoxio
/// <summary>
/// Reads and decrypts one page from the backing file into <paramref name="destination"/>.
/// </summary>
/// <param name="pageNumber">Zero-based page index.</param>
/// <param name="destination">Buffer receiving the decrypted page data.</param>
public override void ReadPage(long pageNumber, byte[] destination)
{
    // Each page slot is PageSize plus 500 bytes of encryption overhead.
    this.fileStream.Seek(this.dataOffset + (pageNumber * (this.PageSize + 500)), SeekOrigin.Begin);
    var encryptedLength = this.ReadInt();
    var encrypted = new byte[encryptedLength];
    // BUG FIX: Stream.Read may return fewer bytes than requested; loop until
    // the whole encrypted payload has been read.
    var total = 0;
    while (total < encryptedLength)
    {
        var read = this.fileStream.Read(encrypted, total, encryptedLength - total);
        if (read <= 0)
            throw new EndOfStreamException("Unexpected end of file while reading a page");
        total += read;
    }
    this.DecryptData(encrypted, destination);
}
19
Source : FsBufferedReaderWriter.cs
with MIT License
from Adoxio
with MIT License
from Adoxio
/// <summary>Encrypts and writes one page into its slot in the backing file.</summary>
public override void WritePage(long pageNumber, byte[] source)
{
    var payload = this.EncryptData(source);
    // Each page slot is PageSize plus 500 bytes of overhead.
    var slotPosition = this.dataOffset + (pageNumber * (this.PageSize + 500));
    this.fileStream.Seek(slotPosition, SeekOrigin.Begin);
    this.WriteInt(payload.Length);
    this.fileStream.Write(payload, 0, payload.Length);
    // Track the logical data size separately: the file has header overhead
    // and page data may be padded to the cipher block size.
    if (this.fileSize > this.Length)
    {
        this.WriteLength(this.fileSize);
    }
}
19
Source : FsBufferedReaderWriter.cs
with MIT License
from Adoxio
with MIT License
from Adoxio
/// <summary>
/// Writes the file marker and format version, then records where the length
/// field and the data area begin.
/// </summary>
protected void WriteHeader()
{
    var stream = this.fileStream;
    stream.Seek(0, SeekOrigin.Begin);
    // Marker bytes identify this file as ours.
    stream.Write(this.headerMarker, 0, this.headerMarker.Length);
    // Format version byte (currently 1).
    stream.WriteByte(1);
    this.lengthOffset = stream.Position;
    this.dataOffset = this.lengthOffset + sizeof(long);
}
19
Source : FsBufferedReaderWriter.cs
with MIT License
from Adoxio
with MIT License
from Adoxio
/// <summary>
/// Validates the header marker and version, then computes the offsets of the
/// length field and the data area.
/// </summary>
/// <exception cref="InvalidOperationException">The header is missing or unreadable.</exception>
protected void ReadHeader()
{
    try
    {
        var stream = this.fileStream;
        // Check header
        var marker = new byte[this.headerMarker.Length];
        stream.Seek(0, SeekOrigin.Begin);
        stream.Read(marker, 0, marker.Length);
        if (!marker.SequenceEqual(this.headerMarker))
        {
            throw new InvalidOperationException(
                string.Format("File {0} does not looks like search index",
                    this.File.FullName));
        }
        // Format version; unused for now, reserved for future layout changes.
        var version = stream.ReadByte();
        this.lengthOffset = stream.Position;
        this.dataOffset = this.lengthOffset + sizeof(long);
    }
    catch (Exception e)
    {
        throw new InvalidOperationException("Not able to read keys from file", e);
    }
}
19
Source : FsBufferedReaderWriter.cs
with MIT License
from Adoxio
with MIT License
from Adoxio
/// <summary>
/// Reads the committed logical data length from the header, returning the
/// cached value when available and 0 when the header cannot be read.
/// </summary>
protected long ReadLength()
{
    if (this.commitedFileSize > 0)
    {
        return this.commitedFileSize;
    }
    try
    {
        var raw = new byte[sizeof(long)];
        this.fileStream.Seek(this.lengthOffset, SeekOrigin.Begin);
        // BUG FIX: Stream.Read may return fewer bytes than requested; keep
        // reading until the full 8-byte length field is in the buffer.
        var total = 0;
        while (total < raw.Length)
        {
            var read = this.fileStream.Read(raw, total, raw.Length - total);
            if (read <= 0)
            {
                // Truncated header — treat like an unreadable file.
                return 0;
            }
            total += read;
        }
        return BitConverter.ToInt64(raw, 0);
    }
    catch (IOException)
    {
        return 0;
    }
}
19
Source : FsBufferedReaderWriter.cs
with MIT License
from Adoxio
with MIT License
from Adoxio
/// <summary>Persists the logical data length into the header's length field.</summary>
/// <param name="value">The length to record and cache as committed.</param>
protected void WriteLength(long value)
{
    // BUG FIX: serialize the 'value' parameter — the previous code ignored it
    // and always wrote this.fileSize. The only visible call site passes
    // this.fileSize, so existing behavior is unchanged.
    var raw = BitConverter.GetBytes(value);
    this.fileStream.Seek(this.lengthOffset, SeekOrigin.Begin);
    this.fileStream.Write(raw, 0, raw.Length);
    this.commitedFileSize = value;
}
19
Source : ResourceTestBase.cs
with MIT License
from adrianoc
with MIT License
from adrianoc
/// <summary>Rewinds the stream and returns its entire content as text.</summary>
private static string ReadToEnd(Stream tbc)
{
    tbc.Seek(0, SeekOrigin.Begin);
    var reader = new StreamReader(tbc);
    return reader.ReadToEnd();
}
19
Source : EndianReader.cs
with MIT License
from aerosoul94
with MIT License
from aerosoul94
/// <summary>Seeks the underlying stream and returns its new position.</summary>
public virtual long Seek(long offset, SeekOrigin origin) => BaseStream.Seek(offset, origin);
19
Source : FileBuffer.cs
with GNU General Public License v2.0
from afrantzis
with GNU General Public License v2.0
from afrantzis
/// <summary>
/// Reads up to <paramref name="len"/> bytes of the file starting at position
/// <paramref name="pos"/> into <paramref name="ba"/> at <paramref name="index"/>.
/// </summary>
/// <returns>The (clamped) number of bytes requested, or 0 when pos is out of range.</returns>
public override long Read(byte[] ba, long index, long pos, long len)
{
    // bounds checking
    if (pos >= FileLength || pos < 0)
        return 0;
    // BUG FIX: clamp to the bytes actually remaining; the previous
    // "FileLength - pos + 1" overshot the end of the file by one byte.
    if (pos + len > FileLength)
        len = FileLength - pos;
    reader.BaseStream.Seek(pos, SeekOrigin.Begin);
    // FIXME: The casts are dangerous but .NET doesn't have
    // an Int64 version of Read()!
    reader.Read(ba, (int)index, (int)len);
    return len;
}
19
Source : FileBuffer.cs
with GNU General Public License v2.0
from afrantzis
with GNU General Public License v2.0
from afrantzis
/// <summary>
/// Opens the given file (or block device, on Unix builds) for reading,
/// determines its length, and fills the initial buffer window.
/// </summary>
public void Load(string filename)
{
    // Release any previously opened file.
    if (reader != null)
        reader.Close();
#if ENABLE_UNIX_SPECIFIC
    UnixFileInfo fsInfo = new UnixFileInfo(filename);
    if (!fsInfo.Exists)
        throw new FileNotFoundException(fsInfo.FullName);
    // get the size of the file or device
    if (fsInfo.IsRegularFile) {
        FileLength = fsInfo.Length;
        isResizable = true;
    }
    else if (fsInfo.IsBlockDevice) {
        UnixStream unixStream = fsInfo.OpenRead();
        // Block devices don't report a length; ask the kernel for the size.
        ioctl(unixStream.Handle, BLKGETSIZE64, ref FileLength);
        unixStream.Close();
        isResizable = false;
    }
    else
        throw new NotSupportedException("File object isn't a regular or block device.");
#endif
    Stream stream = new FileStream(filename, FileMode.Open, FileAccess.Read,
        FileShare.ReadWrite);
    if (stream.CanSeek == false)
        throw new NotSupportedException("File object doesn't support seeking.");
#if !ENABLE_UNIX_SPECIFIC
    // Portable fallback: derive the length by seeking to the end, then rewind.
    FileLength = stream.Seek(0, SeekOrigin.End);
    stream.Seek(0, SeekOrigin.Begin);
    isResizable = false;
#endif
    reader = new BinaryReader(stream);
    // Prime the sliding window starting at offset 0.
    winOccupied = reader.Read(window, 0, window.Length);
    winOffset = 0;
}
19
Source : IOHelper.cs
with Mozilla Public License 2.0
from agebullhu
with Mozilla Public License 2.0
from agebullhu
/// <summary>
/// Detects the text encoding of a seekable stream by inspecting its byte order
/// mark, restoring the stream position afterwards.
/// </summary>
/// <param name="stream">Seekable stream to probe; may be null.</param>
/// <param name="defaultEncoding">Encoding returned when no BOM is recognized.</param>
/// <returns>The detected encoding, or <paramref name="defaultEncoding"/>.</returns>
public static Encoding GetEncoding(Stream stream, Encoding defaultEncoding)
{
    var targetEncoding = defaultEncoding;
    if (stream == null || stream.Length < 2)
        return targetEncoding;
    byte byte3 = 0;
    // BUG FIX: remember the caller's current position. The previous code
    // saved the return value of Seek(0, Begin) — which is always 0 — so it
    // could never restore a non-zero position.
    var origPos = stream.Position;
    stream.Seek(0, SeekOrigin.Begin);
    var nByte = stream.ReadByte();
    var byte1 = Convert.ToByte(nByte);
    var byte2 = Convert.ToByte(stream.ReadByte());
    if (stream.Length >= 3)
    {
        byte3 = Convert.ToByte(stream.ReadByte());
    }
    // BOM patterns:
    //   UTF-16 LE: FF FE; UTF-16 BE: FE FF; UTF-8: EF BB BF.
    if (byte1 == 0xFE && byte2 == 0xFF) // UTF-16 big-endian
    {
        targetEncoding = Encoding.BigEndianUnicode;
    }
    if (byte1 == 0xFF && byte2 == 0xFE && byte3 != 0xFF) // UTF-16 little-endian
    {
        targetEncoding = Encoding.Unicode;
    }
    if (byte1 == 0xEF && byte2 == 0xBB && byte3 == 0xBF) // UTF-8
    {
        targetEncoding = Encoding.UTF8;
    }
    // Restore the caller's position.
    stream.Seek(origPos, SeekOrigin.Begin);
    return targetEncoding;
}
19
Source : SoundBankFile.cs
with GNU General Public License v3.0
from ahmed605
with GNU General Public License v3.0
from ahmed605
/// <summary>
/// Opens a sound bank from a stream, first attempting the multichannel layout
/// and falling back to mono when that fails.
/// </summary>
/// <exception cref="SoundBankException">Neither layout could be parsed.</exception>
public void Open(Stream stream)
{
    Stream = stream;
    var reader = new BinaryReader(stream);
    var startPosition = stream.Position;
    // First attempt: multichannel layout.
    try
    {
        stream.Seek(startPosition, SeekOrigin.Begin);
        SoundBank = new SoundBankMultiChannel();
        SoundBank.Read(reader);
        IsMultiChannel = true;
    }
    catch (SoundBankException)
    {
        SoundBank = null;
    }
    // Second attempt: mono layout, retried from the same start position.
    if (SoundBank == null)
    {
        try
        {
            stream.Seek(startPosition, SeekOrigin.Begin);
            SoundBank = new SoundBankMono();
            SoundBank.Read(reader);
            IsMultiChannel = false;
        }
        catch (SoundBankException)
        {
            SoundBank = null;
        }
    }
    if (SoundBank == null)
    {
        throw new SoundBankException("Could not load sound bank.");
    }
}
19
Source : SoundBankMono.cs
with GNU General Public License v3.0
from ahmed605
with GNU General Public License v3.0
from ahmed605
/// <summary>
/// Decodes one 2048-byte block of a wave into 16-bit PCM samples written to
/// <paramref name="outStream"/>; handles both DVI/IMA ADPCM and raw PCM data.
/// </summary>
/// <param name="state">ADPCM decoder state, updated in place across blocks.</param>
/// <returns>The number of bytes written (samples * 2).</returns>
public int ExportWaveBlockAsPCM(int waveIndex, int blockIndex, ref DviAdpcmDecoder.AdpcmState state, Stream soundBankStream, Stream outStream)
{
    int samplesWritten = 0;
    WaveInfo waveInfo = _waveInfos[waveIndex];
    BinaryWriter bw = new BinaryWriter(outStream);
    byte[] block = new byte[2048];
    int blockSize = 2048;
    if (blockIndex == (waveInfo.numSamplesInBytes_computed / blockSize) - 1)
    {
        // Last block
        // NOTE(review): if numSamplesInBytes is an exact multiple of 2048 this
        // yields a blockSize of 0 — confirm inputs never hit that case.
        blockSize = waveInfo.numSamplesInBytes%blockSize;
    }
    // Use a pre-computed decoder state snapshot for this block when available.
    if (waveInfo.states != null && blockIndex < waveInfo.states.Length)
    {
        state = waveInfo.states[blockIndex];
    }
    soundBankStream.Seek(Header.headerSize + waveInfo.offset + blockIndex * 2048, SeekOrigin.Begin);
    soundBankStream.Read(block, 0, blockSize);
    int nibblePairCount = 0;
    while (nibblePairCount < blockSize)
    {
        if (waveInfo.is_compressed)
        {
            // ADPCM: each byte holds two 4-bit samples, low nibble first.
            bw.Write(DviAdpcmDecoder.DecodeAdpcm((byte)(block[nibblePairCount] & 0xf), ref state));
            bw.Write(DviAdpcmDecoder.DecodeAdpcm((byte)((block[nibblePairCount] >> 4) & 0xf), ref state));
            samplesWritten += 2;
            nibblePairCount++;
        }
        else
        {
            // Raw PCM: each byte pair is one little-endian 16-bit sample.
            bw.Write(BitConverter.ToInt16(block, nibblePairCount));
            samplesWritten++;
            nibblePairCount += 2;
        }
    }
    return samplesWritten * 2; // byte size
}
19
Source : SoundBankMono.cs
with GNU General Public License v3.0
from ahmed605
public void Read(BinaryReader br)
{
    // Parse and sanity-check the bank header.
    Header = new Mono.Header(br);
    if (Header.offsetWaveInfo > Header.headerSize)
    {
        throw new SoundBankException("WaveInfo is outside of header");
    }

    // First pass: read the fixed-size wave info headers in one sweep.
    br.BaseStream.Seek(Header.offsetWaveInfo, SeekOrigin.Begin);
    var headers = new WaveInfoHeader[Header.numBlocks];
    for (var i = 0; i < headers.Length; i++)
    {
        headers[i] = new WaveInfoHeader(br);
    }

    // Second pass: each header carries an offset relative to the position just
    // past the header table; follow it and read the full wave info record.
    var tableEnd = br.BaseStream.Position;
    _waveInfos = new List<WaveInfo>(Header.numBlocks);
    foreach (var header in headers)
    {
        br.BaseStream.Seek(tableEnd + header.offset, SeekOrigin.Begin);
        _waveInfos.Add(new WaveInfo(header, br));
    }
}
19
Source : WaveExport.cs
with GNU General Public License v3.0
from ahmed605
with GNU General Public License v3.0
from ahmed605
/// <summary>
/// Exports a multichannel sound bank as a PCM WAVE file.
/// </summary>
/// <param name="file">Audio file whose SoundBank must implement IMultichannelSound.</param>
/// <param name="outStream">Seekable destination stream for the WAVE data.</param>
public static void ExportMultichannel(AudioFile file, Stream outStream)
{
    // The original used an unchecked 'as' cast; fail with a clear message
    // instead of a NullReferenceException when the bank is not multichannel.
    var sound = file.SoundBank as IMultichannelSound;
    if (sound == null)
    {
        throw new ArgumentException("The audio file does not contain a multichannel sound bank.", "file");
    }

    WaveHeader header = new WaveHeader(true);
    // Reserve room for the header; the PCM data is written first because the
    // header needs the final file size.
    outStream.Seek(header.HeaderSize, SeekOrigin.Begin);
    sound.ExportMultichannelAsPCM(file.Stream, outStream);

    // Now that the length is known, go back and write the header.
    outStream.Seek(0, SeekOrigin.Begin);
    header.FileSize = (int)outStream.Length;
    header.SamplesPerSecond = sound.CommonSamplesPerSecond;
    header.ChannelMask = sound.ChannelMask;
    header.Write(new BinaryWriter(outStream));
}
19
Source : PtrCollection.cs
with GNU General Public License v3.0
from ahmed605
with GNU General Public License v3.0
from ahmed605
public void Read(BinaryReader br)
{
    // Header portion: pointer-table offset followed by element count and size.
    var ptrListOffset = ResourceUtil.ReadOffset(br);
    Count = br.ReadUInt16();
    Size = br.ReadUInt16();

    _itemOffsets = new uint[Count];
    _items = new List<T>();

    // NOTE(review): StreamContext presumably restores the reader position on
    // dispose — confirm against its implementation.
    using (new StreamContext(br))
    {
        // Read the pointer table first...
        br.BaseStream.Seek(ptrListOffset, SeekOrigin.Begin);
        for (var i = 0; i < Count; i++)
        {
            _itemOffsets[i] = ResourceUtil.ReadOffset(br);
        }

        // ...then follow each pointer and deserialize the item it references.
        foreach (var itemOffset in _itemOffsets)
        {
            br.BaseStream.Seek(itemOffset, SeekOrigin.Begin);
            var item = new T();
            item.Read(br);
            _items.Add(item);
        }
    }
}
19
Source : File.cs
with GNU General Public License v3.0
from ahmed605
with GNU General Public License v3.0
from ahmed605
/// <summary>
/// Writes the header, flushes each entry's pending CustomData back into the
/// archive (in place when it still fits, appended otherwise), then rewrites
/// the table of contents.
/// </summary>
public void Save()
{
    if (Header.EntryCount > 0)
    {
        _stream.Position = 0;
        var bw = new BinaryWriter(_stream);
        Header.Write(bw);
        // The TOC is rewritten last, at the position immediately after the
        // header, once every entry's offset/size is final.
        var tocOffset = _stream.Position;
        // Find the first free block past all existing entries; relocated data
        // is appended there.
        var dataOffset = int.MinValue;
        foreach (var entry in TOC)
        {
            var offset = entry.OffsetBlock + entry.UsedBlocks;
            if (offset > dataOffset)
            {
                dataOffset = offset;
            }
        }
        foreach (var entry in TOC)
        {
            if (entry.CustomData != null)
            {
                // Exact integer ceiling division. The original used
                // (int)Math.Ceiling((float)len / BlockSize), which loses
                // precision for lengths above ~16 MB (float 24-bit mantissa).
                var blockCount = (entry.CustomData.Length + TOCEntry.BlockSize - 1) / TOCEntry.BlockSize;
                if (blockCount <= entry.UsedBlocks)
                {
                    // Zero out the old data, then reuse the entry's blocks in place.
                    _stream.Seek(entry.OffsetBlock * TOCEntry.BlockSize, SeekOrigin.Begin);
                    bw.Write(new byte[entry.UsedBlocks * TOCEntry.BlockSize]);
                    _stream.Seek(entry.OffsetBlock * TOCEntry.BlockSize, SeekOrigin.Begin);
                }
                else
                {
                    // Zero out the old data and relocate the entry to the end.
                    _stream.Seek(entry.OffsetBlock * TOCEntry.BlockSize, SeekOrigin.Begin);
                    bw.Write(new byte[entry.UsedBlocks * TOCEntry.BlockSize]);
                    entry.OffsetBlock = dataOffset;
                    _stream.Seek(dataOffset * TOCEntry.BlockSize, SeekOrigin.Begin);
                    dataOffset += blockCount;
                }
                entry.UsedBlocks = (short)blockCount;
                bw.Write(entry.CustomData);
                // Pad the final partial block up to the block boundary.
                if ((entry.CustomData.Length % TOCEntry.BlockSize) != 0)
                {
                    var padding = new byte[blockCount * TOCEntry.BlockSize - entry.CustomData.Length];
                    bw.Write(padding);
                }
                entry.SetCustomData(null);
            }
        }
        _stream.Seek(tocOffset, SeekOrigin.Begin);
        TOC.Write(bw);
    }
}
19
Source : File.cs
with GNU General Public License v3.0
from ahmed605
with GNU General Public License v3.0
from ahmed605
public bool Open(string filename)
{
    _stream = new FileStream(filename, FileMode.Open, FileAccess.ReadWrite, FileShare.ReadWrite);
    var reader = new BinaryReader(_stream);

    // Parse the header and verify the magic number before going further.
    Header.Read(reader);
    var magicKnown = Enum.IsDefined(typeof(MagicId), (int)Header.Identifier);
    if (!magicKnown)
    {
        _stream.Close();
        return false;
    }

    // The table of contents sits at a fixed offset of 0x800.
    _stream.Seek(0x800, SeekOrigin.Begin);
    TOC.Read(reader);
    return true;
}
19
Source : File.cs
with GNU General Public License v3.0
from ahmed605
with GNU General Public License v3.0
from ahmed605
/// <summary>
/// Writes the header, flushes each file entry's pending CustomData back into
/// the archive (in place when it still fits, appended otherwise), then
/// rewrites the table of contents at its fixed 0x800 offset.
/// </summary>
public void Save()
{
    if (Header.EntryCount > 0)
    {
        _stream.Position = 0;
        var bw = new BinaryWriter(_stream);
        Header.Write(bw);
        // The TOC lives at a fixed offset and is rewritten last, once every
        // entry's offset/size is final.
        var tocOffset = 0x800;
        // Find the first free byte past all existing entries; relocated data
        // is appended there.
        var dataOffset = int.MinValue;
        foreach (var entry in TOC)
        {
            var fileEntry = entry as FileEntry;
            if (fileEntry != null)
            {
                var offset = fileEntry.Offset + fileEntry.SizeUsed;
                if (offset > dataOffset)
                {
                    dataOffset = offset;
                }
            }
        }
        foreach (var entry in TOC)
        {
            var fileEntry = entry as FileEntry;
            if (fileEntry != null && fileEntry.CustomData != null)
            {
                // Exact integer ceiling division. The original used
                // (int)Math.Ceiling((float)len / BlockSize), which loses
                // precision for lengths above ~16 MB (float 24-bit mantissa).
                var blockCount = (fileEntry.CustomData.Length + FileEntry.BlockSize - 1) / FileEntry.BlockSize;
                var blockSize = blockCount * FileEntry.BlockSize;
                if (blockSize <= fileEntry.SizeUsed)
                {
                    // Zero out the old data, then reuse the entry's space in place.
                    _stream.Seek(fileEntry.Offset, SeekOrigin.Begin);
                    bw.Write(new byte[fileEntry.SizeUsed]);
                    _stream.Seek(fileEntry.Offset, SeekOrigin.Begin);
                }
                else
                {
                    // Zero out the old data and relocate the entry to the end.
                    _stream.Seek(fileEntry.Offset, SeekOrigin.Begin);
                    bw.Write(new byte[fileEntry.SizeUsed]);
                    fileEntry.Offset = dataOffset;
                    _stream.Seek(dataOffset, SeekOrigin.Begin);
                    dataOffset += blockSize;
                }
                fileEntry.SizeUsed = blockSize;
                bw.Write(fileEntry.CustomData);
                // Pad the final partial block up to the block boundary.
                if ((fileEntry.CustomData.Length % FileEntry.BlockSize) != 0)
                {
                    var padding = new byte[blockSize - fileEntry.CustomData.Length];
                    bw.Write(padding);
                }
                fileEntry.SetCustomData(null);
            }
        }
        _stream.Seek(tocOffset, SeekOrigin.Begin);
        TOC.Write(bw);
    }
}
19
Source : File.cs
with GNU General Public License v3.0
from ahmed605
with GNU General Public License v3.0
from ahmed605
/// <summary>
/// Writes the header, flushes each file entry's pending CustomData back into
/// the archive (in place when it still fits, appended otherwise), then
/// rewrites the table of contents at its fixed 0x800 offset.
/// </summary>
public void Save()
{
    if (Header.EntryCount > 0)
    {
        _stream.Position = 0;
        var bw = new BinaryWriter(_stream);
        Header.Write(bw);
        // The TOC lives at a fixed offset and is rewritten last, once every
        // entry's offset/size is final.
        var tocOffset = 0x800;
        // Find the first free byte past all existing entries; relocated data
        // is appended there.
        var dataOffset = int.MinValue;
        foreach (var entry in TOC)
        {
            var fileEntry = entry as FileEntry;
            if (fileEntry != null)
            {
                var offset = fileEntry.Offset + fileEntry.SizeUsed;
                if (offset > dataOffset)
                {
                    dataOffset = offset;
                }
            }
        }
        foreach (var entry in TOC)
        {
            var fileEntry = entry as FileEntry;
            if (fileEntry != null && fileEntry.CustomData != null)
            {
                // Exact integer ceiling division. The original used
                // (int)Math.Ceiling((float)len / BlockSize), which loses
                // precision for lengths above ~16 MB (float 24-bit mantissa).
                var blockCount = (fileEntry.CustomData.Length + FileEntry.BlockSize - 1) / FileEntry.BlockSize;
                var blockSize = blockCount * FileEntry.BlockSize;
                if (blockSize <= fileEntry.SizeUsed)
                {
                    // Zero out the old data, then reuse the entry's space in place.
                    _stream.Seek(fileEntry.Offset, SeekOrigin.Begin);
                    bw.Write(new byte[fileEntry.SizeUsed]);
                    _stream.Seek(fileEntry.Offset, SeekOrigin.Begin);
                }
                else
                {
                    // Zero out the old data and relocate the entry to the end.
                    _stream.Seek(fileEntry.Offset, SeekOrigin.Begin);
                    bw.Write(new byte[fileEntry.SizeUsed]);
                    fileEntry.Offset = dataOffset;
                    _stream.Seek(dataOffset, SeekOrigin.Begin);
                    dataOffset += blockSize;
                }
                fileEntry.SizeUsed = blockSize;
                bw.Write(fileEntry.CustomData);
                // Pad the final partial block up to the block boundary.
                if ((fileEntry.CustomData.Length % FileEntry.BlockSize) != 0)
                {
                    var padding = new byte[blockSize - fileEntry.CustomData.Length];
                    bw.Write(padding);
                }
                fileEntry.SetCustomData(null);
            }
        }
        _stream.Seek(tocOffset, SeekOrigin.Begin);
        TOC.Write(bw);
    }
}
19
Source : HtmlDocument.cs
with GNU General Public License v3.0
from ahmed605
with GNU General Public License v3.0
from ahmed605
public void ReadEmbeddedResources(Stream systemMemory, Stream graphicsMemory)
{
    // A zero offset means no embedded texture dictionary is present.
    if (TextureDictionaryOffset == 0)
    {
        return;
    }

    // Seek to the dictionary within system memory and load it.
    systemMemory.Seek(TextureDictionaryOffset, SeekOrigin.Begin);
    TextureDictionary = new TextureFile();
    TextureDictionary.Open(systemMemory, graphicsMemory);
}
19
Source : VertexBuffer.cs
with GNU General Public License v3.0
from ahmed605
with GNU General Public License v3.0
from ahmed605
public void ReadData(BinaryReader br)
{
    // Jump to the vertex payload and pull the raw bytes in one read.
    br.BaseStream.Seek(DataOffset, SeekOrigin.Begin);
    var byteCount = (int)(VertexCount * StrideSize);
    RawData = br.ReadBytes(byteCount);
}
19
Source : VertexBuffer.cs
with GNU General Public License v3.0
from ahmed605
with GNU General Public License v3.0
from ahmed605
public new void Read(BinaryReader br)
{
    // Deserializes the vertex buffer resource header. The field sequence
    // mirrors the on-disk layout, so the reads below must not be reordered.
    base.Read(br);
    VertexCount = br.ReadUInt16();
    Unknown1 = br.ReadUInt16();
    DataOffset = ResourceUtil.ReadDataOffset(br);
    StrideSize = br.ReadUInt32();
    var vertexDeclOffset = ResourceUtil.ReadOffset(br);
    Unknown2 = br.ReadUInt32();
    DataOffset2 = ResourceUtil.ReadDataOffset(br);
    var p2Offset = ResourceUtil.ReadOffset(br); // null — unused pointer slot, read only to advance the stream
    //
    // Follow the embedded pointer to the vertex declaration.
    br.BaseStream.Seek(vertexDeclOffset, SeekOrigin.Begin);
    VertexDeclaration = new VertexDeclaration(br);
}
19
Source : Shader.cs
with GNU General Public License v3.0
from ahmed605
with GNU General Public License v3.0
from ahmed605
public new void Read(BinaryReader br)
{
    // Deserializes a shader resource: a fixed-layout header (read strictly in
    // on-disk order) followed by three parallel parameter tables.
    base.Read(br);
    Unknown1 = br.ReadUInt16();
    Unknown2 = br.ReadByte();
    Unknown3 = br.ReadByte();
    Unknown4 = br.ReadUInt16();
    Unknown4_1 = br.ReadUInt16();
    Unknown5 = br.ReadUInt32();
    var shaderParamOffsetsOffset = ResourceUtil.ReadOffset(br);
    Unknown6 = br.ReadUInt32();
    ShaderParamCount = br.ReadInt32();
    Unknown8 = br.ReadUInt32();
    var shaderParamTypesOffset = ResourceUtil.ReadOffset(br);
    Hash = br.ReadUInt32();
    Unknown9 = br.ReadUInt32();
    Unknown10 = br.ReadUInt32();
    var shaderParamNameOffset = ResourceUtil.ReadOffset(br);
    Unknown11 = br.ReadUInt32();
    Unknown12 = br.ReadUInt32();
    Unknown13 = br.ReadUInt32();
    // Data :
    // NOTE(review): StreamContext presumably restores the reader position on
    // dispose — confirm against its implementation.
    using (new StreamContext(br))
    {
        // Three parallel tables, each ShaderParamCount long:
        // per-parameter data offsets, type codes, and name hashes.
        br.BaseStream.Seek(shaderParamOffsetsOffset, SeekOrigin.Begin);
        ShaderParamOffsets = new SimpleArray<uint>(br, ShaderParamCount, ResourceUtil.ReadOffset);
        br.BaseStream.Seek(shaderParamTypesOffset, SeekOrigin.Begin);
        ShaderParamTypes = new SimpleArray<byte>(br, ShaderParamCount, r => r.ReadByte());
        br.BaseStream.Seek(shaderParamNameOffset, SeekOrigin.Begin);
        ShaderParamNames = new SimpleArray<uint>(br, ShaderParamCount, r => r.ReadUInt32());
        ShaderParams = new Dictionary<ParamNameHash, IShaderParam>(ShaderParamCount);
        for (int i = 0; i < ShaderParamCount; i++)
        {
            try
            {
                // Instantiate a typed parameter object and read its payload.
                var obj = ParamObjectFactory.Create((ParamType) ShaderParamTypes[i]);
                br.BaseStream.Seek(ShaderParamOffsets[i], SeekOrigin.Begin);
                obj.Read(br);
                ShaderParams.Add((ParamNameHash) ShaderParamNames[i], obj);
            }
            catch
            {
                // Best-effort: an unparseable parameter is recorded as null so
                // its name hash is still present. NOTE(review): the bare catch
                // also swallows I/O errors — consider narrowing.
                ShaderParams.Add((ParamNameHash) ShaderParamNames[i], null);
            }
        }
    }
}
19
Source : ShaderFx.cs
with GNU General Public License v3.0
from ahmed605
with GNU General Public License v3.0
from ahmed605
public new void Read(BinaryReader br)
{
    // Deserializes a ShaderFx record: fixed header fields in on-disk order,
    // then two embedded pointers to null-terminated name strings.
    base.Read(br);
    var shaderNamePtr = ResourceUtil.ReadOffset(br);
    var shaderSpsPtr = ResourceUtil.ReadOffset(br);
    Unknown14 = br.ReadUInt32();
    Unknown15 = br.ReadUInt32();
    Unknown16 = br.ReadUInt32();
    Unknown17 = br.ReadUInt32();
    // Data: follow each pointer and read the strings.
    // NOTE(review): the reader position is not restored afterwards — callers
    // appear to re-seek before continuing; confirm.
    br.BaseStream.Seek(shaderNamePtr, SeekOrigin.Begin);
    ShaderName = ResourceUtil.ReadNullTerminatedString(br);
    br.BaseStream.Seek(shaderSpsPtr, SeekOrigin.Begin);
    ShaderSPS = ResourceUtil.ReadNullTerminatedString(br);
}
19
Source : DrawableModel.cs
with GNU General Public License v3.0
from ahmed605
with GNU General Public License v3.0
from ahmed605
public new void Read(BinaryReader br)
{
    // Deserializes a drawable model: fixed-layout header (read strictly in
    // on-disk order), then the referenced shader group, skeleton, and
    // per-LOD model collections.
    base.Read(br);
    // rage::rmcDrawableBase
    // rage::rmcDrawable
    // gtaDrawable
    var shaderGroupOffset = ResourceUtil.ReadOffset(br);
    var skeletonOffset = ResourceUtil.ReadOffset(br);
    Center = new Vector4(br);
    BoundsMin = new Vector4(br);
    BoundsMax = new Vector4(br);
    // Up to 4 level-of-detail slots; a zero offset means the slot is unused.
    int levelOfDetailCount = 0;
    var modelOffsets = new uint[4];
    for (int i = 0; i < 4; i++)
    {
        modelOffsets[i] = ResourceUtil.ReadOffset(br);
        if (modelOffsets[i] != 0)
        {
            levelOfDetailCount++;
        }
    }
    AbsoluteMax = new Vector4(br);
    Unk1 = br.ReadUInt32();
    Neg1 = br.ReadUInt32();
    Neg2 = br.ReadUInt32();
    Neg3 = br.ReadUInt32();
    Unk2 = br.ReadSingle();
    Unk3 = br.ReadUInt32();
    Unk4 = br.ReadUInt32();
    Unk5 = br.ReadUInt32();
    // Collection<LightAttrs>
    Unk6 = br.ReadUInt32();
    Unk7 = br.ReadUInt32();
    // The data follows:
    if (shaderGroupOffset != 0)
    {
        br.BaseStream.Seek(shaderGroupOffset, SeekOrigin.Begin);
        ShaderGroup = new ShaderGroup(br);
    }
    if (skeletonOffset != 0)
    {
        br.BaseStream.Seek(skeletonOffset, SeekOrigin.Begin);
        Skeleton = new Skeleton(br);
    }
    // NOTE(review): this assumes the non-zero model offsets are packed into
    // the leading slots of modelOffsets — confirm gaps cannot occur.
    ModelCollection = new PtrCollection<Model>[levelOfDetailCount];
    for (int i = 0; i < levelOfDetailCount; i++)
    {
        br.BaseStream.Seek(modelOffsets[i], SeekOrigin.Begin);
        ModelCollection[i] = new PtrCollection<Model>(br);
    }
}
19
Source : DrawableModel.cs
with GNU General Public License v3.0
from ahmed605
with GNU General Public License v3.0
from ahmed605
public new void Read(BinaryReader br)
{
    // Deserializes a drawable model: fixed-layout header (read strictly in
    // on-disk order), then the referenced shader group, skeleton, and
    // per-LOD model collections.
    base.Read(br);
    // rage::rmcDrawableBase
    // rage::rmcDrawable
    // gtaDrawable
    var shaderGroupOffset = ResourceUtil.ReadOffset(br);
    var skeletonOffset = ResourceUtil.ReadOffset(br);
    Center = new Vector4(br);
    BoundsMin = new Vector4(br);
    BoundsMax = new Vector4(br);
    // Up to 4 level-of-detail slots; a zero offset means the slot is unused.
    int levelOfDetailCount = 0;
    var modelOffsets = new uint[4];
    for (int i = 0; i < 4; i++)
    {
        modelOffsets[i] = ResourceUtil.ReadOffset(br);
        if (modelOffsets[i] != 0)
        {
            levelOfDetailCount++;
        }
    }
    AbsoluteMax = new Vector4(br);
    Unk1 = br.ReadUInt32();
    Neg1 = br.ReadUInt32();
    Neg2 = br.ReadUInt32();
    Neg3 = br.ReadUInt32();
    Unk2 = br.ReadSingle();
    Unk3 = br.ReadUInt32();
    Unk4 = br.ReadUInt32();
    Unk5 = br.ReadUInt32();
    // Collection<LightAttrs>
    Unk6 = br.ReadUInt32();
    Unk7 = br.ReadUInt32();
    // The data follows:
    if (shaderGroupOffset != 0)
    {
        br.BaseStream.Seek(shaderGroupOffset, SeekOrigin.Begin);
        ShaderGroup = new ShaderGroup(br);
    }
    if (skeletonOffset != 0)
    {
        br.BaseStream.Seek(skeletonOffset, SeekOrigin.Begin);
        Skeleton = new Skeleton(br);
    }
    // NOTE(review): this assumes the non-zero model offsets are packed into
    // the leading slots of modelOffsets — confirm gaps cannot occur.
    ModelCollection = new PtrCollection<Model>[levelOfDetailCount];
    for (int i = 0; i < levelOfDetailCount; i++)
    {
        br.BaseStream.Seek(modelOffsets[i], SeekOrigin.Begin);
        ModelCollection[i] = new PtrCollection<Model>(br);
    }
}
See More Examples