Here are examples of the C# API System.BitConverter.GetBytes(ushort), taken from open-source projects. By voting up you can indicate which examples are most useful and appropriate.
677 Examples
19
Source : NetworkUtils.cs
with MIT License
from 1ZouLTReX1
with MIT License
from 1ZouLTReX1
// Appends the two-byte, machine-endian encoding of <paramref name="data"/> to the list.
public static void SerializeUshort(List<byte> byteList, ushort data)
{
    byte[] encoded = BitConverter.GetBytes(data);
    foreach (byte b in encoded)
    {
        byteList.Add(b);
    }
}
19
Source : RdpPacket.cs
with BSD 3-Clause "New" or "Revised" License
from 3gstudent
with BSD 3-Clause "New" or "Revised" License
from 3gstudent
// Writes the 16-bit value to the underlying packet stream.
// NOTE(review): BitConverter emits machine byte order, so this is only
// little-endian on little-endian hosts — confirm target platforms.
public void WriteLittleEndian16(ushort Value)
{
    byte[] raw = BitConverter.GetBytes(Value);
    base.Write(raw, 0, raw.Length);
}
19
Source : Util.cs
with MIT License
from 499116344
with MIT License
from 499116344
// Writes <paramref name="v"/> with its byte order reversed relative to the
// host (big-endian on little-endian machines).
public static void BeWrite(this BinaryWriter bw, ushort v)
{
    var buffer = BitConverter.GetBytes(v);
    Array.Reverse(buffer);
    bw.Write(buffer);
}
19
Source : Util.cs
with MIT License
from 499116344
with MIT License
from 499116344
// Writes the character's 16-bit code unit with its byte order reversed
// relative to the host.
public static void BeWrite(this BinaryWriter bw, char v)
{
    var buffer = BitConverter.GetBytes((ushort) v);
    Array.Reverse(buffer);
    bw.Write(buffer);
}
19
Source : Half.cs
with MIT License
from 91Act
with MIT License
from 91Act
// Returns the raw 16-bit pattern backing the half-precision value.
public static byte[] GetBytes(Half value)
{
    var raw = value.value;
    return BitConverter.GetBytes(raw);
}
19
Source : WavUtility.cs
with GNU General Public License v3.0
from a2659802
with GNU General Public License v3.0
from a2659802
// Writes the RIFF/WAVE "fmt " sub-chunk (PCM header) into the stream and
// returns the number of bytes written, which must equal 24 (8-byte chunk
// header + 16 bytes of format fields).
// NOTE(review): "replacedert" below looks like a scrape-corrupted "Assert";
// confirm against the original repository before renaming.
private static int WriteFileFormat(ref MemoryStream stream, int channels, int sampleRate, UInt16 bitDepth)
{
int count = 0;
int total = 24;
// 4-byte ASCII chunk id.
byte[] id = Encoding.ASCII.GetBytes("fmt ");
count += WriteBytesToMemoryStream(ref stream, id, "FMT_ID");
int subchunk1Size = 16; // 24 - 8
count += WriteBytesToMemoryStream(ref stream, BitConverter.GetBytes(subchunk1Size), "SUBCHUNK_SIZE");
// Audio format 1 = uncompressed PCM.
UInt16 audioFormat = 1;
count += WriteBytesToMemoryStream(ref stream, BitConverter.GetBytes(audioFormat), "AUDIO_FORMAT");
UInt16 numChannels = Convert.ToUInt16(channels);
count += WriteBytesToMemoryStream(ref stream, BitConverter.GetBytes(numChannels), "CHANNELS");
count += WriteBytesToMemoryStream(ref stream, BitConverter.GetBytes(sampleRate), "SAMPLE_RATE");
// Bytes consumed per second of audio.
int byteRate = sampleRate * channels * BytesPerSample(bitDepth);
count += WriteBytesToMemoryStream(ref stream, BitConverter.GetBytes(byteRate), "BYTE_RATE");
// Bytes per sample frame across all channels.
UInt16 blockAlign = Convert.ToUInt16(channels * BytesPerSample(bitDepth));
count += WriteBytesToMemoryStream(ref stream, BitConverter.GetBytes(blockAlign), "BLOCK_ALIGN");
count += WriteBytesToMemoryStream(ref stream, BitConverter.GetBytes(bitDepth), "BITS_PER_SAMPLE");
// Validate format
Debug.replacedertFormat(count == total, "Unexpected wav fmt byte count: {0} == {1}", count, total);
return count;
}
19
Source : Extensions.cs
with GNU Affero General Public License v3.0
from ACEmulator
with GNU Affero General Public License v3.0
from ACEmulator
// Writes a uint in the packed-dword wire format: values up to 32767 are
// emitted as a bare 16-bit word; larger values go out as 32 bits with the
// word halves swapped and the high bit of the leading word set.
public static void WritePackedDword(this BinaryWriter writer, uint value)
{
    if (value > 32767)
    {
        uint packedValue = (value << 16) | ((value >> 16) | 0x8000);
        writer.Write(BitConverter.GetBytes(packedValue));
        return;
    }

    ushort networkValue = Convert.ToUInt16(value);
    writer.Write(BitConverter.GetBytes(networkValue));
}
19
Source : Serializer.cs
with MIT License
from ADeltaX
with MIT License
from ADeltaX
// Serializes the 16-bit value and appends the optional timestamp trailer.
public static byte[] FromUInt16(ushort data, DateTimeOffset? timestamp = null)
{
    var payload = BitConverter.GetBytes(data);
    return payload.AppendTimestamp(timestamp);
}
19
Source : Extensions.cs
with MIT License
from adrenak
with MIT License
from adrenak
// Machine-endian two-byte encoding of the value.
public static byte[] GetBytes(this ushort value) => BitConverter.GetBytes(value);
19
Source : BingUtils.cs
with MIT License
from adrenak
with MIT License
from adrenak
// Builds the binary header for a Bing Speech websocket audio message:
// a big-endian UInt16 length prefix followed by the ASCII header text.
public static byte[] GetHeader(string requestId) {
    var headerBuilder = new StringBuilder();
    headerBuilder.Append("path:audio\r\n");
    headerBuilder.Append("x-requestid:" + requestId + "\r\n");
    headerBuilder.Append("x-timestamp:" + DateTime.UtcNow.ToString("yyyy-MM-ddTHH:mm:ss.fffK") + "\r\n");
    // Fix: the content-type and Accept headers were previously fused into one
    // line because the "\r\n" separator between them was missing.
    headerBuilder.Append("content-type:audio/wav; codec=audio/pcm; samplerate=16000\r\n");
    headerBuilder.Append("Accept:application/json");
    byte[] headerBytes = Encoding.ASCII.GetBytes(headerBuilder.ToString());
    // The wire format wants the length prefix in big-endian order, so the two
    // bytes are swapped when running on a little-endian host.
    byte[] headerBytesUInt16 = BitConverter.GetBytes((UInt16)headerBytes.Length);
    bool isBigEndian = !BitConverter.IsLittleEndian;
    var headerHead = !isBigEndian ? new byte[] { headerBytesUInt16[1], headerBytesUInt16[0] } : new byte[] { headerBytesUInt16[0], headerBytesUInt16[1] };
    return headerHead.Concat(headerBytes).ToArray();
}
19
Source : Utils.cs
with Apache License 2.0
from ajuna-network
with Apache License 2.0
from ajuna-network
// Encodes an unsigned integral value (ushort, uint or ulong) to its raw
// bytes; when littleEndian is false the byte order is reversed relative to
// the host. Any other runtime type throws.
public static byte[] Value2Bytes(object value, bool littleEndian = true)
{
    byte[] result;
    if (value is ushort u16)
    {
        result = BitConverter.GetBytes(u16);
    }
    else if (value is uint u32)
    {
        result = BitConverter.GetBytes(u32);
    }
    else if (value is ulong u64)
    {
        result = BitConverter.GetBytes(u64);
    }
    else
    {
        throw new Exception("Unhandled byte size for this method!");
    }

    if (!littleEndian)
    {
        Array.Reverse(result);
    }

    return result;
}
19
Source : Utils.cs
with Apache License 2.0
from ajuna-network
with Apache License 2.0
from ajuna-network
// Converts a textual storage-key type tag plus its string parameter into the
// encoded key bytes (numeric types via machine-endian BitConverter output).
// NOTE(review): "replacedetId" appears to be a scrape-corrupted "AssetId" —
// confirm against the original repository before renaming anything.
internal static byte[] KeyTypeToBytes(string keyType, string parameter)
{
    if (keyType == "u16")
    {
        return BitConverter.GetBytes(ushort.Parse(parameter));
    }
    if (keyType == "u32")
    {
        return BitConverter.GetBytes(uint.Parse(parameter));
    }
    if (keyType == "u64")
    {
        return BitConverter.GetBytes(ulong.Parse(parameter));
    }
    if (keyType == "T::Hash")
    {
        var hash = new Hash();
        hash.Create(parameter);
        return hash.Bytes;
    }
    if (keyType == "T::AccountId")
    {
        var accountId = new AccountId();
        accountId.Create(parameter);
        return accountId.Bytes;
    }
    if (keyType == "Vec<u8>")
    {
        return Utils.SizePrefixedByteArray(Utils.HexToByteArray(parameter).ToList());
    }
    if (keyType == "T::replacedetId")
    {
        var replacedetId = new replacedetId();
        replacedetId.Create(uint.Parse(parameter));
        return replacedetId.Bytes;
    }
    throw new Exception($"Unimplemented item function key 'item.Function.Key1' = {keyType}!");
}
19
Source : Era.cs
with Apache License 2.0
from ajuna-network
with Apache License 2.0
from ajuna-network
// Encodes this era for an extrinsic payload: an immortal era is the single
// byte 0x00; a mortal era packs into 16 bits, low nibble = clamp(log2(Period)
// - 1, 1..15) and upper 12 bits = Phase / quantizeFactor, emitted via
// BitConverter in machine byte order.
public byte[] Encode()
{
if (IsImmortal)
{
return new byte[] { 0x00 };
}
// Phase is quantized in units of Period/4096 (minimum step of 1).
var quantizeFactor = Math.Max(1, Period / 4096);
// Two's-complement trick: isolates the lowest set bit of Period
// (equivalent to the commented-out loop below).
var lastBit = Period & (ulong)-(long)Period;
//var rest = _period;
//var lastBit = 1;
//while (rest % 2 == 0 && rest != 0)
//{
// rest /= 2;
// lastBit *= 2;
//}
// 64 acts as a sentinel exponent when Period is 0.
var logOf2 = lastBit != 0 ? Math.Log(lastBit, 2) : 64;
var low = (ushort)Math.Min(15, Math.Max(1, logOf2 - 1));
var high = (ushort)(Phase / quantizeFactor << 4);
var encoded = (ushort)(low | high);
return BitConverter.GetBytes(encoded);
}
19
Source : U16.cs
with Apache License 2.0
from ajuna-network
with Apache License 2.0
from ajuna-network
// Captures both the numeric value and its raw (machine-endian) byte encoding.
public void Create(ushort value)
{
    Value = value;
    Bytes = BitConverter.GetBytes(value);
}
19
Source : WavUtility.cs
with MIT License
from alessandroTironi
with MIT License
from alessandroTironi
// Writes the RIFF/WAVE "fmt " sub-chunk (PCM header) into the stream and
// returns the number of bytes written, expected to equal 24 (8-byte chunk
// header + 16 bytes of format fields).
// NOTE(review): "replacedert" below looks like a scrape-corrupted "Assert".
private static int WriteFileFormat (ref MemoryStream stream, int channels, int sampleRate, UInt16 bitDepth)
{
int count = 0;
int total = 24;
// 4-byte ASCII chunk id.
byte[] id = Encoding.ASCII.GetBytes ("fmt ");
count += WriteBytesToMemoryStream (ref stream, id, "FMT_ID");
int subchunk1Size = 16; // 24 - 8
count += WriteBytesToMemoryStream (ref stream, BitConverter.GetBytes (subchunk1Size), "SUBCHUNK_SIZE");
// Audio format 1 = uncompressed PCM.
UInt16 audioFormat = 1;
count += WriteBytesToMemoryStream (ref stream, BitConverter.GetBytes (audioFormat), "AUDIO_FORMAT");
UInt16 numChannels = Convert.ToUInt16 (channels);
count += WriteBytesToMemoryStream (ref stream, BitConverter.GetBytes (numChannels), "CHANNELS");
count += WriteBytesToMemoryStream (ref stream, BitConverter.GetBytes (sampleRate), "SAMPLE_RATE");
// Bytes consumed per second of audio.
int byteRate = sampleRate * channels * BytesPerSample (bitDepth);
count += WriteBytesToMemoryStream (ref stream, BitConverter.GetBytes (byteRate), "BYTE_RATE");
// Bytes per sample frame across all channels.
UInt16 blockAlign = Convert.ToUInt16 (channels * BytesPerSample (bitDepth));
count += WriteBytesToMemoryStream (ref stream, BitConverter.GetBytes (blockAlign), "BLOCK_ALIGN");
count += WriteBytesToMemoryStream (ref stream, BitConverter.GetBytes (bitDepth), "BITS_PER_SAMPLE");
// Validate format
Debug.replacedertFormat (count == total, "Unexpected wav fmt byte count: {0} == {1}", count, total);
return count;
}
19
Source : BitStream.cs
with MIT License
from Alexander-Scott
with MIT License
from Alexander-Scott
// Emits all 16 bits of the value into the bit stream.
public void WriteUInt16(ushort value)
{
    var raw = BitConverter.GetBytes(value);
    WriteBytes(raw, 16);
}
19
Source : AddCopyList.cs
with Apache License 2.0
from AmpScm
with Apache License 2.0
from AmpScm
// Big-endian write of a 16-bit value, delegating to the byte[] overload.
private void WriteBigEndian(Stream Diff, ushort us)
{
    var raw = BitConverter.GetBytes(us);
    WriteBigEndian(Diff, raw);
}
19
Source : DataBuffer.cs
with MIT License
from AndreasAmMueller
with MIT License
from AndreasAmMueller
// Stores a 16-bit value at the given index; InternalSwap adjusts byte order
// to the buffer's configured endianness before the write.
public void SetUInt16(int index, ushort value)
{
    var encoded = BitConverter.GetBytes(value);
    InternalSwap(encoded);
    SetBytes(index, encoded);
}
19
Source : DataBuffer.cs
with MIT License
from AndreasAmMueller
with MIT License
from AndreasAmMueller
// Appends a 16-bit value; InternalSwap adjusts byte order to the buffer's
// configured endianness before the append.
public void AddUInt16(ushort value)
{
    var encoded = BitConverter.GetBytes(value);
    InternalSwap(encoded);
    AddBytes(encoded);
}
19
Source : StreamUtils.cs
with MIT License
from ansel86castro
with MIT License
from ansel86castro
// Writes the 16-bit value after passing its bytes through the class's
// Convert helper (presumably an endianness adjustment — confirm).
public static void WriteUInt16(Stream stream, ushort s)
{
    byte[] converted = Convert(BitConverter.GetBytes(s));
    stream.Write(converted, 0, converted.Length);
}
19
Source : ModbusUtils.cs
with GNU Lesser General Public License v3.0
from Apollo3zehn
with GNU Lesser General Public License v3.0
from Apollo3zehn
// Swaps the two bytes of a 16-bit value (0x1234 -> 0x3412).
// The previous implementation round-tripped through BitConverter.GetBytes,
// which allocates an array and — because GetBytes is machine-endian — only
// performed the swap on little-endian hosts. Pure shifts are allocation-free
// and platform-independent, and are identical on little-endian machines.
public static ushort SwitchEndianness(ushort value)
{
    return (ushort)((value << 8) | (value >> 8));
}
19
Source : ExtendedBinaryReader.cs
with GNU Lesser General Public License v3.0
from Apollo3zehn
with GNU Lesser General Public License v3.0
from Apollo3zehn
// Reads a UInt16 from the stream and returns it with its byte order reversed.
public ushort ReadUInt16Reverse()
{
    var raw = BitConverter.GetBytes(this.ReadUInt16());
    return this.ReadReverse<ushort>(raw);
}
19
Source : ExtendedBinaryWriter.cs
with GNU Lesser General Public License v3.0
from Apollo3zehn
with GNU Lesser General Public License v3.0
from Apollo3zehn
// Writes the 16-bit value with its byte order reversed.
public void WriteReverse(ushort value)
{
    var raw = BitConverter.GetBytes(value);
    this.WriteReverse(raw);
}
19
Source : EncryptionHandler.cs
with Apache License 2.0
from Appdynamics
with Apache License 2.0
from Appdynamics
// Encrypts an OOXML package using ECMA-376 agile encryption (AES-256-CBC,
// SHA-512): builds the EncryptionInfo XML template, generates salts and the
// key, derives the password hash, encrypts the package, computes the HMAC
// integrity values and the verifier pair, then assembles everything into a
// compound document with "EncryptionInfo" and "EncryptedPackage" streams.
// NOTE(review): "preplacedword"/"Preplacedword"/"Doreplacedent" are scrape-corrupted
// forms of "password"/"Password"/"Document"; the URI strings must keep the
// original spelling — confirm against the upstream repository.
private MemoryStream EncryptPackageAgile(byte[] package, ExcelEncryption encryption)
{
var xml= "<?xml version=\"1.0\" encoding=\"UTF-8\" standalone=\"yes\"?>\r\n";
xml += "<encryption xmlns=\"http://schemas.microsoft.com/office/2006/encryption\" xmlns:p=\"http://schemas.microsoft.com/office/2006/keyEncryptor/preplacedword\" xmlns:c=\"http://schemas.microsoft.com/office/2006/keyEncryptor/certificate\">";
xml += "<keyData saltSize=\"16\" blockSize=\"16\" keyBits=\"256\" hashSize=\"64\" cipherAlgorithm=\"AES\" cipherChaining=\"ChainingModeCBC\" hashAlgorithm=\"SHA512\" saltValue=\"\"/>";
xml += "<dataIntegrity encryptedHmacKey=\"\" encryptedHmacValue=\"\"/>";
xml += "<keyEncryptors>";
xml += "<keyEncryptor uri=\"http://schemas.microsoft.com/office/2006/keyEncryptor/preplacedword\">";
xml += "<p:encryptedKey spinCount=\"100000\" saltSize=\"16\" blockSize=\"16\" keyBits=\"256\" hashSize=\"64\" cipherAlgorithm=\"AES\" cipherChaining=\"ChainingModeCBC\" hashAlgorithm=\"SHA512\" saltValue=\"\" encryptedVerifierHashInput=\"\" encryptedVerifierHashValue=\"\" encryptedKeyValue=\"\" />";
xml += "</keyEncryptor></keyEncryptors></encryption>";
var encryptionInfo = new EncryptionInfoAgile();
encryptionInfo.ReadFromXml(xml);
var encr = encryptionInfo.KeyEncryptors[0];
// Cryptographically secure RNG for all salts and keys.
var rnd = RandomNumberGenerator.Create();
var s = new byte[16];
rnd.GetBytes(s);
encryptionInfo.KeyData.SaltValue = s;
rnd.GetBytes(s);
encr.SaltValue = s;
encr.KeyValue = new byte[encr.KeyBits / 8];
rnd.GetBytes(encr.KeyValue);
//Get the preplacedword key.
var hashProvider = GetHashProvider(encryptionInfo.KeyEncryptors[0]);
// Spin-count-iterated password hash per the agile encryption scheme.
var baseHash = GetPreplacedwordHash(hashProvider, encr.SaltValue, encryption.Preplacedword, encr.SpinCount, encr.HashSize);
var hashFinal = GetFinalHash(hashProvider, BlockKey_KeyValue, baseHash);
hashFinal = FixHashSize(hashFinal, encr.KeyBits / 8);
var encrData = EncryptDataAgile(package, encryptionInfo, hashProvider);
/**** Data Integrity ****/
var saltHMAC=new byte[64];
rnd.GetBytes(saltHMAC);
SetHMAC(encryptionInfo,hashProvider,saltHMAC, encrData);
/**** Verifier ****/
encr.VerifierHashInput = new byte[16];
rnd.GetBytes(encr.VerifierHashInput);
encr.VerifierHash = hashProvider.ComputeHash(encr.VerifierHashInput);
var VerifierInputKey = GetFinalHash(hashProvider, BlockKey_HashInput, baseHash);
var VerifierHashKey = GetFinalHash(hashProvider, BlockKey_HashValue, baseHash);
var KeyValueKey = GetFinalHash(hashProvider, BlockKey_KeyValue, baseHash);
var ms = new MemoryStream();
EncryptAgileFromKey(encr, VerifierInputKey, encr.VerifierHashInput, 0, encr.VerifierHashInput.Length, encr.SaltValue, ms);
encr.EncryptedVerifierHashInput = ms.ToArray();
ms = new MemoryStream();
EncryptAgileFromKey(encr, VerifierHashKey, encr.VerifierHash, 0, encr.VerifierHash.Length, encr.SaltValue, ms);
encr.EncryptedVerifierHash = ms.ToArray();
ms = new MemoryStream();
EncryptAgileFromKey(encr, KeyValueKey, encr.KeyValue, 0, encr.KeyValue.Length, encr.SaltValue, ms);
encr.EncryptedKeyValue = ms.ToArray();
xml = encryptionInfo.Xml.OuterXml;
var byXml = Encoding.UTF8.GetBytes(xml);
ms = new MemoryStream();
// EncryptionInfo stream header: version 4.4, reserved flags 0x40.
ms.Write(BitConverter.GetBytes((ushort)4), 0, 2); //Major Version
ms.Write(BitConverter.GetBytes((ushort)4), 0, 2); //Minor Version
ms.Write(BitConverter.GetBytes((uint)0x40), 0, 4); //Reserved
ms.Write(byXml,0,byXml.Length);
var doc = new CompoundDoreplacedent();
//Add the dataspace streams
CreateDataSpaces(doc);
//EncryptionInfo...
doc.Storage.DataStreams.Add("EncryptionInfo", ms.ToArray());
//...and the encrypted package
doc.Storage.DataStreams.Add("EncryptedPackage", encrData);
ms = new MemoryStream();
doc.Save(ms);
//ms.Write(e,0,e.Length);
return ms;
}
19
Source : ZipFile.Save.cs
with Apache License 2.0
from Appdynamics
with Apache License 2.0
from Appdynamics
// Writes the zip central directory (and, when needed, the ZIP64 end-of-central-
// directory records) to the output stream. Returns true when ZIP64 structures
// were required (too many entries, directory too large, or offset past 4 GiB).
// Throws ZipException when ZIP64 is required but disallowed by the caller.
// NOTE(review): "replacedumption" below is a scrape-corrupted "assumption".
public static bool WriteCentralDirectoryStructure(Stream s,
ICollection<ZipEntry> entries,
uint numSegments,
Zip64Option zip64,
String comment,
ZipContainer container)
{
var zss = s as ZipSegmentedStream;
if (zss != null)
zss.ContiguousWrite = true;
// write to a memory stream in order to keep the
// CDR contiguous
Int64 aLength = 0;
using (var ms = new MemoryStream())
{
foreach (ZipEntry e in entries)
{
if (e.IncludedInMostRecentSave)
{
// this writes a ZipDirEntry corresponding to the ZipEntry
e.WriteCentralDirectoryEntry(ms);
}
}
var a = ms.ToArray();
s.Write(a, 0, a.Length);
aLength = a.Length;
}
// We need to keep track of the start and
// Finish of the Central Directory Structure.
// Cannot always use WriteStream.Length or Position; some streams do
// not support these. (eg, ASP.NET Response.OutputStream) In those
// cases we have a CountingStream.
// Also, we cannot just set Start as s.Position bfore the write, and Finish
// as s.Position after the write. In a split zip, the write may actually
// flip to the next segment. In that case, Start will be zero. But we
// don't know that til after we know the size of the thing to write. So the
// answer is to compute the directory, then ask the ZipSegmentedStream which
// segment that directory would fall in, it it were written. Then, include
// that data into the directory, and finally, write the directory to the
// output stream.
var output = s as CountingStream;
long Finish = (output != null) ? output.ComputedPosition : s.Position; // BytesWritten
long Start = Finish - aLength;
// need to know which segment the EOCD record starts in
UInt32 startSegment = (zss != null)
? zss.CurrentSegment
: 0;
Int64 SizeOfCentralDirectory = Finish - Start;
int countOfEntries = CountEntries(entries);
// ZIP64 is forced by option, or required once any classic 16/32-bit field
// in the EOCD would overflow.
bool needZip64CentralDirectory =
zip64 == Zip64Option.Always ||
countOfEntries >= 0xFFFF ||
SizeOfCentralDirectory > 0xFFFFFFFF ||
Start > 0xFFFFFFFF;
byte[] a2 = null;
// emit ZIP64 extensions as required
if (needZip64CentralDirectory)
{
if (zip64 == Zip64Option.Never)
{
#if NETCF || Core
throw new ZipException("The archive requires a ZIP64 Central Directory. Consider enabling ZIP64 extensions.");
#else
// Tailor the error message to whichever public API initiated the save.
System.Diagnostics.StackFrame sf = new System.Diagnostics.StackFrame(1);
if (sf.GetMethod().DeclaringType == typeof(ZipFile))
throw new ZipException("The archive requires a ZIP64 Central Directory. Consider setting the ZipFile.UseZip64WhenSaving property.");
else
throw new ZipException("The archive requires a ZIP64 Central Directory. Consider setting the ZipOutputStream.EnableZip64 property.");
#endif
}
var a = GenZip64EndOfCentralDirectory(Start, Finish, countOfEntries, numSegments);
a2 = GenCentralDirectoryFooter(Start, Finish, zip64, countOfEntries, comment, container);
if (startSegment != 0)
{
// Patch segment numbers into the ZIP64 EOCD at their fixed offsets.
UInt32 thisSegment = zss.ComputeSegment(a.Length + a2.Length);
int i = 16;
// number of this disk
Array.Copy(BitConverter.GetBytes(thisSegment), 0, a, i, 4);
i += 4;
// number of the disk with the start of the central directory
//Array.Copy(BitConverter.GetBytes(startSegment), 0, a, i, 4);
Array.Copy(BitConverter.GetBytes(thisSegment), 0, a, i, 4);
i = 60;
// offset 60
// number of the disk with the start of the zip64 eocd
Array.Copy(BitConverter.GetBytes(thisSegment), 0, a, i, 4);
i += 4;
i += 8;
// offset 72
// total number of disks
Array.Copy(BitConverter.GetBytes(thisSegment), 0, a, i, 4);
}
s.Write(a, 0, a.Length);
}
else
a2 = GenCentralDirectoryFooter(Start, Finish, zip64, countOfEntries, comment, container);
// now, the regular footer
if (startSegment != 0)
{
// The replacedumption is the central directory is never split across
// segment boundaries.
UInt16 thisSegment = (UInt16) zss.ComputeSegment(a2.Length);
int i = 4;
// number of this disk
Array.Copy(BitConverter.GetBytes(thisSegment), 0, a2, i, 2);
i += 2;
// number of the disk with the start of the central directory
//Array.Copy(BitConverter.GetBytes((UInt16)startSegment), 0, a2, i, 2);
Array.Copy(BitConverter.GetBytes(thisSegment), 0, a2, i, 2);
i += 2;
}
s.Write(a2, 0, a2.Length);
// reset the contiguous write property if necessary
if (zss != null)
zss.ContiguousWrite = false;
return needZip64CentralDirectory;
}
19
Source : VBACompression.cs
with Apache License 2.0
from Appdynamics
with Apache License 2.0
from Appdynamics
// Compresses up to 4096 bytes of input starting at startPos using the
// MS-OVBA (VBA project) run-length/copy-token scheme: the output is a
// sequence of flag bytes, each followed by 8 tokens — a literal byte when
// the flag bit is 0, or a 16-bit copy token (offset/length, split at a
// position-dependent bit boundary) when the flag bit is 1.
// On return startPos is advanced to the end of the consumed input.
private static byte[] CompressChunk(byte[] buffer, ref int startPos)
{
var comprBuffer = new byte[4096];
// flagPos tracks where the current group's flag byte will be stored;
// cPos starts at 1 to leave room for the first flag byte.
int flagPos = 0;
int cPos = 1;
int dPos = startPos;
int dEnd = startPos + 4096 < buffer.Length ? startPos + 4096 : buffer.Length;
while (dPos < dEnd)
{
byte tokenFlags = 0;
// Each iteration of this loop emits one group of up to 8 tokens.
for (int i = 0; i < 8; i++)
{
if (dPos - startPos > 0)
{
// Back-reference search: scan backwards for the longest match.
int bestCandidate = -1;
int bestLength = 0;
int candidate = dPos - 1;
// The offset/length bit split depends on how far into the chunk we are.
int bitCount = GetLengthBits(dPos - startPos);
int bits = (16 - bitCount);
ushort lengthMask = (ushort)((0xFFFF) >> bits);
while (candidate >= startPos)
{
if (buffer[candidate] == buffer[dPos])
{
int length = 1;
while (buffer.Length > dPos + length && buffer[candidate + length] == buffer[dPos + length] && length < lengthMask && dPos + length < dEnd)
{
length++;
}
if (length > bestLength)
{
bestCandidate = candidate;
bestLength = length;
if (bestLength == lengthMask)
{
break;
}
}
}
candidate--;
}
if (bestLength >= 3) //Copy token
{
// Copy tokens only pay off for matches of 3+ bytes; lengths are
// stored biased by 3.
tokenFlags |= (byte)(1 << i);
UInt16 offsetMask = (ushort)~lengthMask;
ushort token = (ushort)(((ushort)(dPos - (bestCandidate + 1))) << (bitCount) | (ushort)(bestLength - 3));
Array.Copy(BitConverter.GetBytes(token), 0, comprBuffer, cPos, 2);
dPos = dPos + bestLength;
cPos += 2;
//SetCopy Token
}
else
{
comprBuffer[cPos++] = buffer[dPos++];
}
}
else
{
// First byte of the chunk has nothing to reference: always a literal.
comprBuffer[cPos++] = buffer[dPos++];
}
if (dPos >= dEnd) break;
}
// Store the flag byte for the group just written and reserve the next slot.
comprBuffer[flagPos] = tokenFlags;
flagPos = cPos++;
}
var ret = new byte[cPos - 1];
Array.Copy(comprBuffer, ret, ret.Length);
startPos = dEnd;
return ret;
}
19
Source : ExtensionMethods.cs
with MIT License
from araghon007
with MIT License
from araghon007
// Machine-endian two-byte encoding of a UInt16.
private static byte[] UInt16ToBytes(ushort value) => BitConverter.GetBytes(value);
19
Source : Converter.cs
with MIT License
from arcplus
with MIT License
from arcplus
// Packs each int (truncated to 16 bits, as in the original unchecked cast)
// into a flat little-endian-per-host ushort byte buffer, preserving order.
private static byte[] ToU16Buffer(int[] arr)
{
    var result = new byte[arr.Length * 2];
    for (var i = 0; i < arr.Length; i++)
    {
        var encoded = BitConverter.GetBytes((ushort)arr[i]);
        result[i * 2] = encoded[0];
        result[i * 2 + 1] = encoded[1];
    }
    return result;
}
19
Source : Converter.cs
with MIT License
from arcplus
with MIT License
from arcplus
// Builds one glTF Primitive per material group of the mesh's faces:
// deduplicates per-primitive vertices via a string-keyed cache, accumulates
// position/normal/uv byte buffers and their min/max bounds for the glTF
// accessors, emits triangle index buffers (ushort or uint as requested), and
// optionally attaches per-vertex _BATCHID data for a 3D Tiles batch table.
private List<Primitive> AddVertexAttributes(ObjModel objModel, Geometry mesh,
bool uint32Indices)
{
// Merge faces that share a material into a single Face per material.
var facesGroup = mesh.Faces.GroupBy(c => c.MatName);
var faces = new List<Face>();
foreach(var fg in facesGroup)
{
var matName = fg.Key;
var f = new Face { MatName = matName };
foreach(var ff in fg)
{
f.Triangles.AddRange(ff.Triangles);
}
if (f.Triangles.Count > 0)
{
faces.Add(f);
}
}
var hasPositions = faces.Count > 0;
// OBJ indices are 1-based; index 0 means "not present".
var hasUvs = faces.Any(c => c.Triangles.Any(d => d.V1.T > 0));
var hasNormals = faces.Any(c => c.Triangles.Any(d => d.V1.N > 0));
var vertices = objModel.Vertices;
var normals = objModel.Normals;
var uvs = objModel.Uvs;
// Vertex attributes are shared by all primitives in the mesh
var name0 = mesh.Id;
var ps = new List<Primitive>(faces.Count * 2);
var index = 0;
foreach (var f in faces)
{
var faceName = name0;
if (index > 0)
{
faceName = name0 + "_" + index;
}
// Bounds trackers for positions (v), normals (n) and texcoords (t).
MinMax vmmX = new MinMax(), vmmY = new MinMax(), vmmZ = new MinMax();
MinMax nmmX = new MinMax(), nmmY = new MinMax(), nmmZ = new MinMax();
MinMax tmmX = new MinMax(), tmmY = new MinMax();
var vList = 0;
var nList = 0;
var tList = 0;
var vs = new List<byte>(); // vertexBuffers
var ns = new List<byte>(); // normalBuffers
var ts = new List<byte>(); // textureBuffers
// every primitive need their own vertex indices(v,t,n)
Dictionary<string, int> FaceVertexCache = new Dictionary<string, int>();
int FaceVertexCount = 0;
//List<int[]> indiceList = new List<int[]>(faces.Count * 2);
//var matIndexList = new List<int>(faces.Count * 2);
// f is a primitive
var iList = new List<int>(f.Triangles.Count*3*2); // primitive indices
foreach(var t in f.Triangles)
{
// Convert 1-based OBJ indices to 0-based array indices.
var v1Index = t.V1.V - 1;
var v2Index = t.V2.V - 1;
var v3Index = t.V3.V - 1;
var v1 = vertices[v1Index];
var v2 = vertices[v2Index];
var v3 = vertices[v3Index];
UpdateMinMax(new[] { v1.X, v2.X, v3.X }, vmmX);
UpdateMinMax(new[] { v1.Y, v2.Y, v3.Y }, vmmY);
UpdateMinMax(new[] { v1.Z, v2.Z, v3.Z }, vmmZ);
Vec3 n1 = new Vec3(), n2 = new Vec3(), n3 = new Vec3();
if (t.V1.N > 0) // hasNormals
{
var n1Index = t.V1.N - 1;
var n2Index = t.V2.N - 1;
var n3Index = t.V3.N - 1;
n1 = normals[n1Index];
n2 = normals[n2Index];
n3 = normals[n3Index];
UpdateMinMax(new[] { n1.X, n2.X, n3.X }, nmmX);
UpdateMinMax(new[] { n1.Y, n2.Y, n3.Y }, nmmY);
UpdateMinMax(new[] { n1.Z, n2.Z, n3.Z }, nmmZ);
}
Vec2 t1 = new Vec2(), t2 = new Vec2(), t3 = new Vec2();
if (t.V1.T > 0) // hasUvs
{
var t1Index = t.V1.T - 1;
var t2Index = t.V2.T - 1;
var t3Index = t.V3.T - 1;
t1 = uvs[t1Index];
t2 = uvs[t2Index];
t3 = uvs[t3Index];
UpdateMinMax(new[] { t1.U, t2.U, t3.U }, tmmX);
// OBJ's V axis is flipped relative to glTF's texture space.
UpdateMinMax(new[] { 1 - t1.V, 1 - t2.V, 1 - t3.V }, tmmY);
}
// Deduplicate vertices by their full (v,t,n) index triple string.
var v1Str = t.V1.ToString();
if (!FaceVertexCache.ContainsKey(v1Str))
{
FaceVertexCache.Add(v1Str, FaceVertexCount++);
vList++; vs.AddRange(v1.ToFloatBytes());
if (t.V1.N > 0) // hasNormals
{
nList++; ns.AddRange(n1.ToFloatBytes());
}
if (t.V1.T > 0) // hasUvs
{
tList++; ts.AddRange(new Vec2(t1.U, 1 - t1.V).ToFloatBytes());
}
}
var v2Str = t.V2.ToString();
if (!FaceVertexCache.ContainsKey(v2Str))
{
FaceVertexCache.Add(v2Str, FaceVertexCount++);
vList++; vs.AddRange(v2.ToFloatBytes());
if (t.V2.N > 0) // hasNormals
{
nList++; ns.AddRange(n2.ToFloatBytes());
}
if (t.V2.T > 0) // hasUvs
{
tList++; ts.AddRange(new Vec2(t2.U, 1 - t2.V).ToFloatBytes());
}
}
var v3Str = t.V3.ToString();
if (!FaceVertexCache.ContainsKey(v3Str))
{
FaceVertexCache.Add(v3Str, FaceVertexCount++);
vList++; vs.AddRange(v3.ToFloatBytes());
if (t.V3.N > 0) // hasNormals
{
nList++; ns.AddRange(n3.ToFloatBytes());
}
if (t.V3.T > 0) // hasUvs
{
tList++; ts.AddRange(new Vec2(t3.U, 1 - t3.V).ToFloatBytes());
}
}
// Vertex Indices
// Flip the triangle if its winding disagrees with the first normal.
var correctWinding = CheckWindingCorrect(v1, v2, v3, n1);
if (correctWinding)
{
iList.AddRange(new[] {
FaceVertexCache[v1Str],
FaceVertexCache[v2Str],
FaceVertexCache[v3Str]
});
}
else
{
iList.AddRange(new[] {
FaceVertexCache[v1Str],
FaceVertexCache[v3Str],
FaceVertexCache[v2Str]
});
}
}
var materialIndex = GetMaterial(objModel, f.MatName);
//matIndexList.Add(materialIndex);
// Register accessors and raw buffers for each attribute that has data.
var atts = new Dictionary<string, int>();
var accessorIndex = _model.Accessors.Count;
var accessorVertex = new Accessor
{
Min = new double[] { vmmX.Min, vmmY.Min, vmmZ.Min },
Max = new double[] { vmmX.Max, vmmY.Max, vmmZ.Max },
Type = AccessorType.VEC3,
Count = vList,
ComponentType = ComponentType.F32,
Name = faceName + "_positions"
};
_model.Accessors.Add(accessorVertex);
atts.Add("POSITION", accessorIndex);
_buffers.PositionBuffers.Add(vs.ToArray());
_buffers.PositionAccessors.Add(accessorIndex);
if (_options.WithBatchTable)
{
_buffers.BatchTableJson.MaxPoint.Add(accessorVertex.Max);
_buffers.BatchTableJson.MinPoint.Add(accessorVertex.Min);
}
if (nList > 0) //hasNormals)
{
accessorIndex = _model.Accessors.Count;
var accessorNormal = new Accessor
{
Min = new double[] { nmmX.Min, nmmY.Min, nmmZ.Min },
Max = new double[] { nmmX.Max, nmmY.Max, nmmZ.Max },
Type = AccessorType.VEC3,
Count = nList,
ComponentType = ComponentType.F32,
Name = faceName + "_normals"
};
_model.Accessors.Add(accessorNormal);
atts.Add("NORMAL", accessorIndex);
_buffers.NormalBuffers.Add(ns.ToArray());
_buffers.NormalAccessors.Add(accessorIndex);
}
if (tList > 0) //hasUvs)
{
accessorIndex = _model.Accessors.Count;
var accessorUv = new Accessor
{
Min = new double[] { tmmX.Min, tmmY.Min },
Max = new double[] { tmmX.Max, tmmY.Max },
Type = AccessorType.VEC2,
Count = tList,
ComponentType = ComponentType.F32,
Name = faceName + "_texcoords"
};
_model.Accessors.Add(accessorUv);
atts.Add("TEXCOORD_0", accessorIndex);
_buffers.UvBuffers.Add(ts.ToArray());
_buffers.UvAccessors.Add(accessorIndex);
}
else
{
// No texcoords: drop the base color texture so the material stays valid.
var gMat = _model.Materials[materialIndex];
if (gMat.PbrMetallicRoughness.BaseColorTexture != null)
{
gMat.PbrMetallicRoughness.BaseColorTexture = null;
}
}
if (_options.WithBatchTable)
{
// One batch id per vertex, stored as 16-bit values.
var batchIdCount = vList;
accessorIndex = AddBatchIdAttribute(
_buffers.CurrentBatchId, batchIdCount, faceName + "_batchId");
atts.Add("_BATCHID", accessorIndex);
var batchIds = new List<byte>();
for (var i = 0; i < batchIdCount; i++)
{
batchIds.AddRange(BitConverter.GetBytes((ushort)_buffers.CurrentBatchId));
}
_buffers.BatchIdBuffers.Add(batchIds.ToArray());
_buffers.BatchIdAccessors.Add(accessorIndex);
_buffers.BatchTableJson.BatchIds.Add((ushort)_buffers.CurrentBatchId);
_buffers.BatchTableJson.Names.Add(faceName);
_buffers.CurrentBatchId++;
}
var indices = iList.ToArray();
var indexAccessorIndex = AddIndexArray(indices, uint32Indices, faceName + "_indices");
var indexBuffer = uint32Indices ? ToU32Buffer(indices) : ToU16Buffer(indices);
_buffers.IndexBuffers.Add(indexBuffer);
_buffers.IndexAccessors.Add(indexAccessorIndex);
var p = new Primitive
{
Attributes = atts,
Indices = indexAccessorIndex,
Material = materialIndex,//matIndexList[i],
Mode = Mode.Triangles
};
ps.Add(p);
index++;
}
return ps;
}
19
Source : Endian.cs
with MIT License
from Arefu
with MIT License
from Arefu
// Encodes the value to bytes; when convert is true the byte order is
// flipped relative to the host order.
public static byte[] GetBytes(ushort value, bool convert)
{
    var result = BitConverter.GetBytes(value);
    if (!convert)
    {
        return result;
    }
    Array.Reverse(result);
    return result;
}
19
Source : AccurateBinaryWriter.cs
with GNU General Public License v3.0
from Artentus
with GNU General Public License v3.0
from Artentus
// Writes the two raw bytes of the value through WriteRaw.
public override void Write(ushort value)
{
    WriteRaw(BitConverter.GetBytes(value));
}
19
Source : AssetBinaryWriter.cs
with MIT License
from atenfyr
with MIT License
from atenfyr
// Writes the value, byte-swapping first when running on a big-endian host
// (see ReverseIfBigEndian).
public override void Write(ushort value)
{
    var raw = ReverseIfBigEndian(BitConverter.GetBytes(value));
    this.Write(raw);
}
19
Source : Util.cs
with MIT License
from Azer0s
with MIT License
from Azer0s
// Renders the value as colon-separated uppercase hex octets, most
// significant byte first (e.g. 0x0A0B -> "0A:0B").
public static string ToMACAddressString(this ushort sUshort)
{
    var reversed = BitConverter.GetBytes(sUshort).Reverse();
    var octets = reversed.Select(b => b.ToString("X2"));
    return string.Join(":", octets);
}
19
Source : Dot1QHeader.cs
with MIT License
from Azer0s
with MIT License
from Azer0s
// Serializes this 802.1Q VLAN tag to wire order: byte 0 packs the 3-bit
// priority, the flag bit and VLAN-ID bits 11..8; byte 1 is the low byte of
// the VLAN ID; the EtherType follows with bytes reversed relative to the
// host (network order on little-endian machines).
// NOTE(review): Set(int, ...) / Get(int) look like project bit-manipulation
// extensions; assuming Set writes the given bit position — confirm against
// their definitions.
public byte[] ToBytes()
{
var bytes = new List<byte>();
var firstByte = (byte) (Priority << 5);
firstByte.Set(4, Flag);
firstByte.Set(3, VlanID.Get(11));
firstByte.Set(2, VlanID.Get(10));
firstByte.Set(1, VlanID.Get(9));
firstByte.Set(0, VlanID.Get(8));
bytes.Add(firstByte);
bytes.Add((byte) VlanID);
bytes.AddRange(BitConverter.GetBytes(Type).Reverse());
return bytes.ToArray();
}
19
Source : MACHeader.cs
with MIT License
from Azer0s
with MIT License
from Azer0s
// Ethernet (MAC) header bytes: destination address, source address, then
// the EtherType with bytes reversed relative to the host (network order on
// little-endian machines).
public byte[] ToBytes()
{
    var frame = new List<byte>();
    frame.AddRange(Dst.ToBytes());
    frame.AddRange(Src.ToBytes());
    frame.AddRange(BitConverter.GetBytes(EtherType).Reverse());
    return frame.ToArray();
}
19
Source : Compression.cs
with BSD 3-Clause "New" or "Revised" License
from b4rtik
with BSD 3-Clause "New" or "Revised" License
from b4rtik
// Encodes the 16-bit chunk header: the low bits carry CompressedChunkSize - 3,
// while the signature bits are 0xB000 for a compressed chunk and 0x3000 for
// an uncompressed one.
internal byte[] SerializeData()
{
    ValidateChunkSizeAndCompressedFlag();
    UInt16 header = IsCompressed
        ? (UInt16)(0xb000 | (CompressedChunkSize - 3))
        : (UInt16)(0x3000 | (CompressedChunkSize - 3));
    return BitConverter.GetBytes(header);
}
19
Source : Compression.cs
with BSD 3-Clause "New" or "Revised" License
from b4rtik
with BSD 3-Clause "New" or "Revised" License
from b4rtik
// Packs the copy-token fields (position, offset, length) and returns their
// two-byte encoding.
public byte[] SerializeData()
{
    return BitConverter.GetBytes(Pack(Position, _tokenOffset, _tokenLength));
}
19
Source : EndianAwareBinaryWriter.cs
with MIT License
from bamcis-io
with MIT License
from bamcis-io
// Writes the 16-bit value honoring the requested endianness.
public void Write(UInt16 value, Endianness endianness)
{
    this.WriteForEndianness(BitConverter.GetBytes(value), endianness);
}
19
Source : WDB6.cs
with The Unlicense
from BAndysc
with The Unlicense
from BAndysc
/// <summary>
/// Reads every record of the table into raw byte arrays keyed by record id,
/// resolving the optional offset map, index table, copy table and common-data
/// table that trail the record block. (Looks like a WDB/DB2-style client
/// database format — NOTE(review): confirm against the enclosing reader class.)
/// </summary>
/// <param name="dbReader">Reader positioned over the full database stream.</param>
/// <param name="pos">Absolute stream position of the first fixed-size record (used only when no offset map is present).</param>
/// <returns>Map of record id to that record's raw bytes; the id is prepended to the bytes when it comes from the index table or offset map.</returns>
public new Dictionary<int, byte[]> ReadOffsetData(BinaryReader dbReader, long pos)
{
    var CopyTable = new Dictionary<int, byte[]>();
    var offsetmap = new List<Tuple<int, short>>();
    var firstindex = new Dictionary<int, OffsetDuplicate>();

    // Trailing tables sit back-to-back at the end of the stream:
    // [... records ...][index table][copy table][common data table]<EOF>
    long commonDataTablePos = dbReader.BaseStream.Length - CommonDataTableSize;
    long copyTablePos = commonDataTablePos - CopyTableSize;
    long indexTablePos = copyTablePos - (HasIndexTable ? RecordCount * 4 : 0);
    int[] m_indexes = null;

    //Offset Map: one (offset, length) pair per id slot in [MinId, MaxId]
    if (HasOffsetTable)
    {
        // Records table — the offset map starts right after the string block
        dbReader.Scrub(StringBlockSize);
        for (var i = 0; i < MaxId - MinId + 1; i++)
        {
            int offset = dbReader.ReadInt32();
            short length = dbReader.ReadInt16();
            if (offset == 0 || length == 0) // empty slot: this id is not present
                continue;
            //Special case, may contain duplicates in the offset map that we don't want
            if (CopyTableSize == 0)
            {
                // First id seen at a given offset "owns" the row; later ids pointing
                // at the same offset are recorded as duplicates and skipped below.
                if (!firstindex.ContainsKey(offset))
                    firstindex.Add(offset, new OffsetDuplicate(offsetmap.Count, firstindex.Count));
                else
                    OffsetDuplicates.Add(MinId + i, firstindex[offset].VisibleIndex);
            }
            offsetmap.Add(new Tuple<int, short>(offset, length));
        }
    }

    // Relationship data (one int per id slot) is skipped, not parsed here.
    if (HasRelationshipData)
        dbReader.BaseStream.Position += (MaxId - MinId + 1) * 4;

    //Index table: one int32 id per record
    if (HasIndexTable)
    {
        //Offset map alone reads straight into this others may not
        if (!HasOffsetTable || HasRelationshipData)
            dbReader.Scrub(indexTablePos);
        m_indexes = new int[RecordCount];
        for (var i = 0; i < RecordCount; i++)
            m_indexes[i] = dbReader.ReadInt32();
    }

    //Extract record data
    for (var i = 0; i < Math.Max(RecordCount, offsetmap.Count); i++)
    {
        if (HasOffsetTable && m_indexes != null)
        {
            // CopyTable.Count doubles as the index-table cursor because duplicate
            // offset-map entries are skipped and never reach CopyTable.
            int id = m_indexes[Math.Min(CopyTable.Count, m_indexes.Length - 1)];
            var map = offsetmap[i];
            if (CopyTableSize == 0 && firstindex[map.Item1].HiddenIndex != i) //Ignore duplicates
                continue;
            dbReader.Scrub(map.Item1);
            // Prepend the id so consumers see a uniform [id][fields] layout.
            var recordbytes = BitConverter.GetBytes(id).Concat(dbReader.ReadBytes(map.Item2));
            CopyTable.Add(id, recordbytes.ToArray());
        }
        else
        {
            // Fixed-size records: seek directly to record i.
            dbReader.Scrub(pos + i * RecordSize);
            byte[] recordbytes = dbReader.ReadBytes((int) RecordSize).ToArray();
            if (HasIndexTable)
            {
                var newrecordbytes = BitConverter.GetBytes(m_indexes[i]).Concat(recordbytes);
                CopyTable.Add(m_indexes[i], newrecordbytes.ToArray());
            }
            else
            {
                // Id lives inside the record itself; assemble it byte-by-byte
                // (little-endian) from ByteCount bytes at the id field's offset.
                int bytecount = FieldStructure[IdIndex].ByteCount;
                int offset = FieldStructure[IdIndex].Offset;
                var id = 0;
                for (var j = 0; j < bytecount; j++)
                    id |= recordbytes[offset + j] << (j * 8);
                CopyTable.Add(id, recordbytes);
            }
        }
    }

    //CopyTable: (newId, existingId) pairs that clone an existing row under a new id
    if (CopyTableSize != 0 && copyTablePos != dbReader.BaseStream.Length)
    {
        dbReader.Scrub(copyTablePos);
        while (dbReader.BaseStream.Position != dbReader.BaseStream.Length)
        {
            int id = dbReader.ReadInt32();
            int idcopy = dbReader.ReadInt32();
            byte[] copyRow = CopyTable[idcopy];
            var newRow = new byte[copyRow.Length];
            Array.Copy(copyRow, newRow, newRow.Length);
            // Overwrite the embedded id (first 4 bytes) with the clone's id.
            Array.Copy(BitConverter.GetBytes(id), newRow, sizeof(int));
            CopyTable.Add(id, newRow);
        }
    }

    //CommonDataTable: sparse per-column (id -> value) maps appended after the rows
    if (CommonDataTableSize > 0)
    {
        dbReader.Scrub(commonDataTablePos);
        int columncount = dbReader.ReadInt32();
        var commondatalookup = new Dictionary<int, byte[]>[columncount];

        //Initial Data extraction
        for (var i = 0; i < columncount; i++)
        {
            int count = dbReader.ReadInt32();
            byte type = dbReader.ReadByte();
            short bit = CommonDataBits[type];
            int size = (32 - bit) >> 3; // stored value width in bytes for this column type
            commondatalookup[i] = new Dictionary<int, byte[]>();

            //New field not defined in header — synthesize a FieldStructureEntry for it
            if (i > FieldStructure.Count - 1)
            {
                var offset = (ushort) (FieldStructure.Count == 0
                    ? 0
                    : FieldStructure[i - 1].Offset + FieldStructure[i - 1].ByteCount);
                FieldStructure.Add(new FieldStructureEntry(bit, offset, type));
                if (FieldStructure.Count > 1)
                    FieldStructure[i - 1].SetLength(FieldStructure[i]);
            }

            for (var x = 0; x < count; x++)
            {
                commondatalookup[i].Add(dbReader.ReadInt32(), dbReader.ReadBytes(size));
                // Builds >= 24492 pad every value out to 4 bytes; skip the padding.
                if (TableStructure == null || TableStructure?.Build >= 24492)
                    dbReader.ReadBytes(4 - size);
            }
        }

        int[] ids = CopyTable.Keys.ToArray();
        foreach (int id in ids)
        {
            for (var i = 0; i < commondatalookup.Length; i++)
            {
                if (!FieldStructure[i].CommonDataColumn)
                    continue;
                var col = commondatalookup[i];
                string defaultValue = TableStructure?.Fields?[i]?.DefaultValue;
                defaultValue = string.IsNullOrEmpty(defaultValue) ? "0" : defaultValue;
                FieldStructureEntry field = FieldStructure[i];
                var zeroData = new byte[field.ByteCount];

                // Materialise the column's textual default value as raw bytes.
                if (defaultValue != "0")
                {
                    switch (field.CommonDataType)
                    {
                        case 1:
                            zeroData = BitConverter.GetBytes(ushort.Parse(defaultValue));
                            break;
                        case 2:
                            zeroData = new[] {byte.Parse(defaultValue)};
                            break;
                        case 3:
                            zeroData = BitConverter.GetBytes(float.Parse(defaultValue));
                            break;
                        case 4:
                            zeroData = BitConverter.GetBytes(int.Parse(defaultValue));
                            break;
                    }
                }

                byte[] currentData = CopyTable[id];
                byte[] data = col.ContainsKey(id) ? col[id] : zeroData;
                // Grow the row, then splice the column value in at its field offset.
                Array.Resize(ref currentData, currentData.Length + data.Length);
                Array.Copy(data, 0, currentData, field.Offset, data.Length);
                CopyTable[id] = currentData;
            }
        }

        commondatalookup = null;
        InternalRecordSize = (uint) CopyTable.Values.First().Length;
    }

    return CopyTable;
}
Example 19 — Source: BitStream.cs (The Unlicense, from BAndysc)
/// <summary>
/// Writes the low <paramref name="bits"/> bits of <paramref name="value"/> to the stream.
/// </summary>
/// <param name="value">The 16-bit value to emit.</param>
/// <param name="bits">Number of bits to write; values outside [0, 16] are clamped.</param>
public void WriteUInt16(ushort value, int bits = 16)
{
    // Clamp the requested bit count into the valid range for a ushort.
    if (bits < 0)
        bits = 0;
    else if (bits > 16)
        bits = 16;
    WriteBits(BitConverter.GetBytes(value), bits);
}
Example 19 — Source: MemoryBlock.cs (Apache License 2.0, from beetlex-io)
/// <summary>
/// Appends the BitConverter byte representation of <paramref name="value"/>
/// by delegating to the byte-array overload of <c>Full</c>.
/// </summary>
public void Full(ushort value) => Full(BitConverter.GetBytes(value));
Example 19 — Source: ZipStorer.cs (GNU General Public License v3.0, from berichan)
/// <summary>
/// Writes the ZIP local file header (signature PK\x03\x04) for one entry at the
/// current stream position and records the header's size in the entry.
/// CRC and sizes are written as zeros here and patched later by UpdateCrcAndSizes.
/// </summary>
/// <param name="_zfe">Entry being written; its HeaderSize field is updated.</param>
private void WriteLocalHeader(ref ZipFileEntry _zfe)
{
    long pos = this.ZipFileStream.Position;
    Encoding encoder = _zfe.EncodeUTF8 ? Encoding.UTF8 : DefaultEncoding;
    byte[] encodedFilename = encoder.GetBytes(_zfe.FilenameInZip);

    // Signature (PK\x03\x04) + "version needed" 20, no extra header
    this.ZipFileStream.Write(new byte[] { 80, 75, 3, 4, 20, 0}, 0, 6); // No extra header
    this.ZipFileStream.Write(BitConverter.GetBytes((ushort)(_zfe.EncodeUTF8 ? 0x0800 : 0)), 0, 2); // filename and comment encoding
    this.ZipFileStream.Write(BitConverter.GetBytes((ushort)_zfe.Method), 0, 2); // zipping method
    this.ZipFileStream.Write(BitConverter.GetBytes(DateTimeToDosTime(_zfe.ModifyTime)), 0, 4); // zipping date and time
    this.ZipFileStream.Write(new byte[] { 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0 }, 0, 12); // unused CRC, un/compressed size, updated later
    this.ZipFileStream.Write(BitConverter.GetBytes((ushort)encodedFilename.Length), 0, 2); // filename length
    this.ZipFileStream.Write(BitConverter.GetBytes((ushort)0), 0, 2); // extra length
    this.ZipFileStream.Write(encodedFilename, 0, encodedFilename.Length);

    // Header size = everything written since 'pos' (fixed part + filename).
    _zfe.HeaderSize = (uint)(this.ZipFileStream.Position - pos);
}
Example 19 — Source: ZipStorer.cs (GNU General Public License v3.0, from berichan)
/// <summary>
/// Writes one central directory record (signature PK\x01\x02) for an entry,
/// including its filename and optional comment, at the current stream position.
/// </summary>
/// <param name="_zfe">Entry whose metadata (CRC, sizes, offsets, attributes) is emitted.</param>
private void WriteCentralDirRecord(ZipFileEntry _zfe)
{
    Encoding encoder = _zfe.EncodeUTF8 ? Encoding.UTF8 : DefaultEncoding;
    byte[] encodedFilename = encoder.GetBytes(_zfe.FilenameInZip);
    byte[] encodedComment = encoder.GetBytes(_zfe.Comment);

    // Signature (PK\x01\x02) followed by "version made by" / "version needed" fields.
    this.ZipFileStream.Write(new byte[] { 80, 75, 1, 2, 23, 0xB, 20, 0 }, 0, 8);
    this.ZipFileStream.Write(BitConverter.GetBytes((ushort)(_zfe.EncodeUTF8 ? 0x0800 : 0)), 0, 2); // filename and comment encoding
    this.ZipFileStream.Write(BitConverter.GetBytes((ushort)_zfe.Method), 0, 2); // zipping method
    this.ZipFileStream.Write(BitConverter.GetBytes(DateTimeToDosTime(_zfe.ModifyTime)), 0, 4); // zipping date and time
    this.ZipFileStream.Write(BitConverter.GetBytes(_zfe.Crc32), 0, 4); // file CRC
    this.ZipFileStream.Write(BitConverter.GetBytes(_zfe.CompressedSize), 0, 4); // compressed file size
    this.ZipFileStream.Write(BitConverter.GetBytes(_zfe.FileSize), 0, 4); // uncompressed file size
    this.ZipFileStream.Write(BitConverter.GetBytes((ushort)encodedFilename.Length), 0, 2); // Filename in zip
    this.ZipFileStream.Write(BitConverter.GetBytes((ushort)0), 0, 2); // extra length
    this.ZipFileStream.Write(BitConverter.GetBytes((ushort)encodedComment.Length), 0, 2); // comment length
    this.ZipFileStream.Write(BitConverter.GetBytes((ushort)0), 0, 2); // disk=0
    //this.ZipFileStream.Write(BitConverter.GetBytes((ushort)0), 0, 2); // file type: binary
    this.ZipFileStream.Write(BitConverter.GetBytes((ushort)0), 0, 2); // Internal file attributes
    this.ZipFileStream.Write(BitConverter.GetBytes(_zfe.ExternalAttributes), 0, 4); // External file attributes
    //this.ZipFileStream.Write(BitConverter.GetBytes((ushort)0x8100), 0, 2); // External file attributes (normal/readable)
    this.ZipFileStream.Write(BitConverter.GetBytes(_zfe.HeaderOffset), 0, 4); // Offset of header
    this.ZipFileStream.Write(encodedFilename, 0, encodedFilename.Length);
    this.ZipFileStream.Write(encodedComment, 0, encodedComment.Length);
}
Example 19 — Source: EncryptedInt32.cs (GNU General Public License v3.0, from berichan)
/// <summary>
/// Serialises an <see cref="EncryptedInt32"/> into <paramref name="data"/> starting at
/// <paramref name="offset"/>. Layout: [0..3] encrypted value, [4..5] adjust,
/// [6] shift, [7] checksum of the encrypted value.
/// </summary>
public static void Write(EncryptedInt32 value, byte[] data, int offset)
{
    uint cipher = Encrypt(value.Value, value.Shift, value.Adjust);
    byte chk = CalculateChecksum(cipher);

    byte[] cipherBytes = BitConverter.GetBytes(cipher);
    Array.Copy(cipherBytes, 0, data, offset, cipherBytes.Length);

    byte[] adjustBytes = BitConverter.GetBytes(value.Adjust);
    Array.Copy(adjustBytes, 0, data, offset + 4, adjustBytes.Length);

    data[offset + 6] = value.Shift;
    data[offset + 7] = chk;
}
Example 19 — Source: ZipStorer.cs (GNU General Public License v3.0, from berichan)
/// <summary>
/// Writes the ZIP "end of central directory" record (signature PK\x05\x06)
/// at the current stream position, followed by the archive comment.
/// </summary>
/// <param name="_size">Size in bytes of the central directory.</param>
/// <param name="_offset">Offset of the central directory from the start of the stream.</param>
private void WriteEndRecord(uint _size, uint _offset)
{
    Encoding encoder = this.EncodeUTF8 ? Encoding.UTF8 : DefaultEncoding;
    byte[] encodedComment = encoder.GetBytes(this.Comment);

    // Signature + disk numbers (always 0: single-disk archive).
    this.ZipFileStream.Write(new byte[] { 80, 75, 5, 6, 0, 0, 0, 0 }, 0, 8);

    // BUG FIX: the original wrote "(ushort)Files.Count + ExistingFiles" — the cast
    // binds only to Files.Count, so the sum is an int and GetBytes(int) yields
    // 4 bytes of which only the first 2 were written. That is wrong on big-endian
    // platforms and silently truncates counts above 65535. Cast the whole sum.
    ushort entryCount = (ushort)(Files.Count + ExistingFiles);
    this.ZipFileStream.Write(BitConverter.GetBytes(entryCount), 0, 2); // entries on this disk
    this.ZipFileStream.Write(BitConverter.GetBytes(entryCount), 0, 2); // total entries

    this.ZipFileStream.Write(BitConverter.GetBytes(_size), 0, 4);   // central directory size
    this.ZipFileStream.Write(BitConverter.GetBytes(_offset), 0, 4); // central directory offset
    this.ZipFileStream.Write(BitConverter.GetBytes((ushort)encodedComment.Length), 0, 2); // comment length
    this.ZipFileStream.Write(encodedComment, 0, encodedComment.Length);
}
Example 19 — Source: ZipStorer.cs (GNU General Public License v3.0, from berichan)
/// <summary>
/// Seeks back into an already-written local file header and patches the fields
/// that were not known when the header was first emitted (method, CRC, sizes),
/// then restores the stream position.
/// Offsets: +8 = compression method, +14 = CRC / compressed size / uncompressed size
/// (matches the layout written by WriteLocalHeader: 6-byte sig+version, 2-byte flags,
/// 2-byte method, 4-byte date/time).
/// </summary>
/// <param name="_zfe">Entry whose header at HeaderOffset is patched in place.</param>
private void UpdateCrcAndSizes(ref ZipFileEntry _zfe)
{
    long lastPos = this.ZipFileStream.Position; // remember position

    this.ZipFileStream.Position = _zfe.HeaderOffset + 8;
    this.ZipFileStream.Write(BitConverter.GetBytes((ushort)_zfe.Method), 0, 2); // zipping method

    this.ZipFileStream.Position = _zfe.HeaderOffset + 14;
    this.ZipFileStream.Write(BitConverter.GetBytes(_zfe.Crc32), 0, 4); // Update CRC
    this.ZipFileStream.Write(BitConverter.GetBytes(_zfe.CompressedSize), 0, 4); // Compressed size
    this.ZipFileStream.Write(BitConverter.GetBytes(_zfe.FileSize), 0, 4); // Uncompressed size

    this.ZipFileStream.Position = lastPos; // restore position
}
Example 19 — Source: BCTest.cs (MIT License, from BlazorExtensions)
/// <summary>
/// Computes the CRC-16 of <paramref name="data"/> (via the project's Compute()
/// extension) and returns its bytes with BitConverter's platform order reversed.
/// </summary>
public static byte[] ComputeCRC16(this byte[] data)
{
    byte[] crcBytes = BitConverter.GetBytes(data.Compute());
    Array.Reverse(crcBytes); // same result as Enumerable.Reverse().ToArray(), no LINQ allocation
    return crcBytes;
}
Example 19 — Source: ProjectBuffer.cs (MIT License, from BleuBleu)
/// <summary>
/// Appends the BitConverter bytes of <paramref name="i"/> to the buffer and
/// advances the write index by sizeof(ushort).
/// </summary>
public void Serialize(ref ushort i)
{
    byte[] raw = BitConverter.GetBytes(i);
    buffer.AddRange(raw);
    idx += sizeof(ushort);
}
Example 19 — Source: ProjectBuffer.cs (MIT License, from BleuBleu)
/// <summary>
/// Folds the BitConverter bytes of <paramref name="i"/> into the running CRC32.
/// </summary>
public void Serialize(ref ushort i)
{
    byte[] raw = BitConverter.GetBytes(i);
    crc = CRC32.Compute(raw, crc);
}
Example 19 — Source: DataView.cs (MIT License, from blockcoli)
/// <summary>
/// Stores a 16-bit unsigned value into the backing buffer at <paramref name="byteOffset"/>.
/// Big-endian is the default: bytes are reversed unless <paramref name="littleEndian"/>
/// is explicitly true (presumably mirroring the JS DataView API — TODO confirm).
/// </summary>
public void SetUint16(int byteOffset, ushort value, bool? littleEndian = null)
{
    byte[] raw = BitConverter.GetBytes(value);
    bool wantLittleEndian = littleEndian == true; // null and false both mean big-endian
    if (!wantLittleEndian)
        Array.Reverse(raw);
    Array.Copy(raw, 0, Buffer, byteOffset, raw.Length);
}
See More Examples