Here are examples of the C# API System.Collections.Generic.Dictionary.Add(int, int), taken from open source projects. By voting up, you can indicate which examples are most useful and appropriate.
513 Examples
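As a quick reference before the project examples: Dictionary&lt;TKey, TValue&gt;.Add throws an ArgumentException when the key is already present, which is why many of the examples below guard the call with ContainsKey or TryGetValue, or use the indexer instead. A minimal sketch (the variable names here are illustrative only, not taken from any of the listed projects):
Dictionary<int, int> counts = new Dictionary<int, int>();
counts.Add(1, 10); // ok, key 1 is new
// counts.Add(1, 20); // would throw ArgumentException because key 1 already exists
if (!counts.ContainsKey(1)) counts.Add(1, 20); // guard pattern used in many examples below
counts[2] = 5; // the indexer inserts or overwrites without throwing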
19
View Source File : AvifWriter.AvifWriterState.cs
License : MIT License
Project Creator : 0xC0000054
private void DeduplicateAlphaTiles(
IReadOnlyList<CompressedAV1Image> alphaImages,
HomogeneousTileInfo homogeneousTiles,
IArrayPoolService arrayPool)
{
if (alphaImages.Count == 1)
{
return;
}
foreach (KeyValuePair<int, int> item in homogeneousTiles.DuplicateAlphaTileMap)
{
this.duplicateAlphaTiles.Add(item.Key, item.Value);
}
if (alphaImages.Count == homogeneousTiles.HomogeneousAlphaTiles.Count)
{
return;
}
using (IArrayPoolBuffer<int> duplicateTileSearchSpace = GetDuplicateTileSearchSpace(alphaImages,
homogeneousTiles.HomogeneousAlphaTiles,
arrayPool))
{
for (int i = 0; i < duplicateTileSearchSpace.Count; i++)
{
int firstTileIndex = duplicateTileSearchSpace[i];
if (this.duplicateAlphaTiles.ContainsKey(firstTileIndex))
{
continue;
}
CompressedAV1Data firstImageData = alphaImages[firstTileIndex].Data;
IPinnableBuffer firstPinnable = firstImageData;
IntPtr firstBuffer = IntPtr.Zero;
try
{
for (int j = i + 1; j < duplicateTileSearchSpace.Count; j++)
{
int secondTileIndex = duplicateTileSearchSpace[j];
if (this.duplicateAlphaTiles.ContainsKey(secondTileIndex))
{
continue;
}
CompressedAV1Data secondImageData = alphaImages[secondTileIndex].Data;
if (firstImageData.ByteLength == secondImageData.ByteLength)
{
IPinnableBuffer secondPinnable = secondImageData;
if (firstBuffer == IntPtr.Zero)
{
firstBuffer = firstPinnable.Pin();
}
IntPtr secondBuffer = secondPinnable.Pin();
try
{
if (AvifNative.MemoryBlocksAreEqual(firstBuffer, secondBuffer, firstImageData.ByteLength))
{
this.duplicateAlphaTiles.Add(secondTileIndex, firstTileIndex);
}
}
finally
{
secondPinnable.Unpin();
}
}
}
}
finally
{
if (firstBuffer != IntPtr.Zero)
{
firstPinnable.Unpin();
}
}
}
}
}
19
View Source File : AvifWriter.AvifWriterState.cs
License : MIT License
Project Creator : 0xC0000054
private void DeduplicateColorTiles(
IReadOnlyList<CompressedAV1Image> colorImages,
HomogeneousTileInfo homogeneousTiles,
IArrayPoolService arrayPool)
{
if (colorImages.Count == 1)
{
return;
}
foreach (KeyValuePair<int, int> item in homogeneousTiles.DuplicateColorTileMap)
{
this.duplicateColorTiles.Add(item.Key, item.Value);
}
if (colorImages.Count == homogeneousTiles.HomogeneousColorTiles.Count)
{
return;
}
using (IArrayPoolBuffer<int> duplicateTileSearchSpace = GetDuplicateTileSearchSpace(colorImages,
homogeneousTiles.HomogeneousColorTiles,
arrayPool))
{
for (int i = 0; i < duplicateTileSearchSpace.Count; i++)
{
int firstTileIndex = duplicateTileSearchSpace[i];
if (this.duplicateColorTiles.ContainsKey(firstTileIndex))
{
continue;
}
CompressedAV1Data firstImageData = colorImages[firstTileIndex].Data;
IPinnableBuffer firstPinnable = firstImageData;
IntPtr firstBuffer = IntPtr.Zero;
try
{
for (int j = i + 1; j < duplicateTileSearchSpace.Count; j++)
{
int secondTileIndex = duplicateTileSearchSpace[j];
if (this.duplicateColorTiles.ContainsKey(secondTileIndex))
{
continue;
}
CompressedAV1Data secondImageData = colorImages[secondTileIndex].Data;
if (firstImageData.ByteLength == secondImageData.ByteLength)
{
IPinnableBuffer secondPinnable = secondImageData;
if (firstBuffer == IntPtr.Zero)
{
firstBuffer = firstPinnable.Pin();
}
IntPtr secondBuffer = secondPinnable.Pin();
try
{
if (AvifNative.MemoryBlocksAreEqual(firstBuffer, secondBuffer, firstImageData.ByteLength))
{
this.duplicateColorTiles.Add(secondTileIndex, firstTileIndex);
}
}
finally
{
secondPinnable.Unpin();
}
}
}
}
finally
{
if (firstBuffer != IntPtr.Zero)
{
firstPinnable.Unpin();
}
}
}
}
}
19
View Source File : AvifFile.cs
License : MIT License
Project Creator : 0xC0000054
private static HomogeneousTileInfo GetHomogeneousTileInfo(Surface surface, Rectangle[] tileRects, bool includeAlphaTiles)
{
Dictionary<int, int> duplicateColorTileMap = new Dictionary<int, int>();
HashSet<int> homogeneousColorTiles = new HashSet<int>();
Dictionary<int, int> duplicateAlphaTileMap = new Dictionary<int, int>();
HashSet<int> homogeneousAlphaTiles = new HashSet<int>();
if (tileRects.Length > 1)
{
Dictionary<uint, int> homogeneousColorTileCache = new Dictionary<uint, int>();
Dictionary<byte, int> homogeneousAlphaTileCache = new Dictionary<byte, int>();
for (int i = 0; i < tileRects.Length; i++)
{
if (IsHomogeneousColorTile(surface, tileRects[i], out uint firstPixelBgr))
{
homogeneousColorTiles.Add(i);
if (homogeneousColorTileCache.TryGetValue(firstPixelBgr, out int duplicateTileIndex))
{
duplicateColorTileMap.Add(i, duplicateTileIndex);
}
else
{
homogeneousColorTileCache.Add(firstPixelBgr, i);
}
}
if (includeAlphaTiles)
{
if (IsHomogeneousAlphaTile(surface, tileRects[i], out byte firstPixelAlpha))
{
homogeneousAlphaTiles.Add(i);
if (homogeneousAlphaTileCache.TryGetValue(firstPixelAlpha, out int duplicateTileIndex))
{
duplicateAlphaTileMap.Add(i, duplicateTileIndex);
}
else
{
homogeneousAlphaTileCache.Add(firstPixelAlpha, i);
}
}
}
}
}
return new HomogeneousTileInfo(duplicateColorTileMap,
homogeneousColorTiles,
duplicateAlphaTileMap,
homogeneousAlphaTiles);
}
19
View Source File : LandLord.cs
License : Apache License 2.0
Project Creator : 2881099
public static List<int> OrderPaiLordWithColor(List<int> paiarr)
{
List<int> _tempList = new List<int>(paiarr);
for (int i = 0; i < _tempList.Count; i++)
{
if (_tempList[i] > 100) _tempList[i] %= 100;
}
int[] temparr = _tempList.ToArray<int>();
Array.Sort<int>(temparr);
List<int> _ASCList = temparr.ToList<int>();
_ASCList.Reverse(); // The sort is ascending by default; reversing it gives descending order
// Attach the suit (color) information as well; this is a bit tricky
Dictionary<int, int> _dicPoker2Count = GetPoker_Count(_ASCList);
Dictionary<int, int> _dicPoker2CountUsed = new Dictionary<int, int>();
for (int j = 0; j < _ASCList.Count; j++)
{
if (!_dicPoker2CountUsed.ContainsKey(_ASCList[j])) _dicPoker2CountUsed.Add(_ASCList[j], 1);
for (int c = _dicPoker2CountUsed[_ASCList[j]]; c <= 4; c++)
{
_dicPoker2CountUsed[_ASCList[j]]++;
if (paiarr.Contains(_ASCList[j] + 100 * c))
{
_ASCList[j] = _ASCList[j] + 100 * c;
break;
}
}
}
return _ASCList;
}
19
View Source File : LandLord.cs
License : Apache License 2.0
Project Creator : 2881099
public static List<int> GetPaiColor(List<int> _shoupai, List<int> pokervalue)
{
List<int> _ASCList = new List<int>(pokervalue);
// Attach the suit (color) information as well; this is a bit tricky
Dictionary<int, int> _dicPoker2Count = GetPoker_Count(_ASCList);
Dictionary<int, int> _dicPoker2CountUsed = new Dictionary<int, int>();
for (int j = 0; j < _ASCList.Count; j++)
{
if (!_dicPoker2CountUsed.ContainsKey(_ASCList[j])) _dicPoker2CountUsed.Add(_ASCList[j], 1);
for (int c = _dicPoker2CountUsed[_ASCList[j]]; c <= 4; c++)
{
_dicPoker2CountUsed[_ASCList[j]]++;
if (_shoupai.Contains(_ASCList[j] + 100 * c))
{
_ASCList[j] = _ASCList[j] + 100 * c;
break;
}
}
}
return _ASCList;
}
19
View Source File : LandLord.cs
License : Apache License 2.0
Project Creator : 2881099
public static Dictionary<int, int> GetPoker_Count(List<int> paiList)
{
Dictionary<int, int> _dicPoker2Count = new Dictionary<int, int>();
foreach (int poke in paiList)
{
if (_dicPoker2Count.ContainsKey(poke)) _dicPoker2Count[poke]++;
else _dicPoker2Count.Add(poke, 1);
}
return _dicPoker2Count;
}
19
View Source File : UMAData.cs
License : Apache License 2.0
Project Creator : A7ocin
public void RegisterAnimatedBone(int hash)
{
if (!animatedBonesTable.ContainsKey(hash))
{
animatedBonesTable.Add(hash, animatedBonesTable.Count);
}
}
19
View Source File : UMAData.cs
License : Apache License 2.0
Project Creator : A7ocin
public void RegisterAnimatedBoneHierarchy(int hash)
{
if (!animatedBonesTable.ContainsKey(hash))
{
animatedBonesTable.Add(hash, animatedBonesTable.Count);
}
}
19
View Source File : Catalogues.cs
License : GNU General Public License v3.0
Project Creator : aedenthorn
private static Dictionary<ISalable, int[]> GetAllSeeds()
{
Dictionary<ISalable, int[]> items = new Dictionary<ISalable, int[]>();
Dictionary<int, string> cropData = Helper.Content.Load<Dictionary<int, string>>("Data\\Crops", 0);
Dictionary<int, string> fruitTreeData = Helper.Content.Load<Dictionary<int, string>>("Data\\fruitTrees", 0);
Dictionary<int, int> seedProducts = new Dictionary<int, int>();
foreach (KeyValuePair<int, string> kvp in cropData)
{
string[] values = kvp.Value.Split('/');
if (!int.TryParse(values[3], out int product))
continue;
seedProducts.Add(kvp.Key, product);
}
foreach (KeyValuePair<int, string> kvp in fruitTreeData)
{
string[] values = kvp.Value.Split('/');
if (!int.TryParse(values[2], out int product))
continue;
seedProducts.Add(kvp.Key, product);
}
foreach (KeyValuePair<int, int> crop in seedProducts)
{
bool include = true;
if(Config.SeedsToInclude.ToLower() == "shipped")
{
include = Game1.player.basicShipped.ContainsKey(crop.Value);
}
else if (Config.SeedsToInclude.ToLower() == "season")
{
include = new Crop(crop.Key, 0, 0).seasonsToGrowIn.Contains(Game1.currentSeason);
}
if (include)
{
Object item = new Object(crop.Key, int.MaxValue, false, -1, 0);
if (!item.bigCraftable.Value && item.ParentSheetIndex == 745)
{
item.Price = (int)Math.Round(50 * Config.PriceMult);
}
items.Add(item, new int[]
{
Config.FreeSeedCatalogue ? 0 : (int)Math.Round(item.salePrice() * Config.PriceMult),
int.MaxValue
});
}
}
return items;
}
19
View Source File : TraceMessage.cs
License : Mozilla Public License 2.0
Project Creator : agebullhu
private void WriteTrace(string message, bool time = false)
{
BeginInvokeInUiThread(p =>
{
if (_threadIndex.ContainsKey(Thread.CurrentThread.ManagedThreadId))
{
var idx = _threadIndex[Thread.CurrentThread.ManagedThreadId] + 1;
if (idx >= _trace.Count)
{
_trace.Add(time ? $"{DateTime.Now}:{p}" : p);
_threadIndex[Thread.CurrentThread.ManagedThreadId] = _trace.Count;
}
else
{
_trace.Insert(idx, time ? $"{DateTime.Now}:{p}" : p);
_threadIndex[Thread.CurrentThread.ManagedThreadId] = idx;
}
}
else
{
_trace.Add(time ? $"{DateTime.Now}:{p}" : p);
_threadIndex.Add(Thread.CurrentThread.ManagedThreadId, _trace.Count - 1);
}
RaisePropertyChanged(() => Track);
LastMessageIndex = _trace.Count - 1;
}, message);
}
19
View Source File : InstructionSwitch.cs
License : GNU General Public License v3.0
Project Creator : ahmed605
protected override void DecodeInternal(byte[] code, int offset)
{
SwitchTable = new Dictionary<int, int>();
int switchCount = code[offset + 1];
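// Each switch table entry is 8 bytes: a 4-byte case value followed by a 4-byte jump target.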
for (int i = 0; i < switchCount; i++)
{
int index = BitConverter.ToInt32(code, offset + 2 + i*8);
int jump = BitConverter.ToInt32(code, offset + 2 + i*8 + 4);
SwitchTable.Add(index, jump);
}
}
19
View Source File : Simulator.cs
License : MIT License
Project Creator : aillieo
private void ReIndex()
{
indexById.Clear();
for (int i = 0, len = managedAgents.Count; i < len; ++i)
{
indexById.Add(managedAgents[i].id, i);
}
}
19
View Source File : Simulator.cs
License : MIT License
Project Creator : aillieo
public Agent CreateAgent()
{
Agent agent = new Agent(sid++);
managedAgents.Add(agent);
int index = managedAgents.Count;
indexById.Add(agent.id, index);
return agent;
}
19
View Source File : _350_IntersectionofTwoArraysII.cs
License : MIT License
Project Creator : AlexChesser
public int[] Intersect(int[] nums1, int[] nums2)
{
Dictionary<int, int> pairs = new Dictionary<int, int>();
List<int> output = new List<int>();
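// First pass counts each value in nums1; second pass emits a nums2 value while its remaining count is positive.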
for (int i = 0; i < nums1.Length; i++)
{
if (!pairs.ContainsKey(nums1[i]))
{
pairs.Add(nums1[i], 0);
}
pairs[nums1[i]]++;
}
for (int j = 0; j < nums2.Length; j++)
{
if (pairs.ContainsKey(nums2[j]) && pairs[nums2[j]] > 0)
{
pairs[nums2[j]]--;
output.Add(nums2[j]);
}
}
return output.ToArray();
}
19
View Source File : BarSeriesBase.cs
License : MIT License
Project Creator : AlexGyver
protected internal override void UpdateValidData()
{
this.ValidItems = new List<BarItemBase>();
this.ValidItemsIndexInversion = new Dictionary<int, int>();
var categories = this.GetCategoryAxis().Labels.Count;
var valueAxis = this.GetValueAxis();
int i = 0;
foreach (var item in this.GetItems())
{
var barSeriesItem = item as BarItemBase;
if (barSeriesItem != null && item.GetCategoryIndex(i) < categories
&& valueAxis.IsValidValue(barSeriesItem.Value))
{
this.ValidItemsIndexInversion.Add(this.ValidItems.Count, i);
this.ValidItems.Add(barSeriesItem);
}
i++;
}
}
19
View Source File : IntervalBarSeries.cs
License : MIT License
Project Creator : AlexGyver
protected internal override void UpdateValidData()
{
this.ValidItems = new List<IntervalBarItem>();
this.ValidItemsIndexInversion = new Dictionary<int, int>();
var valueAxis = this.GetValueAxis();
for (var i = 0; i < this.Items.Count; i++)
{
var item = this.Items[i];
if (valueAxis.IsValidValue(item.Start) && valueAxis.IsValidValue(item.End))
{
this.ValidItemsIndexInversion.Add(this.ValidItems.Count, i);
this.ValidItems.Add(item);
}
}
}
19
View Source File : TornadoBarSeries.cs
License : MIT License
Project Creator : AlexGyver
protected internal override void UpdateValidData()
{
this.ValidItems = new List<TornadoBarItem>();
this.ValidItemsIndexInversion = new Dictionary<int, int>();
var valueAxis = this.GetValueAxis();
for (var i = 0; i < this.Items.Count; i++)
{
var item = this.Items[i];
if (valueAxis.IsValidValue(item.Minimum) && valueAxis.IsValidValue(item.Maximum))
{
this.ValidItemsIndexInversion.Add(this.ValidItems.Count, i);
this.ValidItems.Add(item);
}
}
}
19
View Source File : TMP_SpriteAsset.cs
License : MIT License
Project Creator : Alword
public void UpdateLookupTables()
{
//Debug.Log("Updating [" + this.name + "] Lookup tables.");
// Check version number of sprite asset to see if it needs to be upgraded.
if (this.material != null && string.IsNullOrEmpty(m_Version))
UpgradeSpriteAsset();
// Initialize / Clear glyph index lookup dictionary.
if (m_GlyphIndexLookup == null)
m_GlyphIndexLookup = new Dictionary<uint, int>();
else
m_GlyphIndexLookup.Clear();
for (int i = 0; i < m_SpriteGlyphTable.Count; i++)
{
uint glyphIndex = m_SpriteGlyphTable[i].index;
if (m_GlyphIndexLookup.ContainsKey(glyphIndex) == false)
m_GlyphIndexLookup.Add(glyphIndex, i);
}
if (m_NameLookup == null)
m_NameLookup = new Dictionary<int, int>();
else
m_NameLookup.Clear();
if (m_UnicodeLookup == null)
m_UnicodeLookup = new Dictionary<uint, int>();
else
m_UnicodeLookup.Clear();
for (int i = 0; i < m_SpriteCharacterTable.Count; i++)
{
int nameHashCode = m_SpriteCharacterTable[i].hashCode;
if (m_NameLookup.ContainsKey(nameHashCode) == false)
m_NameLookup.Add(nameHashCode, i);
uint unicode = m_SpriteCharacterTable[i].unicode;
if (m_UnicodeLookup.ContainsKey(unicode) == false)
m_UnicodeLookup.Add(unicode, i);
// Update glyph reference which is not serialized
uint glyphIndex = m_SpriteCharacterTable[i].glyphIndex;
if (m_GlyphIndexLookup.TryGetValue(glyphIndex, out int index))
m_SpriteCharacterTable[i].glyph = m_SpriteGlyphTable[index];
}
m_IsSpriteAssetLookupTablesDirty = false;
}
19
View Source File : Interface.cs
License : MIT License
Project Creator : Aminator
public static void Unpack(string inputFilePath, string outputDirectoryPath)
{
if (!File.Exists(inputFilePath))
throw new ArgumentException("Input file does not exists");
if (!Directory.Exists(outputDirectoryPath))
throw new ArgumentException("Ouput directory does not exists");
string inputFileName = Path.GetFileNameWithoutExtension(inputFilePath);
string inputDirectoryPath = Path.GetDirectoryName(inputFilePath);
var model = LoadModelAsync(inputFilePath).Result;
GltfLoader.Schema.Buffer binBuffer = null;
byte[] binBufferData = null;
if (model.Buffers != null && string.IsNullOrEmpty(model.Buffers[0].Uri))
{
binBuffer = model.Buffers[0];
binBufferData = model.LoadBinaryBuffer(0, inputFilePath);
}
var imageBufferViewIndices = new List<int>();
if (model.Images != null)
{
for (var index = 0; index < model.Images.Length; index++)
{
var image = model.Images[index];
if (!string.IsNullOrEmpty(image.Uri))
{
if (!image.Uri.StartsWith("data:"))
{
var sourceFilePath = Path.Combine(inputDirectoryPath, image.Uri);
var fileName = $"{inputFilePath}_image{index}.bin";
if (File.Exists(sourceFilePath))
{
var destinationFilePath = Path.Combine(outputDirectoryPath, fileName);
File.Copy(sourceFilePath, destinationFilePath, true);
}
image.Uri = fileName;
}
}
else if (image.BufferView.HasValue)
{
var bufferView = model.BufferViews[image.BufferView.Value];
if (bufferView.Buffer == 0)
{
imageBufferViewIndices.Add(image.BufferView.Value);
var fileExtension = image.MimeType == "image/jpeg" ? "jpg" : "png";
var fileName = $"{inputFileName}_image{index}.{fileExtension}";
using (var fileStream = File.Create(Path.Combine(outputDirectoryPath, fileName)))
{
fileStream.Write(binBufferData, bufferView.ByteOffset, bufferView.ByteLength);
}
image.BufferView = null;
image.MimeType = null;
image.Uri = fileName;
}
}
}
}
if (model.BufferViews != null)
{
var binFileName = $"{inputFileName}.bin";
var binFilePath = Path.Combine(outputDirectoryPath, binFileName);
var binByteLength = 0;
var indexMap = new Dictionary<int, int>();
var bufferViews = new List<BufferView>();
using (var fileStream = File.Create(binFilePath))
{
for (var index = 0; index < model.BufferViews.Length; index++)
{
if (!imageBufferViewIndices.Any(imageIndex => imageIndex == index))
{
var bufferView = model.BufferViews[index];
if (bufferView.Buffer == 0)
{
fileStream.Align(4);
var fileStreamPosition = fileStream.Position;
fileStream.Write(binBufferData, bufferView.ByteOffset, bufferView.ByteLength);
bufferView.ByteOffset = (int)fileStreamPosition;
}
var newIndex = bufferViews.Count;
if (index != newIndex)
{
indexMap.Add(index, newIndex);
}
bufferViews.Add(bufferView);
}
}
binByteLength = (int)fileStream.Length;
}
model.BufferViews = bufferViews.ToArray();
if (binByteLength == 0)
{
File.Delete(binFilePath);
if (binBuffer != null)
{
model.Buffers = model.Buffers.Skip(1).ToArray();
foreach (var bufferView in model.BufferViews)
{
bufferView.Buffer--;
}
}
}
else
{
binBuffer.Uri = binFileName;
binBuffer.ByteLength = binByteLength;
}
if (model.Accessors != null)
{
foreach (var accessor in model.Accessors)
{
if (accessor.BufferView.HasValue)
{
if (indexMap.TryGetValue(accessor.BufferView.Value, out int newIndex))
{
accessor.BufferView = newIndex;
}
}
}
}
}
if (model.Buffers != null)
{
for (var index = 1; index < model.Buffers.Length; index++)
{
var buffer = model.Buffers[index];
if (!buffer.Uri.StartsWith("data:"))
{
var sourceFilePath = Path.Combine(inputDirectoryPath, buffer.Uri);
var fileName = $"{inputFileName}{index}.bin";
if (File.Exists(sourceFilePath))
{
var destinationFilePath = Path.Combine(outputDirectoryPath, fileName);
File.Copy(sourceFilePath, destinationFilePath, true);
}
buffer.Uri = fileName;
}
}
}
SaveModel(model, Path.Combine(outputDirectoryPath, $"{inputFileName}.gltf"));
}
19
View Source File : SurfaceManager.cs
License : GNU General Public License v3.0
Project Creator : anotak
public void AllocateBuffers()
{
// Make analysis of sector geometry
Dictionary<int, int> sectorverts = new Dictionary<int, int>();
foreach(Sector s in General.Map.Map.Sectors)
{
if(s.Triangles != null)
{
int numvertices = s.Triangles.Vertices.Count;
while(numvertices > 0)
{
// Determine for how many vertices in this entry
int vertsinentry = (numvertices > MAX_VERTICES_PER_SECTOR) ? MAX_VERTICES_PER_SECTOR : numvertices;
// We count the number of sectors that have specific number of vertices
if(!sectorverts.ContainsKey(vertsinentry))
sectorverts.Add(vertsinentry, 0);
sectorverts[vertsinentry]++;
numvertices -= vertsinentry;
}
}
}
// Now (re)allocate the needed buffers
foreach(KeyValuePair<int, int> sv in sectorverts)
{
// Zero vertices can't be drawn
if(sv.Key > 0)
{
SurfaceBufferSet set = GetSet(sv.Key);
// Calculate how many free entries we need
int neededentries = sv.Value;
int freeentriesneeded = neededentries - set.entries.Count;
// Allocate the space needed
EnsureFreeBufferSpace(set, freeentriesneeded);
}
}
}
19
View Source File : IMGUI.cs
License : MIT License
Project Creator : Apostolique
public int CreateId(int id, bool isAbsoluteId) {
if (!isAbsoluteId) {
id = CombineHash(_idHash, id);
}
if (_idsUsedThisFrame.TryGetValue(id, out int count)) {
count++;
_idsUsedThisFrame[id] = count;
id = CombineHash(id, count);
} else {
_idsUsedThisFrame.Add(id, 1);
}
return id;
}
19
View Source File : IMGUI.cs
License : MIT License
Project Creator : Apostolique
public override void UpdateSetup(GameTime gameTime) {
// 1. Ping ourself to prevent cleanup.
// 2. Cleanup last cycle
// 3. Pending components become active.
// a. Set parenting.
// 4. Update pref sizes.
// 5. Apply pref sizes.
// 6. Update setup.
LastPing = InputHelper.CurrentFrame - 1;
Cleanup();
_idsUsedThisFrame.Add(Id, 1);
while (_pendingComponents.Count > 0) {
var pc = _pendingComponents.Dequeue();
if (pc.Component.Parent == null) {
_activeComponents.Add(pc.Id, pc.Component);
} else {
pc.Component.Parent.Remove(pc.Component);
}
pc.Parent.Add(pc.Component);
pc.Component.GrabFocus = GrabFocus;
}
while (_nextTick.Count > 0) {
_nextTick.Dequeue().Invoke();
}
foreach (var c in _children) {
c.UpdatePrefSize(gameTime);
// TODO: Update position?
c.Width = c.PrefWidth;
c.Height = c.PrefHeight;
// TODO: Set clip limit to the window?
c.UpdateSetup(gameTime);
}
}
19
View Source File : ExcelStyles.cs
License : Apache License 2.0
Project Creator : Appdynamics
private void SetStyleAddress(StyleBase sender, Style.StyleChangeEventArgs e, ExcelAddressBase address, ExcelWorksheet ws, ref Dictionary<int, int> styleCashe)
{
if (address.Start.Column == 0 || address.Start.Row == 0)
{
throw (new Exception("error address"));
}
//Columns
else if (address.Start.Row == 1 && address.End.Row == ExcelPackage.MaxRows)
{
ExcelColumn column;
int col = address.Start.Column, row = 0;
bool isNew;
//Get the startcolumn
object o = null;
if (!ws.ExistsValueInner(0, address.Start.Column, ref o))
{
column = ws.Column(address.Start.Column);
isNew = true;
}
else
{
//column = (ExcelColumn)ws.GetValueInner(0, address.Start.Column);
column = (ExcelColumn)o;
isNew = false;
}
var prevColumMax = column.ColumnMax;
while (column.ColumnMin <= address.End.Column)
{
if(column.ColumnMin > prevColumMax+1)
{
var newColumn = ws.Column(prevColumMax + 1);
newColumn.ColumnMax = column.ColumnMin-1;
AddNewStyleColumn(sender, e, ws, styleCashe, newColumn, newColumn.StyleID);
}
if (column.ColumnMax > address.End.Column)
{
var newCol = ws.CopyColumn(column, address.End.Column + 1, column.ColumnMax);
column.ColumnMax = address.End.Column;
}
var s = ws.GetStyleInner(0, column.ColumnMin);
AddNewStyleColumn(sender, e, ws, styleCashe, column, s);
//index++;
prevColumMax = column.ColumnMax;
if (!ws._values.NextCell(ref row, ref col) || row > 0)
{
if(column._columnMax == address.End.Column)
{
break;
}
if (isNew)
{
column._columnMax = address.End.Column;
}
else
{
var newColumn = ws.Column(column._columnMax + 1);
newColumn.ColumnMax = address.End.Column;
AddNewStyleColumn(sender, e, ws, styleCashe, newColumn, newColumn.StyleID);
column = newColumn;
}
break;
}
else
{
column = (ws.GetValueInner(0, col) as ExcelColumn);
}
}
if (column._columnMax < address.End.Column)
{
var newCol = ws.Column(column._columnMax + 1) as ExcelColumn;
newCol._columnMax = address.End.Column;
var s = ws.GetStyleInner(0, column.ColumnMin);
if (styleCashe.ContainsKey(s))
{
ws.SetStyleInner(0, column.ColumnMin, styleCashe[s]);
}
else
{
ExcelXfs st = CellXfs[s];
int newId = st.GetNewID(CellXfs, sender, e.StyleClass, e.StyleProperty, e.Value);
styleCashe.Add(s, newId);
ws.SetStyleInner(0, column.ColumnMin, newId);
}
column._columnMax = address.End.Column;
}
//Set for individual cells in the span. We loop all cells here since the cells are sorted with columns first.
var cse = new CellsStoreEnumerator<ExcelCoreValue>(ws._values, 1, address._fromCol, address._toRow, address._toCol);
while (cse.Next())
{
if (cse.Column >= address.Start.Column &&
cse.Column <= address.End.Column &&
cse.Value._styleId != 0)
{
if (styleCashe.ContainsKey(cse.Value._styleId))
{
ws.SetStyleInner(cse.Row, cse.Column, styleCashe[cse.Value._styleId]);
}
else
{
ExcelXfs st = CellXfs[cse.Value._styleId];
int newId = st.GetNewID(CellXfs, sender, e.StyleClass, e.StyleProperty, e.Value);
styleCashe.Add(cse.Value._styleId, newId);
ws.SetStyleInner(cse.Row, cse.Column, newId);
}
}
}
if (!(address._fromCol == 1 && address._toCol == ExcelPackage.MaxColumns))
{
//Update cells with styled columns
cse = new CellsStoreEnumerator<ExcelCoreValue>(ws._values, 1, 0, address._toRow, 0);
while (cse.Next())
{
if (cse.Value._styleId == 0) continue;
for (int c = address._fromCol; c <= address._toCol; c++)
{
if (!ws.ExistsStyleInner(cse.Row, c))
{
if (styleCashe.ContainsKey(cse.Value._styleId))
{
ws.SetStyleInner(cse.Row, c, styleCashe[cse.Value._styleId]);
}
else
{
ExcelXfs st = CellXfs[cse.Value._styleId];
int newId = st.GetNewID(CellXfs, sender, e.StyleClass, e.StyleProperty, e.Value);
styleCashe.Add(cse.Value._styleId, newId);
ws.SetStyleInner(cse.Row, c, newId);
}
}
}
}
}
}
//Rows
else if (address.Start.Column == 1 && address.End.Column == ExcelPackage.MaxColumns)
{
for (int rowNum = address.Start.Row; rowNum <= address.End.Row; rowNum++)
{
var s = ws.GetStyleInner(rowNum, 0);
if (s == 0)
{
//iterate all columns and set the row to the style of the last column
var cse = new CellsStoreEnumerator<ExcelCoreValue>(ws._values, 0, 1, 0, ExcelPackage.MaxColumns);
while (cse.Next())
{
s = cse.Value._styleId;
if (s == 0) continue;
var c = ws.GetValueInner(cse.Row, cse.Column) as ExcelColumn;
if (c != null && c.ColumnMax < ExcelPackage.MaxColumns)
{
for (int col = c.ColumnMin; col < c.ColumnMax; col++)
{
if (!ws.ExistsStyleInner(rowNum, col))
{
ws.SetStyleInner(rowNum, col, s);
}
}
}
}
ws.SetStyleInner(rowNum, 0, s);
cse.Dispose();
}
if (styleCashe.ContainsKey(s))
{
ws.SetStyleInner(rowNum, 0, styleCashe[s]);
}
else
{
ExcelXfs st = CellXfs[s];
int newId = st.GetNewID(CellXfs, sender, e.StyleClass, e.StyleProperty, e.Value);
styleCashe.Add(s, newId);
ws.SetStyleInner(rowNum, 0, newId);
}
}
//Update individual cells
var cse2 = new CellsStoreEnumerator<ExcelCoreValue>(ws._values, address._fromRow, address._fromCol, address._toRow, address._toCol);
while (cse2.Next())
{
var s = cse2.Value._styleId;
if (s == 0) continue;
if (styleCashe.ContainsKey(s))
{
ws.SetStyleInner(cse2.Row, cse2.Column, styleCashe[s]);
}
else
{
ExcelXfs st = CellXfs[s];
int newId = st.GetNewID(CellXfs, sender, e.StyleClass, e.StyleProperty, e.Value);
styleCashe.Add(s, newId);
ws.SetStyleInner(cse2.Row, cse2.Column, newId);
}
}
//Update cells with styled rows
cse2 = new CellsStoreEnumerator<ExcelCoreValue>(ws._values, 0, 1, 0, address._toCol);
while (cse2.Next())
{
if (cse2.Value._styleId == 0) continue;
for (int r = address._fromRow; r <= address._toRow; r++)
{
if (!ws.ExistsStyleInner(r, cse2.Column))
{
var s = cse2.Value._styleId;
if (styleCashe.ContainsKey(s))
{
ws.SetStyleInner(r, cse2.Column, styleCashe[s]);
}
else
{
ExcelXfs st = CellXfs[s];
int newId = st.GetNewID(CellXfs, sender, e.StyleClass, e.StyleProperty, e.Value);
styleCashe.Add(s, newId);
ws.SetStyleInner(r, cse2.Column, newId);
}
}
}
}
}
else //Cellrange
{
var tmpCache = styleCashe;
var rowCache = new Dictionary<int, int>(address.End.Row - address.Start.Row + 1);
var colCache = new Dictionary<int, ExcelCoreValue>(address.End.Column - address.Start.Column + 1);
ws._values.SetRangeValueSpecial(address.Start.Row, address.Start.Column, address.End.Row, address.End.Column,
(List<ExcelCoreValue> list, int index, int row, int column, object args) =>
{
// Optimized GetStyleID
var s = list[index]._styleId;
if (s == 0 && !ws.ExistsStyleInner(row, 0, ref s))
{
// get row styleId with cache
if (!rowCache.ContainsKey(row)) rowCache.Add(row, ws._values.GetValue(row, 0)._styleId);
s = rowCache[row];
if (s == 0)
{
// get column styleId with cache
if (!colCache.ContainsKey(column)) colCache.Add(column, ws._values.GetValue(0, column));
s = colCache[column]._styleId;
if (s == 0)
{
int r = 0, c = column;
if (ws._values.PrevCell(ref r, ref c))
{
//var val = ws._values.GetValue(0, c);
if (!colCache.ContainsKey(c)) colCache.Add(c, ws._values.GetValue(0, c));
var val = colCache[c];
var colObj = (ExcelColumn)(val._value);
if (colObj != null && colObj.ColumnMax >= column) //Fixes issue 15174
{
s = val._styleId;
}
}
}
}
}
if (tmpCache.ContainsKey(s))
{
//ws.SetStyleInner(row, column, tmpCache[s]);
list[index] = new ExcelCoreValue { _value = list[index]._value, _styleId = tmpCache[s] };
}
else
{
ExcelXfs st = CellXfs[s];
int newId = st.GetNewID(CellXfs, sender, e.StyleClass, e.StyleProperty, e.Value);
tmpCache.Add(s, newId);
//ws.SetStyleInner(row, column, newId);
list[index] = new ExcelCoreValue { _value = list[index]._value, _styleId = newId };
}
},
e);
}
}
19
View Source File : ExcelWorksheets.cs
License : Apache License 2.0
Project Creator : Appdynamics
private void CloneCells(ExcelWorksheet Copy, ExcelWorksheet added)
{
bool sameWorkbook=(Copy.Workbook == _pck.Workbook);
bool doAdjust = _pck.DoAdjustDrawings;
_pck.DoAdjustDrawings = false;
//Merged cells
foreach (var r in Copy.MergedCells) //Issue #94
{
added.MergedCells.Add(new ExcelAddress(r),false);
}
//Shared Formulas
foreach (int key in Copy._sharedFormulas.Keys)
{
added._sharedFormulas.Add(key, Copy._sharedFormulas[key].Clone());
}
Dictionary<int, int> styleCashe = new Dictionary<int, int>();
//Cells
int row,col;
var val = new CellsStoreEnumerator<ExcelCoreValue>(Copy._values);
while(val.Next())
{
row = val.Row;
col = val.Column;
int styleID=0;
if (row == 0) //Column
{
var c = Copy.GetValueInner(row, col) as ExcelColumn;
if (c != null)
{
var clone = c.Clone(added, c.ColumnMin);
clone.StyleID = c.StyleID;
added.SetValueInner(row, col, clone);
styleID = c.StyleID;
}
}
else if (col == 0) //Row
{
var r=Copy.Row(row);
if (r != null)
{
r.Clone(added);
styleID = r.StyleID;
}
}
else
{
styleID = CopyValues(Copy, added, row, col);
}
if (!sameWorkbook)
{
if (styleCashe.ContainsKey(styleID))
{
added.SetStyleInner(row, col, styleCashe[styleID]);
}
else
{
var s = added.Workbook.Styles.CloneStyle(Copy.Workbook.Styles, styleID);
styleCashe.Add(styleID, s);
added.SetStyleInner(row, col, s);
}
}
}
added._package.DoAdjustDrawings = doAdjust;
}
19
View Source File : ExcelRangeBase.cs
License : Apache License 2.0
Project Creator : Appdynamics
public void Copy(ExcelRangeBase Destination, ExcelRangeCopyOptionFlags? excelRangeCopyOptionFlags)
{
bool sameWorkbook = Destination._worksheet.Workbook == _worksheet.Workbook;
ExcelStyles sourceStyles = _worksheet.Workbook.Styles,
styles = Destination._worksheet.Workbook.Styles;
Dictionary<int, int> styleCashe = new Dictionary<int, int>();
//Clear all existing cells;
int toRow = _toRow - _fromRow + 1,
toCol = _toCol - _fromCol + 1;
int i=0;
object o = null;
byte flag=0;
Uri hl = null;
//ExcelComment comment=null;
var excludeFormulas = excelRangeCopyOptionFlags.HasValue && (excelRangeCopyOptionFlags.Value & ExcelRangeCopyOptionFlags.ExcludeFormulas) == ExcelRangeCopyOptionFlags.ExcludeFormulas;
var cse = new CellsStoreEnumerator<ExcelCoreValue>(_worksheet._values, _fromRow, _fromCol, _toRow, _toCol);
var copiedValue = new List<CopiedCell>();
while (cse.Next())
{
var row=cse.Row;
var col = cse.Column; //Issue 15070
var cell = new CopiedCell
{
Row = Destination._fromRow + (row - _fromRow),
Column = Destination._fromCol + (col - _fromCol),
Value=cse.Value._value
};
if (!excludeFormulas && _worksheet._formulas.Exists(row, col, ref o))
{
if (o is int)
{
cell.Formula=_worksheet.GetFormula(cse.Row, cse.Column);
if(_worksheet._flags.GetFlagValue(cse.Row, cse.Column, CellFlags.ArrayFormula))
{
Destination._worksheet._flags.SetFlagValue(cse.Row, cse.Column, true, CellFlags.ArrayFormula);
}
}
else
{
//Destination._worksheet._formulas.SetValue(row, col, o);
cell.Formula=o;
}
}
if(_worksheet.ExistsStyleInner(row, col, ref i))
{
if (sameWorkbook)
{
//Destination._worksheet.SetStyleInner(row, col, i);
cell.StyleID=i;
}
else
{
if (styleCashe.ContainsKey(i))
{
i = styleCashe[i];
}
else
{
var oldStyleID = i;
i = styles.CloneStyle(sourceStyles, i);
styleCashe.Add(oldStyleID, i);
}
//Destination._worksheet.SetStyleInner(row, col, i);
cell.StyleID=i;
}
}
if (_worksheet._hyperLinks.Exists(row, col, ref hl))
{
//Destination._worksheet._hyperLinks.SetValue(row, col, hl);
cell.HyperLink=hl;
}
// Will just be null if no comment exists.
cell.Comment = _worksheet.Cells[cse.Row, cse.Column].Comment;
if (_worksheet._flags.Exists(row, col, ref flag))
{
cell.Flag = flag;
}
copiedValue.Add(cell);
}
//Copy styles with no cell value
var cses = new CellsStoreEnumerator<ExcelCoreValue>(_worksheet._values, _fromRow, _fromCol, _toRow, _toCol);
while (cses.Next())
{
if (!_worksheet.ExistsValueInner(cses.Row, cses.Column))
{
var row = Destination._fromRow + (cses.Row - _fromRow);
var col = Destination._fromCol + (cses.Column - _fromCol);
var cell = new CopiedCell
{
Row = row,
Column = col,
Value = null
};
i = cses.Value._styleId;
if (sameWorkbook)
{
cell.StyleID = i;
}
else
{
if (styleCashe.ContainsKey(i))
{
i = styleCashe[i];
}
else
{
var oldStyleID = i;
i = styles.CloneStyle(sourceStyles, i);
styleCashe.Add(oldStyleID, i);
}
//Destination._worksheet.SetStyleInner(row, col, i);
cell.StyleID = i;
}
copiedValue.Add(cell);
}
}
var copiedMergedCells = new Dictionary<int, ExcelAddress>();
//Merged cells
var csem = new CellsStoreEnumerator<int>(_worksheet.MergedCells._cells, _fromRow, _fromCol, _toRow, _toCol);
while (csem.Next())
{
if(!copiedMergedCells.ContainsKey(csem.Value))
{
var adr = new ExcelAddress(_worksheet.Name, _worksheet.MergedCells.List[csem.Value]);
if(this.Collide(adr)==eAddressCollition.Inside)
{
copiedMergedCells.Add(csem.Value, new ExcelAddress(
Destination._fromRow + (adr.Start.Row - _fromRow),
Destination._fromCol + (adr.Start.Column - _fromCol),
Destination._fromRow + (adr.End.Row - _fromRow),
Destination._fromCol + (adr.End.Column - _fromCol)));
}
else
{
//Partial merge of the address ignore.
copiedMergedCells.Add(csem.Value, null);
}
}
}
Destination._worksheet.MergedCells.Clear(new ExcelAddressBase(Destination._fromRow, Destination._fromCol, Destination._fromRow+toRow-1, Destination._fromCol+toCol-1));
Destination._worksheet._values.Clear(Destination._fromRow, Destination._fromCol, toRow, toCol);
Destination._worksheet._formulas.Clear(Destination._fromRow, Destination._fromCol, toRow, toCol);
//Destination._worksheet._styles.Clear(Destination._fromRow, Destination._fromCol, toRow, toCol);
//Destination._worksheet._types.Clear(Destination._fromRow, Destination._fromCol, toRow, toCol);
Destination._worksheet._hyperLinks.Clear(Destination._fromRow, Destination._fromCol, toRow, toCol);
Destination._worksheet._flags.Clear(Destination._fromRow, Destination._fromCol, toRow, toCol);
Destination._worksheet._commentsStore.Clear(Destination._fromRow, Destination._fromCol, toRow, toCol);
foreach(var cell in copiedValue)
{
Destination._worksheet.SetValueInner(cell.Row, cell.Column, cell.Value);
//if(cell.Type!=null)
//{
// Destination._worksheet._types.SetValue(cell.Row, cell.Column, cell.Type);
//}
if(cell.StyleID!=null)
{
Destination._worksheet.SetStyleInner(cell.Row, cell.Column, cell.StyleID.Value);
}
if(cell.Formula!=null)
{
cell.Formula = UpdateFormulaReferences(cell.Formula.ToString(), Destination._fromRow - _fromRow, Destination._fromCol - _fromCol, 0, 0, Destination.WorkSheet, Destination.WorkSheet, true);
Destination._worksheet._formulas.SetValue(cell.Row, cell.Column, cell.Formula);
}
if(cell.HyperLink!=null)
{
Destination._worksheet._hyperLinks.SetValue(cell.Row, cell.Column, cell.HyperLink);
}
if (cell.Comment != null)
{
Destination.Worksheet.Cells[cell.Row, cell.Column].AddComment(cell.Comment.Text, cell.Comment.Author);
}
if (cell.Flag != 0)
{
Destination._worksheet._flags.SetValue(cell.Row, cell.Column, cell.Flag);
}
}
//Add merged cells
foreach(var m in copiedMergedCells.Values)
{
if(m!=null)
{
Destination._worksheet.MergedCells.Add(m, true);
}
}
if (_fromCol == 1 && _toCol == ExcelPackage.MaxColumns)
{
for (int r = 0; r < this.Rows; r++)
{
var destinationRow = Destination.Worksheet.Row(Destination.Start.Row + r);
destinationRow.OutlineLevel = this.Worksheet.Row(_fromRow + r).OutlineLevel;
}
}
if (_fromRow == 1 && _toRow == ExcelPackage.MaxRows)
{
for (int c = 0; c < this.Columns; c++)
{
var destinationCol = Destination.Worksheet.Column(Destination.Start.Column + c);
destinationCol.OutlineLevel = this.Worksheet.Column(_fromCol + c).OutlineLevel;
}
}
}
19
View Source File : ExcelStyles.cs
License : Apache License 2.0
Project Creator : Appdynamics
private void AddNewStyleColumn(StyleBase sender, StyleChangeEventArgs e, ExcelWorksheet ws, Dictionary<int, int> styleCashe, ExcelColumn column, int s)
{
if (styleCashe.ContainsKey(s))
{
ws.SetStyleInner(0, column.ColumnMin, styleCashe[s]);
}
else
{
ExcelXfs st = CellXfs[s];
int newId = st.GetNewID(CellXfs, sender, e.StyleClass, e.StyleProperty, e.Value);
styleCashe.Add(s, newId);
ws.SetStyleInner(0, column.ColumnMin, newId);
}
}
19
View Source File : ExcelWorksheet.cs
License : Apache License 2.0
Project Creator : Appdynamics
internal int GetStyleIdDefaultWithMemo(int row, int col)
{
int v = 0;
if (ExistsStyleInner(row, 0, ref v)) //First Row
{
return v;
}
else // then column
{
if (!columnStyles.ContainsKey(col))
{
if (ExistsStyleInner(0, col, ref v))
{
columnStyles.Add(col, v);
}
else
{
int r = 0, c = col;
if (_values.PrevCell(ref r, ref c))
{
//var column=ws.GetValueInner(0,c) as ExcelColumn;
var val = _values.GetValue(0, c);
var column = (ExcelColumn)(val._value);
if (column != null && column.ColumnMax >= col) //Fixes issue 15174
{
//return ws.GetStyleInner(0, c);
columnStyles.Add(col, val._styleId);
}
else
{
columnStyles.Add(col, 0);
}
}
else
{
columnStyles.Add(col, 0);
}
}
}
return columnStyles[col];
}
}
19
View Source File : ObjModel.cs
License : MIT License
Project Creator : arcplus
private Geometry AddGeo(Geometry g, GeomBox box,
List<int> pnts, List<int> normals, List<int> uvs)
{
var gg = new Geometry { Id = g.Id };
var pntList = box.Pnts; // new List<int>(); //
var normList = box.Norms; // new List<int>(); //
var uvList = box.Uvs; // new List<int>(); //
//if (pntList.Count == 0)
//{
// foreach (var f in g.Faces)
// {
// foreach (var t in f.Triangles)
// {
// var v1 = t.V1;
// if (!pntList.Contains(v1.V))
// {
// pntList.Add(v1.V);
// }
// if (v1.N > 0 && !normList.Contains(v1.N))
// {
// normList.Add(v1.N);
// }
// if (v1.T > 0 && !uvList.Contains(v1.T))
// {
// uvList.Add(v1.T);
// }
// var v2 = t.V2;
// if (!pntList.Contains(v2.V))
// {
// pntList.Add(v2.V);
// }
// if (v2.N > 0 && !normList.Contains(v2.N))
// {
// normList.Add(v2.N);
// }
// if (v2.T > 0 && !uvList.Contains(v2.T))
// {
// uvList.Add(v2.T);
// }
// var v3 = t.V3;
// if (!pntList.Contains(v3.V))
// {
// pntList.Add(v3.V);
// }
// if (v3.N > 0 && !normList.Contains(v3.N))
// {
// normList.Add(v3.N);
// }
// if (v3.T > 0 && !uvList.Contains(v3.T))
// {
// uvList.Add(v3.T);
// }
// }
// }
//}
var pntDict = new Dictionary<int, int>();
var normDict = new Dictionary<int, int>();
var uvDict = new Dictionary<int, int>();
foreach (var p in pntList)
{
var index = pnts.IndexOf(p);
if (index == -1)
{
index = pnts.Count;
pnts.Add(p);
}
pntDict.Add(p, index + 1);
}
foreach (var n in normList)
{
var index = normals.IndexOf(n);
if (index == -1)
{
index = normals.Count;
normals.Add(n);
}
normDict.Add(n, index + 1);
}
foreach (var t in uvList)
{
var index = uvs.IndexOf(t);
if (index == -1)
{
index = uvs.Count;
uvs.Add(t);
}
uvDict.Add(t, index + 1);
}
foreach (var f in g.Faces)
{
var ff = new Face { MatName = f.MatName };
foreach (var t in f.Triangles)
{
var v1 = GetVertex(t.V1, pntDict, normDict, uvDict);
var v2 = GetVertex(t.V2, pntDict, normDict, uvDict);
var v3 = GetVertex(t.V3, pntDict, normDict, uvDict);
var fv = new FaceTriangle(v1, v2, v3);
ff.Triangles.Add(fv);
}
gg.Faces.Add(ff);
}
return gg;
}
19
View Source File : ExporterFactory.cs
License : GNU General Public License v3.0
Project Creator : Artentus
private static IReadOnlyList<ModpackDefinition> TopologicalSort(in IList<ModpackDefinition> modpacks)
{
// We build this mapping once at the beginning to reduce time complexity
_idIndexMappings.Clear();
for (int i = 0; i < modpacks.Count; i++)
{
int id = modpacks[i].Uid;
if (_idIndexMappings.ContainsKey(id)) // We can also use this to check for duplicate IDs
throw new InvalidOperationException("Multiple modpack definitions with same ID in the list");
_idIndexMappings.Add(id, i);
}
var result = new List<ModpackDefinition>(modpacks.Count);
var visited = new bool[modpacks.Count];
for (int i = 0; i < modpacks.Count; i++)
TopologicalSortRec(modpacks, i, visited, result);
return result;
}
19
View Source File : WordSplitter.cs
License : MIT License
Project Creator : aspose-pdf
private static Dictionary<int, int> FindBlocks(
string text,
IList<Regex> blockExpressions)
{
var blockLocations = new Dictionary<int, int>();
if (blockExpressions == null)
{
return blockLocations;
}
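// Key = match start index, value = match end index; Dictionary.Add throws when two matches start at the same index, which the catch below reports as an overlap.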
foreach (var exp in blockExpressions)
{
var matches = exp.Matches(text);
foreach (System.Text.RegularExpressions.Match match in matches)
{
try
{
blockLocations.Add(match.Index, match.Index + match.Length);
}
catch (ArgumentException)
{
var msg =
$"One or more block expressions result in a text sequence that overlaps. Current expression: {exp}";
throw new ArgumentException(msg);
}
}
}
return blockLocations;
}
19
View Source File : DMTFileReader.cs
License : MIT License
Project Creator : Autodesk
private static void AddTriangle(
Dictionary<int, int> oldIndexToNewIndexMap,
DMTTriangleBlock block,
bool verticesHaveNormals,
List<Point> vertices,
List<Vector> vertexNormals,
int vertex1Index,
int vertex2Index,
int vertex3Index)
{
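// For each vertex: reuse the remapped index if the old index was seen before; otherwise append the vertex (and its normal, when present) and record old index -> new index.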
if (oldIndexToNewIndexMap.ContainsKey(vertex1Index))
{
block.TriangleFirstVertexIndices.Add(oldIndexToNewIndexMap[vertex1Index]);
}
else
{
block.TriangleVertices.Add(vertices[vertex1Index]);
block.TriangleFirstVertexIndices.Add(block.TriangleVertices.Count - 1);
oldIndexToNewIndexMap.Add(vertex1Index, block.TriangleVertices.Count - 1);
if (verticesHaveNormals)
{
block.VertexNormals.Add(vertexNormals[vertex1Index]);
}
}
if (oldIndexToNewIndexMap.ContainsKey(vertex2Index))
{
block.TriangleSecondVertexIndices.Add(oldIndexToNewIndexMap[vertex2Index]);
}
else
{
block.TriangleVertices.Add(vertices[vertex2Index]);
block.TriangleSecondVertexIndices.Add(block.TriangleVertices.Count - 1);
oldIndexToNewIndexMap.Add(vertex2Index, block.TriangleVertices.Count - 1);
if (verticesHaveNormals)
{
block.VertexNormals.Add(vertexNormals[vertex2Index]);
}
}
if (oldIndexToNewIndexMap.ContainsKey(vertex3Index))
{
block.TriangleThirdVertexIndices.Add(oldIndexToNewIndexMap[vertex3Index]);
}
else
{
block.TriangleVertices.Add(vertices[vertex3Index]);
block.TriangleThirdVertexIndices.Add(block.TriangleVertices.Count - 1);
oldIndexToNewIndexMap.Add(vertex3Index, block.TriangleVertices.Count - 1);
if (verticesHaveNormals)
{
block.VertexNormals.Add(vertexNormals[vertex3Index]);
}
}
}
19
View Source File : Tests.cs
License : Apache License 2.0
Project Creator : aws-samples
[TestMethod]
public async Task MakeSyntheticData()
{
var startDate = new DateTime(2013, 1, 1);
var endDate = new DateTime(2017, 12, 31);
var numberOfStores = 10;
var numberOfItems = 50;
var yearlyDemandIncrease = 0.015;
var highestDemanddItem = 125;
var lowestDemanddItem = 5;
var totalDays = endDate.Subtract(startDate).TotalDays;
var itemDemands = new Dictionary<int, int>();
var storeModifiers = new Dictionary<int, decimal>();
var weekModifiers = new Dictionary<DayOfWeek, decimal>
{
{DayOfWeek.Sunday, 0.16m},
{DayOfWeek.Monday, 0.14m},
{DayOfWeek.Tuesday, 0.12m},
{DayOfWeek.Wednesday, 0.10m},
{DayOfWeek.Thursday, 0.15m},
{DayOfWeek.Friday, 0.17m},
{DayOfWeek.Saturday, 0.18m}
};
for (var item = 1; item <= numberOfItems; item++)
{
var demand = new Random().Next(lowestDemanddItem, highestDemanddItem);
itemDemands.Add(item, demand);
}
for (var store = 1; store <= numberOfStores; store++)
{
var modifier = new Random().Next(-20, 20);
storeModifiers.Add(store, modifier);
}
using (var file = File.CreateText("train.csv"))
{
file.WriteLine("date,store,item,demand");
for (var day = 0; day <= totalDays; day++)
{
var date = startDate.AddDays(day);
var year = 1 + (date.Year - startDate.Year);
for (var store = 1; store <= numberOfStores; store++)
for (var item = 1; item <= numberOfItems; item++)
{
var startingDemand = itemDemands[item];
var demandIncrease = startingDemand * (year * yearlyDemandIncrease);
var demand = Convert.ToInt32(startingDemand + demandIncrease);
var modifier = storeModifiers[store] * 0.01m;
var storeModification = demand * modifier;
demand = Convert.ToInt32(demand + storeModification);
var dayOfWeekModifier = weekModifiers[date.DayOfWeek];
var dayOfWeekModification = demand * dayOfWeekModifier;
demand = Convert.ToInt32(demand + dayOfWeekModification);
await file.WriteLineAsync($"{date.Year}-{date.Month:D2}-{date.Day:D2},{store},{item},{demand}");
}
}
}
}
19
View Source File : InstancedGeometry.cs
License : GNU Lesser General Public License v2.1
Project Creator : axiom3d
internal unsafe void BuildIndexRemap(uint* pBuffer, int numIndexes, ref Dictionary<int, int> remap)
{
remap.Clear();
for (int i = 0; i < numIndexes; ++i)
{
// use insert since duplicates are silently discarded
remap.Add((int)*pBuffer++, remap.Count);
// this will have mapped oldindex -> new index IF oldindex
// wasn't already there
}
}
19
View Source File : InstancedGeometry.cs
License : GNU Lesser General Public License v2.1
Project Creator : axiom3d
internal unsafe void BuildIndexRemap(BufferBase pBuffer, int numIndexes, ref Dictionary<int, int> remap)
{
remap.Clear();
for (int i = 0; i < numIndexes; ++i)
{
// use insert since duplicates are silently discarded
remap.Add(pBuffer.Ptr++, remap.Count);
// this will have mapped oldindex -> new index IF oldindex
// wasn't already there
}
}
19
View Source File : WDC1.cs
License : The Unlicense
Project Creator : BAndysc
public new Dictionary<int, byte[]> ReadOffsetData(BinaryReader dbReader, long pos)
{
var CopyTable = new Dictionary<int, byte[]>();
var offsetmap = new List<Tuple<int, short>>();
var firstindex = new Dictionary<int, OffsetDuplicate>();
var OffsetDuplicates = new Dictionary<int, int>();
var Copies = new Dictionary<int, List<int>>();
int[] m_indexes = null;
// OffsetTable
if (HasOffsetTable && OffsetTableOffset > 0)
{
dbReader.BaseStream.Position = OffsetTableOffset;
for (var i = 0; i < MaxId - MinId + 1; i++)
{
int offset = dbReader.ReadInt32();
short length = dbReader.ReadInt16();
if (offset == 0 || length == 0)
continue;
// special case, may contain duplicates in the offset map that we don't want
if (CopyTableSize == 0)
{
if (!firstindex.ContainsKey(offset))
firstindex.Add(offset, new OffsetDuplicate(offsetmap.Count, firstindex.Count));
else
{
OffsetDuplicates.Add(MinId + i, firstindex[offset].VisibleIndex);
continue;
}
}
offsetmap.Add(new Tuple<int, short>(offset, length));
}
}
// IndexTable
if (HasIndexTable)
{
m_indexes = new int[RecordCount];
for (var i = 0; i < RecordCount; i++)
m_indexes[i] = dbReader.ReadInt32();
}
// Copytable
if (CopyTableSize > 0)
{
long end = dbReader.BaseStream.Position + CopyTableSize;
while (dbReader.BaseStream.Position < end)
{
int id = dbReader.ReadInt32();
int idcopy = dbReader.ReadInt32();
if (!Copies.ContainsKey(idcopy))
Copies.Add(idcopy, new List<int>());
Copies[idcopy].Add(id);
}
}
// ColumnMeta
ColumnMeta = new List<ColumnStructureEntry>();
for (var i = 0; i < FieldCount; i++)
{
ColumnStructureEntry column = new ColumnStructureEntry
{
RecordOffset = dbReader.ReadUInt16(),
Size = dbReader.ReadUInt16(),
AdditionalDataSize = dbReader.ReadUInt32(), // size of pallet / sparse values
CompressionType = (CompressionType) dbReader.ReadUInt32(),
BitOffset = dbReader.ReadInt32(),
BitWidth = dbReader.ReadInt32(),
Cardinality = dbReader.ReadInt32()
};
// preload arraysizes
if (column.CompressionType == CompressionType.None)
column.ArraySize = Math.Max(column.Size / FieldStructure[i].BitCount, 1);
else if (column.CompressionType == CompressionType.PalletArray)
column.ArraySize = Math.Max(column.Cardinality, 1);
ColumnMeta.Add(column);
}
// Pallet values
for (var i = 0; i < ColumnMeta.Count; i++)
{
if (ColumnMeta[i].CompressionType == CompressionType.Pallet ||
ColumnMeta[i].CompressionType == CompressionType.PalletArray)
{
int elements = (int) ColumnMeta[i].AdditionalDataSize / 4;
int cardinality = Math.Max(ColumnMeta[i].Cardinality, 1);
ColumnMeta[i].PalletValues = new List<byte[]>();
for (var j = 0; j < elements / cardinality; j++)
ColumnMeta[i].PalletValues.Add(dbReader.ReadBytes(cardinality * 4));
}
}
// Sparse values
for (var i = 0; i < ColumnMeta.Count; i++)
{
if (ColumnMeta[i].CompressionType == CompressionType.Sparse)
{
ColumnMeta[i].SparseValues = new Dictionary<int, byte[]>();
for (var j = 0; j < ColumnMeta[i].AdditionalDataSize / 8; j++)
ColumnMeta[i].SparseValues[dbReader.ReadInt32()] = dbReader.ReadBytes(4);
}
}
// Relationships
if (RelationshipDataSize > 0)
{
RelationShipData = new RelationShipData
{
Records = dbReader.ReadUInt32(),
MinId = dbReader.ReadUInt32(),
MaxId = dbReader.ReadUInt32(),
Entries = new Dictionary<uint, byte[]>()
};
for (var i = 0; i < RelationShipData.Records; i++)
{
byte[] foreignKey = dbReader.ReadBytes(4);
uint index = dbReader.ReadUInt32();
// has duplicates just like the copy table does... why?
if (!RelationShipData.Entries.ContainsKey(index))
RelationShipData.Entries.Add(index, foreignKey);
}
FieldStructure.Add(new FieldStructureEntry(0, 0));
ColumnMeta.Add(new ColumnStructureEntry());
}
// Record Data
BitStream bitStream = new(recordData);
for (var i = 0; i < RecordCount; i++)
{
var id = 0;
if (HasOffsetTable && HasIndexTable)
{
id = m_indexes[CopyTable.Count];
var map = offsetmap[i];
if (CopyTableSize == 0 && firstindex[map.Item1].HiddenIndex != i) //Ignore duplicates
continue;
dbReader.BaseStream.Position = map.Item1;
byte[] data = dbReader.ReadBytes(map.Item2);
var recordbytes = BitConverter.GetBytes(id).Concat(data);
// append relationship id
if (RelationShipData != null)
{
// seen cases of missing indices
if (RelationShipData.Entries.TryGetValue((uint) i, out byte[] foreignData))
recordbytes = recordbytes.Concat(foreignData);
else
recordbytes = recordbytes.Concat(new byte[4]);
}
CopyTable.Add(id, recordbytes.ToArray());
if (Copies.ContainsKey(id))
{
foreach (int copy in Copies[id])
CopyTable.Add(copy, BitConverter.GetBytes(copy).Concat(data).ToArray());
}
}
else
{
bitStream.Seek(i * RecordSize, 0);
var idOffset = 0;
var data = new List<byte>();
if (HasIndexTable)
{
id = m_indexes[i];
data.AddRange(BitConverter.GetBytes(id));
}
int c = HasIndexTable ? 1 : 0;
for (var f = 0; f < FieldCount; f++)
{
int bitOffset = ColumnMeta[f].BitOffset;
int bitWidth = ColumnMeta[f].BitWidth;
int cardinality = ColumnMeta[f].Cardinality;
uint palletIndex;
int take = columnSizes[c] * ColumnMeta[f].ArraySize;
switch (ColumnMeta[f].CompressionType)
{
case CompressionType.None:
int bitSize = FieldStructure[f].BitCount;
if (!HasIndexTable && f == IdIndex)
{
idOffset = data.Count;
id = bitStream.ReadInt32(bitSize); // always read Ids as ints
data.AddRange(BitConverter.GetBytes(id));
}
else
data.AddRange(bitStream.ReadBytes(bitSize * ColumnMeta[f].ArraySize, false, take));
break;
case CompressionType.Immediate:
case CompressionType.SignedImmediate:
if (!HasIndexTable && f == IdIndex)
{
idOffset = data.Count;
id = bitStream.ReadInt32(bitWidth); // always read Ids as ints
data.AddRange(BitConverter.GetBytes(id));
}
else
data.AddRange(bitStream.ReadBytes(bitWidth, false, take));
break;
case CompressionType.Sparse:
if (ColumnMeta[f].SparseValues.TryGetValue(id, out byte[] valBytes))
data.AddRange(valBytes.Take(take));
else
data.AddRange(BitConverter.GetBytes(ColumnMeta[f].BitOffset).Take(take));
break;
case CompressionType.Pallet:
case CompressionType.PalletArray:
palletIndex = bitStream.ReadUInt32(bitWidth);
data.AddRange(ColumnMeta[f].PalletValues[(int) palletIndex].Take(take));
break;
default:
throw new Exception($"Unknown compression {ColumnMeta[f].CompressionType}");
}
c += ColumnMeta[f].ArraySize;
}
// append relationship id
if (RelationShipData != null)
{
// seen cases of missing indices
if (RelationShipData.Entries.TryGetValue((uint) i, out byte[] foreignData))
data.AddRange(foreignData);
else
data.AddRange(new byte[4]);
}
CopyTable.Add(id, data.ToArray());
if (Copies.ContainsKey(id))
{
foreach (int copy in Copies[id])
{
byte[] newrecord = CopyTable[id].ToArray();
Buffer.BlockCopy(BitConverter.GetBytes(copy), 0, newrecord, idOffset, 4);
CopyTable.Add(copy, newrecord);
}
}
}
}
if (HasIndexTable)
{
FieldStructure.Insert(0, new FieldStructureEntry(0, 0));
ColumnMeta.Insert(0, new ColumnStructureEntry());
}
offsetmap.Clear();
firstindex.Clear();
OffsetDuplicates.Clear();
Copies.Clear();
Array.Resize(ref recordData, 0);
bitStream.Dispose();
ColumnMeta.ForEach(x =>
{
x.PalletValues?.Clear();
x.SparseValues?.Clear();
});
InternalRecordSize = (uint) CopyTable.First().Value.Length;
if (CopyTableSize > 0)
CopyTable = CopyTable.OrderBy(x => x.Key).ToDictionary(x => x.Key, x => x.Value);
return CopyTable;
}
19
View Source File : MetadataTablesDictionary.cs
License : MIT License
Project Creator : barry-jones
public void SetMetadataTable(MetadataTables table, MetadataRow[] rows)
{
this[table] = rows;
// Add each entry to the index map
int count = rows.Length;
for(int i = 0; i < count; i++)
{
indexTable.Add(rows[i].FileOffset, i);
}
}
19
View Source File : MessageCache.cs
License : MIT License
Project Creator : bartoszlenar
public void AddMessage(string translationName, int errorId, IReadOnlyList<string> messages)
{
ThrowHelper.NullArgument(translationName, nameof(translationName));
ThrowHelper.NullInCollection(messages, nameof(messages));
if (!_messages.ContainsKey(translationName))
{
_messages.Add(translationName, new Dictionary<int, IReadOnlyList<string>>());
}
_messages[translationName].Add(errorId, messages);
if (!_messagesAmount.ContainsKey(errorId))
{
_messagesAmount.Add(errorId, messages.Count);
}
}
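The ContainsKey guard above matters because Dictionary<TKey, TValue>.Add throws an ArgumentException when the key already exists. A minimal sketch of the same nested-dictionary pattern with illustrative values:
var messages = new Dictionary<string, Dictionary<int, IReadOnlyList<string>>>();
if (!messages.ContainsKey("english"))
{
// create the inner dictionary once per translation name
messages.Add("english", new Dictionary<int, IReadOnlyList<string>>());
}
// adding the same errorId twice for one translation would throw
messages["english"].Add(1, new[] { "Value is required" });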
19
View Source File : ErrorFlag.cs
License : MIT License
Project Creator : bartoszlenar
License : MIT License
Project Creator : bartoszlenar
public void SetEnabled(int level, int errorId)
{
if (_errorsForLevels.ContainsKey(level))
{
return;
}
_errorsForLevels.Add(level, errorId);
_detectionForLevels.Add(level, false);
}
19
View Source File : ChestInfo.cs
License : MIT License
Project Creator : bdfzchen2015
License : MIT License
Project Creator : bdfzchen2015
private void SetFriendP(ServerPlayer player, ServerPlayer friend)
{
if (_friendPendings.ContainsKey(player.UUID))
if (friend == null)
_friendPendings.Remove(player.UUID);
else
_friendPendings[player.UUID] = friend.UUID;
else if (friend != null)
_friendPendings.Add(player.UUID, friend.UUID);
}
19
View Source File : QuickCollectionTests.cs
License : Apache License 2.0
Project Creator : bepu
License : Apache License 2.0
Project Creator : bepu
[MethodImpl(MethodImplOptions.NoInlining)]
public static void TestDictionaryResizing(IUnmanagedMemoryPool pool)
{
Random random = new Random(5);
var dictionary = new QuickDictionary<int, int, PrimitiveComparer<int>>(4, pool);
Dictionary<int, int> controlDictionary = new Dictionary<int, int>();
for (int iterationIndex = 0; iterationIndex < 100000; ++iterationIndex)
{
if (random.NextDouble() < 0.7)
{
dictionary.Add(iterationIndex, iterationIndex, pool);
controlDictionary.Add(iterationIndex, iterationIndex);
}
if (random.NextDouble() < 0.2)
{
var indexToRemove = random.Next(dictionary.Count);
var toRemove = dictionary.Keys[indexToRemove];
dictionary.FastRemove(toRemove);
controlDictionary.Remove(toRemove);
}
if (iterationIndex % 1000 == 0)
{
dictionary.EnsureCapacity(dictionary.Count * 3, pool);
}
else if (iterationIndex % 7777 == 0)
{
dictionary.Compact(pool);
}
}
Debug.Assert(dictionary.Count == controlDictionary.Count);
for (int i = 0; i < dictionary.Count; ++i)
{
Debug.Assert(controlDictionary.ContainsKey(dictionary.Keys[i]));
}
foreach (var element in controlDictionary.Keys)
{
Debug.Assert(dictionary.ContainsKey(element));
}
dictionary.Dispose(pool);
}
19
View Source File : TraversalTree.cs
License : MIT License
Project Creator : BIMrxLAB
License : MIT License
Project Creator : BIMrxLAB
private void Traverse(TreeNode elementNode)
{
int id = elementNode.Id.IntegerValue;
// Terminate if we revisit a node we have already inspected:
if (_visitedElementCount.ContainsKey(id))
{
return;
}
// Otherwise, add the new node to the collection of visited elements:
if (!_visitedElementCount.ContainsKey(id))
{
_visitedElementCount.Add(id, 0);
}
++_visitedElementCount[id];
//
// Find all child nodes and analyze them recursively
AppendChildren(elementNode);
foreach (TreeNode node in elementNode.ChildNodes)
{
Traverse(node);
}
}
19
View Source File : DisassemblyBuilder.cs
License : MIT License
Project Creator : bitfaster
License : MIT License
Project Creator : bitfaster
public Disassembly Build()
{
var asmLineToAsmLineIndex = new Dictionary<int, int>(this.linesContainingLabels.Count);
foreach (var line in this.linesContainingLabels)
{
if (this.asmLabelToLineIndex.TryGetValue(line.Item2, out var index))
{
asmLineToAsmLineIndex.Add(line.Item1, index);
}
}
return new Disassembly(
sb.ToString(),
asmLineToAddressIndex,
asmToSourceLineIndex,
asmLineToAsmLineIndex);
}
19
View Source File : UltEventBase.cs
License : MIT License
Project Creator : BLUDRAG
License : MIT License
Project Creator : BLUDRAG
public void CacheParameter(object value)
{
if(!LinkedValueDictionary.ContainsKey(_invocationIndex))
{
LinkedValueDictionary.Add(_invocationIndex, new List<object>());
}
LinkedValueDictionary[_invocationIndex].Add(value);
if(!ReturnValueIndices.ContainsKey(_invocationIndex))
{
ReturnValueIndices.Add(_invocationIndex, LinkedValueDictionary[_invocationIndex].Count);
}
else
{
ReturnValueIndices[_invocationIndex] = LinkedValueDictionary[_invocationIndex].Count;
}
}
19
View Source File : Arena.cs
License : MIT License
Project Creator : blukatdevelopment
License : MIT License
Project Creator : blukatdevelopment
public Actor InitActor(Actor.Brains brain, int id){
scores.Add(id, 0);
if(!Session.NetActive()){
SpawnActor(brain, id);
if(brain == Actor.Brains.Player1){
playerWorldId = id;
}
return null;
}
Actor ret = null;
if(id == playerWorldId){
ret = SpawnActor(Actor.Brains.Player1, id);
}
else {
ret = SpawnActor(brain, id);
}
return ret;
}
19
View Source File : ShoppeKeeperDialogueReference.cs
License : MIT License
Project Creator : bradhannah
License : MIT License
Project Creator : bradhannah
internal int GetRandomMerchantStringIndexFromRange(int nMin, int nMax)
{
// if this hasn't been accessed before, then let's add a chunk to make sure we don't repeat the same thing
// twice in a row
if (!_previousRandomSelectionByMin.ContainsKey(nMin)) _previousRandomSelectionByMin.Add(nMin, -1);
Debug.Assert(nMin < nMax);
int nTotalResponses = nMax - nMin;
int nResponseIndex = GetRandomIndexFromRange(nMin, nMax);
// if this response is the same as the last response, then we add one and make sure it is still in bounds
// by modding it
if (nResponseIndex == _previousRandomSelectionByMin[nMin])
nResponseIndex =
nMin + (nResponseIndex + 1) % nTotalResponses;
_previousRandomSelectionByMin[nMin] = nResponseIndex;
return nResponseIndex;
}
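The previous selection is cached per nMin so the same merchant line is never returned twice in a row; when the fresh pick collides with the cached one, it is nudged forward and wrapped back into the [nMin, nMax) range. A small worked example of that wrap-around arithmetic, assuming nMin = 10, nMax = 14 and a repeated pick of 13:
// nTotalResponses = 14 - 10 = 4
// nudged index = 10 + (13 + 1) % 4 = 10 + 2 = 12, still inside the range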
19
View Source File : DSSVehicleDockingManager.cs
License : MIT License
Project Creator : ccgould
License : MIT License
Project Creator : ccgould
private void UpdateSubscription(List<ItemsContainer> vehicleContainers, bool subscribing,Vehicle v)
{
foreach (ItemsContainer container in vehicleContainers)
{
if (container?.tr == null) continue;
if (subscribing)
{
if (!Subscibers.ContainsKey(container.tr.GetInstanceID()))
{
QuickLogger.Debug($"Subscribing vehicle {v.GetName()} {container.tr.GetInstanceID()}",true);
container.onAddItem += ContainerOnOnAddItem;
container.onRemoveItem += ContainerOnOnRemoveItem;
Subscibers.Add(container.tr.GetInstanceID(), v.GetInstanceID());
}
}
else
{
QuickLogger.Debug($"Un-Subscribing vehicle {v.GetName()} {container.tr.GetInstanceID()}",true);
container.onAddItem -= ContainerOnOnAddItem;
container.onRemoveItem -= ContainerOnOnRemoveItem;
Subscibers.Clear();
}
}
}
19
View Source File : ProducterValueChangedNotifyProcesser.cs
License : Apache License 2.0
Project Creator : cdy816
License : Apache License 2.0
Project Creator : cdy816
public void Registor(int id)
{
if (id == -1)
{
mIsAll = true;
return;
}
if (!mRegistorTagIds.ContainsKey(id))
mRegistorTagIds.Add(id,0);
}
19
View Source File : ProducterValueChangedNotifyProcesser.cs
License : Apache License 2.0
Project Creator : cdy816
License : Apache License 2.0
Project Creator : cdy816
public void Registor(List<int> ids)
{
foreach(var id in ids)
{
if (!mRegistorTagIds.ContainsKey(id))
mRegistorTagIds.Add(id, 0);
}
}
19
View Source File : DataFileInfo.cs
License : Apache License 2.0
Project Creator : cdy816
License : Apache License 2.0
Project Creator : cdy816
public static Dictionary<int, int> CheckBlockHeadCach(this DataFileSeriserbase datafile, long offset, out int tagCount, out int fileDuration, out int blockDuration, out int timetick, out long blockPointer, out DateTime time)
{
//File header structure: Pre DataRegion(8) + Next DataRegion(8) + Datetime(8) + tagcount(4) + tagid sum(8) + file duration(4) + block duration(4) + Time tick duration(4) + { tagid1 + tagid2 + ... + tagidn } + {[tag1 block point1(8) + tag2 block point1 + tag3 block point1 + ...] + [tag1 block point2(8) + tag2 block point2 + tag3 block point2 + ...] ...}
var dataoffset = offset + 16;
// read the timestamp
time = datafile.ReadDateTime(dataoffset);
dataoffset += 8;
// read the tag count
int count = datafile.ReadInt(dataoffset);
dataoffset += 4;
tagCount = count;
// read the checksum (sum of tag ids)
long idsum = datafile.ReadLong(dataoffset);
dataoffset += 8;
// read the duration of a single file
fileDuration = datafile.ReadInt(dataoffset);
dataoffset += 4;
// read the data block duration
blockDuration = datafile.ReadInt(dataoffset);
dataoffset += 4;
// read the time tick duration
timetick = datafile.ReadInt(dataoffset);
dataoffset += 4;
lock (TagHeadOffsetManager.manager)
{
if (!TagHeadOffsetManager.manager.Contains(idsum, count))
{
// The tag id list is compressed; the content format is: DataSize + Data
var dsize = datafile.ReadInt(dataoffset);
if (dsize <= 0)
{
tagCount = 0;
fileDuration = 0;
blockDuration = 0;
timetick = 0;
blockPointer = 0;
return new Dictionary<int, int>();
}
dataoffset += 4;
blockPointer = dataoffset + dsize - offset;
var dtmp = new Dictionary<int, int>();
using (var dd = datafile.Read(dataoffset, dsize))
{
MarshalVarintCodeMemory vcm = new MarshalVarintCodeMemory(dd.StartMemory, dsize);
var ltmp = vcm.ToIntList();
//vcm.Dispose();
if (ltmp.Count > 0)
{
int preid = ltmp[0];
dtmp.Add(preid, 0);
for (int i = 1; i < ltmp.Count; i++)
{
var id = ltmp[i] + preid;
dtmp.Add(id, i);
preid = id;
}
}
TagHeadOffsetManager.manager.Add(idsum, count, dtmp, blockPointer);
dd.Dispose();
}
return dtmp;
}
else
{
var re = TagHeadOffsetManager.manager.Get(idsum, count);
blockPointer = re.Item2;
return re.Item1;
}
}
}
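The tag id list in this header is delta-compressed: only the first id is absolute, every later entry is an offset from the previous one, and the resulting dictionary maps each reconstructed id to its position in the header. A hedged sketch of that decoding step on its own, with a hypothetical deltas list standing in for the varint-decoded values:
private static Dictionary<int, int> DecodeTagIds(IReadOnlyList<int> deltas)
{
var map = new Dictionary<int, int>();
if (deltas.Count == 0) return map;
// the first value is an absolute id
int previousId = deltas[0];
map.Add(previousId, 0);
for (int i = 1; i < deltas.Count; i++)
{
// each later value is the difference from the previous id
previousId += deltas[i];
map.Add(previousId, i);
}
return map;
}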