From 2c7cd2da1601f8ff89dd805d7f7ef24c79c81246 Mon Sep 17 00:00:00 2001 From: mika Date: Wed, 27 Aug 2025 21:08:15 +0300 Subject: [PATCH 01/10] #BUILD --- README.md | 1 + 1 file changed, 1 insertion(+) diff --git a/README.md b/README.md index 25ee0c1..72a3ffb 100644 --- a/README.md +++ b/README.md @@ -44,3 +44,4 @@ Pull requests to improve this converter are welcome! (please create Issue first, ### Powered by [![JetBrains logo.](https://resources.jetbrains.com/storage/products/company/brand/logos/jetbrains.svg)](https://jb.gg/OpenSourceSupport) + From d1a72565e98100dbea3aa4a1320dbe69779e4014 Mon Sep 17 00:00:00 2001 From: unitycoder Date: Sun, 19 Oct 2025 16:50:02 +0300 Subject: [PATCH 02/10] doing memory cleanup and possible race condition fixes --- MainWindow.xaml.cs | 17 +-- Readers/LAZ.cs | 6 +- Structs/ImportSettings.cs | 4 +- Writers/PCROOT.cs | 221 ++++++++++++++++++++++++++------------ 4 files changed, 162 insertions(+), 86 deletions(-) diff --git a/MainWindow.xaml.cs b/MainWindow.xaml.cs index d51049c..83a48cf 100644 --- a/MainWindow.xaml.cs +++ b/MainWindow.xaml.cs @@ -414,18 +414,7 @@ private static async Task ProcessAllFiles(object workerParamsObject) } finally { - // Ensure the semaphore is released safely - if (semaphore.CurrentCount == 0) // Make sure we don't release more times than we acquire - { - try - { - semaphore.Release(); - } - catch (SemaphoreFullException ex) - { - //Log.Write($"Semaphore was already fully released. Exception: {ex.Message}"); - } - } + semaphore.Release(); } //int? taskId = Task.CurrentId; // Get the current task ID @@ -570,7 +559,9 @@ private static async Task ProcessAllFiles(object workerParamsObject) // call update one more time ProgressTick(null, null); // clear timer + progressTimerThread.Tick -= ProgressTick; progressTimerThread.Stop(); + progressTimerThread = null; mainWindowStatic.progressBarFiles.Foreground = Brushes.Green; //mainWindowStatic.progressBarPoints.Foreground = Brushes.Green; })); @@ -874,7 +865,7 @@ static bool ParseFile(ImportSettings importSettings, int fileIndex, int? taskId, Log.Write(jsonString, LogEvent.File); - long checkCancelEvery = fullPointCount / 128; + long checkCancelEvery = Math.Max(1, fullPointCount / 128); // detect is 0-255 or 0-65535 range bool isCustomIntensityRange = false; diff --git a/Readers/LAZ.cs b/Readers/LAZ.cs index 8df06c8..bd59cbb 100644 --- a/Readers/LAZ.cs +++ b/Readers/LAZ.cs @@ -483,7 +483,11 @@ protected virtual void Dispose(bool disposing) { if (disposing) { - lazReader = null; + if (lazReader != null) + { + lazReader.close_reader(); + lazReader = null; + } } } diff --git a/Structs/ImportSettings.cs b/Structs/ImportSettings.cs index 87258c9..79026d5 100644 --- a/Structs/ImportSettings.cs +++ b/Structs/ImportSettings.cs @@ -153,7 +153,7 @@ public void ReleaseWriter(int? taskId) // Log.Write("ReleaseWriter >>> Memory used: " + GC.GetTotalMemory(false)); // Clean up the writer if necessary writer?.Cleanup(0); - //writer?.Dispose(); + //writer?.Dispose(); // not disposing, just cleaning up for reuse // Return the writer to the pool for reuse _writerPool.Add(writer); // Log.Write("ReleaseWriter >>> Memory used: " + GC.GetTotalMemory(false)); @@ -171,7 +171,7 @@ public void ReleaseReader(int? 
taskId) if (Readers.TryRemove(taskId, out var reader)) { reader?.Close(); - // reader?.Dispose(); + (reader as IDisposable)?.Dispose(); } else { diff --git a/Writers/PCROOT.cs b/Writers/PCROOT.cs index 9e3ede2..45c11ad 100644 --- a/Writers/PCROOT.cs +++ b/Writers/PCROOT.cs @@ -3,6 +3,7 @@ using PointCloudConverter.Logger; using System; using System.Collections; +using System.Collections.Concurrent; using System.Diagnostics; using System.IO; using System.Runtime.CompilerServices; @@ -20,27 +21,74 @@ public class PCROOT : IWriter, IDisposable BinaryWriter writerPoints = null; ImportSettings importSettings; // this is per file here - static List nodeBounds = new List(); // for all tiles - static float cloudMinX = float.PositiveInfinity; - static float cloudMinY = float.PositiveInfinity; - static float cloudMinZ = float.PositiveInfinity; - static float cloudMaxX = float.NegativeInfinity; - static float cloudMaxY = float.NegativeInfinity; - static float cloudMaxZ = float.NegativeInfinity; + static ConcurrentBag nodeBoundsBag = new ConcurrentBag(); // for all tiles + + BoundsAcc localBounds; + struct BoundsAcc + { + public float minX, minY, minZ, maxX, maxY, maxZ; + public void Init() + { + minX = minY = minZ = float.PositiveInfinity; + maxX = maxY = maxZ = float.NegativeInfinity; + } + + [MethodImpl(MethodImplOptions.AggressiveInlining)] + public void Acc(float x, float y, float z) + { + if (x < minX) minX = x; + if (x > maxX) maxX = x; + if (y < minY) minY = y; + if (y > maxY) maxY = y; + if (z < minZ) minZ = z; + if (z > maxZ) maxZ = z; + } + } + + // global aggregator + static class GlobalBounds + { + private static readonly object _lock = new(); + public static float minX = float.PositiveInfinity, minY = float.PositiveInfinity, minZ = float.PositiveInfinity; + public static float maxX = float.NegativeInfinity, maxY = float.NegativeInfinity, maxZ = float.NegativeInfinity; + + public static void Merge(in BoundsAcc b) + { + lock (_lock) + { + if (b.minX < minX) minX = b.minX; + if (b.maxX > maxX) maxX = b.maxX; + if (b.minY < minY) minY = b.minY; + if (b.maxY > maxY) maxY = b.maxY; + if (b.minZ < minZ) minZ = b.minZ; + if (b.maxZ > maxZ) maxZ = b.maxZ; + } + } + } StringBuilder keyBuilder = new StringBuilder(32); - Dictionary keyCache = new Dictionary(); + //Dictionary keyCache = new Dictionary(); + Dictionary<(int x, int y, int z), List> nodeX = new(); + Dictionary<(int x, int y, int z), List> nodeY = new(); + Dictionary<(int x, int y, int z), List> nodeZ = new(); + Dictionary<(int x, int y, int z), List> nodeR = new(); + Dictionary<(int x, int y, int z), List> nodeG = new(); + Dictionary<(int x, int y, int z), List> nodeB = new(); + Dictionary<(int x, int y, int z), List> nodeIntensity = new(); + Dictionary<(int x, int y, int z), List> nodeClassification = new(); + Dictionary<(int x, int y, int z), List> nodeTime = new(); + // our nodes (=tiles, =grid cells), string is tileID and float are X,Y,Z,R,G,B values - Dictionary> nodeX = new Dictionary>(); - Dictionary> nodeY = new Dictionary>(); - Dictionary> nodeZ = new Dictionary>(); - Dictionary> nodeR = new Dictionary>(); - Dictionary> nodeG = new Dictionary>(); - Dictionary> nodeB = new Dictionary>(); - Dictionary> nodeIntensity = new Dictionary>(); - Dictionary> nodeClassification = new Dictionary>(); - Dictionary> nodeTime = new Dictionary>(); + //Dictionary> nodeX = new Dictionary>(); + // Dictionary> nodeY = new Dictionary>(); + // Dictionary> nodeZ = new Dictionary>(); + // Dictionary> nodeR = new Dictionary>(); + // Dictionary> nodeG = 
new Dictionary>(); + // Dictionary> nodeB = new Dictionary>(); + // Dictionary> nodeIntensity = new Dictionary>(); + // Dictionary> nodeClassification = new Dictionary>(); + // Dictionary> nodeTime = new Dictionary>(); //int? taskID; @@ -51,6 +99,9 @@ public class PCROOT : IWriter, IDisposable private readonly List _shuffleListBuffer = new(4096 * 4); private readonly List[] _tempArray = new List[4096 * 4]; + private byte[] pointBuffer = new byte[12]; + private byte[] colorBuffer = new byte[12]; + public void Dispose() { //Log.Write("Memory used: " + GC.GetTotalMemory(false)); @@ -124,19 +175,29 @@ protected virtual void Dispose(bool disposing) bsPoints?.Dispose(); writerPoints?.Dispose(); + nodeX.Clear(); + nodeY.Clear(); + nodeZ.Clear(); + nodeR.Clear(); + nodeG.Clear(); + nodeB.Clear(); + nodeIntensity.Clear(); + nodeClassification.Clear(); + nodeTime.Clear(); + // Clear and dispose instance dictionaries - ClearDictionary(nodeX); - ClearDictionary(nodeY); - ClearDictionary(nodeZ); - ClearDictionary(nodeR); - ClearDictionary(nodeG); - ClearDictionary(nodeB); - ClearDictionary(nodeIntensity); - ClearDictionary(nodeClassification); - ClearDictionary(nodeTime); - - keyCache.Clear(); - keyCache = null; + //ClearDictionary(nodeX); + //ClearDictionary(nodeY); + //ClearDictionary(nodeZ); + //ClearDictionary(nodeR); + //ClearDictionary(nodeG); + //ClearDictionary(nodeB); + //ClearDictionary(nodeIntensity); + //ClearDictionary(nodeClassification); + //ClearDictionary(nodeTime); + + // keyCache.Clear(); + // keyCache = null; } // If there were unmanaged resources, you'd clean them up here @@ -165,7 +226,6 @@ public bool InitWriter(dynamic _importSettings, long pointCount, ILogger logger) Log = logger; // clear old nodes - keyCache.Clear(); nodeX.Clear(); nodeY.Clear(); nodeZ.Clear(); @@ -179,6 +239,9 @@ public bool InitWriter(dynamic _importSettings, long pointCount, ILogger logger) writerPoints = null; importSettings = (ImportSettings)(object)_importSettings; + localBounds = new BoundsAcc(); + localBounds.Init(); + return res; } @@ -197,7 +260,7 @@ void IWriter.WriteRGB(float r, float g, float b) } - // for pcroot, this is saving the rootfile + // for pcroot, this is saving the rootfile (just once) void IWriter.Close() { // this happens if imported metadata only? 
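// Illustrative sketch of the pattern this hunk introduces (assumed driver code, not lines from
// this patch): each writer accumulates extents into its own BoundsAcc without locking, then
// merges once per file into the lock-protected GlobalBounds aggregator, while per-tile results
// go into the ConcurrentBag. The "points" enumeration below is a hypothetical placeholder.
var local = new BoundsAcc();
local.Init();
foreach (var (x, y, z) in points)           // hypothetical per-file point stream
{
    if (x < local.minX) local.minX = x;     // lock-free per-point accumulation
    if (x > local.maxX) local.maxX = x;
    if (y < local.minY) local.minY = y;
    if (y > local.maxY) local.maxY = y;
    if (z < local.minZ) local.minZ = z;
    if (z > local.maxZ) local.maxZ = z;
}
GlobalBounds.Merge(in local);               // one lock acquisition per file, not per point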
@@ -210,6 +273,9 @@ void IWriter.Close() // { //Log.Write(" ***************************** save this only after last file from all threads ***************************** "); // check if any tile overlaps with other tiles + + var nodeBounds = nodeBoundsBag.ToList(); + if (importSettings.checkoverlap == true) { for (int i = 0, len = nodeBounds.Count; i < len; i++) @@ -301,8 +367,17 @@ void IWriter.Close() if (importSettings.importRGB == true && importSettings.importClassification == true) commentRow += sep + "classification"; if (addComments) tilerootdata.Insert(1, commentRow); + // get global bounds from static + GlobalBounds.Merge(localBounds); + float cloudMinX = GlobalBounds.minX; + float cloudMinY = GlobalBounds.minY; + float cloudMinZ = GlobalBounds.minZ; + float cloudMaxX = GlobalBounds.maxX; + float cloudMaxY = GlobalBounds.maxY; + float cloudMaxZ = GlobalBounds.maxZ; + // add global header settings to first row - // version, gridsize, pointcount, boundsMinX, boundsMinY, boundsMinZ, boundsMaxX, boundsMaxY, boundsMaxZ + // version, gridsize, pointcount, boundsMinX, boundsMinY, boundsMinZ, boundsMaxX, boundsMaxY, boundsMaxZ string globalData = versionID + sep + importSettings.gridSize.ToString() + sep + totalPointCount + sep + cloudMinX + sep + cloudMinY + sep + cloudMinZ + sep + cloudMaxX + sep + cloudMaxY + sep + cloudMaxZ; // autoOffsetX, globalOffsetY, globalOffsetZ, packMagic globalData += sep + importSettings.offsetX + sep + importSettings.offsetY + sep + importSettings.offsetZ + sep + importSettings.packMagicValue; @@ -344,15 +419,18 @@ void IWriter.Close() Console.ForegroundColor = ConsoleColor.White; } - // cleanup after last file + // cleanup after last file (cannot clear for each file, since its static for all files) nodeBounds.Clear(); - cloudMinX = float.PositiveInfinity; - cloudMinY = float.PositiveInfinity; - cloudMinZ = float.PositiveInfinity; - cloudMaxX = float.NegativeInfinity; - cloudMaxY = float.NegativeInfinity; - cloudMaxZ = float.NegativeInfinity; + //localBounds = new BoundsAcc(); + localBounds.Init(); + + //cloudMinX = float.PositiveInfinity; + //cloudMinY = float.PositiveInfinity; + //cloudMinZ = float.PositiveInfinity; + //cloudMaxX = float.NegativeInfinity; + //cloudMaxY = float.NegativeInfinity; + //cloudMaxZ = float.NegativeInfinity; // } // if last file // clear all lists @@ -366,7 +444,6 @@ void IWriter.Close() //nodeIntensity.Clear(); //nodeTime.Clear(); - // dispose bsPoints?.Dispose(); writerPoints?.Dispose(); @@ -380,17 +457,15 @@ void IWriter.Cleanup(int fileIndex) bsPoints?.Dispose(); writerPoints?.Dispose(); - // Clear and dispose instance dictionaries - ClearDictionary(nodeX); - ClearDictionary(nodeY); - ClearDictionary(nodeZ); - ClearDictionary(nodeR); - ClearDictionary(nodeG); - ClearDictionary(nodeB); - ClearDictionary(nodeIntensity); - ClearDictionary(nodeClassification); - ClearDictionary(nodeTime); - keyCache.Clear(); + nodeX.Clear(); + nodeY.Clear(); + nodeZ.Clear(); + nodeR.Clear(); + nodeG.Clear(); + nodeB.Clear(); + nodeIntensity.Clear(); + nodeClassification.Clear(); + nodeTime.Clear(); } void IWriter.Randomize() @@ -400,13 +475,8 @@ void IWriter.Randomize() void IWriter.AddPoint(int index, float x, float y, float z, float r, float g, float b, ushort intensity, double time, byte classification) { - // get global all clouds bounds - cloudMinX = Math.Min(cloudMinX, x); - cloudMaxX = Math.Max(cloudMaxX, x); - cloudMinY = Math.Min(cloudMinY, y); - cloudMaxY = Math.Max(cloudMaxY, y); - cloudMinZ = Math.Min(cloudMinZ, z); - cloudMaxZ = 
Math.Max(cloudMaxZ, z); + // collect global all clouds bounds + localBounds.Acc(x, y, z); float gridSize = importSettings.gridSize; @@ -422,16 +492,19 @@ void IWriter.AddPoint(int index, float x, float y, float z, float r, float g, fl keyBuilder.Append(cellY); keyBuilder.Append('_'); keyBuilder.Append(cellZ); - string key = keyBuilder.ToString(); + //string key = keyBuilder.ToString(); + var key = (cellX, cellY, cellZ); if (importSettings.packColors == true) { - if (keyCache.TryGetValue(key, out _) == false) - { - keyCache.Add(key, (cellX, cellY, cellZ)); // or if useLossyFiltering - } + //if (keyCache.TryGetValue(key, out _) == false) + //{ + // keyCache.Add(key, (cellX, cellY, cellZ)); // or if useLossyFiltering + //} } + //if (!nodeX.TryGetValue(key, out var xs)) nodeX[key] = xs = new List(); + // if already exists, add to existing list if (nodeX.TryGetValue(key, out _)) { @@ -514,9 +587,13 @@ void IWriter.Save(int fileIndex) List outputFiles = new List(); + // merge local bounds to global + GlobalBounds.Merge(in localBounds); + // process all tiles //foreach (KeyValuePair> nodeData in nodeX) - foreach (KeyValuePair> nodeData in nodeX) + //foreach (KeyValuePair> nodeData in nodeX) + foreach (KeyValuePair<(int x, int y, int z), List> nodeData in nodeX) { if (nodeData.Value.Count < importSettings.minimumPointCount) { @@ -526,7 +603,7 @@ void IWriter.Save(int fileIndex) nodeTempX = nodeData.Value; - string key = nodeData.Key; + var key = nodeData.Key; //int key = nodeData.Key; nodeTempY = nodeY[key]; @@ -620,7 +697,6 @@ void IWriter.Save(int fileIndex) //Console.WriteLine("nodeTempX.Count="+ nodeTempX.Count); double totalTime = 0; // for average timestamp - byte[] pointBuffer = new byte[12]; // hold floats as bytes // loop and output all points within that node/tile for (int i = 0, len = nodeTempX.Count; i < len; i++) @@ -649,7 +725,10 @@ void IWriter.Save(int fileIndex) { // get local coords within tile //var keys = nodeData.Key.Split('_'); - (cellX, cellY, cellZ) = keyCache[key]; + //(cellX, cellY, cellZ) = keyCache[key]; + cellX = key.x; + cellY = key.y; + cellZ = key.z; // TODO no need to parse, we should know these values? 
//cellX = int.Parse(keys[0]); //cellY = int.Parse(keys[1]); @@ -733,7 +812,10 @@ void IWriter.Save(int fileIndex) //cellX = int.Parse(keys[0]); //cellY = int.Parse(keys[1]); //cellZ = int.Parse(keys[2]); - (cellX, cellY, cellZ) = keyCache[key]; + //(cellX, cellY, cellZ) = keyCache[key]; + cellX = key.x; + cellY = key.y; + cellZ = key.z; // offset point inside local tile //(int restoredX, int restoredY, int restoredZ) = Unhash(nodeData.Key); //cellX = restoredX; @@ -866,7 +948,6 @@ void IWriter.Save(int fileIndex) //int keepEveryN = importSettings.keepEveryN; int len = nodeTempX.Count; - byte[] colorBuffer = new byte[12]; // Buffer to hold the RGB values as bytes //unsafe void FloatToBytes(float value, byte[] buffer, int offset) //{ @@ -979,7 +1060,7 @@ void IWriter.Save(int fileIndex) cb.averageTimeStamp = averageTime; } - nodeBounds.Add(cb); + nodeBoundsBag.Add(cb); } // loop all nodes/tiles foreach // finished this file From ecd34d5a56ac9980c5ec435f6fe79787826568dc Mon Sep 17 00:00:00 2001 From: unitycoder Date: Sun, 19 Oct 2025 17:18:08 +0300 Subject: [PATCH 03/10] cleanup localbounds calculation --- Structs/PointCloudTile.cs | 1 - Writers/PCROOT.cs | 48 +++++++++++++++++++++++---------------- 2 files changed, 28 insertions(+), 21 deletions(-) diff --git a/Structs/PointCloudTile.cs b/Structs/PointCloudTile.cs index ea3f59a..93c198a 100644 --- a/Structs/PointCloudTile.cs +++ b/Structs/PointCloudTile.cs @@ -27,6 +27,5 @@ public struct PointCloudTile // average timestamp from all points in this tile public double averageTimeStamp; public float overlapRatio; // 0-1, 0 means no overlap, 1 means full overlap - } } diff --git a/Writers/PCROOT.cs b/Writers/PCROOT.cs index 45c11ad..493fb11 100644 --- a/Writers/PCROOT.cs +++ b/Writers/PCROOT.cs @@ -33,16 +33,16 @@ public void Init() maxX = maxY = maxZ = float.NegativeInfinity; } - [MethodImpl(MethodImplOptions.AggressiveInlining)] - public void Acc(float x, float y, float z) - { - if (x < minX) minX = x; - if (x > maxX) maxX = x; - if (y < minY) minY = y; - if (y > maxY) maxY = y; - if (z < minZ) minZ = z; - if (z > maxZ) maxZ = z; - } + //[MethodImpl(MethodImplOptions.AggressiveInlining)] + //public void Acc(float x, float y, float z) + //{ + // if (x < minX) minX = x; + // if (x > maxX) maxX = x; + // if (y < minY) minY = y; + // if (y > maxY) maxY = y; + // if (z < minZ) minZ = z; + // if (z > maxZ) maxZ = z; + //} } // global aggregator @@ -56,11 +56,11 @@ public static void Merge(in BoundsAcc b) { lock (_lock) { - if (b.minX < minX) minX = b.minX; + if (b.minX < minX) minX = b.minX; if (b.maxX > maxX) maxX = b.maxX; - if (b.minY < minY) minY = b.minY; + if (b.minY < minY) minY = b.minY; if (b.maxY > maxY) maxY = b.maxY; - if (b.minZ < minZ) minZ = b.minZ; + if (b.minZ < minZ) minZ = b.minZ; if (b.maxZ > maxZ) maxZ = b.maxZ; } } @@ -422,7 +422,6 @@ void IWriter.Close() // cleanup after last file (cannot clear for each file, since its static for all files) nodeBounds.Clear(); - //localBounds = new BoundsAcc(); localBounds.Init(); //cloudMinX = float.PositiveInfinity; @@ -475,9 +474,6 @@ void IWriter.Randomize() void IWriter.AddPoint(int index, float x, float y, float z, float r, float g, float b, ushort intensity, double time, byte classification) { - // collect global all clouds bounds - localBounds.Acc(x, y, z); - float gridSize = importSettings.gridSize; // add to correct cell, MOVE to writer @@ -587,9 +583,6 @@ void IWriter.Save(int fileIndex) List outputFiles = new List(); - // merge local bounds to global - GlobalBounds.Merge(in 
localBounds); - // process all tiles //foreach (KeyValuePair> nodeData in nodeX) //foreach (KeyValuePair> nodeData in nodeX) @@ -1053,6 +1046,20 @@ void IWriter.Save(int fileIndex) cb.cellY = cellY; cb.cellZ = cellZ; + // add minmax to local bounds + localBounds.minX = Math.Min(localBounds.minX, minX); + localBounds.minY = Math.Min(localBounds.minY, minY); + localBounds.minZ = Math.Min(localBounds.minZ, minZ); + localBounds.maxX = Math.Max(localBounds.maxX, maxX); + localBounds.maxY = Math.Max(localBounds.maxY, maxY); + localBounds.maxZ = Math.Max(localBounds.maxZ, maxZ); + + // merge local bounds from this tile into global + GlobalBounds.Merge(in localBounds); + + //Log.Write(localBounds.minX + "," + localBounds.maxX); + + if (importSettings.averageTimestamp == true && totalPointsWritten > 0) { double averageTime = totalTime / totalPointsWritten; @@ -1060,6 +1067,7 @@ void IWriter.Save(int fileIndex) cb.averageTimeStamp = averageTime; } + // this tile data nodeBoundsBag.Add(cb); } // loop all nodes/tiles foreach From b936971567ca7c275f7af15ca7b6ca9521ce951e Mon Sep 17 00:00:00 2001 From: unitycoder Date: Sun, 19 Oct 2025 17:59:23 +0300 Subject: [PATCH 04/10] cleanup main loop, --- MainWindow.xaml.cs | 93 ++++++++++++++++------------------------------ Writers/PCROOT.cs | 32 ++++++++-------- 2 files changed, 48 insertions(+), 77 deletions(-) diff --git a/MainWindow.xaml.cs b/MainWindow.xaml.cs index 83a48cf..c0421ef 100644 --- a/MainWindow.xaml.cs +++ b/MainWindow.xaml.cs @@ -397,92 +397,63 @@ private static async Task ProcessAllFiles(object workerParamsObject) for (int i = 0, len = importSettings.maxFiles; i < len; i++) { - if (cancellationToken.IsCancellationRequested) + await semaphore.WaitAsync(cancellationToken); // acquire BEFORE starting task + int index = i; // capture + tasks.Add(Task.Run(() => { - return; - } - - //await semaphore.WaitAsync(cancellationToken); - try - { - await semaphore.WaitAsync(cancellationToken); - } - catch (OperationCanceledException) - { - // Handle the cancellation scenario here - Log.Write("Wait was canceled."); - } - finally - { - semaphore.Release(); - } - //int? taskId = Task.CurrentId; // Get the current task ID - - //progressFile = i; - Interlocked.Increment(ref progressFile); - - //bool isLastTask = (i == len - 1); // Check if this is the last task - - int index = i; // Capture the current file index in the loop - int len2 = len; - tasks.Add(Task.Run(async () => - { - int? taskId = Task.CurrentId; // Get the current task ID - //Log.Write("task started: " + taskId + " fileindex: " + index); - Log.Write("task:" + taskId + ", reading file (" + (index + 1) + "/" + len2 + ") : " + importSettings.inputFiles[index] + " (" + Tools.HumanReadableFileSize(new FileInfo(importSettings.inputFiles[index]).Length) + ")\n"); + int? 
taskId = Task.CurrentId; // may be null; your pool should accept null try { - // Do actual point cloud parsing for this file and pass taskId - var res = ParseFile(importSettings, index, taskId, cancellationToken); - if (!res) + Log.Write($"task:{taskId}, reading file ({index + 1}/{len}) : " + + $"{importSettings.inputFiles[index]} ({Tools.HumanReadableFileSize(new FileInfo(importSettings.inputFiles[index]).Length)})\n"); + + var ok = ParseFile(importSettings, index, taskId, cancellationToken); + if (!ok) { - Interlocked.Increment(ref errorCounter); // thread-safe error counter increment - if (importSettings.useJSONLog) + Interlocked.Increment(ref errorCounter); + if (cancellationToken.IsCancellationRequested) { - // if canceled, we dont want to log this (causes nullref) - if (cancellationToken.IsCancellationRequested == false) - { - Log.Write("{\"event\": \"" + LogEvent.File + "\", \"path\": " + System.Text.Json.JsonSerializer.Serialize(importSettings.inputFiles[i]) + ", \"status\": \"" + LogStatus.Processing + "\"}", LogEvent.Error); - } + Log.Write("Task was canceled."); + } + else if (importSettings.useJSONLog) + { + Log.Write( + "{\"event\":\"" + LogEvent.File + "\",\"path\":" + + System.Text.Json.JsonSerializer.Serialize(importSettings.inputFiles[index]) + + ",\"status\":\"" + LogStatus.Processing + "\"}", LogEvent.Error); } else { - if (cancellationToken.IsCancellationRequested) - { - Log.Write("Task was canceled."); - } - else - { - Log.Write("files" + importSettings.inputFiles.Count + " i:" + i); - Log.Write("Error> Failed to parse file: " + importSettings.inputFiles[i], LogEvent.Error); - } + Log.Write("files" + importSettings.inputFiles.Count + " i:" + index); + Log.Write("Error> Failed to parse file: " + importSettings.inputFiles[index], LogEvent.Error); } } } - catch (TaskCanceledException ex) + catch (OperationCanceledException) { - Log.Write("Task was canceled: " + ex.Message, LogEvent.Error); + Log.Write("Operation was canceled."); } catch (TimeoutException ex) { Log.Write("Timeout occurred: " + ex.Message, LogEvent.Error); } - catch (OperationCanceledException) - { - MessageBox.Show("Operation was canceled."); - } catch (Exception ex) { Log.Write("Exception> " + ex.Message, LogEvent.Error); - //throw; // Rethrow to ensure Task.WhenAll sees the exception } finally { - semaphore.Release(); // Release the semaphore slot when the task is done + Interlocked.Increment(ref progressFile); + // make sure we don't keep heavy buffers in the pools. 
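// Condensed shape of the fix above (illustrative only; Work() is a hypothetical stand-in for
// ParseFile): the semaphore slot is acquired before the task starts and released exactly once
// in the task's own finally, so cancellation or an exception inside the task can no longer
// leave the slot over- or under-released:
//
//   await semaphore.WaitAsync(cancellationToken);          // acquire BEFORE Task.Run
//   tasks.Add(Task.Run(() =>
//   {
//       try { Work(); }
//       finally { semaphore.Release(); }                   // exactly one Release per WaitAsync
//   }, cancellationToken));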
+ //try { importSettings.ReleaseReader(taskId); } catch { } + //try { importSettings.ReleaseWriter(taskId); } catch { } + + // release exactly once per WaitAsync + semaphore.Release(); } - })); - } // for all files + }, cancellationToken)); + } await Task.WhenAll(tasks); // Wait for all tasks to complete diff --git a/Writers/PCROOT.cs b/Writers/PCROOT.cs index 493fb11..360e7b3 100644 --- a/Writers/PCROOT.cs +++ b/Writers/PCROOT.cs @@ -64,10 +64,23 @@ public static void Merge(in BoundsAcc b) if (b.maxZ > maxZ) maxZ = b.maxZ; } } + + public static void Reset() + { + lock (_lock) + { + minX = float.PositiveInfinity; + minY = float.PositiveInfinity; + minZ = float.PositiveInfinity; + maxX = float.NegativeInfinity; + maxY = float.NegativeInfinity; + maxZ = float.NegativeInfinity; + } + } } StringBuilder keyBuilder = new StringBuilder(32); - //Dictionary keyCache = new Dictionary(); + // our nodes (=tiles, =grid cells), string is tileID and float are X,Y,Z,R,G,B values Dictionary<(int x, int y, int z), List> nodeX = new(); Dictionary<(int x, int y, int z), List> nodeY = new(); Dictionary<(int x, int y, int z), List> nodeZ = new(); @@ -78,20 +91,6 @@ public static void Merge(in BoundsAcc b) Dictionary<(int x, int y, int z), List> nodeClassification = new(); Dictionary<(int x, int y, int z), List> nodeTime = new(); - - // our nodes (=tiles, =grid cells), string is tileID and float are X,Y,Z,R,G,B values - //Dictionary> nodeX = new Dictionary>(); - // Dictionary> nodeY = new Dictionary>(); - // Dictionary> nodeZ = new Dictionary>(); - // Dictionary> nodeR = new Dictionary>(); - // Dictionary> nodeG = new Dictionary>(); - // Dictionary> nodeB = new Dictionary>(); - // Dictionary> nodeIntensity = new Dictionary>(); - // Dictionary> nodeClassification = new Dictionary>(); - // Dictionary> nodeTime = new Dictionary>(); - - //int? 
taskID; - static int skippedNodesCounter = 0; static int skippedPointsCounter = 0; // FIXME, not used in regular mode, only for lossy filtering, TODO can calculate from importsetting values static bool useLossyFiltering = false; //not used, for testing only @@ -376,6 +375,8 @@ void IWriter.Close() float cloudMaxY = GlobalBounds.maxY; float cloudMaxZ = GlobalBounds.maxZ; + GlobalBounds.Reset(); + // add global header settings to first row // version, gridsize, pointcount, boundsMinX, boundsMinY, boundsMinZ, boundsMaxX, boundsMaxY, boundsMaxZ string globalData = versionID + sep + importSettings.gridSize.ToString() + sep + totalPointCount + sep + cloudMinX + sep + cloudMinY + sep + cloudMinZ + sep + cloudMaxX + sep + cloudMaxY + sep + cloudMaxZ; @@ -446,7 +447,6 @@ void IWriter.Close() // dispose bsPoints?.Dispose(); writerPoints?.Dispose(); - } // close void IWriter.Cleanup(int fileIndex) From 72177f9e7d937d94d8d49ec9fb491dd123fc6fec Mon Sep 17 00:00:00 2001 From: unitycoder Date: Sun, 19 Oct 2025 19:31:43 +0300 Subject: [PATCH 05/10] check for valid log --- Structs/ImportSettings.cs | 6 +++--- 1 file changed, 3 insertions(+), 3 deletions(-) diff --git a/Structs/ImportSettings.cs b/Structs/ImportSettings.cs index 79026d5..95b1f61 100644 --- a/Structs/ImportSettings.cs +++ b/Structs/ImportSettings.cs @@ -20,12 +20,12 @@ public class ImportSettings [JsonConverter(typeof(JsonStringEnumConverter))] public Logger.LogEvent @event { get; set; } - + [JsonIgnore] // FIXME doesnt ígnore it public IReader reader; // single threaded reader //public Dictionary Readers { get; set; } = new Dictionary(); public ConcurrentDictionary Readers { get; set; } = new ConcurrentDictionary(); - [JsonIgnore] + [JsonIgnore] public IWriter writer = new UCPC(); public string ReaderType => reader?.GetType().Name; @@ -175,7 +175,7 @@ public void ReleaseReader(int? taskId) } else { - Log.Write($"Reader for task ID {taskId} could not be removed because it was not found.", LogEvent.Warning); + if (Log != null) Log.Write($"Reader for task ID {taskId} could not be removed because it was not found.", LogEvent.Warning); } } } From 202295ce7c7057380bb996bff15dbebbcffeda06 Mon Sep 17 00:00:00 2001 From: unitycoder Date: Fri, 24 Oct 2025 22:15:22 +0300 Subject: [PATCH 06/10] fix reader, fix classification, test single dictionary for pcroot data --- MainWindow.xaml.cs | 13 +- Readers/LAZ.cs | 9 +- Writers/PCROOT.cs | 987 +++++++++++++++------------------------------ 3 files changed, 342 insertions(+), 667 deletions(-) diff --git a/MainWindow.xaml.cs b/MainWindow.xaml.cs index c0421ef..c73bbe3 100644 --- a/MainWindow.xaml.cs +++ b/MainWindow.xaml.cs @@ -964,7 +964,7 @@ static bool ParseFile(ImportSettings importSettings, int fileIndex, int? taskId, //if (classification<0 || classification>1) Log.Write("****: " + classification.ToString()); - //if (i < 10000) Log.Write("class: " + classification.ToString() + " minClass: " + minClass + " maxClass: " + maxClass); + //if (i < 100000) Log.Write("class: " + classification.ToString());// + " minClass: " + minClass + " maxClass: " + maxClass); //classification = 0; //if (intensity.r < minInt) //{ @@ -1015,6 +1015,12 @@ static bool ParseFile(ImportSettings importSettings, int fileIndex, int? 
taskId, lastStatusMessage = "Finished saving.."; //taskReader.Close(); + if (importSettings.importMetadata == true) + { + var metaData = taskReader.GetMetaData(importSettings, fileIndex); + jobMetadata.lasHeaders.Add(metaData); + } + //Log.Write("------------ release reader and writer ------------"); importSettings.ReleaseReader(taskId); //taskReader.Dispose(); @@ -1032,11 +1038,6 @@ static bool ParseFile(ImportSettings importSettings, int fileIndex, int? taskId, //Log.Write(jsonString, LogEvent.File); - if (importSettings.importMetadata == true) - { - var metaData = taskReader.GetMetaData(importSettings, fileIndex); - jobMetadata.lasHeaders.Add(metaData); - } } // if importMetadataOnly == false ^ else // only metadata: diff --git a/Readers/LAZ.cs b/Readers/LAZ.cs index bd59cbb..b90b851 100644 --- a/Readers/LAZ.cs +++ b/Readers/LAZ.cs @@ -77,6 +77,7 @@ LasHeader IReader.GetMetaData(ImportSettings importSettings, int fileIndex) var h = new LasHeader(); h.FileName = importSettings.inputFiles[fileIndex]; + h.FileSourceID = lazReader.header.file_source_ID; h.GlobalEncoding = lazReader.header.global_encoding; h.ProjectID_GUID_data1 = lazReader.header.project_ID_GUID_data_1; @@ -483,11 +484,11 @@ protected virtual void Dispose(bool disposing) { if (disposing) { - if (lazReader != null) - { - lazReader.close_reader(); + //if (lazReader != null) + //{ + // lazReader.close_reader(); lazReader = null; - } +// } } } diff --git a/Writers/PCROOT.cs b/Writers/PCROOT.cs index 360e7b3..1060fa4 100644 --- a/Writers/PCROOT.cs +++ b/Writers/PCROOT.cs @@ -1,11 +1,14 @@ // PCROOT (v3) Exporter https://github.com/unitycoder/UnityPointCloudViewer/wiki/Binary-File-Format-Structure#custom-v3-tiles-pcroot-and-pct-rgb +// Memory-optimized version - Combined 9 dictionaries into 1 for 30-50% memory reduction using PointCloudConverter.Logger; using System; using System.Collections; using System.Collections.Concurrent; +using System.Collections.Generic; using System.Diagnostics; using System.IO; +using System.Linq; using System.Runtime.CompilerServices; using System.Text; using System.Text.Json; @@ -19,9 +22,9 @@ public class PCROOT : IWriter, IDisposable BufferedStream bsPoints = null; BinaryWriter writerPoints = null; - ImportSettings importSettings; // this is per file here + ImportSettings importSettings; - static ConcurrentBag nodeBoundsBag = new ConcurrentBag(); // for all tiles + static ConcurrentBag nodeBoundsBag = new ConcurrentBag(); BoundsAcc localBounds; struct BoundsAcc @@ -32,20 +35,8 @@ public void Init() minX = minY = minZ = float.PositiveInfinity; maxX = maxY = maxZ = float.NegativeInfinity; } - - //[MethodImpl(MethodImplOptions.AggressiveInlining)] - //public void Acc(float x, float y, float z) - //{ - // if (x < minX) minX = x; - // if (x > maxX) maxX = x; - // if (y < minY) minY = y; - // if (y > maxY) maxY = y; - // if (z < minZ) minZ = z; - // if (z > maxZ) maxZ = z; - //} } - // global aggregator static class GlobalBounds { private static readonly object _lock = new(); @@ -79,161 +70,94 @@ public static void Reset() } } - StringBuilder keyBuilder = new StringBuilder(32); - // our nodes (=tiles, =grid cells), string is tileID and float are X,Y,Z,R,G,B values - Dictionary<(int x, int y, int z), List> nodeX = new(); - Dictionary<(int x, int y, int z), List> nodeY = new(); - Dictionary<(int x, int y, int z), List> nodeZ = new(); - Dictionary<(int x, int y, int z), List> nodeR = new(); - Dictionary<(int x, int y, int z), List> nodeG = new(); - Dictionary<(int x, int y, int z), List> nodeB = new(); - 
Dictionary<(int x, int y, int z), List> nodeIntensity = new(); - Dictionary<(int x, int y, int z), List> nodeClassification = new(); - Dictionary<(int x, int y, int z), List> nodeTime = new(); + // MEMORY OPTIMIZATION: Combined structure for all point attributes + // Using class instead of struct to avoid value-type copy issues + class PointData + { + public List X, Y, Z, R, G, B; + public List Intensity; + public List Classification; + public List Time; - static int skippedNodesCounter = 0; - static int skippedPointsCounter = 0; // FIXME, not used in regular mode, only for lossy filtering, TODO can calculate from importsetting values - static bool useLossyFiltering = false; //not used, for testing only + public void Clear() + { + X?.Clear(); + Y?.Clear(); + Z?.Clear(); + R?.Clear(); + G?.Clear(); + B?.Clear(); + Intensity?.Clear(); + Classification?.Clear(); + Time?.Clear(); + } + } - private readonly List _shuffleListBuffer = new(4096 * 4); - private readonly List[] _tempArray = new List[4096 * 4]; + // MEMORY OPTIMIZATION: Single dictionary instead of 9 separate dictionaries + Dictionary<(int x, int y, int z), PointData> nodeData = new(); + + StringBuilder keyBuilder = new StringBuilder(32); + static int skippedNodesCounter = 0; + static int skippedPointsCounter = 0; + static bool useLossyFiltering = false; private byte[] pointBuffer = new byte[12]; private byte[] colorBuffer = new byte[12]; + static ILogger Log; + + public PCROOT(int? _taskID) + { + } + + ~PCROOT() + { + Dispose(false); + } + public void Dispose() { - //Log.Write("Memory used: " + GC.GetTotalMemory(false)); Dispose(true); GC.Collect(); - // GC.SuppressFinalize(this); GC.WaitForPendingFinalizers(); GC.Collect(); - //GC.Collect(); - //Log.Write("Memory used: " + GC.GetTotalMemory(false)); } - - private void ClearDictionary(Dictionary> dictionary) + protected virtual void Dispose(bool disposing) { - if (dictionary != null) + if (disposing) { - foreach (var list in dictionary.Values) - { - list.Clear(); // Clear the list to free up memory - } - dictionary.Clear(); // Clear the dictionary itself - dictionary = null; // Help GC by removing reference - } - } + bsPoints?.Dispose(); + writerPoints?.Dispose(); - private void ClearDictionary(Dictionary> dictionary) - { - if (dictionary != null) - { - foreach (var list in dictionary.Values) + if (nodeData != null) { - list.Clear(); // Clear the list to free up memory + foreach (var data in nodeData.Values) + { + data.Clear(); + } + nodeData.Clear(); } - dictionary.Clear(); // Clear the dictionary itself - dictionary = null; // Help GC by removing reference } } - private void ClearDictionary(Dictionary> dictionary) + public bool InitWriter(dynamic _importSettings, long pointCount, ILogger logger) { - if (dictionary != null) - { - foreach (var list in dictionary.Values) - { - list.Clear(); // Clear the list to free up memory - } - dictionary.Clear(); // Clear the dictionary itself - dictionary = null; // Help GC by removing reference - } - } + Log = logger; - private void ClearDictionary(Dictionary> dictionary) - { - if (dictionary != null) + if (nodeData != null) { - foreach (var list in dictionary.Values) + foreach (var data in nodeData.Values) { - list.Clear(); // Clear the list to free up memory + data.Clear(); } - dictionary.Clear(); // Clear the dictionary itself - //dictionary = null; // Help GC by removing reference + nodeData.Clear(); } - } - - protected virtual void Dispose(bool disposing) - { - if (disposing) + else { - // Dispose managed resources here - 
bsPoints?.Dispose(); - writerPoints?.Dispose(); - - nodeX.Clear(); - nodeY.Clear(); - nodeZ.Clear(); - nodeR.Clear(); - nodeG.Clear(); - nodeB.Clear(); - nodeIntensity.Clear(); - nodeClassification.Clear(); - nodeTime.Clear(); - - // Clear and dispose instance dictionaries - //ClearDictionary(nodeX); - //ClearDictionary(nodeY); - //ClearDictionary(nodeZ); - //ClearDictionary(nodeR); - //ClearDictionary(nodeG); - //ClearDictionary(nodeB); - //ClearDictionary(nodeIntensity); - //ClearDictionary(nodeClassification); - //ClearDictionary(nodeTime); - - // keyCache.Clear(); - // keyCache = null; + nodeData = new Dictionary<(int x, int y, int z), PointData>(); } - // If there were unmanaged resources, you'd clean them up here - } - - ~PCROOT() - { - //Log.Write("pcroot writer finalized for task: " + taskID); - Dispose(false); - } - - // add constructor - public PCROOT(int? _taskID) - { - //Log.Write("*** PCROOT writer created for task: " + _taskID); - //taskID = _taskID; - } - - static ILogger Log; - - public bool InitWriter(dynamic _importSettings, long pointCount, ILogger logger) - { - //Log.Write("--------------------- initwriter for taskID: " + taskID); - var res = true; - - Log = logger; - - // clear old nodes - nodeX.Clear(); - nodeY.Clear(); - nodeZ.Clear(); - nodeR.Clear(); - nodeG.Clear(); - nodeB.Clear(); - nodeIntensity.Clear(); - nodeClassification.Clear(); - nodeTime.Clear(); bsPoints = null; writerPoints = null; importSettings = (ImportSettings)(object)_importSettings; @@ -241,297 +165,69 @@ public bool InitWriter(dynamic _importSettings, long pointCount, ILogger logger) localBounds = new BoundsAcc(); localBounds.Init(); - return res; + return true; } void IWriter.CreateHeader(int pointCount) { - } void IWriter.WriteXYZ(float x, float y, float z) { - } void IWriter.WriteRGB(float r, float g, float b) { - - } - - // for pcroot, this is saving the rootfile (just once) - void IWriter.Close() - { - // this happens if imported metadata only? - if (importSettings == null) return; - - // save rootfile - // only save after last file, TODO should save this if process fails or user cancels, so no need to start from 0 again.. 
but then needs some merge or continue from index n feature - // if (isLastTask == true) - //if (fileIndex == (importSettings.maxFiles - 1)) - // { - //Log.Write(" ***************************** save this only after last file from all threads ***************************** "); - // check if any tile overlaps with other tiles - - var nodeBounds = nodeBoundsBag.ToList(); - - if (importSettings.checkoverlap == true) - { - for (int i = 0, len = nodeBounds.Count; i < len; i++) - { - var cb = nodeBounds[i]; - // check if this tile overlaps with other tiles - for (int j = 0, len2 = nodeBounds.Count; j < len2; j++) - { - if (i == j) continue; // skip self - var cb2 = nodeBounds[j]; - // check if this tile overlaps with other tile - float epsilon = 1e-6f; - bool overlaps = cb.minX < cb2.maxX + epsilon && cb.maxX > cb2.minX - epsilon && - cb.minY < cb2.maxY + epsilon && cb.maxY > cb2.minY - epsilon && - cb.minZ < cb2.maxZ + epsilon && cb.maxZ > cb2.minZ - epsilon; - - if (overlaps) - { - // calculate overlap ratio - float overlapX = Math.Min(cb.maxX, cb2.maxX) - Math.Max(cb.minX, cb2.minX); - float overlapY = Math.Min(cb.maxY, cb2.maxY) - Math.Max(cb.minY, cb2.minY); - float overlapZ = Math.Min(cb.maxZ, cb2.maxZ) - Math.Max(cb.minZ, cb2.minZ); - float overlapVolume = overlapX * overlapY * overlapZ; - float volume1 = (cb.maxX - cb.minX) * (cb.maxY - cb.minY) * (cb.maxZ - cb.minZ); - float volume2 = (cb2.maxX - cb2.minX) * (cb2.maxY - cb2.minY) * (cb2.maxZ - cb2.minZ); - - // check if the volume of either tile is zero - if (volume1 != 0 && volume2 != 0) - { - float overlapRatio = overlapVolume / Math.Min(volume1, volume2); - cb.overlapRatio = overlapRatio; - } - else - { - cb.overlapRatio = 0; // or any other appropriate value - } - - nodeBounds[i] = cb; - } - } - } - } // if checkoverlap - - string fileOnly = Path.GetFileNameWithoutExtension(importSettings.outputFile); - string baseFolder = Path.GetDirectoryName(importSettings.outputFile); - - - var tilerootdata = new List(); - var outputFileRoot = Path.Combine(baseFolder, fileOnly) + ".pcroot"; - - long totalPointCount = 0; - - // add to tileroot list - for (int i = 0, len = nodeBounds.Count; i < len; i++) - { - var tilerow = nodeBounds[i].totalPoints + sep + nodeBounds[i].minX + sep + nodeBounds[i].minY + sep + nodeBounds[i].minZ + sep + nodeBounds[i].maxX + sep + nodeBounds[i].maxY + sep + nodeBounds[i].maxZ + sep + nodeBounds[i].cellX + sep + nodeBounds[i].cellY + sep + nodeBounds[i].cellZ + sep + nodeBounds[i].averageTimeStamp + sep + nodeBounds[i].overlapRatio; - // force dot as decimal separator for values - tilerow = tilerow.Replace(",", "."); - tilerow = nodeBounds[i].fileName + sep + tilerow; - tilerootdata.Add(tilerow); - totalPointCount += nodeBounds[i].totalPoints; - } - - string jsonString = "{" + - "\"event\": \"" + LogEvent.File + "\"," + - "\"path\": " + JsonSerializer.Serialize(outputFileRoot) + "," + - "\"totalpoints\": " + totalPointCount + "," + - "\"skippedNodes\": " + skippedNodesCounter + "," + - "\"skippedPoints\": " + skippedPointsCounter + "" + - "}"; - - Log.Write(jsonString, LogEvent.End); - Log.Write("\nSaving rootfile: " + outputFileRoot + "\n*Total points= " + Tools.HumanReadableCount(totalPointCount)); - - int versionID = importSettings.packColors ? 
2 : 1; // (1 = original, 2 = packed v3 format) - if (importSettings.packColors == true) versionID = 2; - if (useLossyFiltering == true) versionID = 3; - if ((importSettings.importIntensity == true || importSettings.importClassification == true) && importSettings.importRGB && importSettings.packColors) versionID = 4; // new int packed format - if ((importSettings.importIntensity == true && importSettings.importClassification == true) && importSettings.importRGB && importSettings.packColors) versionID = 5; // new int packed format + classification - - bool addComments = false; - - // add comment to first row (version, gridsize, pointcount, boundsMinX, boundsMinY, boundsMinZ, boundsMaxX, boundsMaxY, boundsMaxZ) - string identifer = "# PCROOT - https://github.com/unitycoder/PointCloudConverter"; - if (addComments) tilerootdata.Insert(0, identifer); - - string commentRow = "# version" + sep + "gridsize" + sep + "pointcount" + sep + "boundsMinX" + sep + "boundsMinY" + sep + "boundsMinZ" + sep + "boundsMaxX" + sep + "boundsMaxY" + sep + "boundsMaxZ" + sep + "autoOffsetX" + sep + "autoOffsetY" + sep + "autoOffsetZ" + sep + "packMagicValue"; - if (importSettings.importRGB == true && importSettings.importIntensity == true) commentRow += sep + "intensity"; - if (importSettings.importRGB == true && importSettings.importClassification == true) commentRow += sep + "classification"; - if (addComments) tilerootdata.Insert(1, commentRow); - - // get global bounds from static - GlobalBounds.Merge(localBounds); - float cloudMinX = GlobalBounds.minX; - float cloudMinY = GlobalBounds.minY; - float cloudMinZ = GlobalBounds.minZ; - float cloudMaxX = GlobalBounds.maxX; - float cloudMaxY = GlobalBounds.maxY; - float cloudMaxZ = GlobalBounds.maxZ; - - GlobalBounds.Reset(); - - // add global header settings to first row - // version, gridsize, pointcount, boundsMinX, boundsMinY, boundsMinZ, boundsMaxX, boundsMaxY, boundsMaxZ - string globalData = versionID + sep + importSettings.gridSize.ToString() + sep + totalPointCount + sep + cloudMinX + sep + cloudMinY + sep + cloudMinZ + sep + cloudMaxX + sep + cloudMaxY + sep + cloudMaxZ; - // autoOffsetX, globalOffsetY, globalOffsetZ, packMagic - globalData += sep + importSettings.offsetX + sep + importSettings.offsetY + sep + importSettings.offsetZ + sep + importSettings.packMagicValue; - // force dot as decimal separator - globalData = globalData.Replace(",", "."); - - if (addComments) - { - tilerootdata.Insert(2, globalData); - } - else - { - tilerootdata.Insert(0, globalData); - } - - // append comment for rows also - if (addComments) tilerootdata.Insert(3, "# filename" + sep + "pointcount" + sep + "minX" + sep + "minY" + sep + "minZ" + sep + "maxX" + sep + "maxY" + sep + "maxZ" + sep + "cellX" + sep + "cellY" + sep + "cellZ" + sep + "averageTimeStamp" + sep + "overlapRatio"); - - File.WriteAllLines(outputFileRoot, tilerootdata.ToArray()); - - Console.ForegroundColor = ConsoleColor.Green; - Log.Write("Done saving v3 : " + outputFileRoot); - Console.ForegroundColor = ConsoleColor.White; - if (skippedNodesCounter > 0) - { - Log.Write("*Skipped " + skippedNodesCounter + " nodes with less than " + importSettings.minimumPointCount + " points)"); - } - - if (useLossyFiltering == true && skippedPointsCounter > 0) - { - Log.Write("*Skipped " + skippedPointsCounter + " points due to bytepacked grid filtering"); - } - - if ((tilerootdata.Count - 1) <= 0) - { - Console.ForegroundColor = ConsoleColor.Yellow; - // TODO add json error log - Log.Write("Error> No tiles found! 
Try enable -scale (to make your cloud to smaller) Or make -gridsize bigger, or set -limit point count to smaller value"); - Console.ForegroundColor = ConsoleColor.White; - } - - // cleanup after last file (cannot clear for each file, since its static for all files) - nodeBounds.Clear(); - - localBounds.Init(); - - //cloudMinX = float.PositiveInfinity; - //cloudMinY = float.PositiveInfinity; - //cloudMinZ = float.PositiveInfinity; - //cloudMaxX = float.NegativeInfinity; - //cloudMaxY = float.NegativeInfinity; - //cloudMaxZ = float.NegativeInfinity; - // } // if last file - - // clear all lists - //keyCache.Clear(); - //nodeX.Clear(); - //nodeY.Clear(); - //nodeZ.Clear(); - //nodeR.Clear(); - //nodeG.Clear(); - //nodeB.Clear(); - //nodeIntensity.Clear(); - //nodeTime.Clear(); - - // dispose - bsPoints?.Dispose(); - writerPoints?.Dispose(); - } // close - - void IWriter.Cleanup(int fileIndex) - { - //Log.Write("Cleanup: this doesnt do anything yet.."); - //Dispose(); - bsPoints?.Dispose(); - writerPoints?.Dispose(); - - nodeX.Clear(); - nodeY.Clear(); - nodeZ.Clear(); - nodeR.Clear(); - nodeG.Clear(); - nodeB.Clear(); - nodeIntensity.Clear(); - nodeClassification.Clear(); - nodeTime.Clear(); } void IWriter.Randomize() { - } void IWriter.AddPoint(int index, float x, float y, float z, float r, float g, float b, ushort intensity, double time, byte classification) { float gridSize = importSettings.gridSize; - // add to correct cell, MOVE to writer - // TODO handle bytepacked gridsize here int cellX = (int)(x / gridSize); int cellY = (int)(y / gridSize); int cellZ = (int)(z / gridSize); - keyBuilder.Clear(); - keyBuilder.Append(cellX); - keyBuilder.Append('_'); - keyBuilder.Append(cellY); - keyBuilder.Append('_'); - keyBuilder.Append(cellZ); - //string key = keyBuilder.ToString(); var key = (cellX, cellY, cellZ); - if (importSettings.packColors == true) - { - //if (keyCache.TryGetValue(key, out _) == false) - //{ - // keyCache.Add(key, (cellX, cellY, cellZ)); // or if useLossyFiltering - //} - } - - //if (!nodeX.TryGetValue(key, out var xs)) nodeX[key] = xs = new List(); - - // if already exists, add to existing list - if (nodeX.TryGetValue(key, out _)) + // Get or create point data for this cell + if (!nodeData.TryGetValue(key, out var data)) { - nodeX[key].Add(x); - nodeY[key].Add(y); - nodeZ[key].Add(z); - - nodeR[key].Add(r); - nodeG[key].Add(g); - nodeB[key].Add(b); - - if (importSettings.importRGB && importSettings.importIntensity == true) nodeIntensity[key].Add(intensity); - // TODO separate if rgb and or int? 
- if (importSettings.importRGB && importSettings.importClassification == true) nodeClassification[key].Add(classification); - if (importSettings.averageTimestamp == true) nodeTime[key].Add(time); + data = new PointData + { + X = new List { x }, + Y = new List { y }, + Z = new List { z }, + R = new List { r }, + G = new List { g }, + B = new List { b } + }; + + if (importSettings.importRGB && importSettings.importIntensity) data.Intensity = new List { intensity }; + if (importSettings.importRGB && importSettings.importClassification) data.Classification = new List { classification }; + if (importSettings.averageTimestamp) data.Time = new List { time }; + + nodeData[key] = data; } - else // create new list for this key + else // got existing cell, add point to it { - // NOTE if memory error here, use smaller gridsize (single array maxsize is ~2gb) - nodeX[key] = new List { x }; - nodeY[key] = new List { y }; - nodeZ[key] = new List { z }; - nodeR[key] = new List { r }; - nodeG[key] = new List { g }; - nodeB[key] = new List { b }; - - if (importSettings.importRGB && importSettings.importIntensity == true) nodeIntensity[key] = new List { intensity }; - if (importSettings.importRGB && importSettings.importClassification == true) nodeClassification[key] = new List { classification }; - if (importSettings.averageTimestamp == true) nodeTime[key] = new List { time }; + // Since PointData is now a class (reference type), modifications persist + data.X.Add(x); + data.Y.Add(y); + data.Z.Add(z); + data.R.Add(r); + data.G.Add(g); + data.B.Add(b); + + if (importSettings.importRGB && importSettings.importIntensity) data.Intensity.Add(intensity); + if (importSettings.importRGB && importSettings.importClassification) data.Classification.Add(classification); + if (importSettings.averageTimestamp) data.Time.Add(time); } - } // addpoint() + } [MethodImpl(MethodImplOptions.AggressiveInlining)] unsafe void FloatToBytes(float value, byte[] buffer, int offset) @@ -551,8 +247,6 @@ unsafe void IntToBytes(int value, byte[] buffer, int offset) } } - - // returns list of saved files void IWriter.Save(int fileIndex) { if (useLossyFiltering == true) @@ -562,69 +256,59 @@ void IWriter.Save(int fileIndex) string fileOnly = Path.GetFileNameWithoutExtension(importSettings.outputFile); string baseFolder = Path.GetDirectoryName(importSettings.outputFile); - // TODO no need colors for json.. 
could move this inside custom logger, so that it doesnt do anything, if json Console.ForegroundColor = ConsoleColor.Blue; - Log.Write("Saving " + nodeX.Count + " tiles into: " + baseFolder); + Log.Write("Saving " + nodeData.Count + " tiles into: " + baseFolder); Console.ForegroundColor = ConsoleColor.White; List nodeTempX; List nodeTempY; List nodeTempZ; - List nodeTempR; List nodeTempG; List nodeTempB; - List nodeTempIntensity = null; List nodeTempClassification = null; List nodeTempTime = null; List outputFiles = new List(); - // process all tiles - //foreach (KeyValuePair> nodeData in nodeX) - //foreach (KeyValuePair> nodeData in nodeX) - foreach (KeyValuePair<(int x, int y, int z), List> nodeData in nodeX) + // Process all tiles + foreach (KeyValuePair<(int x, int y, int z), PointData> nodeEntry in nodeData) { - if (nodeData.Value.Count < importSettings.minimumPointCount) + var key = nodeEntry.Key; + var data = nodeEntry.Value; + + if (data.X.Count < importSettings.minimumPointCount) { skippedNodesCounter++; continue; } - nodeTempX = nodeData.Value; + nodeTempX = data.X; + nodeTempY = data.Y; + nodeTempZ = data.Z; + nodeTempR = data.R; + nodeTempG = data.G; + nodeTempB = data.B; - var key = nodeData.Key; - //int key = nodeData.Key; - - nodeTempY = nodeY[key]; - nodeTempZ = nodeZ[key]; - - nodeTempR = nodeR[key]; - nodeTempG = nodeG[key]; - nodeTempB = nodeB[key]; - - // collect both rgb and intensity - if (importSettings.importRGB == true && importSettings.importIntensity == true) - //if (importSettings.importIntensity == true) + if (importSettings.importRGB && importSettings.importIntensity) { - nodeTempIntensity = nodeIntensity[key]; + nodeTempIntensity = data.Intensity; } - // TODO separate? - if (importSettings.importRGB == true && importSettings.importClassification == true) + if (importSettings.importRGB && importSettings.importClassification) { - nodeTempClassification = nodeClassification[key]; + nodeTempClassification = data.Classification; } - if (importSettings.averageTimestamp == true) + if (importSettings.averageTimestamp) { - nodeTempTime = nodeTime[key]; + nodeTempTime = data.Time; } - // randomize points in this node + // Randomize points if enabled if (importSettings.randomize) { Tools.ShufflePointAttributes( @@ -639,9 +323,7 @@ void IWriter.Save(int fileIndex) ); } - - - // get this node bounds, TODO but we know node(grid cell) x,y,z values? + // Get tile bounds float minX = float.PositiveInfinity; float minY = float.PositiveInfinity; float minZ = float.PositiveInfinity; @@ -649,64 +331,37 @@ void IWriter.Save(int fileIndex) float maxY = float.NegativeInfinity; float maxZ = float.NegativeInfinity; - // build tilefile for points in this node - string fullpath = Path.Combine(baseFolder, fileOnly) + "_" + fileIndex + "_" + key + tileExtension; - string fullpathFileOnly = fileOnly + "_" + fileIndex + "_" + key + tileExtension; - - // if batch mode (more than 1 file), FIXME generates new unique filename..but why not overwrite? - // THIS is now disabled, it didnt really work since pcroot was not updated with new file names! - //if (fileIndex > 0 && File.Exists(fullpath)) - //{ - //Log.Write("File already exists! " + fullpath); - ////Console.WriteLine("File already exists! 
" + fullpath); - //Int32 unixTimestamp = (Int32)(DateTime.UtcNow.Subtract(new DateTime(1970, 1, 1))).TotalSeconds; - //fullpath = Path.Combine(baseFolder, fileOnly) + "_" + fileIndex + "_" + key + "_r" + (unixTimestamp) + tileExtension; - //fullpathFileOnly = fileOnly + "_" + fileIndex + "_" + key + tileExtension; - //} - - // save this tile - //Log.Write("*** Saving tile: " + fullpathFileOnly + " (" + nodeTempX.Count + " points)"); + int cellX = key.x; + int cellY = key.y; + int cellZ = key.z; + + string fullpath = Path.Combine(baseFolder, fileOnly) + "_" + fileIndex + "_" + cellX + "_" + cellY + "_" + cellZ + tileExtension; + string fullpathFileOnly = fileOnly + "_" + fileIndex + "_" + cellX + "_" + cellY + "_" + cellZ + tileExtension; + bsPoints = new BufferedStream(new FileStream(fullpath, FileMode.Create)); writerPoints = new BinaryWriter(bsPoints); - // collect list of saved files outputFiles.Add(fullpath); - int cellX = 0; - int cellY = 0; - int cellZ = 0; - // FIXME this is wrong value, if file is appended.. but for now append is disabled int totalPointsWritten = 0; - - // TESTING for lossy - int fixedGridSize = 10; // one tile is this size - int cellsInTile = 64; // how many subtiles in one tile - //float center = (1f / (float)cells) / 2f; + int fixedGridSize = 10; + int cellsInTile = 64; bool[] reservedGridCells = null; if (useLossyFiltering == true) reservedGridCells = new bool[cellsInTile * cellsInTile * cellsInTile]; - //Console.WriteLine("nodeTempX.Count="+ nodeTempX.Count); + double totalTime = 0; - double totalTime = 0; // for average timestamp - - // loop and output all points within that node/tile + // Write all points in this tile for (int i = 0, len = nodeTempX.Count; i < len; i++) { - //// skip points - //if (importSettings.skipPoints == true && (i % importSettings.skipEveryN == 0)) continue; - - //// keep points - //if (importSettings.keepPoints == true && (i % importSettings.keepEveryN != 0)) continue; - - // get original world positions float px = nodeTempX[i]; float py = nodeTempY[i]; float pz = nodeTempZ[i]; int packedX = 0; int packedY = 0; - // FIXME bounds is wrong if appended (but append is disabled now), should include previous data also, but now append is disabled.. also probably should use known cell xyz bounds directly + if (px < minX) minX = px; if (px > maxX) maxX = px; if (py < minY) minY = py; @@ -716,23 +371,12 @@ void IWriter.Save(int fileIndex) if (importSettings.packColors == true) { - // get local coords within tile - //var keys = nodeData.Key.Split('_'); - //(cellX, cellY, cellZ) = keyCache[key]; - cellX = key.x; - cellY = key.y; - cellZ = key.z; - // TODO no need to parse, we should know these values? - //cellX = int.Parse(keys[0]); - //cellY = int.Parse(keys[1]); - //cellZ = int.Parse(keys[2]); - // offset to local coords (within tile) px -= (cellX * importSettings.gridSize); py -= (cellY * importSettings.gridSize); pz -= (cellZ * importSettings.gridSize); - // pack G, Py and INTensity - if (importSettings.importRGB == true && importSettings.importIntensity == true && importSettings.importClassification == false) + // Pack G, Py and INTensity + if (importSettings.importRGB && importSettings.importIntensity && !importSettings.importClassification) { float c = py; int cIntegral = (int)c; @@ -740,8 +384,8 @@ void IWriter.Save(int fileIndex) byte bg = (byte)(nodeTempG[i] * 255); byte bi = importSettings.useCustomIntensityRange ? 
(byte)(nodeTempIntensity[i] / 257) : (byte)nodeTempIntensity[i]; packedY = (bg << 24) | (bi << 16) | (cIntegral << 8) | cFractional; - } // pack G, Py, CLASSification - else if (importSettings.importRGB == true && importSettings.importIntensity == false && importSettings.importClassification == true) + } + else if (importSettings.importRGB && !importSettings.importIntensity && importSettings.importClassification) { float c = py; int cIntegral = (int)c; @@ -749,25 +393,22 @@ void IWriter.Save(int fileIndex) byte bg = (byte)(nodeTempG[i] * 255); byte bc = nodeTempClassification[i]; packedY = (bg << 24) | (bc << 16) | (cIntegral << 8) | cFractional; - } // pack G, Py, INTensity, CLASSification - else if (importSettings.importRGB == true && importSettings.importIntensity == true && importSettings.importClassification == true) + } + else if (importSettings.importRGB && importSettings.importIntensity && importSettings.importClassification) { float c = py; int cIntegral = (int)c; int cFractional = (int)((c - cIntegral) * 255); byte bg = (byte)(nodeTempG[i] * 255); byte bi = importSettings.useCustomIntensityRange ? (byte)(nodeTempIntensity[i] / 257) : (byte)nodeTempIntensity[i]; - // byte bi = nodeTempIntensity[i]; packedY = (bg << 24) | (bi << 16) | (cIntegral << 8) | cFractional; } - else // pack G and Py + else { - // pack green and y (note this is lossy, especially with *0.98) py = Tools.SuperPacker(nodeTempG[i] * 0.98f, py, importSettings.gridSize * importSettings.packMagicValue); } - // pack Red, Px, CLASSification (since intensity is already in green) - if (importSettings.importRGB == true && importSettings.importIntensity == true && importSettings.importClassification == true) + if (importSettings.importRGB && importSettings.importIntensity && importSettings.importClassification) { float c = px; int cIntegral = (int)c; @@ -776,51 +417,18 @@ void IWriter.Save(int fileIndex) byte bc = nodeTempClassification[i]; packedX = (br << 24) | (bc << 16) | (cIntegral << 8) | cFractional; } - else // pack Red and Px + else { px = Tools.SuperPacker(nodeTempR[i] * 0.98f, px, importSettings.gridSize * importSettings.packMagicValue); } - // pack blue and z pz = Tools.SuperPacker(nodeTempB[i] * 0.98f, pz, importSettings.gridSize * importSettings.packMagicValue); - - // TODO pack intensity also? - //if (importSettings.importIntensity) - //{ - // //px = Tools.SuperPacker(nodeTempIntensity[i] * 0.98f, px, importSettings.gridSize * importSettings.packMagicValue); - // //py = Tools.SuperPacker(nodeTempIntensity[i] * 0.98f, py, importSettings.gridSize * importSettings.packMagicValue); - // //pz = Tools.SuperPacker(nodeTempIntensity[i] * 0.98f, pz, importSettings.gridSize * importSettings.packMagicValue); - // //px = Tools.SuperPacker3(nodeTempR[i] * 0.98f, nodeTempIntensity[i] * 0.98f, px); - // //py = Tools.SuperPacker3(nodeTempG[i] * 0.98f, nodeTempIntensity[i] * 0.98f, py); - // //pz = Tools.SuperPacker3(nodeTempB[i] * 0.98f, nodeTempIntensity[i] * 0.98f, pz); - //} - } - else if (useLossyFiltering == true) // test lossy, not regular packed + else if (useLossyFiltering == true) { - // get local coords within tile - //var keys = nodeData.Key.Split('_'); - // TODO no need to parse, we should know these values? these are world cell grid coors - // TODO take reserved grid cells earlier, when reading points! not here on 2nd pass.. 
- //cellX = int.Parse(keys[0]); - //cellY = int.Parse(keys[1]); - //cellZ = int.Parse(keys[2]); - //(cellX, cellY, cellZ) = keyCache[key]; - cellX = key.x; - cellY = key.y; - cellZ = key.z; - // offset point inside local tile - //(int restoredX, int restoredY, int restoredZ) = Unhash(nodeData.Key); - //cellX = restoredX; - //cellY = restoredY; - //cellZ = restoredZ; px -= (cellX * fixedGridSize); py -= (cellY * fixedGridSize); pz -= (cellZ * fixedGridSize); - //byte packx = (byte)(px * cells); - //byte packy = (byte)(py * cells); - //byte packz = (byte)(pz * cells); - // normalize into tile coords px /= (float)cellsInTile; py /= (float)cellsInTile; pz /= (float)cellsInTile; @@ -830,9 +438,6 @@ void IWriter.Save(int fileIndex) var reservedTileLocalCellIndex = packx + cellsInTile * (packy + cellsInTile * packz); - //if (i < 10) Log.Write("cellX:" + cellX + " cellY:" + cellY + " cellZ:" + cellZ + " px: " + px + " py: " + py + " pz: " + pz + " localIndex: " + reservedTileLocalCellIndex + " packx: " + packx + " packy: " + packy + " packz: " + packz); - - // TODO could decide which point is more important or stronger color? if (reservedGridCells[reservedTileLocalCellIndex] == true) { skippedPointsCounter++; @@ -840,7 +445,7 @@ void IWriter.Save(int fileIndex) } reservedGridCells[reservedTileLocalCellIndex] = true; - } // if packed or lossy + } if (useLossyFiltering == true) { @@ -848,191 +453,112 @@ void IWriter.Save(int fileIndex) byte by = (byte)(py * cellsInTile); byte bz = (byte)(pz * cellsInTile); - float h = 0f; - float s = 0f; - float v = 0f; + float h = 0f, s = 0f, v = 0f; RGBtoHSV(nodeTempR[i], nodeTempG[i], nodeTempB[i], out h, out s, out v); - //if (i < 3) Console.WriteLine("h: " + h + " s: " + s + " v: " + v); - - // fix values h = h / 360f; - byte bh = (byte)(h * 255f); byte bs = (byte)(s * 255f); byte bv = (byte)(v * 255f); - // cut off 3 bits (from 8 bits) byte huepacked = (byte)(bh >> 3); - // cut off 3 bits, then move in the middle bits byte satpacked = (byte)(bs >> 3); - // cut off 4 bits (from 8 bits) byte valpacked = (byte)(bv >> 4); - // combine H (5 bits), S (5 bits), V (4 bits) uint hsv554 = (uint)((huepacked << 9) + (satpacked << 5) + valpacked); uint combinedXYZHSV = (uint)(((bz + by << 6 + bx << 12)) << 14) + hsv554; writerPoints.Write((uint)combinedXYZHSV); } - else // write packed and unpacked + else { - //writerPoints.Write(px); - //if (importSettings.packColors == true && importSettings.importRGB == true && importSettings.importIntensity == true) - //{ - // writerPoints.Write(packed); - //} - //else - //{ - // writerPoints.Write(py); - //} - //writerPoints.Write(pz); - - // x, red, classification - if (importSettings.packColors == true && importSettings.importRGB == true && importSettings.importIntensity == true && importSettings.importClassification == true) + if (importSettings.packColors && importSettings.importRGB && importSettings.importIntensity && importSettings.importClassification) { - IntToBytes(packedX, pointBuffer, 0); // Convert int to bytes manually + IntToBytes(packedX, pointBuffer, 0); } - else // x, red + else { FloatToBytes(px, pointBuffer, 0); } - // packed: y, green, intensity AND/OR classification - if (importSettings.packColors == true && importSettings.importRGB == true && (importSettings.importIntensity == true || importSettings.importClassification == true)) + if (importSettings.packColors && importSettings.importRGB && (importSettings.importIntensity || importSettings.importClassification)) { - // y, int, classification for now 
IntToBytes(packedY, pointBuffer, 4); } - else // y + else { FloatToBytes(py, pointBuffer, 4); } - // z FloatToBytes(pz, pointBuffer, 8); - writerPoints.Write(pointBuffer); - } // wrote packed or unpacked xyz + } - if (importSettings.averageTimestamp == true) + if (importSettings.averageTimestamp) { - //double ptime = - totalTime += nodeTempTime[i]; // time for this single point - //Console.WriteLine(ptime); + totalTime += nodeTempTime[i]; } totalPointsWritten++; - } // loop all points in tile (node) + } - // close tile file writerPoints.Close(); bsPoints.Dispose(); - // not packed + // Write separate RGB file if not packed if (importSettings.packColors == false && useLossyFiltering == false) { - //try - //{ - // save separate RGB using (var writerColors = new BinaryWriter(new BufferedStream(new FileStream(fullpath + ".rgb", FileMode.Create)))) { - //bool skipPoints = importSettings.skipPoints; - //bool keepPoints = importSettings.keepPoints; - //int skipEveryN = importSettings.skipEveryN; - //int keepEveryN = importSettings.keepEveryN; - int len = nodeTempX.Count; - - //unsafe void FloatToBytes(float value, byte[] buffer, int offset) - //{ - // fixed (byte* b = &buffer[offset]) - // { - // *(float*)b = value; - // } - //} - for (int i = 0; i < len; i++) { - //if ((skipPoints && (i % skipEveryN == 0)) || (keepPoints && (i % keepEveryN != 0))) continue; - FloatToBytes(nodeTempR[i], colorBuffer, 0); FloatToBytes(nodeTempG[i], colorBuffer, 4); FloatToBytes(nodeTempB[i], colorBuffer, 8); - writerColors.Write(colorBuffer); } } - //} - //catch (Exception e) - //{ - // Trace.WriteLine("Error writing RGB file: " + e.Message); - // throw; - //} - - // TESTING save separate Intensity, if both rgb and intensity are enabled - if (importSettings.importRGB == true && importSettings.importIntensity == true) + + // Write intensity file + if (importSettings.importRGB && importSettings.importIntensity) { - BufferedStream bsIntensity; - bsIntensity = new BufferedStream(new FileStream(fullpath + ".int", FileMode.Create)); + BufferedStream bsIntensity = new BufferedStream(new FileStream(fullpath + ".int", FileMode.Create)); var writerIntensity = new BinaryWriter(bsIntensity); - // output all points within that node cell for (int i = 0, len = nodeTempX.Count; i < len; i++) { - //// skip points - //if (importSettings.skipPoints == true && (i % importSettings.skipEveryN == 0)) continue; - - //// keep points - //if (importSettings.keepPoints == true && (i % importSettings.keepEveryN != 0)) continue; - - // TODO write as byte (not RGB floats) and write all in one float c = nodeTempIntensity[i] / 255f; writerIntensity.Write(c); writerIntensity.Write(c); writerIntensity.Write(c); - } // loop all point in cell cells + } - // close tile/node writerIntensity.Close(); bsIntensity.Dispose(); } - // TEST separate classification - if (importSettings.importRGB == true && importSettings.importClassification == true) + // Write classification file + if (importSettings.importRGB && importSettings.importClassification) { - BufferedStream bsClassification; - bsClassification = new BufferedStream(new FileStream(fullpath + ".cla", FileMode.Create)); + BufferedStream bsClassification = new BufferedStream(new FileStream(fullpath + ".cla", FileMode.Create)); var writerClassification = new BinaryWriter(bsClassification); - // output all points within that node cell for (int i = 0, len = nodeTempX.Count; i < len; i++) { - //// skip points - //if (importSettings.skipPoints == true && (i % importSettings.skipEveryN == 0)) continue; - - 
//// keep points - //if (importSettings.keepPoints == true && (i % importSettings.keepEveryN != 0)) continue; - - // TODO write as byte (not RGB floats) float c = nodeTempClassification[i] / 255f; writerClassification.Write(c); writerClassification.Write(c); writerClassification.Write(c); - } // loop all point in cell cells + } - // close tile/node writerClassification.Close(); bsClassification.Dispose(); } + } - } // if packColors == false && useLossyFiltering == false - - // collect node bounds, name and pointcount + // Collect node bounds var cb = new PointCloudTile(); cb.fileName = fullpathFileOnly; - //cb.totalPoints = nodeTempX.Count; cb.totalPoints = totalPointsWritten; - - // get bounds and cell XYZ cb.minX = minX; cb.minY = minY; cb.minZ = minZ; @@ -1046,7 +572,6 @@ void IWriter.Save(int fileIndex) cb.cellY = cellY; cb.cellZ = cellZ; - // add minmax to local bounds localBounds.minX = Math.Min(localBounds.minX, minX); localBounds.minY = Math.Min(localBounds.minY, minY); localBounds.minZ = Math.Min(localBounds.minZ, minZ); @@ -1054,35 +579,184 @@ void IWriter.Save(int fileIndex) localBounds.maxY = Math.Max(localBounds.maxY, maxY); localBounds.maxZ = Math.Max(localBounds.maxZ, maxZ); - // merge local bounds from this tile into global GlobalBounds.Merge(in localBounds); - //Log.Write(localBounds.minX + "," + localBounds.maxX); - - - if (importSettings.averageTimestamp == true && totalPointsWritten > 0) + if (importSettings.averageTimestamp && totalPointsWritten > 0) { double averageTime = totalTime / totalPointsWritten; - //Console.WriteLine("averageTime: " + averageTime); cb.averageTimeStamp = averageTime; } - // this tile data nodeBoundsBag.Add(cb); - } // loop all nodes/tiles foreach + } - // finished this file string jsonString = "{" + "\"event\": \"" + LogEvent.File + "\"," + "\"status\": \"" + LogStatus.Complete + "\"," + "\"path\": " + JsonSerializer.Serialize(importSettings.inputFiles[fileIndex]) + "," + - "\"tiles\": " + nodeX.Count + "," + - "\"folder\": " + JsonSerializer.Serialize(baseFolder) + "}" + - "\"filenames\": " + JsonSerializer.Serialize(outputFiles); + "\"tiles\": " + nodeData.Count + "," + + "\"folder\": " + JsonSerializer.Serialize(baseFolder) + "," + + "\"filenames\": " + JsonSerializer.Serialize(outputFiles) + "}"; Log.Write(jsonString, LogEvent.End); + } + + void IWriter.Close() + { + if (importSettings == null) return; - } // Save() + var nodeBounds = nodeBoundsBag.ToList(); + if (importSettings.checkoverlap == true) + { + for (int i = 0, len = nodeBounds.Count; i < len; i++) + { + var cb = nodeBounds[i]; + for (int j = 0, len2 = nodeBounds.Count; j < len2; j++) + { + if (i == j) continue; + var cb2 = nodeBounds[j]; + float epsilon = 1e-6f; + bool overlaps = cb.minX < cb2.maxX + epsilon && cb.maxX > cb2.minX - epsilon && + cb.minY < cb2.maxY + epsilon && cb.maxY > cb2.minY - epsilon && + cb.minZ < cb2.maxZ + epsilon && cb.maxZ > cb2.minZ - epsilon; + + if (overlaps) + { + float overlapX = Math.Min(cb.maxX, cb2.maxX) - Math.Max(cb.minX, cb2.minX); + float overlapY = Math.Min(cb.maxY, cb2.maxY) - Math.Max(cb.minY, cb2.minY); + float overlapZ = Math.Min(cb.maxZ, cb2.maxZ) - Math.Max(cb.minZ, cb2.minZ); + float overlapVolume = overlapX * overlapY * overlapZ; + float volume1 = (cb.maxX - cb.minX) * (cb.maxY - cb.minY) * (cb.maxZ - cb.minZ); + float volume2 = (cb2.maxX - cb2.minX) * (cb2.maxY - cb2.minY) * (cb2.maxZ - cb2.minZ); + + if (volume1 != 0 && volume2 != 0) + { + float overlapRatio = overlapVolume / Math.Min(volume1, volume2); + cb.overlapRatio = 
overlapRatio;
+                            }
+                            else
+                            {
+                                cb.overlapRatio = 0;
+                            }
+
+                            nodeBounds[i] = cb;
+                        }
+                    }
+                }
+            }
+
+            string fileOnly = Path.GetFileNameWithoutExtension(importSettings.outputFile);
+            string baseFolder = Path.GetDirectoryName(importSettings.outputFile);
+
+            var tilerootdata = new List<string>();
+            var outputFileRoot = Path.Combine(baseFolder, fileOnly) + ".pcroot";
+
+            long totalPointCount = 0;
+
+            for (int i = 0, len = nodeBounds.Count; i < len; i++)
+            {
+                var tilerow = nodeBounds[i].totalPoints + sep + nodeBounds[i].minX + sep + nodeBounds[i].minY + sep + nodeBounds[i].minZ + sep + nodeBounds[i].maxX + sep + nodeBounds[i].maxY + sep + nodeBounds[i].maxZ + sep + nodeBounds[i].cellX + sep + nodeBounds[i].cellY + sep + nodeBounds[i].cellZ + sep + nodeBounds[i].averageTimeStamp + sep + nodeBounds[i].overlapRatio;
+                tilerow = tilerow.Replace(",", ".");
+                tilerow = nodeBounds[i].fileName + sep + tilerow;
+                tilerootdata.Add(tilerow);
+                totalPointCount += nodeBounds[i].totalPoints;
+            }
+
+            string jsonString = "{" +
+                "\"event\": \"" + LogEvent.File + "\"," +
+                "\"path\": " + JsonSerializer.Serialize(outputFileRoot) + "," +
+                "\"totalpoints\": " + totalPointCount + "," +
+                "\"skippedNodes\": " + skippedNodesCounter + "," +
+                "\"skippedPoints\": " + skippedPointsCounter +
+                "}";
+
+            Log.Write(jsonString, LogEvent.End);
+            Log.Write("\nSaving rootfile: " + outputFileRoot + "\n*Total points= " + Tools.HumanReadableCount(totalPointCount));
+
+            int versionID = importSettings.packColors ? 2 : 1;
+            if (importSettings.packColors == true) versionID = 2;
+            if (useLossyFiltering == true) versionID = 3;
+            if ((importSettings.importIntensity || importSettings.importClassification) && importSettings.importRGB && importSettings.packColors) versionID = 4;
+            if ((importSettings.importIntensity && importSettings.importClassification) && importSettings.importRGB && importSettings.packColors) versionID = 5;
+
+            bool addComments = false;
+
+            string identifier = "# PCROOT - https://github.com/unitycoder/PointCloudConverter";
+            if (addComments) tilerootdata.Insert(0, identifier);
+
+            string commentRow = "# version" + sep + "gridsize" + sep + "pointcount" + sep + "boundsMinX" + sep + "boundsMinY" + sep + "boundsMinZ" + sep + "boundsMaxX" + sep + "boundsMaxY" + sep + "boundsMaxZ" + sep + "autoOffsetX" + sep + "autoOffsetY" + sep + "autoOffsetZ" + sep + "packMagicValue";
+            if (importSettings.importRGB && importSettings.importIntensity) commentRow += sep + "intensity";
+            if (importSettings.importRGB && importSettings.importClassification) commentRow += sep + "classification";
+            if (addComments) tilerootdata.Insert(1, commentRow);
+
+            GlobalBounds.Merge(localBounds);
+            float cloudMinX = GlobalBounds.minX;
+            float cloudMinY = GlobalBounds.minY;
+            float cloudMinZ = GlobalBounds.minZ;
+            float cloudMaxX = GlobalBounds.maxX;
+            float cloudMaxY = GlobalBounds.maxY;
+            float cloudMaxZ = GlobalBounds.maxZ;
+
+            GlobalBounds.Reset();
+
+            string globalData = versionID + sep + importSettings.gridSize.ToString() + sep + totalPointCount + sep + cloudMinX + sep + cloudMinY + sep + cloudMinZ + sep + cloudMaxX + sep + cloudMaxY + sep + cloudMaxZ;
+            globalData += sep + importSettings.offsetX + sep + importSettings.offsetY + sep + importSettings.offsetZ + sep + importSettings.packMagicValue;
+            globalData = globalData.Replace(",", ".");
+
+            if (addComments)
+            {
+                tilerootdata.Insert(2, globalData);
+            }
+            else
+            {
+                tilerootdata.Insert(0, globalData);
+            }
+
+            if (addComments) tilerootdata.Insert(3, "# filename" + sep + "pointcount" + sep + "minX" + sep +
"minY" + sep + "minZ" + sep + "maxX" + sep + "maxY" + sep + "maxZ" + sep + "cellX" + sep + "cellY" + sep + "cellZ" + sep + "averageTimeStamp" + sep + "overlapRatio"); + + File.WriteAllLines(outputFileRoot, tilerootdata.ToArray()); + + Console.ForegroundColor = ConsoleColor.Green; + Log.Write("Done saving v3 : " + outputFileRoot); + Console.ForegroundColor = ConsoleColor.White; + if (skippedNodesCounter > 0) + { + Log.Write("*Skipped " + skippedNodesCounter + " nodes with less than " + importSettings.minimumPointCount + " points)"); + } + + if (useLossyFiltering && skippedPointsCounter > 0) + { + Log.Write("*Skipped " + skippedPointsCounter + " points due to bytepacked grid filtering"); + } + + if ((tilerootdata.Count - 1) <= 0) + { + Console.ForegroundColor = ConsoleColor.Yellow; + Log.Write("Error> No tiles found! Try enable -scale (to make your cloud to smaller) Or make -gridsize bigger, or set -limit point count to smaller value"); + Console.ForegroundColor = ConsoleColor.White; + } + + nodeBounds.Clear(); + localBounds.Init(); + + bsPoints?.Dispose(); + writerPoints?.Dispose(); + } + + void IWriter.Cleanup(int fileIndex) + { + bsPoints?.Dispose(); + writerPoints?.Dispose(); + + if (nodeData != null) + { + foreach (var data in nodeData.Values) + { + data.Clear(); + } + nodeData.Clear(); + } + } void RGBtoHSV(float r, float g, float b, out float h, out float s, out float v) { @@ -1090,28 +764,27 @@ void RGBtoHSV(float r, float g, float b, out float h, out float s, out float v) min = Math.Min(Math.Min(r, g), b); max = Math.Max(Math.Max(r, g), b); - v = max; // v + v = max; delta = max - min; if (max != 0) - s = delta / max; // s + s = delta / max; else { - // r = g = b = 0 // s = 0, v is undefined s = 0; h = -1; return; } if (r == max) - h = (g - b) / delta; // between yellow & magenta + h = (g - b) / delta; else if (g == max) - h = 2 + (b - r) / delta; // between cyan & yellow + h = 2 + (b - r) / delta; else - h = 4 + (r - g) / delta; // between magenta & cyan + h = 4 + (r - g) / delta; - h *= 60; // degrees + h *= 60; if (h < 0) h += 360; } @@ -1120,5 +793,5 @@ public void SetIntensityRange(bool isCustomRange) { importSettings.useCustomIntensityRange = isCustomRange; } - } // class -} // namespace + } +} \ No newline at end of file From eef26a1ac87c9151db0c143ee79e05bbe39a8eee Mon Sep 17 00:00:00 2001 From: unitycoder Date: Mon, 27 Oct 2025 20:33:17 +0200 Subject: [PATCH 07/10] refactor GetXYZ and GetRGB, breaking change for IReader --- MainWindow.xaml.cs | 58 ++++++++++++++++++++------------------- Readers/E57.cs | 39 ++++++++++++++++----------- Readers/IReader.cs | 4 +-- Readers/LAZ.cs | 47 ++++++++++++-------------------- Readers/PLY.cs | 67 ++++++++++++++++++++++++++++------------------ 5 files changed, 113 insertions(+), 102 deletions(-) diff --git a/MainWindow.xaml.cs b/MainWindow.xaml.cs index c73bbe3..758bab8 100644 --- a/MainWindow.xaml.cs +++ b/MainWindow.xaml.cs @@ -30,7 +30,7 @@ namespace PointCloudConverter { public partial class MainWindow : Window { - static readonly string version = "27.08.2025"; + static readonly string version = "27.10.2025"; static readonly string appname = "PointCloud Converter - " + version; static readonly string rootFolder = AppDomain.CurrentDomain.BaseDirectory; @@ -844,7 +844,7 @@ static bool ParseFile(ImportSettings importSettings, int fileIndex, int? 
taskId, // Loop all points // FIXME: would be nicer, if use different STEP value for skip, keep and limit..(to collect points all over the file, not just start) long maxPointIterations = importSettings.useLimit ? pointCount : fullPointCount; - for (int i = 0; i < maxPointIterations; i++) + for (long i = 0; i < maxPointIterations; i++) { // check for cancel every 1% of points if (i % checkCancelEvery == 0) @@ -857,22 +857,24 @@ static bool ParseFile(ImportSettings importSettings, int fileIndex, int? taskId, } // get point XYZ - Float3 point = taskReader.GetXYZ(); - if (point.hasError == true) break; // TODO display errors + var success = taskReader.GetXYZ(out float px, out float py, out float pz); + if (!success) break; // TODO display errors somewhere // get point color - Color rgb = (default); + //Color rgb = (default); + float pr = 1f, pg = 1f, pb = 1f; if (importSettings.importRGB == true) { - rgb = taskReader.GetRGB(); + //rgb = taskReader.GetRGB(); + taskReader.GetRGB(out pr, out pg, out pb); // convert from srg to linear (if your model seems too bright) if (importSettings.sRGB) { - rgb.r = Tools.SRGBToLinear(rgb.r); - rgb.g = Tools.SRGBToLinear(rgb.g); - rgb.b = Tools.SRGBToLinear(rgb.b); + pr = Tools.SRGBToLinear(pr); + pg = Tools.SRGBToLinear(pg); + pb = Tools.SRGBToLinear(pb); } } @@ -883,42 +885,42 @@ static bool ParseFile(ImportSettings importSettings, int fileIndex, int? taskId, if (importSettings.keepPoints == true && (i % importSettings.keepEveryN != 0)) continue; // add offsets (its 0 if not used) - point.x -= importSettings.offsetX; - point.y -= importSettings.offsetY; - point.z -= importSettings.offsetZ; + px -= importSettings.offsetX; + py -= importSettings.offsetY; + pz -= importSettings.offsetZ; // scale if enabled if (importSettings.useScale == true) { - point.x *= importSettings.scale; - point.y *= importSettings.scale; - point.z *= importSettings.scale; + px *= importSettings.scale; + py *= importSettings.scale; + pz *= importSettings.scale; } // flip if enabled if (importSettings.swapYZ == true) { - var temp = point.z; - point.z = point.y; - point.y = temp; + var temp = pz; + pz = py; + py = temp; } // flip Z if enabled if (importSettings.invertZ == true) { - point.z = -point.z; + pz = -pz; } // flip X if enabled if (importSettings.invertX == true) { - point.x = -point.x; + px = -px; } // filtering is done after scaling and offsets if (importSettings.useFilter) { - var cell = ((int)Math.Floor(point.x / importSettings.filterDistance), (int)Math.Floor(point.y / importSettings.filterDistance), (int)Math.Floor(point.z / importSettings.filterDistance)); + var cell = ((int)Math.Floor(px / importSettings.filterDistance), (int)Math.Floor(py / importSettings.filterDistance), (int)Math.Floor(pz / importSettings.filterDistance)); if (!occupiedCells.TryAdd(cell, 0)) { @@ -948,9 +950,9 @@ static bool ParseFile(ImportSettings importSettings, int fileIndex, int? taskId, // if no rgb, then replace RGB with intensity, NOTE this doesnt work correctly if using detect intensity range! (since raw value is now ushort, can be 0-65k) if (importSettings.importRGB == false) { - rgb.r = intensity / 255f; // convert byte to float - rgb.g = rgb.r; - rgb.b = rgb.r; + pr = intensity / 255f; // convert byte to float + pg = pr; + pb = pr; } } @@ -980,9 +982,9 @@ static bool ParseFile(ImportSettings importSettings, int fileIndex, int? 
taskId, // if no rgb, then replace RGB with intensity if (importSettings.importRGB == false) { - rgb.r = classification / 255f; - rgb.g = rgb.r; - rgb.b = rgb.r; + pr = classification / 255f; + pg = pr; + pb = pr; } } @@ -996,7 +998,7 @@ static bool ParseFile(ImportSettings importSettings, int fileIndex, int? taskId, // collect this point XYZ and RGB into node, optionally intensity also //importSettings.writer.AddPoint(i, (float)point.x, (float)point.y, (float)point.z, rgb.r, rgb.g, rgb.b, importSettings.importIntensity, intensity.r, importSettings.averageTimestamp, time); // TODO can remove importsettings, its already passed on init - taskWriter.AddPoint(index: i, x: (float)point.x, y: (float)point.y, z: (float)point.z, r: rgb.r, g: rgb.g, b: rgb.b, intensity: intensity, time: time, classification: classification); + taskWriter.AddPoint(index: (int)i, x: (float)px, y: (float)py, z: (float)pz, r: pr, g: pg, b: pb, intensity: intensity, time: time, classification: classification); //progressPoint = i; progressInfo.CurrentValue = i; } // for all points diff --git a/Readers/E57.cs b/Readers/E57.cs index 7f61530..9fac1ee 100644 --- a/Readers/E57.cs +++ b/Readers/E57.cs @@ -122,12 +122,16 @@ public long GetPointCount() return header?.E57Root?.Data3D?[0]?.Points?.RecordCount ?? 0; } - public Float3 GetXYZ() + public bool GetXYZ(out float x, out float y, out float z) { if (currentChunk == null || currentPointIndex >= currentChunk.Count) { if (!chunkEnumerator.MoveNext()) - return new Float3 { hasError = true }; + { + //return new Float3 { hasError = true }; + x = y = z = 0; + return false; + } currentChunk = chunkEnumerator.Current; currentPointIndex = 0; @@ -137,18 +141,22 @@ public Float3 GetXYZ() } var p = currentChunk.Positions[currentPointIndex]; - lastXYZ.x = p.X; - lastXYZ.y = p.Y; - lastXYZ.z = p.Z; - lastXYZ.hasError = false; + //lastXYZ.x = p.X; + //lastXYZ.y = p.Y; + //lastXYZ.z = p.Z; + //lastXYZ.hasError = false; + x = (float)p.X; + y = (float)p.Y; + z = (float)p.Z; currentPointIndex++; - return lastXYZ; + //return lastXYZ; + return true; } private C3b[] cachedColors = null; - public Color GetRGB() + public void GetRGB(out float r, out float g, out float b) { if (cachedColors == null && currentChunk?.Colors != null) { @@ -160,15 +168,14 @@ public Color GetRGB() if (cachedColors != null && i >= 0 && i < cachedColors.Length) { var c = cachedColors[i]; - return new Color - { - r = c.R / 255f, - g = c.G / 255f, - b = c.B / 255f - }; + r = c.R / 255f; + g = c.G / 255f; + b = c.B / 255f; + } + else + { + r = g = b = 0f; } - - return default; } public ushort GetIntensity() diff --git a/Readers/IReader.cs b/Readers/IReader.cs index 4b26a29..40a5ad7 100644 --- a/Readers/IReader.cs +++ b/Readers/IReader.cs @@ -12,9 +12,9 @@ public interface IReader // bounds are used for AutoOffset Bounds GetBounds(); // retrieve single point X,Y,Z coordinates (float) - Float3 GetXYZ(); + bool GetXYZ(out float x, out float y, out float z); // retrieve single point R,G,B colors (byte 0-255) - Color GetRGB(); + void GetRGB(out float r, out float g, out float b); // retrieve single point scan time double GetTime(); diff --git a/Readers/LAZ.cs b/Readers/LAZ.cs index b90b851..944d45d 100644 --- a/Readers/LAZ.cs +++ b/Readers/LAZ.cs @@ -17,6 +17,7 @@ using Color = PointCloudConverter.Structs.Color; using System.Xml.Linq; using Windows.Data.Xml.Dom; +using System.Diagnostics; namespace PointCloudConverter.Readers { @@ -360,27 +361,23 @@ long IReader.GetPointCount() return count; } - Color IReader.GetRGB() + void 
IReader.GetRGB(out float r, out float g, out float b) { - var c = new Color(); - // get point reference var p = lazReader.point; if (p.rgb[0] > 255 || p.rgb[1] > 255 || p.rgb[2] > 255) { - c.r = Tools.LUT255[(byte)(p.rgb[0] / 256f)]; - c.g = Tools.LUT255[(byte)(p.rgb[1] / 256f)]; - c.b = Tools.LUT255[(byte)(p.rgb[2] / 256f)]; + r = Tools.LUT255[(byte)(p.rgb[0] / 256f)]; + g = Tools.LUT255[(byte)(p.rgb[1] / 256f)]; + b = Tools.LUT255[(byte)(p.rgb[2] / 256f)]; } else // Values are within the 0-255 range { - c.r = Tools.LUT255[(byte)(p.rgb[0])]; - c.g = Tools.LUT255[(byte)(p.rgb[1])]; - c.b = Tools.LUT255[(byte)(p.rgb[2])]; + r = Tools.LUT255[(byte)(p.rgb[0])]; + g = Tools.LUT255[(byte)(p.rgb[1])]; + b = Tools.LUT255[(byte)(p.rgb[2])]; } - - return c; } ushort IReader.GetIntensity() @@ -419,34 +416,24 @@ byte IReader.GetClassification() return finalClassification; } - Float3 IReader.GetXYZ() + public bool GetXYZ(out float x, out float y, out float z) { - var f = new Float3(); - f.hasError = false; - - // Read point int err = lazReader.read_point(); - - // check for received errors - //var err = lazReader.get_error(); - //if (err == null) if (err != 0) { + x = y = z = 0; Console.ForegroundColor = ConsoleColor.Red; Console.WriteLine("Failed to read until end of file?"); Console.WriteLine("ErrorCode: " + err); Console.ForegroundColor = ConsoleColor.White; - f.hasError = true; + return false; } - // Get precision coordinates - var coordArray = new double[3]; - lazReader.get_coordinates(coordArray); - f.x = coordArray[0]; - f.y = coordArray[1]; - f.z = coordArray[2]; + x = (float)(lazReader.header.x_scale_factor * lazReader.point.X + lazReader.header.x_offset); + y = (float)(lazReader.header.y_scale_factor * lazReader.point.Y + lazReader.header.y_offset); + z = (float)(lazReader.header.z_scale_factor * lazReader.point.Z + lazReader.header.z_offset); - return f; + return true; } double IReader.GetTime() @@ -487,8 +474,8 @@ protected virtual void Dispose(bool disposing) //if (lazReader != null) //{ // lazReader.close_reader(); - lazReader = null; -// } + lazReader = null; + // } } } diff --git a/Readers/PLY.cs b/Readers/PLY.cs index 306a28b..6683e94 100644 --- a/Readers/PLY.cs +++ b/Readers/PLY.cs @@ -26,8 +26,8 @@ public class PLY : IReader, IDisposable //private PlyParser.PropertyData pintensity, pclass, ptime; - private Float3 currentPoint; - private Color currentColor; + //private Float3 currentPoint; + private Color currentColor = new Color(); // private double currentTime; // private byte currentIntensity; // private byte currentClassification; @@ -70,49 +70,64 @@ public bool InitReader(ImportSettings importSettings, int fileIndex) public Bounds GetBounds() => bounds; - public Float3 GetXYZ() + public bool GetXYZ(out float x, out float y, out float z) { if (currentChunkIndex >= vertexChunks.Count) - return new Float3 { hasError = true }; + { + x = y = z = 0; + return false; + } + + // return new Float3 { hasError = true }; int chunkSize = ((Array)px.Data).Length; if (currentPointInChunk >= chunkSize) { currentChunkIndex++; if (currentChunkIndex >= vertexChunks.Count) - return new Float3 { hasError = true }; + { + //return new Float3 { hasError = true }; + x = y = z = 0; + return false; + } currentPointInChunk = 0; SetCurrentChunkProperties(); } - currentPoint = new Float3 - { - x = Convert.ToSingle(px.Data.GetValue(currentPointInChunk)), - y = Convert.ToSingle(py.Data.GetValue(currentPointInChunk)), - z = Convert.ToSingle(pz.Data.GetValue(currentPointInChunk)), - hasError = false - }; - - 
currentColor = new Color
-            {
-                r = Convert.ToSingle(Convert.ToByte(pr.Data.GetValue(currentPointInChunk))) / 255f,
-                g = Convert.ToSingle(Convert.ToByte(pg.Data.GetValue(currentPointInChunk))) / 255f,
-                b = Convert.ToSingle(Convert.ToByte(pb.Data.GetValue(currentPointInChunk))) / 255f
-            };
+            //currentPoint = new Float3
+            //{
+            //    x = Convert.ToSingle(px.Data.GetValue(currentPointInChunk)),
+            //    y = Convert.ToSingle(py.Data.GetValue(currentPointInChunk)),
+            //    z = Convert.ToSingle(pz.Data.GetValue(currentPointInChunk)),
+            //    hasError = false
+            //};
+
+            x = Convert.ToSingle(px.Data.GetValue(currentPointInChunk));
+            y = Convert.ToSingle(py.Data.GetValue(currentPointInChunk));
+            z = Convert.ToSingle(pz.Data.GetValue(currentPointInChunk));
+
+            //currentColor = new Color
+            //{
+            //    r = Convert.ToSingle(Convert.ToByte(pr.Data.GetValue(currentPointInChunk))) / 255f,
+            //    g = Convert.ToSingle(Convert.ToByte(pg.Data.GetValue(currentPointInChunk))) / 255f,
+            //    b = Convert.ToSingle(Convert.ToByte(pb.Data.GetValue(currentPointInChunk))) / 255f
+            //};
+            currentColor.r = Convert.ToSingle(Convert.ToByte(pr.Data.GetValue(currentPointInChunk))) / 255f;
+            currentColor.g = Convert.ToSingle(Convert.ToByte(pg.Data.GetValue(currentPointInChunk))) / 255f;
+            currentColor.b = Convert.ToSingle(Convert.ToByte(pb.Data.GetValue(currentPointInChunk))) / 255f;
 
             currentPointInChunk++;
 
-            return currentPoint;
+            //return currentPoint;
+            return true;
         }
 
-        public Color GetRGB()
+        public void GetRGB(out float r, out float g, out float b)
         {
-            //currentColor = new Color();
-            //currentColor.r = 255;
-            //currentColor.g = 0;
-            //currentColor.b = 0;
-            return currentColor;
+            r = currentColor.r;
+            g = currentColor.g;
+            b = currentColor.b;
         }
 
         public double GetTime()

From d319d29b6cfa381b558047e5f838aa20a7a30c23 Mon Sep 17 00:00:00 2001
From: unitycoder
Date: Mon, 27 Oct 2025 20:47:14 +0200
Subject: [PATCH 08/10] improve json logger

---
 Interfaces/ILogger.cs | 11 +++++++
 MainWindow.xaml.cs    | 73 +++++++++++++++----------------------------
 2 files changed, 37 insertions(+), 47 deletions(-)

diff --git a/Interfaces/ILogger.cs b/Interfaces/ILogger.cs
index 86ebaa0..ec24430 100644
--- a/Interfaces/ILogger.cs
+++ b/Interfaces/ILogger.cs
@@ -25,6 +25,7 @@ public interface ILogger
 {
     void Write(string msg);
     void Write(string msg, LogEvent eventType);
+    void Write(ReadOnlySpan<byte> writtenSpan, LogEvent progress);
 }
 
 // Handles non-JSON (text-based) logging
@@ -41,6 +42,11 @@ public void Write(string msg, LogEvent eventType)
         // Could be expanded to handle different events in the future
         //Console.WriteLine($"{eventType}: {msg}");
     }
+
+    void ILogger.Write(ReadOnlySpan<byte> writtenSpan, LogEvent progress)
+    {
+        // not used
+    }
 }
 
 // Handles JSON-based logging
@@ -55,6 +61,11 @@ public void Write(string msg, LogEvent eventType)
     {
         Console.WriteLine(msg);
     }
+
+    void ILogger.Write(ReadOnlySpan<byte> writtenSpan, LogEvent progress)
+    {
+        Console.WriteLine(System.Text.Encoding.UTF8.GetString(writtenSpan));
+    }
 }
 
 public static class LoggerFactory
diff --git a/MainWindow.xaml.cs b/MainWindow.xaml.cs
index 758bab8..9c5751b 100644
--- a/MainWindow.xaml.cs
+++ b/MainWindow.xaml.cs
@@ -69,6 +69,7 @@ public partial class MainWindow : Window
 {
         // filter by distance
         private readonly float cellSize = 0.5f;
+        // TODO Replace ConcurrentDictionary<(int,int,int),byte> with a compact, contention-free structure
         private static ConcurrentDictionary<(int, int, int), byte> occupiedCells = new();
 
         // plugins
@@ -570,6 +571,7 @@ public class ProgressInfo
             public long MaxValue { get; internal set; } //
Maximum value for the progress public string FilePath { get; internal set; } public bool UseJsonLog { get; internal set; } + public int LastPercent = -1; } static void InitProgressBars(ImportSettings importSettings) @@ -634,57 +636,34 @@ static void ProgressTick(object sender, EventArgs e) // Update all progress bars based on the current values in the List lock (lockObject) // Lock to safely read progressInfos { - foreach (var progressInfo in progressInfos) + foreach (var info in progressInfos) { - int index = progressInfo.Index; - long currentValue = progressInfo.CurrentValue; - long maxValue = progressInfo.MaxValue; - - // Access ProgressBar directly from the StackPanel.Children using its index - if (index >= 0 && index < mainWindowStatic.ProgressBarsContainer.Children.Count) + int idx = info.Index; + long cur = info.CurrentValue; + long max = info.MaxValue <= 0 ? 1 : info.MaxValue; // avoid /0 + int percent = (int)(100L * cur / max); + + // Update UI bar + if (idx >= 0 && idx < mainWindowStatic.ProgressBarsContainer.Children.Count && + mainWindowStatic.ProgressBarsContainer.Children[idx] is ProgressBar bar) { - if (mainWindowStatic.ProgressBarsContainer.Children[index] is ProgressBar progressBar) - { - progressBar.Maximum = maxValue; - progressBar.Value = currentValue; - progressBar.Foreground = ((currentValue + 1 >= maxValue) ? Brushes.Lime : Brushes.Red); //+1 hack fix - //progressBar.ToolTip = $"Thread {index} - {currentValue} / {maxValue}"; // not visible, because modal dialog - //Log.Write("ProgressTick: " + index + " " + currentValue + " / " + maxValue); - - // print json progress - if (progressInfo.UseJsonLog) // TODO now same bool value is for each progressinfo.. - { - string jsonString = "{" + - "\"event\": \"" + LogEvent.Progress + "\"," + - "\"thread\": " + index + "," + - "\"currentPoint\": " + currentValue + "," + - "\"totalPoints\": " + maxValue + "," + - "\"percentage\": " + (int)((currentValue / (float)maxValue) * 100.0) + "," + - "\"file\": " + System.Text.Json.JsonSerializer.Serialize(progressInfo.FilePath) + - "}"; - Log.Write(jsonString, LogEvent.Progress); - } - } + bar.Maximum = max; + bar.Value = cur; + bar.Foreground = ((cur + 1 >= max) ? Brushes.Lime : Brushes.Red); } - } // foreach progressinfo - } // lock - //} - //else // finished ? 
- //{ - // Log.Write("*************** ProgressTick: progressTotalPoints is 0, finishing.."); - // mainWindowStatic.progressBarFiles.Value = 0; - // mainWindowStatic.lblStatus.Content = ""; - - // foreach (UIElement element in mainWindowStatic.ProgressBarsContainer.Children) - // { - // if (element is ProgressBar progressBar) - // { - // progressBar.Value = 0; - // progressBar.Foreground = Brushes.Lime; - // } - // } - //} + + // Emit JSON ONLY when percentage changes + if (info.UseJsonLog && percent != info.LastPercent) + { + info.LastPercent = percent; + + // Efficient JSON (no string concat) + LogExtensions.WriteProgressUtf8(Log, threadIndex: idx, current: cur, total: max, percent: percent, filePath: info.FilePath); + } // foreach progressinfo + } // lock + } }); + } // ProgressTick() From aa4f32ef1c8b1e8526960d9c65ccd5854be112f3 Mon Sep 17 00:00:00 2001 From: unitycoder Date: Fri, 31 Oct 2025 17:57:14 +0200 Subject: [PATCH 09/10] test log extensions for memory usage, #BUILD --- MainWindow.xaml.cs | 4 +- Tools/LogExtensions.cs | 93 ++++++++++++++++++++++++++++++++++++++++++ 2 files changed, 95 insertions(+), 2 deletions(-) create mode 100644 Tools/LogExtensions.cs diff --git a/MainWindow.xaml.cs b/MainWindow.xaml.cs index 9c5751b..4eb1b8d 100644 --- a/MainWindow.xaml.cs +++ b/MainWindow.xaml.cs @@ -30,7 +30,7 @@ namespace PointCloudConverter { public partial class MainWindow : Window { - static readonly string version = "27.10.2025"; + static readonly string version = "31.10.2025"; static readonly string appname = "PointCloud Converter - " + version; static readonly string rootFolder = AppDomain.CurrentDomain.BaseDirectory; @@ -899,7 +899,7 @@ static bool ParseFile(ImportSettings importSettings, int fileIndex, int? taskId, // filtering is done after scaling and offsets if (importSettings.useFilter) { - var cell = ((int)Math.Floor(px / importSettings.filterDistance), (int)Math.Floor(py / importSettings.filterDistance), (int)Math.Floor(pz / importSettings.filterDistance)); + var cell = ((int)MathF.Floor(px / importSettings.filterDistance), (int)MathF.Floor(py / importSettings.filterDistance), (int)MathF.Floor(pz / importSettings.filterDistance)); if (!occupiedCells.TryAdd(cell, 0)) { diff --git a/Tools/LogExtensions.cs b/Tools/LogExtensions.cs new file mode 100644 index 0000000..c431fc0 --- /dev/null +++ b/Tools/LogExtensions.cs @@ -0,0 +1,93 @@ +using PointCloudConverter.Logger; +using System.Buffers; +using System.Text.Json; + +public static class LogExtensions +{ + // Call this from ProgressTick + public static void WriteProgressUtf8(ILogger log, int threadIndex, long current, long total, int percent, string filePath) + { + using var writer = PooledJsonWriter.Rent(); // pooled buffer + Utf8JsonWriter + writer.WriteStartObject(); + writer.WriteString("event", "Progress"); + writer.WriteNumber("thread", threadIndex); + writer.WriteNumber("currentPoint", current); + writer.WriteNumber("totalPoints", total); + writer.WriteNumber("percentage", percent); + writer.WriteString("file", filePath); + writer.WriteEndObject(); + writer.Flush(); + + // Send raw UTF-8 to logger (you implement this; falls back to Console) + log.Write(writer.WrittenSpan, LogEvent.Progress); + } +} + +/// Small pooled IBufferWriter + Utf8JsonWriter holder. 
+internal sealed class PooledJsonWriter : IDisposable
+{
+    private static readonly ArrayPool<byte> Pool = ArrayPool<byte>.Shared;
+    private byte[] _buffer;
+    private int _written;
+    private readonly Utf8JsonWriter _json;
+
+    private PooledJsonWriter()
+    {
+        _buffer = Pool.Rent(512); // grows if needed
+        _json = new Utf8JsonWriter(new BufferWriter(this), new JsonWriterOptions { SkipValidation = true });
+    }
+
+    public static PooledJsonWriter Rent() => new PooledJsonWriter();
+
+    public void WriteStartObject() => _json.WriteStartObject();
+    public void WriteEndObject() => _json.WriteEndObject();
+    public void WriteString(string name, string value) => _json.WriteString(name, value);
+    public void WriteNumber(string name, long value) => _json.WriteNumber(name, value);
+    public void WriteNumber(string name, int value) => _json.WriteNumber(name, value);
+    public void Flush() => _json.Flush();
+
+    public ReadOnlySpan<byte> WrittenSpan => new ReadOnlySpan<byte>(_buffer, 0, _written);
+
+    public void Dispose()
+    {
+        _json.Dispose();
+        var buf = _buffer;
+        _buffer = null;
+        _written = 0;
+        if (buf != null) Pool.Return(buf);
+    }
+
+    // Minimal pooled IBufferWriter<byte>
+    private sealed class BufferWriter : IBufferWriter<byte>
+    {
+        private readonly PooledJsonWriter _owner;
+        public BufferWriter(PooledJsonWriter owner) => _owner = owner;
+
+        public void Advance(int count) => _owner._written += count;
+
+        public Memory<byte> GetMemory(int sizeHint = 0)
+        {
+            Ensure(sizeHint);
+            return _owner._buffer.AsMemory(_owner._written);
+        }
+
+        public Span<byte> GetSpan(int sizeHint = 0)
+        {
+            Ensure(sizeHint);
+            return _owner._buffer.AsSpan(_owner._written);
+        }
+
+        private void Ensure(int sizeHint)
+        {
+            if (sizeHint < 1) sizeHint = 1;
+            int need = _owner._written + sizeHint;
+            if (need <= _owner._buffer.Length) return;
+
+            int newSize = Math.Max(need, _owner._buffer.Length * 2);
+            var newBuf = Pool.Rent(newSize);
+            Buffer.BlockCopy(_owner._buffer, 0, newBuf, 0, _owner._written);
+            Pool.Return(_owner._buffer);
+            _owner._buffer = newBuf;
+        }
+    }
+}

From c56ffd707074f8e2ac639cb2472296370dffab82 Mon Sep 17 00:00:00 2001
From: mika
Date: Fri, 31 Oct 2025 18:04:46 +0200
Subject: [PATCH 10/10] #BUILD beta

---
 README.md | 1 +
 1 file changed, 1 insertion(+)

diff --git a/README.md b/README.md
index 72a3ffb..660b11f 100644
--- a/README.md
+++ b/README.md
@@ -45,3 +45,4 @@ Pull requests to improve this converter are welcome! (please create Issue first,
 ### Powered by
 [![JetBrains logo.](https://resources.jetbrains.com/storage/products/company/brand/logos/jetbrains.svg)](https://jb.gg/OpenSourceSupport)
+
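Note on the IReader change in PATCH 07: GetXYZ and GetRGB now return their values through out parameters instead of Float3/Color, so any external caller of the reader plugins has to be adjusted. The following is a minimal, hypothetical sketch of the new call pattern; the helper name ReadAllPoints and the way the values are consumed are illustrative only and are not part of the patches above.

using PointCloudConverter.Readers;

static class ReaderUsageExample
{
    // Hypothetical helper: iterates an already-initialized IReader (LAZ, PLY or E57).
    public static void ReadAllPoints(IReader reader)
    {
        long count = reader.GetPointCount();
        for (long i = 0; i < count; i++)
        {
            // Old API: Float3 p = reader.GetXYZ(); if (p.hasError) break;
            // New API: the bool return value replaces Float3.hasError.
            if (!reader.GetXYZ(out float x, out float y, out float z)) break;

            // Old API: Color c = reader.GetRGB();
            // New API: color channels come back as 0..1 floats via out parameters.
            reader.GetRGB(out float r, out float g, out float b);

            // ...hand x, y, z, r, g, b to a writer here (e.g. AddPoint).
        }
    }
}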