From 57030654010d96225eca715f7af007fbdc4e76b5 Mon Sep 17 00:00:00 2001
From: =?UTF-8?q?Arvid=20Gr=C3=A4ns?= <79848215+ArGr1@users.noreply.github.com>
Date: Fri, 28 Mar 2025 12:59:51 +0100
Subject: [PATCH 01/20] Write hierarchy db instead of export

---
 CadRevealComposer/SceneCreator.cs | 7 +++----
 1 file changed, 3 insertions(+), 4 deletions(-)

diff --git a/CadRevealComposer/SceneCreator.cs b/CadRevealComposer/SceneCreator.cs
index f206683b..5437abe7 100644
--- a/CadRevealComposer/SceneCreator.cs
+++ b/CadRevealComposer/SceneCreator.cs
@@ -9,6 +9,7 @@
 using Commons.Utils;
 using Configuration;
 using HierarchyComposer.Functions;
+using HierarchyComposer.Model;
 using Microsoft.Extensions.Logging;
 using Microsoft.Extensions.Logging.Abstractions;
 using Operations;
@@ -44,13 +45,11 @@ public static void ExportModelMetadata(DirectoryInfo outputDirectory, ModelMetad
         File.WriteAllText(metadataPath, metadataString);
     }

-    public static void ExportHierarchyDatabase(string databasePath, IReadOnlyList allNodes)
+    public static void WriteToHierarchyDatabase(string databasePath, IReadOnlyList allNodes)
     {
-        var nodes = HierarchyComposerConverter.ConvertToHierarchyNodes(allNodes);
-
         ILogger databaseLogger = NullLogger.Instance;
         var exporter = new DatabaseComposer(databaseLogger);
-        exporter.ComposeDatabase(nodes.ToList(), Path.GetFullPath(databasePath));
+        exporter.ComposeDatabase(allNodes, Path.GetFullPath(databasePath));
     }

     public static void AddPrioritizedSectorsToDatabase(

From f936f24cf501726d114e9830ab242de6a0e3a388 Mon Sep 17 00:00:00 2001
From: =?UTF-8?q?Arvid=20Gr=C3=A4ns?= <79848215+ArGr1@users.noreply.github.com>
Date: Fri, 28 Mar 2025 13:01:44 +0100
Subject: [PATCH 02/20] Convert hierarchy nodes outside of exporting task to
 lessen memory load.

---
 CadRevealComposer/CadRevealComposerRunner.cs | 30 +++++++++++++-------
 1 file changed, 19 insertions(+), 11 deletions(-)

diff --git a/CadRevealComposer/CadRevealComposerRunner.cs b/CadRevealComposer/CadRevealComposerRunner.cs
index d2bffd25..e7fccb2c 100644
--- a/CadRevealComposer/CadRevealComposerRunner.cs
+++ b/CadRevealComposer/CadRevealComposerRunner.cs
@@ -67,6 +67,8 @@ IReadOnlyList modelFormatProviders
         filtering
     );

+    GC.Collect(GC.MaxGeneration, GCCollectionMode.Aggressive, blocking: true);
+
     if (generalMetadata != null)
     {
         // Log that we added some metadata
@@ -105,19 +107,10 @@ IReadOnlyList modelFormatProviders

     filtering.PrintFilteringStatsToConsole();

-    var exportHierarchyDatabaseTask = Task.Run(() =>
-    {
-        // Exporting hierarchy on side thread to allow it to run in parallel
-        var hierarchyExportTimer = Stopwatch.StartNew();
-        var databasePath = Path.GetFullPath(Path.Join(outputDirectory.FullName, "hierarchy.db"));
-        SceneCreator.ExportHierarchyDatabase(databasePath, nodesToExport);
-        Console.WriteLine(
-            $"Exported hierarchy database to path \"{databasePath}\" in {hierarchyExportTimer.Elapsed}"
-        );
-    });
-
     geometriesToProcess = Simplify.OptimizeVertexCountInMeshes(geometriesToProcess);

+    var exportHierarchyDatabaseTask = WriteHierarchyOnSideThread(outputDirectory, nodesToExport);
+
     var geometriesToProcessArray = geometriesToProcess.ToArray();
     if (composerParameters.DevPrimitiveCacheFolder != null)
     {
@@ -145,6 +138,21 @@ IReadOnlyList modelFormatProviders
     Console.WriteLine($"Convert completed in {totalTimeElapsed.Elapsed}");
 }

+private static Task WriteHierarchyOnSideThread(DirectoryInfo outputDirectory, IReadOnlyList nodes)
+{
+    var hierarchyNodes = HierarchyComposerConverter.ConvertToHierarchyNodes(nodes);
+    GC.Collect(GC.MaxGeneration, GCCollectionMode.Aggressive, blocking: true);
+    return Task.Run(() =>
+    {
+        var hierarchyExportTimer = Stopwatch.StartNew();
+        var databasePath = Path.GetFullPath(Path.Join(outputDirectory.FullName, "hierarchy.db"));
+        SceneCreator.WriteToHierarchyDatabase(databasePath, hierarchyNodes);
+        Console.WriteLine(
+            $"Exported hierarchy database to path \"{databasePath}\" in {hierarchyExportTimer.Elapsed}"
+        );
+    });
+}
+
 public record SplitAndExportResults(List TreeIndexToSectorIdDict);

 public record TreeIndexSectorIdPair(uint TreeIndex, uint SectorId);
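[Editor's note] The memory reasoning behind PATCH 02 is worth spelling out: the conversion now happens before `Task.Run`, so the closure passed to the task captures only the converted `hierarchyNodes`, not the full source node list. A minimal sketch of the difference, with hypothetical `Source`/`Converted` record types standing in for the real node types (the real list stays reachable only if the caller also holds a reference to it):

```csharp
using System;
using System.Collections.Generic;
using System.Linq;
using System.Threading.Tasks;

record Source(int Id);    // hypothetical stand-in for the large input node type
record Converted(int Id); // hypothetical stand-in for the smaller hierarchy node type

static class ClosureCaptureSketch
{
    // Capturing the source list: the closure roots 'sources' for the task's whole lifetime.
    static Task ExportCapturingSource(IReadOnlyList<Source> sources) =>
        Task.Run(() =>
        {
            var converted = sources.Select(s => new Converted(s.Id)).ToList();
            Write(converted);
        });

    // The PATCH 02 shape: convert eagerly, capture only the converted list;
    // 'sources' can be collected while the task runs, provided no other
    // reference keeps it alive.
    static Task ExportCapturingConverted(IReadOnlyList<Source> sources)
    {
        var converted = sources.Select(s => new Converted(s.Id)).ToList();
        return Task.Run(() => Write(converted));
    }

    static void Write(List<Converted> nodes) => Console.WriteLine($"Wrote {nodes.Count} nodes");
}
```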
From 22b4fac8888bd9d4d9810cddd346d4ace55e36cf Mon Sep 17 00:00:00 2001
From: =?UTF-8?q?Arvid=20Gr=C3=A4ns?= <79848215+ArGr1@users.noreply.github.com>
Date: Fri, 28 Mar 2025 13:48:51 +0100
Subject: [PATCH 03/20] Remove unnecessary garbage collection

---
 CadRevealComposer/CadRevealComposerRunner.cs | 3 ---
 1 file changed, 3 deletions(-)

diff --git a/CadRevealComposer/CadRevealComposerRunner.cs b/CadRevealComposer/CadRevealComposerRunner.cs
index e7fccb2c..86ab1390 100644
--- a/CadRevealComposer/CadRevealComposerRunner.cs
+++ b/CadRevealComposer/CadRevealComposerRunner.cs
@@ -67,8 +67,6 @@ IReadOnlyList modelFormatProviders
         filtering
     );

-    GC.Collect(GC.MaxGeneration, GCCollectionMode.Aggressive, blocking: true);
-
     if (generalMetadata != null)
     {
         // Log that we added some metadata
@@ -141,7 +139,6 @@ IReadOnlyList modelFormatProviders
 private static Task WriteHierarchyOnSideThread(DirectoryInfo outputDirectory, IReadOnlyList nodes)
 {
     var hierarchyNodes = HierarchyComposerConverter.ConvertToHierarchyNodes(nodes);
-    GC.Collect(GC.MaxGeneration, GCCollectionMode.Aggressive, blocking: true);
     return Task.Run(() =>
     {
         var hierarchyExportTimer = Stopwatch.StartNew();

From f0f02f763247b4b2e48331241bb6eeba141b0e3c Mon Sep 17 00:00:00 2001
From: =?UTF-8?q?Arvid=20Gr=C3=A4ns?= <79848215+ArGr1@users.noreply.github.com>
Date: Wed, 2 Apr 2025 09:24:05 +0200
Subject: [PATCH 04/20] Remove writing hierarchy on side thread.

---
 CadRevealComposer/CadRevealComposerRunner.cs | 24 +++++++-------------
 1 file changed, 8 insertions(+), 16 deletions(-)

diff --git a/CadRevealComposer/CadRevealComposerRunner.cs b/CadRevealComposer/CadRevealComposerRunner.cs
index 86ab1390..9388bf47 100644
--- a/CadRevealComposer/CadRevealComposerRunner.cs
+++ b/CadRevealComposer/CadRevealComposerRunner.cs
@@ -7,7 +7,6 @@
 using System.IO;
 using System.Linq;
 using System.Text.Json;
-using System.Threading.Tasks;
 using Configuration;
 using Devtools;
 using IdProviders;
@@ -107,7 +106,7 @@ IReadOnlyList modelFormatProviders

     geometriesToProcess = Simplify.OptimizeVertexCountInMeshes(geometriesToProcess);

-    var exportHierarchyDatabaseTask = WriteHierarchyOnSideThread(outputDirectory, nodesToExport);
+    WriteHierarchy(outputDirectory, nodesToExport);

     var geometriesToProcessArray = geometriesToProcess.ToArray();
     if (composerParameters.DevPrimitiveCacheFolder != null)
@@ -124,10 +123,6 @@ IReadOnlyList modelFormatProviders
         composerParameters
     );

-    if (!exportHierarchyDatabaseTask.IsCompleted)
-        Console.WriteLine("Waiting for hierarchy export to complete...");
-    exportHierarchyDatabaseTask.Wait();
-
     WriteParametersToParamsFile(modelParameters, composerParameters, outputDirectory);

     ModifyHierarchyPostProcess(outputDirectory, splitExportResults);
@@ -136,18 +131,15 @@ IReadOnlyList modelFormatProviders
     Console.WriteLine($"Convert completed in {totalTimeElapsed.Elapsed}");
 }

-private static Task WriteHierarchyOnSideThread(DirectoryInfo outputDirectory, IReadOnlyList nodes)
+private static void WriteHierarchy(DirectoryInfo outputDirectory, IReadOnlyList nodes)
 {
     var hierarchyNodes = HierarchyComposerConverter.ConvertToHierarchyNodes(nodes);
-    return Task.Run(() =>
-    {
-        var hierarchyExportTimer = Stopwatch.StartNew();
-        var databasePath = Path.GetFullPath(Path.Join(outputDirectory.FullName, "hierarchy.db"));
-        SceneCreator.WriteToHierarchyDatabase(databasePath, hierarchyNodes);
-        Console.WriteLine(
-            $"Exported hierarchy database to path \"{databasePath}\" in {hierarchyExportTimer.Elapsed}"
-        );
-    });
+    var hierarchyExportTimer = Stopwatch.StartNew();
+    var databasePath = Path.GetFullPath(Path.Join(outputDirectory.FullName, "hierarchy.db"));
+    SceneCreator.WriteToHierarchyDatabase(databasePath, hierarchyNodes);
+    Console.WriteLine(
+        $"Exported hierarchy database to path \"{databasePath}\" in {hierarchyExportTimer.Elapsed}"
+    );
 }

From a41f141a70f4e7ceb51866c8c829d55237a9104a Mon Sep 17 00:00:00 2001
From: =?UTF-8?q?Arvid=20Gr=C3=A4ns?= <79848215+ArGr1@users.noreply.github.com>
Date: Wed, 2 Apr 2025 10:50:27 +0200
Subject: [PATCH 05/20] Debugging console writes

---
 CadRevealComposer/CadRevealComposerRunner.cs | 4 ++++
 1 file changed, 4 insertions(+)

diff --git a/CadRevealComposer/CadRevealComposerRunner.cs b/CadRevealComposer/CadRevealComposerRunner.cs
index 9388bf47..e3fa3bc2 100644
--- a/CadRevealComposer/CadRevealComposerRunner.cs
+++ b/CadRevealComposer/CadRevealComposerRunner.cs
@@ -133,9 +133,13 @@ IReadOnlyList modelFormatProviders
 private static void WriteHierarchy(DirectoryInfo outputDirectory, IReadOnlyList nodes)
 {
+    Console.WriteLine("WriteHieararchy line 136");
     var hierarchyNodes = HierarchyComposerConverter.ConvertToHierarchyNodes(nodes);
+    Console.WriteLine("WriteHieararchy line 138");
     var hierarchyExportTimer = Stopwatch.StartNew();
+    Console.WriteLine("WriteHieararchy line 140");
     var databasePath = Path.GetFullPath(Path.Join(outputDirectory.FullName, "hierarchy.db"));
+    Console.WriteLine("WriteHieararchy line 142");
     SceneCreator.WriteToHierarchyDatabase(databasePath, hierarchyNodes);
     Console.WriteLine(
         $"Exported hierarchy database to path \"{databasePath}\" in {hierarchyExportTimer.Elapsed}"

From 564362ca69d69e68e61a1a8d8d6f6f31f732075e Mon Sep 17 00:00:00 2001
From: =?UTF-8?q?Arvid=20Gr=C3%A4ns?= <79848215+ArGr1@users.noreply.github.com>
Date: Thu, 3 Apr 2025 09:38:03 +0200
Subject: [PATCH 06/20] Delete console writelines

---
 CadRevealComposer/CadRevealComposerRunner.cs | 4 ----
 1 file changed, 4 deletions(-)

diff --git a/CadRevealComposer/CadRevealComposerRunner.cs b/CadRevealComposer/CadRevealComposerRunner.cs
index e3fa3bc2..9388bf47 100644
--- a/CadRevealComposer/CadRevealComposerRunner.cs
+++ b/CadRevealComposer/CadRevealComposerRunner.cs
@@ -133,13 +133,9 @@ IReadOnlyList modelFormatProviders
 private static void WriteHierarchy(DirectoryInfo outputDirectory, IReadOnlyList nodes)
 {
-    Console.WriteLine("WriteHieararchy line 136");
     var hierarchyNodes = HierarchyComposerConverter.ConvertToHierarchyNodes(nodes);
-    Console.WriteLine("WriteHieararchy line 138");
     var hierarchyExportTimer = Stopwatch.StartNew();
-    Console.WriteLine("WriteHieararchy line 140");
     var databasePath = Path.GetFullPath(Path.Join(outputDirectory.FullName, "hierarchy.db"));
-    Console.WriteLine("WriteHieararchy line 142");
     SceneCreator.WriteToHierarchyDatabase(databasePath, hierarchyNodes);
     Console.WriteLine(
         $"Exported hierarchy database to path \"{databasePath}\" in {hierarchyExportTimer.Elapsed}"

From dcaed1f7a25cecbce5df8bdfa75e2e6813953ef4 Mon Sep 17 00:00:00 2001
From: =?UTF-8?q?Arvid=20Gr=C3=A4ns?= <79848215+ArGr1@users.noreply.github.com>
Date: Thu, 3 Apr 2025 09:38:35 +0200
Subject: [PATCH 07/20] Check memory usage and write to console

---
 .../Functions/DatabaseComposer.cs | 40 +++++++++++++++++++
 1 file changed, 40 insertions(+)

diff --git a/HierarchyComposer/Functions/DatabaseComposer.cs b/HierarchyComposer/Functions/DatabaseComposer.cs
index e4e79fb1..3616abb8 100644
--- a/HierarchyComposer/Functions/DatabaseComposer.cs
+++ b/HierarchyComposer/Functions/DatabaseComposer.cs
@@ -22,8 +22,26 @@ public DatabaseComposer(ILogger? logger = null)
     }

     // ReSharper disable once CognitiveComplexity
+
+    // Method to check and write current memory usage to the console
+    static void CheckMemoryUsage(string currentLine)
+    {
+        // Get the current process
+        Process currentProcess = Process.GetCurrentProcess();
+
+        // Get the physical memory usage (in bytes)
+        long totalBytesOfMemoryUsed = currentProcess.WorkingSet64;
+
+        // Convert to megabytes for easier reading
+        double megabytesUsed = totalBytesOfMemoryUsed / (1024.0 * 1024.0);
+
+        // Write the memory usage to the console
+        Console.WriteLine($"Memory usage (MB): {megabytesUsed:N2} at line {currentLine}");
+    }
+
     public void ComposeDatabase(IReadOnlyList inputNodes, string outputDatabaseFullPath)
     {
+        CheckMemoryUsage("44");
         if (File.Exists(outputDatabaseFullPath))
             File.Delete(outputDatabaseFullPath);
@@ -57,6 +75,8 @@ public void ComposeDatabase(IReadOnlyList inputNodes, string outp
     _logger.LogInformation("Creating database model entries");
     long pdmsEntryIdCounter = 0;

+    CheckMemoryUsage("78");
+
     var pdmsEntries = jsonPdmsKeyValuePairs
         .GroupBy(kvp => kvp.GetGroupKey())
         .ToDictionary(
@@ -74,6 +94,8 @@ public void ComposeDatabase(IReadOnlyList inputNodes, string outp
         .GroupBy(b => b.GetGroupKey())
         .ToDictionary(keySelector: g => g.Key, elementSelector: g => g.First().CopyWithNewId(++aabbIdCounter));

+    CheckMemoryUsage("97");
+
     var nodes = inputNodes
         .Select(inputNode => new Node
         {
@@ -98,6 +120,8 @@ public void ComposeDatabase(IReadOnlyList inputNodes, string outp
         })
         .ToDictionary(n => n.Id, n => n);

+    CheckMemoryUsage("123");
+
     var nodePdmsEntries = nodes.Values.Where(n => n.NodePDMSEntry != null).SelectMany(n => n.NodePDMSEntry!);

     var sqliteComposeTimer = MopTimer.Create("Populating database and building index", _logger);
@@ -105,6 +129,8 @@ public void ComposeDatabase(IReadOnlyList inputNodes, string outp
     using var connection = new SqliteConnection(connectionString);
     connection.Open();

+    CheckMemoryUsage("132");
+
     // ReSharper disable AccessToDisposedClosure
     MopTimer.RunAndMeasure(
         "Insert PDMSEntries",
@@ -120,6 +146,8 @@ public void ComposeDatabase(IReadOnlyList inputNodes, string outp
         }
     );

+    CheckMemoryUsage("149");
+
     MopTimer.RunAndMeasure(
         "Insert NodePDMSEntries",
         _logger,
@@ -134,6 +162,8 @@ public void ComposeDatabase(IReadOnlyList inputNodes, string outp
         }
     );

+    CheckMemoryUsage("165");
+
     MopTimer.RunAndMeasure(
         "Insert AABBs",
         _logger,
@@ -154,6 +184,8 @@ public void ComposeDatabase(IReadOnlyList inputNodes, string outp
         }
     );

+    CheckMemoryUsage("187");
+
     MopTimer.RunAndMeasure(
         "Insert Nodes",
         _logger,
@@ -167,6 +199,8 @@ public void ComposeDatabase(IReadOnlyList inputNodes, string outp
         }
     );

+    CheckMemoryUsage("202");
+
     MopTimer.RunAndMeasure(
         "Creating indexes",
         _logger,
@@ -188,6 +222,8 @@ public void ComposeDatabase(IReadOnlyList inputNodes, string outp
         }
     );

+    CheckMemoryUsage("225");
+
     MopTimer.RunAndMeasure(
         "Optimizing Database",
         _logger,
@@ -204,6 +240,8 @@ public void ComposeDatabase(IReadOnlyList inputNodes, string outp
         }
     );

+    CheckMemoryUsage("243");
+
     MopTimer.RunAndMeasure(
         "VACUUM Database",
         _logger,
@@ -244,6 +282,8 @@ public void ComposeDatabase(IReadOnlyList inputNodes, string outp
         }
     );

+    CheckMemoryUsage("285");
+
     // ReSharper restore AccessToDisposedClosure
     sqliteComposeTimer.LogCompletion();
 }
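[Editor's note] A note on what `CheckMemoryUsage` in PATCH 07 actually measures: `WorkingSet64` is the process's physical memory, which includes native allocations (for example SQLite's own buffers) on top of the managed heap. When the question is which C# allocations are growing, `GC.GetTotalMemory` is a useful second probe. A small sketch combining both readings, using only standard BCL calls:

```csharp
using System;
using System.Diagnostics;

static class MemoryProbe
{
    // Two complementary readings: working set (whole process, managed + native)
    // and the managed heap alone, which is what the node/PDMS dictionaries grow.
    static void Report(string label)
    {
        double workingSetMb = Process.GetCurrentProcess().WorkingSet64 / (1024.0 * 1024.0);
        // forceFullCollection: false -> a fast, approximate reading that does not trigger a GC
        double managedMb = GC.GetTotalMemory(forceFullCollection: false) / (1024.0 * 1024.0);
        Console.WriteLine($"{label}: working set {workingSetMb:N2} MB, managed heap {managedMb:N2} MB");
    }
}
```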
From c9d24c127c6758958bc4cd6121913799d4f97b54 Mon Sep 17 00:00:00 2001
From: =?UTF-8?q?Arvid=20Gr=C3=A4ns?= <79848215+ArGr1@users.noreply.github.com>
Date: Thu, 3 Apr 2025 11:09:27 +0200
Subject: [PATCH 08/20] IEnumerable instead of list

---
 HierarchyComposer/Functions/DatabaseComposer.cs | 13 ++++++-------
 HierarchyComposer/Model/Node.cs                 |  2 +-
 2 files changed, 7 insertions(+), 8 deletions(-)

diff --git a/HierarchyComposer/Functions/DatabaseComposer.cs b/HierarchyComposer/Functions/DatabaseComposer.cs
index 3616abb8..c4bf9015 100644
--- a/HierarchyComposer/Functions/DatabaseComposer.cs
+++ b/HierarchyComposer/Functions/DatabaseComposer.cs
@@ -108,13 +108,12 @@ public void ComposeDatabase(IReadOnlyList inputNodes, string outp
             HasMesh = inputNode.HasMesh,
             ParentId = inputNode.ParentId,
             TopNodeId = inputNode.TopNodeId,
-            NodePDMSEntry = inputNode
-                .PDMSData.Select(kvp => new NodePDMSEntry
-                {
-                    NodeId = inputNode.NodeId,
-                    PDMSEntryId = pdmsEntries[kvp.GetGroupKey()].Id
-                })
-                .ToList(),
+            NodePDMSEntry = inputNode.PDMSData
+                .Select(kvp => new NodePDMSEntry
+                {
+                    NodeId = inputNode.NodeId,
+                    PDMSEntryId = pdmsEntries[kvp.GetGroupKey()].Id
+                }),
             AABB = inputNode.AABB == null ? null : aabbs[inputNode.AABB.GetGroupKey()],
             DiagnosticInfo = inputNode.OptionalDiagnosticInfo
         })
diff --git a/HierarchyComposer/Model/Node.cs b/HierarchyComposer/Model/Node.cs
index 852555c4..3473a8b5 100644
--- a/HierarchyComposer/Model/Node.cs
+++ b/HierarchyComposer/Model/Node.cs
@@ -27,7 +27,7 @@ public class Node

     public uint TopNodeId { get; init; }

-    public virtual ICollection? NodePDMSEntry { get; init; } = null!;
+    public virtual IEnumerable? NodePDMSEntry { get; init; } = null!;

     public uint? AABBId { get; init; }

From 5d5ae5dc34b102fe2477e3be392454e0c626d578 Mon Sep 17 00:00:00 2001
From: =?UTF-8?q?Arvid=20Gr=C3=A4ns?= <79848215+ArGr1@users.noreply.github.com>
Date: Thu, 3 Apr 2025 11:09:39 +0200
Subject: [PATCH 09/20] Do not cast to array

---
 HierarchyComposer/Functions/DatabaseComposer.cs | 2 +-
 1 file changed, 1 insertion(+), 1 deletion(-)

diff --git a/HierarchyComposer/Functions/DatabaseComposer.cs b/HierarchyComposer/Functions/DatabaseComposer.cs
index c4bf9015..e52449c5 100644
--- a/HierarchyComposer/Functions/DatabaseComposer.cs
+++ b/HierarchyComposer/Functions/DatabaseComposer.cs
@@ -68,7 +68,7 @@ public void ComposeDatabase(IReadOnlyList inputNodes, string outp
     var jsonPdmsKeyValuePairs = MopTimer.RunAndMeasure(
         "Collecting PDMS data",
         _logger,
-        () => inputNodes.SelectMany(n => n.PDMSData).ToArray()
+        () => inputNodes.SelectMany(n => n.PDMSData)
     );

     var jsonAabbs = inputNodes.Where(jn => jn.AABB != null).Select(jn => jn.AABB!);

From ee048abd5d826b3f0bd321204dc052d3e66795ba Mon Sep 17 00:00:00 2001
From: =?UTF-8?q?Arvid=20Gr=C3=A4ns?= <79848215+ArGr1@users.noreply.github.com>
Date: Thu, 3 Apr 2025 11:11:47 +0200
Subject: [PATCH 10/20] Indent lines

---
 HierarchyComposer/Functions/DatabaseComposer.cs | 10 +++++-----
 1 file changed, 5 insertions(+), 5 deletions(-)

diff --git a/HierarchyComposer/Functions/DatabaseComposer.cs b/HierarchyComposer/Functions/DatabaseComposer.cs
index e52449c5..4df74907 100644
--- a/HierarchyComposer/Functions/DatabaseComposer.cs
+++ b/HierarchyComposer/Functions/DatabaseComposer.cs
@@ -109,11 +109,11 @@ public void ComposeDatabase(IReadOnlyList inputNodes, string outp
             ParentId = inputNode.ParentId,
             TopNodeId = inputNode.TopNodeId,
             NodePDMSEntry = inputNode.PDMSData
-                .Select(kvp => new NodePDMSEntry
-                {
-                    NodeId = inputNode.NodeId,
-                    PDMSEntryId = pdmsEntries[kvp.GetGroupKey()].Id
-                }),
+                    .Select(kvp => new NodePDMSEntry
+                    {
+                        NodeId = inputNode.NodeId,
+                        PDMSEntryId = pdmsEntries[kvp.GetGroupKey()].Id
+                    }),
             AABB = inputNode.AABB == null ? null : aabbs[inputNode.AABB.GetGroupKey()],
             DiagnosticInfo = inputNode.OptionalDiagnosticInfo
         })

From 6e94e16a67292fbc5c6f5fb2067f0e951b44f966 Mon Sep 17 00:00:00 2001
From: =?UTF-8?q?Arvid=20Gr=C3=A4ns?= <79848215+ArGr1@users.noreply.github.com>
Date: Thu, 3 Apr 2025 11:13:24 +0200
Subject: [PATCH 11/20] Formatting

---
 CadRevealComposer/CadRevealComposerRunner.cs    |  4 +---
 HierarchyComposer/Functions/DatabaseComposer.cs | 11 +++++------
 2 files changed, 6 insertions(+), 9 deletions(-)

diff --git a/CadRevealComposer/CadRevealComposerRunner.cs b/CadRevealComposer/CadRevealComposerRunner.cs
index 9388bf47..05484dcd 100644
--- a/CadRevealComposer/CadRevealComposerRunner.cs
+++ b/CadRevealComposer/CadRevealComposerRunner.cs
@@ -137,9 +137,7 @@ private static void WriteHierarchy(DirectoryInfo outputDirectory, IReadOnlyList<
     var hierarchyExportTimer = Stopwatch.StartNew();
     var databasePath = Path.GetFullPath(Path.Join(outputDirectory.FullName, "hierarchy.db"));
     SceneCreator.WriteToHierarchyDatabase(databasePath, hierarchyNodes);
-    Console.WriteLine(
-        $"Exported hierarchy database to path \"{databasePath}\" in {hierarchyExportTimer.Elapsed}"
-    );
+    Console.WriteLine($"Exported hierarchy database to path \"{databasePath}\" in {hierarchyExportTimer.Elapsed}");
 }

 public record SplitAndExportResults(List TreeIndexToSectorIdDict);

diff --git a/HierarchyComposer/Functions/DatabaseComposer.cs b/HierarchyComposer/Functions/DatabaseComposer.cs
index 4df74907..4a93f61d 100644
--- a/HierarchyComposer/Functions/DatabaseComposer.cs
+++ b/HierarchyComposer/Functions/DatabaseComposer.cs
@@ -108,12 +108,11 @@ public void ComposeDatabase(IReadOnlyList inputNodes, string outp
             HasMesh = inputNode.HasMesh,
             ParentId = inputNode.ParentId,
             TopNodeId = inputNode.TopNodeId,
-            NodePDMSEntry = inputNode.PDMSData
-                    .Select(kvp => new NodePDMSEntry
-                    {
-                        NodeId = inputNode.NodeId,
-                        PDMSEntryId = pdmsEntries[kvp.GetGroupKey()].Id
-                    }),
+            NodePDMSEntry = inputNode.PDMSData.Select(kvp => new NodePDMSEntry
+            {
+                NodeId = inputNode.NodeId,
+                PDMSEntryId = pdmsEntries[kvp.GetGroupKey()].Id
+            }),
             AABB = inputNode.AABB == null ? null : aabbs[inputNode.AABB.GetGroupKey()],
             DiagnosticInfo = inputNode.OptionalDiagnosticInfo
         })

From c3bb9297cfaaa9d84fb4a121d851810d65905b61 Mon Sep 17 00:00:00 2001
From: =?UTF-8?q?Arvid=20Gr=C3=A4ns?= <79848215+ArGr1@users.noreply.github.com>
Date: Thu, 3 Apr 2025 14:58:29 +0200
Subject: [PATCH 12/20] Really hacky batching of node dict creation.

---
 .../Functions/DatabaseComposer.cs | 101 +++++++++++++-----
 1 file changed, 73 insertions(+), 28 deletions(-)

diff --git a/HierarchyComposer/Functions/DatabaseComposer.cs b/HierarchyComposer/Functions/DatabaseComposer.cs
index 4a93f61d..cd96d16d 100644
--- a/HierarchyComposer/Functions/DatabaseComposer.cs
+++ b/HierarchyComposer/Functions/DatabaseComposer.cs
@@ -96,29 +96,74 @@ public void ComposeDatabase(IReadOnlyList inputNodes, string outp

     CheckMemoryUsage("97");

-    var nodes = inputNodes
-        .Select(inputNode => new Node
+    // Process nodes in smaller batches to reduce memory usage
+    var nodesBatchSize = 1000; // Adjust batch size as needed
+    var nodes = new Dictionary();
+    var nodeBatches = inputNodes.Chunk(nodesBatchSize);
+    int i = 0;
+    foreach (var batch in nodeBatches)
+    {
+        var batchNodes = batch
+            .Select(inputNode => new Node
+            {
+                Id = inputNode.NodeId,
+                EndId = inputNode.EndId,
+                RefNoPrefix = inputNode.RefNoPrefix,
+                RefNoDb = inputNode.RefNoDb,
+                RefNoSequence = inputNode.RefNoSequence,
+                Name = inputNode.Name,
+                HasMesh = inputNode.HasMesh,
+                ParentId = inputNode.ParentId,
+                TopNodeId = inputNode.TopNodeId,
+                NodePDMSEntry = inputNode.PDMSData.Select(kvp => new NodePDMSEntry
+                {
+                    NodeId = inputNode.NodeId,
+                    PDMSEntryId = pdmsEntries[kvp.GetGroupKey()].Id
+                }),
+                AABB = inputNode.AABB == null ? null : aabbs[inputNode.AABB.GetGroupKey()],
+                DiagnosticInfo = inputNode.OptionalDiagnosticInfo
+            })
+            .ToDictionary(n => n.Id, n => n);
+        try
         {
-            Id = inputNode.NodeId,
-            EndId = inputNode.EndId,
-            RefNoPrefix = inputNode.RefNoPrefix,
-            RefNoDb = inputNode.RefNoDb,
-            RefNoSequence = inputNode.RefNoSequence,
-            Name = inputNode.Name,
-            HasMesh = inputNode.HasMesh,
-            ParentId = inputNode.ParentId,
-            TopNodeId = inputNode.TopNodeId,
-            NodePDMSEntry = inputNode.PDMSData.Select(kvp => new NodePDMSEntry
+            foreach (var kvp in batchNodes)
             {
-                NodeId = inputNode.NodeId,
+                nodes[kvp.Key] = kvp.Value;
             }
-            }),
-            AABB = inputNode.AABB == null ? null : aabbs[inputNode.AABB.GetGroupKey()],
-            DiagnosticInfo = inputNode.OptionalDiagnosticInfo
-        })
-        .ToDictionary(n => n.Id, n => n);
+        }
+        catch (Exception)
+        {
+            throw new ArgumentException("nodes key already exists "); // TODO: DELETE THIS, only for testing /kag
+        }
+        i += 1;
+        CheckMemoryUsage($"Chunk {i} processed");
+    }
+
-    CheckMemoryUsage("123");
+    //
+    // var nodes = inputNodes
+    //     .Select(inputNode => new Node
+    //     {
+    //         Id = inputNode.NodeId,
+    //         EndId = inputNode.EndId,
+    //         RefNoPrefix = inputNode.RefNoPrefix,
+    //         RefNoDb = inputNode.RefNoDb,
+    //         RefNoSequence = inputNode.RefNoSequence,
+    //         Name = inputNode.Name,
+    //         HasMesh = inputNode.HasMesh,
+    //         ParentId = inputNode.ParentId,
+    //         TopNodeId = inputNode.TopNodeId,
+    //         NodePDMSEntry = inputNode.PDMSData.Select(kvp => new NodePDMSEntry
+    //         {
+    //             NodeId = inputNode.NodeId,
+    //             PDMSEntryId = pdmsEntries[kvp.GetGroupKey()].Id
+    //         }),
+    //         AABB = inputNode.AABB == null ? null : aabbs[inputNode.AABB.GetGroupKey()],
+    //         DiagnosticInfo = inputNode.OptionalDiagnosticInfo
+    //     })
+    //     .ToDictionary(n => n.Id, n => n);
+
+    CheckMemoryUsage("166");

     var nodePdmsEntries = nodes.Values.Where(n => n.NodePDMSEntry != null).SelectMany(n => n.NodePDMSEntry!);
@@ -127,7 +172,7 @@ public void ComposeDatabase(IReadOnlyList inputNodes, string outp
     using var connection = new SqliteConnection(connectionString);
     connection.Open();

-    CheckMemoryUsage("132");
+    CheckMemoryUsage("175");

     // ReSharper disable AccessToDisposedClosure
     MopTimer.RunAndMeasure(
@@ -144,7 +189,7 @@ public void ComposeDatabase(IReadOnlyList inputNodes, string outp
         }
     );

-    CheckMemoryUsage("149");
+    CheckMemoryUsage("192");

     MopTimer.RunAndMeasure(
         "Insert NodePDMSEntries",
@@ -160,7 +205,7 @@ public void ComposeDatabase(IReadOnlyList inputNodes, string outp
         }
     );

-    CheckMemoryUsage("165");
+    CheckMemoryUsage("208");

     MopTimer.RunAndMeasure(
         "Insert AABBs",
@@ -182,7 +227,7 @@ public void ComposeDatabase(IReadOnlyList inputNodes, string outp
         }
     );

-    CheckMemoryUsage("187");
+    CheckMemoryUsage("230");

     MopTimer.RunAndMeasure(
         "Insert Nodes",
@@ -197,7 +242,7 @@ public void ComposeDatabase(IReadOnlyList inputNodes, string outp
         }
     );

-    CheckMemoryUsage("202");
+    CheckMemoryUsage("245");

     MopTimer.RunAndMeasure(
         "Creating indexes",
@@ -220,7 +265,7 @@ public void ComposeDatabase(IReadOnlyList inputNodes, string outp
         }
     );

-    CheckMemoryUsage("225");
+    CheckMemoryUsage("268");

     MopTimer.RunAndMeasure(
         "Optimizing Database",
@@ -238,7 +283,7 @@ public void ComposeDatabase(IReadOnlyList inputNodes, string outp
         }
     );

-    CheckMemoryUsage("243");
+    CheckMemoryUsage("286");

     MopTimer.RunAndMeasure(
         "VACUUM Database",
@@ -280,7 +325,7 @@ public void ComposeDatabase(IReadOnlyList inputNodes, string outp
         }
     );

-    CheckMemoryUsage("285");
+    CheckMemoryUsage("Last line");

     // ReSharper restore AccessToDisposedClosure
     sqliteComposeTimer.LogCompletion();
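[Editor's note] The batching in PATCH 12 is built on `Enumerable.Chunk` (.NET 6+), which lazily yields arrays of at most the requested size. Note that the final `nodes` dictionary is still fully materialized; only the per-batch intermediate dictionaries become collectible between iterations. A self-contained sketch of the chunking pattern:

```csharp
using System;
using System.Collections.Generic;
using System.Linq;

static class ChunkSketch
{
    static void Main()
    {
        IEnumerable<int> ids = Enumerable.Range(1, 2500);

        // Chunk yields int[1000], int[1000], int[500] lazily: only one batch's
        // worth of projected objects is rooted at a time, which is the memory
        // effect the per-batch processing above is after.
        foreach (int[] batch in ids.Chunk(1000))
        {
            Console.WriteLine($"Processing batch of {batch.Length}");
        }
    }
}
```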
From cad236f81b1a8b65aeb0d15eac0460ebeb51b813 Mon Sep 17 00:00:00 2001
From: =?UTF-8?q?Arvid=20Gr=C3=A4ns?= <79848215+ArGr1@users.noreply.github.com>
Date: Thu, 3 Apr 2025 15:00:21 +0200
Subject: [PATCH 13/20] Linting

---
 HierarchyComposer/Functions/DatabaseComposer.cs | 1 -
 1 file changed, 1 deletion(-)

diff --git a/HierarchyComposer/Functions/DatabaseComposer.cs b/HierarchyComposer/Functions/DatabaseComposer.cs
index cd96d16d..6d5fa230 100644
--- a/HierarchyComposer/Functions/DatabaseComposer.cs
+++ b/HierarchyComposer/Functions/DatabaseComposer.cs
@@ -139,7 +139,6 @@ public void ComposeDatabase(IReadOnlyList inputNodes, string outp
         CheckMemoryUsage($"Chunk {i} processed");
     }

-
     //
     // var nodes = inputNodes
     //     .Select(inputNode => new Node

From a8ecd4ed0afebf453655ae2992d60d0fedc2f6a3 Mon Sep 17 00:00:00 2001
From: =?UTF-8?q?Arvid=20Gr=C3=A4ns?= <79848215+ArGr1@users.noreply.github.com>
Date: Mon, 7 Apr 2025 11:40:32 +0200
Subject: [PATCH 14/20] Memory improvement suggestion node dict creation

---
 .../Functions/DatabaseComposer.cs | 88 ++++++++++++-------
 HierarchyComposer/Model/Node.cs   |  2 +-
 2 files changed, 56 insertions(+), 34 deletions(-)

diff --git a/HierarchyComposer/Functions/DatabaseComposer.cs b/HierarchyComposer/Functions/DatabaseComposer.cs
index 6d5fa230..1ef3cdb9 100644
--- a/HierarchyComposer/Functions/DatabaseComposer.cs
+++ b/HierarchyComposer/Functions/DatabaseComposer.cs
@@ -96,49 +96,71 @@ public void ComposeDatabase(IReadOnlyList inputNodes, string outp

     CheckMemoryUsage("97");

-    // Process nodes in smaller batches to reduce memory usage
-    var nodesBatchSize = 1000; // Adjust batch size as needed
-    var nodes = new Dictionary();
-    var nodeBatches = inputNodes.Chunk(nodesBatchSize);
-    int i = 0;
-    foreach (var batch in nodeBatches)
+
+
+// --- Optimized Iterative Construction ---
+
+// 1. Estimate capacity if possible to reduce dictionary reallocations
+    int initialCapacity = 0;
+    var nodes = new Dictionary(initialCapacity);
+// 2. Iterate through input nodes one by one
+    foreach (var inputNode in inputNodes)
     {
-        var batchNodes = batch
-            .Select(inputNode => new Node
+        // 3. Create the main Node object
+        var newNode = new Node
+        {
+            Id = inputNode.NodeId,
+            EndId = inputNode.EndId,
+            RefNoPrefix = inputNode.RefNoPrefix,
+            RefNoDb = inputNode.RefNoDb,
+            RefNoSequence = inputNode.RefNoSequence,
+            Name = inputNode.Name,
+            HasMesh = inputNode.HasMesh,
+            ParentId = inputNode.ParentId,
+            TopNodeId = inputNode.TopNodeId,
+            // Initialize list here, potentially with capacity
+            NodePDMSEntry = null, // Initialize as null or empty list
+            // AABB lookup (same logic as before)
+            AABB = inputNode.AABB == null ? null : aabbs[inputNode.AABB.GetGroupKey()],
+            DiagnosticInfo = inputNode.OptionalDiagnosticInfo
+        };
+
+        // 4. Process NodePDMSEntry efficiently
+        if (inputNode.PDMSData.Count > 0) // Check if there's data
+        {
+            // Create the list *once* per node, with capacity if available
+            var pdmsEntryList = new List(inputNode.PDMSData.Count);
+
+            foreach (var kvp in inputNode.PDMSData)
             {
-                Id = inputNode.NodeId,
-                EndId = inputNode.EndId,
-                RefNoPrefix = inputNode.RefNoPrefix,
-                RefNoDb = inputNode.RefNoDb,
-                RefNoSequence = inputNode.RefNoSequence,
-                Name = inputNode.Name,
-                HasMesh = inputNode.HasMesh,
-                ParentId = inputNode.ParentId,
-                TopNodeId = inputNode.TopNodeId,
-                NodePDMSEntry = inputNode.PDMSData.Select(kvp => new NodePDMSEntry
+                // Perform lookup and create entry directly
+                pdmsEntryList.Add(new NodePDMSEntry
                 {
-                    NodeId = inputNode.NodeId,
+                    NodeId = inputNode.NodeId, // Use the already accessed NodeId
                     PDMSEntryId = pdmsEntries[kvp.GetGroupKey()].Id
-                }),
-                AABB = inputNode.AABB == null ? null : aabbs[inputNode.AABB.GetGroupKey()],
-                DiagnosticInfo = inputNode.OptionalDiagnosticInfo
-            })
-            .ToDictionary(n => n.Id, n => n);
-        try
-        {
-            foreach (var kvp in batchNodes)
-            {
-                nodes[kvp.Key] = kvp.Value;
+                });
             }
+            newNode.NodePDMSEntry = pdmsEntryList; // Assign the populated list
         }
-        catch (Exception)
+        else
         {
-            throw new ArgumentException("nodes key already exists "); // TODO: DELETE THIS, only for testing /kag
+            // Ensure the list is initialized if null wasn't intended, e.g.:
+            newNode.NodePDMSEntry = new List(); // Assign empty list if PDMSData is null/empty
         }
-        i += 1;
-        CheckMemoryUsage($"Chunk {i} processed");
+
+
+        // 5. Add the fully constructed node to the dictionary
+        // Use Add for potentially better performance if keys are guaranteed unique,
+        // or use the indexer nodes[newNode.Id] = newNode; if duplicates might need overwriting (though ToDictionary implies unique keys).
+        nodes.Add(newNode.Id, newNode);
     }

+// 'nodes' dictionary is now populated.
+
+
+
+
+
     //
     // var nodes = inputNodes
     //     .Select(inputNode => new Node
diff --git a/HierarchyComposer/Model/Node.cs b/HierarchyComposer/Model/Node.cs
index 3473a8b5..bb638702 100644
--- a/HierarchyComposer/Model/Node.cs
+++ b/HierarchyComposer/Model/Node.cs
@@ -27,7 +27,7 @@ public class Node

     public uint TopNodeId { get; init; }

-    public virtual IEnumerable? NodePDMSEntry { get; init; } = null!;
+    public virtual IEnumerable? NodePDMSEntry { get; set; } = null!;

     public uint? AABBId { get; init; }

From 23cdbf2b13b4fb3b57b7659bfc35830b086da2e2 Mon Sep 17 00:00:00 2001
From: =?UTF-8?q?Arvid=20Gr=C3=A4ns?= <79848215+ArGr1@users.noreply.github.com>
Date: Mon, 7 Apr 2025 11:44:16 +0200
Subject: [PATCH 15/20] Console write

---
 HierarchyComposer/Functions/DatabaseComposer.cs | 2 ++
 1 file changed, 2 insertions(+)

diff --git a/HierarchyComposer/Functions/DatabaseComposer.cs b/HierarchyComposer/Functions/DatabaseComposer.cs
index 1ef3cdb9..425df502 100644
--- a/HierarchyComposer/Functions/DatabaseComposer.cs
+++ b/HierarchyComposer/Functions/DatabaseComposer.cs
@@ -157,6 +157,8 @@ public void ComposeDatabase(IReadOnlyList inputNodes, string outp

 // 'nodes' dictionary is now populated.

+    CheckMemoryUsage("'nodes' dictionary is now populated. Line 160");
+
From a0e4bca29e90387fb3e95a372fe2dfb4de326922 Mon Sep 17 00:00:00 2001
From: =?UTF-8?q?Arvid=20Gr=C3=A4ns?= <79848215+ArGr1@users.noreply.github.com>
Date: Mon, 7 Apr 2025 15:05:15 +0200
Subject: [PATCH 16/20] Batch size test

---
 .../Functions/DatabaseComposer.cs | 552 +++++++++++-------
 1 file changed, 327 insertions(+), 225 deletions(-)

diff --git a/HierarchyComposer/Functions/DatabaseComposer.cs b/HierarchyComposer/Functions/DatabaseComposer.cs
index 425df502..f4d953f8 100644
--- a/HierarchyComposer/Functions/DatabaseComposer.cs
+++ b/HierarchyComposer/Functions/DatabaseComposer.cs
@@ -1,21 +1,26 @@
-namespace HierarchyComposer.Functions;
-
+// Keep existing using statements
 using System;
 using System.Collections.Generic;
 using System.Diagnostics;
 using System.IO;
 using System.Linq;
-using Extensions;
 using Microsoft.Data.Sqlite;
 using Microsoft.EntityFrameworkCore;
 using Microsoft.Extensions.Logging;
 using Microsoft.Extensions.Logging.Abstractions;
+
+namespace HierarchyComposer.Functions;
+
+using Extensions;
 using Model;

 public class DatabaseComposer
 {
     private readonly ILogger _logger;

+    // Define a batch size for database insertions. Tune as needed.
+    private const int DatabaseBatchSize = 5000; // Example value
+
     public DatabaseComposer(ILogger? logger = null)
     {
         _logger = logger ?? NullLogger.Instance;
@@ -23,7 +28,7 @@ public DatabaseComposer(ILogger? logger = null)

     // ReSharper disable once CognitiveComplexity

-    // Method to check and write current memory usage to the console
+    // Method to check and write current memory usage to the console (Kept from original)
     static void CheckMemoryUsage(string currentLine)
     {
         // Get the current process
@@ -41,7 +46,7 @@ static void CheckMemoryUsage(string currentLine)

     public void ComposeDatabase(IReadOnlyList inputNodes, string outputDatabaseFullPath)
     {
-        CheckMemoryUsage("44");
+        CheckMemoryUsage("Start of ComposeDatabase");
         if (File.Exists(outputDatabaseFullPath))
             File.Delete(outputDatabaseFullPath);
@@ -54,226 +59,201 @@ public void ComposeDatabase(IReadOnlyList inputNodes, string outp
         };
         var connectionString = connectionStringBuilder.ToString();

+        // Create Schema using EF Core (as before)
         var optionsBuilder = new DbContextOptionsBuilder();
         optionsBuilder.UseSqlite(connectionString);
         CreateEmptyDatabase(optionsBuilder.Options);

-        var jsonNodesWithoutPdms = inputNodes.Where(n => !n.PDMSData.Any()).ToArray();
+        // --- Preprocessing (Largely unchanged, necessary for unique IDs before Node creation) ---
+        _logger.LogInformation("Preprocessing input nodes...");
+
+        var jsonNodesWithoutPdms = inputNodes.Where(n => !n.PDMSData.Any()).ToArray(); // ToArray is needed to materialize for modification
         foreach (var jsonNode in jsonNodesWithoutPdms)
         {
             // Adding information node to reduce query complexity on the hierarchy service, so that every node has at least one PDMS value
             jsonNode.PDMSData["Info:"] = "No E3D data available for selected part.";
         }

+        CheckMemoryUsage("After adding Info PDMS data");
+
+        // Collect all key-value pairs - ** Still a potential memory peak here **
         var jsonPdmsKeyValuePairs = MopTimer.RunAndMeasure(
             "Collecting PDMS data",
             _logger,
-            () => inputNodes.SelectMany(n => n.PDMSData)
+            () => inputNodes.SelectMany(n => n.PDMSData).ToList() // ToList to avoid multiple enumerations
         );

-        var jsonAabbs = inputNodes.Where(jn => jn.AABB != null).Select(jn => jn.AABB!);
-
-        _logger.LogInformation("Creating database model entries");
-        long pdmsEntryIdCounter = 0;
-
-        CheckMemoryUsage("78");
-
-        var pdmsEntries = jsonPdmsKeyValuePairs
-            .GroupBy(kvp => kvp.GetGroupKey())
-            .ToDictionary(
-                keySelector: g => g.Key,
-                elementSelector: g => new PDMSEntry()
-                {
-                    Id = ++pdmsEntryIdCounter,
-                    Key = g.First().Key,
-                    Value = g.First().Value
-                }
-            );
-
-        var aabbIdCounter = 0;
-        var aabbs = jsonAabbs
-            .GroupBy(b => b.GetGroupKey())
-            .ToDictionary(keySelector: g => g.Key, elementSelector: g => g.First().CopyWithNewId(++aabbIdCounter));
-
-        CheckMemoryUsage("97");
+        CheckMemoryUsage("After collecting PDMS data");

+        // Collect all AABBs - ** Still a potential memory peak here **
+        var jsonAabbs = MopTimer.RunAndMeasure(
+            "Collecting AABB data",
+            _logger,
+            () => inputNodes.Where(jn => jn.AABB != null).Select(jn => jn.AABB!).ToList() // ToList to avoid multiple enumerations
+        );

+        CheckMemoryUsage("After collecting AABB data");

+        _logger.LogInformation("Deduplicating PDMS entries and AABBs...");
+        int pdmsEntryIdCounter = 0;
+        int aabbIdCounter = 0;

+        // Deduplicate PDMSEntries - ** Memory peak for the dictionary itself **
+        var pdmsEntries = MopTimer.RunAndMeasure(
+            "Grouping PDMSEntries",
+            _logger,
+            () => jsonPdmsKeyValuePairs
+                .GroupBy(kvp => kvp.GetGroupKey())
+                .ToDictionary(
+                    keySelector: g => g.Key,
+                    elementSelector: g => new PDMSEntry()
+                    {
+                        Id = ++pdmsEntryIdCounter,
+                        Key = g.First().Key,
+                        Value = g.First().Value
+                    }
+                )
+        );
+        // Hint to GC that the intermediate list might be collectible
+        jsonPdmsKeyValuePairs = null;
+        CheckMemoryUsage("After creating pdmsEntries dictionary");
+
+        // Deduplicate AABBs - ** Memory peak for the dictionary itself **
+        var aabbs = MopTimer.RunAndMeasure(
+            "Grouping AABBs",
+            _logger,
+            () => jsonAabbs
+                .GroupBy(b => b.GetGroupKey())
+                .ToDictionary(keySelector: g => g.Key, elementSelector: g => g.First().CopyWithNewId(++aabbIdCounter))
+        );
+        // Hint to GC
+        jsonAabbs = null;
+        CheckMemoryUsage("After creating aabbs dictionary");

+        _logger.LogInformation("Starting database population...");
         var sqliteComposeTimer = MopTimer.Create("Populating database and building index", _logger);

         using var connection = new SqliteConnection(connectionString);
         connection.Open();

-        CheckMemoryUsage("175");
-
-        // ReSharper disable AccessToDisposedClosure
-        MopTimer.RunAndMeasure(
-            "Insert PDMSEntries",
-            _logger,
-            () =>
-            {
-                using var transaction = connection.BeginTransaction();
-
-                using var cmd = connection.CreateCommand();
-                PDMSEntry.RawInsertBatch(cmd, pdmsEntries.Values);
-
-                transaction.Commit();
-            }
-        );
+        // --- Batch Insertions ---

-        CheckMemoryUsage("192");
-
-        MopTimer.RunAndMeasure(
-            "Insert NodePDMSEntries",
-            _logger,
-            () =>
-            {
-                using var transaction = connection.BeginTransaction();
-
-                using var cmd = connection.CreateCommand();
-                NodePDMSEntry.RawInsertBatch(cmd, nodePdmsEntries);
-
-                transaction.Commit();
-            }
-        );
+        // Batch Insert PDMSEntries
+        MopTimer.RunAndMeasure("Insert PDMSEntries", _logger, () =>
+            BatchInsertHelper(connection, pdmsEntries.Values, PDMSEntry.RawInsertBatch, DatabaseBatchSize, "PDMSEntries", _logger)
        );
+        CheckMemoryUsage("After inserting PDMSEntries");

-        CheckMemoryUsage("208");
-
-        MopTimer.RunAndMeasure(
-            "Insert AABBs",
-            _logger,
-            () =>
+        // Batch Insert AABBs (Create R-Tree table first)
+        MopTimer.RunAndMeasure("Create R-Tree Table and Insert AABBs", _logger, () =>
+        {
+            using (var transaction = connection.BeginTransaction())
+            using (var cmd = connection.CreateCommand())
             {
-                using var transaction = connection.BeginTransaction();
-                using var cmd = connection.CreateCommand();
-
-                // Manually creating a special R-Tree table to speed up queries on the AABB table, specifically
-                // finding AABBs based on a location. The sqlite rtree module auto-creates spatial indexes.
+                cmd.Transaction = transaction;
+                // Manually creating a special R-Tree table
                 cmd.CommandText =
                     "CREATE VIRTUAL TABLE AABBs USING rtree(Id, min_x, max_x, min_y, max_y, min_z, max_z)";
                 cmd.ExecuteNonQuery();
-
-                AABB.RawInsertBatch(cmd, aabbs.Values);
-
-                transaction.Commit();
+                transaction.Commit(); // Commit schema change before data insertion batching
             }
-        );
+            // Now batch insert data
+            BatchInsertHelper(connection, aabbs.Values, AABB.RawInsertBatch, DatabaseBatchSize, "AABBs", _logger);
+        });
+        CheckMemoryUsage("After inserting AABBs");
+
+        // --- Batch Process and Insert Nodes and NodePDMSEntries ---
+        _logger.LogInformation("Processing and inserting Nodes and NodePDMSEntries in batches...");
+        var nodeProcessingTimer = Stopwatch.StartNew();
+        int totalNodes = inputNodes.Count;
+        int numNodeBatches = (totalNodes + DatabaseBatchSize - 1) / DatabaseBatchSize;
+
+        for (int i = 0; i < numNodeBatches; i++)
+        {
+            var batchStartIndex = i * DatabaseBatchSize;
+            var currentBatchSize = Math.Min(DatabaseBatchSize, totalNodes - batchStartIndex);
+            // Use Skip/Take for simplicity, though List index access might be slightly faster if inputNodes is List
+            var inputNodesBatch = inputNodes.Skip(batchStartIndex).Take(currentBatchSize);

-        CheckMemoryUsage("230");
+            var nodesBatch = new List(currentBatchSize);
+            var nodePdmsEntriesBatch = new List(); // Capacity is variable, start default

-        MopTimer.RunAndMeasure(
-            "Insert Nodes",
-            _logger,
-            () =>
+            // Process the batch
+            foreach (var inputNode in inputNodesBatch)
             {
-                using var transaction = connection.BeginTransaction();
-                using var cmd = connection.CreateCommand();
-                Node.RawInsertBatch(cmd, nodes.Values);
+                // Create Node object
+                var newNode = new Node
+                {
+                    Id = inputNode.NodeId,
+                    EndId = inputNode.EndId,
+                    RefNoPrefix = inputNode.RefNoPrefix,
+                    RefNoDb = inputNode.RefNoDb,
+                    RefNoSequence = inputNode.RefNoSequence,
+                    Name = inputNode.Name,
+                    HasMesh = inputNode.HasMesh,
+                    ParentId = inputNode.ParentId,
+                    TopNodeId = inputNode.TopNodeId,
+                    // AABB lookup from the pre-calculated dictionary
+                    AABB = inputNode.AABB == null ? null : aabbs[inputNode.AABB.GetGroupKey()],
+                    DiagnosticInfo = inputNode.OptionalDiagnosticInfo
+                    // NodePDMSEntry relationship is handled via the separate table/list
+                };
+                nodesBatch.Add(newNode);
+
+                // Create corresponding NodePDMSEntry objects for this node
+                if (inputNode.PDMSData.Any())
+                {
+                    foreach (var kvp in inputNode.PDMSData)
+                    {
+                        // Lookup PDMSEntry Id from the pre-calculated dictionary
+                        if (pdmsEntries.TryGetValue(kvp.GetGroupKey(), out var pdmsEntry))
+                        {
+                            nodePdmsEntriesBatch.Add(new NodePDMSEntry
+                            {
+                                NodeId = inputNode.NodeId,
+                                PDMSEntryId = pdmsEntry.Id
+                            });
+                        }
+                        else
+                        {
+                            // Should not happen if preprocessing was correct, but log if it does
+                            _logger.LogWarning("Could not find pre-calculated PDMSEntry for Node {NodeId}, Key: {Key}, Value: {Value}",
+                                inputNode.NodeId, kvp.Key, kvp.Value);
+                        }
+                    }
+                }
+            } // End foreach inputNode in batch

-                transaction.Commit();
+            // Insert the collected batches for Nodes and NodePDMSEntries
+            if (nodesBatch.Any())
+            {
+                BatchInsertHelper(connection, nodesBatch, Node.RawInsertBatch, nodesBatch.Count, $"Nodes (Batch {i + 1}/{numNodeBatches})", _logger, isInnerBatch: true);
+            }
+            if (nodePdmsEntriesBatch.Any())
+            {
+                BatchInsertHelper(connection, nodePdmsEntriesBatch, NodePDMSEntry.RawInsertBatch, nodePdmsEntriesBatch.Count, $"NodePDMSEntries (Batch {i + 1}/{numNodeBatches})", _logger, isInnerBatch: true);
             }
-        );

-        CheckMemoryUsage("245");
+            _logger.LogDebug("Processed node batch {BatchNum}/{TotalBatches}", i + 1, numNodeBatches);
+            CheckMemoryUsage($"After processing node batch {i + 1}");
+        } // End for each batch
+
+        nodeProcessingTimer.Stop();
+        _logger.LogInformation("Finished processing and inserting {TotalNodes} Nodes and related entries in {ElapsedSeconds:0.00} seconds.", totalNodes, nodeProcessingTimer.Elapsed.TotalSeconds);
+
+
+        // --- Index Creation and Optimization (Unchanged) ---
         MopTimer.RunAndMeasure(
             "Creating indexes",
             _logger,
             () =>
             {
+                _logger.LogInformation("Creating standard indexes...");
                 using var transaction = connection.BeginTransaction();
                 using var cmd = connection.CreateCommand();
+                cmd.Transaction = transaction; // Ensure command uses the transaction
                 cmd.CommandText = "CREATE INDEX PDMSEntries_Value_index ON PDMSEntries (Value)"; // key index will just slow things down
                 cmd.ExecuteNonQuery();
                 cmd.CommandText = "CREATE INDEX PDMSEntries_Value_nocase_index ON PDMSEntries (Value collate nocase)";
@@ -285,28 +265,27 @@ public void ComposeDatabase(IReadOnlyList inputNodes, string outp
                 cmd.CommandText = "CREATE INDEX Nodes_RefNo_Index ON Nodes (RefNoPrefix, RefNoDb, RefNoSequence)";
                 cmd.ExecuteNonQuery();
                 transaction.Commit();
+                _logger.LogInformation("Standard indexes created.");
             }
         );
-
-        CheckMemoryUsage("268");
+        CheckMemoryUsage("After creating indexes");

         MopTimer.RunAndMeasure(
             "Optimizing Database",
             _logger,
             () =>
             {
-                // Run Sqlite Optimizing methods once. This may be superstition. The operations are usually quick (<1 second).
+                _logger.LogInformation("Running PRAGMA analyze/optimize...");
                 using var cmd = connection.CreateCommand();
-                // Analyze the database. Actual performance gains of this on a "fresh database" have not been checked.
+                cmd.CommandTimeout = 300; // Increase timeout for potentially long operations
                 cmd.CommandText = "pragma analyze";
                 cmd.ExecuteNonQuery();
-                // Optimize the database. Actual performance gains of this have not been checked.
                 cmd.CommandText = "pragma optimize";
                 cmd.ExecuteNonQuery();
+                _logger.LogInformation("PRAGMA analyze/optimize finished.");
             }
         );
-
-        CheckMemoryUsage("286");
+        CheckMemoryUsage("After optimizing database");

         MopTimer.RunAndMeasure(
             "VACUUM Database",
@@ -314,90 +293,213 @@ public void ComposeDatabase(IReadOnlyList inputNodes, string outp
             () =>
             {
 #if DEBUG
-                // Ignore in debug mode to run faster
+                _logger.LogInformation("Skipping VACUUM in DEBUG mode.");
                 return;
 #else
-                // Vacuum completely recreates the database but removes all "Extra Data" from it.
-                // Its a quite slow operation but might fix the "First query is super slow issue" on the hierarchy service.
+                _logger.LogInformation("Starting VACUUM...");
                 using var vacuumCmds = connection.CreateCommand();
+                vacuumCmds.CommandTimeout = 1200; // Vacuum can take a very long time, increase timeout significantly
                 vacuumCmds.CommandText = "PRAGMA page_count";
-                var pageCountBeforeVacuum = (Int64)vacuumCmds.ExecuteScalar()!;
+                Int64 pageCountBeforeVacuum = 0;
+                try { pageCountBeforeVacuum = (Int64?)vacuumCmds.ExecuteScalar() ?? 0; } catch { /* Ignore */ }
+
                 var timer = Stopwatch.StartNew();
-                // Vacuum the database. This is quite slow!
                 vacuumCmds.CommandText = "VACUUM";
                 vacuumCmds.ExecuteNonQuery();
-                vacuumCmds.CommandText = "PRAGMA page_count";
-                var pageCountAfterVacuum = (Int64)vacuumCmds.ExecuteScalar()!;
+                timer.Stop();

-                // Disable auto_vacuum explicitly as we expect no more data to be written to the database after this.
-                vacuumCmds.CommandText = "PRAGMA auto_vacuum = NONE";
-                vacuumCmds.ExecuteNonQuery();
+                Int64 pageCountAfterVacuum = 0;
+                try {
+                    vacuumCmds.CommandText = "PRAGMA page_count";
+                    pageCountAfterVacuum = (Int64?)vacuumCmds.ExecuteScalar() ?? 0;
+                } catch { /* Ignore */ }

-                // Analyze only a subset of the data when doing optimize queries.
-                // See more at: https://sqlite.org/pragma.html#pragma_analysis_limit
-                // Recommended values are between 100-1000.
-                vacuumCmds.CommandText = "PRAGMA analysis_limit = 1000";
-                vacuumCmds.ExecuteNonQuery();
-                // FUTURE: Consider if we should disable VACUUM in dev builds if its too slow, its not really needed there.
-                Console.WriteLine(
-                    $"VACUUM finished in {timer.Elapsed}. Reduced size from {pageCountBeforeVacuum} to {pageCountAfterVacuum}"
+                _logger.LogInformation(
+                    "VACUUM finished in {Elapsed}. Page count before: {PageCountBefore}, after: {PageCountAfter}",
+                    timer.Elapsed, pageCountBeforeVacuum, pageCountAfterVacuum
                 );
+
+                try {
+                    // Set other pragmas after vacuum
+                    vacuumCmds.CommandText = "PRAGMA auto_vacuum = NONE";
+                    vacuumCmds.ExecuteNonQuery();
+                    vacuumCmds.CommandText = "PRAGMA analysis_limit = 1000";
+                    vacuumCmds.ExecuteNonQuery();
+                } catch (Exception ex) {
+                    _logger.LogWarning(ex, "Failed to set PRAGMAs after VACUUM.");
+                }
 #endif
             }
         );

-        CheckMemoryUsage("Last line");
+        CheckMemoryUsage("End of ComposeDatabase");

-        // ReSharper restore AccessToDisposedClosure
         sqliteComposeTimer.LogCompletion();
     }

+    ///
+    /// Helper method to insert items in batches using a provided raw insert action.
+    ///
+    private static void BatchInsertHelper(
+        SqliteConnection connection,
+        IEnumerable allItems,
+        Action> insertMethod, // e.g., PDMSEntry.RawInsertBatch
+        int batchSize,
+        string itemNamePlural, // For logging
+        ILogger logger,
+        bool isInnerBatch = false) // Flag to adjust logging verbosity
+    {
+        if (allItems == null) return;
+
+        var timer = Stopwatch.StartNew();
+        int totalCount = 0;
+        int batchCount = 0;
+
+        var batch = new List(Math.Min(batchSize, 1024)); // Pre-allocate list reasonably
+
+        foreach (var item in allItems)
+        {
+            batch.Add(item);
+            totalCount++;
+            if (batch.Count >= batchSize)
+            {
+                InsertBatchInternal(connection, batch, insertMethod, itemNamePlural, ++batchCount, logger, isInnerBatch);
+                batch.Clear(); // Clear for the next batch
+            }
+        }
+
+        // Insert any remaining items in the last partial batch
+        if (batch.Count > 0)
+        {
+            InsertBatchInternal(connection, batch, insertMethod, itemNamePlural, ++batchCount, logger, isInnerBatch);
+        }
+        timer.Stop();
+
+        // Reduce log noise for inner batches (Nodes/NodePDMSEntries)
+        if (!isInnerBatch)
+        {
+            logger.LogInformation("Finished inserting {TotalCount} {ItemNamePlural} in {BatchCount} batches in {ElapsedSeconds:0.00} seconds.", totalCount, itemNamePlural, batchCount, timer.Elapsed.TotalSeconds);
+        } else if (timer.Elapsed.TotalSeconds > 1) // Log inner batches only if they take significant time
+        {
+            logger.LogDebug("Finished inserting {TotalCount} {ItemNamePlural} in {ElapsedSeconds:0.00} seconds.", totalCount, itemNamePlural, timer.Elapsed.TotalSeconds);
+        }
+    }
+
+    ///
+    /// Internal helper to execute the insert action for a single batch within a transaction.
+    ///
+    private static void InsertBatchInternal(
+        SqliteConnection connection,
+        List batchItems,
+        Action> insertMethod,
+        string itemNamePlural, // For logging context in case of error
+        int batchNum,
+        ILogger logger,
+        bool isInnerBatch)
+    {
+        if (batchItems.Count == 0) return;
+
+        var logLevel = isInnerBatch ? LogLevel.Trace : LogLevel.Debug; // Less verbose for inner batches
+        logger.Log(logLevel, "Inserting batch {BatchNum} of {ItemNamePlural} ({ItemCount} items)...", batchNum, itemNamePlural, batchItems.Count);
+
+        using var transaction = connection.BeginTransaction();
+        using var cmd = connection.CreateCommand();
+        cmd.Transaction = transaction; // Associate command with transaction
+        try
+        {
+            insertMethod(cmd, batchItems); // Call the specific RawInsertBatch method provided
+            transaction.Commit();
+        }
+        catch (Exception ex)
+        {
+            // Log error with batch context
+            logger.LogError(ex, "Failed to insert batch {BatchNum} for {ItemNamePlural}. Error: {Message}", batchNum, itemNamePlural, ex.Message);
+            // Rollback is implicit due to transaction dispose on exception, but rethrow to halt process
+            throw;
+        }
+    }
+
+
+    // AddTreeIndexToSectorToDatabase method remains unchanged as it uses a different pattern
     public static void AddTreeIndexToSectorToDatabase(
         IReadOnlyList<(uint TreeIndex, uint SectorId)> treeIndexToSectorId,
         DirectoryInfo outputDirectory
     )
     {
         var databasePath = Path.GetFullPath(Path.Join(outputDirectory.FullName, "hierarchy.db"));
+        // Consider adding batching here too if treeIndexToSectorId can be very large
+        const int SectorBatchSize = 50000; // Example batch size for this method
+
         using (var connection = new SqliteConnection($"Data Source={databasePath}"))
         {
             connection.Open();

-            var createTableCommand = connection.CreateCommand();
-            createTableCommand.CommandText =
-                "CREATE TABLE PrioritizedSectors (TreeIndex INTEGER NOT NULL, PrioritizedSectorId INTEGER NOT NULL, PRIMARY KEY (TreeIndex, PrioritizedSectorId)) WITHOUT ROWID; ";
-            createTableCommand.ExecuteNonQuery();
+            using (var createTableCommand = connection.CreateCommand())
+            {
+                // Use IF NOT EXISTS for resilience if method is called multiple times
+                createTableCommand.CommandText =
+                    "CREATE TABLE IF NOT EXISTS PrioritizedSectors (TreeIndex INTEGER NOT NULL, PrioritizedSectorId INTEGER NOT NULL, PRIMARY KEY (TreeIndex, PrioritizedSectorId)) WITHOUT ROWID; ";
                createTableCommand.ExecuteNonQuery();
+            }

-            var command = connection.CreateCommand();
-            command.CommandText =
-                "INSERT INTO PrioritizedSectors (TreeIndex, PrioritizedSectorId) VALUES ($TreeIndex, $PrioritizedSectorId)";
-            var treeIndexParameter = command.CreateParameter();
-            treeIndexParameter.ParameterName = "$TreeIndex";
-            var prioritizedSectorIdParameter = command.CreateParameter();
-            prioritizedSectorIdParameter.ParameterName = $"PrioritizedSectorId";
+            using (var command = connection.CreateCommand())
+            {
+                // Use parameterized query for safety and efficiency
+                command.CommandText =
+                    "INSERT OR IGNORE INTO PrioritizedSectors (TreeIndex, PrioritizedSectorId) VALUES ($TreeIndex, $PrioritizedSectorId)"; // Use INSERT OR IGNORE to handle duplicates gracefully

-            command.Parameters.AddRange([treeIndexParameter, prioritizedSectorIdParameter]);
+                var treeIndexParameter = command.CreateParameter();
+                treeIndexParameter.ParameterName = "$TreeIndex";
+                var prioritizedSectorIdParameter = command.CreateParameter();
+                prioritizedSectorIdParameter.ParameterName = "$PrioritizedSectorId"; // Corrected name

-            var transaction = connection.BeginTransaction();
-            command.Transaction = transaction;
+                command.Parameters.AddRange(new [] {treeIndexParameter, prioritizedSectorIdParameter}); // Use array initializer

-            foreach (var pair in treeIndexToSectorId.Distinct())
-            {
-                treeIndexParameter.Value = pair.TreeIndex;
-                prioritizedSectorIdParameter.Value = pair.SectorId;
-                command.ExecuteNonQuery();
-            }
+                int itemCount = 0;
+                var distinctItems = treeIndexToSectorId.Distinct(); // Process distinct items

-            transaction.Commit();
+                using (var transaction = connection.BeginTransaction())
+                {
+                    command.Transaction = transaction; // Assign transaction to command once
+
+                    foreach (var pair in distinctItems)
+                    {
+                        treeIndexParameter.Value = pair.TreeIndex;
+                        prioritizedSectorIdParameter.Value = pair.SectorId;
+                        command.ExecuteNonQuery();
+                        itemCount++;
+
+                        // Commit periodically in batches
+                        if (itemCount % SectorBatchSize == 0)
+                        {
+                            Console.WriteLine($"Committing PrioritizedSectors batch at item {itemCount}..."); // Simple progress indicator
+                            transaction.Commit();
+                            transaction.Dispose(); // Dispose old transaction
+                            var newTransaction = connection.BeginTransaction(); // Start new transaction
+                            command.Transaction = newTransaction; // Assign new transaction
+                        }
+                    }
+                    // Commit any remaining items in the final batch
+                    transaction.Commit();
+                } // Final transaction is disposed here
+            }
         }
+        Console.WriteLine($"Finished inserting/updating {treeIndexToSectorId.Count} PrioritizedSectors entries.");
     }

+    // CreateEmptyDatabase method remains unchanged
     private static void CreateEmptyDatabase(DbContextOptions options)
     {
         using var context = new HierarchyContext(options);
+        // EnsureCreated is generally fine for creating schema once.
         if (!context.Database.EnsureCreated())
-            throw new Exception($"Could not create database");
+        {
+            // Consider logging error here
+            throw new Exception($"Could not create database schema using EF Core EnsureCreated.");
+        }
     }
 }
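[Editor's note] The core pattern behind `BatchInsertHelper` in PATCH 16 is one SQLite transaction per batch with a reused, parameterized command, so the database does not commit (and fsync) per row. A minimal sketch of that pattern against a hypothetical `Entries(Id, Value)` table, using only `Microsoft.Data.Sqlite` calls that also appear in the patch (the table itself is assumed to exist):

```csharp
using System.Collections.Generic;
using Microsoft.Data.Sqlite;

static class BatchInsertSketch
{
    // The command and its parameters are created once and reused; only the
    // values change per row, and each batch commits as one transaction.
    static void InsertInBatches(SqliteConnection connection, IEnumerable<(long Id, string Value)> rows, int batchSize)
    {
        using var cmd = connection.CreateCommand();
        cmd.CommandText = "INSERT INTO Entries (Id, Value) VALUES ($id, $value)";
        var idParam = cmd.CreateParameter();
        idParam.ParameterName = "$id";
        var valueParam = cmd.CreateParameter();
        valueParam.ParameterName = "$value";
        cmd.Parameters.Add(idParam);
        cmd.Parameters.Add(valueParam);

        var transaction = connection.BeginTransaction();
        cmd.Transaction = transaction;
        var countInBatch = 0;

        foreach (var (id, value) in rows)
        {
            idParam.Value = id;
            valueParam.Value = value;
            cmd.ExecuteNonQuery();

            if (++countInBatch >= batchSize)
            {
                // Commit the current batch and start a fresh transaction.
                transaction.Commit();
                transaction.Dispose();
                transaction = connection.BeginTransaction();
                cmd.Transaction = transaction;
                countInBatch = 0;
            }
        }

        transaction.Commit(); // commit the final partial batch
        transaction.Dispose();
    }
}
```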
     enable
     enable
diff --git a/CadRevealObjProvider.Tests/CadRevealObjProvider.Tests.csproj b/CadRevealObjProvider.Tests/CadRevealObjProvider.Tests.csproj
index 0463902f..56d1829d 100644
--- a/CadRevealObjProvider.Tests/CadRevealObjProvider.Tests.csproj
+++ b/CadRevealObjProvider.Tests/CadRevealObjProvider.Tests.csproj
@@ -1,7 +1,7 @@
-    net8.0
+    net9.0
     enable
     enable
diff --git a/CadRevealObjProvider/CadRevealObjProvider.csproj b/CadRevealObjProvider/CadRevealObjProvider.csproj
index 125d6799..a6079da4 100644
--- a/CadRevealObjProvider/CadRevealObjProvider.csproj
+++ b/CadRevealObjProvider/CadRevealObjProvider.csproj
@@ -1,7 +1,7 @@
-    net8.0
+    net9.0
     enable
     enable
diff --git a/CadRevealRvmProvider.Tests/CadRevealRvmProvider.Tests.csproj b/CadRevealRvmProvider.Tests/CadRevealRvmProvider.Tests.csproj
index 20f40588..64da2c7d 100644
--- a/CadRevealRvmProvider.Tests/CadRevealRvmProvider.Tests.csproj
+++ b/CadRevealRvmProvider.Tests/CadRevealRvmProvider.Tests.csproj
@@ -1,7 +1,7 @@
-    net8.0
+    net9.0
     enable
     enable
     false
diff --git a/CadRevealRvmProvider/CadRevealRvmProvider.csproj b/CadRevealRvmProvider/CadRevealRvmProvider.csproj
index c93e72f7..338d92d1 100644
--- a/CadRevealRvmProvider/CadRevealRvmProvider.csproj
+++ b/CadRevealRvmProvider/CadRevealRvmProvider.csproj
@@ -1,7 +1,7 @@
-    net8.0
+    net9.0
     enable
     enable
diff --git a/Commons.Tests/Commons.Tests.csproj b/Commons.Tests/Commons.Tests.csproj
index 66b0bbb0..d165681c 100644
--- a/Commons.Tests/Commons.Tests.csproj
+++ b/Commons.Tests/Commons.Tests.csproj
@@ -1,7 +1,7 @@
-    net8.0
+    net9.0
     enable
     enable
diff --git a/Commons/Commons.csproj b/Commons/Commons.csproj
index 61647a3e..857214d3 100644
--- a/Commons/Commons.csproj
+++ b/Commons/Commons.csproj
@@ -2,7 +2,7 @@
     Library
-    net8.0;netstandard2.1
+    net9.0;netstandard2.1
     enable
     nullable;
     latest
diff --git a/HierarchyComposer.Tests/HierarchyComposer.Tests.csproj b/HierarchyComposer.Tests/HierarchyComposer.Tests.csproj
index 557da474..eed3cab1 100644
--- a/HierarchyComposer.Tests/HierarchyComposer.Tests.csproj
+++ b/HierarchyComposer.Tests/HierarchyComposer.Tests.csproj
@@ -1,7 +1,7 @@
-    net8.0
+    net9.0
     false
diff --git a/HierarchyComposer/HierarchyComposer.csproj b/HierarchyComposer/HierarchyComposer.csproj
index afe019af..8fbce302 100644
--- a/HierarchyComposer/HierarchyComposer.csproj
+++ b/HierarchyComposer/HierarchyComposer.csproj
@@ -1,7 +1,7 @@
-    net8.0
+    net9.0
     enable
diff --git a/RvmSharp.Exe/RvmSharp.Exe.csproj b/RvmSharp.Exe/RvmSharp.Exe.csproj
index 86617ebe..fe7ad092 100644
--- a/RvmSharp.Exe/RvmSharp.Exe.csproj
+++ b/RvmSharp.Exe/RvmSharp.Exe.csproj
@@ -2,7 +2,7 @@
     Exe
-    net8.0
+    net9.0
     enable
     nullable;
     MIT
diff --git a/RvmSharp.Tests/RvmSharp.Tests.csproj b/RvmSharp.Tests/RvmSharp.Tests.csproj
index 65f09ac8..1873db25 100644
--- a/RvmSharp.Tests/RvmSharp.Tests.csproj
+++ b/RvmSharp.Tests/RvmSharp.Tests.csproj
@@ -1,7 +1,7 @@
-    net8.0
+    net9.0
     false
diff --git a/RvmSharp/RvmSharp.csproj b/RvmSharp/RvmSharp.csproj
index 0864f83c..de2dda15 100644
--- a/RvmSharp/RvmSharp.csproj
+++ b/RvmSharp/RvmSharp.csproj
@@ -2,15 +2,15 @@
     Library
-    net8.0;netstandard2.1
+    net9.0;netstandard2.1
     enable
     nullable;
     latest
-
+
     true
     true
     snupkg
-
+
     true
@@ -28,7 +28,7 @@
     true
     RvmSharp is a fast AVEVA Rvm parser and converter, it can read .RVM files and attribute files and convert these files to meshes.
-
+

From 35ac28afc69f6549deb9137fedb7bdcbd4bcff93 Mon Sep 17 00:00:00 2001
From: =?UTF-8?q?Arvid=20Gr=C3=A4ns?= <79848215+ArGr1@users.noreply.github.com>
Date: Tue, 8 Apr 2025 09:43:13 +0200
Subject: [PATCH 18/20] do not use string interning in RvmProvider

---
 CadRevealRvmProvider/RvmProvider.cs | 2 +-
 1 file changed, 1 insertion(+), 1 deletion(-)

diff --git a/CadRevealRvmProvider/RvmProvider.cs b/CadRevealRvmProvider/RvmProvider.cs
index 47c71a7d..9f482693 100644
--- a/CadRevealRvmProvider/RvmProvider.cs
+++ b/CadRevealRvmProvider/RvmProvider.cs
@@ -40,7 +40,7 @@ NodeNameFiltering nodeNameFiltering
         });

         var stringInternPool = new BenStringInternPool(new SharedInternPool());
-        var rvmStore = RvmWorkload.ReadRvmFiles(workload, progressReport, stringInternPool);
+        var rvmStore = RvmWorkload.ReadRvmFiles(workload, progressReport, null);

         teamCityReadRvmFilesLogBlock.CloseBlock();

From 6773921ecf581267d859b2c051557e97e29770be Mon Sep 17 00:00:00 2001
From: =?UTF-8?q?Arvid=20Gr=C3=A4ns?= <79848215+ArGr1@users.noreply.github.com>
Date: Tue, 8 Apr 2025 10:15:24 +0200
Subject: [PATCH 19/20] writing len of different datas to console

---
 HierarchyComposer/Functions/DatabaseComposer.cs | 3 +++
 1 file changed, 3 insertions(+)

diff --git a/HierarchyComposer/Functions/DatabaseComposer.cs b/HierarchyComposer/Functions/DatabaseComposer.cs
index f4d953f8..5a6432ba 100644
--- a/HierarchyComposer/Functions/DatabaseComposer.cs
+++ b/HierarchyComposer/Functions/DatabaseComposer.cs
@@ -94,6 +94,9 @@ public void ComposeDatabase(IReadOnlyList inputNodes, string outp

         CheckMemoryUsage("After collecting AABB data");

+        // Write the sizes of the input node, PDMS, and AABB collections to the console
+        Console.WriteLine($"Input Nodes Count: {inputNodes.Count}, PDMS Data Count: {jsonPdmsKeyValuePairs.Count}, AABB Data Count: {jsonAabbs.Count}");
+
         _logger.LogInformation("Deduplicating PDMS entries and AABBs...");
         int pdmsEntryIdCounter = 0;
         int aabbIdCounter = 0;

From 60e6df95c18ef7a07dc0658ffc6b4dd32e68b8c4 Mon Sep 17 00:00:00 2001
From: =?UTF-8?q?Arvid=20Gr=C3=A4ns?= <79848215+ArGr1@users.noreply.github.com>
Date: Wed, 9 Apr 2025 15:43:59 +0200
Subject: [PATCH 20/20] Enable logging of DatabaseComposer

---
 CadRevealComposer/SceneCreator.cs | 7 ++++++-
 1 file changed, 6 insertions(+), 1 deletion(-)

diff --git a/CadRevealComposer/SceneCreator.cs b/CadRevealComposer/SceneCreator.cs
index 5437abe7..31fbba2f 100644
--- a/CadRevealComposer/SceneCreator.cs
+++ b/CadRevealComposer/SceneCreator.cs
@@ -47,7 +47,12 @@ public static void ExportModelMetadata(DirectoryInfo outputDirectory, ModelMetad

     public static void WriteToHierarchyDatabase(string databasePath, IReadOnlyList allNodes)
     {
-        ILogger databaseLogger = NullLogger.Instance;
+        ILogger databaseLogger = LoggerFactory.Create(builder =>
+        {
+            builder.AddConsole(); // A provider is required for any output; assumes Microsoft.Extensions.Logging.Console is referenced
+            builder.SetMinimumLevel(LogLevel.Debug); // Set the desired log level
+        }).CreateLogger();
+        // ILogger databaseLogger = NullLogger.Instance;
         var exporter = new DatabaseComposer(databaseLogger);
         exporter.ComposeDatabase(allNodes, Path.GetFullPath(databasePath));
     }
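For reference, the logger wiring that PATCH 20 intends can be expressed as a minimal standalone sketch. It assumes the Microsoft.Extensions.Logging and Microsoft.Extensions.Logging.Console packages are referenced; the category name "DatabaseComposer" and the log message are illustrative.

    using Microsoft.Extensions.Logging;

    // Build a logger factory with a console provider. SetMinimumLevel alone
    // produces no output; at least one provider (here, console) must be added.
    using var loggerFactory = LoggerFactory.Create(builder =>
    {
        builder.AddConsole();
        builder.SetMinimumLevel(LogLevel.Debug);
    });

    ILogger databaseLogger = loggerFactory.CreateLogger("DatabaseComposer");
    databaseLogger.LogDebug("Composing hierarchy database...");

Disposing the factory (via the using declaration) flushes any buffered console output before the process exits, which matters for short-lived command-line tools like the composer.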