diff --git a/CadRevealComposer/CadRevealComposerRunner.cs b/CadRevealComposer/CadRevealComposerRunner.cs
index d2bffd25..05484dcd 100644
--- a/CadRevealComposer/CadRevealComposerRunner.cs
+++ b/CadRevealComposer/CadRevealComposerRunner.cs
@@ -7,7 +7,6 @@
 using System.IO;
 using System.Linq;
 using System.Text.Json;
-using System.Threading.Tasks;
 using Configuration;
 using Devtools;
 using IdProviders;
@@ -105,19 +104,10 @@ IReadOnlyList modelFormatProviders

         filtering.PrintFilteringStatsToConsole();

-        var exportHierarchyDatabaseTask = Task.Run(() =>
-        {
-            // Exporting hierarchy on side thread to allow it to run in parallel
-            var hierarchyExportTimer = Stopwatch.StartNew();
-            var databasePath = Path.GetFullPath(Path.Join(outputDirectory.FullName, "hierarchy.db"));
-            SceneCreator.ExportHierarchyDatabase(databasePath, nodesToExport);
-            Console.WriteLine(
-                $"Exported hierarchy database to path \"{databasePath}\" in {hierarchyExportTimer.Elapsed}"
-            );
-        });
-
         geometriesToProcess = Simplify.OptimizeVertexCountInMeshes(geometriesToProcess);

+        WriteHierarchy(outputDirectory, nodesToExport);
+
         var geometriesToProcessArray = geometriesToProcess.ToArray();
         if (composerParameters.DevPrimitiveCacheFolder != null)
         {
@@ -133,10 +123,6 @@ IReadOnlyList modelFormatProviders
             composerParameters
         );

-        if (!exportHierarchyDatabaseTask.IsCompleted)
-            Console.WriteLine("Waiting for hierarchy export to complete...");
-        exportHierarchyDatabaseTask.Wait();
-
         WriteParametersToParamsFile(modelParameters, composerParameters, outputDirectory);

         ModifyHierarchyPostProcess(outputDirectory, splitExportResults);
@@ -145,6 +131,15 @@ IReadOnlyList modelFormatProviders
         Console.WriteLine($"Convert completed in {totalTimeElapsed.Elapsed}");
     }

+    private static void WriteHierarchy(DirectoryInfo outputDirectory, IReadOnlyList<CadRevealNode> nodes)
+    {
+        var hierarchyNodes = HierarchyComposerConverter.ConvertToHierarchyNodes(nodes);
+        var hierarchyExportTimer = Stopwatch.StartNew();
+        var databasePath = Path.GetFullPath(Path.Join(outputDirectory.FullName, "hierarchy.db"));
+        SceneCreator.WriteToHierarchyDatabase(databasePath, hierarchyNodes);
+        Console.WriteLine($"Exported hierarchy database to path \"{databasePath}\" in {hierarchyExportTimer.Elapsed}");
+    }
+
     public record SplitAndExportResults(List<TreeIndexSectorIdPair> TreeIndexToSectorIdDict);

     public record TreeIndexSectorIdPair(uint TreeIndex, uint SectorId);
diff --git a/CadRevealComposer/SceneCreator.cs b/CadRevealComposer/SceneCreator.cs
index f206683b..5437abe7 100644
--- a/CadRevealComposer/SceneCreator.cs
+++ b/CadRevealComposer/SceneCreator.cs
@@ -9,6 +9,7 @@
 using Commons.Utils;
 using Configuration;
 using HierarchyComposer.Functions;
+using HierarchyComposer.Model;
 using Microsoft.Extensions.Logging;
 using Microsoft.Extensions.Logging.Abstractions;
 using Operations;
@@ -44,13 +45,11 @@ public static void ExportModelMetadata(DirectoryInfo outputDirectory, ModelMetad
         File.WriteAllText(metadataPath, metadataString);
     }

-    public static void ExportHierarchyDatabase(string databasePath, IReadOnlyList<CadRevealNode> allNodes)
+    public static void WriteToHierarchyDatabase(string databasePath, IReadOnlyList<HierarchyNode> allNodes)
     {
-        var nodes = HierarchyComposerConverter.ConvertToHierarchyNodes(allNodes);
-
         ILogger databaseLogger = NullLogger.Instance;
         var exporter = new DatabaseComposer(databaseLogger);
-        exporter.ComposeDatabase(nodes.ToList(), Path.GetFullPath(databasePath));
+        exporter.ComposeDatabase(allNodes, Path.GetFullPath(databasePath));
     }

     public static void AddPrioritizedSectorsToDatabase(
diff --git a/HierarchyComposer/Functions/DatabaseComposer.cs b/HierarchyComposer/Functions/DatabaseComposer.cs
index e4e79fb1..6d5fa230 100644
--- a/HierarchyComposer/Functions/DatabaseComposer.cs
+++ b/HierarchyComposer/Functions/DatabaseComposer.cs
@@ -22,8 +22,26 @@ public DatabaseComposer(ILogger? logger = null)
     }

     // ReSharper disable once CognitiveComplexity
+
+    // Method to check and write current memory usage to the console
+    static void CheckMemoryUsage(string currentLine)
+    {
+        // Get the current process
+        Process currentProcess = Process.GetCurrentProcess();
+
+        // Get the physical memory usage (in bytes)
+        long totalBytesOfMemoryUsed = currentProcess.WorkingSet64;
+
+        // Convert to megabytes for easier reading
+        double megabytesUsed = totalBytesOfMemoryUsed / (1024.0 * 1024.0);
+
+        // Write the memory usage to the console
+        Console.WriteLine($"Memory usage (MB): {megabytesUsed:N2} at line {currentLine}");
+    }
+
     public void ComposeDatabase(IReadOnlyList<HierarchyNode> inputNodes, string outputDatabaseFullPath)
     {
+        CheckMemoryUsage("44");
         if (File.Exists(outputDatabaseFullPath))
             File.Delete(outputDatabaseFullPath);
@@ -50,13 +68,15 @@ public void ComposeDatabase(IReadOnlyList inputNodes, string outp
         var jsonPdmsKeyValuePairs = MopTimer.RunAndMeasure(
             "Collecting PDMS data",
             _logger,
-            () => inputNodes.SelectMany(n => n.PDMSData).ToArray()
+            () => inputNodes.SelectMany(n => n.PDMSData)
         );

         var jsonAabbs = inputNodes.Where(jn => jn.AABB != null).Select(jn => jn.AABB!);

         _logger.LogInformation("Creating database model entries");
         long pdmsEntryIdCounter = 0;
+        CheckMemoryUsage("78");
+
         var pdmsEntries = jsonPdmsKeyValuePairs
             .GroupBy(kvp => kvp.GetGroupKey())
             .ToDictionary(
@@ -74,29 +94,75 @@ public void ComposeDatabase(IReadOnlyList inputNodes, string outp
             .GroupBy(b => b.GetGroupKey())
             .ToDictionary(keySelector: g => g.Key, elementSelector: g => g.First().CopyWithNewId(++aabbIdCounter));

-        var nodes = inputNodes
-            .Select(inputNode => new Node
-            {
-                Id = inputNode.NodeId,
-                EndId = inputNode.EndId,
-                RefNoPrefix = inputNode.RefNoPrefix,
-                RefNoDb = inputNode.RefNoDb,
-                RefNoSequence = inputNode.RefNoSequence,
-                Name = inputNode.Name,
-                HasMesh = inputNode.HasMesh,
-                ParentId = inputNode.ParentId,
-                TopNodeId = inputNode.TopNodeId,
-                NodePDMSEntry = inputNode
-                    .PDMSData.Select(kvp => new NodePDMSEntry
+        CheckMemoryUsage("97");
+
+        // Process nodes in smaller batches to reduce memory usage
+        var nodesBatchSize = 1000; // Adjust batch size as needed
+        var nodes = new Dictionary<uint, Node>();
+        var nodeBatches = inputNodes.Chunk(nodesBatchSize);
+        int i = 0;
+        foreach (var batch in nodeBatches)
+        {
+            var batchNodes = batch
+                .Select(inputNode => new Node
+                {
+                    Id = inputNode.NodeId,
+                    EndId = inputNode.EndId,
+                    RefNoPrefix = inputNode.RefNoPrefix,
+                    RefNoDb = inputNode.RefNoDb,
+                    RefNoSequence = inputNode.RefNoSequence,
+                    Name = inputNode.Name,
+                    HasMesh = inputNode.HasMesh,
+                    ParentId = inputNode.ParentId,
+                    TopNodeId = inputNode.TopNodeId,
+                    NodePDMSEntry = inputNode.PDMSData.Select(kvp => new NodePDMSEntry
                     {
                         NodeId = inputNode.NodeId,
                         PDMSEntryId = pdmsEntries[kvp.GetGroupKey()].Id
-                    })
-                    .ToList(),
-                AABB = inputNode.AABB == null ? null : aabbs[inputNode.AABB.GetGroupKey()],
-                DiagnosticInfo = inputNode.OptionalDiagnosticInfo
-            })
-            .ToDictionary(n => n.Id, n => n);
+                    }),
+                    AABB = inputNode.AABB == null ? null : aabbs[inputNode.AABB.GetGroupKey()],
+                    DiagnosticInfo = inputNode.OptionalDiagnosticInfo
+                })
+                .ToDictionary(n => n.Id, n => n);
+            try
+            {
+                foreach (var kvp in batchNodes)
+                {
+                    nodes[kvp.Key] = kvp.Value;
+                }
+            }
+            catch (Exception)
+            {
+                throw new ArgumentException("nodes key already exists "); // TODO: DELETE THIS, only for testing /kag
+            }
+            i += 1;
+            CheckMemoryUsage($"Chunk {i} processed");
+        }
+
+        //
+        // var nodes = inputNodes
+        //     .Select(inputNode => new Node
+        //     {
+        //         Id = inputNode.NodeId,
+        //         EndId = inputNode.EndId,
+        //         RefNoPrefix = inputNode.RefNoPrefix,
+        //         RefNoDb = inputNode.RefNoDb,
+        //         RefNoSequence = inputNode.RefNoSequence,
+        //         Name = inputNode.Name,
+        //         HasMesh = inputNode.HasMesh,
+        //         ParentId = inputNode.ParentId,
+        //         TopNodeId = inputNode.TopNodeId,
+        //         NodePDMSEntry = inputNode.PDMSData.Select(kvp => new NodePDMSEntry
+        //         {
+        //             NodeId = inputNode.NodeId,
+        //             PDMSEntryId = pdmsEntries[kvp.GetGroupKey()].Id
+        //         }),
+        //         AABB = inputNode.AABB == null ? null : aabbs[inputNode.AABB.GetGroupKey()],
+        //         DiagnosticInfo = inputNode.OptionalDiagnosticInfo
+        //     })
+        //     .ToDictionary(n => n.Id, n => n);
+
+        CheckMemoryUsage("166");

         var nodePdmsEntries = nodes.Values.Where(n => n.NodePDMSEntry != null).SelectMany(n => n.NodePDMSEntry!);

@@ -105,6 +171,8 @@ public void ComposeDatabase(IReadOnlyList inputNodes, string outp
         using var connection = new SqliteConnection(connectionString);
         connection.Open();

+        CheckMemoryUsage("175");
+
         // ReSharper disable AccessToDisposedClosure
         MopTimer.RunAndMeasure(
             "Insert PDMSEntries",
             _logger,
@@ -120,6 +188,8 @@ public void ComposeDatabase(IReadOnlyList inputNodes, string outp
             }
         );

+        CheckMemoryUsage("192");
+
         MopTimer.RunAndMeasure(
             "Insert NodePDMSEntries",
             _logger,
@@ -134,6 +204,8 @@ public void ComposeDatabase(IReadOnlyList inputNodes, string outp
             }
         );

+        CheckMemoryUsage("208");
+
         MopTimer.RunAndMeasure(
             "Insert AABBs",
             _logger,
@@ -154,6 +226,8 @@ public void ComposeDatabase(IReadOnlyList inputNodes, string outp
             }
         );

+        CheckMemoryUsage("230");
+
         MopTimer.RunAndMeasure(
             "Insert Nodes",
             _logger,
@@ -167,6 +241,8 @@ public void ComposeDatabase(IReadOnlyList inputNodes, string outp
             }
         );

+        CheckMemoryUsage("245");
+
         MopTimer.RunAndMeasure(
             "Creating indexes",
             _logger,
@@ -188,6 +264,8 @@ public void ComposeDatabase(IReadOnlyList inputNodes, string outp
             }
         );

+        CheckMemoryUsage("268");
+
         MopTimer.RunAndMeasure(
             "Optimizing Database",
             _logger,
@@ -204,6 +282,8 @@ public void ComposeDatabase(IReadOnlyList inputNodes, string outp
             }
         );

+        CheckMemoryUsage("286");
+
         MopTimer.RunAndMeasure(
             "VACUUM Database",
             _logger,
@@ -244,6 +324,8 @@ public void ComposeDatabase(IReadOnlyList inputNodes, string outp
             }
         );

+        CheckMemoryUsage("Last line");
+
         // ReSharper restore AccessToDisposedClosure
         sqliteComposeTimer.LogCompletion();
     }
diff --git a/HierarchyComposer/Model/Node.cs b/HierarchyComposer/Model/Node.cs
index 852555c4..3473a8b5 100644
--- a/HierarchyComposer/Model/Node.cs
+++ b/HierarchyComposer/Model/Node.cs
@@ -27,7 +27,7 @@ public class Node

     public uint TopNodeId { get; init; }

-    public virtual ICollection<NodePDMSEntry>? NodePDMSEntry { get; init; } = null!;
+    public virtual IEnumerable<NodePDMSEntry>? NodePDMSEntry { get; init; } = null!;

     public uint? AABBId { get; init; }