diff --git a/Daqifi.Desktop/Exporter/LoggingSessionSampleSource.cs b/Daqifi.Desktop/Exporter/LoggingSessionSampleSource.cs
new file mode 100644
index 00000000..1e6235dc
--- /dev/null
+++ b/Daqifi.Desktop/Exporter/LoggingSessionSampleSource.cs
@@ -0,0 +1,167 @@
+using System.Runtime.CompilerServices;
+using Daqifi.Core.Logging.Export;
+using Daqifi.Desktop.Channel;
+using Daqifi.Desktop.Logger;
+using Microsoft.EntityFrameworkCore;
+
+namespace Daqifi.Desktop.Exporter;
+
+/// <summary>
+/// Adapts a desktop <see cref="LoggingSession"/> to the core <see cref="ISampleSource"/>
+/// seam consumed by <see cref="CsvExporter"/>.
+/// Supports two modes: an EF Core path backed by <see cref="IDbContextFactory{TContext}"/>
+/// (production) and an in-memory path over a pre-populated <see cref="DataSample"/>
+/// collection (used by tests).
+/// </summary>
+public sealed class LoggingSessionSampleSource : ISampleSource
+{
+ #region Private Fields
+ private readonly LoggingSession _session;
+    private readonly IDbContextFactory<LoggingContext> _contextFactory;
+    private readonly ICollection<DataSample> _inMemorySamples;
+    private IReadOnlyList<ChannelDescriptor> _channelsCache;
+ private int? _countCache;
+ #endregion
+
+ #region Constructors
+    /// <summary>
+    /// Creates a sample source that reads from the persisted EF Core store.
+    /// </summary>
+    /// <param name="session">The session whose samples should be exported.</param>
+    /// <param name="contextFactory">Factory that produces short-lived <see cref="LoggingContext"/>s.</param>
+    public LoggingSessionSampleSource(LoggingSession session, IDbContextFactory<LoggingContext> contextFactory)
+ {
+ _session = session ?? throw new ArgumentNullException(nameof(session));
+ _contextFactory = contextFactory ?? throw new ArgumentNullException(nameof(contextFactory));
+ }
+
+    /// <summary>
+    /// Creates a sample source backed by a pre-populated in-memory sample collection.
+    /// Used by tests that don't have a real database available.
+    /// </summary>
+    /// <param name="session">The session the samples belong to (only <see cref="LoggingSession.ID"/> is read).</param>
+    /// <param name="inMemorySamples">Sample rows to enumerate.</param>
+    public LoggingSessionSampleSource(LoggingSession session, ICollection<DataSample> inMemorySamples)
+ {
+ _session = session ?? throw new ArgumentNullException(nameof(session));
+ _inMemorySamples = inMemorySamples ?? throw new ArgumentNullException(nameof(inMemorySamples));
+ }
+ #endregion
+
+ #region ISampleSource
+    /// <summary>
+    /// Returns the ordered set of channels present in this session. Channels are deduped by
+    /// (DeviceName, DeviceSerialNo, ChannelName); the <see cref="ChannelDescriptor.Type"/>
+    /// is taken from the first observed sample for that channel. Both the in-memory and DB paths use
+    /// the same dedup logic so the resulting descriptor sets match.
+    /// </summary>
+    public IReadOnlyList<ChannelDescriptor> GetChannels()
+ {
+ if (_channelsCache != null)
+ {
+ return _channelsCache;
+ }
+
+ if (_inMemorySamples != null)
+ {
+ _channelsCache = _inMemorySamples
+ .GroupBy(s => new { s.DeviceName, s.DeviceSerialNo, s.ChannelName })
+ .Select(g => new ChannelDescriptor(
+ g.Key.DeviceName,
+ g.Key.DeviceSerialNo,
+ g.Key.ChannelName,
+ g.First().Type))
+ .OrderBy(c => c.DeviceName)
+ .ThenBy(c => c.DeviceSerialNo)
+ .ThenBy(c => c.ChannelName)
+ .ToList();
+ return _channelsCache;
+ }
+
+ using var context = _contextFactory.CreateDbContext();
+ context.ChangeTracker.AutoDetectChangesEnabled = false;
+
+ _channelsCache = context.Samples
+ .AsNoTracking()
+ .Where(s => s.LoggingSessionID == _session.ID)
+ .GroupBy(s => new { s.DeviceName, s.DeviceSerialNo, s.ChannelName })
+ .Select(g => new
+ {
+ g.Key.DeviceName,
+ g.Key.DeviceSerialNo,
+ g.Key.ChannelName,
+ Type = g.Min(s => s.Type),
+ })
+ .OrderBy(s => s.DeviceName)
+ .ThenBy(s => s.DeviceSerialNo)
+ .ThenBy(s => s.ChannelName)
+ .AsEnumerable()
+ .Select(s => new ChannelDescriptor(s.DeviceName, s.DeviceSerialNo, s.ChannelName, s.Type))
+ .ToList();
+ return _channelsCache;
+ }
+
+    /// <summary>
+    /// Returns the total sample count for this session. Used by core's
+    /// <see cref="CsvExporter"/> to drive progress reporting. Honors <paramref name="cancellationToken"/> on the DB path.
+    /// </summary>
+    public async ValueTask<int> GetSampleCountAsync(CancellationToken cancellationToken = default)
+ {
+ if (_countCache.HasValue)
+ {
+ return _countCache.Value;
+ }
+
+ if (_inMemorySamples != null)
+ {
+ _countCache = _inMemorySamples.Count;
+ return _countCache.Value;
+ }
+
+ await using var context = _contextFactory.CreateDbContext();
+ context.ChangeTracker.AutoDetectChangesEnabled = false;
+ _countCache = await context.Samples
+ .AsNoTracking()
+ .CountAsync(s => s.LoggingSessionID == _session.ID, cancellationToken)
+ .ConfigureAwait(false);
+ return _countCache.Value;
+ }
+
+    /// <summary>
+    /// Streams all samples for this session in ascending timestamp order. The EF path uses
+    /// AsAsyncEnumerable so rows are read row-by-row without materializing the whole result set.
+    /// </summary>
+    public async IAsyncEnumerable<SampleRow> StreamSamples([EnumeratorCancellation] CancellationToken cancellationToken = default)
+ {
+ if (_inMemorySamples != null)
+ {
+ foreach (var s in _inMemorySamples.OrderBy(d => d.TimestampTicks))
+ {
+ cancellationToken.ThrowIfCancellationRequested();
+ yield return new SampleRow(
+ s.TimestampTicks,
+ $"{s.DeviceName}:{s.DeviceSerialNo}:{s.ChannelName}",
+ s.Value);
+ }
+ yield break;
+ }
+
+ await using var context = _contextFactory.CreateDbContext();
+ context.ChangeTracker.AutoDetectChangesEnabled = false;
+
+ var query = context.Samples
+ .AsNoTracking()
+ .Where(s => s.LoggingSessionID == _session.ID)
+ .OrderBy(s => s.TimestampTicks)
+ .Select(s => new { s.TimestampTicks, s.DeviceName, s.DeviceSerialNo, s.ChannelName, s.Value });
+
+ await foreach (var s in query.AsAsyncEnumerable().WithCancellation(cancellationToken))
+ {
+ yield return new SampleRow(
+ s.TimestampTicks,
+ $"{s.DeviceName}:{s.DeviceSerialNo}:{s.ChannelName}",
+ s.Value);
+ }
+ }
+ #endregion
+}
diff --git a/Daqifi.Desktop/Exporter/OptimizedLoggingSessionExporter.cs b/Daqifi.Desktop/Exporter/OptimizedLoggingSessionExporter.cs
index 31107630..590414eb 100644
--- a/Daqifi.Desktop/Exporter/OptimizedLoggingSessionExporter.cs
+++ b/Daqifi.Desktop/Exporter/OptimizedLoggingSessionExporter.cs
@@ -1,23 +1,44 @@
-using Daqifi.Desktop.Channel;
-using Daqifi.Desktop.Common.Loggers;
-using Daqifi.Desktop.Logger;
using System.IO;
using System.Text;
+using Daqifi.Core.Logging.Export;
+using Daqifi.Desktop.Common.Loggers;
+using Daqifi.Desktop.Logger;
using Daqifi.Desktop.Models;
using Microsoft.EntityFrameworkCore;
using Microsoft.Extensions.DependencyInjection;
namespace Daqifi.Desktop.Exporter;
-public record SampleData(long TimestampTicks, string DeviceChannel, double Value);
-
+/// <summary>
+/// Thin desktop adapter around <see cref="CsvExporter"/>: builds an
+/// <see cref="ISampleSource"/> from a <see cref="LoggingSession"/>, maps
+/// desktop options into <see cref="CsvExportOptions"/>, and folds per-session
+/// progress into the multi-session percentage the dialog expects.
+/// </summary>
public class OptimizedLoggingSessionExporter
{
+ #region Constants
+ private const int BUFFER_SIZE = 1024 * 1024; // 1MB buffer for file writes
+ #endregion
+
+ #region Static State
+ // CsvExporter is stateless and ships from a sibling library that isn't registered with our DI
+ // container. Caching one instance avoids re-allocating per export without forcing every caller
+ // to inject it.
+ private static readonly CsvExporter SharedCsvExporter = new();
+ #endregion
+
+ #region Private Fields
private readonly AppLogger _appLogger = AppLogger.Instance;
private readonly string _delimiter = DaqifiSettings.Instance.CsvDelimiter;
private readonly IDbContextFactory _loggingContext;
- private const int BUFFER_SIZE = 1024 * 1024; // 1MB buffer for file writes
+ #endregion
+ #region Constructors
+    /// <summary>
+    /// Creates an exporter that resolves its factory from
+    /// <see cref="App.ServiceProvider"/>. Used in production where DI is wired up.
+    /// </summary>
public OptimizedLoggingSessionExporter()
{
if (App.ServiceProvider != null)
@@ -26,451 +47,166 @@ public OptimizedLoggingSessionExporter()
}
}
+    /// <summary>
+    /// Creates an exporter with an explicit <see cref="IDbContextFactory{TContext}"/> factory. Used in tests
+    /// that spin up a temp SQLite database.
+    /// </summary>
public OptimizedLoggingSessionExporter(IDbContextFactory loggingContext)
{
_loggingContext = loggingContext;
}
+ #endregion
- public void ExportLoggingSession(LoggingSession loggingSession, string filepath, bool exportRelativeTime, IProgress progress, CancellationToken cancellationToken, int sessionIndex, int totalSessions)
+ #region Public Methods
+    /// <summary>
+    /// Exports every sample in <paramref name="loggingSession"/> as one CSV row per timestamp.
+    /// </summary>
+    /// <param name="loggingSession">Session to export. May carry an in-memory DataSamples
+    /// collection (test path) or just an ID the EF context will look up (production path).</param>
+    /// <param name="filepath">Absolute path to the output CSV file.</param>
+    /// <param name="exportRelativeTime">When true, the time column is seconds-since-first-sample; otherwise ISO 8601.</param>
+    /// <param name="progress">Optional progress sink reporting overall percentage across all sessions.</param>
+    /// <param name="cancellationToken">Token that aborts the export.</param>
+    /// <param name="sessionIndex">Zero-based index of this session within a multi-session export.</param>
+    /// <param name="totalSessions">Total number of sessions in this export run.</param>
+    public void ExportLoggingSession(LoggingSession loggingSession, string filepath, bool exportRelativeTime,
+        IProgress<int> progress, CancellationToken cancellationToken, int sessionIndex, int totalSessions)
{
try
{
- // Check if we have in-memory data (for tests) or need to query database
- if (loggingSession.DataSamples?.Any() == true)
- {
- // Use optimized in-memory processing for test scenarios
- ExportFromMemory(loggingSession, filepath, exportRelativeTime, progress, cancellationToken, sessionIndex, totalSessions);
- }
- else if (_loggingContext != null)
+ var source = TryBuildSource(loggingSession);
+ if (source == null)
{
- // Use database streaming for production scenarios
- ExportFromDatabase(loggingSession, filepath, exportRelativeTime, progress, cancellationToken, sessionIndex, totalSessions);
- }
- else
- {
- _appLogger.Warning("No data source available for export");
+ return;
}
- }
- catch (Exception ex)
- {
- _appLogger.Error(ex, "Exception in OptimizedExportLoggingSession");
- }
- }
- private void ExportFromMemory(LoggingSession loggingSession, string filepath, bool exportRelativeTime, IProgress progress, CancellationToken cancellationToken, int sessionIndex, int totalSessions)
- {
- var channelNames = loggingSession.DataSamples
- .Select(s => $"{s.DeviceName}:{s.DeviceSerialNo}:{s.ChannelName}")
- .Distinct()
- .OrderBy(name => name) // Use default string ordering to match database path
- .ToList();
+ var options = new CsvExportOptions
+ {
+ Delimiter = _delimiter,
+ UseRelativeTime = exportRelativeTime,
+ };
- var hasTimestamps = loggingSession.DataSamples.Any(); // Check if we have samples, not if timestamps > 0
- if (channelNames.Count == 0 || !hasTimestamps)
- {
- return;
+ RunExport(source, filepath, options, progress, cancellationToken, sessionIndex, totalSessions);
}
-
- var firstTimestamp = loggingSession.DataSamples.Min(s => s.TimestampTicks);
-
- // Write header efficiently
- using var writer = new StreamWriter(filepath, false, Encoding.UTF8, BUFFER_SIZE);
- WriteHeaderToWriter(writer, channelNames, exportRelativeTime);
-
- // Process data without creating intermediate collections
- WriteMemoryDataDirectly(writer, loggingSession.DataSamples, channelNames, firstTimestamp, exportRelativeTime, progress, cancellationToken, sessionIndex, totalSessions);
- }
-
- private void ExportFromDatabase(LoggingSession loggingSession, string filepath, bool exportRelativeTime, IProgress progress, CancellationToken cancellationToken, int sessionIndex, int totalSessions)
- {
- // Get channel names and basic info without loading all data
- var channelInfo = GetChannelInfoFromDatabase(loggingSession);
- if (channelInfo.channelNames.Count == 0 || !channelInfo.hasTimestamps)
+ catch (OperationCanceledException)
{
- return;
+ // Let the viewmodel's cancellation handler record the breadcrumb.
+ throw;
}
-
- // Write header
- WriteHeader(filepath, channelInfo.channelNames, exportRelativeTime);
-
- // Process data in streaming fashion using database queries
- StreamDataToFile(loggingSession, filepath, channelInfo, exportRelativeTime, progress, cancellationToken, sessionIndex, totalSessions);
- }
-
- private (List channelNames, bool hasTimestamps, int samplesCount, long firstTimestamp) GetChannelInfoFromDatabase(LoggingSession loggingSession)
- {
- using var context = _loggingContext.CreateDbContext();
- context.ChangeTracker.AutoDetectChangesEnabled = false;
-
- var query = context.Samples
- .AsNoTracking()
- .Where(s => s.LoggingSessionID == loggingSession.ID);
-
- // Get channel names - select raw fields first (EF Core can't translate string.Format),
- // then format on the client side
- var channelNames = query
- .Select(s => new { s.DeviceName, s.DeviceSerialNo, s.ChannelName })
- .Distinct()
- .OrderBy(s => s.DeviceName).ThenBy(s => s.DeviceSerialNo).ThenBy(s => s.ChannelName)
- .AsEnumerable()
- .Select(s => $"{s.DeviceName}:{s.DeviceSerialNo}:{s.ChannelName}")
- .ToList();
-
- // Get min timestamp and count in a single query to avoid logic errors and reduce roundtrips
- var stats = query
- .GroupBy(s => 1) // Dummy groupby to use aggregate functions
- .Select(g => new {
- MinTimestamp = g.Min(s => (long?)s.TimestampTicks) ?? 0L,
- Count = g.Count()
- })
- .FirstOrDefault();
-
- var firstTimestamp = stats?.MinTimestamp ?? 0L;
- var samplesCount = stats?.Count ?? 0;
-
- return (channelNames, samplesCount > 0, samplesCount, firstTimestamp);
- }
-
- private void WriteHeaderToWriter(StreamWriter writer, List channelNames, bool exportRelativeTime)
- {
- var header = exportRelativeTime ? "Relative Time (s)" : "Time";
- writer.Write(header);
- foreach (var channelName in channelNames)
+ catch (Exception ex)
{
- writer.Write(_delimiter);
- writer.Write(channelName);
+ _appLogger.Error(ex,
+ $"Exception in OptimizedExportLoggingSession (sessionId={loggingSession?.ID}, filepath={filepath}, relativeTime={exportRelativeTime})");
}
- writer.WriteLine();
}
- private void WriteMemoryDataDirectly(StreamWriter writer, ICollection dataSamples, List channelNames,
- long firstTimestamp, bool exportRelativeTime, IProgress progress, CancellationToken cancellationToken, int sessionIndex, int totalSessions)
+    /// <summary>
+    /// Exports samples grouped into rolling windows of <paramref name="averageQuantity"/> samples,
+    /// writing one averaged row per window. Trailing partial windows are flushed (a behavior change
+    /// vs the legacy implementation, which silently dropped them).
+    /// </summary>
+    /// <param name="session">Session to export.</param>
+    /// <param name="filepath">Absolute path to the output CSV file.</param>
+    /// <param name="averageQuantity">Window size in samples. Must be positive; non-positive values short-circuit with a warning log.</param>
+    /// <param name="exportRelativeTime">When true, the time column is seconds-since-first-sample; otherwise ISO 8601.</param>
+    /// <param name="progress">Optional progress sink reporting overall percentage across all sessions.</param>
+    /// <param name="cancellationToken">Token that aborts the export.</param>
+    /// <param name="sessionIndex">Zero-based index of this session within a multi-session export.</param>
+    /// <param name="totalSessions">Total number of sessions in this export run.</param>
+    public void ExportAverageSamples(LoggingSession session, string filepath, double averageQuantity,
+        bool exportRelativeTime, IProgress<int> progress, CancellationToken cancellationToken, int sessionIndex, int totalSessions)
{
- var sb = new StringBuilder(1024 * 4);
- var processedSamples = 0;
- var totalSamples = dataSamples.Count;
-
- // Stream process by timestamp to avoid materializing all groups in memory
- var orderedSamples = dataSamples.OrderBy(s => s.TimestampTicks);
-
- long? currentTimestamp = null;
- var timestampBucket = new List();
-
- foreach (var sample in orderedSamples)
+ try
{
- if (cancellationToken.IsCancellationRequested)
+ var window = (int)averageQuantity;
+ if (window <= 0)
{
- _appLogger.Warning("Export operation cancelled by user.");
+ _appLogger.Warning(
+ $"Skipping average export: AverageWindow must be positive (was {averageQuantity}, sessionId={session?.ID}, filepath={filepath}).");
return;
}
- // Check if we've moved to a new timestamp
- if (currentTimestamp.HasValue && sample.TimestampTicks != currentTimestamp.Value)
+ var source = TryBuildSource(session);
+ if (source == null)
{
- // Write the completed timestamp group
- WriteTimestampGroup(writer, sb, timestampBucket, channelNames, firstTimestamp, exportRelativeTime);
- processedSamples += timestampBucket.Count;
-
- // Update progress periodically
- if (processedSamples % 5000 == 0)
- {
- var sessionProgress = Math.Min(100, (int)((double)processedSamples / totalSamples * 100));
- var overallProgress = (int)((sessionIndex + sessionProgress / 100.0) * (100.0 / totalSessions));
- progress?.Report(overallProgress);
- }
-
- timestampBucket.Clear();
+ return;
}
- currentTimestamp = sample.TimestampTicks;
- timestampBucket.Add(sample);
- }
-
- // Write the final timestamp group
- if (timestampBucket.Count > 0)
- {
- WriteTimestampGroup(writer, sb, timestampBucket, channelNames, firstTimestamp, exportRelativeTime);
- processedSamples += timestampBucket.Count;
-
- // Final progress update
- var finalProgress = (int)((sessionIndex + 1.0) * (100.0 / totalSessions));
- progress?.Report(finalProgress);
- }
- }
-
- private void WriteTimestampGroup(StreamWriter writer, StringBuilder sb, List timestampSamples,
- List channelNames, long firstTimestamp, bool exportRelativeTime)
- {
- if (!timestampSamples.Any()) return;
-
- var timestamp = timestampSamples[0].TimestampTicks;
- var timeString = exportRelativeTime
- ? ((timestamp - firstTimestamp) / (double)TimeSpan.TicksPerSecond).ToString("F3")
- : FormatAbsoluteTimestamp(timestamp);
-
- sb.Clear();
- sb.Append(timeString);
-
- // Create lookup for this timestamp's samples - handle duplicates by taking the last value
- var sampleLookup = new Dictionary();
- foreach (var sample in timestampSamples)
- {
- var channelKey = $"{sample.DeviceName}:{sample.DeviceSerialNo}:{sample.ChannelName}";
- sampleLookup[channelKey] = sample.Value; // Overwrite duplicates like original implementation
- }
-
- foreach (var channelName in channelNames)
- {
- sb.Append(_delimiter);
- if (sampleLookup.TryGetValue(channelName, out var value))
+ var options = new CsvExportOptions
{
- sb.Append(value.ToString("G"));
- }
- }
+ Delimiter = _delimiter,
+ UseRelativeTime = exportRelativeTime,
+ AverageWindow = window,
+ };
- sb.AppendLine();
- writer.Write(sb.ToString());
- }
-
- private void WriteHeader(string filepath, List channelNames, bool exportRelativeTime)
- {
- using var writer = new StreamWriter(filepath, false, Encoding.UTF8, BUFFER_SIZE);
- var header = exportRelativeTime ? "Relative Time (s)" : "Time";
- writer.Write(header);
- foreach (var channelName in channelNames)
- {
- writer.Write(_delimiter);
- writer.Write(channelName);
+ RunExport(source, filepath, options, progress, cancellationToken, sessionIndex, totalSessions);
}
- writer.WriteLine();
- }
-
- private void StreamDataToFile(LoggingSession loggingSession, string filepath,
- (List channelNames, bool hasTimestamps, int samplesCount, long firstTimestamp) channelInfo,
- bool exportRelativeTime, IProgress progress, CancellationToken cancellationToken, int sessionIndex, int totalSessions)
- {
- using var context = _loggingContext.CreateDbContext();
- context.ChangeTracker.AutoDetectChangesEnabled = false;
-
- using var writer = new StreamWriter(filepath, true, Encoding.UTF8, BUFFER_SIZE);
- var sb = new StringBuilder(1024 * 4);
-
- // Single query, streamed row-by-row via IEnumerable — no N+1
- var samplesStream = context.Samples
- .AsNoTracking()
- .Where(s => s.LoggingSessionID == loggingSession.ID)
- .OrderBy(s => s.TimestampTicks)
- .Select(s => new { s.TimestampTicks, s.DeviceName, s.DeviceSerialNo, s.ChannelName, s.Value });
-
- var processedSamples = 0;
- long? currentTimestamp = null;
- var timestampBucket = new List();
-
- foreach (var s in samplesStream)
+ catch (OperationCanceledException)
{
- if (cancellationToken.IsCancellationRequested)
- {
- _appLogger.Warning("Export operation cancelled by user.");
- return;
- }
-
- var sample = new SampleData(s.TimestampTicks, $"{s.DeviceName}:{s.DeviceSerialNo}:{s.ChannelName}", s.Value);
-
- if (currentTimestamp.HasValue && s.TimestampTicks != currentTimestamp.Value)
- {
- WriteCompleteTimestampRow(writer, sb, timestampBucket, channelInfo.channelNames,
- channelInfo.firstTimestamp, exportRelativeTime);
- processedSamples += timestampBucket.Count;
-
- if (processedSamples % 5000 == 0)
- {
- var sessionProgress = Math.Min(100, (int)((double)processedSamples / channelInfo.samplesCount * 100));
- var overallProgress = (int)((sessionIndex + sessionProgress / 100.0) * (100.0 / totalSessions));
- progress?.Report(overallProgress);
- }
-
- timestampBucket.Clear();
- }
-
- currentTimestamp = s.TimestampTicks;
- timestampBucket.Add(sample);
+ // Let the viewmodel's cancellation handler record the breadcrumb.
+ throw;
}
-
- // Write final group
- if (timestampBucket.Count > 0)
+ catch (Exception ex)
{
- WriteCompleteTimestampRow(writer, sb, timestampBucket, channelInfo.channelNames,
- channelInfo.firstTimestamp, exportRelativeTime);
- processedSamples += timestampBucket.Count;
+ _appLogger.Error(ex,
+ $"Failed in OptimizedExportAverageSamples (sessionId={session?.ID}, filepath={filepath}, averageWindow={averageQuantity}, relativeTime={exportRelativeTime})");
}
-
- var finalProgress = (int)((sessionIndex + 1.0) * (100.0 / totalSessions));
- progress?.Report(finalProgress);
}
+ #endregion
+ #region Private Helpers
///
- /// Formats a timestamp tick value as an ISO 8601 string, or returns an error token if the value is outside
- /// the valid range (1– ticks inclusive).
- /// Zero and negative values are treated as invalid. Prevents when the
- /// database contains corrupt timestamp values.
+    /// Builds the appropriate <see cref="ISampleSource"/> for this session, or
+    /// returns null if there is no usable data source. Mirrors the legacy
+    /// "no file when there are no channels" behavior so empty sessions don't
+    /// produce empty CSVs.
///
- private static string FormatAbsoluteTimestamp(long ticks)
- => (ticks > 0 && ticks <= DateTime.MaxValue.Ticks)
- ? new DateTime(ticks).ToString("O")
- : $"INVALID({ticks})";
-
- private void WriteCompleteTimestampRow(StreamWriter writer, StringBuilder sb, List timestampSamples,
- List channelNames, long firstTimestamp, bool exportRelativeTime)
+ private ISampleSource TryBuildSource(LoggingSession loggingSession)
{
- if (!timestampSamples.Any()) return;
-
- var timestamp = timestampSamples[0].TimestampTicks;
- var timeString = exportRelativeTime
- ? ((timestamp - firstTimestamp) / (double)TimeSpan.TicksPerSecond).ToString("F3")
- : FormatAbsoluteTimestamp(timestamp);
-
- sb.Clear();
- sb.Append(timeString);
-
- // Create lookup for fast channel value retrieval - handle duplicates by taking the last value
- var sampleLookup = new Dictionary();
- foreach (var sample in timestampSamples)
+ ISampleSource source;
+ if (loggingSession.DataSamples?.Any() == true)
{
- sampleLookup[sample.DeviceChannel] = sample.Value; // Overwrite duplicates like original implementation
+ source = new LoggingSessionSampleSource(loggingSession, loggingSession.DataSamples);
}
-
- foreach (var channelName in channelNames)
+ else if (_loggingContext != null)
{
- sb.Append(_delimiter);
- if (sampleLookup.TryGetValue(channelName, out var value))
- {
- sb.Append(value.ToString("G"));
- }
+ source = new LoggingSessionSampleSource(loggingSession, _loggingContext);
}
-
- sb.AppendLine();
- writer.Write(sb.ToString());
- }
-
-
- public void ExportAverageSamples(LoggingSession session, string filepath, double averageQuantity,
- bool exportRelativeTime, IProgress progress, CancellationToken cancellationToken, int sessionIndex, int totalSessions)
- {
- try
+ else
{
- using var context = _loggingContext.CreateDbContext();
- context.ChangeTracker.AutoDetectChangesEnabled = false;
-
- var channelNames = context.Samples
- .AsNoTracking()
- .Where(s => s.LoggingSessionID == session.ID)
- .Select(s => new { s.DeviceName, s.DeviceSerialNo, s.ChannelName })
- .Distinct()
- .OrderBy(s => s.DeviceName).ThenBy(s => s.DeviceSerialNo).ThenBy(s => s.ChannelName)
- .AsEnumerable()
- .Select(s => $"{s.DeviceName}:{s.DeviceSerialNo}:{s.ChannelName}")
- .ToList();
-
- if (!channelNames.Any())
- return;
-
- using var writer = new StreamWriter(filepath, false, Encoding.UTF8, BUFFER_SIZE);
-
- // Write header
- var header = exportRelativeTime ? "Relative Time (s)" : "Time";
- writer.Write(header);
- foreach (var channelName in channelNames)
- {
- writer.Write(_delimiter);
- writer.Write(channelName);
- }
- writer.WriteLine();
-
- // Stream and process averages in batches
- StreamAverageData(context, session.ID, writer, channelNames, averageQuantity, exportRelativeTime, progress, cancellationToken, sessionIndex, totalSessions);
+ _appLogger.Warning("No data source available for export");
+ return null;
}
- catch (Exception ex)
+
+ if (source.GetChannels().Count == 0)
{
- _appLogger.Error(ex, "Failed in OptimizedExportAverageSamples");
+ return null;
}
+
+ return source;
}
- private void StreamAverageData(LoggingContext context, int sessionId, StreamWriter writer,
- List channelNames, double averageQuantity, bool exportRelativeTime,
+ private static void RunExport(ISampleSource source, string filepath, CsvExportOptions options,
IProgress progress, CancellationToken cancellationToken, int sessionIndex, int totalSessions)
{
- var tempTotals = channelNames.ToDictionary(name => name, _ => 0.0);
- var tempCounts = channelNames.ToDictionary(name => name, _ => 0);
- var sb = new StringBuilder(1024 * 4);
-
- long? firstTimestampTicks = null;
- var count = 0;
- var totalProcessed = 0;
-
- // Process samples in streaming fashion
- var samplesQuery = context.Samples
- .AsNoTracking()
- .Where(s => s.LoggingSessionID == sessionId)
- .OrderBy(s => s.TimestampTicks)
- .Select(s => new { s.TimestampTicks, s.DeviceName, s.DeviceSerialNo, s.ChannelName, s.Value })
- .AsEnumerable()
- .Select(s => new SampleData(s.TimestampTicks, $"{s.DeviceName}:{s.DeviceSerialNo}:{s.ChannelName}", s.Value));
-
- var totalSamples = context.Samples
- .AsNoTracking()
- .Count(s => s.LoggingSessionID == sessionId);
-
- foreach (var sample in samplesQuery)
+ // Fold the [0..100] per-session progress that core reports into the
+ // overall (sessionIndex + p/100) * (100/totalSessions) shape the dialog
+ // already binds to. Match the legacy ceiling so we never exceed 100.
+        IProgress<int> wrappedProgress = null;
+ if (progress != null && totalSessions > 0)
{
- if (!firstTimestampTicks.HasValue)
- firstTimestampTicks = sample.TimestampTicks;
-
- tempTotals[sample.DeviceChannel] += sample.Value;
- tempCounts[sample.DeviceChannel]++;
- count++;
- totalProcessed++;
-
- if (count >= averageQuantity)
+            wrappedProgress = new Progress<int>(p =>
{
- var timeString = exportRelativeTime
- ? ((sample.TimestampTicks - firstTimestampTicks.Value) / (double)TimeSpan.TicksPerSecond).ToString("F3")
- : FormatAbsoluteTimestamp(sample.TimestampTicks);
-
- sb.Clear();
- sb.Append(timeString);
-
- foreach (var channelName in channelNames)
- {
- sb.Append(_delimiter);
- if (tempCounts[channelName] > 0)
- {
- var average = tempTotals[channelName] / tempCounts[channelName];
- sb.Append(average.ToString("G"));
- }
- }
- sb.AppendLine();
-
- writer.Write(sb.ToString());
-
- // Reset accumulators
- foreach (var channelName in channelNames)
- {
- tempTotals[channelName] = 0.0;
- tempCounts[channelName] = 0;
- }
- count = 0;
-
- // Update progress periodically
- if (totalProcessed % 1000 == 0)
- {
- var progressPercentage = Math.Min(100, (int)((double)totalProcessed / totalSamples * 100));
- var overallProgress = (int)((sessionIndex + progressPercentage / 100.0) * (100.0 / totalSessions));
- progress?.Report(overallProgress);
- }
- }
-
- if (cancellationToken.IsCancellationRequested)
- return;
+ var bounded = Math.Min(100, Math.Max(0, p));
+ var overall = (int)((sessionIndex + bounded / 100.0) * (100.0 / totalSessions));
+ progress.Report(overall);
+ });
}
+
+ using var writer = new StreamWriter(filepath, false, Encoding.UTF8, BUFFER_SIZE);
+ SharedCsvExporter.ExportAsync(source, writer, options, wrappedProgress, cancellationToken)
+ .GetAwaiter()
+ .GetResult();
}
+ #endregion
}