16 changes: 16 additions & 0 deletions SS14.Admin/AdminLogs/Export/ExportConfiguration.cs
@@ -0,0 +1,16 @@
namespace SS14.Admin.AdminLogs.Export;

public class ExportConfiguration
{
public const string Name = "Export";

/// <summary>
/// The maximum number of export processes that can be queued before new export requests are rejected
/// </summary>
public int ProcessQueueMaxSize { get; set; } = 6;

/// <summary>
/// The directory that generated exports are written to
/// </summary>
public string ExportDirectory { get; set; } = "export";
}
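
This configuration section and the services introduced below need to be wired up at application startup. A minimal registration sketch, assuming the usual minimal-hosting Program.cs layout (the actual startup file is not part of this diff, and the DbContext registration required by LogExporter is assumed to exist already):

// Hypothetical Program.cs wiring for the export feature (not shown in this diff).
builder.Services.Configure<ExportConfiguration>(builder.Configuration.GetSection(ExportConfiguration.Name));
builder.Services.AddSingleton<LogExportQueue>();
builder.Services.AddScoped<LogExporter>();
builder.Services.AddHostedService<LogExportBackgroundService>();

var app = builder.Build();
app.MapLogExportEndpoints();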
9 changes: 9 additions & 0 deletions SS14.Admin/AdminLogs/Export/ExportProcessItem.cs
@@ -0,0 +1,9 @@
namespace SS14.Admin.AdminLogs.Export;

public sealed record ExportProcessItem(
string? Search = null,
DateTime? FromDate = null,
DateTime? ToDate = null,
int? RoundId = null,
bool UseCompression = false
);
42 changes: 42 additions & 0 deletions SS14.Admin/AdminLogs/Export/LogExportBackgroundService.cs
@@ -0,0 +1,42 @@
using Serilog;
using ILogger = Serilog.ILogger;

namespace SS14.Admin.AdminLogs.Export;

public sealed class LogExportBackgroundService : BackgroundService
{
private readonly IServiceProvider _serviceProvider;
private readonly LogExportQueue _queue;
private readonly ILogger _log;

public LogExportBackgroundService(LogExportQueue queue, IServiceProvider serviceProvider)
{
_serviceProvider = serviceProvider;
_queue = queue;

_log = Log.ForContext<LogExportBackgroundService>();
}

protected override Task ExecuteAsync(CancellationToken stoppingToken)
{
_log.Information("{ServiceName} started", nameof(LogExportBackgroundService));
return ProcessQueueAsync(stoppingToken);
}

private async Task ProcessQueueAsync(CancellationToken ct)
{
while (!ct.IsCancellationRequested)
{
var item = await _queue.DequeueAsync(ct);
try
{
await ProcessTask(item, ct);
}
catch (Exception e) when (e is not OperationCanceledException)
{
// A single failed export must not terminate the processing loop.
_log.Error(e, "Failed to process log export request");
}
}
}

private async Task ProcessTask(ExportProcessItem item, CancellationToken ct)
{
using var scope = _serviceProvider.CreateScope();
var exporter = scope.ServiceProvider.GetRequiredService<LogExporter>();
var filename = await exporter.Export(item, ct);
await _queue.ReportFinishedExport(filename);
}
}
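
The background service only drains the queue; the code that enqueues export requests is not shown in this diff. A hedged sketch of what a submitting endpoint could look like, with the route and parameter binding purely illustrative:

// Hypothetical submit endpoint (route and shape are illustrative only).
endpoints.MapPost("/logs/export/start", async (ExportProcessItem item, LogExportQueue queue) =>
{
if (!await queue.TryQueueProcessItem(item))
return Results.StatusCode(StatusCodes.Status429TooManyRequests);

return Results.Accepted();
}).RequireAuthorization();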
47 changes: 47 additions & 0 deletions SS14.Admin/AdminLogs/Export/LogExportExtension.cs
@@ -0,0 +1,47 @@
using Microsoft.Extensions.Options;

namespace SS14.Admin.AdminLogs.Export;

public static class LogExportExtension
{
public static void MapLogExportEndpoints(this IEndpointRouteBuilder endpoints)
{
endpoints.MapGet("/logs/export/poll", async (CancellationToken ct, LogExportQueue queue) =>
{
using var timeoutCts = CancellationTokenSource.CreateLinkedTokenSource(ct);
timeoutCts.CancelAfter(TimeSpan.FromSeconds(120));
using var channel = queue.CreateReportChannel();
var filename = await channel.Listen(timeoutCts.Token);

return Results.Ok(filename);
}).RequireAuthorization();

endpoints.MapGet("/logs/export/list", async (IOptions<ExportConfiguration> config) =>
{
var exportPath = Path.Combine(Directory.GetCurrentDirectory(), config.Value.ExportDirectory);
var files = Directory.EnumerateFiles(exportPath, "*.csv*");

return Results.Ok(files.Select(Path.GetFileName));

}).RequireAuthorization();

endpoints.MapGet("/logs/export/download/{filename}", async (string filename, IOptions<ExportConfiguration> config) =>
{
var extension = Path.GetExtension(filename);
var mimetype = extension switch
{
".gz" => "application/x-gzip",
".csv" => "text/csv",
_ => null
};

if (mimetype == null)
return Results.NotFound();
var basePath = Path.Combine(Directory.GetCurrentDirectory(), config.Value.ExportDirectory);
var path = Path.Combine(basePath, Path.GetFileName(filename));
return !File.Exists(path) ? Results.NotFound() : Results.File(path, contentType: mimetype);
}).RequireAuthorization();
}
}
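
Together these endpoints form a poll-then-download flow: a client long-polls /logs/export/poll until the background service reports a finished file, then fetches it from /logs/export/download. A rough client-side sketch, assuming the HttpClient already carries the admin panel's base address and authentication (both assumptions):

// Hypothetical client flow; authentication setup is omitted.
using System.Net.Http.Json;

using var http = new HttpClient { BaseAddress = new Uri("https://admin.example.com/") };

// Long-poll until an export finishes (the server cancels the poll after 120 seconds).
var filename = await http.GetFromJsonAsync<string>("logs/export/poll");

if (filename is not null)
{
await using var download = await http.GetStreamAsync($"logs/export/download/{filename}");
await using var local = File.Create(filename);
await download.CopyToAsync(local);
}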
96 changes: 96 additions & 0 deletions SS14.Admin/AdminLogs/Export/LogExportQueue.cs
@@ -0,0 +1,96 @@
using System.Threading.Channels;
using Microsoft.Extensions.Options;

namespace SS14.Admin.AdminLogs.Export;

public sealed class LogExportQueue
{
private readonly IOptions<ExportConfiguration> _configuration;
private readonly Channel<ExportProcessItem> _queue;

private readonly List<Channel<string>> _reportChannels = [];

public int MaxItemCount => _configuration.Value.ProcessQueueMaxSize;
public int Count => _queue.Reader.Count;

public LogExportQueue(IOptions<ExportConfiguration> configuration)
{
_configuration = configuration;

var channelConfiguration = new BoundedChannelOptions(MaxItemCount)
{
FullMode = BoundedChannelFullMode.DropWrite
};

_queue = Channel.CreateBounded<ExportProcessItem>(channelConfiguration);
}

public async Task<bool> TryQueueProcessItem(ExportProcessItem item)
{
if (Count == MaxItemCount)
return false;

await _queue.Writer.WriteAsync(item);
return true;
}

public async ValueTask<ExportProcessItem> DequeueAsync(CancellationToken cancellationToken)
{
return await _queue.Reader.ReadAsync(cancellationToken);
}

public ReportChannel CreateReportChannel()
{
var channelOptions = new BoundedChannelOptions(1)
{
SingleReader = true,
SingleWriter = true,
AllowSynchronousContinuations = false,
FullMode = BoundedChannelFullMode.DropOldest,
};

var channel = Channel.CreateBounded<string>(channelOptions);
_reportChannels.Add(channel);

return new ReportChannel(
channel,
new WeakReference<IList<Channel<string>>>(_reportChannels)
);
}

public Task ReportFinishedExport(string filename)
{
// Broadcast to a snapshot so a listener disposing (and unregistering) mid-iteration doesn't
// invalidate the loop; TryWrite is a harmless no-op on channels that were already completed.
foreach (var channel in _reportChannels.ToArray())
{
channel.Writer.TryWrite(filename);
}

return Task.CompletedTask;
}

public sealed record ReportChannel : IDisposable
{
private readonly Channel<string> _channel;
private readonly WeakReference<IList<Channel<string>>> _channels;

public ReportChannel(Channel<string> channel, WeakReference<IList<Channel<string>>> channels)
{
_channel = channel;
_channels = channels;
}


public async ValueTask<string> Listen(CancellationToken ct)
{
return await _channel.Reader.ReadAsync(ct);
}

public void Dispose()
{
_channel.Writer.TryComplete();

if (!_channels.TryGetTarget(out var channels))
return;

channels.Remove(_channel);
}
}
}
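
Each poll request gets its own capacity-1 report channel: CreateReportChannel registers it, ReportFinishedExport broadcasts the filename to every registered channel, and disposing the handle completes the writer and unregisters it. A small illustrative helper showing that lifecycle (not part of this diff):

// Illustrative helper; mirrors what the poll endpoint does with a report channel.
static async Task<string> WaitForNextExportAsync(LogExportQueue queue, CancellationToken ct)
{
// Registers a capacity-1 DropOldest channel, so only the newest filename is retained;
// disposing (via 'using') completes the writer and removes the channel from the queue again.
using var report = queue.CreateReportChannel();

// Completes once LogExportBackgroundService calls ReportFinishedExport with a filename.
return await report.Listen(ct);
}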
104 changes: 104 additions & 0 deletions SS14.Admin/AdminLogs/Export/LogExporter.cs
@@ -0,0 +1,104 @@
using System.IO.Compression;
using Content.Server.Database;
using Microsoft.EntityFrameworkCore;
using Microsoft.Extensions.Options;

namespace SS14.Admin.AdminLogs.Export;

public sealed class LogExporter
{
private const char ColumnSeparator = ',';
private const char Quote = '"';
private const string EscapedQuote = "\"\"";

private readonly PostgresServerDbContext _context;
private readonly IOptions<ExportConfiguration> _configuration;

public LogExporter(PostgresServerDbContext context, IOptions<ExportConfiguration> configuration)
{
_context = context;
_configuration = configuration;
}

public async Task<string> Export(ExportProcessItem item, CancellationToken ct)
{
// Prevent accidentally exporting all logs
if ((!item.FromDate.HasValue || !item.ToDate.HasValue) && !item.RoundId.HasValue)
throw new InvalidOperationException("Neither a complete date range nor a round ID filter was provided for the log export.");

var filename = $"{DateTime.UtcNow.ToShortDateString()}-{Guid.NewGuid()}_log_export.csv{(item.UseCompression ? ".gz" : "")}";
var path = Path.Combine(_configuration.Value.ExportDirectory, filename);
await using var fileStream = new FileStream(path, FileMode.Create, FileAccess.Write);

if (item.UseCompression)
{
await using var compressionStream = new GZipStream(fileStream, CompressionMode.Compress, leaveOpen: true);
await using var writer = new StreamWriter(compressionStream);
await WriteCsv(writer, item, ct);
}
else
{
await using var writer = new StreamWriter(fileStream);
await WriteCsv(writer, item, ct);
}

return filename;
}

private async Task WriteCsv(StreamWriter writer, ExportProcessItem item, CancellationToken ct)
{
var query = _context.AdminLog
.AsNoTracking()
.AsQueryable();

if (item.RoundId.HasValue)
query = query.Where(e => e.RoundId == item.RoundId);

if (item is { FromDate: not null, ToDate: not null })
query = query.Where(e => item.FromDate.Value.Date.ToUniversalTime() <= e.Date
&& e.Date <= item.ToDate.Value.Date.ToUniversalTime());

if (item.Search != null)
query = query.Where(e => EF.Functions.ToTsVector("english", e.Message).Matches(item.Search));

await WriteCsvHeader(writer);

await foreach (var log in query.AsAsyncEnumerable().WithCancellation(ct))
{
await writer.WriteAsync(log.Date.ToString("O"));
await writer.WriteAsync(ColumnSeparator);
await writer.WriteAsync(log.Id.ToString());
await writer.WriteAsync(ColumnSeparator);
await writer.WriteAsync(log.RoundId.ToString());
await writer.WriteAsync(ColumnSeparator);
await writer.WriteAsync(log.Impact.ToString());
await writer.WriteAsync(ColumnSeparator);
await writer.WriteAsync(log.Type.ToString());
await writer.WriteAsync(ColumnSeparator);
await writer.WriteAsync(Quote);
await writer.WriteAsync(log.Message.Replace(Quote.ToString(), EscapedQuote));
await writer.WriteAsync(Quote);
await writer.WriteAsync(ColumnSeparator);
await writer.WriteAsync(Quote);
await writer.WriteAsync(log.Json.RootElement.GetRawText().Replace(Quote.ToString(), EscapedQuote));
await writer.WriteAsync(Quote);
// Terminate each record so all rows don't end up on a single line.
await writer.WriteLineAsync();
}
}

private async Task WriteCsvHeader(StreamWriter writer)
{
await writer.WriteAsync("timestamp");
await writer.WriteAsync(ColumnSeparator);
await writer.WriteAsync("round_id");
await writer.WriteAsync(ColumnSeparator);
await writer.WriteAsync("id");
await writer.WriteAsync(ColumnSeparator);
await writer.WriteAsync("impact");
await writer.WriteAsync(ColumnSeparator);
await writer.WriteAsync("type");
await writer.WriteAsync(ColumnSeparator);
await writer.WriteAsync("message");
await writer.WriteAsync(ColumnSeparator);
await writer.WriteAsync("json");
}
}
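
The exporter accepts two filter shapes: a round ID, or a complete from/to date range (optionally narrowed by a full-text search). If neither is present, Export throws rather than dumping every log. Two example requests (the values are illustrative only):

// Example export requests; values are illustrative.
var roundExport = new ExportProcessItem(RoundId: 1234, UseCompression: true);

var dateRangeExport = new ExportProcessItem(
Search: "explosion",
FromDate: new DateTime(2024, 1, 1),
ToDate: new DateTime(2024, 1, 7));

// Each call streams matching logs to a CSV (gzip-compressed when requested) in the
// configured export directory and returns the generated filename:
// var filename = await exporter.Export(roundExport, CancellationToken.None);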