35 commits
027328b
my eclipse workspace is green!!!
hilmarf Aug 20, 2025
0cf67e1
TODO: implement retry, when exception is thrown
hilmarf Aug 20, 2025
4980873
Merge branch 'main' into eclipse
hilmarf Aug 20, 2025
9b224fe
Merge branch 'main' into AuditLog
hilmarf Aug 20, 2025
29c8231
Merge remote-tracking branch 'otel/main' into AuditLog
hilmarf Aug 21, 2025
22fada2
remove gradle nature
hilmarf Aug 21, 2025
63995a4
Merge remote-tracking branch 'otel/main' into eclipse
hilmarf Aug 21, 2025
906d3eb
Merge branch 'eclipse' into AuditLog
hilmarf Aug 21, 2025
f6e2a87
cleanup
hilmarf Aug 25, 2025
5a7e052
simplify MultiLogRecordExporter
hilmarf Aug 25, 2025
ba74fb6
make CompletableLogRecordExporter package private and reusable
hilmarf Aug 25, 2025
d913202
Merge branch 'open-telemetry:main' into AuditLog
hilmarf Aug 25, 2025
124b5e2
Merge remote-tracking branch 'otel/main' into AuditLog
hilmarf Aug 28, 2025
a4402c1
AuditLogFileStore finally works
hilmarf Aug 28, 2025
4a58b10
remove eclipse settings
hilmarf Aug 28, 2025
dd43864
Revert "remove eclipse settings"
hilmarf Aug 28, 2025
f1d9e26
Merge remote-tracking branch 'apeiro/main' into AuditLog
hilmarf Sep 2, 2025
4035ba5
add automatic retry logic
hilmarf Sep 2, 2025
b5127ff
Reapply "remove eclipse settings"
hilmarf Sep 2, 2025
11384e1
Revert "remove eclipse settings"
hilmarf Sep 3, 2025
f0d84c7
Merge remote-tracking branch 'otel/main' into AuditLog
hilmarf Sep 5, 2025
0f3bc18
simplify
hilmarf Sep 5, 2025
b01760b
leverage the usage of io.opentelemetry.sdk.common.export.RetryPolicy
hilmarf Sep 5, 2025
a3fbb45
Reapply "remove eclipse settings"
hilmarf Sep 5, 2025
03e3d6d
Revert "Reapply "remove eclipse settings""
hilmarf Sep 5, 2025
3c68a16
remove eclipse settings
hilmarf Sep 5, 2025
31d9915
add diff opentelemetry-sdk-logs.txt
hilmarf Sep 5, 2025
affc546
Merge branch 'main' into AuditLog
hilmarf Sep 9, 2025
47fa67c
Merge branch 'main' into AuditLog
hilmarf Sep 10, 2025
9aa32c7
simplify try-catch
hilmarf Sep 10, 2025
2fe3fe8
Update opentelemetry-exporter-otlp-common.txt
hilmarf Sep 11, 2025
01726fa
Update opentelemetry-exporter-otlp-common.txt
hilmarf Sep 11, 2025
23c5f8c
Merge branch 'main' into AuditLog
hilmarf Sep 11, 2025
35da889
revert
hilmarf Sep 11, 2025
a2faedf
Merge branch 'main' into AuditLog
hilmarf Sep 12, 2025
45 changes: 44 additions & 1 deletion docs/apidiffs/current_vs_latest/opentelemetry-sdk-logs.txt
@@ -1,2 +1,45 @@
Comparing source compatibility of opentelemetry-sdk-logs-1.55.0-SNAPSHOT.jar against opentelemetry-sdk-logs-1.54.0.jar
No changes.
+++ NEW CLASS: PUBLIC(+) io.opentelemetry.sdk.logs.export.AuditException (compatible)
+++ CLASS FILE FORMAT VERSION: 52.0 <- n.a.
+++ NEW INTERFACE: java.io.Serializable
+++ NEW SUPERCLASS: java.lang.RuntimeException
+++ NEW FIELD: PUBLIC(+) io.opentelemetry.context.Context context
+++ NEW ANNOTATION: javax.annotation.Nullable
+++ NEW FIELD: PUBLIC(+) java.util.Collection<io.opentelemetry.sdk.logs.data.LogRecordData> logRecords
+++ NEW INTERFACE: PUBLIC(+) ABSTRACT(+) io.opentelemetry.sdk.logs.export.AuditExceptionHandler (not serializable)
+++ CLASS FILE FORMAT VERSION: 52.0 <- n.a.
+++ NEW SUPERCLASS: java.lang.Object
+++ NEW METHOD: PUBLIC(+) ABSTRACT(+) void handle(io.opentelemetry.sdk.logs.export.AuditException)
+++ NEW CLASS: PUBLIC(+) FINAL(+) io.opentelemetry.sdk.logs.export.AuditLogRecordProcessor (not serializable)
+++ CLASS FILE FORMAT VERSION: 52.0 <- n.a.
+++ NEW INTERFACE: io.opentelemetry.sdk.logs.LogRecordProcessor
+++ NEW INTERFACE: java.io.Closeable
+++ NEW INTERFACE: java.lang.AutoCloseable
+++ NEW SUPERCLASS: java.lang.Object
+++ NEW METHOD: PUBLIC(+) STATIC(+) io.opentelemetry.sdk.logs.export.AuditLogRecordProcessorBuilder builder(io.opentelemetry.sdk.logs.export.LogRecordExporter, io.opentelemetry.sdk.logs.export.AuditLogStore)
+++ NEW METHOD: PUBLIC(+) io.opentelemetry.sdk.common.CompletableResultCode forceFlush()
+++ NEW METHOD: PUBLIC(+) io.opentelemetry.sdk.common.CompletableResultCode getLastResultCode()
+++ NEW ANNOTATION: javax.annotation.Nullable
+++ NEW METHOD: PUBLIC(+) void onEmit(io.opentelemetry.context.Context, io.opentelemetry.sdk.logs.ReadWriteLogRecord)
+++ NEW METHOD: PUBLIC(+) io.opentelemetry.sdk.common.CompletableResultCode shutdown()
+++ NEW METHOD: PUBLIC(+) java.lang.String toString()
+++ NEW CLASS: PUBLIC(+) FINAL(+) io.opentelemetry.sdk.logs.export.AuditLogRecordProcessorBuilder (not serializable)
+++ CLASS FILE FORMAT VERSION: 52.0 <- n.a.
+++ NEW SUPERCLASS: java.lang.Object
+++ NEW FIELD: PUBLIC(+) STATIC(+) FINAL(+) long DEFAULT_SCHEDULE_DELAY_MILLIS
+++ NEW FIELD: PUBLIC(+) STATIC(+) FINAL(+) int DEFAULT_MAX_EXPORT_BATCH_SIZE
+++ NEW FIELD: PUBLIC(+) STATIC(+) FINAL(+) int DEFAULT_EXPORT_TIMEOUT_MILLIS
+++ NEW METHOD: PUBLIC(+) io.opentelemetry.sdk.logs.export.AuditLogRecordProcessor build()
+++ NEW METHOD: PUBLIC(+) io.opentelemetry.sdk.logs.export.AuditLogRecordProcessorBuilder setExceptionHandler(io.opentelemetry.sdk.logs.export.AuditExceptionHandler)
+++ NEW METHOD: PUBLIC(+) io.opentelemetry.sdk.logs.export.AuditLogRecordProcessorBuilder setExporterTimeout(long, java.util.concurrent.TimeUnit)
+++ NEW METHOD: PUBLIC(+) io.opentelemetry.sdk.logs.export.AuditLogRecordProcessorBuilder setMaxExportBatchSize(int)
+++ NEW METHOD: PUBLIC(+) io.opentelemetry.sdk.logs.export.AuditLogRecordProcessorBuilder setRetryPolicy(io.opentelemetry.sdk.common.export.RetryPolicy)
+++ NEW METHOD: PUBLIC(+) io.opentelemetry.sdk.logs.export.AuditLogRecordProcessorBuilder setScheduleDelay(long, java.util.concurrent.TimeUnit)
+++ NEW METHOD: PUBLIC(+) io.opentelemetry.sdk.logs.export.AuditLogRecordProcessorBuilder setWaitOnExport(boolean)
+++ NEW INTERFACE: PUBLIC(+) ABSTRACT(+) io.opentelemetry.sdk.logs.export.AuditLogStore (not serializable)
+++ CLASS FILE FORMAT VERSION: 52.0 <- n.a.
+++ NEW SUPERCLASS: java.lang.Object
+++ NEW METHOD: PUBLIC(+) ABSTRACT(+) java.util.Collection<io.opentelemetry.sdk.logs.data.LogRecordData> getAll()
+++ NEW METHOD: PUBLIC(+) ABSTRACT(+) void removeAll(java.util.Collection<io.opentelemetry.sdk.logs.data.LogRecordData>)
+++ NEW METHOD: PUBLIC(+) ABSTRACT(+) void save(io.opentelemetry.sdk.logs.data.LogRecordData)
+++ NEW EXCEPTION: java.io.IOException
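The japicmp listing above covers the whole new audit surface: a processor built from a LogRecordExporter plus an AuditLogStore, a builder with batching/retry knobs, and an AuditExceptionHandler callback. A minimal wiring sketch inferred only from these signatures (the exporter/store arguments and all numeric values are illustrative placeholders, not taken from this PR) could look like:

// Hypothetical usage sketch based solely on the API diff above; names and values are illustrative.
import io.opentelemetry.sdk.common.export.RetryPolicy;
import io.opentelemetry.sdk.logs.SdkLoggerProvider;
import io.opentelemetry.sdk.logs.export.AuditLogRecordProcessor;
import io.opentelemetry.sdk.logs.export.AuditLogStore;
import io.opentelemetry.sdk.logs.export.LogRecordExporter;
import java.util.concurrent.TimeUnit;

final class AuditWiringSketch {
  private AuditWiringSketch() {}

  static SdkLoggerProvider wire(LogRecordExporter exporter, AuditLogStore store) {
    AuditLogRecordProcessor processor =
        AuditLogRecordProcessor.builder(exporter, store)
            .setScheduleDelay(1, TimeUnit.SECONDS)
            .setMaxExportBatchSize(512)
            .setExporterTimeout(30, TimeUnit.SECONDS)
            .setRetryPolicy(RetryPolicy.getDefault()) // reuses io.opentelemetry.sdk.common.export.RetryPolicy
            .setWaitOnExport(true)
            // AuditExceptionHandler has a single handle(AuditException) method, so a lambda works;
            // the exception exposes the failed logRecords and the originating context as fields.
            .setExceptionHandler(
                e -> System.err.println("audit export failed: " + e.logRecords + " " + e.context))
            .build();
    return SdkLoggerProvider.builder().addLogRecordProcessor(processor).build();
  }
}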
1 change: 1 addition & 0 deletions exporters/otlp/common/build.gradle.kts
@@ -23,6 +23,7 @@ dependencies {
compileOnly(project(":sdk:trace"))
compileOnly(project(":sdk:logs"))
compileOnly(project(":api:incubator"))
compileOnly("com.fasterxml.jackson.core:jackson-databind")

testImplementation(project(":sdk:metrics"))
testImplementation(project(":sdk:trace"))
@@ -0,0 +1,238 @@
/*
 * Copyright The OpenTelemetry Authors
 * SPDX-License-Identifier: Apache-2.0
 */

package io.opentelemetry.exporter.internal.otlp.logs;

import com.fasterxml.jackson.core.JsonProcessingException;
import com.fasterxml.jackson.databind.ObjectMapper;
import io.opentelemetry.sdk.logs.data.LogRecordData;
import io.opentelemetry.sdk.logs.export.AuditLogStore;
import java.io.BufferedReader;
import java.io.BufferedWriter;
import java.io.ByteArrayOutputStream;
import java.io.IOException;
import java.io.OutputStream;
import java.nio.charset.StandardCharsets;
import java.nio.file.Files;
import java.nio.file.Path;
import java.nio.file.Paths;
import java.nio.file.StandardOpenOption;
import java.util.ArrayList;
import java.util.Collection;
import java.util.HashSet;
import java.util.Set;
import java.util.concurrent.ConcurrentHashMap;
import java.util.concurrent.locks.ReadWriteLock;
import java.util.concurrent.locks.ReentrantReadWriteLock;
import java.util.logging.Logger;
import java.util.stream.Stream;
import javax.annotation.Nullable;

/**
 * This class is internal and experimental. Its APIs are unstable and can change at any time. Its
 * APIs (or a version of them) may be promoted to the public stable API in the future, but no
 * guarantees are made.
 *
 * <p>A file-based implementation of {@link AuditLogStore} that provides thread-safe concurrent
 * reading and writing of audit log records to/from the file system.
 */
public final class AuditLogFileStore implements AuditLogStore {

  private static final String DEFAULT_LOG_FILE_EXTENSION = ".log";

  public static final String DEFAULT_LOG_FILE_NAME = "audit" + DEFAULT_LOG_FILE_EXTENSION;

  private static final Logger logger = Logger.getLogger(AuditLogFileStore.class.getName());

  /** Generates a unique ID for a log record based on its content. */
  static String generateRecordId(@Nullable LogRecordData logRecord) {
    if (logRecord == null) {
      return "";
    }
    return String.valueOf(
        (logRecord.getTimestampEpochNanos()
                + String.valueOf(logRecord.getBodyValue())
                + logRecord.getSeverity().toString())
            .hashCode());
  }

  private final ReadWriteLock lock = new ReentrantReadWriteLock();

  private final Path logFilePath;

  private final Set<String> loggedRecords = ConcurrentHashMap.newKeySet();

  @Nullable private ObjectMapper objectMapper;

  /**
   * Creates a new AuditLogFileStore that stores logs in the specified path. If the path is a
   * directory, logs will be stored in a default file within that directory. If the path is a file,
   * logs will be stored directly in that file.
   *
   * @param path the path to the log file or directory
   * @throws IOException if the file or directory cannot be created or accessed
   */
  public AuditLogFileStore(Path path) throws IOException {
    if (Files.isDirectory(path)) {
      this.logFilePath = path.resolve(DEFAULT_LOG_FILE_NAME);
    } else {
      this.logFilePath = path;
    }

    // Ensure parent directories exist
    if (logFilePath.getParent() != null) {
      Files.createDirectories(logFilePath.getParent());
    }

    // Create the file if it doesn't exist
    if (!Files.exists(logFilePath)) {
      Files.createFile(logFilePath);
    }

    // verify that we can read and write to the file
    if (!Files.isReadable(logFilePath)) {
      throw new IOException("Can't read: " + logFilePath);
    }
    if (!Files.isWritable(logFilePath)) {
      throw new IOException("Can't write: " + logFilePath);
    }

    // Load existing log record IDs to avoid duplicates
    loadExistingRecordIds();
  }

  /**
   * Creates a new AuditLogFileStore that stores logs in the specified file.
   *
   * @param filePath the path to the log file
   * @throws IOException if the file cannot be created or accessed
   */
  public AuditLogFileStore(String filePath) throws IOException {
    this(Paths.get(filePath));
  }

  @Override
  public Collection<LogRecordData> getAll() {
    Collection<LogRecordData> records = new ArrayList<>();
    lock.readLock().lock();
    try (Stream<String> lines = Files.lines(logFilePath)) {
      lines.forEach(
          line -> {
            LogRecordData record = parseLogRecord(line);
            if (record != null) {
              records.add(record);
            }
          });
    } catch (IOException e) {
      logger.throwing(AuditLogFileStore.class.getName(), "getAll", e);
    } finally {
      // Release the read lock even if an unchecked exception (e.g. UncheckedIOException from the
      // line stream) escapes, mirroring loadExistingRecordIds().
      lock.readLock().unlock();
    }
    return records;
  }

  ObjectMapper json() {
    if (objectMapper == null) {
      objectMapper = new ObjectMapper();
    }
    return objectMapper;
  }

  /** Loads existing record IDs from the log file to prevent duplicates. */
  private void loadExistingRecordIds() throws IOException {
    if (!Files.exists(logFilePath) || Files.size(logFilePath) == 0) {
      return;
    }

    lock.readLock().lock();
    try (Stream<String> lines = Files.lines(logFilePath)) {
      lines.forEach(
          line -> {
            String recordId = generateRecordId(parseLogRecord(line));
            if (recordId != null) {
              loggedRecords.add(recordId);
            }
          });
    } finally {
      lock.readLock().unlock();
    }
  }

  /**
   * Parses a log record from a stored JSON line, or returns {@code null} if the line cannot be
   * parsed.
   */
  @Nullable
  LogRecordData parseLogRecord(String line) {
    try {
      return json().readValue(line, JsonLogRecordData.class);
    } catch (JsonProcessingException e) {
      logger.throwing(AuditLogFileStore.class.getName(), "parseLogRecord", e);
    }
    return null;
  }

  @Override
  public void removeAll(Collection<LogRecordData> logs) {
    lock.writeLock().lock();
    try {
      Set<String> recordIdsToRemove = new HashSet<>();
      for (LogRecordData log : logs) {
        recordIdsToRemove.add(generateRecordId(log));
      }

      // Read all lines, filter out the ones to remove, then write back
      Collection<String> remainingLines = new ArrayList<>();
      try (BufferedReader reader = Files.newBufferedReader(logFilePath)) {
        String line;
        while ((line = reader.readLine()) != null) {
          String recordId = generateRecordId(parseLogRecord(line));
          if (recordId == null || !recordIdsToRemove.contains(recordId)) {
            remainingLines.add(line);
          } else {
            loggedRecords.remove(recordId);
          }
        }
      } catch (IOException e) {
        logger.throwing(AuditLogFileStore.class.getName(), "removeAll", e);
      }

      // Write the remaining lines back to the file
      try (BufferedWriter writer =
          Files.newBufferedWriter(
              logFilePath, StandardOpenOption.CREATE, StandardOpenOption.TRUNCATE_EXISTING)) {
        for (String line : remainingLines) {
          writer.write(line);
          writer.newLine();
        }
      } catch (IOException e) {
        logger.throwing(AuditLogFileStore.class.getName(), "removeAll", e);
      }
    } finally {
      lock.writeLock().unlock();
    }
  }

  @Override
  public void save(LogRecordData logRecord) throws IOException {
    String recordId = generateRecordId(logRecord);

    // Check if we've already logged this record
    if (loggedRecords.contains(recordId)) {
      return;
    }

    lock.writeLock().lock();
    try (OutputStream out =
        Files.newOutputStream(logFilePath, StandardOpenOption.CREATE, StandardOpenOption.APPEND)) {
      ByteArrayOutputStream baos = new ByteArrayOutputStream();
      LogMarshaler.create(logRecord).writeJsonTo(baos); // closes the stream!
      out.write(baos.toByteArray());
      out.write(System.lineSeparator().getBytes(StandardCharsets.UTF_8));
      loggedRecords.add(recordId);
    } finally {
      lock.writeLock().unlock();
    }
  }
}
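For context, the store above writes one JSON line per record (via LogMarshaler), de-duplicates by a content hash, and can be drained and pruned by the processor. A small, hypothetical round-trip sketch follows; the temporary directory and the logRecord argument are placeholders, not part of this change:

// Hypothetical round-trip sketch; the temp directory and the LogRecordData instance are assumptions.
import io.opentelemetry.exporter.internal.otlp.logs.AuditLogFileStore;
import io.opentelemetry.sdk.logs.data.LogRecordData;
import java.io.IOException;
import java.nio.file.Files;
import java.util.Collection;

final class AuditLogFileStoreSketch {
  private AuditLogFileStoreSketch() {}

  static void roundTrip(LogRecordData logRecord) throws IOException {
    // A directory argument resolves to <dir>/audit.log (DEFAULT_LOG_FILE_NAME).
    AuditLogFileStore store = new AuditLogFileStore(Files.createTempDirectory("audit"));

    store.save(logRecord); // appended as one JSON line
    store.save(logRecord); // skipped: same generateRecordId(...) already recorded

    Collection<LogRecordData> pending = store.getAll(); // re-parsed via JsonLogRecordData
    store.removeAll(pending); // rewrites the file without the removed records
  }
}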