Skip to content

Commit 8f2a80d

Browse files
authored
chore(di): improve flush logging (#14612)
We improve the flush logging by correctly reporting the payload cardinality. We also fix a few typos. ## Checklist - [ ] PR author has checked that all the criteria below are met - The PR description includes an overview of the change - The PR description articulates the motivation for the change - The change includes tests OR the PR description describes a testing strategy - The PR description notes risks associated with the change, if any - Newly-added code is easy to change - The change follows the [library release note guidelines](https://ddtrace.readthedocs.io/en/stable/releasenotes.html) - The change includes or references documentation updates if necessary - Backport labels are set (if [applicable](https://ddtrace.readthedocs.io/en/latest/contributing.html#backporting)) ## Reviewer Checklist - [ ] Reviewer has checked that all the criteria below are met - Title is accurate - All changes are related to the pull request's stated goal - Avoids breaking [API](https://ddtrace.readthedocs.io/en/stable/versioning.html#interfaces) changes - Testing strategy adequately addresses listed risks - Newly-added code is easy to change - Release note makes sense to a user of the library - If necessary, author has acknowledged and discussed the performance implications of this PR as reported in the benchmarks PR comment - Backport labels are set in a manner that is consistent with the [release branch maintenance policy](https://ddtrace.readthedocs.io/en/latest/contributing.html#backporting)
1 parent a7c0fc2 commit 8f2a80d

File tree

4 files changed

+26
-14
lines changed

4 files changed

+26
-14
lines changed

ddtrace/debugging/_encoding.py

Lines changed: 5 additions & 3 deletions
Original file line numberDiff line numberDiff line change
@@ -13,6 +13,7 @@
1313
from typing import Iterator
1414
from typing import List
1515
from typing import Optional
16+
from typing import Tuple
1617
from typing import Union
1718

1819
from ddtrace.debugging._config import di_config
@@ -74,7 +75,7 @@ def put(self, item: Any) -> int:
7475
"""Enqueue the given item and returns its encoded size."""
7576

7677
@abc.abstractmethod
77-
def flush(self) -> Optional[Union[bytes, bytearray]]:
78+
def flush(self) -> Optional[Tuple[Union[bytes, bytearray], int]]:
7879
"""Flush the buffer and return the encoded data."""
7980

8081

@@ -341,17 +342,18 @@ def put_encoded(self, item: Snapshot, encoded: bytes) -> int:
341342
self._on_full(item, encoded)
342343
raise
343344

344-
def flush(self) -> Optional[Union[bytes, bytearray]]:
345+
def flush(self) -> Optional[Tuple[Union[bytes, bytearray], int]]:
345346
with self._lock:
346347
if self.count == 0:
347348
# Reclaim memory
348349
self._buffer._reset()
349350
return None
350351

351352
encoded = self._buffer.flush()
353+
count = self.count
352354
self.count = 0
353355
self._full = False
354-
return encoded
356+
return encoded, count
355357

356358
def is_full(self) -> bool:
357359
with self._lock:

ddtrace/debugging/_signal/collector.py

Lines changed: 4 additions & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -37,7 +37,10 @@ def __init__(self, tracks: Dict[SignalTrack, BufferedEncoder]) -> None:
3737
def _enqueue(self, log_signal: LogSignal) -> None:
3838
try:
3939
log.debug(
40-
"[%s][P: %s] SignalCollector enqueu signal on track %s", os.getpid(), os.getppid(), log_signal.__track__
40+
"[%s][P: %s] SignalCollector enqueue signal on track %s",
41+
os.getpid(),
42+
os.getppid(),
43+
log_signal.__track__,
4144
)
4245
self._tracks[log_signal.__track__].put(log_signal)
4346
except BufferFull:

ddtrace/debugging/_uploader.py

Lines changed: 4 additions & 4 deletions
Original file line numberDiff line numberDiff line change
@@ -180,18 +180,18 @@ def reset(self) -> None:
180180
self._collector._tracks = {t: ut.queue for t, ut in self._tracks.items()}
181181

182182
def _flush_track(self, track: UploaderTrack) -> None:
183-
queue = track.queue
184-
if (payload := queue.flush()) is not None and track.enabled:
183+
if (data := track.queue.flush()) is not None and track.enabled:
184+
payload, count = data
185185
try:
186186
self._write_with_backoff(payload, track.endpoint)
187-
meter.distribution("batch.cardinality", queue.count)
187+
meter.distribution("batch.cardinality", count)
188188
except SignalUploaderError:
189189
if track.track is SignalTrack.SNAPSHOT and not track.endpoint.startswith("/debugger/v1/diagnostics"):
190190
# Downgrade to diagnostics endpoint and retry once
191191
track.endpoint = f"/debugger/v1/diagnostics{self._endpoint_suffix}"
192192
log.debug("Downgrading snapshot endpoint to %s and trying again", track.endpoint)
193193
self._write_with_backoff(payload, track.endpoint)
194-
meter.distribution("batch.cardinality", queue.count)
194+
meter.distribution("batch.cardinality", count)
195195
else:
196196
raise # Propagate error to transition to agent check state
197197
except Exception:

tests/debugging/test_encoding.py

Lines changed: 13 additions & 6 deletions
Original file line numberDiff line numberDiff line change
@@ -235,8 +235,9 @@ def test_batch_json_encoder():
235235
for _ in range(2 * n_snapshots):
236236
queue.put(s)
237237

238-
count = queue.count
239-
payload = queue.flush()
238+
data = queue.flush()
239+
assert data is not None
240+
payload, count = data
240241
decoded = json.loads(payload.decode())
241242
assert len(decoded) == count
242243
assert n_snapshots <= count + 1 # Allow for rounding errors
@@ -261,13 +262,19 @@ def test_batch_flush_reencode():
261262
queue = SignalQueue(LogSignalJsonEncoder(None))
262263

263264
snapshot_total_size = sum(queue.put(s) for _ in range(2))
264-
assert queue.count == 2
265-
assert len(queue.flush()) == snapshot_total_size + 3
265+
data = queue.flush()
266+
assert data is not None
267+
payload, count = data
268+
assert count == 2
269+
assert len(payload) == snapshot_total_size + 3
266270

267271
a, b = queue.put(s), queue.put(s)
268272
assert abs(a - b) < 1024
269-
assert queue.count == 2
270-
assert len(queue.flush()) == a + b + 3
273+
data = queue.flush()
274+
assert data is not None
275+
payload, count = data
276+
assert count == 2
277+
assert len(payload) == a + b + 3
271278

272279

273280
# ---- Side effects ----

0 commit comments

Comments (0)