Skip to content

Commit 4069cdc

Browse files
committed
MOD: Enable map_symbols by default for clients
1 parent c18a6a6 commit 4069cdc

File tree

3 files changed

+27
-11
lines changed

3 files changed

+27
-11
lines changed

CHANGELOG.md

Lines changed: 1 addition & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -10,6 +10,7 @@
1010

1111
#### Breaking changes
1212
- Several log messages have been reformatted to improve clarity and reduce redundancy, especially at debug levels
13+
- The `map_symbols` parameter for `Historical.batch.submit_job()` now defaults to `True` for JSON and CSV encodings
1314

1415
## 0.66.0 - 2025-11-18
1516

databento/historical/api/batch.py

Lines changed: 25 additions & 10 deletions
Original file line numberDiff line numberDiff line change
@@ -71,7 +71,7 @@ def submit_job(
7171
compression: Compression | str = "zstd",
7272
pretty_px: bool = False,
7373
pretty_ts: bool = False,
74-
map_symbols: bool = False,
74+
map_symbols: bool | None = None,
7575
split_symbols: bool = False,
7676
split_duration: SplitDuration | str = "day",
7777
split_size: int | None = None,
@@ -116,9 +116,10 @@ def submit_job(
116116
pretty_ts : bool, default False
117117
If timestamps should be formatted as ISO 8601 strings.
118118
Only applicable for 'csv' or 'json' encodings.
119-
map_symbols : bool, default False
120-
If the requested symbol should be appended to every text encoded record.
121-
Only applicable for 'csv' or 'json' encodings.
119+
map_symbols : bool, optional
120+
If a symbol field should be included with every text-encoded record.
121+
If `None`, defaults to `True` for `csv` and `json` encodings and `False` for
122+
`dbn`.
122123
split_symbols : bool, default False
123124
If files should be split by raw symbol. Cannot be requested with `'ALL_SYMBOLS'`.
124125
split_duration : SplitDuration or str {'day', 'week', 'month', 'none'}, default 'day'
@@ -149,6 +150,10 @@ def submit_job(
149150
"""
150151
stype_in_valid = validate_enum(stype_in, SType, "stype_in")
151152
symbols_list = symbols_list_to_list(symbols, stype_in_valid)
153+
encoding_valid = validate_enum(encoding, Encoding, "encoding")
154+
155+
if map_symbols is None:
156+
map_symbols = encoding_valid != Encoding.DBN
152157

153158
data: dict[str, object | None] = {
154159
"dataset": validate_semantic_string(dataset, "dataset"),
@@ -158,7 +163,7 @@ def submit_job(
158163
"schema": str(validate_enum(schema, Schema, "schema")),
159164
"stype_in": str(stype_in_valid),
160165
"stype_out": str(validate_enum(stype_out, SType, "stype_out")),
161-
"encoding": str(validate_enum(encoding, Encoding, "encoding")),
166+
"encoding": str(encoding_valid),
162167
"compression": (
163168
str(validate_enum(compression, Compression, "compression")) if compression else None
164169
),
@@ -292,7 +297,9 @@ def download(
292297
293298
"""
294299
if keep_zip and filename_to_download:
295-
raise ValueError("Cannot specify an individual file to download when `keep_zip=True`")
300+
raise ValueError(
301+
"Cannot specify an individual file to download when `keep_zip=True`",
302+
)
296303

297304
batch_download = _BatchJob(
298305
self,
@@ -369,7 +376,9 @@ async def download_async(
369376
370377
"""
371378
if keep_zip and filename_to_download:
372-
raise ValueError("Cannot specify an individual file to download when `keep_zip=True`")
379+
raise ValueError(
380+
"Cannot specify an individual file to download when `keep_zip=True`",
381+
)
373382

374383
batch_download = _BatchJob(
375384
self,
@@ -458,7 +467,9 @@ def _download_batch_file(
458467
) as response:
459468
check_http_error(response)
460469
with open(output_path, mode=mode) as f:
461-
for chunk in response.iter_content(chunk_size=HTTP_STREAMING_READ_SIZE):
470+
for chunk in response.iter_content(
471+
chunk_size=HTTP_STREAMING_READ_SIZE,
472+
):
462473
f.write(chunk)
463474

464475
# Successfully wrote some data, reset attempts counter
@@ -548,7 +559,9 @@ def _download_batch_zip(
548559
) as response:
549560
check_http_error(response)
550561
with open(output_path, mode="wb") as f:
551-
for chunk in response.iter_content(chunk_size=HTTP_STREAMING_READ_SIZE):
562+
for chunk in response.iter_content(
563+
chunk_size=HTTP_STREAMING_READ_SIZE,
564+
):
552565
f.write(chunk)
553566
except BentoHttpError as exc:
554567
if exc.http_status == 429:
@@ -615,7 +628,9 @@ def __init__(
615628
urls = file_detail["urls"]
616629
except KeyError as exc:
617630
missing_key = exc.args[0]
618-
raise BentoError(f"Batch job manifest missing key '{missing_key}'") from None
631+
raise BentoError(
632+
f"Batch job manifest missing key '{missing_key}'",
633+
) from None
619634
except TypeError:
620635
raise BentoError("Error parsing job manifest") from None
621636

tests/test_historical_batch.py

Lines changed: 1 addition & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -99,7 +99,7 @@ def test_batch_submit_job_sends_expected_request(
9999
"compression": "zstd",
100100
"pretty_px": False,
101101
"pretty_ts": False,
102-
"map_symbols": False,
102+
"map_symbols": True,
103103
"split_symbols": False,
104104
"split_duration": "day",
105105
"delivery": "download",

0 commit comments

Comments
 (0)