@@ -71,7 +71,7 @@ def submit_job(
         compression: Compression | str = "zstd",
         pretty_px: bool = False,
         pretty_ts: bool = False,
-        map_symbols: bool = False,
+        map_symbols: bool | None = None,
         split_symbols: bool = False,
         split_duration: SplitDuration | str = "day",
         split_size: int | None = None,
@@ -116,9 +116,10 @@ def submit_job(
         pretty_ts : bool, default False
             If timestamps should be formatted as ISO 8601 strings.
             Only applicable for 'csv' or 'json' encodings.
-        map_symbols : bool, default False
-            If the requested symbol should be appended to every text encoded record.
-            Only applicable for 'csv' or 'json' encodings.
+        map_symbols : bool, optional
+            If a symbol field should be included with every text encoded record.
+            If `None`, will default to `True` for `csv` and `json` encodings and
+            `False` for `dbn`.
         split_symbols : bool, default False
             If files should be split by raw symbol. Cannot be requested with `'ALL_SYMBOLS'`.
         split_duration : SplitDuration or str {'day', 'week', 'month', 'none'}, default 'day'
@@ -149,6 +150,10 @@ def submit_job(
         """
         stype_in_valid = validate_enum(stype_in, SType, "stype_in")
         symbols_list = symbols_list_to_list(symbols, stype_in_valid)
+        encoding_valid = validate_enum(encoding, Encoding, "encoding")
+
+        if map_symbols is None:
+            map_symbols = encoding_valid != Encoding.DBN

         data: dict[str, object | None] = {
             "dataset": validate_semantic_string(dataset, "dataset"),
@@ -158,7 +163,7 @@ def submit_job(
             "schema": str(validate_enum(schema, Schema, "schema")),
             "stype_in": str(stype_in_valid),
             "stype_out": str(validate_enum(stype_out, SType, "stype_out")),
-            "encoding": str(validate_enum(encoding, Encoding, "encoding")),
+            "encoding": str(encoding_valid),
             "compression": (
                 str(validate_enum(compression, Compression, "compression")) if compression else None
             ),
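
With these changes, callers that never set `map_symbols` get symbol mapping automatically for text encodings. A quick sketch of the resolved defaults from the caller's side (client construction, dataset, symbols, and date range are illustrative values, not taken from this diff):

    import databento as db

    client = db.Historical("YOUR_API_KEY")  # hypothetical API key

    # encoding="csv": map_symbols is None, so it resolves to True and each
    # text encoded record carries a symbol field.
    client.batch.submit_job(
        dataset="GLBX.MDP3",
        symbols="ESM2",
        schema="trades",
        encoding="csv",
        start="2022-06-06",
        end="2022-06-07",
    )

    # encoding="dbn": map_symbols is None, so it resolves to False.
    client.batch.submit_job(
        dataset="GLBX.MDP3",
        symbols="ESM2",
        schema="trades",
        encoding="dbn",
        start="2022-06-06",
        end="2022-06-07",
    )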
@@ -292,7 +297,9 @@ def download(

         """
         if keep_zip and filename_to_download:
-            raise ValueError("Cannot specify an individual file to download when `keep_zip=True`")
+            raise ValueError(
+                "Cannot specify an individual file to download when `keep_zip=True`",
+            )

         batch_download = _BatchJob(
             self,
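
The same guard is added to both `download` and `download_async` (next hunk). In practice it rejects this combination; the job ID and output directory here are hypothetical:

    # OK: download the entire job as a single ZIP.
    client.batch.download(
        job_id="GLBX-20220610-XXXXXXXXXX",
        output_dir="data/",
        keep_zip=True,
    )

    # Raises ValueError: a single file cannot be requested when keep_zip=True.
    client.batch.download(
        job_id="GLBX-20220610-XXXXXXXXXX",
        output_dir="data/",
        keep_zip=True,
        filename_to_download="manifest.json",
    )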
@@ -369,7 +376,9 @@ async def download_async(

         """
         if keep_zip and filename_to_download:
-            raise ValueError("Cannot specify an individual file to download when `keep_zip=True`")
+            raise ValueError(
+                "Cannot specify an individual file to download when `keep_zip=True`",
+            )

         batch_download = _BatchJob(
             self,
@@ -458,7 +467,9 @@ def _download_batch_file(
                 ) as response:
                     check_http_error(response)
                     with open(output_path, mode=mode) as f:
-                        for chunk in response.iter_content(chunk_size=HTTP_STREAMING_READ_SIZE):
+                        for chunk in response.iter_content(
+                            chunk_size=HTTP_STREAMING_READ_SIZE,
+                        ):
                             f.write(chunk)

                 # Successfully wrote some data, reset attempts counter
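
For reference, the wrapped call is the standard `requests` chunked-streaming pattern; a self-contained sketch, with the URL, output path, and chunk size as placeholders:

    import requests

    CHUNK_SIZE = 64 * 1024  # placeholder for HTTP_STREAMING_READ_SIZE

    with requests.get("https://example.com/file.zst", stream=True) as response:
        response.raise_for_status()
        with open("file.zst", mode="wb") as f:
            # iter_content yields the body incrementally, so large batch
            # files are never held fully in memory.
            for chunk in response.iter_content(chunk_size=CHUNK_SIZE):
                f.write(chunk)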
@@ -548,7 +559,9 @@ def _download_batch_zip(
             ) as response:
                 check_http_error(response)
                 with open(output_path, mode="wb") as f:
-                    for chunk in response.iter_content(chunk_size=HTTP_STREAMING_READ_SIZE):
+                    for chunk in response.iter_content(
+                        chunk_size=HTTP_STREAMING_READ_SIZE,
+                    ):
                         f.write(chunk)
         except BentoHttpError as exc:
             if exc.http_status == 429:
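
The `except` branch above feeds a retry on HTTP 429 (rate limiting). A minimal sketch of that pattern under assumed names; the loop structure, attempt cap, and backoff value are assumptions, not from this diff:

    import time

    def download_with_retry(do_download, max_attempts=5, backoff_s=1.0):
        # Retry only on HTTP 429; any other HTTP error propagates immediately.
        for attempt in range(max_attempts):
            try:
                return do_download()
            except BentoHttpError as exc:
                if exc.http_status == 429 and attempt < max_attempts - 1:
                    time.sleep(backoff_s)
                    continue
                raise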
@@ -615,7 +628,9 @@ def __init__(
                 urls = file_detail["urls"]
             except KeyError as exc:
                 missing_key = exc.args[0]
-                raise BentoError(f"Batch job manifest missing key '{missing_key}'") from None
+                raise BentoError(
+                    f"Batch job manifest missing key '{missing_key}'",
+                ) from None
             except TypeError:
                 raise BentoError("Error parsing job manifest") from None

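Isolated, this manifest handling converts raw dict-access failures into a domain error: a missing key becomes a descriptive `BentoError`, and a non-mapping entry (which raises `TypeError` on subscripting) is reported as a parse failure. A hypothetical helper and malformed entries illustrate both paths:

    def parse_urls(file_detail):
        # Hypothetical helper isolating the pattern above.
        try:
            return file_detail["urls"]
        except KeyError as exc:
            raise BentoError(f"Batch job manifest missing key '{exc.args[0]}'") from None
        except TypeError:
            raise BentoError("Error parsing job manifest") from None

    parse_urls({"filename": "x.csv"})  # BentoError: missing key 'urls'
    parse_urls(None)                   # BentoError: error parsing job manifest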