@@ -317,10 +317,14 @@ def scan_documents(
     errors, local_scan_results = run_parallel_batched_scan(
         scan_batch_thread_func, documents_to_scan, progress_bar=progress_bar
     )
-    aggregation_report_url = _try_get_aggregation_report_url_if_needed(
-        scan_parameters, context.obj['client'], context.obj['scan_type']
-    )
-    set_aggregation_report_url(context, aggregation_report_url)
+
+    if len(local_scan_results) > 1:
+        # if we used more than one batch, we need to fetch aggregate report url
+        aggregation_report_url = _try_get_aggregation_report_url_if_needed(
+            scan_parameters, context.obj['client'], context.obj['scan_type']
+        )
+        set_aggregation_report_url(context, aggregation_report_url)
+
     progress_bar.set_section_length(ScanProgressBarSection.GENERATE_REPORT, 1)
     progress_bar.update(ScanProgressBarSection.GENERATE_REPORT)
     progress_bar.stop()
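
The hunk above guards the aggregation call so the extra round-trip happens only when the scan was split into more than one batch; a single batch needs no aggregated report URL. Below is a minimal standalone sketch of that guard. The names (resolve_report_url, fetch_aggregation_report_url, batch_ids) are illustrative stand-ins, not the CLI's real client API.

from typing import List, Optional


def fetch_aggregation_report_url(batch_ids: List[str]) -> str:
    # Hypothetical stand-in for the real client call (_try_get_aggregation_report_url_if_needed).
    return 'https://example.invalid/reports/aggregated?batches=' + ','.join(batch_ids)


def resolve_report_url(batch_ids: List[str]) -> Optional[str]:
    # Mirrors the len(local_scan_results) > 1 guard: only multi-batch scans
    # pay for the extra round-trip to the aggregation endpoint.
    if len(batch_ids) <= 1:
        return None
    return fetch_aggregation_report_url(batch_ids)


print(resolve_report_url(['batch-1']))             # None
print(resolve_report_url(['batch-1', 'batch-2']))  # aggregated URL
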
@@ -863,8 +867,6 @@ def _get_scan_result(
     if not scan_details.detections_count:
         return init_default_scan_result(cycode_client, scan_id, scan_type, should_get_report)

-    wait_for_detections_creation(cycode_client, scan_type, scan_id, scan_details.detections_count)
-
     scan_detections = cycode_client.get_scan_detections(scan_type, scan_id)

     return ZippedFileScanResult(
@@ -899,35 +901,6 @@ def _try_get_report_url_if_needed(
         logger.debug('Failed to get report URL', exc_info=e)


-def wait_for_detections_creation(
-    cycode_client: 'ScanClient', scan_type: str, scan_id: str, expected_detections_count: int
-) -> None:
-    logger.debug('Waiting for detections to be created')
-
-    scan_persisted_detections_count = 0
-    polling_timeout = consts.DETECTIONS_COUNT_VERIFICATION_TIMEOUT_IN_SECONDS
-    end_polling_time = time.time() + polling_timeout
-
-    while time.time() < end_polling_time:
-        scan_persisted_detections_count = cycode_client.get_scan_detections_count(scan_type, scan_id)
-        logger.debug(
-            'Excepting %s detections, got %s detections (%s more; %s seconds left)',
-            expected_detections_count,
-            scan_persisted_detections_count,
-            expected_detections_count - scan_persisted_detections_count,
-            round(end_polling_time - time.time()),
-        )
-        if scan_persisted_detections_count == expected_detections_count:
-            return
-
-        time.sleep(consts.DETECTIONS_COUNT_VERIFICATION_WAIT_INTERVAL_IN_SECONDS)
-
-    logger.debug('%s detections has been created', scan_persisted_detections_count)
-    raise custom_exceptions.ScanAsyncError(
-        f'Failed to wait for detections to be created after {polling_timeout} seconds'
-    )
-
-
 def _map_detections_per_file(detections: List[dict]) -> List[DetectionsPerFile]:
     detections_per_files = {}
     for detection in detections:
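
The deleted wait_for_detections_creation helper was a poll-until-count-or-timeout loop: compare the persisted detections count against the expected count, sleep between attempts, and raise once the deadline passes. A generic, self-contained sketch of that pattern follows, with illustrative names only (wait_for_expected_count, get_count); it assumes the same structure as the removed code rather than reproducing the CLI's client.

import time
from typing import Callable


def wait_for_expected_count(
    get_count: Callable[[], int], expected: int, timeout_seconds: float = 60, interval_seconds: float = 1
) -> None:
    # Poll until the persisted count reaches the expected value or the deadline passes.
    deadline = time.time() + timeout_seconds
    while time.time() < deadline:
        if get_count() >= expected:
            return
        time.sleep(interval_seconds)
    raise TimeoutError(f'expected {expected} items within {timeout_seconds} seconds')


counter = {'value': 0}


def fake_count() -> int:
    # Simulates detections being persisted gradually on the server side.
    counter['value'] += 1
    return counter['value']


wait_for_expected_count(fake_count, expected=3, timeout_seconds=5, interval_seconds=0.1)
print('all items persisted')
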