@@ -96,6 +96,9 @@ def __init__(
         """A list of request IDs that should be prioritized (added with forefront=True).
         Most recent forefront requests are added at the beginning of the list."""

+        self._sequence_counter = 0
+        """A counter to track the order of requests added to the queue."""
+
     @override
     @property
     def metadata(self) -> RequestQueueMetadata:
@@ -277,10 +280,6 @@ async def add_batch_of_requests(
         await asyncio.to_thread(self.path_to_rq.mkdir, parents=True, exist_ok=True)

         for request in requests:
-            # Ensure the request has an ID
-            if not request.id:
-                request.id = crypto_random_object_id()
-
             # Check if the request is already in the queue by unique_key
             existing_request = None

@@ -357,6 +356,11 @@ async def add_batch_of_requests(
             if request_dict.get('handled_at') is None:
                 request_dict.pop('handled_at', None)

+            # Add sequence number to ensure FIFO ordering
+            sequence_number = self._sequence_counter
+            self._sequence_counter += 1
+            request_dict['_sequence'] = sequence_number
+
             request_data = await json_dumps(request_dict)
             await asyncio.to_thread(request_path.write_text, request_data, encoding='utf-8')

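The hunk above is the producer side of the change: instead of relying on file timestamps, each persisted request is stamped with a monotonically increasing in-memory counter. A minimal standalone sketch of the same idea (TinyFileQueue and its methods are hypothetical illustrations, not part of this patch or of the library's API):

import json
from pathlib import Path


class TinyFileQueue:
    """Illustrative sketch: persist each item with a '_sequence' stamp for FIFO recovery."""

    def __init__(self, directory: Path) -> None:
        self._dir = directory
        self._dir.mkdir(parents=True, exist_ok=True)
        self._sequence_counter = 0  # in-memory counter, same idea as the patch

    def enqueue(self, item_id: str, payload: dict) -> None:
        record = dict(payload)
        record['_sequence'] = self._sequence_counter  # stamp insertion order
        self._sequence_counter += 1
        (self._dir / f'{item_id}.json').write_text(json.dumps(record), encoding='utf-8')

    def fifo_order(self) -> list[str]:
        # Sort by the stored sequence number instead of file mtime, so the order
        # survives coarse or identical filesystem timestamps.
        records = [(json.loads(f.read_text(encoding='utf-8')), f) for f in self._dir.glob('*.json')]
        records.sort(key=lambda pair: pair[0].get('_sequence', -1))
        return [f.stem for _, f in records]

With this sketch, enqueuing 'a', 'b', 'c' and calling fifo_order() returns ['a', 'b', 'c'] even when all three files end up with the same modification time.
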
@@ -471,10 +475,10 @@ async def fetch_next_request(self) -> Request | None:

         # List all request files for regular (non-forefront) requests
         request_files = await asyncio.to_thread(list, self.path_to_rq.glob('*.json'))
-        regular_requests = []

-        # Get file creation times for sorting regular requests in FIFO order
-        request_file_times = {}
+        # Dictionary to store request files by their sequence number
+        request_sequences = {}
+        requests_without_sequence = []

         # Filter out metadata files and in-progress requests
         for request_file in request_files:
@@ -489,25 +493,34 @@ async def fetch_next_request(self) -> Request | None:
             if request_id in self._in_progress or request_id in self._forefront_requests:
                 continue

-            # Get file creation/modification time for FIFO ordering
+            # Read the file to get the sequence number
             try:
-                file_stat = await asyncio.to_thread(request_file.stat)
-                request_file_times[request_file] = file_stat.st_mtime
-            except Exception:
-                # If we can't get the time, use 0 (oldest)
-                request_file_times[request_file] = 0
-
-            regular_requests.append(request_file)
-
-        # Sort regular requests by creation time (FIFO order)
-        regular_requests.sort(key=lambda f: request_file_times[f])
+                file = await asyncio.to_thread(open, request_file)
+                try:
+                    file_content = json.load(file)
+                    # Skip if already handled
+                    if file_content.get('handled_at') is not None:
+                        continue
+
+                    # Use sequence number for ordering if available
+                    sequence_number = file_content.get('_sequence')
+                    if sequence_number is not None:
+                        request_sequences[sequence_number] = request_file
+                    else:
+                        # For backward compatibility with existing files
+                        requests_without_sequence.append(request_file)
+                finally:
+                    await asyncio.to_thread(file.close)
+            except (json.JSONDecodeError, ValidationError) as exc:
+                logger.warning(f'Failed to parse request file {request_file}: {exc!s}')

-        # Process regular requests in FIFO order
-        for request_file in regular_requests:
+        # Process requests with sequence numbers first, in FIFO order
+        for sequence in sorted(request_sequences.keys()):
+            request_file = request_sequences[sequence]
             file = await asyncio.to_thread(open, request_file)
             try:
                 file_content = json.load(file)
-                # Skip if already handled
+                # Skip if already handled (double-check)
                 if file_content.get('handled_at') is not None:
                     continue

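The fetch path above first partitions candidate files into sequence-stamped entries, processed in ascending `_sequence` order, and legacy entries without the field. A small sketch of that partition step, with hypothetical names and plain dicts standing in for parsed request files:

def partition_by_sequence(payloads: dict[str, dict]) -> tuple[list[str], list[str]]:
    # Split parsed request payloads (keyed by file name) into two buckets:
    # sequence-stamped files, returned in FIFO order, and legacy files without '_sequence'.
    sequenced: dict[int, str] = {}
    legacy: list[str] = []
    for file_name, payload in payloads.items():
        seq = payload.get('_sequence')
        if seq is not None:
            sequenced[seq] = file_name
        else:
            legacy.append(file_name)
    return [sequenced[s] for s in sorted(sequenced)], legacy
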
@@ -526,6 +539,45 @@ async def fetch_next_request(self) -> Request | None:
             finally:
                 await asyncio.to_thread(file.close)

+        # Process requests without sequence numbers using file timestamps (backward compatibility)
+        if requests_without_sequence:
+            # Get file creation times for sorting
+            request_file_times = {}
+            for request_file in requests_without_sequence:
+                try:
+                    file_stat = await asyncio.to_thread(request_file.stat)
+                    request_file_times[request_file] = file_stat.st_mtime
+                except Exception:  # noqa: PERF203
+                    # If we can't get the time, use 0 (oldest)
+                    request_file_times[request_file] = 0
+
+            # Sort by creation time
+            requests_without_sequence.sort(key=lambda f: request_file_times[f])
+
+            # Process requests without sequence in file timestamp order
+            for request_file in requests_without_sequence:
+                file = await asyncio.to_thread(open, request_file)
+                try:
+                    file_content = json.load(file)
+                    # Skip if already handled
+                    if file_content.get('handled_at') is not None:
+                        continue
+
+                    # Create request object
+                    request = Request(**file_content)
+
+                    # Mark as in-progress in memory
+                    self._in_progress.add(request.id)
+
+                    # Update accessed timestamp
+                    await self._update_metadata(update_accessed_at=True)
+                except (json.JSONDecodeError, ValidationError) as exc:
+                    logger.warning(f'Failed to parse request file {request_file}: {exc!s}')
+                else:
+                    return request
+                finally:
+                    await asyncio.to_thread(file.close)
+
         return None

     @override
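Files written before this change carry no `_sequence` field, so the final hunk falls back to ordering them by file modification time, treating an unreadable timestamp as oldest. A hedged sketch of that fallback ordering (OSError is assumed here as the failure mode, whereas the patch itself catches a broader Exception):

from pathlib import Path


def legacy_mtime_order(files: list[Path]) -> list[Path]:
    # Order legacy request files by filesystem mtime; files whose timestamp
    # cannot be read sort as oldest so they are not starved.
    def mtime_or_zero(file: Path) -> float:
        try:
            return file.stat().st_mtime
        except OSError:
            return 0.0
    return sorted(files, key=mtime_or_zero)

Modification times can collide when many request files are written within the same timestamp granularity, which is why the explicit `_sequence` counter is used for all newly written files.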