@@ -14,22 +14,19 @@ use daphne::{
     },
     DapError, VdafConfig,
 };
-use futures::{
-    future::{ready, try_join_all},
-    StreamExt, TryStreamExt,
-};
+use futures::{future::try_join_all, StreamExt, TryStreamExt};
 use prio::codec::{CodecError, ParameterizedDecode};
 use serde::{Deserialize, Serialize};
-use std::{borrow::Cow, collections::HashSet, ops::ControlFlow, time::Duration};
+use std::{borrow::Cow, collections::HashSet, future::ready, ops::ControlFlow, time::Duration};
 use tracing::Instrument;
 use worker::*;
 
-use super::{req_parse, Alarmed, DapDurableObject, GarbageCollectable};
+use super::{req_parse, state_set_if_not_exists, Alarmed, DapDurableObject, GarbageCollectable};
 
 pub(crate) const DURABLE_REPORTS_PROCESSED_INITIALIZE: &str =
     "/internal/do/reports_processed/initialize";
-pub(crate) const DURABLE_REPORTS_PROCESSED_MARK_AGGREGATED: &str =
-    "/internal/do/reports_processed/mark_aggregated";
+pub(crate) const DURABLE_REPORTS_PROCESSED_INITIALIZED: &str =
+    "/internal/do/reports_processed/initialized";
 
 /// Durable Object (DO) for tracking which reports have been processed.
 ///
@@ -63,63 +60,6 @@ impl<'id> From<&'id ReportId> for ReportIdKey<'id> {
     }
 }
 
-#[derive(Debug)]
-enum CheckedReplays<'s> {
-    SomeReplayed(Vec<&'s ReportId>),
-    AllFresh(Vec<ReportIdKey<'s>>),
-}
-
-impl<'r> Default for CheckedReplays<'r> {
-    fn default() -> Self {
-        Self::AllFresh(vec![])
-    }
-}
-
-impl<'r> CheckedReplays<'r> {
-    fn add_replay(mut self, id: &'r ReportId) -> Self {
-        match &mut self {
-            Self::SomeReplayed(r) => {
-                r.push(id);
-                self
-            }
-            Self::AllFresh(_) => Self::SomeReplayed(vec![id]),
-        }
-    }
-
-    fn add_fresh(mut self, id: ReportIdKey<'r>) -> Self {
-        match &mut self {
-            Self::SomeReplayed(_) => {}
-            Self::AllFresh(r) => r.push(id),
-        }
-        self
-    }
-}
-
-impl ReportsProcessed {
-    async fn check_replays<'s>(&self, report_ids: &'s [ReportId]) -> Result<CheckedReplays<'s>> {
-        futures::stream::iter(report_ids.iter().map(ReportIdKey::from))
-            .then(|id| {
-                let state = &self.state;
-                async move {
-                    state_get::<bool>(state, &id.1)
-                        .await
-                        .map(|presence| match presence {
-                            // if it's present then it's a replay
-                            Some(true) => Err(id.0),
-                            Some(false) | None => Ok(id),
-                        })
-                }
-            })
-            .try_fold(CheckedReplays::default(), |acc, id| async move {
-                Ok(match id {
-                    Ok(not_replayed) => acc.add_fresh(not_replayed),
-                    Err(replayed) => acc.add_replay(replayed),
-                })
-            })
-            .await
-    }
-}
-
 #[durable_object]
 impl DurableObject for ReportsProcessed {
     fn new(state: State, env: Env) -> Self {
@@ -166,6 +106,22 @@ impl ReportsProcessed {
             .await?;
 
         match (req.path().as_ref(), req.method()) {
+            (DURABLE_REPORTS_PROCESSED_INITIALIZED, Method::Post) => {
+                let to_mark = req_parse::<Vec<ReportId>>(&mut req).await?;
+                let state = &self.state;
+                let replays = futures::stream::iter(&to_mark)
+                    .map(|id| async move {
+                        state_set_if_not_exists(state, &format!("processed/{id}"), &true)
+                            .await
+                            .map(|o| o.is_some().then_some(id))
+                    })
+                    .buffer_unordered(usize::MAX)
+                    .try_filter_map(|replay| ready(Ok(replay)))
+                    .try_collect::<Vec<_>>()
+                    .await?;
+
+                Response::from_json(&replays)
+            }
             // Initialize a report:
             // * Ensure the report wasn't replayed
             // * Ensure the report won't be included in a batch that was already collected
@@ -230,31 +186,6 @@ impl ReportsProcessed {
                 })
             }
 
-            // Mark reports as aggregated.
-            //
-            // If there are any replays, no reports are marked as aggregated.
-            //
-            // Idempotent
-            // Input: `Vec<ReportId>`
-            // Output: `Vec<ReportId>`
-            (DURABLE_REPORTS_PROCESSED_MARK_AGGREGATED, Method::Post) => {
-                let report_ids: Vec<ReportId> = req_parse(&mut req).await?;
-                match self.check_replays(&report_ids).await? {
-                    CheckedReplays::SomeReplayed(report_ids) => Response::from_json(&report_ids),
-                    CheckedReplays::AllFresh(report_ids) => {
-                        let state = &self.state;
-                        futures::stream::iter(&report_ids)
-                            .then(|report_id| async move {
-                                state.storage().put(&report_id.1, &true).await
-                            })
-                            .try_for_each(|_| ready(Ok(())))
-                            .await?;
-
-                        Response::from_json(&[(); 0])
-                    }
-                }
-            }
-
             _ => Err(int_err(format!(
                 "ReportsProcessed: unexpected request: method={:?}; path={:?}",
                 req.method(),
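
For readers of this patch: the new `DURABLE_REPORTS_PROCESSED_INITIALIZED` handler folds replay detection and marking into a single pass, treating an already-existing `processed/{id}` key as a replay and returning only the replayed report IDs. The sketch below (not part of the diff) illustrates that set-if-not-exists contract using an in-memory map in place of Durable Object storage and plain strings in place of `ReportId`; the `set_if_not_exists` helper here is illustrative and only assumed to mirror the behavior of `state_set_if_not_exists`.

```rust
use std::collections::HashMap;

/// In-memory stand-in for Durable Object storage keyed by "processed/{id}".
struct MockStorage(HashMap<String, bool>);

impl MockStorage {
    /// Store `val` under `key` only if the key is absent; otherwise return the
    /// existing value. This mirrors the contract the handler assumes for
    /// `state_set_if_not_exists`: `Some(_)` means the report was seen before.
    fn set_if_not_exists(&mut self, key: &str, val: bool) -> Option<bool> {
        if let Some(&existing) = self.0.get(key) {
            Some(existing)
        } else {
            self.0.insert(key.to_owned(), val);
            None
        }
    }
}

fn main() {
    let mut storage = MockStorage(HashMap::new());
    let to_mark = ["report-a", "report-b", "report-a"]; // "report-a" arrives twice

    // Collect the IDs whose keys already existed (the replays), just as the
    // handler collects them into `replays` and returns them as JSON.
    let replays: Vec<&str> = to_mark
        .iter()
        .filter(|id| {
            storage
                .set_if_not_exists(&format!("processed/{id}"), true)
                .is_some()
        })
        .copied()
        .collect();

    assert_eq!(replays, vec!["report-a"]);
}
```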
0 commit comments