@@ -19,7 +19,7 @@ use anyhow::{anyhow, bail, Context, Result};
 use bytes::BytesMut;
 use futures::Future;
 use postcard::experimental::max_size::MaxSize;
-use tokio::io::{AsyncRead, AsyncReadExt, ReadBuf};
+use tokio::io::{AsyncRead, AsyncReadExt, BufReader, ReadBuf};
 use tracing::{debug, error};
 
 pub use crate::util::Hash;
@@ -103,18 +103,21 @@ impl Stats {
 /// We guarantee that the data is correct by incrementally verifying a hash
 #[repr(transparent)]
 #[derive(Debug)]
-pub struct DataStream(AsyncSliceDecoder<quinn::RecvStream>);
+pub struct DataStream(AsyncSliceDecoder<RecvStream>);
+
+type RecvStream =
+    async_compression::tokio::bufread::ZstdDecoder<tokio::io::BufReader<quinn::RecvStream>>;
 
 impl DataStream {
-    fn new(inner: quinn::RecvStream, hash: Hash) -> Self {
+    fn new(inner: RecvStream, hash: Hash) -> Self {
         DataStream(AsyncSliceDecoder::new(inner, &hash.into(), 0, u64::MAX))
     }
 
     async fn read_size(&mut self) -> io::Result<u64> {
         self.0.read_size().await
     }
 
-    fn into_inner(self) -> quinn::RecvStream {
+    fn into_inner(self) -> RecvStream {
         self.0.into_inner()
     }
 }
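The new `RecvStream` alias stacks three layers: the raw `quinn::RecvStream` only implements `AsyncRead`, `tokio::io::BufReader` supplies the `AsyncBufRead` interface that `async_compression`'s `bufread` decoders require, and `ZstdDecoder` decompresses on the fly before the hash-verifying `AsyncSliceDecoder` sees the bytes. A minimal sketch of that layering; the `wrap` helper is hypothetical and not part of the patch:

use async_compression::tokio::bufread::ZstdDecoder;
use tokio::io::BufReader;

// Hypothetical helper showing how the layers compose: quinn::RecvStream
// only implements AsyncRead, BufReader upgrades it to AsyncBufRead, and
// the bufread-flavored ZstdDecoder decompresses what it reads.
fn wrap(recv: quinn::RecvStream) -> ZstdDecoder<BufReader<quinn::RecvStream>> {
    ZstdDecoder::new(BufReader::new(recv))
}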
@@ -149,7 +152,7 @@ where
     let now = Instant::now();
     let connection = setup(opts).await?;
 
-    let (mut writer, mut reader) = connection.open_bi().await?;
+    let (mut writer, reader) = connection.open_bi().await?;
 
     on_connected().await?;
 
@@ -181,6 +184,7 @@ where
 {
     debug!("reading response");
     let mut in_buffer = BytesMut::with_capacity(1024);
+    let mut reader = BufReader::new(reader);
 
     // track total amount of blob data transferred
     let mut data_len = 0;
@@ -218,7 +222,7 @@ where
                 if blob_reader.read_exact(&mut [0u8; 1]).await.is_ok() {
                     bail!("`on_blob` callback did not fully read the blob content")
                 }
-                reader = blob_reader.into_inner();
+                reader = blob_reader.into_inner().into_inner();
             }
         }
 
@@ -236,11 +240,12 @@ where
     }
 
     // Shut down the stream
-    if let Some(chunk) = reader.read_chunk(8, false).await? {
-        reader.stop(0u8.into()).ok();
-        error!("Received unexpected data from the provider: {chunk:?}");
+    if let Ok(bytes) = reader.read_u8().await {
+        reader.into_inner().stop(0u8.into()).ok();
+        error!("Received unexpected data from the provider: {bytes:?}");
+    } else {
+        drop(reader);
     }
-    drop(reader);
 
     let elapsed = now.elapsed();
 
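Note the drain check can no longer call `read_chunk`, a `quinn`-specific method, through the `BufReader`; it now reads via the generic `tokio::io::AsyncReadExt::read_u8` and peels the `BufReader` off with `into_inner` when it needs `stop`, which lives on the underlying `quinn::RecvStream`. A standalone sketch of the pattern; the `check_drained` name is hypothetical:

use tokio::io::{AsyncReadExt, BufReader};

// Hypothetical standalone version of the drain check above: if even one
// more byte can be read after the last blob, the provider sent data we
// did not ask for, so signal `stop` on the raw QUIC stream.
async fn check_drained(mut reader: BufReader<quinn::RecvStream>) {
    if reader.read_u8().await.is_ok() {
        // `stop` is a quinn method, so unwrap the BufReader first.
        reader.into_inner().stop(0u8.into()).ok();
    }
}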
@@ -261,7 +266,7 @@ where
 /// The `AsyncReader` can be used to read the content.
 async fn handle_blob_response(
     hash: Hash,
-    mut reader: quinn::RecvStream,
+    mut reader: BufReader<quinn::RecvStream>,
     buffer: &mut BytesMut,
 ) -> Result<DataStream> {
     match read_lp(&mut reader, buffer).await? {
@@ -277,7 +282,10 @@ async fn handle_blob_response(
         // next blob in collection will be sent over
         Res::Found => {
             assert!(buffer.is_empty());
-            let decoder = DataStream::new(reader, hash);
+            // Decompress data
+            let decompress_reader =
+                async_compression::tokio::bufread::ZstdDecoder::new(reader);
+            let decoder = DataStream::new(decompress_reader, hash);
             Ok(decoder)
         }
     }
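For reference, the decode stack installed here can be exercised end to end with `async_compression` alone. A minimal round-trip sketch, assuming the crate is built with its `tokio` and `zstd` features; the matching encoder half is an assumption, since the provider side is not shown in this diff:

use async_compression::tokio::bufread::{ZstdDecoder, ZstdEncoder};
use tokio::io::{AsyncReadExt, BufReader};

#[tokio::main]
async fn main() -> std::io::Result<()> {
    let input = b"hello hello hello hello";

    // Compress: the bufread-flavored ZstdEncoder reads plaintext from any
    // AsyncBufRead source and yields zstd-compressed bytes.
    let mut compressed = Vec::new();
    ZstdEncoder::new(&input[..]).read_to_end(&mut compressed).await?;

    // Decompress: the same ZstdDecoder-over-BufReader layering the patch
    // puts on top of quinn::RecvStream.
    let mut output = Vec::new();
    ZstdDecoder::new(BufReader::new(&compressed[..]))
        .read_to_end(&mut output)
        .await?;

    assert_eq!(&output[..], &input[..]);
    Ok(())
}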