Skip to content

Commit 82100a9

Browse files
authored
Add an experimental mixed io mode. (#61)
2 parents 3ceb9c4 + a1dbca0 commit 82100a9

File tree

16 files changed

+536
-81
lines changed

16 files changed

+536
-81
lines changed

Cargo.toml

Lines changed: 4 additions & 2 deletions
Original file line numberDiff line numberDiff line change
@@ -20,13 +20,15 @@ smallvec = "1"
2020
bytes = { version = "1" }
2121
futures-lite = { version = "2.3", optional = true }
2222
self_cell = { version = "1" }
23-
iroh-io = { version = "0.6.0", default_features = false, optional = true }
24-
positioned-io = { version = "0.3.1", default_features = false }
23+
iroh-io = { version = "0.6.0", default-features = false, optional = true }
24+
positioned-io = { version = "0.3.1", default-features = false }
2525
genawaiter = { version = "0.99.1", features = ["futures03"], optional = true }
26+
tokio = { version = "1", features = ["sync"], default-features = false, optional = true }
2627

2728
[features]
2829
tokio_fsm = ["dep:futures-lite", "dep:iroh-io"]
2930
validate = ["dep:genawaiter"]
31+
experimental-mixed = ["dep:tokio"]
3032
default = ["tokio_fsm", "validate"]
3133

3234
[dev-dependencies]

cli/main.rs

Lines changed: 2 additions & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -1,7 +1,8 @@
1+
use std::{io::Write, path::PathBuf};
2+
13
use anyhow::Context;
24
use bao_tree::{BaoTree, BlockSize};
35
use clap::{Parser, Subcommand};
4-
use std::{io::Write, path::PathBuf};
56

67
#[derive(Parser, Debug, Clone)]
78
#[clap(version)]

examples/encode_decode_async.rs

Lines changed: 2 additions & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -1,3 +1,5 @@
1+
use std::io;
2+
13
use bao_tree::{
24
io::{
35
fsm::{decode_ranges, encode_ranges_validated, valid_ranges, CreateOutboard},
@@ -8,7 +10,6 @@ use bao_tree::{
810
};
911
use bytes::BytesMut;
1012
use futures_lite::StreamExt;
11-
use std::io;
1213

1314
/// Use a block size of 16 KiB, a good default for most cases
1415
const BLOCK_SIZE: BlockSize = BlockSize::from_chunk_log(4);

examples/encode_decode_sync.rs

Lines changed: 2 additions & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -1,3 +1,5 @@
1+
use std::io;
2+
13
use bao_tree::{
24
io::{
35
outboard::PreOrderOutboard,
@@ -6,7 +8,6 @@ use bao_tree::{
68
},
79
BlockSize, ByteRanges, ChunkRanges,
810
};
9-
use std::io;
1011

1112
/// Use a block size of 16 KiB, a good default for most cases
1213
const BLOCK_SIZE: BlockSize = BlockSize::from_chunk_log(4);

src/io/error.rs

Lines changed: 2 additions & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -1,9 +1,10 @@
11
//! Errors when encoding or decoding
22
//!
33
//! These errors contain more specific information about e.g. where a hash mismatch occurred
4-
use crate::{ChunkNum, TreeNode};
54
use std::{fmt, io};
65

6+
use crate::{ChunkNum, TreeNode};
7+
78
/// Error when decoding from a reader, after the size has been read
89
#[derive(Debug)]
910
pub enum DecodeError {

src/io/fsm.rs

Lines changed: 11 additions & 17 deletions
Original file line numberDiff line numberDiff line change
@@ -14,30 +14,25 @@ use std::{
1414
result,
1515
};
1616

17-
use crate::{
18-
blake3, hash_subtree,
19-
iter::ResponseIter,
20-
rec::{encode_selected_rec, truncate_ranges, truncate_ranges_owned},
21-
ChunkRanges, ChunkRangesRef,
22-
};
2317
use blake3::guts::parent_cv;
2418
use bytes::Bytes;
19+
pub use iroh_io::{AsyncSliceReader, AsyncSliceWriter};
2520
use iroh_io::{AsyncStreamReader, AsyncStreamWriter};
2621
use smallvec::SmallVec;
2722

2823
pub use super::BaoContentItem;
24+
use super::{combine_hash_pair, DecodeError};
2925
use crate::{
26+
blake3, hash_subtree,
3027
io::{
3128
error::EncodeError,
3229
outboard::{PostOrderOutboard, PreOrderOutboard},
3330
Leaf, Parent,
3431
},
35-
iter::BaoChunk,
36-
BaoTree, BlockSize, TreeNode,
32+
iter::{BaoChunk, ResponseIter},
33+
rec::{encode_selected_rec, truncate_ranges, truncate_ranges_owned},
34+
BaoTree, BlockSize, ChunkRanges, ChunkRangesRef, TreeNode,
3735
};
38-
pub use iroh_io::{AsyncSliceReader, AsyncSliceWriter};
39-
40-
use super::{combine_hash_pair, DecodeError};
4136

4237
/// A binary merkle tree for blake3 hashes of a blob.
4338
///
@@ -136,7 +131,7 @@ pub trait CreateOutboard {
136131
fn init_from(&mut self, data: impl AsyncStreamReader) -> impl Future<Output = io::Result<()>>;
137132
}
138133

139-
impl<'b, O: Outboard> Outboard for &'b mut O {
134+
impl<O: Outboard> Outboard for &mut O {
140135
fn root(&self) -> blake3::Hash {
141136
(**self).root()
142137
}
@@ -173,7 +168,7 @@ impl<R: AsyncSliceReader> Outboard for PreOrderOutboard<R> {
173168
}
174169
}
175170

176-
impl<'b, O: OutboardMut> OutboardMut for &'b mut O {
171+
impl<O: OutboardMut> OutboardMut for &mut O {
177172
async fn save(
178173
&mut self,
179174
node: TreeNode,
@@ -760,13 +755,12 @@ mod validate {
760755
use genawaiter::sync::{Co, Gen};
761756
use iroh_io::AsyncSliceReader;
762757

758+
use super::Outboard;
763759
use crate::{
764760
blake3, hash_subtree, io::LocalBoxFuture, rec::truncate_ranges, split, BaoTree, ChunkNum,
765761
ChunkRangesRef, TreeNode,
766762
};
767763

768-
use super::Outboard;
769-
770764
/// Given a data file and an outboard, compute all valid ranges.
771765
///
772766
/// This is not cheap since it recomputes the hashes for all chunks.
@@ -797,7 +791,7 @@ mod validate {
797791
co: &'a Co<io::Result<Range<ChunkNum>>>,
798792
}
799793

800-
impl<'a, O: Outboard, D: AsyncSliceReader> RecursiveDataValidator<'a, O, D> {
794+
impl<O: Outboard, D: AsyncSliceReader> RecursiveDataValidator<'_, O, D> {
801795
async fn validate(
802796
outboard: O,
803797
data: D,
@@ -923,7 +917,7 @@ mod validate {
923917
co: &'a Co<io::Result<Range<ChunkNum>>>,
924918
}
925919

926-
impl<'a, O: Outboard> RecursiveOutboardValidator<'a, O> {
920+
impl<O: Outboard> RecursiveOutboardValidator<'_, O> {
927921
async fn validate(
928922
outboard: O,
929923
ranges: &ChunkRangesRef,

0 commit comments

Comments
 (0)