diff --git a/changes/17819.md b/changes/17819.md
new file mode 100644
index 000000000000..344c12377317
--- /dev/null
+++ b/changes/17819.md
@@ -0,0 +1,2 @@
+Fix crash in ledger sync check that could occur when trying to load ledger
+databases that were only partially synced to a network ledger
diff --git a/src/lib/merkle_ledger/any_ledger.ml b/src/lib/merkle_ledger/any_ledger.ml
index dcc8845f20ee..f01943fa6739 100644
--- a/src/lib/merkle_ledger/any_ledger.ml
+++ b/src/lib/merkle_ledger/any_ledger.ml
@@ -81,6 +81,8 @@ module Make_base (Inputs : Intf.Inputs.Intf) :

   let set_at_index_exn (T ((module Base), t)) = Base.set_at_index_exn t

+  let get_at_index (T ((module Base), t)) = Base.get_at_index t
+
   let get_at_index_exn (T ((module Base), t)) = Base.get_at_index_exn t

   let set_batch ?hash_cache (T ((module Base), t)) =
@@ -118,6 +120,8 @@ module Make_base (Inputs : Intf.Inputs.Intf) :

   let token_owners (T ((module Base), t)) = Base.token_owners t

+  let iteri_untrusted (T ((module Base), t)) = Base.iteri_untrusted t
+
   let iteri (T ((module Base), t)) = Base.iteri t

   (* ignored_keys must be Base.Keys.Set.t, but that isn't necessarily the same as Keys.Set.t for the
diff --git a/src/lib/merkle_ledger/converting_merkle_tree.ml b/src/lib/merkle_ledger/converting_merkle_tree.ml
index 35f5c9ad386d..58dc18859a96 100644
--- a/src/lib/merkle_ledger/converting_merkle_tree.ml
+++ b/src/lib/merkle_ledger/converting_merkle_tree.ml
@@ -109,6 +109,8 @@ end)

   let to_list_sequential t = Primary_ledger.to_list_sequential t.primary_ledger

+  let iteri_untrusted t ~f = Primary_ledger.iteri_untrusted t.primary_ledger ~f
+
   let iteri t ~f = Primary_ledger.iteri t.primary_ledger ~f

   let foldi t ~init ~f = Primary_ledger.foldi t.primary_ledger ~init ~f
@@ -180,6 +182,8 @@ end)
         ~f:(fun (loc, account) -> (loc, convert account))
         located_accounts )

+  let get_at_index t idx = Primary_ledger.get_at_index t.primary_ledger idx
+
   let get_at_index_exn t idx =
     Primary_ledger.get_at_index_exn t.primary_ledger idx

@@ -264,12 +268,12 @@ struct
     Primary_db.num_accounts db1 = Converting_db.num_accounts db2
     &&
     let is_synced = ref true in
-    Primary_db.iteri db1 ~f:(fun idx stable_account ->
-        let expected_unstable_account = convert stable_account in
-        let actual_unstable_account = Converting_db.get_at_index_exn db2 idx in
+    Primary_db.iteri_untrusted db1 ~f:(fun idx stable_account ->
+        let expected_unstable_account = Option.map ~f:convert stable_account in
+        let actual_unstable_account = Converting_db.get_at_index db2 idx in
         if
           not
-            (Inputs.converted_equal expected_unstable_account
+            (Option.equal Inputs.converted_equal expected_unstable_account
               actual_unstable_account )
         then is_synced := false ) ;
     !is_synced
diff --git a/src/lib/merkle_ledger/database.ml b/src/lib/merkle_ledger/database.ml
index 2d2a138cea45..663dd4b010b7 100644
--- a/src/lib/merkle_ledger/database.ml
+++ b/src/lib/merkle_ledger/database.ml
@@ -278,9 +278,13 @@ module Make (Inputs : Intf.Inputs.DATABASE) = struct
       Option.map (last_location mdb) ~f:Location.to_path_exn
   end

-  let get_at_index_exn mdb index =
+  let get_at_index mdb index =
     let addr = Addr.of_int_exn ~ledger_depth:mdb.depth index in
-    get mdb (Location.Account addr) |> Option.value_exn
+    get mdb (Location.Account addr)
+
+  let get_at_index_exn mdb index =
+    get_at_index mdb index
+    |> Option.value_exn ~message:"Expected account at index" ~here:[%here]

   let all_accounts (t : t) =
     match Account_location.last_location_address t with
@@ -572,13 +576,19 @@ module Make (Inputs : Intf.Inputs.DATABASE) = struct
     | Ok location ->
         Ok (`Existed, location)

-  let iteri t ~f =
+  let iteri_untrusted t ~f =
     match Account_location.last_location_address t with
     | None ->
         ()
     | Some last_addr ->
         Sequence.range ~stop:`inclusive 0 (Addr.to_int last_addr)
-        |> Sequence.iter ~f:(fun i -> f i (get_at_index_exn t i))
+        |> Sequence.iter ~f:(fun i -> f i (get_at_index t i))
+
+  let iteri t ~f =
+    iteri_untrusted t ~f:(fun index account_opt ->
+        f index
+          (Option.value_exn ~message:"Expected account at index" ~here:[%here]
+             account_opt ) )

   (* TODO : if key-value store supports iteration mechanism, like RocksDB,
      maybe use that here, instead of loading all accounts into memory See Issue
diff --git a/src/lib/merkle_ledger/intf.ml b/src/lib/merkle_ledger/intf.ml
index 8be1280f5497..2e54882ad6c6 100644
--- a/src/lib/merkle_ledger/intf.ml
+++ b/src/lib/merkle_ledger/intf.ml
@@ -320,6 +320,9 @@ module Ledger = struct
     (** list of accounts via slower sequential mechanism *)
     val to_list_sequential : t -> account list

+    (** iterate over all indexes and accounts, if the ledger is not known to be sound *)
+    val iteri_untrusted : t -> f:(index -> account option -> unit) -> unit
+
     (** iterate over all indexes and accounts *)
     val iteri : t -> f:(index -> account -> unit) -> unit

@@ -391,6 +394,8 @@ module Ledger = struct
     val set_batch :
       ?hash_cache:hash Addr.Map.t -> t -> (Location.t * account) list -> unit

+    val get_at_index : t -> int -> account option
+
     val get_at_index_exn : t -> int -> account

     val set_at_index_exn : t -> int -> account -> unit
@@ -548,6 +553,8 @@ module Ledger = struct

     module Config : Config

+    val dbs_synced : primary_ledger -> converting_ledger -> bool
+
     (** Create a new converting merkle tree with the given configuration. If
         [In_directories] is given, existing databases will be opened and used
         to back the converting merkle tree. If the converting database does not exist
diff --git a/src/lib/merkle_ledger/null_ledger.ml b/src/lib/merkle_ledger/null_ledger.ml
index 066b62644fc6..2d22279fe23e 100644
--- a/src/lib/merkle_ledger/null_ledger.ml
+++ b/src/lib/merkle_ledger/null_ledger.ml
@@ -95,6 +95,8 @@ end = struct
   let set_at_index_exn _t =
     failwith "set_at_index_exn: null ledgers cannot be mutated"

+  let get_at_index _t _index = None
+
   let get_at_index_exn _t = failwith "get_at_index_exn: null ledgers are empty"

   let set_batch ?hash_cache:_ _t =
@@ -130,6 +132,8 @@ end = struct

   let tokens _t _pk = Token_id.Set.empty

+  let iteri_untrusted _t ~f:_ = ()
+
   let iteri _t ~f:_ = ()

   let fold_until _t ~init ~f:_ ~finish = Async.Deferred.return @@ finish init
diff --git a/src/lib/merkle_ledger/test/test_converting.ml b/src/lib/merkle_ledger/test/test_converting.ml
index f849717d2e8b..905a02d347ab 100644
--- a/src/lib/merkle_ledger/test/test_converting.ml
+++ b/src/lib/merkle_ledger/test/test_converting.ml
@@ -207,6 +207,59 @@ struct
             [%test_eq: Migrated.Account.t] stored_migrated_account
               (Db_converting.convert primary_account) ) ) ) )

+  let () =
+    add_test
+      "sync detection fails without crashing after accounts are added at high \
+       addresses" (fun () ->
+        with_primary ~f:(fun primary ->
+            let depth = Db.depth primary in
+            let max_height = Int.min 5 depth - 1 in
+            populate_primary_db primary max_height ;
+            with_migrated ~f:(fun migrated ->
+                let _converting =
+                  Db_converting.of_ledgers_with_migration primary migrated
+                in
+                let additional_account = Quickcheck.random_value Account.gen in
+                let high_index = (1 lsl Int.min 5 depth) - 1 in
+                let additional_account_addr =
+                  Db.Addr.of_int_exn ~ledger_depth:depth high_index
+                in
+                (* Using set_batch_accounts with a high address like this leaves
+                   the databases in an inconsistent state, because it updates
+                   the last added account in the databases but doesn't fill in
+                   the accounts at lower addresses. This state is similar to
+                   what you might get after an incomplete ledger sync. *)
+                Db.set_batch_accounts primary
+                  [ (additional_account_addr, additional_account) ] ;
+                Db_migrated.set_batch_accounts migrated
+                  [ ( additional_account_addr
+                    , Db_converting.convert additional_account )
+                  ] ;
+                assert (Db_converting.dbs_synced primary migrated) ) ) )
+
+  let () =
+    add_test "sync detection fails after converting ledger account is mutated"
+      (fun () ->
+        with_primary ~f:(fun primary ->
+            let depth = Db.depth primary in
+            let max_height = Int.min 5 depth in
+            populate_primary_db primary max_height ;
+            let account_to_mutate = Db.get_at_index_exn primary 0 in
+            let new_balance, _overflow_flag =
+              Balance.add_amount_flagged account_to_mutate.balance
+                Currency.Amount.one
+            in
+            let mutated_account =
+              Db_converting.convert
+                { account_to_mutate with balance = new_balance }
+            in
+            with_migrated ~f:(fun migrated ->
+                let _converting =
+                  Db_converting.of_ledgers_with_migration primary migrated
+                in
+                Db_migrated.set_at_index_exn migrated 0 mutated_account ;
+                assert (not (Db_converting.dbs_synced primary migrated)) ) ) )
+
   let () =
     add_test "create converting ledger, populate randomly, test iteration order"
       (fun () ->
diff --git a/src/lib/merkle_mask/masking_merkle_tree.ml b/src/lib/merkle_mask/masking_merkle_tree.ml
index 4fd9eddfd628..0f2170f3e0e4 100644
--- a/src/lib/merkle_mask/masking_merkle_tree.ml
+++ b/src/lib/merkle_mask/masking_merkle_tree.ml
@@ -879,10 +879,15 @@ module Make (Inputs : Inputs_intf.S) = struct
       let addr = Location.to_path_exn location in
       Addr.to_int addr

-    let get_at_index_exn t index =
+    let get_at_index t index =
       assert_is_attached t ;
       let addr = Addr.of_int_exn ~ledger_depth:t.depth index in
-      get t (Location.Account addr) |> Option.value_exn
+      get t (Location.Account addr)
+
+    let get_at_index_exn t index =
+      assert_is_attached t ;
+      get_at_index t index
+      |> Option.value_exn ~message:"Expected account at index" ~here:[%here]

     let set_at_index_exn t index account =
       assert_is_attached t ;
@@ -906,11 +911,17 @@ module Make (Inputs : Inputs_intf.S) = struct
       let%map.Async.Deferred accts = to_list t in
       List.map accts ~f:Account.identifier |> Account_id.Set.of_list

-    let iteri t ~f =
+    let iteri_untrusted t ~f =
       assert_is_attached t ;
       let num_accounts = num_accounts t in
       Sequence.range ~stop:`exclusive 0 num_accounts
-      |> Sequence.iter ~f:(fun i -> f i (get_at_index_exn t i))
+      |> Sequence.iter ~f:(fun i -> f i (get_at_index t i))
+
+    let iteri t ~f =
+      iteri_untrusted t ~f:(fun index account_opt ->
+          f index
+            (Option.value_exn ~message:"Expected account at index" ~here:[%here]
+               account_opt ) )

     let foldi_with_ignored_accounts t ignored_accounts ~init ~f =
       assert_is_attached t ;
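
For readers skimming the patch, here is a minimal stand-alone sketch (not from the Mina codebase; the option-array "ledger", `convert`, and `converted_equal` below are stand-ins) of why the reworked check tolerates partially synced databases: both sides are read as options and compared with `Option.equal`, so an index that was never filled in on either side no longer raises the way the old `get_at_index_exn`-based iteration did.

```ocaml
(* Illustrative sketch only -- not part of the patch above. A hypothetical
   ledger is modelled as an [_ option array], where [None] marks an index that
   was never written (e.g. after an incomplete sync). *)
let dbs_synced ~convert ~converted_equal (primary : 'a option array)
    (converting : 'b option array) : bool =
  Array.length primary = Array.length converting
  && Array.for_all2
       (fun stable unstable ->
         (* Matching holes ([None] on both sides) count as in-sync; only a
            genuine mismatch at some index reports a desync. *)
         Option.equal converted_equal (Option.map convert stable) unstable )
       primary converting

(* Example: index 2 is unfilled on both sides, so the check still passes;
   flipping one of the filled entries would make it return [false]. *)
let () =
  let primary = [| Some 1; Some 2; None |] in
  let converting = [| Some "1"; Some "2"; None |] in
  assert (
    dbs_synced ~convert:string_of_int ~converted_equal:String.equal primary
      converting )
```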