diff --git a/.github/CODEOWNERS b/.github/CODEOWNERS index 9125fb9f08..e7291f01d2 100644 --- a/.github/CODEOWNERS +++ b/.github/CODEOWNERS @@ -1,10 +1,10 @@ -/src/app/zeko/* @L-as -/src/app/zeko/circuits/ @L-as -/src/app/zeko/circuits_as_prover/ @L-as -/src/app/zeko/compile_simple/ @L-as -/src/app/zeko/tests/ @L-as -/src/app/zeko/v/ @L-as +/src/app/zeko/* @MartinOndejka +/src/app/zeko/circuits/ @MartinOndejka +/src/app/zeko/circuits_as_prover/ @MartinOndejka +/src/app/zeko/compile_simple/ @MartinOndejka +/src/app/zeko/tests/ @MartinOndejka +/src/app/zeko/v/ @MartinOndejka /src/app/zeko/da_layer/ @MartinOndejka -/src/app/zeko/da_layer/core.ml @L-as +/src/app/zeko/da_layer/core.ml @MartinOndejka /src/app/zeko/sequencer/ @MartinOndejka /src/app/lib/ @L-as @MartinOndejka diff --git a/src/app/zeko/circuits/indexed_merkle_tree.ml b/src/app/zeko/circuits/indexed_merkle_tree.ml index 2d9a263d62..937e8a7e58 100644 --- a/src/app/zeko/circuits/indexed_merkle_tree.ml +++ b/src/app/zeko/circuits/indexed_merkle_tree.ml @@ -1,7 +1,6 @@ open Core_kernel open Snark_params.Tick open Zeko_util -open Checked.Let_syntax module Make (Inputs : sig module Key : SnarkType @@ -28,28 +27,68 @@ struct type t = { hash_other : F.t; is_right : Boolean.t } [@@deriving snarky] end - module Path = - SnarkList - (PathStep) - (struct - let length = height - end) + module Path = struct + include + SnarkList + (PathStep) + (struct + let length = height + end) + end let hash_entry = var_to_hash ~init:Zeko_constants.indexed_merkle_tree_salt Entry.typ - let empty_path = - List.init height ~f:Fn.id - |> List.fold_map ~init:Field.zero ~f:(fun acc _ -> - let next = - Random_oracle.hash - ~init: - (Hash_prefix_create.salt - Zeko_constants.indexed_merkle_tree_merge_salt ) - [| acc; acc |] - in - (next, constant Field.typ acc) ) - |> snd + (** Two leaves are adjacent if and only if: + 1. Their lowest common ancestor is as deep as possible + 2. One leaf is the rightmost leaf of the left subtree + 3. 
The other is the leftmost leaf of the right subtree + *) + let are_paths_neighbors ~(l : Path.var) ~(r : Path.var) = + let* _in_prefix, seen_div, valid = + foldl + (List.zip_exn l r |> List.rev) + ~init:(Boolean.true_, Boolean.false_, Boolean.true_) + ~f:(fun (in_prefix, seen_div, valid) + ( { PathStep.is_right = l_is_right; _ } + , { PathStep.is_right = r_is_right; _ } ) -> + let* same = Boolean.equal l_is_right r_is_right in + + (* diverge happens exactly when we were in prefix and now differ *) + let* diverge_now = Boolean.(in_prefix && not same) in + + (* At the divergence bit we require l=0 and r=1 *) + let* div_ok = + Boolean.Expr.(((not !l_is_right) && !r_is_right && !valid) |> eval) + in + + (* After divergence we require l=1 and r=0 at every step *) + let* tail_ok = + Boolean.Expr.((!l_is_right && (not !r_is_right) && !valid) |> eval) + in + + (* Update validity: + - before divergence: no constraint + - at divergence: enforce div_ok + - after divergence: enforce tail_ok + *) + let* if_diverged_then_tail_ok = + (* if already diverged earlier, enforce tail constraint *) + let* enforce_tail = Boolean.(seen_div && not diverge_now) in + if_ enforce_tail ~typ:Boolean.typ ~then_:tail_ok ~else_:valid + in + let* valid = + if_ diverge_now ~typ:Boolean.typ ~then_:div_ok + ~else_:if_diverged_then_tail_ok + in + + (* Update flags *) + let* seen_div = Boolean.(seen_div || diverge_now) in + let*| in_prefix = Boolean.(in_prefix && same) in + (in_prefix, seen_div, valid) ) + in + (* Must have diverged at least once, otherwise same leaf *) + Boolean.(valid && seen_div) (* TODO: consider different salt per level. *) (* NB: The first element in the list is the neighbor of init, and the next element @@ -69,7 +108,7 @@ struct let* init = hash_entry entry in implied_root_raw init path - let add_key_var ~check ~x ~path_x ~y ~path_y ~z () = + let add_key_var ~check ~x ~path_x ~y_prev_hash ~path_y_prev ~y ~path_y ~z () = let* () = with_label __LOC__ (fun () -> assert_x_less_than_y_less_than_z ~x ~y ~z) in @@ -86,18 +125,17 @@ struct in let* root_new = implied_root { key = y; next_key = z } path_y in let* root = if_ check ~typ:F.typ ~then_:root ~else_:root_new in - (* Check that no empty indices have been skipped. 
*) - let* is_y_most_left = - foldl (List.zip_exn path_y empty_path) ~init:Boolean.true_ - ~f:(fun acc (PathStep.{ hash_other; is_right }, empty_hash) -> - let* is_valid_left = - Field.Checked.equal empty_hash hash_other >>= Boolean.( &&& ) acc - in - if_ is_right ~typ:Boolean.typ ~then_:acc ~else_:is_valid_left ) + (* Check that the leaf before y is not empty *) + let* () = + assert_not_equal ~label:__LOC__ Field.typ y_prev_hash + Field.(constant typ zero) in + let* root_new' = implied_root_raw y_prev_hash path_y_prev in + let* () = assert_equal ~label:__LOC__ F.typ root_new root_new' in + (* Check that the path to leaf before y is really before y *) + let* are_neighbors = are_paths_neighbors ~l:path_y_prev ~r:path_y in let* () = - if_ check ~typ:Boolean.typ ~then_:is_y_most_left ~else_:Boolean.true_ - >>= Boolean.Assert.is_true + assert_equal ~label:__LOC__ Boolean.typ are_neighbors Boolean.true_ in Checked.return (`Before_adding_y root, `After_adding_y root_new) diff --git a/src/app/zeko/circuits/indexed_merkle_tree.mli b/src/app/zeko/circuits/indexed_merkle_tree.mli index 20a07b95c6..d6c27ab530 100644 --- a/src/app/zeko/circuits/indexed_merkle_tree.mli +++ b/src/app/zeko/circuits/indexed_merkle_tree.mli @@ -51,6 +51,8 @@ end) : sig check:Boolean.var -> x:Key.var -> path_x:Path.var + -> y_prev_hash:F.var + -> path_y_prev:Path.var -> y:Key.var -> path_y:Path.var -> z:Key.var diff --git a/src/app/zeko/circuits/rule_commit.ml b/src/app/zeko/circuits/rule_commit.ml index 5f395faf03..d7eb29e46f 100644 --- a/src/app/zeko/circuits/rule_commit.ml +++ b/src/app/zeko/circuits/rule_commit.ml @@ -162,7 +162,9 @@ struct @@ fun () -> let input = let open Random_oracle.Input.Chunked in - Ledger_hash.var_to_field target_ledger |> field + append + (Ledger_hash.var_to_field target_ledger |> field) + (Account_set.to_input_var target_acc_set) in let* payload = make_checked (fun () -> diff --git a/src/app/zeko/circuits/txn_state.ml b/src/app/zeko/circuits/txn_state.ml index a8c4983b56..383ec0da59 100644 --- a/src/app/zeko/circuits/txn_state.ml +++ b/src/app/zeko/circuits/txn_state.ml @@ -80,6 +80,8 @@ let constraint_constants : Genesis_constants.Constraint_constants.t = type update_acc_set_witness = { get_account_set_x : unit -> Token_id.t ; get_account_set_z : unit -> Token_id.t + ; get_account_set_y_prev_hash : unit -> Field.t + ; get_account_set_y_prev_path : unit -> Account_set.Path.t ; get_account_set_x_path : unit -> Account_set.Path.t ; get_account_set_y_path : unit -> Account_set.Path.t } @@ -100,6 +102,14 @@ let update_acc_set accounts init ~witness = exists Account_set.Path.typ ~compute:(witness >>| fun x -> x.get_account_set_x_path ()) in + let* y_prev_hash = + exists Field.typ + ~compute:(witness >>| fun x -> x.get_account_set_y_prev_hash ()) + in + let* path_y_prev = + exists Account_set.Path.typ + ~compute:(witness >>| fun x -> x.get_account_set_y_prev_path ()) + in let* path_y = exists Account_set.Path.typ ~compute:(witness >>| fun x -> x.get_account_set_y_path ()) @@ -110,8 +120,8 @@ let update_acc_set accounts init ~witness = in let* y = derive_token_id ~owner:account_id in let* `Before_adding_y set', `After_adding_y new_set = - Account_set.add_key_var ~x ~path_x ~y ~path_y ~z - ~check:is_empty_and_writeable () + Account_set.add_key_var ~x ~path_x ~y_prev_hash ~path_y_prev ~y ~path_y + ~z ~check:is_empty_and_writeable () in let*| () = assert_equal ~label:__LOC__ Account_set.typ set set' in new_set ) diff --git a/src/app/zeko/circuits/zeko_util.ml b/src/app/zeko/circuits/zeko_util.ml index 
e96bc16775..7c7a731930 100644 --- a/src/app/zeko/circuits/zeko_util.ml +++ b/src/app/zeko/circuits/zeko_util.ml @@ -405,6 +405,14 @@ let var_equal : ('var, 't) Typ.t -> 'var -> 'var -> Boolean.Expr.t Checked.t = in Boolean.Expr.all bools +let assert_not_equal : + label:string -> ('var, 't) Typ.t -> 'var -> 'var -> unit Checked.t = + fun ~label typ x y -> + let* is_equal = var_equal typ x y in + let is_not_equal = Boolean.Expr.not is_equal in + let@ () = with_label label in + Boolean.Expr.assert_ is_not_equal + module Checked32 = struct include Mina_numbers.Nat.Make32 () diff --git a/src/app/zeko/circuits/zeko_util.mli b/src/app/zeko/circuits/zeko_util.mli index 831228d2d3..c5e3b56e2e 100644 --- a/src/app/zeko/circuits/zeko_util.mli +++ b/src/app/zeko/circuits/zeko_util.mli @@ -184,6 +184,9 @@ val assert_equal_safer : val var_equal : ('var, 't) Typ.t -> 'var -> 'var -> Boolean.Expr.t Checked.t +val assert_not_equal : + label:string -> ('var, 't) Typ.t -> 'var -> 'var -> unit Checked.t + module Checked32 : sig include module type of Mina_numbers.Nat.Make32 () diff --git a/src/app/zeko/da_layer/cli.ml b/src/app/zeko/da_layer/cli.ml index e5cb32a5b0..f12989389d 100644 --- a/src/app/zeko/da_layer/cli.ml +++ b/src/app/zeko/da_layer/cli.ml @@ -16,12 +16,6 @@ let run_node = flag "--port" (optional_with_default 8080 int) ~doc:"int Port to listen on" - and node_to_sync = - flag "--da-node-to-sync" (optional string) - ~doc:"string Nodes to sync with" - and hash_to_sync = - flag "--hash-to-sync" (optional string) - ~doc:"string Hash to sync with in decimal string form" and testing_mode = flag "--random-sk" no_arg ~doc:"Run in testing mode, the signer key will be generated randomly" @@ -45,20 +39,6 @@ let run_node = in let logger = Logger.create () in Stdout_log.setup log_json log_level ; - let sync_arg = - match (node_to_sync, hash_to_sync) with - | Some node_to_sync, Some hash_to_sync -> - Some - ( Cli_lib.Flag.Types. - { value = Core_kernel.Host_and_port.of_string node_to_sync - ; name = "node-to-sync" - } - , Mina_base.Ledger_hash.of_decimal_string hash_to_sync ) - | None, None -> - None - | _ -> - failwith "Both node-to-sync and hash-to-sync must be provided" - in let chain = match network_id with | "mainnet" -> @@ -70,7 +50,7 @@ let run_node = in let%bind () = Deferred.ignore_m - @@ Da_layer.Node.create_server ~chain ~sync_arg ~logger ~port ~db_dir + @@ Da_layer.Node.create_server ~chain ~logger ~port ~db_dir ~signer_sk:signer ~no_migrations () in [%log info] "Server started on port %d" port ; diff --git a/src/app/zeko/da_layer/lib/client.ml b/src/app/zeko/da_layer/lib/client.ml index c5692083ba..fdbd973a50 100644 --- a/src/app/zeko/da_layer/lib/client.ml +++ b/src/app/zeko/da_layer/lib/client.ml @@ -24,17 +24,24 @@ module Diff_table = struct type t = { diff : Diff.Stable.V1.t ; ledger_openings : Sparse_ledger.t + ; acc_set_openings : Indexed_merkle_tree.Sparse.t ; genesis : bool ; target_ledger_hash : Ledger_hash.t } [@@deriving hlist, fields, sexp] - let make ~diff ~ledger_openings ~target_ledger_hash ~genesis = - { diff; ledger_openings; target_ledger_hash; genesis } + let make ~diff ~ledger_openings ~acc_set_openings ~target_ledger_hash ~genesis + = + { diff; ledger_openings; acc_set_openings; target_ledger_hash; genesis } let typ = Mina_caqti.Type_spec.custom_type - ~to_hlist:(fun { diff; ledger_openings; target_ledger_hash; genesis } -> + ~to_hlist:(fun { diff + ; ledger_openings + ; acc_set_openings + ; target_ledger_hash + ; genesis + } -> H_list. 
[ Ledger_hash.to_decimal_string target_ledger_hash ; ( if genesis then None @@ -44,12 +51,15 @@ module Diff_table = struct diff |> Bigstring.to_string ; Sparse_ledger.to_yojson ledger_openings |> Yojson.Safe.to_string + ; Indexed_merkle_tree.Sparse.to_yojson acc_set_openings + |> Yojson.Safe.to_string ] ) ~of_hlist:(fun H_list. [ target_ledger_hash ; source_ledger_hash ; diff ; ledger_openings + ; acc_set_openings ] -> let ok_exn = function | Ppx_deriving_yojson_runtime.Result.Ok x -> @@ -64,16 +74,20 @@ module Diff_table = struct ; ledger_openings = Sparse_ledger.of_yojson (Yojson.Safe.from_string ledger_openings) |> ok_exn + ; acc_set_openings = + Indexed_merkle_tree.Sparse.of_yojson + (Yojson.Safe.from_string acc_set_openings) + |> ok_exn ; genesis = Option.is_none source_ledger_hash ; target_ledger_hash = Ledger_hash.of_decimal_string target_ledger_hash } ) - Caqti_type.[ string; option string; octets; octets ] + Caqti_type.[ string; option string; octets; octets; octets ] let insert (module Conn : CONNECTION) t = Conn.exec (Caqti_request.exec typ - {sql| INSERT INTO da_diff (target_ledger_hash, source_ledger_hash, diff, ledger_openings) - VALUES (?, ?, ?, ?) |sql} ) + {sql| INSERT INTO da_diff (target_ledger_hash, source_ledger_hash, diff, ledger_openings, acc_set_openings) + VALUES (?, ?, ?, ?, ?) |sql} ) t let get_diff_by_source (module Conn : CONNECTION) ledger_hash = @@ -81,12 +95,12 @@ module Diff_table = struct | Some ledger_hash -> Conn.find_opt (Caqti_request.find_opt Caqti_type.string typ - {sql| SELECT target_ledger_hash, source_ledger_hash, diff, ledger_openings FROM da_diff WHERE source_ledger_hash = ? |sql} ) + {sql| SELECT target_ledger_hash, source_ledger_hash, diff, ledger_openings, acc_set_openings FROM da_diff WHERE source_ledger_hash = ? |sql} ) (Ledger_hash.to_decimal_string ledger_hash) | None -> Conn.find_opt (Caqti_request.find_opt Caqti_type.unit typ - {sql| SELECT target_ledger_hash, source_ledger_hash, diff, ledger_openings FROM da_diff WHERE source_ledger_hash IS NULL |sql} ) + {sql| SELECT target_ledger_hash, source_ledger_hash, diff, ledger_openings, acc_set_openings FROM da_diff WHERE source_ledger_hash IS NULL |sql} ) () let get_id_by_target (module Conn : CONNECTION) ledger_hash = @@ -180,34 +194,86 @@ module Rpc = struct in go max_tries [] - let post_diff ~logger ~node_location ~ledger_openings ~diff = + module Versioned_rpc_same_query = struct + type ('q, 'latest) t = + | V : ('q, 'r) Async.Rpc.Rpc.t * ('r -> 'latest) -> ('q, 'latest) t + end + + let rec dispatch_with_fallback_same_query ?(max_tries = 5) ?(timeout = 5.) 
+ ~logger (node_location : Host_and_port.t Cli_lib.Flag.Types.with_name) + data ~(versions : ('q, 'latest) Versioned_rpc_same_query.t list) : + ('latest, Error.t) Result.t Deferred.t = + match versions with + | [] -> + return (Error (Error.of_string "No versions to try")) + | Versioned_rpc_same_query.V (rpc, to_latest) :: versions -> ( + match%bind + dispatch ~max_tries ~timeout ~logger node_location rpc data + with + | Ok result -> + return (Ok (to_latest result)) + | Error e -> + let version_unimplemented = + Error.to_string_mach e + |> String.is_substring + ~substring: + (sprintf "Unimplemented_rpc %s (Version %d)" + (Async.Rpc.Rpc.name rpc) + (Async.Rpc.Rpc.version rpc) ) + in + if version_unimplemented then + dispatch_with_fallback_same_query ~max_tries ~timeout ~logger + node_location data ~versions + else return (Error e) ) + + module Versioned_rpc_same_response = struct + type ('r, 'latest) t = + | V : ('q, 'r) Async.Rpc.Rpc.t * ('latest -> 'q) -> ('r, 'latest) t + end + + let rec dispatch_with_fallback_same_response ?(max_tries = 5) ?(timeout = 5.) + ~logger (node_location : Host_and_port.t Cli_lib.Flag.Types.with_name) + data ~(versions : ('r, 'latest) Versioned_rpc_same_response.t list) : + ('r, Error.t) Result.t Deferred.t = + match versions with + | [] -> + return (Error (Error.of_string "No versions to try")) + | V (rpc, from_latest) :: versions -> ( + match%bind + dispatch ~max_tries ~timeout ~logger node_location rpc + (from_latest data) + with + | Ok result -> + return (Ok result) + | Error e -> + let version_unimplemented = + Error.to_string_mach e + |> String.is_substring + ~substring: + (sprintf "Unimplemented_rpc %s (Version %d)" + (Async.Rpc.Rpc.name rpc) + (Async.Rpc.Rpc.version rpc) ) + in + if version_unimplemented then + dispatch_with_fallback_same_response ~max_tries ~timeout ~logger + node_location data ~versions + else return (Error e) ) + + let post_diff ~logger ~node_location ~ledger_openings ~acc_set_openings ~diff + = dispatch ~max_tries:5 ~logger node_location Rpc.Post_diff.V1.t - { ledger_openings; diff } + { ledger_openings; diff; acc_set_openings } let get_diff ~logger ~node_location ~ledger_hash = - match%bind - dispatch ~max_tries:1 ~logger node_location Rpc.Get_diff.V2.t ledger_hash - with - | Ok diff -> - return (Ok diff) - | Error e -> - let v2_unimplemented = - Error.to_string_mach e - |> String.is_substring - ~substring:"Unimplemented_rpc Get_diff (Version 2)" - in - if v2_unimplemented then - (* Fallback to older version *) - let%bind.Deferred.Result result = - dispatch ~max_tries:1 ~logger node_location Rpc.Get_diff.V1.t - ledger_hash - in - return - (Ok (Option.map result ~f:(fun x -> Diff.Stable.V1.to_latest x))) - else return (Error e) - - let get_all_keys ~logger ~node_location () = - dispatch ~max_tries:1 ~logger node_location Rpc.Get_all_keys.V1.t () + dispatch_with_fallback_same_query ~max_tries:1 ~logger node_location + ledger_hash + ~versions: + [ Versioned_rpc_same_query.V (Rpc.Get_diff.V3.t, Fn.id) + ; Versioned_rpc_same_query.V + (Rpc.Get_diff.V2.t, Option.map ~f:Diff.Stable.V2.to_latest) + ; Versioned_rpc_same_query.V + (Rpc.Get_diff.V1.t, Option.map ~f:Diff.Stable.V1.to_latest) + ] let get_diff_source ~logger ~node_location ~ledger_hash = dispatch ~max_tries:1 ~logger node_location Rpc.Get_diff_source.V1.t @@ -295,12 +361,18 @@ let create ~logger ~(config : Config.t) ~quorum ~da_keys ~db_pool = let stop t = Ivar.fill t.stop () -let enqueue_diff t ~target_ledger_hash ~ledger_openings ~diff ~genesis = +let enqueue_diff t 
~target_ledger_hash ~ledger_openings ~acc_set_openings ~diff + ~genesis = let%map () = Pool.use (fun conn -> Diff_table.insert conn - { diff; ledger_openings; target_ledger_hash; genesis } ) + { diff + ; ledger_openings + ; acc_set_openings + ; target_ledger_hash + ; genesis + } ) t.db_pool >>| caqti_ok_exn ~msg:"Failed to insert diff into db: %s" in @@ -331,9 +403,11 @@ let rec start_posting_diffs_from ?pushed_diff t start_posting_diffs_from ~pushed_diff:(Condition.wait t.pushed_diff) t ~node_location ~source_ledger_hash () - | Some { diff; ledger_openings; target_ledger_hash; _ } -> ( + | Some { diff; ledger_openings; acc_set_openings; target_ledger_hash; _ } + -> ( match%bind - Rpc.post_diff ~logger:t.logger ~node_location ~ledger_openings ~diff + Rpc.post_diff ~logger:t.logger ~node_location ~ledger_openings + ~acc_set_openings ~diff with | Error err -> [%log error] "Failed to post diff to da node: %s" @@ -600,12 +674,13 @@ let get_diff ~logger ~config ~ledger_hash = | Error e -> return (Error e) ) -let distribute_diff ~logger ~config ~ledger_openings ~diff = +let distribute_diff ~logger ~config ~ledger_openings ~acc_set_openings ~diff = Deferred.List.iter ~how:`Parallel Config.(config.nodes) ~f:(fun n -> match%map - Rpc.post_diff ~logger ~node_location:n ~ledger_openings ~diff + Rpc.post_diff ~logger ~node_location:n ~ledger_openings + ~acc_set_openings ~diff with | Ok _ -> () @@ -626,6 +701,7 @@ let create_genesis_diffs ?(max_size = 50) ledger = let account = Ledger.get_at_index_exn ledger index in (index, account) ) in + let acc_set = Indexed_merkle_tree.Db.create ~depth:(Ledger.depth ledger) () in Ledger.with_ephemeral_ledger ~depth:(Ledger.depth ledger) ~f:(fun ephemeral -> let account_chunks = List.chunks_of changed_accounts ~length:max_size in List.map account_chunks ~f:(fun chunk -> @@ -640,16 +716,33 @@ let create_genesis_diffs ?(max_size = 50) ledger = ~source_ledger_hash:(Sparse_ledger.merkle_root ledger_openings) ~changed_accounts:chunk ~command_with_action_step_flags:None in - (diff, ledger_openings, `Target (Ledger.merkle_root ephemeral)) ) ) + List.iter chunk ~f:(fun (_, account) -> + ( Indexed_merkle_tree.Db.get_or_create_entry_exn acc_set + (Account_id.derive_token_id + ~owner:(Account.identifier account) ) + : [ `Added | `Existed ] * Indexed_merkle_tree.Db.witness ) + |> ignore ) ; + let acc_set_openings = + Indexed_merkle_tree.Sparse.of_db_subset ~db:acc_set + ~keys: + ( List.map chunk ~f:snd + |> List.map ~f:(fun acc -> + Account_id.derive_token_id + ~owner:(Account.identifier acc) ) ) + in + ( diff + , ledger_openings + , acc_set_openings + , `Target (Ledger.merkle_root ephemeral) ) ) ) (** Distribute diff of initial accounts *) let distribute_genesis_diff ~logger ~config ~ledger = let%bind diffs = create_genesis_diffs ledger in Deferred.List.iter ~how:`Sequential diffs - ~f:(fun (diff, ledger_openings, `Target _) -> - distribute_diff ~logger ~config ~ledger_openings ~diff ) + ~f:(fun (diff, ledger_openings, acc_set_openings, `Target _) -> + distribute_diff ~logger ~config ~ledger_openings ~acc_set_openings ~diff ) -let get_openings ~diff ~ledger = +let get_ledger_openings ~diff ~ledger = let changed_accounts = Diff.changed_accounts diff |> List.sort ~compare:(fun (a, _) (b, _) -> Int.compare a b) @@ -657,10 +750,23 @@ let get_openings ~diff ~ledger = let account_ids = List.map changed_accounts ~f:snd |> List.map ~f:Account.identifier in - let openings = Sparse_ledger.of_ledger_subset_exn ledger account_ids in - List.iter changed_accounts ~f:(fun (index, account) 
-> - Ledger.set_at_index_exn ledger index account ) ; - openings - -let attach_openings ~diffs ~ledger = - List.map diffs ~f:(fun diff -> (diff, get_openings ~diff ~ledger)) + let ledger_openings = Sparse_ledger.of_ledger_subset_exn ledger account_ids in + ledger_openings + +let attach_ledger_openings ~diffs ~ledger = + List.map diffs ~f:(fun diff -> (diff, get_ledger_openings ~diff ~ledger)) + +let get_acc_set_openings ~diff ~ledger_openings ~imt = + let new_accounts_keys = + List.filter (Diff.changed_accounts diff) ~f:(fun (index, _) -> + Account.equal + (Sparse_ledger.get_exn ledger_openings index) + Account.empty ) + |> List.sort ~compare:(fun (a, _) (b, _) -> Int.compare a b) + |> List.map ~f:(fun (_, account) -> + Account_id.derive_token_id ~owner:(Account.identifier account) ) + in + let acc_set_openings = + Indexed_merkle_tree.Sparse.of_db_subset ~db:imt ~keys:new_accounts_keys + in + acc_set_openings diff --git a/src/app/zeko/da_layer/lib/core.ml b/src/app/zeko/da_layer/lib/core.ml index 5fbed06375..15d88ecdd8 100644 --- a/src/app/zeko/da_layer/lib/core.ml +++ b/src/app/zeko/da_layer/lib/core.ml @@ -9,10 +9,11 @@ open Signature_lib 4. Set each account in [diff.diff] to the [ledger_openings] and call the resulting ledger hash [target_ledger_hash]. 5. Sign [target_ledger_hash]. 6. Check that after applying all the receipts of the command, the receipt chain hashes match the target ledger. - 7. Attach timestamp. - 8. Store the diff under the [target_ledger_hash]. *) + 7. Check that new accounts in ledger openings are in same order as in acc set openings. + 8. Attach timestamp and acc set root. + 9. Store the diff under the [target_ledger_hash]. *) let post_diff ~logger ~proof_cache_db ~kvdb ~network_id ~(signer : Keypair.t) - ~ledger_openings ~diff = + ~ledger_openings ~acc_set_openings ~diff = (* 1 *) let%bind.Result () = match @@ -86,7 +87,9 @@ let post_diff ~logger ~proof_cache_db ~kvdb ~network_id ~(signer : Keypair.t) Random_oracle.Input.Chunked.field @@ Random_oracle.hash ~init:(Hash_prefix_create.salt Zeko_constants.da_layer_check_salt) - [| target_ledger_hash |] + [| target_ledger_hash + ; Indexed_merkle_tree.Sparse.merkle_root acc_set_openings + |] in let signature = Schnorr.Chunked.sign ~signature_kind:network_id signer.private_key message @@ -191,10 +194,54 @@ let post_diff ~logger ~proof_cache_db ~kvdb ~network_id ~(signer : Keypair.t) in (* 7 *) - (* V2 was added time *) - let diff : Diff.Stable.V2.t = Diff.add_time ~logger diff in + let%bind.Result () = + try + let new_accounts = + List.filter (Diff.changed_accounts diff) ~f:(fun (index, _) -> + Account.equal + (Sparse_ledger.get_exn ledger_openings index) + Account.empty ) + |> List.sort ~compare:(fun (a, _) (b, _) -> Int.compare a b) + in + let acc_set_entries = + List.map new_accounts ~f:(fun (_, account) -> + let key = + Account_id.derive_token_id ~owner:(Account.identifier account) + in + let acc_set_index = + Indexed_merkle_tree.Sparse.find_index_exn acc_set_openings + (Indexed_merkle_tree.Account_id.with_empty_key key) + in + let entry = + Indexed_merkle_tree.Sparse.get_exn acc_set_openings acc_set_index + in + (acc_set_index, entry.value) ) + |> List.sort ~compare:(fun (a, _) (b, _) -> Int.compare a b) + |> List.map ~f:(fun (_, value) -> value) + in + let ledger_entries = + List.map new_accounts ~f:(fun (_, account) -> + Account_id.derive_token_id ~owner:(Account.identifier account) ) + in + if List.equal Token_id.equal acc_set_entries ledger_entries then Ok () + else + Error + (Error.create + "New accounts in 
ledger openings are not in same order as in acc \ + set openings" + (ledger_entries, acc_set_entries) + [%sexp_of: Token_id.t list * Token_id.t list] ) + with e -> Error (Error.of_exn e) + in (* 8 *) + (* V2 was added time *) + let diff : Diff.Stable.V3.t = + Diff.add_time_and_acc_set ~logger diff + ~acc_set:(Indexed_merkle_tree.Sparse.merkle_root acc_set_openings) + in + + (* 9 *) (* We don't care if the diff already existed *) let () = match Db.add_diff kvdb ~ledger_hash:target_ledger_hash ~diff with diff --git a/src/app/zeko/da_layer/lib/db.ml b/src/app/zeko/da_layer/lib/db.ml index 77f48256d7..6d512021be 100644 --- a/src/app/zeko/da_layer/lib/db.ml +++ b/src/app/zeko/da_layer/lib/db.ml @@ -1,26 +1,9 @@ open Core_kernel open Mina_base -(** Holds keys to all the diffes *) -module Index = struct - [%%versioned - module Stable = struct - module V1 = struct - type t = Ledger_hash.Stable.V1.t list - - let to_latest = Fn.id - end - end] - - let to_bigstring = Binable.to_bigstring (module Stable.Latest) - - let of_bigstring = Binable.of_bigstring (module Stable.Latest) -end - module Key_value = struct type _ t = - | Diff : (Ledger_hash.t * Diff.Stable.V2.t) t - | Diff_index : (unit * Index.t) t + | Diff : (Ledger_hash.t * Diff.Stable.V3.t) t | Migration : (unit * int) t let serialize_key : type k v. (k * v) t -> k -> Bigstring.t = @@ -31,8 +14,6 @@ module Key_value = struct [ Bigstring.of_string "diff" ; Bigstring.of_string @@ Ledger_hash.to_decimal_string key ] - | Diff_index -> - Bigstring.of_string "diff_index" | Migration -> Bigstring.of_string "migration" @@ -41,8 +22,6 @@ module Key_value = struct match pair_type with | Diff -> Diff.to_bigstring value - | Diff_index -> - Index.to_bigstring value | Migration -> Bigstring.of_string @@ Int.to_string value @@ -51,26 +30,19 @@ module Key_value = struct match pair_type with | Diff -> Diff.of_bigstring data |> Or_error.ok_exn - | Diff_index -> - Index.of_bigstring data | Migration -> Int.of_string @@ Bigstring.to_string data end include Kvdb_base.Make (Key_value) -let set_index t ~index = set t Diff_index ~key:() ~data:index - -let get_index t = get t Diff_index ~key:() |> Option.value ~default:[] +let has_diff t ~ledger_hash = get t Diff ~key:ledger_hash |> Option.is_some let add_diff t ~ledger_hash ~diff = - let index = get_index t in - if List.mem index ledger_hash ~equal:Ledger_hash.equal then `Already_existed - else - let new_index = ledger_hash :: index in - set_batch t - [ Pack (Diff, ledger_hash, diff); Pack (Diff_index, (), new_index) ] ; - `Added + if has_diff t ~ledger_hash then `Already_existed + else ( + set t Diff ~key:ledger_hash ~data:diff ; + `Added ) let get_diff t ~ledger_hash = get t Diff ~key:ledger_hash @@ -78,13 +50,7 @@ let get_migration t = get t Migration ~key:() |> Option.value ~default:0 let set_migration t ~migration = set t Migration ~key:() ~data:migration -let has_diff t ~ledger_hash = get t Diff ~key:ledger_hash |> Option.is_some - module Async = struct - let set_index t ~index = Async.return (set_index t ~index) - - let get_index t = Async.return (get_index t) - let add_diff t ~ledger_hash ~diff = Async.return (add_diff t ~ledger_hash ~diff) diff --git a/src/app/zeko/da_layer/lib/diff.ml b/src/app/zeko/da_layer/lib/diff.ml index 73bfaf16fc..75bfc0fa9f 100644 --- a/src/app/zeko/da_layer/lib/diff.ml +++ b/src/app/zeko/da_layer/lib/diff.ml @@ -1,6 +1,7 @@ open Core_kernel open Mina_base open Mina_ledger +open Snark_params.Tick [%%versioned module Stable = struct @@ -8,19 +9,39 @@ module Stable = struct 
[@@@no_toplevel_latest_type] - module V2 = struct + module V3 = struct type t = { source_ledger_hash : Ledger_hash.Stable.V1.t ; changed_accounts : (int * Account.Stable.V2.t) list ; command_with_action_step_flags : (User_command.Stable.V2.t * bool list) option ; timestamp : Block_time.Stable.V1.t + ; acc_set : (Field.t[@version_asserted]) } [@@deriving yojson, fields, sexp_of, compare] let to_latest = Fn.id end + module V2 = struct + type t = + { source_ledger_hash : Ledger_hash.Stable.V1.t + ; changed_accounts : (int * Account.Stable.V2.t) list + ; command_with_action_step_flags : + (User_command.Stable.V2.t * bool list) option + ; timestamp : Block_time.Stable.V1.t + } + [@@deriving yojson, fields, sexp_of, compare] + + let to_latest t = + { V3.source_ledger_hash = t.source_ledger_hash + ; changed_accounts = t.changed_accounts + ; command_with_action_step_flags = t.command_with_action_step_flags + ; timestamp = t.timestamp + ; acc_set = Field.zero + } + end + module V1 = struct type t = { source_ledger_hash : Ledger_hash.Stable.V1.t @@ -30,11 +51,13 @@ module Stable = struct } [@@deriving yojson, fields, sexp] - let to_latest ?(timestamp = Block_time.zero) (t : t) = - { V2.source_ledger_hash = t.source_ledger_hash + let to_latest ?(timestamp = Block_time.zero) ?(acc_set = Field.zero) (t : t) + = + { V3.source_ledger_hash = t.source_ledger_hash ; changed_accounts = t.changed_accounts ; command_with_action_step_flags = t.command_with_action_step_flags ; timestamp + ; acc_set } end end] @@ -62,13 +85,13 @@ let command_with_action_step_flags { Stable.V1.command_with_action_step_flags; _ } = command_with_action_step_flags -let add_time ~logger t = +let add_time_and_acc_set ~logger t ~acc_set = Stable.V1.to_latest ~timestamp:(Block_time.now (Block_time.Controller.basic ~logger)) - t + ~acc_set t let drop_time - { Stable.V2.source_ledger_hash + { Stable.V3.source_ledger_hash ; changed_accounts ; command_with_action_step_flags ; _ @@ -100,6 +123,6 @@ let%test_unit "diff versioning" = let v1_serialized = Binable.to_bigstring (module Stable.V1.With_top_version_tag) v1 in - let v2 = of_bigstring v1_serialized |> Or_error.ok_exn in + let v3 = of_bigstring v1_serialized |> Or_error.ok_exn in - [%test_eq: Stable.V2.t] (Stable.V1.to_latest v1) (Stable.V2.to_latest v2) + [%test_eq: Stable.V3.t] (Stable.V1.to_latest v1) (Stable.V3.to_latest v3) diff --git a/src/app/zeko/da_layer/lib/dune b/src/app/zeko/da_layer/lib/dune index ce7716d5f6..7aaa495134 100644 --- a/src/app/zeko/da_layer/lib/dune +++ b/src/app/zeko/da_layer/lib/dune @@ -5,6 +5,7 @@ kvdb_base zeko_constants relational_db + indexed_merkle_tree ;; mina ;; mina_base mina_caqti diff --git a/src/app/zeko/da_layer/lib/node.ml b/src/app/zeko/da_layer/lib/node.ml index 327a9e47fe..56ee6fe4ff 100644 --- a/src/app/zeko/da_layer/lib/node.ml +++ b/src/app/zeko/da_layer/lib/node.ml @@ -1,6 +1,5 @@ open Core_kernel open Mina_base -open Mina_ledger open Signature_lib module Rpc_def = Rpc open Async @@ -15,41 +14,13 @@ type t = ; proof_cache_db : Proof_cache_tag.cache_db } -let sync t ~node_location ~ledger_hash = - let logger = t.logger in - [%log info] "Syncing" ; - [%log info] "Fetching intervals" ; - let ledger = - Ledger.create_ephemeral ~depth:constraint_constants.ledger_depth () - in - Client.map_diffs ~logger ~depth:constraint_constants.ledger_depth - ~config:(Client.Config.of_node_locations [ node_location ]) - ~source_ledger_hash:`Genesis ~target_ledger_hash:ledger_hash () - ~f:(fun ~current_chunk ~current_diff:_ ~chunks_length diff -> - let 
progress = Float.of_int current_chunk /. Float.of_int chunks_length in - printf "Progress: %.2f%%\n%!" (progress *. 100.0) ; - let diff = Diff.drop_time diff in - let ledger_openings = Client.get_openings ~diff ~ledger in - match - Core.post_diff ~logger ~proof_cache_db:t.proof_cache_db ~kvdb:t.db - ~network_id:t.chain ~signer:t.signer ~ledger_openings ~diff - with - | Ok _signature -> - return (Ok ()) - | Error e -> - let logger = t.logger in - [%log warn] "Error posting diff: %s" (Error.to_string_hum e) ; - return (Error e) ) - >>| Result.map ~f:(fun asd -> Result.all_unit asd) - >>| Result.join - let get_signature t ~ledger_hash = - let%bind.Option _diff = Db.get_diff t.db ~ledger_hash in + let%bind.Option diff = Db.get_diff t.db ~ledger_hash in let message = Random_oracle.Input.Chunked.field @@ Random_oracle.hash ~init:(Hash_prefix_create.salt Zeko_constants.da_layer_check_salt) - [| ledger_hash |] + [| ledger_hash; diff.acc_set |] in Some (Schnorr.Chunked.sign ~signature_kind:t.chain t.signer.private_key message) @@ -75,7 +46,7 @@ let get_ledger_hashes_chain t >>| fun diff -> Option.value_exn ~here:[%here] ~message:"Get_ledger_hashes_chain: diff not found" diff - |> Diff.Stable.V2.source_ledger_hash + |> Diff.Stable.V3.source_ledger_hash in let%map next = go (n - 1) source in current :: next @@ -87,11 +58,11 @@ let implementations t = ~implementations: [ (* Post_diff *) Rpc.Rpc.implement Rpc_def.Post_diff.V1.t - (fun () { ledger_openings; diff } -> + (fun () { ledger_openings; acc_set_openings; diff } -> match Core.post_diff ~logger:t.logger ~proof_cache_db:t.proof_cache_db ~kvdb:t.db ~network_id:t.chain ~signer:t.signer ~ledger_openings - ~diff + ~acc_set_openings ~diff with | Ok signature -> let pk = Public_key.compress t.signer.public_key in @@ -105,14 +76,11 @@ let implementations t = let%map v2_diff = Db.Async.get_diff t.db ~ledger_hash:query in let v1_diff = Option.map v2_diff ~f:Diff.drop_time in v1_diff ) - ; Rpc.Rpc.implement Rpc_def.Get_diff.V2.t (fun () query -> + ; Rpc.Rpc.implement Rpc_def.Get_diff.V3.t (fun () query -> Db.Async.get_diff t.db ~ledger_hash:query ) ; (* Has_diff *) Rpc.Rpc.implement Rpc_def.Has_diff.V1.t (fun () query -> Db.Async.has_diff t.db ~ledger_hash:query ) - ; (* Get_all_keys *) - Rpc.Rpc.implement Rpc_def.Get_all_keys.V1.t (fun () () -> - Db.Async.get_index t.db ) ; (* Get_diff_source *) Rpc.Rpc.implement Rpc_def.Get_diff_source.V1.t (fun () query -> Db.Async.get_diff t.db ~ledger_hash:query @@ -149,8 +117,7 @@ let implementations t = Option.value_exn ~here:[%here] ~message:"Diff not found" diff ) ) ] -let create_server ~chain ~sync_arg ~port ~logger ~db_dir ~signer_sk - ~no_migrations () = +let create_server ~chain ~port ~logger ~db_dir ~signer_sk ~no_migrations () = let where_to_listen = Tcp.Where_to_listen.bind_to All_addresses (On_port port) in @@ -171,18 +138,6 @@ let create_server ~chain ~sync_arg ~port ~logger ~db_dir ~signer_sk if not no_migrations then Migrations.run_migrations ~logger t.db ; - let%bind () = - match sync_arg with - | None -> - return () - | Some (node_location, ledger_hash) -> ( - match%bind sync t ~node_location ~ledger_hash with - | Ok () -> - return () - | Error e -> - failwith (Error.to_string_hum e) ) - in - let implementations = implementations t in Tcp.Server.create ~on_handler_error: diff --git a/src/app/zeko/da_layer/lib/rpc.ml b/src/app/zeko/da_layer/lib/rpc.ml index b03c235cc2..0c25ac9a7f 100644 --- a/src/app/zeko/da_layer/lib/rpc.ml +++ b/src/app/zeko/da_layer/lib/rpc.ml @@ -10,7 +10,10 @@ module Post_diff = 
struct module Query = struct (* Use Diff.V1 without timestamp, the node determines the timestamp itself *) type t = - { ledger_openings : Sparse_ledger.Stable.V2.t; diff : Diff.Stable.V1.t } + { ledger_openings : Sparse_ledger.Stable.V2.t + ; diff : Diff.Stable.V1.t + ; acc_set_openings : Indexed_merkle_tree.Sparse.Stable.V1.t + } [@@deriving bin_io_unversioned] end @@ -46,6 +49,16 @@ module Get_diff = struct Rpc.Rpc.create ~name:"Get_diff" ~version:2 ~bin_query:Ledger_hash.Stable.V1.bin_t ~bin_response:Response.bin_t end + + module V3 = struct + module Response = struct + type t = Diff.Stable.V3.t option [@@deriving bin_io_unversioned] + end + + let t : (Ledger_hash.t, Response.t) Rpc.Rpc.t = + Rpc.Rpc.create ~name:"Get_diff" ~version:3 + ~bin_query:Ledger_hash.Stable.V1.bin_t ~bin_response:Response.bin_t + end end (* val has_diff : Ledger_hash.t -> bool *) @@ -57,19 +70,6 @@ module Has_diff = struct end end -(* val get_all_keys : unit -> Ledger_hash.t list *) -module Get_all_keys = struct - module V1 = struct - module Response = struct - type t = Ledger_hash.Stable.V1.t list [@@deriving bin_io_unversioned] - end - - let t : (unit, Response.t) Rpc.Rpc.t = - Rpc.Rpc.create ~name:"Get_all_keys" ~version:1 ~bin_query:Unit.bin_t - ~bin_response:Response.bin_t - end -end - (* val get_diff_source : Ledger_hash.t -> Ledger_hash.t *) module Get_diff_source = struct module V1 = struct @@ -145,7 +145,7 @@ module Get_diffs_chain = struct end module Response = struct - type t = Diff.Stable.V2.t list [@@deriving bin_io_unversioned] + type t = Diff.Stable.V3.t list [@@deriving bin_io_unversioned] end let t : (Query.t, Response.t) Rpc.Rpc.t = diff --git a/src/app/zeko/indexed_merkle_tree/indexed_merkle_tree.ml b/src/app/zeko/indexed_merkle_tree/indexed_merkle_tree.ml index 2bc4758a77..1d99275873 100644 --- a/src/app/zeko/indexed_merkle_tree/indexed_merkle_tree.ml +++ b/src/app/zeko/indexed_merkle_tree/indexed_merkle_tree.ml @@ -148,10 +148,14 @@ module type Database_intf = sig type witness = [ `X of Token_id.t ] * [ `X_path of Path.t ] + * [ `Y_prev_hash of Field.t ] + * [ `Y_prev_path of Path.t ] * [ `Y of Token_id.t ] * [ `Y_path of Path.t ] * [ `Z of Token_id.t ] + val depth : t -> int + val merkle_root : t -> Hash.t val create : ?directory_name:string -> depth:index -> unit -> t @@ -171,6 +175,10 @@ module type Database_intf = sig val make_checkpoint : t -> directory_name:string -> unit val create_checkpoint : t -> directory_name:string -> unit -> t + + val get_entry_by_tid : t -> Token_id.t -> Entry.t option + + val get_path_by_tid : t -> Token_id.t -> Path.t option end let lowest_key = Token_id.of_field Field.zero @@ -194,6 +202,8 @@ module Db : Database_intf = struct type witness = [ `X of Token_id.t ] * [ `X_path of Path.t ] + * [ `Y_prev_hash of Field.t ] + * [ `Y_prev_path of Path.t ] * [ `Y of Token_id.t ] * [ `Y_path of Path.t ] * [ `Z of Token_id.t ] @@ -309,12 +319,24 @@ module Db : Database_intf = struct ~error:(Db_error.Malformed_database "Could not find new entry") |> Db_error.ok_exn in + let y_prev_location = + Location_at_depth.prev new_location + |> Option.value_exn ~message:"Can't get prev Y for first entry" + in + let y_prev = + get t y_prev_location + |> Result.of_option + ~error:(Db_error.Malformed_database "Could not find Y prev entry") + |> Db_error.ok_exn + in assert ( Token_id.(lower_entry.value < new_entry.value) && Token_id.(new_entry.value < new_entry.value_next) ) ; ( `Existed , ( `X lower_entry.value , `X_path x_path + , `Y_prev_hash (Entry.data_hash y_prev) + , 
`Y_prev_path (merkle_path t y_prev_location) , `Y new_entry.value , `Y_path (merkle_path t new_location) , `Z new_entry.value_next ) ) @@ -330,12 +352,25 @@ module Db : Database_intf = struct | Ok (`Added, new_location) -> let lower_entry = { lower_entry with value_next = tid } in set t lower_entry_location lower_entry ; + let y_prev_location = + Location_at_depth.prev new_location + |> Option.value_exn ~message:"Can't get prev Y for first entry" + in + let y_prev = + get t y_prev_location + |> Result.of_option + ~error: + (Db_error.Malformed_database "Could not find Y prev entry") + |> Db_error.ok_exn + in assert ( Token_id.(lower_entry.value < new_entry.value) && Token_id.(new_entry.value < new_entry.value_next) ) ; ( `Added , ( `X lower_entry.value , `X_path x_path + , `Y_prev_hash (Entry.data_hash y_prev) + , `Y_prev_path (merkle_path t y_prev_location) , `Y new_entry.value , `Y_path (merkle_path t new_location) , `Z new_entry.value_next ) ) ) @@ -349,4 +384,62 @@ module Db : Database_intf = struct , List.map tids ~f:(fun tid -> let _added, witness = get_or_create_entry_exn t tid in witness ) ) + + let get_entry_by_tid t tid = + let%bind.Option location = + match Account_location.get t (Account_id.with_empty_key tid) with + | Ok location -> + Some location + | Error Db_error.Account_location_not_found -> + None + | Error e -> + Db_error.raise e + in + get t location + + let get_path_by_tid t tid = + let%map.Option location = + match Account_location.get t (Account_id.with_empty_key tid) with + | Ok location -> + Some location + | Error Db_error.Account_location_not_found -> + None + | Error e -> + Db_error.raise e + in + merkle_path t location +end + +module Sparse = struct + [%%versioned + module Stable = struct + [@@@no_toplevel_latest_type] + + module V1 = struct + type t = + ( Ledger_hash.Stable.V1.t + , Account_id.Stable.V2.t + , Entry.Stable.V2.t ) + Sparse_ledger_lib.Sparse_ledger.T.Stable.V2.t + [@@deriving yojson, sexp] + + let to_latest = Fn.id + end + end] + + include Sparse_ledger_lib.Sparse_ledger.Make (Hash) (Account_id) (Entry) + + let of_db_subset ~db ~keys = + let sparse = of_hash ~depth:(Db.depth db) (Db.merkle_root db) in + List.fold keys ~init:sparse ~f:(fun sparse key -> + let aid = Account_id.with_empty_key key in + let entry = + Db.get_entry_by_tid db key + |> Option.value_exn ~message:"Could not find entry" + in + let path = + Db.get_path_by_tid db key + |> Option.value_exn ~message:"Could not find path" + in + add_path sparse path aid entry ) end diff --git a/src/app/zeko/sequencer/cli.ml b/src/app/zeko/sequencer/cli.ml index dd30e9533e..bffb97f022 100644 --- a/src/app/zeko/sequencer/cli.ml +++ b/src/app/zeko/sequencer/cli.ml @@ -85,19 +85,19 @@ let update_outer_verification_keys = in let%bind fetched_outer_vk = - Gql_client.fetch_vk l1_uri + Gql_client.fetch_vk ~logger l1_uri ( Account_id.of_public_key @@ Public_key.decompress_exn Zeko_circuits_config.t.zeko_l1 ) >>| Or_error.ok_exn >>| Compile_simple.Verification_key.of_pickles >>| Compile_simple.Verification_key.hash and fetched_bridge_holder_vk = - Gql_client.fetch_vk l1_uri + Gql_client.fetch_vk ~logger l1_uri ( Account_id.of_public_key @@ Public_key.decompress_exn @@ List.hd_exn Zeko_circuits_config.t.holder_accounts_l1 ) >>| Or_error.ok_exn >>| Compile_simple.Verification_key.of_pickles >>| Compile_simple.Verification_key.hash and fetched_helper_token_owner_vk = - Gql_client.fetch_vk l1_uri + Gql_client.fetch_vk ~logger l1_uri ( Account_id.of_public_key @@ Public_key.decompress_exn 
Zeko_circuits_config.t.helper_token_owner_l1 ) @@ -140,7 +140,7 @@ let update_outer_verification_keys = if only_check then return () else let%bind nonce = - Gql_client.infer_nonce l1_uri + Gql_client.infer_nonce ~logger l1_uri (Public_key.compress sender.public_key) >>| Or_error.ok_exn in @@ -194,7 +194,7 @@ let update_inner_verification_keys = (* Fetch current state *) let%bind commited_ledger_hash = - Gql_client.infer_state l1_uri + Gql_client.infer_state ~logger l1_uri ~zkapp_pk:Zeko_circuits_config.Inputs.zeko_l1 ~signer_pk:(Public_key.compress sender.public_key) >>| Or_error.ok_exn @@ -208,6 +208,10 @@ let update_inner_verification_keys = Ledger.create_ephemeral ~depth:Zeko_constants.constraint_constants.ledger_depth () in + let imt = + Indexed_merkle_tree.Db.create + ~depth:Zeko_constants.constraint_constants.ledger_depth () + in let da_config = Da_layer.Client.Config.of_string_list [ da_node ] in let%bind () = Da_layer.Client.map_diffs ~logger ~config:da_config @@ -229,6 +233,14 @@ let update_inner_verification_keys = in List.iter changed_accounts ~f:(fun (index, account) -> Ledger.set_at_index_exn ledger index account ) ; + (* Add to Indexed Merkle Tree *) + List.iter changed_accounts ~f:(fun (_, account) -> + let aid = Account.identifier account in + let _w = + Indexed_merkle_tree.Db.get_or_create_entry_exn imt + (Account_id.derive_token_id ~owner:aid) + in + () ) ; return () ) >>| Or_error.ok_exn >>| ignore in @@ -339,13 +351,28 @@ let update_inner_verification_keys = Da_layer.Diff.create ~source_ledger_hash ~changed_accounts:diff ~command_with_action_step_flags:None in + let new_accounts_keys = + List.filter (Da_layer.Diff.changed_accounts diff) + ~f:(fun (index, _) -> + Account.equal + (Sparse_ledger.get_exn ledger_openings index) + Account.empty ) + |> List.sort ~compare:(fun (a, _) (b, _) -> Int.compare a b) + |> List.map ~f:(fun (_, account) -> + Account_id.derive_token_id + ~owner:(Account.identifier account) ) + in let%bind () = Da_layer.Client.distribute_diff ~logger ~config:da_config - ~ledger_openings ~diff + ~ledger_openings + ~acc_set_openings: + (Indexed_merkle_tree.Sparse.of_db_subset ~db:imt + ~keys:new_accounts_keys ) + ~diff in let%bind command = let%map nonce = - Gql_client.infer_nonce l1_uri + Gql_client.infer_nonce ~logger l1_uri (Public_key.compress sender.public_key) >>| Or_error.ok_exn in @@ -416,7 +443,7 @@ let update_da_key = (* Fetch current da key *) let%bind current_da_key = - Gql_client.infer_state l1_uri + Gql_client.infer_state ~logger l1_uri ~zkapp_pk:Zeko_circuits_config.Inputs.zeko_l1 ~signer_pk:(Public_key.compress sender.public_key) >>| Or_error.ok_exn @@ -443,7 +470,7 @@ let update_da_key = else let%bind command = let%map nonce = - Gql_client.infer_nonce l1_uri + Gql_client.infer_nonce ~logger l1_uri (Public_key.compress sender.public_key) >>| Or_error.ok_exn in @@ -503,11 +530,12 @@ let update_permissions = Stdout_log.setup log_json log_level ; let%bind nonce = - Gql_client.infer_nonce l1_uri (Public_key.compress sender.public_key) + Gql_client.infer_nonce ~logger l1_uri + (Public_key.compress sender.public_key) >>| Or_error.ok_exn in let%bind command = - Deploy.update_permissions + Deploy.update_permissions ~logger ~signature_kind:Zeko_circuits_config.t.chain_l1 ~signer:sender ~fee:(Currency.Fee.of_mina_string_exn "0.1") ~nonce ~gql_uri:l1_uri @@ -562,7 +590,7 @@ let set_pause = (* Fetch current state *) let%bind current_paused = - Gql_client.infer_state l1_uri + Gql_client.infer_state ~logger l1_uri 
~zkapp_pk:Zeko_circuits_config.Inputs.zeko_l1 ~signer_pk:(Public_key.compress sender.public_key) >>| Or_error.ok_exn @@ -575,7 +603,7 @@ let set_pause = let%bind command = let%map nonce = - Gql_client.infer_nonce l1_uri + Gql_client.infer_nonce ~logger l1_uri (Public_key.compress sender.public_key) >>| Or_error.ok_exn in diff --git a/src/app/zeko/sequencer/deploy.ml b/src/app/zeko/sequencer/deploy.ml index 14bb7e0429..543975c839 100644 --- a/src/app/zeko/sequencer/deploy.ml +++ b/src/app/zeko/sequencer/deploy.ml @@ -44,14 +44,14 @@ let run ~l1_uri ~sk ~ledger_input ~faucet_aid ~da_nodes ~pause_key Thread_safe.block_on_async_exn (fun () -> let%bind nonce = - Gql_client.infer_nonce l1_uri + Gql_client.infer_nonce ~logger l1_uri (Public_key.compress sender_keypair.public_key) >>| Or_error.ok_exn in let%bind `Inner inner_account, `Holder holder_account = Sequencer_lib.Deploy.Z.Inner.initial_accounts () in - let old_ledger_witness, new_ledger, imt_hash = + let old_ledger_witness, new_ledger, imt_hash, imt = let ledger = L.create_ephemeral ~depth:constraint_constants.ledger_depth () in @@ -85,15 +85,15 @@ let run ~l1_uri ~sk ~ledger_input ~faucet_aid ~da_nodes ~pause_key (Account.create faucet_aid Currency.Balance.max_int) ; [ Account_id.derive_token_id ~owner:faucet_aid ] ) in + printf "Creating imt\n%!" ; + let imt, _witnesses = + Indexed_merkle_tree.Db.create_of_entries_exn + ~depth:constraint_constants.ledger_depth tids + in let imt_hash = - printf "Creating imt\n%!" ; - let imt, _witnesses = - Indexed_merkle_tree.Db.create_of_entries_exn - ~depth:constraint_constants.ledger_depth tids - in Account_set.of_fields [| Indexed_merkle_tree.Db.merkle_root imt |] in - (None, ledger, imt_hash) + (None, ledger, imt_hash, imt) | Some ledger_input_json -> print_endline "(* Load ledger from json file *)" ; Yojson.Safe.from_file ledger_input_json @@ -143,25 +143,25 @@ let run ~l1_uri ~sk ~ledger_input ~faucet_aid ~da_nodes ~pause_key (Ledger_hash.to_decimal_string @@ L.merkle_root ledger) ; print_endline "(* Construct IMT *)" ; + printf "Creating imt\n%!" ; + let tids = + L.to_list_sequential ledger + |> List.map ~f:Account.identifier + |> List.map ~f:(fun aid -> Account_id.derive_token_id ~owner:aid) + in + let imt, _witnesses = + Indexed_merkle_tree.Db.create_of_entries_exn + ~depth:constraint_constants.ledger_depth tids + in let imt_hash = - printf "Creating imt\n%!" ; - let tids = - L.to_list_sequential ledger - |> List.map ~f:Account.identifier - |> List.map ~f:(fun aid -> - Account_id.derive_token_id ~owner:aid ) - in - let imt, _witnesses = - Indexed_merkle_tree.Db.create_of_entries_exn - ~depth:constraint_constants.ledger_depth tids - in let imt_hash = Indexed_merkle_tree.Db.merkle_root imt in printf "IMT hash: %s\n%!" 
(Ledger_hash.to_decimal_string imt_hash) ; Account_set.of_fields [| imt_hash |] in ( Some (old_ledger_hash, old_ledger_openings, accounts_diff) , ledger - , imt_hash ) + , imt_hash + , imt ) in let%bind command = Sequencer_lib.Deploy.deploy_command_exn @@ -217,8 +217,22 @@ let run ~l1_uri ~sk ~ledger_input ~faucet_aid ~da_nodes ~pause_key (Sparse_ledger.merkle_root old_ledger_openings) ~changed_accounts ~command_with_action_step_flags:None in + let new_accounts_keys = + List.filter changed_accounts ~f:(fun (index, _) -> + Account.equal + (Sparse_ledger.get_exn old_ledger_openings index) + Account.empty ) + |> List.sort ~compare:(fun (a, _) (b, _) -> Int.compare a b) + |> List.map ~f:(fun (_, account) -> + Account_id.derive_token_id + ~owner:(Account.identifier account) ) + in Da_layer.Client.distribute_diff ~logger ~config:da_config - ~ledger_openings:old_ledger_openings ~diff + ~ledger_openings:old_ledger_openings + ~acc_set_openings: + (Indexed_merkle_tree.Sparse.of_db_subset ~db:imt + ~keys:new_accounts_keys ) + ~diff else let () = print_endline diff --git a/src/app/zeko/sequencer/gql_client/gql_client.ml b/src/app/zeko/sequencer/gql_client/gql_client.ml index 36b0d499ee..42f05251b9 100644 --- a/src/app/zeko/sequencer/gql_client/gql_client.ml +++ b/src/app/zeko/sequencer/gql_client/gql_client.ml @@ -4,9 +4,9 @@ open Mina_base open Init module Field = Snark_params.Tick.Field -let query_with_retry ?(max_attempts = 10) ?(delay = Time.Span.of_sec 5.) ~label - query_obj uri ~f = - Utils.retry ~max_attempts ~delay +let query_with_retry ~logger ?(max_attempts = 10) ?(delay = Time.Span.of_sec 5.) + ~label query_obj uri ~f = + Utils.retry ~logger ~max_attempts ~delay ~f:(fun () -> Graphql_client.Client.query_json query_obj uri >>| Result.map_error ~f:(function @@ -70,7 +70,7 @@ let fetch_action_state uri pk = |> to_string) |> Field.of_string ) -let fetch_actions uri ?from_action_state ?end_action_state pk : +let fetch_actions ~logger uri ?from_action_state ?end_action_state pk : ( Field.t array list * [ `Block_height of int ] * [ `Distance_from_max_block_height of int ] @@ -151,7 +151,7 @@ let fetch_actions uri ?from_action_state ?end_action_state pk : ] end in - query_with_retry ~label:"fetch actions" q uri ~f:(fun result -> + query_with_retry ~logger ~label:"fetch actions" q uri ~f:(fun result -> let result = M.actions_of_yojson result |> ok_exn in List.map result.actions ~f:(fun @@ -293,10 +293,10 @@ let fetch_pooled_signed_commands uri pk = |> List.map ~f:(Fn.compose ok_exn Signed_command.of_base64)) ) (* Infers nonce based on pooled commands *) -let infer_nonce uri pk = +let infer_nonce ~logger uri pk = let%bind.Deferred.Result pooled_zkapp_commands = - fetch_pooled_zkapp_commands uri pk - and pooled_signed_commands = fetch_pooled_signed_commands uri pk in + fetch_pooled_zkapp_commands ~logger uri pk + and pooled_signed_commands = fetch_pooled_signed_commands ~logger uri pk in let max_pooled_nonce = let max_zkapp_commands_nonce = List.map pooled_zkapp_commands ~f:(fun command -> @@ -315,7 +315,7 @@ let infer_nonce uri pk = in Unsigned.UInt32.(max max_zkapp_commands_nonce max_signed_commands_nonce) in - let%map.Deferred.Result committed_nonce = fetch_nonce uri pk in + let%map.Deferred.Result committed_nonce = fetch_nonce ~logger uri pk in Unsigned.UInt32.max max_pooled_nonce committed_nonce let fetch_state uri aid = @@ -410,12 +410,14 @@ let fetch_vk uri aid = |> member "verificationKey" |> to_string |> Side_loaded_verification_key.of_base64 |> Or_error.ok_exn) ) -let infer_state uri 
~zkapp_pk ~signer_pk = +let infer_state ~logger uri ~zkapp_pk ~signer_pk = let%map.Deferred.Result committed_state = - fetch_state uri + fetch_state ~logger uri ( Account_id.of_public_key @@ Signature_lib.Public_key.decompress_exn zkapp_pk ) - and pooled_zkapp_commands = fetch_pooled_zkapp_commands uri signer_pk in + and pooled_zkapp_commands = + fetch_pooled_zkapp_commands ~logger uri signer_pk + in let pooled_zkapp_commands = List.sort pooled_zkapp_commands ~compare:(fun a b -> Zkapp_command.( @@ -555,7 +557,7 @@ let fetch_fork_slot uri = |> Mina_numbers.Global_slot_since_genesis.of_int ) module For_tests = struct - let create_account uri pk = + let create_account ~logger uri pk = let q = object method query = @@ -575,12 +577,13 @@ module For_tests = struct end in let%map result = - query_with_retry ~max_attempts:1 ~label:"create account" q uri ~f:Fn.id + query_with_retry ~logger ~max_attempts:1 ~label:"create account" q uri + ~f:Fn.id >>| Or_error.ok_exn in Yojson.Safe.(to_string result) - let create_new_block uri = + let create_new_block ~logger uri = let q = object method query = @@ -595,12 +598,13 @@ module For_tests = struct end in let%map result = - query_with_retry ~max_attempts:1 ~label:"create new block" q uri ~f:Fn.id + query_with_retry ~logger ~max_attempts:1 ~label:"create new block" q uri + ~f:Fn.id >>| Or_error.ok_exn in Yojson.Safe.(to_string result) - let clear_pool uri = + let clear_pool ~logger uri = let q = object method query = @@ -615,12 +619,13 @@ module For_tests = struct end in let%map result = - query_with_retry ~max_attempts:1 ~label:"clear pool" q uri ~f:Fn.id + query_with_retry ~logger ~max_attempts:1 ~label:"clear pool" q uri + ~f:Fn.id >>| Or_error.ok_exn in Yojson.Safe.(to_string result) - let reset_state uri = + let reset_state ~logger uri = let q = object method query = @@ -635,11 +640,12 @@ module For_tests = struct end in let%map result = - query_with_retry ~label:"reset state" q uri ~f:Fn.id >>| Or_error.ok_exn + query_with_retry ~logger ~label:"reset state" q uri ~f:Fn.id + >>| Or_error.ok_exn in Yojson.Safe.(to_string result) - let shift_slots uri slots = + let shift_slots ~logger uri slots = let q = object method query = @@ -654,12 +660,13 @@ module For_tests = struct end in let%map result = - query_with_retry ~max_attempts:1 ~label:"shift slots" q uri ~f:Fn.id + query_with_retry ~logger ~max_attempts:1 ~label:"shift slots" q uri + ~f:Fn.id >>| Or_error.ok_exn in Yojson.Safe.(to_string result) - let get_zkapp_command_status uri hash = + let get_zkapp_command_status ~logger uri hash = let q = object method query = @@ -683,8 +690,8 @@ module For_tests = struct end in let%map result = - query_with_retry ~max_attempts:1 ~label:"get zkapp command status" q uri - ~f:Fn.id + query_with_retry ~logger ~max_attempts:1 ~label:"get zkapp command status" + q uri ~f:Fn.id >>| Or_error.ok_exn in Yojson.Safe.Util.( diff --git a/src/app/zeko/sequencer/lib/committer.ml b/src/app/zeko/sequencer/lib/committer.ml index 9240f3729f..6e5206a73e 100644 --- a/src/app/zeko/sequencer/lib/committer.ml +++ b/src/app/zeko/sequencer/lib/committer.ml @@ -96,7 +96,7 @@ let prove_commit ~logger ~proof_cache_db ~provers ~(executor : Executor.t) let new_inner_acc, new_inner_acc_path = get_inner_acc new_inner_ledger in let%bind inner_ase_source = let%map { inner_action_state = committed_inner_action_state; _ } = - Gql_client.infer_state + Gql_client.infer_state ~logger Executor.(executor.l1_uri) ~zkapp_pk ~signer_pk:(Public_key.compress executor.signer.public_key) @@ -143,7 +143,7 @@ 
let prove_commit ~logger ~proof_cache_db ~provers ~(executor : Executor.t) |> Deferred.return in let%bind unprocessed_actions = - Gql_client.fetch_actions archive_uri + Gql_client.fetch_actions ~logger archive_uri ~from_action_state:processed_actions_pointer zkapp_pk >>| List.map ~f:(fun (fields, _, _, _, _) -> fields) >>| List.map ~f:Zkapp_account.Actions_impl.hash @@ -205,7 +205,7 @@ let recommit_all ~logger ~proof_cache_db ~db_pool ~provers ~commit_validity_period = let open Deferred.Result.Let_syntax in let%bind { ledger_hash; _ } = - Gql_client.infer_state executor.l1_uri ~zkapp_pk + Gql_client.infer_state ~logger executor.l1_uri ~zkapp_pk ~signer_pk:(Public_key.compress executor.signer.public_key) >>| Utils.value_of_zkapp_state Rollup_state.Outer_state.typ in diff --git a/src/app/zeko/sequencer/lib/db.ml b/src/app/zeko/sequencer/lib/db.ml index 8ff3f5ceb1..447a153ad0 100644 --- a/src/app/zeko/sequencer/lib/db.ml +++ b/src/app/zeko/sequencer/lib/db.ml @@ -145,6 +145,12 @@ let migrations : Db.Migration.t list = () in return () ) + ; Db.Migration.make 5 "add_acc_set_column_to_da_diff" + (fun (module Conn : CONNECTION) -> + Conn.exec + (Caqti_request.exec Caqti_type.unit + {sql| ALTER TABLE da_diff ADD COLUMN acc_set_openings BYTEA NOT NULL |sql} ) + () ) ] let create_and_migrate ~postgres_uri ~logger = diff --git a/src/app/zeko/sequencer/lib/deploy.ml b/src/app/zeko/sequencer/lib/deploy.ml index 7244d24934..cdcfa3f725 100644 --- a/src/app/zeko/sequencer/lib/deploy.ml +++ b/src/app/zeko/sequencer/lib/deploy.ml @@ -441,12 +441,12 @@ let update_outer_state ~signature_kind ~(signer : Keypair.t) module Change_permissions = Zeko_circuits.Rule_change_permissions.Make (Zeko_circuits_config.Inputs) () -let update_permissions ~signature_kind ~(signer : Keypair.t) +let update_permissions ~logger ~signature_kind ~(signer : Keypair.t) ~(fee : Currency.Fee.t) ~(nonce : Account.Nonce.t) ~gql_uri ~(permissions : Permissions.t) = let proof_cache_db = Proof_cache_tag.create_identity_db () in let%bind old_vk = - Gql_client.fetch_vk gql_uri + Gql_client.fetch_vk ~logger gql_uri ( Account_id.of_public_key @@ Public_key.decompress_exn Zeko_circuits_config.t.zeko_l1 ) >>| Or_error.ok_exn diff --git a/src/app/zeko/sequencer/lib/executor.ml b/src/app/zeko/sequencer/lib/executor.ml index 56d725c46a..d6d6a3fa37 100644 --- a/src/app/zeko/sequencer/lib/executor.ml +++ b/src/app/zeko/sequencer/lib/executor.ml @@ -47,7 +47,7 @@ let process_command ~logger t (command : Zkapp_command.t) = | Some nonce -> return (Ok nonce) | None -> - Gql_client.infer_nonce t.l1_uri + Gql_client.infer_nonce ~logger t.l1_uri (Public_key.compress t.signer.public_key) >>| Result.map_error ~f:(fun err -> `Nonce_inference_error err) in diff --git a/src/app/zeko/sequencer/lib/zeko_sequencer.ml b/src/app/zeko/sequencer/lib/zeko_sequencer.ml index 968d6bc9fa..5f6c3b77de 100644 --- a/src/app/zeko/sequencer/lib/zeko_sequencer.ml +++ b/src/app/zeko/sequencer/lib/zeko_sequencer.ml @@ -483,9 +483,23 @@ module Sequencer = struct Zkapp_command.all_account_updates_list command |> List.map ~f:(fun _ -> true) ) ) in + let new_accounts_keys = + List.filter changed_accounts ~f:(fun (index, _) -> + Account.equal + (Sparse_ledger.get_exn source_ledger index) + Account.empty ) + |> List.sort ~compare:(fun (a, _) (b, _) -> Int.compare a b) + |> List.map ~f:(fun (_, account) -> + Account_id.derive_token_id + ~owner:(Account.identifier account) ) + in let%bind () = Da_layer.Client.enqueue_diff t.da_client ~genesis:false - ~ledger_openings:source_ledger ~diff + 
~ledger_openings:source_ledger + ~acc_set_openings: + (Indexed_merkle_tree.Sparse.of_db_subset ~db:t.imt + ~keys:new_accounts_keys ) + ~diff ~target_ledger_hash:(L.Db.merkle_root t.ledger) in @@ -545,9 +559,23 @@ module Sequencer = struct ~source_ledger_hash:(Sparse_ledger.merkle_root source_ledger) ~changed_accounts ~command_with_action_step_flags:None in + let new_accounts_keys = + List.filter changed_accounts ~f:(fun (index, _) -> + Account.equal + (Sparse_ledger.get_exn source_ledger index) + Account.empty ) + |> List.sort ~compare:(fun (a, _) (b, _) -> Int.compare a b) + |> List.map ~f:(fun (_, account) -> + Account_id.derive_token_id + ~owner:(Account.identifier account) ) + in let%bind () = Da_layer.Client.enqueue_diff t.da_client ~genesis:false - ~ledger_openings:source_ledger ~diff + ~ledger_openings:source_ledger + ~acc_set_openings: + (Indexed_merkle_tree.Sparse.of_db_subset ~db:t.imt + ~keys:new_accounts_keys ) + ~diff ~target_ledger_hash:(L.Db.merkle_root t.ledger) in match%map @@ -569,12 +597,14 @@ module Sequencer = struct C.Rollup_state.Outer_action_state.With_length.(raw s, length s) in let%bind all_new_actions = - Gql_client.fetch_actions t.config.archive_uri + Gql_client.fetch_actions ~logger t.config.archive_uri ~from_action_state:old_synced_outer_action_state Zeko_circuits_config.Inputs.zeko_l1 in [%log info] "All new actions: %d" (List.length all_new_actions) ; - let%bind current_height = Gql_client.fetch_block_height t.config.l1_uri in + let%bind current_height = + Gql_client.fetch_block_height ~logger t.config.l1_uri + in (* Find pointer for actions to be processed *) let processed_pointer, processed_new_actions = List.fold all_new_actions ~init:(old_synced_outer_action_state, []) @@ -736,7 +766,7 @@ module Sequencer = struct let sync ~logger ({ config; _ } as t) da_config source = [%log info] "Syncing" ; let%bind commited_ledger_hash = - Gql_client.infer_state config.l1_uri + Gql_client.infer_state ~logger config.l1_uri ~zkapp_pk:Zeko_circuits_config.Inputs.zeko_l1 ~signer_pk:(Public_key.compress config.signer.public_key) >>| Or_error.ok_exn @@ -762,9 +792,15 @@ module Sequencer = struct let%bind diffs = Da_layer.Client.create_genesis_diffs ledger in let%bind () = Deferred.List.iteri ~how:`Sequential diffs - ~f:(fun i (diff, ledger_openings, `Target target_ledger_hash) -> + ~f:(fun + i + ( diff + , ledger_openings + , acc_set_openings + , `Target target_ledger_hash ) + -> Da_layer.Client.enqueue_diff t.da_client ~diff ~ledger_openings - ~target_ledger_hash ~genesis:(i = 0) ) + ~acc_set_openings ~target_ledger_hash ~genesis:(i = 0) ) in [%log info] "Enqueued genesis diff" ; return () @@ -791,7 +827,7 @@ module Sequencer = struct (* Apply accounts diff *) let mask = L.of_database t.ledger in let ledger_openings = - Da_layer.Client.get_openings + Da_layer.Client.get_ledger_openings ~diff:(Da_layer.Diff.drop_time diff) ~ledger:mask in @@ -812,11 +848,17 @@ module Sequencer = struct in () ) ; + let acc_set_openings = + Da_layer.Client.get_acc_set_openings + ~diff:(Da_layer.Diff.drop_time diff) + ~ledger_openings ~imt:t.imt + in + (* Store diff to DA client *) let%bind () = Da_layer.Client.enqueue_diff t.da_client ~diff:(Da_layer.Diff.drop_time diff) - ~ledger_openings + ~ledger_openings ~acc_set_openings ~target_ledger_hash:(L.Db.merkle_root t.ledger) ~genesis:(current_chunk = 0 && current_diff = 0) in @@ -877,7 +919,7 @@ module Sequencer = struct | false, false -> ( [%log info] "No ledger and IMT directories exist, fetching commits" ; let%bind commits = - 
Gql_client.fetch_actions archive_uri zkapp_pk + Gql_client.fetch_actions ~logger archive_uri zkapp_pk >>| Or_error.ok_exn >>| List.filter_map ~f:(fun (fields, _, _, _, _) -> match fields with @@ -892,7 +934,7 @@ module Sequencer = struct None ) in let%map commited_ledger_hash = - Gql_client.infer_state l1_uri + Gql_client.infer_state ~logger l1_uri ~zkapp_pk:Zeko_circuits_config.Inputs.zeko_l1 ~signer_pk:(Public_key.compress signer_pk) >>| Or_error.ok_exn diff --git a/src/app/zeko/sequencer/run.ml b/src/app/zeko/sequencer/run.ml index 509eaee995..31dd8b2490 100644 --- a/src/app/zeko/sequencer/run.ml +++ b/src/app/zeko/sequencer/run.ml @@ -16,12 +16,12 @@ let run ~logger ~port ~max_pool_size ~commitment_period ~da_config ~da_keys let l1_config : Utils.Slot.l1_config = let genesis_timestamp = Thread_safe.block_on_async_exn (fun () -> - Gql_client.fetch_genesis_timestamp l1_uri >>| Or_error.ok_exn ) + Gql_client.fetch_genesis_timestamp ~logger l1_uri >>| Or_error.ok_exn ) in { fork_timestamp = genesis_timestamp ; fork_slot = Thread_safe.block_on_async_exn (fun () -> - Gql_client.fetch_fork_slot l1_uri >>| Or_error.ok_exn ) + Gql_client.fetch_fork_slot ~logger l1_uri >>| Or_error.ok_exn ) } in [%log info] "Current slot: %d" diff --git a/src/app/zeko/sequencer/tests/sequencer_test.ml b/src/app/zeko/sequencer/tests/sequencer_test.ml index 4f1ae0ece8..5581bae44b 100644 --- a/src/app/zeko/sequencer/tests/sequencer_test.ml +++ b/src/app/zeko/sequencer/tests/sequencer_test.ml @@ -138,7 +138,7 @@ let () = Executor.wait_to_finish !sequencer.merger_ctx.executor in let%bind { ledger_hash = committed_ledger_hash; _ } = - Gql_client.infer_state gql_uri + Gql_client.infer_state ~logger gql_uri ~signer_pk:(Public_key.compress signer.public_key) ~zkapp_pk:(Public_key.compress outer_kp.public_key) >>| Or_error.ok_exn @@ -166,9 +166,11 @@ let () = let%bind () = Executor.wait_to_finish !sequencer.merger_ctx.executor in - let%bind _created = Gql_client.For_tests.create_new_block gql_uri in + let%bind _created = + Gql_client.For_tests.create_new_block ~logger gql_uri + in let%map { ledger_hash = committed_ledger_hash; _ } = - Gql_client.infer_state gql_uri + Gql_client.infer_state ~logger gql_uri ~signer_pk:(Public_key.compress signer.public_key) ~zkapp_pk:(Public_key.compress outer_kp.public_key) >>| Or_error.ok_exn @@ -191,9 +193,11 @@ let () = let%bind () = Executor.wait_to_finish !sequencer.merger_ctx.executor in - let%bind _created = Gql_client.For_tests.create_new_block gql_uri in + let%bind _created = + Gql_client.For_tests.create_new_block ~logger gql_uri + in let%bind { ledger_hash = committed_ledger_hash; _ } = - Gql_client.infer_state gql_uri + Gql_client.infer_state ~logger gql_uri ~signer_pk:(Public_key.compress signer.public_key) ~zkapp_pk:(Public_key.compress outer_kp.public_key) >>| Or_error.ok_exn @@ -334,7 +338,7 @@ let () = Executor.wait_to_finish new_sequencer.merger_ctx.executor in let%map { ledger_hash = committed_ledger_hash; _ } = - Gql_client.infer_state gql_uri + Gql_client.infer_state ~logger gql_uri ~signer_pk:(Public_key.compress signer.public_key) ~zkapp_pk:(Public_key.compress outer_kp.public_key) >>| Or_error.ok_exn @@ -429,7 +433,7 @@ let () = Executor.wait_to_finish !sequencer.merger_ctx.executor in let%map { ledger_hash = committed_ledger_hash; _ } = - Gql_client.infer_state gql_uri + Gql_client.infer_state ~logger gql_uri ~signer_pk:(Public_key.compress signer.public_key) ~zkapp_pk:(Public_key.compress outer_kp.public_key) >>| Or_error.ok_exn @@ -533,9 +537,11 @@ let () = 
let%bind () = Executor.wait_to_finish !sequencer.merger_ctx.executor in - let%bind _cleared = Gql_client.For_tests.clear_pool gql_uri in + let%bind _cleared = + Gql_client.For_tests.clear_pool ~logger gql_uri + in let%map { ledger_hash = committed_ledger_hash; _ } = - Gql_client.infer_state gql_uri + Gql_client.infer_state ~logger gql_uri ~signer_pk:(Public_key.compress signer.public_key) ~zkapp_pk:(Public_key.compress outer_kp.public_key) >>| Or_error.ok_exn @@ -563,9 +569,11 @@ let () = print_endline "(* Check that after restart it recommited *)" ; run (fun () -> - let%bind _created = Gql_client.For_tests.create_new_block gql_uri in + let%bind _created = + Gql_client.For_tests.create_new_block ~logger gql_uri + in let%map { ledger_hash = committed_ledger_hash; _ } = - Gql_client.infer_state gql_uri + Gql_client.infer_state ~logger gql_uri ~signer_pk:(Public_key.compress signer.public_key) ~zkapp_pk:(Public_key.compress outer_kp.public_key) >>| Or_error.ok_exn @@ -716,9 +724,11 @@ let () = let%bind () = Executor.wait_to_finish !sequencer.merger_ctx.executor in - let%bind _created = Gql_client.For_tests.create_new_block gql_uri in + let%bind _created = + Gql_client.For_tests.create_new_block ~logger gql_uri + in let%map { ledger_hash = committed_ledger_hash; _ } = - Gql_client.infer_state gql_uri + Gql_client.infer_state ~logger gql_uri ~signer_pk:(Public_key.compress signer.public_key) ~zkapp_pk:(Public_key.compress outer_kp.public_key) >>| Or_error.ok_exn @@ -757,14 +767,14 @@ let () = run (fun () -> Deferred.List.iter l1_accounts ~f:(fun keypair -> let%bind _res = - Gql_client.For_tests.create_account gql_uri + Gql_client.For_tests.create_account ~logger gql_uri (Signature_lib.Public_key.compress keypair.public_key) in return () ) ) ; let submit_deposit ~fee (signer : Keypair.t) deposit_params = let%bind nonce = - Gql_client.fetch_nonce gql_uri + Gql_client.fetch_nonce ~logger gql_uri (Signature_lib.Public_key.compress signer.public_key) >>| Or_error.ok_exn in @@ -854,7 +864,9 @@ let () = submit_deposit ~fee:4 account3 deposit3 >>= Gql_client.send_zkapp gql_uri in - let%bind _created = Gql_client.For_tests.create_new_block gql_uri in + let%bind _created = + Gql_client.For_tests.create_new_block ~logger gql_uri + in return [ (account1, deposit1) ; (account2, deposit2) @@ -869,9 +881,11 @@ let () = let%bind () = Executor.wait_to_finish !sequencer.merger_ctx.executor in - let%bind _created = Gql_client.For_tests.create_new_block gql_uri in + let%bind _created = + Gql_client.For_tests.create_new_block ~logger gql_uri + in let%map { ledger_hash = committed_ledger_hash; _ } = - Gql_client.infer_state gql_uri + Gql_client.infer_state ~logger gql_uri ~signer_pk:(Public_key.compress signer.public_key) ~zkapp_pk:(Public_key.compress outer_kp.public_key) >>| Or_error.ok_exn @@ -902,7 +916,7 @@ let () = >>= Gql_client.send_zkapp gql_uri in let%bind _created = - Gql_client.For_tests.create_new_block gql_uri + Gql_client.For_tests.create_new_block ~logger gql_uri in return [ (account1, deposit4) @@ -918,9 +932,11 @@ let () = let%bind () = Executor.wait_to_finish !sequencer.merger_ctx.executor in - let%bind _created = Gql_client.For_tests.create_new_block gql_uri in + let%bind _created = + Gql_client.For_tests.create_new_block ~logger gql_uri + in let%map { ledger_hash = committed_ledger_hash; _ } = - Gql_client.infer_state gql_uri + Gql_client.infer_state ~logger gql_uri ~signer_pk:(Public_key.compress signer.public_key) ~zkapp_pk:(Public_key.compress outer_kp.public_key) >>| Or_error.ok_exn @@ 
-937,9 +953,11 @@ let () = let%bind () = Executor.wait_to_finish !sequencer.merger_ctx.executor in - let%bind _created = Gql_client.For_tests.create_new_block gql_uri in + let%bind _created = + Gql_client.For_tests.create_new_block ~logger gql_uri + in let%map { ledger_hash = committed_ledger_hash; _ } = - Gql_client.infer_state gql_uri + Gql_client.infer_state ~logger gql_uri ~signer_pk:(Public_key.compress signer.public_key) ~zkapp_pk:(Public_key.compress outer_kp.public_key) >>| Or_error.ok_exn @@ -962,7 +980,7 @@ let () = } in let%bind actions = - Gql_client.fetch_actions gql_uri + Gql_client.fetch_actions ~logger gql_uri (Public_key.compress outer_kp.public_key) >>| Or_error.ok_exn >>| List.map ~f:(fun (fields, _, _, before, after) -> @@ -1014,7 +1032,7 @@ let () = Sequencer.current_synced_outer_action_state !sequencer in let%bind ase_actions = - Gql_client.fetch_actions gql_uri + Gql_client.fetch_actions ~logger gql_uri ~from_action_state:after_nearest_commit_action_state ~end_action_state: C.Rollup_state.Outer_action_state.( @@ -1136,7 +1154,9 @@ let () = submit_deposit ~fee:1 account3 deposit9 >>= Gql_client.send_zkapp gql_uri in - let%bind _created = Gql_client.For_tests.create_new_block gql_uri in + let%bind _created = + Gql_client.For_tests.create_new_block ~logger gql_uri + in return [ (account1, deposit7) ; (account2, deposit8) @@ -1149,16 +1169,20 @@ let () = (* shift has to be more than timeout but less than commit validity period for canceled deposit *) print_endline "(* Commit 7-9 deposits after timeout *)" ; run (fun () -> - let%bind _shifted = Gql_client.For_tests.shift_slots gql_uri 15 in + let%bind _shifted = + Gql_client.For_tests.shift_slots ~logger gql_uri 15 + in Utils.Slot.For_tests.add_to_global_slot := 15 ; let%bind commit_result = commit !sequencer >>| Or_error.ok_exn in let%bind _txn_snark = commit_result >>| Or_error.ok_exn in let%bind () = Executor.wait_to_finish !sequencer.merger_ctx.executor in - let%bind _created = Gql_client.For_tests.create_new_block gql_uri in + let%bind _created = + Gql_client.For_tests.create_new_block ~logger gql_uri + in let%map { ledger_hash = committed_ledger_hash; _ } = - Gql_client.infer_state gql_uri + Gql_client.infer_state ~logger gql_uri ~signer_pk:(Public_key.compress signer.public_key) ~zkapp_pk:(Public_key.compress outer_kp.public_key) >>| Or_error.ok_exn @@ -1170,7 +1194,7 @@ let () = let cancel_deposit ~fee (signer : Keypair.t) deposit_params = let%bind nonce = - Gql_client.fetch_nonce gql_uri + Gql_client.fetch_nonce ~logger gql_uri (Signature_lib.Public_key.compress signer.public_key) >>| Or_error.ok_exn in @@ -1186,7 +1210,7 @@ let () = } in let%bind actions = - Gql_client.fetch_actions gql_uri + Gql_client.fetch_actions ~logger gql_uri (Public_key.compress outer_kp.public_key) >>| Or_error.ok_exn >>| List.map ~f:(fun (fields, _, _, before, after) -> @@ -1284,7 +1308,8 @@ let () = Zeko_circuits_config.Inputs.helper_token_owner_l1 ) ) ) in match%map - Gql_client.fetch_state_opt gql_uri helper_aid >>| Or_error.ok_exn + Gql_client.fetch_state_opt ~logger gql_uri helper_aid + >>| Or_error.ok_exn with | Some (next_cancelled_deposit :: _next_withdrawal :: _) -> Some (UInt32.of_string (Field.to_string next_cancelled_deposit)) @@ -1368,10 +1393,10 @@ let () = in let%bind _ = Gql_client.send_zkapp gql_uri command in let%bind _created = - Gql_client.For_tests.create_new_block gql_uri + Gql_client.For_tests.create_new_block ~logger gql_uri in let%map status = - Gql_client.For_tests.get_zkapp_command_status gql_uri + 
Gql_client.For_tests.get_zkapp_command_status ~logger gql_uri (Mina_transaction.Transaction_hash.hash_command (Zkapp_command command) ) in @@ -1482,9 +1507,11 @@ let () = let%bind () = Executor.wait_to_finish !sequencer.merger_ctx.executor in - let%bind _created = Gql_client.For_tests.create_new_block gql_uri in + let%bind _created = + Gql_client.For_tests.create_new_block ~logger gql_uri + in let%map { ledger_hash = committed_ledger_hash; _ } = - Gql_client.infer_state gql_uri + Gql_client.infer_state ~logger gql_uri ~signer_pk:(Public_key.compress signer.public_key) ~zkapp_pk:(Public_key.compress outer_kp.public_key) >>| Or_error.ok_exn @@ -1521,7 +1548,7 @@ let () = >>| [%test_eq: unit Or_error.t] (Ok ()) in let%bind _created = - Gql_client.For_tests.create_new_block gql_uri + Gql_client.For_tests.create_new_block ~logger gql_uri in return [ (account1, withdrawal4) @@ -1537,9 +1564,11 @@ let () = let%bind () = Executor.wait_to_finish !sequencer.merger_ctx.executor in - let%bind _created = Gql_client.For_tests.create_new_block gql_uri in + let%bind _created = + Gql_client.For_tests.create_new_block ~logger gql_uri + in let%map { ledger_hash = committed_ledger_hash; _ } = - Gql_client.infer_state gql_uri + Gql_client.infer_state ~logger gql_uri ~signer_pk:(Public_key.compress signer.public_key) ~zkapp_pk:(Public_key.compress outer_kp.public_key) >>| Or_error.ok_exn @@ -1551,7 +1580,7 @@ let () = let finalize_withdrawal ~fee (signer : Keypair.t) withdrawal_params = let%bind nonce = - Gql_client.fetch_nonce gql_uri + Gql_client.fetch_nonce ~logger gql_uri (Signature_lib.Public_key.compress signer.public_key) >>| Or_error.ok_exn in @@ -1567,7 +1596,7 @@ let () = } in let%bind l1_actions = - Gql_client.fetch_actions gql_uri + Gql_client.fetch_actions ~logger gql_uri (Public_key.compress outer_kp.public_key) >>| Or_error.ok_exn >>| List.map ~f:(fun (fields, _, _, before, after) -> @@ -1659,7 +1688,8 @@ let () = Zeko_circuits_config.Inputs.helper_token_owner_l1 ) ) ) in match%map - Gql_client.fetch_state_opt gql_uri helper_aid >>| Or_error.ok_exn + Gql_client.fetch_state_opt ~logger gql_uri helper_aid + >>| Or_error.ok_exn with | Some (_next_cancelled_deposit :: next_withdrawal :: _) -> Some (UInt32.of_string (Field.to_string next_withdrawal)) @@ -1732,7 +1762,9 @@ let () = print_endline "(* Finalize all withdrawals *)" ; run (fun () -> - let%bind _shifted = Gql_client.For_tests.shift_slots gql_uri 200 in + let%bind _shifted = + Gql_client.For_tests.shift_slots ~logger gql_uri 200 + in let%bind () = Deferred.List.iteri withdrawals ~f:(fun i (signer, withdrawal_params) -> @@ -1742,10 +1774,10 @@ let () = in let%bind _ = Gql_client.send_zkapp gql_uri command in let%bind _created = - Gql_client.For_tests.create_new_block gql_uri + Gql_client.For_tests.create_new_block ~logger gql_uri in let%map status = - Gql_client.For_tests.get_zkapp_command_status gql_uri + Gql_client.For_tests.get_zkapp_command_status ~logger gql_uri (Mina_transaction.Transaction_hash.hash_command (Zkapp_command command) ) in diff --git a/src/app/zeko/sequencer/tests/test_spec.ml b/src/app/zeko/sequencer/tests/test_spec.ml index c069001bd1..e649220ba7 100644 --- a/src/app/zeko/sequencer/tests/test_spec.ml +++ b/src/app/zeko/sequencer/tests/test_spec.ml @@ -373,7 +373,9 @@ module Sequencer_spec = struct ?checkpoints_dir ?(commit_validity_period = Global_slot_span.of_int 10) ~logger ~postgres_uri ~gql_uri ~da_config ~da_keys ~da_quorum ~mq_host ~slot_acceptance () = - let _reset = run @@ fun () -> 
Gql_client.For_tests.reset_state gql_uri in + let _reset = + run @@ fun () -> Gql_client.For_tests.reset_state ~logger gql_uri + in let deploy_config = Option.value_exn ~message:"ZEKO_DEPLOY_CONFIG is not set" Zeko_circuits_config.deploy_config @@ -394,7 +396,7 @@ module Sequencer_spec = struct let signer = create_even_signer () in run (fun () -> let%bind _res = - Gql_client.For_tests.create_account gql_uri + Gql_client.For_tests.create_account ~logger gql_uri (Public_key.compress signer.public_key) in return () ) ; @@ -469,7 +471,8 @@ module Sequencer_spec = struct Compressed.to_base58_check @@ compress token_holder_kp.public_key) ) ; let%bind nonce = - Gql_client.infer_nonce gql_uri (Public_key.compress signer.public_key) + Gql_client.infer_nonce ~logger gql_uri + (Public_key.compress signer.public_key) >>| Or_error.ok_exn in let%bind command = @@ -491,14 +494,16 @@ module Sequencer_spec = struct Gql_client.send_zkapp gql_uri (Zkapp_command.read_all_proofs_from_disk command) in - let%bind _created = Gql_client.For_tests.create_new_block gql_uri in + let%bind _created = + Gql_client.For_tests.create_new_block ~logger gql_uri + in return () ) ; let l1_config : Utils.Slot.l1_config = let genesis_timestamp = run @@ fun () -> - Gql_client.fetch_genesis_timestamp gql_uri >>| Or_error.ok_exn + Gql_client.fetch_genesis_timestamp ~logger gql_uri >>| Or_error.ok_exn in { fork_timestamp = genesis_timestamp ; fork_slot = Mina_numbers.Global_slot_since_genesis.zero diff --git a/src/app/zeko/sequencer/utils/utils.ml b/src/app/zeko/sequencer/utils/utils.ml index a9926f1fd6..2dd7c45b26 100644 --- a/src/app/zeko/sequencer/utils/utils.ml +++ b/src/app/zeko/sequencer/utils/utils.ml @@ -5,7 +5,7 @@ open Snark_params.Tick open Async open Zeko_circuits.Zeko_util -let retry ?(max_attempts = 5) ?(delay = Time.Span.of_sec 1.) ~f () = +let retry ?logger ?(max_attempts = 5) ?(delay = Time.Span.of_sec 1.) ~f () = let rec go attempt = match%bind Monitor.try_with ~here:[%here] f @@ -14,7 +14,15 @@ let retry ?(max_attempts = 5) ?(delay = Time.Span.of_sec 1.) ~f () = with | Ok x -> return (Ok x) - | Error _ when attempt < max_attempts -> + | Error e when attempt < max_attempts -> + let () = + match logger with + | None -> + () + | Some logger -> + [%log warn] "Failed to execute function, retrying..." 
+ ~metadata:[ ("error", `String (Error.to_string_hum e)) ] + in let%bind () = after delay in go (attempt + 1) | Error e -> diff --git a/src/app/zeko/zeko_types/zeko_types.ml b/src/app/zeko/zeko_types/zeko_types.ml index 5772902db4..18bfa09fef 100644 --- a/src/app/zeko/zeko_types/zeko_types.ml +++ b/src/app/zeko/zeko_types/zeko_types.ml @@ -107,12 +107,15 @@ module Acc_set_witness = struct type serializable = { x : Token_id.t list ; z : Token_id.t list + ; y_prev_hash : Field.t list + ; y_prev_path : Path.t list ; x_path : Path.t list ; y_path : Path.t list } [@@deriving yojson] - let of_serializable ({ x; z; x_path; y_path } : serializable) : t = + let of_serializable + ({ x; z; y_prev_hash; y_prev_path; x_path; y_path } : serializable) : t = let list_to_fun l = let l = ref l in fun () -> @@ -125,16 +128,27 @@ module Acc_set_witness = struct in { get_account_set_x = list_to_fun x ; get_account_set_z = list_to_fun z + ; get_account_set_y_prev_hash = list_to_fun y_prev_hash + ; get_account_set_y_prev_path = list_to_fun y_prev_path ; get_account_set_x_path = list_to_fun x_path ; get_account_set_y_path = list_to_fun y_path } - let empty = { x = []; x_path = []; y_path = []; z = [] } + let empty = + { x = [] + ; x_path = [] + ; y_prev_hash = [] + ; y_prev_path = [] + ; y_path = [] + ; z = [] + } let add t - ((x, x_path, _, y_path, z) : + ((x, x_path, y_prev_hash, y_prev_path, _, y_path, z) : [ `X of Token_id.t ] * [ `X_path of Ledger.Path.t ] + * [ `Y_prev_hash of Field.t ] + * [ `Y_prev_path of Ledger.Path.t ] * [ `Y of Token_id.t ] * [ `Y_path of [ `Left of Field.t | `Right of Field.t ] list ] * [ `Z of Token_id.t ] ) = @@ -148,10 +162,14 @@ module Acc_set_witness = struct in let x = match x with `X x -> x in let x_path = match x_path with `X_path path -> path in + let y_prev_hash = match y_prev_hash with `Y_prev_hash hash -> hash in + let y_prev_path = match y_prev_path with `Y_prev_path path -> path in let y_path = match y_path with `Y_path path -> path in let z = match z with `Z z -> z in { x = t.x @ [ x ] ; x_path = t.x_path @ [ mina_path_to_zeko_path x_path ] + ; y_prev_hash = t.y_prev_hash @ [ y_prev_hash ] + ; y_prev_path = t.y_prev_path @ [ mina_path_to_zeko_path y_prev_path ] ; y_path = t.y_path @ [ mina_path_to_zeko_path y_path ] ; z = t.z @ [ z ] } @@ -159,6 +177,8 @@ module Acc_set_witness = struct let join t1 t2 = { x = t1.x @ t2.x ; x_path = t1.x_path @ t2.x_path + ; y_prev_hash = t1.y_prev_hash @ t2.y_prev_hash + ; y_prev_path = t1.y_prev_path @ t2.y_prev_path ; y_path = t1.y_path @ t2.y_path ; z = t1.z @ t2.z }