diff --git a/src/app/cli/src/cli_entrypoint/mina_cli_entrypoint.ml b/src/app/cli/src/cli_entrypoint/mina_cli_entrypoint.ml index 491c63c8125..d6b4581f6d5 100644 --- a/src/app/cli/src/cli_entrypoint/mina_cli_entrypoint.ml +++ b/src/app/cli/src/cli_entrypoint/mina_cli_entrypoint.ml @@ -1268,6 +1268,7 @@ Pass one of -peer, -peer-list-file, -seed, -peer-list-url.|} ; ~wallets_disk_location:(conf_dir ^/ "wallets") ~persistent_root_location:(conf_dir ^/ "root") ~persistent_frontier_location:(conf_dir ^/ "frontier") + ~cache_proof_db_location:((conf_dir ^/ "proof_cache.db")) ~epoch_ledger_location ~snark_work_fee ~time_controller ~block_production_keypairs ~monitor ~consensus_local_state ~is_archive_rocksdb ~work_reassignment_wait diff --git a/src/app/cli/src/init/client.ml b/src/app/cli/src/init/client.ml index ca4ecd202c2..6cf3cac9153 100644 --- a/src/app/cli/src/init/client.ml +++ b/src/app/cli/src/init/client.ml @@ -865,7 +865,7 @@ let hash_ledger = fun () -> let open Deferred.Let_syntax in let%bind constraint_constants = - let logger = Logger.create () in + let logger = Logger.null () in let%map conf = Runtime_config.Constants.load_constants ~logger config_files in diff --git a/src/lib/block_producer/block_producer.ml b/src/lib/block_producer/block_producer.ml index 8c4706c7f15..931bec1ffcc 100644 --- a/src/lib/block_producer/block_producer.ml +++ b/src/lib/block_producer/block_producer.ml @@ -20,6 +20,7 @@ module type CONTEXT = sig val zkapp_cmd_limit : int option ref val vrf_poll_interval : Time.Span.t + end type Structured_log_events.t += Block_produced @@ -170,7 +171,7 @@ let generate_next_state ~commit_id ~zkapp_cmd_limit ~constraint_constants ~previous_protocol_state ~time_controller ~staged_ledger ~transactions ~get_completed_work ~logger ~(block_data : Consensus.Data.Block_data.t) ~winner_pk ~scheduled_time ~log_block_creation ~block_reward_threshold - ~zkapp_cmd_limit_hardcap ~slot_tx_end ~slot_chain_end = + ~zkapp_cmd_limit_hardcap ~slot_tx_end ~slot_chain_end ~cache_proof_db = let open Interruptible.Let_syntax in let global_slot_since_hard_fork = Consensus.Data.Block_data.global_slot block_data @@ -344,7 +345,7 @@ let generate_next_state ~commit_id ~zkapp_cmd_limit ~constraint_constants let opt_ledger_proof_statement = Option.map ledger_proof_opt ~f:(fun (proof, _) -> Ledger_proof.statement - @@ Ledger_proof.Cache_tag.unwrap proof ) + @@ Ledger_proof.Cache_tag.unwrap proof cache_proof_db) in let ledger_proof_statement = match ledger_proof_opt with @@ -414,7 +415,7 @@ let generate_next_state ~commit_id ~zkapp_cmd_limit ~constraint_constants ~staged_ledger_diff:(Staged_ledger_diff.forget diff) ~ledger_proof: (Option.map ledger_proof_opt ~f:(fun (proof, _) -> - Ledger_proof.Cache_tag.unwrap proof ) ) ) + Ledger_proof.Cache_tag.unwrap proof cache_proof_db) ) ) in let witness = { Pending_coinbase_witness.pending_coinbases = @@ -701,6 +702,7 @@ let produce ~genesis_breadcrumb ~context:(module Context : CONTEXT) ~prover ~verifier ~trust_system ~get_completed_work ~transaction_resource_pool ~frontier_reader ~time_controller ~transition_writer ~log_block_creation ~block_reward_threshold ~block_produced_bvar ~slot_tx_end ~slot_chain_end + ~cache_proof_db ~net ~zkapp_cmd_limit_hardcap ivar (scheduled_time, block_data, winner_pubkey) = let open Context in @@ -798,7 +800,7 @@ let produce ~genesis_breadcrumb ~context:(module Context : CONTEXT) ~prover ~transactions ~get_completed_work ~logger ~log_block_creation ~winner_pk:winner_pubkey ~block_reward_threshold ~zkapp_cmd_limit:!zkapp_cmd_limit 
~zkapp_cmd_limit_hardcap - ~slot_tx_end ~slot_chain_end + ~slot_tx_end ~slot_chain_end ~cache_proof_db in [%log internal] "Generate_next_state_done" ; match next_state_opt with @@ -932,10 +934,11 @@ let produce ~genesis_breadcrumb ~context:(module Context : CONTEXT) ~prover "Build breadcrumb on produced block" (fun () -> Breadcrumb.build ~logger ~precomputed_values ~verifier ~get_completed_work:(Fn.const None) ~trust_system - ~parent:crumb ~transition + ~parent:crumb + ~transition ~sender:None (* Consider skipping `All here *) ~skip_staged_ledger_verification:`Proofs - ~transition_receipt_time () ) + ~transition_receipt_time ~cache_proof_db () ) |> Deferred.Result.map_error ~f:(function | `Invalid_staged_ledger_diff e -> `Invalid_staged_ledger_diff (e, staged_ledger_diff) @@ -1224,7 +1227,7 @@ let run ~context:(module Context : CONTEXT) ~vrf_evaluator ~prover ~verifier ~time_controller ~consensus_local_state ~coinbase_receiver ~frontier_reader ~transition_writer ~set_next_producer_timing ~log_block_creation ~block_reward_threshold ~block_produced_bvar ~vrf_evaluation_state ~net - ~zkapp_cmd_limit_hardcap = + ~zkapp_cmd_limit_hardcap ~cache_proof_db = let open Context in O1trace.sync_thread "produce_blocks" (fun () -> let genesis_breadcrumb = @@ -1243,7 +1246,7 @@ let run ~context:(module Context : CONTEXT) ~vrf_evaluator ~prover ~verifier ~transaction_resource_pool ~frontier_reader ~time_controller ~transition_writer ~log_block_creation ~block_reward_threshold ~block_produced_bvar ~slot_tx_end ~slot_chain_end ~net - ~zkapp_cmd_limit_hardcap + ~zkapp_cmd_limit_hardcap ~cache_proof_db in let module Breadcrumb = Transition_frontier.Breadcrumb in let production_supervisor = Singleton_supervisor.create ~task:produce in @@ -1372,7 +1375,7 @@ let run ~context:(module Context : CONTEXT) ~vrf_evaluator ~prover ~verifier : unit Block_time.Timeout.t ) ) let run_precomputed ~context:(module Context : CONTEXT) ~verifier ~trust_system - ~time_controller ~frontier_reader ~transition_writer ~precomputed_blocks = + ~time_controller ~frontier_reader ~transition_writer ~precomputed_blocks ~cache_proof_db = let open Context in let rejected_blocks_logger = Logger.create ~id:Logger.Logger_id.rejected_blocks () @@ -1499,7 +1502,7 @@ let run_precomputed ~context:(module Context : CONTEXT) ~verifier ~trust_system ~get_completed_work:(Fn.const None) ~trust_system ~parent:crumb ~transition ~sender:None ~skip_staged_ledger_verification:`Proofs - ~transition_receipt_time () + ~transition_receipt_time ~cache_proof_db () |> Deferred.Result.map_error ~f:(function | `Invalid_staged_ledger_diff e -> `Invalid_staged_ledger_diff (e, staged_ledger_diff) diff --git a/src/lib/bootstrap_controller/bootstrap_controller.ml b/src/lib/bootstrap_controller/bootstrap_controller.ml index 07ff47f8b9c..c24b2f11c8b 100644 --- a/src/lib/bootstrap_controller/bootstrap_controller.ml +++ b/src/lib/bootstrap_controller/bootstrap_controller.ml @@ -276,7 +276,7 @@ let external_transition_compare ~context:(module Context : CONTEXT) = *) let run ~context:(module Context : CONTEXT) ~trust_system ~verifier ~network ~consensus_local_state ~transition_reader ~preferred_peers ~persistent_root - ~persistent_frontier ~initial_root_transition ~catchup_mode = + ~persistent_frontier ~initial_root_transition ~catchup_mode ~cache_proof_db = let open Context in O1trace.thread "bootstrap" (fun () -> let rec loop previous_cycles = @@ -627,7 +627,7 @@ let run ~context:(module Context : CONTEXT) ~trust_system ~verifier ~network Transition_frontier.load 
~context:(module Consensus_context) ~retry_with_fresh_db:false ~verifier ~consensus_local_state - ~persistent_root ~persistent_frontier ~catchup_mode () + ~persistent_root ~persistent_frontier ~catchup_mode ~cache_proof_db () >>| function | Ok frontier -> frontier @@ -886,7 +886,7 @@ let%test_module "Bootstrap_controller tests" = (E.Set.of_list saved_transitions) ~expect:(E.Set.of_list expected_transitions) ) - let run_bootstrap ~timeout_duration ~my_net ~transition_reader = + let run_bootstrap ~timeout_duration ~my_net ~transition_reader ~cache_proof_db= let open Fake_network in let time_controller = Block_time.Controller.basic ~logger in let persistent_root = @@ -909,7 +909,7 @@ let%test_module "Bootstrap_controller tests" = ~trust_system ~verifier ~network:my_net.network ~preferred_peers:[] ~consensus_local_state:my_net.state.consensus_local_state ~transition_reader ~persistent_root ~persistent_frontier - ~catchup_mode:`Normal ~initial_root_transition ) + ~catchup_mode:`Normal ~initial_root_transition ~cache_proof_db) let assert_transitions_increasingly_sorted ~root (incoming_transitions : Transition_cache.element list) = @@ -941,7 +941,9 @@ let%test_module "Bootstrap_controller tests" = : Mina_block.Header.t ) let%test_unit "sync with one node after receiving a transition" = + Quickcheck.test ~trials:1 + Fake_network.Generator.( gen ~precomputed_values ~verifier ~max_frontier_length ~use_super_catchup:false @@ -950,6 +952,7 @@ let%test_module "Bootstrap_controller tests" = ~frontier_branch_size:((max_frontier_length * 2) + 2) ]) ~f:(fun fake_network -> + let cache_proof_db = Ledger_proof.Cache_tag.For_tests.random () in let [ my_net; peer_net ] = fake_network.peer_networks in let transition_reader, transition_writer = Pipe_lib.Strict_pipe.create ~name:(__MODULE__ ^ __LOC__) @@ -971,7 +974,7 @@ let%test_module "Bootstrap_controller tests" = Async.Thread_safe.block_on_async_exn (fun () -> run_bootstrap ~timeout_duration:(Block_time.Span.of_ms 30_000L) - ~my_net ~transition_reader ) + ~my_net ~transition_reader ~cache_proof_db) in assert_transitions_increasingly_sorted ~root:(Transition_frontier.root new_frontier) diff --git a/src/lib/bootstrap_controller/bootstrap_controller.mli b/src/lib/bootstrap_controller/bootstrap_controller.mli index d9730576a92..0b58225b02a 100644 --- a/src/lib/bootstrap_controller/bootstrap_controller.mli +++ b/src/lib/bootstrap_controller/bootstrap_controller.mli @@ -44,4 +44,5 @@ val run : -> persistent_frontier:Transition_frontier.Persistent_frontier.t -> initial_root_transition:Mina_block.Validated.t -> catchup_mode:[ `Normal | `Super ] + -> cache_proof_db:Ledger_proof.Cache_tag.Cache.t -> (Transition_frontier.t * Transition_cache.element list) Deferred.t diff --git a/src/lib/ledger_catchup/ledger_catchup.ml b/src/lib/ledger_catchup/ledger_catchup.ml index 5e452ab565e..c87863b9178 100644 --- a/src/lib/ledger_catchup/ledger_catchup.ml +++ b/src/lib/ledger_catchup/ledger_catchup.ml @@ -13,15 +13,15 @@ end let run ~context:(module Context : CONTEXT) ~trust_system ~verifier ~network ~frontier ~catchup_job_reader ~catchup_breadcrumbs_writer - ~unprocessed_transition_cache : unit = + ~unprocessed_transition_cache ~cache_proof_db : unit = match Transition_frontier.catchup_state frontier with | Hash _ -> Normal_catchup.run ~context:(module Context) ~trust_system ~verifier ~network ~frontier ~catchup_job_reader - ~catchup_breadcrumbs_writer ~unprocessed_transition_cache + ~catchup_breadcrumbs_writer ~unprocessed_transition_cache ~cache_proof_db | Full _ -> 
Super_catchup.run ~context:(module Context) ~trust_system ~verifier ~network ~frontier ~catchup_job_reader - ~catchup_breadcrumbs_writer ~unprocessed_transition_cache + ~catchup_breadcrumbs_writer ~unprocessed_transition_cache ~cache_proof_db diff --git a/src/lib/ledger_catchup/ledger_catchup.mli b/src/lib/ledger_catchup/ledger_catchup.mli index 0a957e7e835..ef33feec275 100644 --- a/src/lib/ledger_catchup/ledger_catchup.mli +++ b/src/lib/ledger_catchup/ledger_catchup.mli @@ -45,4 +45,5 @@ val run : Strict_pipe.Writer.t -> unprocessed_transition_cache: Transition_handler.Unprocessed_transition_cache.t + -> cache_proof_db:Ledger_proof.Cache_tag.Cache.t -> unit diff --git a/src/lib/ledger_catchup/normal_catchup.ml b/src/lib/ledger_catchup/normal_catchup.ml index 15f6be8dec7..e08448dd739 100644 --- a/src/lib/ledger_catchup/normal_catchup.ml +++ b/src/lib/ledger_catchup/normal_catchup.ml @@ -496,7 +496,7 @@ let download_transitions ~target_hash ~logger ~trust_system ~network let verify_transitions_and_build_breadcrumbs ~context:(module Context : CONTEXT) ~trust_system ~verifier ~frontier ~unprocessed_transition_cache ~transitions - ~target_hash ~subtrees = + ~target_hash ~subtrees ~cache_proof_db = let open Context in let open Deferred.Or_error.Let_syntax in let verification_start_time = Core.Time.now () in @@ -610,7 +610,7 @@ let verify_transitions_and_build_breadcrumbs ~context:(module Context : CONTEXT) let open Deferred.Let_syntax in match%bind Transition_handler.Breadcrumb_builder.build_subtrees_of_breadcrumbs ~logger - ~precomputed_values ~verifier ~trust_system ~frontier ~initial_hash + ~precomputed_values ~verifier ~trust_system ~frontier ~initial_hash ~cache_proof_db trees_of_transitions with | Ok result -> @@ -659,7 +659,7 @@ let garbage_collect_subtrees ~logger ~subtrees = let run ~context:(module Context : CONTEXT) ~trust_system ~verifier ~network ~frontier ~catchup_job_reader ~catchup_breadcrumbs_writer - ~unprocessed_transition_cache : unit = + ~unprocessed_transition_cache ~cache_proof_db : unit = let open Context in let hash_tree = match Transition_frontier.catchup_state frontier with @@ -828,7 +828,7 @@ let run ~context:(module Context : CONTEXT) ~trust_system ~verifier ~network verify_transitions_and_build_breadcrumbs ~context:(module Context) ~trust_system ~verifier ~frontier ~unprocessed_transition_cache - ~transitions ~target_hash ~subtrees + ~transitions ~target_hash ~subtrees ~cache_proof_db with | Ok trees_of_breadcrumbs -> [%log trace] @@ -965,7 +965,7 @@ let%test_module "Ledger_catchup tests" = Strict_pipe.Reader.t } - let run_catchup ~network ~frontier = + let run_catchup ~network ~frontier ~cache_proof_db = let catchup_job_reader, catchup_job_writer = Strict_pipe.create ~name:(__MODULE__ ^ __LOC__) (Buffered (`Capacity 10, `Overflow Crash)) @@ -981,14 +981,14 @@ let%test_module "Ledger_catchup tests" = run ~context:(module Context) ~verifier ~trust_system ~network ~frontier ~catchup_breadcrumbs_writer - ~catchup_job_reader ~unprocessed_transition_cache ; + ~catchup_job_reader ~unprocessed_transition_cache ~cache_proof_db; { cache = unprocessed_transition_cache ; job_writer = catchup_job_writer ; breadcrumbs_reader = catchup_breadcrumbs_reader } - let run_catchup_with_target ~network ~frontier ~target_breadcrumb = - let test = run_catchup ~network ~frontier in + let run_catchup_with_target ~network ~frontier ~target_breadcrumb ~cache_proof_db= + let test = run_catchup ~network ~frontier ~cache_proof_db in let parent_hash = Transition_frontier.Breadcrumb.parent_hash 
target_breadcrumb in @@ -1000,12 +1000,12 @@ let%test_module "Ledger_catchup tests" = (parent_hash, [ Rose_tree.T ((target_transition, None), []) ]) ; (`Test test, `Cached_transition target_transition) - let test_successful_catchup ~my_net ~target_best_tip_path = + let test_successful_catchup ~my_net ~target_best_tip_path ~cache_proof_db = let open Fake_network in let target_breadcrumb = List.last_exn target_best_tip_path in let `Test { breadcrumbs_reader; _ }, _ = run_catchup_with_target ~network:my_net.network - ~frontier:my_net.state.frontier ~target_breadcrumb + ~frontier:my_net.state.frontier ~target_breadcrumb ~cache_proof_db in (* TODO: expose Strict_pipe.read *) let%map cached_catchup_breadcrumbs = @@ -1044,6 +1044,7 @@ let%test_module "Ledger_catchup tests" = let%test_unit "can catchup to a peer within [k/2,k]" = [%log info] "running catchup to peer" ; + let cache_proof_db = Ledger_proof.Cache_tag.For_tests.random () in Quickcheck.test ~trials:5 Fake_network.Generator.( let open Quickcheck.Generator.Let_syntax in @@ -1065,10 +1066,11 @@ let%test_module "Ledger_catchup tests" = (best_tip peer_net.state.frontier)) in Thread_safe.block_on_async_exn (fun () -> - test_successful_catchup ~my_net ~target_best_tip_path ) ) + test_successful_catchup ~my_net ~target_best_tip_path ~cache_proof_db) ) let%test_unit "catchup succeeds even if the parent transition is already \ in the frontier" = + let cache_proof_db = Ledger_proof.Cache_tag.For_tests.random () in Quickcheck.test ~trials:1 Fake_network.Generator.( gen ~precomputed_values ~verifier ~max_frontier_length @@ -1081,9 +1083,10 @@ let%test_module "Ledger_catchup tests" = [ Transition_frontier.best_tip peer_net.state.frontier ] in Thread_safe.block_on_async_exn (fun () -> - test_successful_catchup ~my_net ~target_best_tip_path ) ) + test_successful_catchup ~my_net ~target_best_tip_path ~cache_proof_db ) ) let%test_unit "catchup fails if one of the parent transitions fail" = + let cache_proof_db = Ledger_proof.Cache_tag.For_tests.random () in Quickcheck.test ~trials:1 Fake_network.Generator.( gen ~precomputed_values ~verifier ~max_frontier_length @@ -1114,7 +1117,7 @@ let%test_module "Ledger_catchup tests" = Thread_safe.block_on_async_exn (fun () -> let `Test { cache; _ }, `Cached_transition cached_transition = run_catchup_with_target ~network:my_net.network - ~frontier:my_net.state.frontier ~target_breadcrumb + ~frontier:my_net.state.frontier ~target_breadcrumb ~cache_proof_db in let cached_failing_transition = Transition_handler.Unprocessed_transition_cache.register_exn diff --git a/src/lib/ledger_catchup/super_catchup.ml b/src/lib/ledger_catchup/super_catchup.ml index 8ff8e4f00c9..bb6e3353427 100644 --- a/src/lib/ledger_catchup/super_catchup.ml +++ b/src/lib/ledger_catchup/super_catchup.ml @@ -1406,7 +1406,7 @@ let run_catchup ~context:(module Context : CONTEXT) ~trust_system ~verifier let run ~context:(module Context : CONTEXT) ~trust_system ~verifier ~network ~frontier ~catchup_job_reader ~catchup_breadcrumbs_writer - ~unprocessed_transition_cache : unit = + ~unprocessed_transition_cache ~cache_proof_db : unit = O1trace.background_thread "perform_super_catchup" (fun () -> run_catchup ~context:(module Context) @@ -1414,7 +1414,7 @@ let run ~context:(module Context : CONTEXT) ~trust_system ~verifier ~network ~unprocessed_transition_cache ~catchup_breadcrumbs_writer ~build_func: (Transition_frontier.Breadcrumb.build - ~get_completed_work:(Fn.const None) ) ) + ~get_completed_work:(Fn.const None) ~cache_proof_db ) ) (* Unit tests *) @@ 
-1518,7 +1518,7 @@ let%test_module "Ledger_catchup tests" = Strict_pipe.Reader.t } - let setup_catchup_pipes ~network ~frontier = + let setup_catchup_pipes ~network ~frontier ~cache_proof_db = let catchup_job_reader, catchup_job_writer = Strict_pipe.create ~name:(__MODULE__ ^ __LOC__) (Buffered (`Capacity 10, `Overflow Crash)) @@ -1534,7 +1534,7 @@ let%test_module "Ledger_catchup tests" = run ~context:(module Context) ~verifier ~trust_system ~network ~frontier ~catchup_breadcrumbs_writer - ~catchup_job_reader ~unprocessed_transition_cache ; + ~catchup_job_reader ~unprocessed_transition_cache ~cache_proof_db; { cache = unprocessed_transition_cache ; job_writer = catchup_job_writer ; breadcrumbs_reader = catchup_breadcrumbs_reader @@ -1560,8 +1560,8 @@ let%test_module "Ledger_catchup tests" = ; breadcrumbs_reader = catchup_breadcrumbs_reader } *) - let setup_catchup_with_target ~network ~frontier ~target_breadcrumb = - let test = setup_catchup_pipes ~network ~frontier in + let setup_catchup_with_target ~network ~frontier ~target_breadcrumb ~cache_proof_db = + let test = setup_catchup_pipes ~network ~frontier ~cache_proof_db in let parent_hash = Transition_frontier.Breadcrumb.parent_hash target_breadcrumb in @@ -1610,12 +1610,12 @@ let%test_module "Ledger_catchup tests" = (n + 1) else Deferred.return b_list - let test_successful_catchup ~my_net ~target_best_tip_path = + let test_successful_catchup ~my_net ~target_best_tip_path ~cache_proof_db = let open Fake_network in let target_breadcrumb = List.last_exn target_best_tip_path in let `Test { breadcrumbs_reader; _ }, _ = setup_catchup_with_target ~network:my_net.network - ~frontier:my_net.state.frontier ~target_breadcrumb + ~frontier:my_net.state.frontier ~target_breadcrumb ~cache_proof_db in let%map breadcrumb_list = call_read ~breadcrumbs_reader ~target_best_tip_path ~my_peer:my_net [] 0 @@ -1646,6 +1646,7 @@ let%test_module "Ledger_catchup tests" = let%test_unit "can catchup to a peer within [k/2,k]" = [%log info] "running catchup to peer" ; + let cache_proof_db = Ledger_proof.Cache_tag.For_tests.random () in Quickcheck.test ~trials:5 Fake_network.Generator.( let open Quickcheck.Generator.Let_syntax in @@ -1666,10 +1667,11 @@ let%test_module "Ledger_catchup tests" = (best_tip peer_net.state.frontier)) in Thread_safe.block_on_async_exn (fun () -> - test_successful_catchup ~my_net ~target_best_tip_path ) ) + test_successful_catchup ~my_net ~target_best_tip_path ~cache_proof_db ) ) let%test_unit "catchup succeeds even if the parent transition is already \ in the frontier" = + let cache_proof_db = Ledger_proof.Cache_tag.For_tests.random () in Quickcheck.test ~trials:1 Fake_network.Generator.( gen ~precomputed_values ~verifier ~max_frontier_length @@ -1682,10 +1684,11 @@ let%test_module "Ledger_catchup tests" = [ Transition_frontier.best_tip peer_net.state.frontier ] in Thread_safe.block_on_async_exn (fun () -> - test_successful_catchup ~my_net ~target_best_tip_path ) ) + test_successful_catchup ~my_net ~target_best_tip_path ~cache_proof_db) ) let%test_unit "catchup succeeds even if the parent transition is already \ in the frontier" = + let cache_proof_db = Ledger_proof.Cache_tag.For_tests.random () in Quickcheck.test ~trials:1 Fake_network.Generator.( gen ~precomputed_values ~verifier ~max_frontier_length @@ -1698,11 +1701,12 @@ let%test_module "Ledger_catchup tests" = [ Transition_frontier.best_tip peer_net.state.frontier ] in Thread_safe.block_on_async_exn (fun () -> - test_successful_catchup ~my_net ~target_best_tip_path ) ) + 
test_successful_catchup ~my_net ~target_best_tip_path ~cache_proof_db ) ) let%test_unit "when catchup fails to download state hashes, catchup will \ properly clear the unprocessed_transition_cache of the \ blocks that triggered catchup" = + let cache_proof_db = Ledger_proof.Cache_tag.For_tests.random () in Quickcheck.test ~trials:1 Fake_network.Generator.( gen ~precomputed_values ~verifier ~max_frontier_length @@ -1721,7 +1725,7 @@ let%test_module "Ledger_catchup tests" = let target_breadcrumb = List.last_exn target_best_tip_path in let test = setup_catchup_pipes ~network:my_net.network - ~frontier:my_net.state.frontier + ~frontier:my_net.state.frontier ~cache_proof_db in let parent_hash = Transition_frontier.Breadcrumb.parent_hash target_breadcrumb @@ -1781,6 +1785,7 @@ let%test_module "Ledger_catchup tests" = in Deferred.return (Some []) in + let cache_proof_db = Ledger_proof.Cache_tag.For_tests.random () in Quickcheck.test ~trials:1 Fake_network.Generator.( gen ~precomputed_values ~verifier ~max_frontier_length @@ -1819,7 +1824,7 @@ let%test_module "Ledger_catchup tests" = let target_breadcrumb = List.last_exn target_best_tip_path in let test = setup_catchup_pipes ~network:my_net.network - ~frontier:my_net.state.frontier + ~frontier:my_net.state.frontier ~cache_proof_db in let parent_hash = Transition_frontier.Breadcrumb.parent_hash target_breadcrumb diff --git a/src/lib/ledger_proof/ledger_proof.ml b/src/lib/ledger_proof/ledger_proof.ml index 5d77be0ba27..c234c285d68 100644 --- a/src/lib/ledger_proof/ledger_proof.ml +++ b/src/lib/ledger_proof/ledger_proof.ml @@ -35,14 +35,21 @@ module Prod : Ledger_proof_intf.S with type t = Transaction_snark.t = struct Transaction_snark.create ~statement:{ statement with sok_digest } ~proof module Cache_tag = struct + + module Cache = Transaction_snark.Cache_tag.Cache + type value = t [@@deriving compare, equal, sexp, yojson, hash] + type t = Transaction_snark.Cache_tag.t [@@deriving compare, equal, sexp, yojson, hash] - let unwrap (x : t) : value = Transaction_snark.Cache_tag.unwrap x + let unwrap (x : t) (db : Cache.t) : value = Transaction_snark.Cache_tag.unwrap x db + + let generate (x : value) (db : Cache.t) : t = Transaction_snark.Cache_tag.generate x db + + module For_tests = Transaction_snark.Cache_tag.For_tests - let generate (x : value) : t = Transaction_snark.Cache_tag.generate x end end diff --git a/src/lib/ledger_proof/ledger_proof_intf.ml b/src/lib/ledger_proof/ledger_proof_intf.ml index 8c979f3f750..bd32c5a290f 100644 --- a/src/lib/ledger_proof/ledger_proof_intf.ml +++ b/src/lib/ledger_proof/ledger_proof_intf.ml @@ -46,12 +46,26 @@ module type S = sig val snarked_ledger_hash : t -> Frozen_ledger_hash.t module Cache_tag : sig + + module Cache : sig + + type t + + val initialize : string -> t + + end + type value := t type t [@@deriving compare, equal, sexp, yojson, hash] - val unwrap : t -> value + val unwrap : t -> Cache.t -> value + + val generate : value -> Cache.t -> t - val generate : value -> t + module For_tests : sig + + val random : unit -> Cache.t + end end end diff --git a/src/lib/mina_base/proof_cache_tag/dune b/src/lib/mina_base/proof_cache_tag/dune index 44cf7a969f4..29d7c9fe39c 100644 --- a/src/lib/mina_base/proof_cache_tag/dune +++ b/src/lib/mina_base/proof_cache_tag/dune @@ -4,6 +4,7 @@ (default_implementation proof_cache_tag.identity) (libraries ;; opam libraries + async ppx_inline_test.config base.base_internalhash_types bin_prot.shape diff --git a/src/lib/mina_base/proof_cache_tag/filesystem/proof_cache_tag.ml 
b/src/lib/mina_base/proof_cache_tag/filesystem/proof_cache_tag.ml
index c04f9b8262d..c093f4137a6 100644
--- a/src/lib/mina_base/proof_cache_tag/filesystem/proof_cache_tag.ml
+++ b/src/lib/mina_base/proof_cache_tag/filesystem/proof_cache_tag.ml
@@ -6,31 +6,43 @@ type value = Pickles.Proof.Proofs_verified_2.t
 
 let counter = ref 0
 
-let prefix = Filename.temp_dir "mina" "proof_cache"
+module Cache = struct
 
-let path i = prefix ^ Filename.dir_sep ^ Int.to_string i
+  let unique_sub_path i t = t ^ Filename.dir_sep ^ Int.to_string i
+
+  let initialize path = path
+
+  type t = string
+
+end
 
 type t = { idx : int } [@@deriving compare, equal, sexp, yojson, hash]
 
-let unwrap ({ idx = x } : t) : value =
+let unwrap ({ idx = x } : t) (db : Cache.t) =
   (* Read from the file. *)
-  In_channel.with_file ~binary:true (path x) ~f:(fun chan ->
+  In_channel.with_file ~binary:true (Cache.unique_sub_path x db) ~f:(fun chan ->
       let str = In_channel.input_all chan in
       Binable.of_string
         (module Pickles.Proof.Proofs_verified_2.Stable.Latest)
         str )
 
-let generate (x : value) : t =
+let generate (x : value) (db : Cache.t) =
   let new_counter = !counter in
   incr counter ;
   let res = { idx = new_counter } in
   (* When this reference is GC'd, delete the file. *)
   Gc.Expert.add_finalizer_last_exn res (fun () ->
-      Core.Unix.unlink (path new_counter) ) ;
+      Core.Unix.unlink (Cache.unique_sub_path new_counter db) ) ;
   (* Write the proof to the file. *)
-  Out_channel.with_file ~binary:true (path new_counter) ~f:(fun chan ->
+  Out_channel.with_file ~binary:true (Cache.unique_sub_path new_counter db) ~f:(fun chan ->
       Out_channel.output_string chan
       @@ Binable.to_string
           (module Pickles.Proof.Proofs_verified_2.Stable.Latest)
           x ) ;
   res
+
+module For_tests = struct
+
+  let random () = Cache.initialize @@ Filename.temp_dir "mina" "proof_cache"
+
+end
\ No newline at end of file
diff --git a/src/lib/mina_base/proof_cache_tag/identity/proof_cache_tag.ml b/src/lib/mina_base/proof_cache_tag/identity/proof_cache_tag.ml
index 86f2a08b672..afcc3107552 100644
--- a/src/lib/mina_base/proof_cache_tag/identity/proof_cache_tag.ml
+++ b/src/lib/mina_base/proof_cache_tag/identity/proof_cache_tag.ml
@@ -1,8 +1,23 @@
 open Core_kernel
 
+module Cache = struct
+
+  type t = unit
+
+  let initialize _path = ()
+
+end
+
+
 type t = Pickles.Proof.Proofs_verified_2.t
 [@@deriving compare, equal, sexp, yojson, hash]
 
-let unwrap = Fn.id
+let unwrap t _db = Fn.id t
+
+let generate t _db = Fn.id t
+
+module For_tests = struct
+
+  let random = Cache.initialize
 
-let generate = Fn.id
+end
\ No newline at end of file
diff --git a/src/lib/mina_base/proof_cache_tag/lmdb/proof_cache_tag.ml b/src/lib/mina_base/proof_cache_tag/lmdb/proof_cache_tag.ml
index 0807e2b90bb..f9d2bcf9115 100644
--- a/src/lib/mina_base/proof_cache_tag/lmdb/proof_cache_tag.ml
+++ b/src/lib/mina_base/proof_cache_tag/lmdb/proof_cache_tag.ml
@@ -5,6 +5,7 @@ open Lmdb_storage.Generic
 
 type value = Pickles.Proof.Proofs_verified_2.Stable.Latest.t
 
+
 module F (Db : Db) = struct
   type holder = (int, value) Db.t
 
@@ -18,17 +19,37 @@ end
 
 module Rw = Read_write (F)
 
-let db_dir = Filename.temp_dir "mina" "proof_cache.lmdb"
-
 type t = { idx : int } [@@deriving compare, equal, sexp, yojson, hash]
 
-let unwrap ({ idx = x } : t) : value =
+let counter = ref 0
+
+module Cache = struct
+
+
+  let initialize path = Rw.create path
+
+  type t = Rw.t * Rw.holder
+
+end
+
+let unwrap ({ idx = x } : t) (db : Cache.t) : value =
   (* Read from the db. *)
-  let env, db = Rw.create db_dir in
+  let env, db = db in
   Rw.get ~env db x |> Option.value_exn
 
-let generate (x : value) : t =
-  let env, db = Rw.create db_dir in
-  let hash = Pickles.Proof.Proofs_verified_2.Stable.Latest.hash x in
-  let res = { idx = hash } in
-  Rw.set ~env db hash x ; res
+let generate (x : value) (db : Cache.t) : t =
+  let env, db = db in
+  let idx = !counter in
+  incr counter ;
+  let res = { idx } in
+  Gc.Expert.add_finalizer_last_exn res (fun () ->
+      Rw.remove ~env db idx
+  ) ;
+  Rw.set ~env db idx x ; res
+
+
+module For_tests = struct
+
+  let random () = Cache.initialize @@ Filename.temp_dir "mina" "proof_cache"
+
+end
\ No newline at end of file
diff --git a/src/lib/mina_base/proof_cache_tag/proof_cache_tag.mli b/src/lib/mina_base/proof_cache_tag/proof_cache_tag.mli
index 286634ceef2..57fa8c62c7f 100644
--- a/src/lib/mina_base/proof_cache_tag/proof_cache_tag.mli
+++ b/src/lib/mina_base/proof_cache_tag/proof_cache_tag.mli
@@ -1,5 +1,29 @@
+
+
+(* Cache is the storage backing a given proof_cache_tag implementation, from a database handle to
+   a filesystem folder. It gives the underlying cache implementation more flexibility. *)
+module Cache : sig
+
+  type t
+
+  (** Initialize the on-disk cache explicitly before interactions with it take place. *)
+  val initialize : string -> t
+
+end
+
+
 type t [@@deriving compare, equal, sexp, yojson, hash]
 
-val unwrap : t -> Mina_base.Proof.t
+(* Returns the proof stored in the cache. *)
+val unwrap : t -> Cache.t -> Mina_base.Proof.t
+
+(* Caches a proof by inserting it into the storage. *)
+val generate : Mina_base.Proof.t -> Cache.t -> t
+
+
+module For_tests : sig
+
+  val random : unit -> Cache.t
+
 
-val generate : Mina_base.Proof.t -> t
+end
\ No newline at end of file
diff --git a/src/lib/mina_block/validation.ml b/src/lib/mina_block/validation.ml
index 2954f1e74ff..bae2f1b3732 100644
--- a/src/lib/mina_block/validation.ml
+++ b/src/lib/mina_block/validation.ml
@@ -20,6 +20,7 @@ module type CONTEXT = sig
   val consensus_constants : Consensus.Constants.t
 
   val compile_config : Mina_compile_config.t
+
 end
 
 let validation (_, v) = v
@@ -474,7 +475,7 @@ let reset_frontier_dependencies_validation (transition_with_hash, validation) =
 
 let validate_staged_ledger_diff ?skip_staged_ledger_verification ~logger
     ~get_completed_work ~precomputed_values ~verifier ~parent_staged_ledger
-    ~parent_protocol_state (t, validation) =
+    ~cache_proof_db ~parent_protocol_state (t, validation) =
   [%log internal] "Validate_staged_ledger_diff" ;
   let block = With_hash.data t in
   let header = Block.header block in
@@ -535,7 +536,7 @@ let validate_staged_ledger_diff ?skip_staged_ledger_verification ~logger
          (*There was no proof emitted, snarked ledger hash shouldn't change*)
          Protocol_state.snarked_ledger_hash parent_protocol_state
      | Some (proof, _) ->
-         Ledger_proof.snarked_ledger_hash (Ledger_proof.Cache_tag.unwrap proof)
+         Ledger_proof.snarked_ledger_hash (Ledger_proof.Cache_tag.unwrap proof cache_proof_db)
   in
   let hash_errors =
     Result.combine_errors_unit
diff --git a/src/lib/mina_block/validation.mli b/src/lib/mina_block/validation.mli
index 71881258304..bd468830c59 100644
--- a/src/lib/mina_block/validation.mli
+++ b/src/lib/mina_block/validation.mli
@@ -273,6 +273,7 @@ val validate_staged_ledger_diff :
   -> precomputed_values:Genesis_proof.t
   -> verifier:Verifier.t
   -> parent_staged_ledger:Staged_ledger.t
+  -> cache_proof_db:Ledger_proof.Cache_tag.Cache.t
   -> parent_protocol_state:Protocol_state.Value.t
   -> ( 'a
      , 'b
diff --git a/src/lib/mina_intf/transition_frontier_components_intf.ml
b/src/lib/mina_intf/transition_frontier_components_intf.ml index 4521e31c0f8..913b6ff49d4 100644 --- a/src/lib/mina_intf/transition_frontier_components_intf.ml +++ b/src/lib/mina_intf/transition_frontier_components_intf.ml @@ -357,6 +357,7 @@ module type Transition_router_intf = sig -> Transaction_snark_work.Checked.t option ) -> catchup_mode:[ `Normal | `Super ] -> notify_online:(unit -> unit Deferred.t) + -> cache_proof_db:Ledger_proof.Cache_tag.Cache.t -> unit -> ( [ `Transition of Mina_block.Validated.t ] * [ `Source of [ `Gossip | `Catchup | `Internal ] ] diff --git a/src/lib/mina_lib/config.ml b/src/lib/mina_lib/config.ml index c7e00d2471c..df9eaa3279d 100644 --- a/src/lib/mina_lib/config.ml +++ b/src/lib/mina_lib/config.ml @@ -37,6 +37,7 @@ type t = ; wallets_disk_location : string ; persistent_root_location : string ; persistent_frontier_location : string + ; cache_proof_db_location : string ; epoch_ledger_location : string ; staged_ledger_transition_backup_capacity : int [@default 10] ; time_controller : Block_time.Controller.t diff --git a/src/lib/mina_lib/mina_lib.ml b/src/lib/mina_lib/mina_lib.ml index f620e6cc075..821ebb40290 100644 --- a/src/lib/mina_lib/mina_lib.ml +++ b/src/lib/mina_lib/mina_lib.ml @@ -108,6 +108,7 @@ type t = ; wallets : Secrets.Wallets.t ; coinbase_receiver : Consensus.Coinbase_receiver.t ref ; snark_job_state : Work_selector.State.t + ; cache_proof_db : Ledger_proof.Cache_tag.Cache.t ; mutable next_producer_timing : Daemon_rpcs.Types.Status.Next_producer_timing.t option ; subscriptions : Mina_subscriptions.t @@ -1250,6 +1251,7 @@ module type CONTEXT = sig val compaction_interval : Time.Span.t option val compile_config : Mina_compile_config.t + end let context ~commit_id (config : Config.t) : (module CONTEXT) = @@ -1279,10 +1281,12 @@ let context ~commit_id (config : Config.t) : (module CONTEXT) = (*Same as config.precomputed_values.compile_config. 
TODO: Remove redundant fields *) let compile_config = config.compile_config + end ) let start t = let commit_id_short = String.sub ~pos:0 ~len:8 t.commit_id in + let cache_proof_db = t.cache_proof_db in let set_next_producer_timing timing consensus_state = let block_production_status, next_producer_timing = let generated_from_consensus_at : @@ -1366,7 +1370,9 @@ let start t = ~block_produced_bvar:t.components.block_produced_bvar ~vrf_evaluation_state:t.vrf_evaluation_state ~net:t.components.net ~zkapp_cmd_limit_hardcap: - t.config.precomputed_values.genesis_constants.zkapp_cmd_limit_hardcap ) ; + t.config.precomputed_values.genesis_constants.zkapp_cmd_limit_hardcap + ~cache_proof_db + ) ; perform_compaction t.config.compile_config.compaction_interval t ; let () = match t.config.node_status_url with @@ -1411,6 +1417,7 @@ let start t = ~protocol_constants:t.config.precomputed_values.genesis_constants.protocol ~time_controller:t.config.time_controller ~block_produced_bvar:t.components.block_produced_bvar + ~cache_proof_db ~uptime_submitter_keypair:t.config.uptime_submitter_keypair ~graphql_control_port:t.config.graphql_control_port ~built_with_commit_sha ~get_next_producer_timing:(fun () -> t.next_producer_timing) @@ -1419,7 +1426,7 @@ let start t = stop_long_running_daemon t ; Snark_worker.start t -let start_with_precomputed_blocks t blocks = +let start_with_precomputed_blocks t ~cache_proof_db blocks = let module Context = (val context ~commit_id:t.commit_id t.config) in let%bind () = Block_producer.run_precomputed @@ -1428,7 +1435,7 @@ let start_with_precomputed_blocks t blocks = ~time_controller:t.config.time_controller ~frontier_reader:t.components.transition_frontier ~transition_writer:t.pipes.producer_transition_writer - ~precomputed_blocks:blocks + ~precomputed_blocks:blocks ~cache_proof_db in start t @@ -1644,6 +1651,7 @@ let create ~commit_id ?wallets (config : Config.t) = config.precomputed_values.constraint_constants.block_window_duration_ms |> Time.Span.of_ms in + let cache_proof_db = Ledger_proof.Cache_tag.Cache.initialize config.cache_proof_db_location in let monitor = Option.value ~default:(Monitor.create ()) config.monitor in Async.Scheduler.within' ~monitor (fun () -> let set_itn_data (type t) (module M : Itn_settable with type t = t) (t : t) @@ -2114,7 +2122,7 @@ let create ~commit_id ?wallets (config : Config.t) = ~most_recent_valid_block_writer ~get_completed_work: (Network_pool.Snark_pool.get_completed_work snark_pool) - ~notify_online () + ~notify_online ~cache_proof_db () in let ( valid_transitions_for_network , valid_transitions_for_api @@ -2453,6 +2461,7 @@ let create ~commit_id ?wallets (config : Config.t) = ; wallets ; coinbase_receiver = ref config.coinbase_receiver ; snark_job_state = snark_jobs_state + ; cache_proof_db ; subscriptions ; sync_status ; precomputed_block_writer diff --git a/src/lib/mina_lib/mina_lib.mli b/src/lib/mina_lib/mina_lib.mli index 2878d7dcfa0..7e2f5ee14bd 100644 --- a/src/lib/mina_lib/mina_lib.mli +++ b/src/lib/mina_lib/mina_lib.mli @@ -44,6 +44,7 @@ module type CONTEXT = sig val compaction_interval : Time.Span.t option val compile_config : Mina_compile_config.t + end exception Snark_worker_error of int @@ -203,7 +204,7 @@ val snark_pool : t -> Network_pool.Snark_pool.t val start : t -> unit Deferred.t val start_with_precomputed_blocks : - t -> Block_producer.Precomputed.t Sequence.t -> unit Deferred.t + t -> cache_proof_db:Ledger_proof.Cache_tag.Cache.t -> Block_producer.Precomputed.t Sequence.t -> unit Deferred.t val stop_snark_worker : 
?should_wait_kill:bool -> t -> unit Deferred.t diff --git a/src/lib/mina_net2/sink.ml b/src/lib/mina_net2/sink.ml index 8ca5e85eb01..8cfd5933fbe 100644 --- a/src/lib/mina_net2/sink.ml +++ b/src/lib/mina_net2/sink.ml @@ -5,7 +5,9 @@ module type S = sig type msg - val push : t -> msg -> unit Deferred.t + type cache_proof_db + + val push : t -> msg -> cache_proof_db -> unit Deferred.t end module type S_with_void = sig diff --git a/src/lib/network_pool/intf.ml b/src/lib/network_pool/intf.ml index 507aa01db71..95d582fe3fa 100644 --- a/src/lib/network_pool/intf.ml +++ b/src/lib/network_pool/intf.ml @@ -106,6 +106,7 @@ module type Resource_pool_diff_intf = sig val verify : pool -> t Envelope.Incoming.t + -> Ledger_proof.Cache_tag.Cache.t -> (verified Envelope.Incoming.t, Verification_error.t) Deferred.Result.t (** Warning: Using this directly could corrupt the resource pool if it @@ -114,6 +115,7 @@ module type Resource_pool_diff_intf = sig val unsafe_apply : pool -> verified Envelope.Incoming.t + -> Ledger_proof.Cache_tag.Cache.t -> ( [ `Accept | `Reject ] * t * rejected , [ `Locally_generated of t * rejected | `Other of Error.t ] ) Result.t @@ -194,6 +196,8 @@ module type Network_pool_base_intf = sig type transition_frontier + type cache_proof_db + module Local_sink : Mina_net2.Sink.S_with_void with type msg := @@ -206,8 +210,8 @@ module type Network_pool_base_intf = sig module Remote_sink : Mina_net2.Sink.S_with_void - with type msg := - resource_pool_diff Envelope.Incoming.t * Mina_net2.Validation_callback.t + with type msg := resource_pool_diff Envelope.Incoming.t * Mina_net2.Validation_callback.t + and type cache_proof_db:= cache_proof_db module Broadcast_callback : Broadcast_callback @@ -225,6 +229,7 @@ module type Network_pool_base_intf = sig -> log_gossip_heard:bool -> on_remote_push:(unit -> unit Deferred.t) -> block_window_duration:Time.Span.t + -> cache_proof_db:Ledger_proof.Cache_tag.Cache.t -> t * Remote_sink.t * Local_sink.t val of_resource_pool_and_diffs : @@ -235,6 +240,7 @@ module type Network_pool_base_intf = sig -> log_gossip_heard:bool -> on_remote_push:(unit -> unit Deferred.t) -> block_window_duration:Time.Span.t + -> cache_proof_db:Ledger_proof.Cache_tag.Cache.t -> t * Remote_sink.t * Local_sink.t val resource_pool : t -> resource_pool @@ -247,6 +253,7 @@ module type Network_pool_base_intf = sig t -> resource_pool_diff_verified Envelope.Incoming.t -> Broadcast_callback.t + -> Ledger_proof.Cache_tag.Cache.t -> unit val apply_no_broadcast : @@ -269,12 +276,14 @@ module type Snark_resource_pool_intf = sig -> t -> work:Transaction_snark_work.Statement.t -> proof:Ledger_proof.t One_or_two.t + -> cache_proof_db:Ledger_proof.Cache_tag.Cache.t -> fee:Fee_with_prover.t -> [ `Added | `Statement_not_referenced ] val request_proof : t -> Transaction_snark_work.Statement.t + -> Ledger_proof.Cache_tag.Cache.t -> Ledger_proof.t One_or_two.t Priced_proof.t option val verify_and_act : diff --git a/src/lib/network_pool/network_pool_base.ml b/src/lib/network_pool/network_pool_base.ml index 43dc22bb518..3b22215f5b2 100644 --- a/src/lib/network_pool/network_pool_base.ml +++ b/src/lib/network_pool/network_pool_base.ml @@ -15,7 +15,9 @@ end) and type transition_frontier := Transition_frontier.t and type transition_frontier_diff := Resource_pool.transition_frontier_diff and type config := Resource_pool.Config.t + and type cache_proof_db := Ledger_proof.Cache_tag.Cache.t and type rejected_diff := Resource_pool.Diff.rejected = struct + let apply_and_broadcast_thread_label = "apply_and_broadcast_" ^ 
Resource_pool.label ^ "_diffs" @@ -88,6 +90,8 @@ end) let label = Resource_pool.label type pool = Resource_pool.t + + type cache_proof_db = Ledger_proof.Cache_tag.Cache.t end) (Broadcast_callback) @@ -99,6 +103,7 @@ end) let label = Resource_pool.label type pool = Resource_pool.t + end) (Broadcast_callback) @@ -121,7 +126,7 @@ end) (Resource_pool.Diff.max_per_15_seconds, `Per (Time.Span.of_sec 15.0)) let apply_and_broadcast ({ logger; _ } as t) - (diff : Resource_pool.Diff.verified Envelope.Incoming.t) cb = + (diff : Resource_pool.Diff.verified Envelope.Incoming.t) cb cache_proof_db = let env = Envelope.Incoming.map ~f:Resource_pool.Diff.t_of_verified diff in let rebroadcast (diff', rejected) = let open Broadcast_callback in @@ -139,7 +144,7 @@ end) forward t.write_broadcasts diff' rejected cb ) in O1trace.sync_thread apply_and_broadcast_thread_label (fun () -> - match Resource_pool.Diff.unsafe_apply t.resource_pool diff with + match Resource_pool.Diff.unsafe_apply t.resource_pool diff cache_proof_db with | Ok (`Accept, accepted, rejected) -> Resource_pool.Diff.log_internal ~logger "accepted" env ; rebroadcast (accepted, rejected) @@ -189,7 +194,7 @@ end) | Transition_frontier_extension of Resource_pool.transition_frontier_diff let of_resource_pool_and_diffs resource_pool ~logger ~constraint_constants - ~tf_diffs ~log_gossip_heard ~on_remote_push ~block_window_duration = + ~tf_diffs ~log_gossip_heard ~on_remote_push ~block_window_duration ~cache_proof_db= let read_broadcasts, write_broadcasts = Linear_pipe.create () in let network_pool = { resource_pool @@ -228,7 +233,7 @@ end) match diff_source with | Diff ((verified_diff, cb) : Remote_sink.unwrapped_t) -> O1trace.sync_thread processing_diffs_thread_label (fun () -> - apply_and_broadcast network_pool verified_diff cb ) + apply_and_broadcast network_pool verified_diff cb cache_proof_db ) | Transition_frontier_extension diff -> O1trace.sync_thread processing_transition_frontier_diffs_thread_label @@ -286,7 +291,7 @@ end) let create ~config ~constraint_constants ~consensus_constants ~time_controller ~frontier_broadcast_pipe ~logger ~log_gossip_heard ~on_remote_push - ~block_window_duration = + ~block_window_duration ~cache_proof_db = (* Diffs from transition frontier extensions *) let tf_diff_reader, tf_diff_writer = Strict_pipe.( @@ -298,7 +303,7 @@ end) ~time_controller ~config ~logger ~frontier_broadcast_pipe ~tf_diff_writer ) ~constraint_constants ~logger ~tf_diffs:tf_diff_reader ~log_gossip_heard - ~on_remote_push ~block_window_duration + ~on_remote_push ~block_window_duration ~cache_proof_db in O1trace.background_thread rebroadcast_loop_thread_label (fun () -> rebroadcast_loop t logger ) ; diff --git a/src/lib/network_pool/pool_sink.ml b/src/lib/network_pool/pool_sink.ml index 8be8f96c3b6..0c0434fd7d0 100644 --- a/src/lib/network_pool/pool_sink.ml +++ b/src/lib/network_pool/pool_sink.ml @@ -46,6 +46,7 @@ module Base Pool_sink with type pool := Diff.pool and type unwrapped_t = Diff.verified Envelope.Incoming.t * BC.t + and type cache_proof_db := Ledger_proof.Cache_tag.Cache.t and type msg := Msg.raw_msg * Msg.raw_callback = struct type unwrapped_t = Diff.verified Envelope.Incoming.t * BC.t @@ -86,7 +87,7 @@ module Base BC.drop Diff.empty (Diff.reject_overloaded_diff diff) cb ; Deferred.unit - let verify_impl ~logger ~trace_label resource_pool rl env cb : + let verify_impl ~logger ~trace_label resource_pool rl env cb cache_proof_db : Diff.verified Envelope.Incoming.t option Deferred.t = let handle_diffs_thread_label = "handle_" ^ 
trace_label ^ "_diffs" in @@ -112,7 +113,7 @@ module Base Deferred.return None | `Within_capacity -> O1trace.thread verify_diffs_thread_label (fun () -> - match%map Diff.verify resource_pool env with + match%map Diff.verify resource_pool env cache_proof_db with | Error ver_err -> Diff.log_internal ~logger "rejected" ~reason: @@ -131,7 +132,7 @@ module Base [%log debug] "Verified diff: $diff" ~metadata ; Some verified_diff ) ) - let push t (msg, cb) = + let push t (msg, cb) cache_proof_db = match t with | Sink { writer = w @@ -172,7 +173,7 @@ module Base don't_wait_for (Throttle.enqueue throttle (fun () -> match%bind - verify_impl ~logger ~trace_label pool rl env' cb' + verify_impl ~logger ~trace_label pool rl env' cb' cache_proof_db with | None -> [%log debug] "Received unverified gossip on %s" trace_label @@ -231,6 +232,7 @@ module Local_sink Pool_sink with type pool := Diff.pool and type unwrapped_t = Diff.verified Envelope.Incoming.t * BC.t + and type cache_proof_db := Ledger_proof.Cache_tag.Cache.t and type msg := BC.resource_pool_diff * ( ( [ `Broadcasted | `Not_broadcasted ] @@ -262,6 +264,7 @@ module Remote_sink Pool_sink with type pool := Diff.pool and type unwrapped_t = Diff.verified Envelope.Incoming.t * BC.t + and type cache_proof_db := Ledger_proof.Cache_tag.Cache.t and type msg := BC.resource_pool_diff Envelope.Incoming.t * Mina_net2.Validation_callback.t = diff --git a/src/lib/network_pool/snark_pool.ml b/src/lib/network_pool/snark_pool.ml index d1e944015c0..e838bcb6ab3 100644 --- a/src/lib/network_pool/snark_pool.ml +++ b/src/lib/network_pool/snark_pool.ml @@ -15,8 +15,8 @@ module Snark_tables = struct [@@deriving sexp, equal] end -let unwrap_cache_tag = - Priced_proof.map ~f:(One_or_two.map ~f:Ledger_proof.Cache_tag.unwrap) +let unwrap_cache_tag proof_cache_db = + Priced_proof.map ~f:(One_or_two.map ~f:(fun proof ->Ledger_proof.Cache_tag.unwrap proof proof_cache_db)) module type S = sig type transition_frontier @@ -35,6 +35,7 @@ module type S = sig val get_rebroadcastable : Resource_pool.t -> has_timed_out:(Time.t -> [ `Timed_out | `Ok ]) + -> Ledger_proof.Cache_tag.Cache.t -> Resource_pool.Diff.t list end @@ -273,16 +274,16 @@ struct let get_logger t = t.logger - let request_proof t x = - Option.map ~f:unwrap_cache_tag (Map.find !(t.snark_tables).all x) + let request_proof t x proof_cache_db = + Option.map ~f:(unwrap_cache_tag proof_cache_db) (Map.find !(t.snark_tables).all x) let add_snark ?(is_local = false) t ~work - ~(proof : Ledger_proof.t One_or_two.t) ~fee = + ~(proof : Ledger_proof.t One_or_two.t) ~cache_proof_db ~fee = if work_is_referenced t work then ( (*Note: fee against existing proofs and the new proofs are checked in Diff.unsafe_apply which calls this function*) let cached_proof = - One_or_two.map ~f:Ledger_proof.Cache_tag.generate proof + One_or_two.map ~f:(fun proof -> Ledger_proof.Cache_tag.generate proof cache_proof_db) proof in t.snark_tables := { all = @@ -486,7 +487,7 @@ struct (** Returns locally-generated snark work for re-broadcast. This is limited to recent work which is yet to appear in a block. 
*) - let get_rebroadcastable t ~has_timed_out:_ = + let get_rebroadcastable t proof_cache_db ~has_timed_out:_ = match best_tip_table t with | None -> [] @@ -494,7 +495,7 @@ struct Map.to_alist !(t.snark_tables).rebroadcastable |> List.filter_map ~f:(fun (stmt, (snark, _time)) -> if Set.mem best_tips stmt then - Some (Diff.Add_solved_work (stmt, unwrap_cache_tag snark)) + Some (Diff.Add_solved_work (stmt, unwrap_cache_tag proof_cache_db snark)) else None ) let remove_solved_work t work = @@ -510,9 +511,9 @@ struct let get_rebroadcastable = Resource_pool.get_rebroadcastable end - let get_completed_work t statement = + let get_completed_work t proof_cache_db statement = Option.map - (Resource_pool.request_proof (resource_pool t) statement) + (Resource_pool.request_proof (resource_pool t) statement proof_cache_db) ~f:(fun Priced_proof.{ proof; fee = { fee; prover } } -> Transaction_snark_work.Checked.create_unsafe { Transaction_snark_work.fee; proofs = proof; prover } ) @@ -607,7 +608,7 @@ let%test_module "random set test" = Verifier.For_tests.default ~constraint_constants ~logger ~proof_level () ) - let apply_diff resource_pool work + let apply_diff resource_pool work cache_proof_db ?(proof = One_or_two.map ~f:mk_dummy_proof) ?(sender = Envelope.Sender.Local) fee = let diff = @@ -619,8 +620,8 @@ let%test_module "random set test" = Mock_snark_pool.Resource_pool.Diff.verify resource_pool enveloped_diff with | Ok _ -> - Mock_snark_pool.Resource_pool.Diff.unsafe_apply resource_pool - enveloped_diff + Mock_snark_pool.Resource_pool.Diff.unsafe_apply resource_pool cache_proof_db + enveloped_diff cache_proof_db | Error _ -> Error (`Other (Error.of_string "Invalid diff")) @@ -628,7 +629,7 @@ let%test_module "random set test" = Mock_snark_pool.Resource_pool.make_config ~verifier ~trust_system ~disk_location:"/tmp/snark-pool" - let gen ?length () = + let gen ?length cache_proof_db () = let open Quickcheck.Generator.Let_syntax in let gen_entry = Quickcheck.Generator.tuple2 Mocks.Transaction_snark_work.Statement.gen @@ -649,7 +650,7 @@ let%test_module "random set test" = ~consensus_constants ~time_controller ~frontier_broadcast_pipe:frontier_broadcast_pipe_r ~log_gossip_heard:false ~on_remote_push:(Fn.const Deferred.unit) - ~block_window_duration + ~block_window_duration ~cache_proof_db (* |> *) in let pool = Mock_snark_pool.resource_pool mock_pool in @@ -660,7 +661,7 @@ let%test_module "random set test" = in let%map () = Deferred.List.iter sample_solved_work ~f:(fun (work, fee) -> - let%map res = apply_diff pool work fee in + let%map res = apply_diff pool work cache_proof_db fee in assert (Result.is_ok res) ) in (pool, tf) @@ -739,6 +740,7 @@ let%test_module "random set test" = let%test_unit "When two priced proofs of the same work are inserted into \ the snark pool, the fee of the work is at most the minimum \ of those fees" = + let proof_cache_db = Ledger_proof.Cache_tag.For_tests.random () in Quickcheck.test ~trials:5 ~sexp_of: [%sexp_of: @@ -757,18 +759,19 @@ let%test_module "random set test" = let%bind () = Mocks.Transition_frontier.refer_statements tf [ work ] in - let%bind _ = apply_diff t work fee_1 in - let%map _ = apply_diff t work fee_2 in + let%bind _ = apply_diff t work proof_cache_db fee_1 in + let%map _ = apply_diff t work proof_cache_db fee_2 in let fee_upper_bound = Currency.Fee.min fee_1.fee fee_2.fee in let { Priced_proof.fee = { fee; _ }; _ } = Option.value_exn - (Mock_snark_pool.Resource_pool.request_proof t work) + (Mock_snark_pool.Resource_pool.request_proof t work proof_cache_db) 
in assert (Currency.Fee.(fee <= fee_upper_bound)) ) ) let%test_unit "A priced proof of a work will replace an existing priced \ proof of the same work only if it's fee is smaller than the \ existing priced proof" = + let proof_cache_db = Ledger_proof.Cache_tag.For_tests.random () in Quickcheck.test ~trials:5 ~sexp_of: [%sexp_of: @@ -777,7 +780,7 @@ let%test_module "random set test" = * Mocks.Transaction_snark_work.Statement.t * Fee_with_prover.t * Fee_with_prover.t] - (Quickcheck.Generator.tuple4 (gen ()) + (Quickcheck.Generator.tuple4 (gen proof_cache_db ()) Mocks.Transaction_snark_work.Statement.gen Fee_with_prover.gen Fee_with_prover.gen ) ~f:(fun (t, work, fee_1, fee_2) -> @@ -790,13 +793,13 @@ let%test_module "random set test" = Mock_snark_pool.Resource_pool.remove_solved_work t work ; let expensive_fee = Fee_with_prover.max fee_1 fee_2 and cheap_fee = Fee_with_prover.min fee_1 fee_2 in - let%bind _ = apply_diff t work cheap_fee in - let%map res = apply_diff t work expensive_fee in + let%bind _ = apply_diff t work proof_cache_db cheap_fee in + let%map res = apply_diff t work proof_cache_db expensive_fee in assert (Result.is_error res) ; assert ( Currency.Fee.equal cheap_fee.fee (Option.value_exn - (Mock_snark_pool.Resource_pool.request_proof t work) ) + (Mock_snark_pool.Resource_pool.request_proof t work proof_cache_db) ) .fee .fee ) ) ) @@ -807,6 +810,7 @@ let%test_module "random set test" = let%test_unit "Work that gets fed into apply_and_broadcast will be \ received in the pool's reader" = + let proof_cache_db = Ledger_proof.Cache_tag.For_tests.random () in Async.Thread_safe.block_on_async_exn (fun () -> let frontier_broadcast_pipe_r, _ = Broadcast_pipe.create (Some (Mocks.Transition_frontier.create [])) @@ -816,7 +820,7 @@ let%test_module "random set test" = ~consensus_constants ~time_controller ~logger ~frontier_broadcast_pipe:frontier_broadcast_pipe_r ~log_gossip_heard:false ~on_remote_push:(Fn.const Deferred.unit) - ~block_window_duration + ~block_window_duration ~cache_proof_db:proof_cache_db in let priced_proof = { Priced_proof.proof = @@ -840,7 +844,7 @@ let%test_module "random set test" = ~f:(fun _ -> let pool = Mock_snark_pool.resource_pool network_pool in ( match - Mock_snark_pool.Resource_pool.request_proof pool fake_work + Mock_snark_pool.Resource_pool.request_proof pool fake_work proof_cache_db with | Some { proof; fee = _ } -> assert ( @@ -851,12 +855,13 @@ let%test_module "random set test" = Deferred.unit ) ; Mock_snark_pool.apply_and_broadcast network_pool (Envelope.Incoming.local command) - (Mock_snark_pool.Broadcast_callback.Local (Fn.const ())) ; + (Mock_snark_pool.Broadcast_callback.Local (Fn.const ())) proof_cache_db; Deferred.unit ) let%test_unit "when creating a network, the incoming diffs and locally \ generated diffs in reader pipes will automatically get \ process" = + let proof_cache_db = Ledger_proof.Cache_tag.For_tests.random () in Async.Thread_safe.block_on_async_exn (fun () -> let work_count = 10 in let works = @@ -889,7 +894,7 @@ let%test_module "random set test" = ~consensus_constants ~time_controller ~frontier_broadcast_pipe:frontier_broadcast_pipe_r ~log_gossip_heard:false ~on_remote_push:(Fn.const Deferred.unit) - ~block_window_duration + ~block_window_duration ~cache_proof_db:proof_cache_db in List.map (List.take works per_reader) ~f:create_work |> List.map ~f:(fun work -> @@ -927,6 +932,7 @@ let%test_module "random set test" = let%test_unit "rebroadcast behavior" = let tf = Mocks.Transition_frontier.create [] in let frontier_broadcast_pipe_r, 
_w = Broadcast_pipe.create (Some tf) in + let cache_proof_db = Ledger_proof.Cache_tag.For_tests.random () in let stmt1, stmt2, stmt3, stmt4 = let gen_not_any l = Quickcheck.Generator.filter Mocks.Transaction_snark_work.Statement.gen @@ -967,6 +973,7 @@ let%test_module "random set test" = [%test_eq: Mock_snark_pool.Resource_pool.Diff.t list] (sort got) (sort expected) in + let proof_cache_db = Ledger_proof.Cache_tag.For_tests.random () in Async.Thread_safe.block_on_async_exn (fun () -> let open Deferred.Let_syntax in let network_pool, _, _ = @@ -974,7 +981,7 @@ let%test_module "random set test" = ~constraint_constants ~consensus_constants ~time_controller ~frontier_broadcast_pipe:frontier_broadcast_pipe_r ~log_gossip_heard:false ~on_remote_push:(Fn.const Deferred.unit) - ~block_window_duration + ~block_window_duration ~cache_proof_db in let resource_pool = Mock_snark_pool.resource_pool network_pool in let%bind () = @@ -982,7 +989,7 @@ let%test_module "random set test" = [ stmt1; stmt2; stmt3; stmt4 ] in let%bind res1 = - apply_diff ~sender:fake_sender resource_pool stmt1 fee1 + apply_diff ~sender:fake_sender resource_pool stmt1 cache_proof_db fee1 in let ok_exn = function | Ok e -> @@ -998,11 +1005,11 @@ let%test_module "random set test" = * Mock_snark_pool.Resource_pool.Diff.verified * Mock_snark_pool.Resource_pool.Diff.rejected ) ; let rebroadcastable1 = - Mock_snark_pool.For_tests.get_rebroadcastable resource_pool + Mock_snark_pool.For_tests.get_rebroadcastable resource_pool proof_cache_db ~has_timed_out:(Fn.const `Ok) in check_work ~got:rebroadcastable1 ~expected:[] ; - let%bind res2 = apply_diff resource_pool stmt2 fee2 in + let%bind res2 = apply_diff resource_pool stmt2 cache_proof_db fee2 in let proof2 = One_or_two.map ~f:mk_dummy_proof stmt2 in ignore ( ok_exn res2 @@ -1010,13 +1017,13 @@ let%test_module "random set test" = * Mock_snark_pool.Resource_pool.Diff.verified * Mock_snark_pool.Resource_pool.Diff.rejected ) ; let rebroadcastable2 = - Mock_snark_pool.For_tests.get_rebroadcastable resource_pool + Mock_snark_pool.For_tests.get_rebroadcastable resource_pool proof_cache_db ~has_timed_out:(Fn.const `Ok) in check_work ~got:rebroadcastable2 ~expected: [ Add_solved_work (stmt2, { proof = proof2; fee = fee2 }) ] ; - let%bind res3 = apply_diff resource_pool stmt3 fee3 in + let%bind res3 = apply_diff resource_pool stmt3 cache_proof_db fee3 in let proof3 = One_or_two.map ~f:mk_dummy_proof stmt3 in ignore ( ok_exn res3 @@ -1024,7 +1031,7 @@ let%test_module "random set test" = * Mock_snark_pool.Resource_pool.Diff.verified * Mock_snark_pool.Resource_pool.Diff.rejected ) ; let rebroadcastable3 = - Mock_snark_pool.For_tests.get_rebroadcastable resource_pool + Mock_snark_pool.For_tests.get_rebroadcastable resource_pool proof_cache_db ~has_timed_out:(Fn.const `Ok) in check_work ~got:rebroadcastable3 @@ -1036,7 +1043,7 @@ let%test_module "random set test" = hasn't appeared in a block yet. 
*) let rebroadcastable4 = - Mock_snark_pool.For_tests.get_rebroadcastable resource_pool + Mock_snark_pool.For_tests.get_rebroadcastable resource_pool proof_cache_db ~has_timed_out:(Fn.const `Timed_out) in check_work ~got:rebroadcastable4 @@ -1044,7 +1051,7 @@ let%test_module "random set test" = [ Add_solved_work (stmt2, { proof = proof2; fee = fee2 }) ; Add_solved_work (stmt3, { proof = proof3; fee = fee3 }) ] ; - let%bind res6 = apply_diff resource_pool stmt4 fee4 in + let%bind res6 = apply_diff resource_pool stmt4 cache_proof_db fee4 in let proof4 = One_or_two.map ~f:mk_dummy_proof stmt4 in ignore ( ok_exn res6 @@ -1056,7 +1063,7 @@ let%test_module "random set test" = Mocks.Transition_frontier.remove_from_best_tip tf [ stmt3 ] in let rebroadcastable5 = - Mock_snark_pool.For_tests.get_rebroadcastable resource_pool + Mock_snark_pool.For_tests.get_rebroadcastable resource_pool proof_cache_db ~has_timed_out:(Fn.const `Ok) in check_work ~got:rebroadcastable5 diff --git a/src/lib/network_pool/snark_pool_diff.ml b/src/lib/network_pool/snark_pool_diff.ml index cb1028093a9..de254b9ee69 100644 --- a/src/lib/network_pool/snark_pool_diff.ml +++ b/src/lib/network_pool/snark_pool_diff.ml @@ -99,7 +99,7 @@ module Make (** Check whether there is a proof with lower fee in the pool. Returns [Ok ()] is the [~fee] would be the lowest in pool. *) - let has_no_lower_fee pool work ~fee ~sender = + let has_no_lower_fee pool work ~fee ~sender ~cache_proof_db = let reject_and_log_if_local reason = [%log' trace (Pool.get_logger pool)] "Rejecting snark work $work from $sender: $reason" @@ -112,7 +112,7 @@ module Make ] ; Result.fail reason in - match Pool.request_proof pool work with + match Pool.request_proof pool work cache_proof_db with | None -> Ok () | Some { fee = { fee = prev; _ }; _ } -> @@ -122,7 +122,7 @@ module Make reject_and_log_if_local Intf.Verification_error.Fee_equal else reject_and_log_if_local Intf.Verification_error.Fee_higher - let verify pool ({ data; sender; _ } as t : t Envelope.Incoming.t) = + let verify pool ({ data; sender; _ } as t : t Envelope.Incoming.t) cache_proof_db= match data with | Empty -> Deferred.Result.fail @@ -137,11 +137,11 @@ module Make (*reject higher priced gossiped proofs*) if is_local then verify () else - Deferred.return (has_no_lower_fee pool work ~fee:fee.fee ~sender) - >>= verify + Deferred.return (has_no_lower_fee pool ~cache_proof_db work ~fee:fee.fee ~sender) + >>= verify (* This is called after verification has occurred.*) - let unsafe_apply (pool : Pool.t) (t : t Envelope.Incoming.t) = + let unsafe_apply (pool : Pool.t) (t : t Envelope.Incoming.t) cache_proof_db = let { Envelope.Incoming.data = diff; sender; _ } = t in match diff with | Empty -> @@ -154,10 +154,10 @@ module Make | `Added -> Ok (diff, ()) in - match has_no_lower_fee pool work ~fee:fee.fee ~sender with + match has_no_lower_fee pool work ~fee:fee.fee ~sender ~cache_proof_db with | Ok () -> let%map.Result accepted, rejected = - Pool.add_snark ~is_local pool ~work ~proof ~fee |> to_or_error + Pool.add_snark ~is_local pool ~work ~proof ~cache_proof_db ~fee |> to_or_error in (`Accept, accepted, rejected) | Error e -> diff --git a/src/lib/network_pool/test.ml b/src/lib/network_pool/test.ml index 6c615b9a98f..dc28a5049aa 100644 --- a/src/lib/network_pool/test.ml +++ b/src/lib/network_pool/test.ml @@ -56,13 +56,14 @@ let%test_module "network pool test" = } } in + let cache_proof_db = Ledger_proof.Cache_tag.For_tests.random () in Async.Thread_safe.block_on_async_exn (fun () -> let network_pool, _, _ = 
Mock_snark_pool.create ~config ~logger ~constraint_constants ~consensus_constants ~time_controller ~frontier_broadcast_pipe:frontier_broadcast_pipe_r ~log_gossip_heard:false ~on_remote_push:(Fn.const Deferred.unit) - ~block_window_duration + ~block_window_duration ~cache_proof_db in let%bind () = Mocks.Transition_frontier.refer_statements tf [ work ] @@ -73,12 +74,12 @@ let%test_module "network pool test" = in Mock_snark_pool.apply_and_broadcast network_pool (Envelope.Incoming.local command) - (Mock_snark_pool.Broadcast_callback.Local (Fn.const ())) ; + (Mock_snark_pool.Broadcast_callback.Local (Fn.const ())) cache_proof_db; let%map _ = Linear_pipe.read (Mock_snark_pool.broadcasts network_pool) in let pool = Mock_snark_pool.resource_pool network_pool in - match Mock_snark_pool.Resource_pool.request_proof pool work with + match Mock_snark_pool.Resource_pool.request_proof pool work cache_proof_db with | Some { proof; fee = _ } -> assert ( [%equal: Ledger_proof.t One_or_two.t] proof priced_proof.proof ) @@ -112,23 +113,24 @@ let%test_module "network pool test" = let%bind () = Async.Scheduler.yield_until_no_jobs_remain () in let tf = Mocks.Transition_frontier.create [] in let frontier_broadcast_pipe_r, _ = Broadcast_pipe.create (Some tf) in + let cache_proof_db = Ledger_proof.Cache_tag.For_tests.random () in let network_pool, remote_sink, local_sink = Mock_snark_pool.create ~config ~logger ~constraint_constants ~consensus_constants ~time_controller ~frontier_broadcast_pipe:frontier_broadcast_pipe_r ~log_gossip_heard:false ~on_remote_push:(Fn.const Deferred.unit) - ~block_window_duration + ~block_window_duration ~cache_proof_db in List.map (List.take works per_reader) ~f:create_work |> List.map ~f:(fun work -> ( Envelope.Incoming.local work , Mina_net2.Validation_callback.create_without_expiration () ) ) |> List.iter ~f:(fun diff -> - Mock_snark_pool.Remote_sink.push remote_sink diff + Mock_snark_pool.Remote_sink.push remote_sink diff cache_proof_db |> Deferred.don't_wait_for ) ; List.map (List.drop works per_reader) ~f:create_work |> List.iter ~f:(fun diff -> - Mock_snark_pool.Local_sink.push local_sink (diff, Fn.const ()) + Mock_snark_pool.Local_sink.push local_sink (diff, Fn.const ()) cache_proof_db |> Deferred.don't_wait_for ) ; let%bind () = Mocks.Transition_frontier.refer_statements tf works in don't_wait_for diff --git a/src/lib/network_pool/transaction_pool.ml b/src/lib/network_pool/transaction_pool.ml index d0f50e770f0..cdfca016ef8 100644 --- a/src/lib/network_pool/transaction_pool.ml +++ b/src/lib/network_pool/transaction_pool.ml @@ -1449,7 +1449,7 @@ struct in (decision, accepted, rejected) - let unsafe_apply (t : pool) (diff : verified Envelope.Incoming.t) : + let unsafe_apply (t : pool) (diff : verified Envelope.Incoming.t) (_cache_proof_db : Ledger_proof.Cache_tag.Cache.t) : ([ `Accept | `Reject ] * t * rejected, _) Result.t = match apply t diff with | Ok (decision, accepted, rejected) -> @@ -1671,6 +1671,8 @@ let%test_module _ = let time_controller = Block_time.Controller.basic ~logger + let cache_proof_db = Ledger_proof.Cache_tag.For_tests.random () + let verifier = Async.Thread_safe.block_on_async_exn (fun () -> Verifier.For_tests.default ~constraint_constants ~logger ~proof_level @@ -1929,7 +1931,7 @@ let%test_module _ = Test.create ~config ~logger ~constraint_constants ~consensus_constants ~time_controller ~frontier_broadcast_pipe:frontier_pipe_r ~log_gossip_heard:false ~on_remote_push:(Fn.const Deferred.unit) - ~block_window_duration + ~block_window_duration ~cache_proof_db 
in let txn_pool = Test.resource_pool pool_ in let%map () = Async.Scheduler.yield_until_no_jobs_remain () in @@ -2174,7 +2176,7 @@ let%test_module _ = let mk_with_status (cmd : User_command.Valid.t) = { With_status.data = cmd; status = Applied } - let add_commands ?(local = true) test cs = + let add_commands ?(local = true) ?(cache_proof_db=cache_proof_db) test cs = let sender = if local then Envelope.Sender.Local else @@ -2195,7 +2197,7 @@ let%test_module _ = (Result.map_error ~f:Intf.Verification_error.to_error) in let result = - Test.Resource_pool.Diff.unsafe_apply test.txn_pool verified + Test.Resource_pool.Diff.unsafe_apply test.txn_pool verified cache_proof_db in let tm1 = Time.now () in [%log' info test.txn_pool.logger] "Time for add_commands: %0.04f sec" @@ -2218,8 +2220,8 @@ let%test_module _ = (Error.to_string_hum err) ) ; result - let add_commands' ?local test cs = - add_commands ?local test cs >>| assert_pool_apply cs + let add_commands' ?local ?cache_proof_db test cs = + add_commands ?local ?cache_proof_db test cs >>| assert_pool_apply cs let reorg ?(reorg_best_tip = false) test new_commands removed_commands = let%bind () = diff --git a/src/lib/staged_ledger/staged_ledger.ml b/src/lib/staged_ledger/staged_ledger.ml index cf0cc31f7c2..61ab318af4f 100644 --- a/src/lib/staged_ledger/staged_ledger.ml +++ b/src/lib/staged_ledger/staged_ledger.ml @@ -217,17 +217,17 @@ module T = struct include Scan_state.Make_statement_scanner (struct type t = unit - let verify ~verifier:() _proofs = Deferred.Or_error.return (Ok ()) + let verify ~verifier:() ~cache_proof_db _proofs = Deferred.Or_error.return (Ok ()) end) end module Statement_scanner_proof_verifier = struct type t = { logger : Logger.t; verifier : Verifier.t } - let verify ~verifier:{ logger; verifier } ts = + let verify ~verifier:{ logger; verifier } ~cache_proof_db ts = verify_proofs ~logger ~verifier (List.map ts ~f:(fun (p, m) -> - let p = Ledger_proof.Cache_tag.unwrap p in + let p = Ledger_proof.Cache_tag.unwrap p cache_proof_db in (p, Ledger_proof.statement p, m) ) ) end @@ -269,6 +269,7 @@ module T = struct let verify_scan_state_after_apply ~constraint_constants ~pending_coinbase_stack ~first_pass_ledger_end ~second_pass_ledger_end + ~cache_proof_db (scan_state : Scan_state.t) = let error_prefix = "Error verifying the parallel scan state after applying the diff." 
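(* Editor's note -- illustrative sketch only, not part of the patch. The hunks on
   either side of this point change call sites so that a [Ledger_proof.Cache_tag.t]
   can only be unwrapped by also supplying the proof-cache handle it was generated
   against. A minimal sketch of that calling convention, assuming [Ledger_proof] is
   in scope and using only functions that appear elsewhere in this diff
   ([Cache_tag.For_tests.random], [Cache_tag.generate], [Cache_tag.unwrap],
   [Ledger_proof.statement]); the [_sketch_*] names and [p] are hypothetical: *)
let _sketch_statement_of_cached ~cache_proof_db cached =
  Ledger_proof.statement (Ledger_proof.Cache_tag.unwrap cached cache_proof_db)

let _sketch_roundtrip () =
  (* Tests grab a throwaway cache handle, exactly as the test hunks above do. *)
  let cache_proof_db = Ledger_proof.Cache_tag.For_tests.random () in
  fun (p : Ledger_proof.t) ->
    (* [generate] stores the concrete proof in the cache and returns a tag;
       [unwrap] against the same handle yields the original proof back. *)
    let tag = Ledger_proof.Cache_tag.generate p cache_proof_db in
    Ledger_proof.Cache_tag.unwrap tag cache_proof_db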
@@ -284,7 +285,7 @@ module T = struct let last_proof_statement = Option.map ~f:(fun ((p, _), _) -> - Ledger_proof.statement @@ Ledger_proof.Cache_tag.unwrap p ) + Ledger_proof.statement @@ Ledger_proof.Cache_tag.unwrap p cache_proof_db) (Scan_state.latest_ledger_proof scan_state) in Statement_scanner.check_invariants ~constraint_constants scan_state @@ -1003,7 +1004,7 @@ module T = struct let apply_diff ?(skip_verification = false) ~logger ~constraint_constants ~global_slot t pre_diff_info ~current_state_view ~state_and_body_hash - ~log_prefix ~zkapp_cmd_limit_hardcap = + ~log_prefix ~zkapp_cmd_limit_hardcap ~cache_proof_db = let open Deferred.Result.Let_syntax in let max_throughput = Int.pow 2 t.constraint_constants.transaction_capacity_log_2 @@ -1127,7 +1128,7 @@ module T = struct t.pending_coinbase_collection stack_update ~is_new_stack ~ledger_proof: (Option.map - ~f:(fun (p, x) -> (Ledger_proof.Cache_tag.unwrap p, x)) + ~f:(fun (p, x) -> (Ledger_proof.Cache_tag.unwrap p cache_proof_db, x)) res_opt ) |> Deferred.return ) in @@ -1153,6 +1154,7 @@ module T = struct (Frozen_ledger_hash.of_ledger_hash (Ledger.merkle_root new_ledger) ) ~pending_coinbase_stack:latest_pending_coinbase_stack + ~cache_proof_db scan_state' >>| to_staged_ledger_or_error) ) ) in @@ -1248,7 +1250,7 @@ module T = struct let apply ?skip_verification ~constraint_constants ~global_slot t ~get_completed_work (witness : Staged_ledger_diff.t) ~logger ~verifier ~current_state_view ~state_and_body_hash ~coinbase_receiver - ~supercharge_coinbase ~zkapp_cmd_limit_hardcap = + ~supercharge_coinbase ~zkapp_cmd_limit_hardcap ~cache_proof_db = let open Deferred.Result.Let_syntax in let work = Staged_ledger_diff.completed_works witness in let%bind () = @@ -1282,6 +1284,7 @@ module T = struct (forget_prediff_info prediff) ~logger ~current_state_view ~state_and_body_hash ~log_prefix:"apply_diff" ~zkapp_cmd_limit_hardcap + ~cache_proof_db in [%log internal] "Diff_applied" ; [%log debug] @@ -1303,7 +1306,7 @@ module T = struct let apply_diff_unchecked ~constraint_constants ~global_slot t (sl_diff : Staged_ledger_diff.With_valid_signatures_and_proofs.t) ~logger ~current_state_view ~state_and_body_hash ~coinbase_receiver - ~supercharge_coinbase ~zkapp_cmd_limit_hardcap = + ~supercharge_coinbase ~zkapp_cmd_limit_hardcap ~cache_proof_db = let open Deferred.Result.Let_syntax in let%bind prediff = Result.map_error ~f:(fun error -> Staged_ledger_error.Pre_diff error) @@ -1315,7 +1318,7 @@ module T = struct (forget_prediff_info prediff) ~constraint_constants ~global_slot ~logger ~current_state_view ~state_and_body_hash ~log_prefix:"apply_diff_unchecked" - ~zkapp_cmd_limit_hardcap + ~zkapp_cmd_limit_hardcap ~cache_proof_db module Resources = struct module Discarded = struct @@ -2407,7 +2410,7 @@ let%test_module "staged ledger tests" = let create_and_apply_with_state_body_hash ?zkapp_cmd_limit ?(coinbase_receiver = coinbase_receiver) ?(winner = self_pk) ~(current_state_view : Zkapp_precondition.Protocol_state.View.t) - ~global_slot ~state_and_body_hash sl txns stmt_to_work = + ~global_slot ~state_and_body_hash ~cache_proof_db sl txns stmt_to_work = let open Deferred.Let_syntax in let supercharge_coinbase = supercharge_coinbase ~ledger:(Sl.ledger !sl) ~winner ~global_slot @@ -2434,7 +2437,7 @@ let%test_module "staged ledger tests" = Sl.apply ~constraint_constants ~global_slot !sl diff' ~logger ~verifier ~get_completed_work:(Fn.const None) ~current_state_view ~state_and_body_hash ~coinbase_receiver ~supercharge_coinbase - ~zkapp_cmd_limit_hardcap + 
~zkapp_cmd_limit_hardcap ~cache_proof_db with | Ok x -> x @@ -2447,12 +2450,12 @@ let%test_module "staged ledger tests" = let create_and_apply ?(coinbase_receiver = coinbase_receiver) ?(winner = self_pk) ~global_slot ~protocol_state_view - ~state_and_body_hash sl txns stmt_to_work = + ~state_and_body_hash ~cache_proof_db sl txns stmt_to_work = let open Deferred.Let_syntax in let%map ledger_proof, diff, _, _, _ = create_and_apply_with_state_body_hash ~coinbase_receiver ~winner ~current_state_view:protocol_state_view ~global_slot - ~state_and_body_hash sl txns stmt_to_work + ~state_and_body_hash sl txns stmt_to_work ~cache_proof_db in (ledger_proof, diff) @@ -2743,11 +2746,12 @@ let%test_module "staged ledger tests" = -> unit Deferred.t = fun ~global_slot account_ids_to_check cmds cmd_iters sl ?(expected_proof_count = None) ?(allow_failures = false) - ?(check_snarked_ledger_transition = false) ~snarked_ledger test_mask + ?(check_snarked_ledger_transition = false) ~snarked_ledger test_mask provers stmt_to_work -> let global_slot = Mina_numbers.Global_slot_since_genesis.of_int global_slot in + let cache_proof_db = Filename.temp_dir_name in let state_tbl = State_hash.Table.create () in (*Add genesis state to the table*) let genesis, _ = dummy_state_and_view () in @@ -2772,7 +2776,8 @@ let%test_module "staged ledger tests" = ( state_hash , (Mina_state.Protocol_state.hashes current_state) .state_body_hash |> Option.value_exn ) - sl cmds_this_iter stmt_to_work + ~cache_proof_db sl + cmds_this_iter stmt_to_work in List.iter (Staged_ledger_diff.commands diff) ~f:(fun c -> match With_status.status c with @@ -2856,7 +2861,7 @@ let%test_module "staged ledger tests" = in let ledger_proof = Option.map ledger_proof ~f:(fun (p, x) -> - (Ledger_proof.Cache_tag.unwrap p, x) ) + (Ledger_proof.Cache_tag.unwrap p cache_proof_db, x) ) in let%bind () = if check_snarked_ledger_transition then @@ -5204,6 +5209,7 @@ let%test_module "staged ledger tests" = ~get_completed_work:(Fn.const None) ~logger ~verifier:verifier_full ~current_state_view ~state_and_body_hash ~coinbase_receiver + ~cache_proof_db:Filename.temp_dir_name ~supercharge_coinbase:false ~zkapp_cmd_limit_hardcap with | Ok _ -> diff --git a/src/lib/transaction_snark/transaction_snark.ml b/src/lib/transaction_snark/transaction_snark.ml index 13881f0ce7a..62a1e624c9f 100644 --- a/src/lib/transaction_snark/transaction_snark.ml +++ b/src/lib/transaction_snark/transaction_snark.ml @@ -72,7 +72,7 @@ module Make_str (A : Wire_types.Concrete) = struct let to_latest = Fn.id end end] - + module Cache_tag = Proof_cache_tag end @@ -90,6 +90,9 @@ module Make_str (A : Wire_types.Concrete) = struct end] module Cache_tag = struct + + module Cache = Proof.Cache_tag.Cache + type value = t [@@deriving compare, equal, sexp, yojson, hash] type t = @@ -98,11 +101,14 @@ module Make_str (A : Wire_types.Concrete) = struct } [@@deriving compare, equal, sexp, yojson, hash] - let unwrap ({ statement; proof } : t) : value = - { statement; proof = Proof.Cache_tag.unwrap proof } + let unwrap ({ statement; proof } : t) db : value = + { statement; proof = Proof.Cache_tag.unwrap proof db } - let generate ({ statement; proof } : value) : t = - { statement; proof = Proof.Cache_tag.generate proof } + let generate ({ statement; proof } : value) db : t = + { statement; proof = Proof.Cache_tag.generate proof db } + + module For_tests = Proof.Cache_tag.For_tests + end let proof t = t.proof @@ -4124,6 +4130,7 @@ module Make_str (A : Wire_types.Concrete) = struct lazy (constraint_system_digests 
~constraint_constants ()) end + module For_tests = struct module Spec = struct type t = diff --git a/src/lib/transaction_snark/transaction_snark_intf.ml b/src/lib/transaction_snark/transaction_snark_intf.ml index 97be11de26a..5f9094174b7 100644 --- a/src/lib/transaction_snark/transaction_snark_intf.ml +++ b/src/lib/transaction_snark/transaction_snark_intf.ml @@ -22,13 +22,28 @@ module type Full = sig end] module Cache_tag : sig + + module Cache : sig + + type t + + val initialize : string -> t + + end + type value := t type t [@@deriving compare, equal, sexp, yojson, hash] - val unwrap : t -> value + val unwrap : t -> Cache.t -> value + + val generate : value -> Cache.t -> t + + module For_tests : sig - val generate : value -> t + val random : unit -> Cache.t + + end end val create : statement:Statement.With_sok.t -> proof:Mina_base.Proof.t -> t diff --git a/src/lib/transaction_snark_scan_state/transaction_snark_scan_state.ml b/src/lib/transaction_snark_scan_state/transaction_snark_scan_state.ml index c58f87fb014..d7527442a45 100644 --- a/src/lib/transaction_snark_scan_state/transaction_snark_scan_state.ml +++ b/src/lib/transaction_snark_scan_state/transaction_snark_scan_state.ml @@ -30,6 +30,7 @@ module type Monad_with_Or_error_intf = sig end end + module Transaction_with_witness = struct [%%versioned module Stable = struct @@ -51,7 +52,7 @@ module Transaction_with_witness = struct (Mina_ledger.Sparse_ledger.Stable.V2.t[@sexp.opaque]) ; block_global_slot : Mina_numbers.Global_slot_since_genesis.Stable.V1.t } - [@@deriving sexp, to_yojson] + [@@deriving to_yojson] let to_latest = Fn.id end @@ -166,8 +167,8 @@ module Repr = struct ; previous_incomplete_zkapp_updates : Transaction_with_witness.Stable.V2.t list * [ `Border_block_continued_in_the_next_tree of bool ] + ; proof_cache : Ledger_proof.Cache_tag.Cache.t } - [@@deriving sexp] let to_latest = Fn.id @@ -213,24 +214,26 @@ module Stable = struct ; previous_incomplete_zkapp_updates : Transaction_with_witness.t list * [ `Border_block_continued_in_the_next_tree of bool ] + ; proof_cache : Ledger_proof.Cache_tag.db } - let to_repr ({ scan_state; previous_incomplete_zkapp_updates } : t) : Repr.t + let to_repr ({ scan_state; previous_incomplete_zkapp_updates; proof_cache } : t) : Repr.t = { scan_state = Parallel_scan.State.map - ~f1:(fun (p, m) -> (Ledger_proof.Cache_tag.unwrap p, m)) + ~f1:(fun (p, m) -> (Ledger_proof.Cache_tag.unwrap p proof_cache, m )) ~f2:Fn.id scan_state ; previous_incomplete_zkapp_updates } - let of_repr ({ scan_state; previous_incomplete_zkapp_updates } : Repr.t) : t + let of_repr ({ scan_state; previous_incomplete_zkapp_updates; proof_cache } : Repr.t) : t = { scan_state = Parallel_scan.State.map - ~f1:(fun (p, m) -> (Ledger_proof.Cache_tag.generate p, m)) + ~f1:(fun (p, m) -> ((Ledger_proof.Cache_tag.generate p proof_cache), m)) ~f2:Fn.id scan_state ; previous_incomplete_zkapp_updates + ; proof_cache } include @@ -239,14 +242,14 @@ module Stable = struct (struct type nonrec t = t - let to_binable = to_repr + let to_binable = to_repr let of_binable = of_repr end) let to_latest = Fn.id - let hash (t : t) = Repr.hash (to_repr t) + let hash (t : t) cache_proof_db = Repr.hash (to_repr t ~cache_proof_db) end end] @@ -349,7 +352,7 @@ let create_expected_statement ~constraint_constants ; sok_digest = () } -let completed_work_to_scanable_work (job : job) (fee, current_proof, prover) : +let completed_work_to_scanable_work (job : job) (fee, current_proof, prover, cache_proof_db) : (Ledger_proof.Cache_tag.t * Sok_message.t) 
Or_error.t = let sok_digest = Ledger_proof.sok_digest current_proof and proof = Ledger_proof.underlying_proof current_proof in @@ -357,16 +360,16 @@ let completed_work_to_scanable_work (job : job) (fee, current_proof, prover) : | Base { statement; _ } -> let ledger_proof = Ledger_proof.Cache_tag.generate - @@ Ledger_proof.create ~statement ~sok_digest ~proof + (Ledger_proof.create ~statement ~sok_digest ~proof) cache_proof_db in Ok (ledger_proof, Sok_message.create ~fee ~prover) | Merge ((p, _), (p', _)) -> let open Or_error.Let_syntax in - let s = Ledger_proof.statement @@ Ledger_proof.Cache_tag.unwrap p - and s' = Ledger_proof.statement @@ Ledger_proof.Cache_tag.unwrap p' in + let s = Ledger_proof.statement @@ Ledger_proof.Cache_tag.unwrap p cache_proof_db + and s' = Ledger_proof.statement @@ Ledger_proof.Cache_tag.unwrap p' cache_proof_db in let%map statement = Transaction_snark.Statement.merge s s' in ( Ledger_proof.Cache_tag.generate - @@ Ledger_proof.create ~statement ~sok_digest ~proof + (Ledger_proof.create ~statement ~sok_digest ~proof) cache_proof_db , Sok_message.create ~fee ~prover ) let total_proofs (works : Transaction_snark_work.t list) = @@ -437,7 +440,8 @@ struct (*TODO: fold over the pending_coinbase tree and validate the statements?*) let scan_statement ~constraint_constants ~logger - ({ scan_state = tree; previous_incomplete_zkapp_updates = _ } : t) + ({ scan_state = tree; previous_incomplete_zkapp_updates = _ } : t) + ~cache_proof_db ~statement_check ~verifier : ( Transaction_snark.Statement.t , [ `Error of Error.t | `Empty ] ) @@ -498,12 +502,12 @@ struct in Some s2 in - let fold_step_a (acc_statement, acc_pc) job = + let fold_step_a (acc_statement, acc_pc) cache_proof_db job = match job with | Parallel_scan.Merge.Job.Part (proof, message) -> let%map acc_stmt = let statement = - Ledger_proof.statement @@ Ledger_proof.Cache_tag.unwrap proof + Ledger_proof.statement (Ledger_proof.Cache_tag.unwrap proof cache_proof_db) in merge_acc ~proofs:[ (proof, message) ] acc_statement statement in @@ -512,10 +516,10 @@ struct return (acc_statement, acc_pc) | Full { left = proof_1, message_1; right = proof_2, message_2; _ } -> let stmt1 = - Ledger_proof.statement @@ Ledger_proof.Cache_tag.unwrap proof_1 + Ledger_proof.statement (Ledger_proof.Cache_tag.unwrap proof_1 cache_proof_db) in let stmt2 = - Ledger_proof.statement @@ Ledger_proof.Cache_tag.unwrap proof_2 + Ledger_proof.statement @@ Ledger_proof.Cache_tag.unwrap proof_2 cache_proof_db in let%bind merged_statement = Timer.time timer (sprintf "merge:%s" __LOC__) (fun () -> @@ -588,7 +592,7 @@ struct Fold.fold_chronological_until tree ~init:(None, None) ~f_merge:(fun acc (_weight, job) -> let open Container.Continue_or_stop in - match%map.Deferred fold_step_a acc job with + match%map.Deferred fold_step_a acc cache_proof_db job with | Ok next -> Continue next | e -> @@ -618,7 +622,7 @@ struct Deferred.return (Error (`Error e)) let check_invariants t ~constraint_constants ~logger ~statement_check - ~verifier ~error_prefix + ~verifier ~error_prefix ~cache_proof_db ~(last_proof_statement : Transaction_snark.Statement.t option) ~(registers_end : ( Frozen_ledger_hash.t @@ -657,7 +661,7 @@ struct match%map O1trace.sync_thread "validate_transaction_snark_scan_state" (fun () -> scan_statement t ~constraint_constants ~logger ~statement_check - ~verifier ) + ~verifier ~cache_proof_db) with | Error (`Error e) -> Error e @@ -699,13 +703,13 @@ struct () end -let statement_of_job : job -> Transaction_snark.Statement.t option = function +let 
statement_of_job ~cache_proof_db : job -> Transaction_snark.Statement.t option = function | Base { statement; _ } -> Some statement | Merge ((p1, _), (p2, _)) -> Transaction_snark.Statement.merge - (Ledger_proof.statement @@ Ledger_proof.Cache_tag.unwrap p1) - (Ledger_proof.statement @@ Ledger_proof.Cache_tag.unwrap p2) + (Ledger_proof.statement @@ Ledger_proof.Cache_tag.unwrap p1 cache_proof_db) + (Ledger_proof.statement @@ Ledger_proof.Cache_tag.unwrap p2 cache_proof_db) |> Result.ok let create ~work_delay ~transaction_capacity_log_2 : t = @@ -1278,10 +1282,10 @@ let extract_from_job (job : job) = | Merge ((p1, _), (p2, _)) -> Second (p1, p2) -let snark_job_list_json t = +let snark_job_list_json ~cache_proof_db t = let all_jobs : Job_view.t list list = let fa (a : Ledger_proof.Cache_tag.t * Sok_message.t) = - Ledger_proof.statement @@ Ledger_proof.Cache_tag.unwrap (fst a) + Ledger_proof.statement @@ Ledger_proof.Cache_tag.unwrap (fst a) cache_proof_db in let fd (d : Transaction_with_witness.t) = d.statement in Parallel_scan.view_jobs_with_position t.scan_state fa fd @@ -1292,12 +1296,12 @@ let snark_job_list_json t = `List (List.map tree ~f:Job_view.to_yojson) ) ) ) (*Always the same pairing of jobs*) -let all_work_statements_exn t : Transaction_snark_work.Statement.t list = +let all_work_statements_exn ~cache_proof_db t : Transaction_snark_work.Statement.t list = let work_seqs = all_jobs t in List.concat_map work_seqs ~f:(fun work_seq -> One_or_two.group_list (List.map work_seq ~f:(fun job -> - match statement_of_job job with + match statement_of_job ~cache_proof_db job with | None -> assert false | Some stmt -> @@ -1313,18 +1317,19 @@ let k_work_pairs_for_new_diff t ~k = take (concat_map work_list ~f:(fun works -> One_or_two.group_list works)) k) (*Always the same pairing of jobs*) -let work_statements_for_new_diff t : Transaction_snark_work.Statement.t list = +let work_statements_for_new_diff ~cache_proof_db t : Transaction_snark_work.Statement.t list = let work_list = Parallel_scan.jobs_for_next_update t.scan_state in List.concat_map work_list ~f:(fun work_seq -> One_or_two.group_list (List.map work_seq ~f:(fun job -> - match statement_of_job job with + match statement_of_job ~cache_proof_db job with | None -> assert false | Some stmt -> stmt ) ) ) let all_work_pairs t + ~cache_proof_db ~(get_state : State_hash.t -> Mina_state.Protocol_state.value Or_error.t) : ( Transaction_witness.t , Ledger_proof.Cache_tag.t ) @@ -1374,8 +1379,8 @@ let all_work_pairs t | Second (p1, p2) -> let%map merged = Transaction_snark.Statement.merge - (Ledger_proof.statement @@ Ledger_proof.Cache_tag.unwrap p1) - (Ledger_proof.statement @@ Ledger_proof.Cache_tag.unwrap p2) + (Ledger_proof.statement @@ Ledger_proof.Cache_tag.unwrap p1 cache_proof_db) + (Ledger_proof.statement @@ Ledger_proof.Cache_tag.unwrap p2 cache_proof_db) in Snark_work_lib.Work.Single.Spec.Merge (merged, p1, p2) in @@ -1397,7 +1402,7 @@ let all_work_pairs t let update_metrics t = Parallel_scan.update_metrics t.scan_state -let fill_work_and_enqueue_transactions t ~logger transactions work = +let fill_work_and_enqueue_transactions t ~logger transactions work db = let open Or_error.Let_syntax in let fill_in_transaction_snark_work tree (works : Transaction_snark_work.t list) : (Ledger_proof.Cache_tag.t * Sok_message.t) list Or_error.t = @@ -1443,7 +1448,7 @@ let fill_work_and_enqueue_transactions t ~logger transactions work = proof_opt ~f:(fun ((proof, _), _txns_with_witnesses) -> let curr_stmt = - Ledger_proof.statement @@ 
Ledger_proof.Cache_tag.unwrap proof + Ledger_proof.statement @@ Ledger_proof.Cache_tag.unwrap proof db in let prev_stmt, incomplete_zkapp_updates_from_old_proof = Option.value_map @@ -1451,7 +1456,7 @@ let fill_work_and_enqueue_transactions t ~logger transactions work = (curr_stmt, ([], `Border_block_continued_in_the_next_tree false)) old_proof_and_incomplete_zkapp_updates ~f:(fun ((p', _), incomplete_zkapp_updates_from_old_proof) -> - ( Ledger_proof.statement @@ Ledger_proof.Cache_tag.unwrap p' + ( Ledger_proof.statement @@ Ledger_proof.Cache_tag.unwrap p' db , incomplete_zkapp_updates_from_old_proof ) ) in (*prev_target is connected to curr_source- Order of the arguments is diff --git a/src/lib/transition_frontier/frontier_base/breadcrumb.ml b/src/lib/transition_frontier/frontier_base/breadcrumb.ml index 76e3f619eef..81cfd31a586 100644 --- a/src/lib/transition_frontier/frontier_base/breadcrumb.ml +++ b/src/lib/transition_frontier/frontier_base/breadcrumb.ml @@ -89,7 +89,7 @@ let compute_block_trace_metadata transition_with_validation = ] let build ?skip_staged_ledger_verification ~logger ~precomputed_values ~verifier - ~trust_system ~parent + ~trust_system ~parent ~cache_proof_db ~transition:(transition_with_validation : Mina_block.almost_valid_block) ~get_completed_work ~sender ~transition_receipt_time () = let state_hash = @@ -107,7 +107,7 @@ let build ?skip_staged_ledger_verification ~logger ~precomputed_values ~verifier match%bind Validation.validate_staged_ledger_diff ?skip_staged_ledger_verification ~get_completed_work ~logger ~precomputed_values ~verifier - ~parent_staged_ledger:(staged_ledger parent) + ~parent_staged_ledger:(staged_ledger parent) ~cache_proof_db ~parent_protocol_state: ( parent.validated_transition |> Mina_block.Validated.header |> Mina_block.Header.protocol_state ) @@ -330,6 +330,7 @@ module For_tests = struct ?(trust_system = Trust_system.null ()) ~accounts_with_secret_keys () : (t -> t Deferred.t) Quickcheck.Generator.t = let open Quickcheck.Let_syntax in + let cache_proof_db = Ledger_proof.Cache_tag.For_tests.random () in let%bind slot_advancement = Int.gen_incl 1 10 in let%bind make_next_consensus_state = Consensus_state_hooks.For_tests.gen_consensus_state ~slot_advancement @@ -427,7 +428,7 @@ module For_tests = struct let ledger_proof_statement = Option.value_map ledger_proof_opt ~default:previous_ledger_proof_stmt ~f:(fun (proof, _) -> - Ledger_proof.statement @@ Ledger_proof.Cache_tag.unwrap proof ) + Ledger_proof.statement @@ Ledger_proof.Cache_tag.unwrap proof cache_proof_db) in let genesis_ledger_hash = previous_protocol_state |> Protocol_state.blockchain_state @@ -489,7 +490,7 @@ module For_tests = struct match%map build ~logger ~precomputed_values ~trust_system ~verifier ~get_completed_work:(Fn.const None) ~parent:parent_breadcrumb - ~transition: + ~cache_proof_db ~transition: ( next_block |> Mina_block.Validated.remember |> Validation.reset_staged_ledger_diff_validation ) ~sender:None ~skip_staged_ledger_verification:`All diff --git a/src/lib/transition_frontier/frontier_base/breadcrumb.mli b/src/lib/transition_frontier/frontier_base/breadcrumb.mli index 2a087b15a60..7af75afbc54 100644 --- a/src/lib/transition_frontier/frontier_base/breadcrumb.mli +++ b/src/lib/transition_frontier/frontier_base/breadcrumb.mli @@ -34,6 +34,7 @@ val build : -> verifier:Verifier.t -> trust_system:Trust_system.t -> parent:t + -> cache_proof_db:Ledger_proof.Cache_tag.Cache.t -> transition:Mina_block.almost_valid_block -> get_completed_work: ( 
Transaction_snark_work.Statement.t diff --git a/src/lib/transition_frontier/persistent_frontier/persistent_frontier.ml b/src/lib/transition_frontier/persistent_frontier/persistent_frontier.ml index 1ff4c630017..22327320bc8 100644 --- a/src/lib/transition_frontier/persistent_frontier/persistent_frontier.ml +++ b/src/lib/transition_frontier/persistent_frontier/persistent_frontier.ml @@ -190,7 +190,7 @@ module Instance = struct let load_full_frontier t ~context:(module Context : CONTEXT) ~root_ledger ~consensus_local_state ~max_length ~ignore_consensus_local_state - ~persistent_root_instance = + ~persistent_root_instance ~cache_proof_db = let open Context in let open Deferred.Result.Let_syntax in let validate genesis_state_hash (b, v) = @@ -321,7 +321,8 @@ module Instance = struct Breadcrumb.build ~skip_staged_ledger_verification:`All ~logger:t.factory.logger ~precomputed_values ~verifier:t.factory.verifier - ~trust_system:(Trust_system.null ()) ~parent ~transition + ~trust_system:(Trust_system.null ()) ~parent + ~cache_proof_db ~transition ~get_completed_work:(Fn.const None) ~sender:None ~transition_receipt_time () in diff --git a/src/lib/transition_frontier/transition_frontier.ml b/src/lib/transition_frontier/transition_frontier.ml index fdbf67b52e6..0b4361cb78c 100644 --- a/src/lib/transition_frontier/transition_frontier.ml +++ b/src/lib/transition_frontier/transition_frontier.ml @@ -95,7 +95,7 @@ let genesis_root_data ~precomputed_values = let load_from_persistence_and_start ~context:(module Context : CONTEXT) ~verifier ~consensus_local_state ~max_length ~persistent_root ~persistent_root_instance ~persistent_frontier ~persistent_frontier_instance - ~catchup_mode ignore_consensus_local_state = + ~catchup_mode ~cache_proof_db ignore_consensus_local_state = let open Context in let open Deferred.Result.Let_syntax in let root_identifier = @@ -139,7 +139,7 @@ let load_from_persistence_and_start ~context:(module Context : CONTEXT) ~root_ledger: (Persistent_root.Instance.snarked_ledger persistent_root_instance) ~consensus_local_state ~ignore_consensus_local_state - ~persistent_root_instance + ~persistent_root_instance ~cache_proof_db with | Error `Sync_cannot_be_running -> Error (`Failure "sync job is already running on persistent frontier") @@ -195,6 +195,7 @@ let rec load_with_max_length : -> persistent_root:Persistent_root.t -> persistent_frontier:Persistent_frontier.t -> catchup_mode:[ `Normal | `Super ] + -> cache_proof_db:Ledger_proof.Cache_tag.Cache.t -> unit -> ( t , [> `Bootstrap_required @@ -204,7 +205,7 @@ let rec load_with_max_length : Deferred.Result.t = fun ~context:(module Context : CONTEXT) ~max_length ?(retry_with_fresh_db = true) ~verifier ~consensus_local_state - ~persistent_root ~persistent_frontier ~catchup_mode () -> + ~persistent_root ~persistent_frontier ~catchup_mode ~cache_proof_db () -> let open Context in let open Deferred.Let_syntax in (* TODO: #3053 *) @@ -235,7 +236,7 @@ let rec load_with_max_length : ~context:(module Context) ~verifier ~consensus_local_state ~max_length ~persistent_root ~persistent_root_instance ~catchup_mode ~persistent_frontier - ~persistent_frontier_instance ignore_consensus_local_state + ~persistent_frontier_instance ~cache_proof_db ignore_consensus_local_state with | Ok _ as result -> [%str_log trace] Persisted_frontier_loaded ; @@ -338,7 +339,7 @@ let rec load_with_max_length : load_with_max_length ~context:(module Context) ~max_length ~verifier ~consensus_local_state ~persistent_root - ~persistent_frontier ~retry_with_fresh_db:false 
~catchup_mode () + ~persistent_frontier ~retry_with_fresh_db:false ~catchup_mode ~cache_proof_db () >>| Result.map_error ~f:(function | `Persistent_frontier_malformed -> `Failure @@ -367,7 +368,7 @@ let rec load_with_max_length : let load ?(retry_with_fresh_db = true) ~context:(module Context : CONTEXT) ~verifier ~consensus_local_state ~persistent_root ~persistent_frontier - ~catchup_mode () = + ~catchup_mode ~cache_proof_db () = let open Context in O1trace.thread "transition_frontier_load" (fun () -> let max_length = @@ -377,7 +378,7 @@ let load ?(retry_with_fresh_db = true) ~context:(module Context : CONTEXT) load_with_max_length ~context:(module Context) ~max_length ~retry_with_fresh_db ~verifier ~consensus_local_state - ~persistent_root ~persistent_frontier ~catchup_mode () ) + ~persistent_root ~persistent_frontier ~catchup_mode ~cache_proof_db () ) (* The persistent root and persistent frontier as safe to ignore here * because their lifecycle is longer than the transition frontier's *) @@ -723,6 +724,7 @@ module For_tests = struct let compile_config = precomputed_values.compile_config end in + let cache_proof_db = Ledger_proof.Cache_tag.For_tests.random () in let open Context in let open Quickcheck.Generator.Let_syntax in let trust_system = @@ -796,7 +798,8 @@ module For_tests = struct `Normal | None -> `Normal ) - ~persistent_frontier () ) + ~persistent_frontier + ~cache_proof_db () ) in let frontier = let fail msg = failwith ("failed to load transition frontier: " ^ msg) in diff --git a/src/lib/transition_frontier/transition_frontier.mli b/src/lib/transition_frontier/transition_frontier.mli index 0e719016fa3..8ba8ba9b3e6 100644 --- a/src/lib/transition_frontier/transition_frontier.mli +++ b/src/lib/transition_frontier/transition_frontier.mli @@ -67,6 +67,7 @@ val load : -> persistent_root:Persistent_root.t -> persistent_frontier:Persistent_frontier.t -> catchup_mode:[ `Normal | `Super ] + -> cache_proof_db:Ledger_proof.Cache_tag.Cache.t -> unit -> ( t , [ `Failure of string @@ -122,6 +123,7 @@ module For_tests : sig -> persistent_root:Persistent_root.t -> persistent_frontier:Persistent_frontier.t -> catchup_mode:[ `Normal | `Super ] + -> cache_proof_db:Ledger_proof.Cache_tag.Cache.t -> unit -> ( t , [ `Failure of string diff --git a/src/lib/transition_frontier_controller/transition_frontier_controller.ml b/src/lib/transition_frontier_controller/transition_frontier_controller.ml index 7a28592ec8a..ea315386ad4 100644 --- a/src/lib/transition_frontier_controller/transition_frontier_controller.ml +++ b/src/lib/transition_frontier_controller/transition_frontier_controller.ml @@ -16,7 +16,7 @@ end let run ~context:(module Context : CONTEXT) ~trust_system ~verifier ~network ~time_controller ~collected_transitions ~frontier ~get_completed_work ~network_transition_reader ~producer_transition_reader ~clear_reader - ~cache_exceptions = + ~cache_exceptions ~cache_proof_db = let open Context in let valid_transition_pipe_capacity = 50 in let start_time = Time.now () in @@ -134,11 +134,11 @@ let run ~context:(module Context : CONTEXT) ~trust_system ~verifier ~network ~time_controller ~trust_system ~verifier ~frontier ~get_completed_work ~primary_transition_reader ~producer_transition_reader ~clean_up_catchup_scheduler ~catchup_job_writer ~catchup_breadcrumbs_reader - ~catchup_breadcrumbs_writer ~processed_transition_writer ; + ~catchup_breadcrumbs_writer ~processed_transition_writer ~cache_proof_db ; Ledger_catchup.run ~context:(module Context) ~trust_system ~verifier ~network ~frontier 
~catchup_job_reader - ~catchup_breadcrumbs_writer ~unprocessed_transition_cache ; + ~catchup_breadcrumbs_writer ~unprocessed_transition_cache ~cache_proof_db; upon (Strict_pipe.Reader.read clear_reader) (fun _ -> let open Strict_pipe.Writer in kill valid_transition_writer ; diff --git a/src/lib/transition_handler/breadcrumb_builder.ml b/src/lib/transition_handler/breadcrumb_builder.ml index df40525c50e..09cfc99542e 100644 --- a/src/lib/transition_handler/breadcrumb_builder.ml +++ b/src/lib/transition_handler/breadcrumb_builder.ml @@ -5,7 +5,7 @@ open Cache_lib open Network_peer let build_subtrees_of_breadcrumbs ~logger ~precomputed_values ~verifier - ~trust_system ~frontier ~initial_hash subtrees_of_enveloped_transitions = + ~trust_system ~frontier ~initial_hash ~cache_proof_db subtrees_of_enveloped_transitions = let missing_parent_msg = Printf.sprintf "Transition frontier already garbage-collected the parent of %s" @@ -100,7 +100,7 @@ let build_subtrees_of_breadcrumbs ~logger ~precomputed_values ~verifier ~precomputed_values ~verifier ~trust_system ~parent ~transition:mostly_validated_transition ~get_completed_work:(Fn.const None) - ~sender:(Some sender) ~transition_receipt_time () ) + ~sender:(Some sender) ~transition_receipt_time ~cache_proof_db () ) with | Error _ -> Deferred.return @@ Or_error.error_string missing_parent_msg diff --git a/src/lib/transition_handler/catchup_scheduler.ml b/src/lib/transition_handler/catchup_scheduler.ml index 4e2ec85f7e5..d92f7e25d1e 100644 --- a/src/lib/transition_handler/catchup_scheduler.ml +++ b/src/lib/transition_handler/catchup_scheduler.ml @@ -64,7 +64,7 @@ type t = } let create ~logger ~precomputed_values ~verifier ~trust_system ~frontier - ~time_controller ~catchup_job_writer + ~time_controller ~catchup_job_writer ~cache_proof_db ~(catchup_breadcrumbs_writer : ( ( (Transition_frontier.Breadcrumb.t, State_hash.t) Cached.t * Validation_callback.t option ) @@ -102,6 +102,7 @@ let create ~logger ~precomputed_values ~verifier ~trust_system ~frontier [ ("catchup_scheduler", `String "Called from catchup scheduler") ] ) ~precomputed_values ~verifier ~trust_system ~frontier ~initial_hash + ~cache_proof_db transition_branches with | Ok trees_of_breadcrumbs -> @@ -395,6 +396,7 @@ let%test_module "Transition_handler.Catchup_scheduler tests" = let%test_unit "catchup jobs fire after the timeout" = let timeout_duration = Block_time.Span.of_ms 200L in let test_delta = Block_time.Span.of_ms 100L in + let cache_proof_db = Ledger_proof.Cache_tag.For_tests.random () in Quickcheck.test ~trials:3 (Transition_frontier.For_tests.gen_with_branch ~precomputed_values ~verifier ~max_length ~frontier_size:1 ~branch_size:2 () ) @@ -410,7 +412,7 @@ let%test_module "Transition_handler.Catchup_scheduler tests" = let disjoint_breadcrumb = List.last_exn branch in let scheduler = create ~frontier ~precomputed_values ~verifier ~catchup_job_writer - ~catchup_breadcrumbs_writer ~clean_up_signal:(Ivar.create ()) + ~catchup_breadcrumbs_writer ~cache_proof_db ~clean_up_signal:(Ivar.create ()) in watch scheduler ~timeout_duration ~valid_cb:None ~cached_transition: @@ -445,6 +447,7 @@ let%test_module "Transition_handler.Catchup_scheduler tests" = invalidated" = let timeout_duration = Block_time.Span.of_ms 200L in let test_delta = Block_time.Span.of_ms 400L in + let cache_proof_db = Ledger_proof.Cache_tag.For_tests.random () in Quickcheck.test ~trials:3 (Transition_frontier.For_tests.gen_with_branch ~precomputed_values ~verifier ~max_length ~frontier_size:1 ~branch_size:2 () ) @@ -470,7 
+473,7 @@ let%test_module "Transition_handler.Catchup_scheduler tests" = in let scheduler = create ~precomputed_values ~frontier ~verifier ~catchup_job_writer - ~catchup_breadcrumbs_writer ~clean_up_signal:(Ivar.create ()) + ~catchup_breadcrumbs_writer ~cache_proof_db ~clean_up_signal:(Ivar.create ()) in watch scheduler ~timeout_duration ~valid_cb:None ~cached_transition: @@ -532,6 +535,7 @@ let%test_module "Transition_handler.Catchup_scheduler tests" = let%test_unit "catchup scheduler should not create duplicate jobs when a \ sequence of transitions is added in reverse order" = let timeout_duration = Block_time.Span.of_ms 400L in + let cache_proof_db = Ledger_proof.Cache_tag.For_tests.random () in Quickcheck.test ~trials:3 (Transition_frontier.For_tests.gen_with_branch ~precomputed_values ~verifier ~max_length ~frontier_size:1 ~branch_size:5 () ) @@ -546,7 +550,7 @@ let%test_module "Transition_handler.Catchup_scheduler tests" = in let scheduler = create ~precomputed_values ~frontier ~verifier ~catchup_job_writer - ~catchup_breadcrumbs_writer ~clean_up_signal:(Ivar.create ()) + ~catchup_breadcrumbs_writer ~cache_proof_db ~clean_up_signal:(Ivar.create ()) in let[@warning "-8"] (oldest_breadcrumb :: dependent_breadcrumbs) = List.rev branch diff --git a/src/lib/transition_handler/processor.ml b/src/lib/transition_handler/processor.ml index 8ae18aa925e..9e217148776 100644 --- a/src/lib/transition_handler/processor.ml +++ b/src/lib/transition_handler/processor.ml @@ -105,7 +105,7 @@ let add_and_finalize ~logger ~frontier ~catchup_scheduler let process_transition ~context:(module Context : CONTEXT) ~trust_system ~verifier ~get_completed_work ~frontier ~catchup_scheduler - ~processed_transition_writer ~time_controller ~block_or_header ~valid_cb = + ~processed_transition_writer ~time_controller ~block_or_header ~valid_cb ~cache_proof_db = let is_block_in_frontier = Fn.compose Option.is_some @@ Transition_frontier.find frontier in @@ -253,7 +253,7 @@ let process_transition ~context:(module Context : CONTEXT) ~trust_system Transition_frontier.Breadcrumb.build ~logger ~precomputed_values ~verifier ~get_completed_work ~trust_system ~transition_receipt_time ~sender:(Some sender) - ~parent:parent_breadcrumb ~transition:mostly_validated_transition + ~parent:parent_breadcrumb ~cache_proof_db ~transition:mostly_validated_transition (* TODO: Can we skip here? 
*) () ) ~transform_result:(function | Error (`Invalid_staged_ledger_hash error) @@ -320,12 +320,13 @@ let run ~context:(module Context : CONTEXT) ~verifier ~trust_system * [ `Ledger_catchup of unit Ivar.t | `Catchup_scheduler ] , crash buffered , unit ) - Writer.t ) ~processed_transition_writer = + Writer.t ) ~processed_transition_writer + ~cache_proof_db = let open Context in let catchup_scheduler = Catchup_scheduler.create ~logger ~precomputed_values ~verifier ~trust_system ~frontier ~time_controller ~catchup_job_writer ~catchup_breadcrumbs_writer - ~clean_up_signal:clean_up_catchup_scheduler + ~clean_up_signal:clean_up_catchup_scheduler ~cache_proof_db in let add_and_finalize = add_and_finalize ~frontier ~catchup_scheduler ~processed_transition_writer @@ -463,7 +464,7 @@ let run ~context:(module Context : CONTEXT) ~verifier ~trust_system Transition_frontier_controller.transitions_being_processed) | `Partially_valid_transition (block_or_header, `Valid_cb valid_cb) -> - process_transition ~block_or_header ~valid_cb ) ) ) + process_transition ~block_or_header ~valid_cb ~cache_proof_db ) ) ) let%test_module "Transition_handler.Processor tests" = ( module struct @@ -529,10 +530,12 @@ let%test_module "Transition_handler.Processor tests" = let frontier_size = 1 in let branch_size = 10 in let max_length = frontier_size + branch_size in + let cache_proof_db = Ledger_proof.Cache_tag.For_tests.random () in Quickcheck.test ~trials:4 (Transition_frontier.For_tests.gen_with_branch ~precomputed_values ~verifier ~max_length ~frontier_size ~branch_size () ) ~f:(fun (frontier, branch) -> + assert ( Thread_safe.block_on_async_exn (fun () -> let valid_transition_reader, valid_transition_writer = @@ -568,7 +571,8 @@ let%test_module "Transition_handler.Processor tests" = ~primary_transition_reader:valid_transition_reader ~producer_transition_reader ~catchup_job_writer ~catchup_breadcrumbs_reader ~catchup_breadcrumbs_writer - ~processed_transition_writer ; + ~processed_transition_writer + ~cache_proof_db ; List.iter branch ~f:(fun breadcrumb -> let b = downcast_breadcrumb breadcrumb diff --git a/src/lib/transition_router/transition_router.ml b/src/lib/transition_router/transition_router.ml index 83650d1e97b..e494112e2ba 100644 --- a/src/lib/transition_router/transition_router.ml +++ b/src/lib/transition_router/transition_router.ml @@ -100,7 +100,7 @@ let start_transition_frontier_controller ~context:(module Context : CONTEXT) ~trust_system ~verifier ~network ~time_controller ~get_completed_work ~producer_transition_writer_ref ~verified_transition_writer ~clear_reader ~collected_transitions ~cache_exceptions ?transition_writer_ref ~frontier_w - frontier = + frontier ~cache_proof_db = let open Context in [%str_log info] Starting_transition_frontier_controller ; let ( transition_frontier_controller_reader @@ -146,7 +146,7 @@ let start_transition_frontier_controller ~context:(module Context : CONTEXT) ~trust_system ~verifier ~network ~time_controller ~collected_transitions ~frontier ~get_completed_work ~network_transition_reader:transition_frontier_controller_reader - ~producer_transition_reader ~clear_reader ~cache_exceptions + ~producer_transition_reader ~clear_reader ~cache_exceptions ~cache_proof_db in Strict_pipe.Reader.iter new_verified_transition_reader ~f: @@ -160,7 +160,7 @@ let start_bootstrap_controller ~context:(module Context : CONTEXT) ~trust_system ~producer_transition_writer_ref ~verified_transition_writer ~clear_reader ?transition_writer_ref ~consensus_local_state ~frontier_w 
~initial_root_transition ~persistent_root ~persistent_frontier - ~cache_exceptions ~best_seen_transition ~catchup_mode = + ~cache_exceptions ~best_seen_transition ~catchup_mode ~cache_proof_db = let open Context in [%str_log info] Starting_bootstrap_controller ; [%log info] "Starting Bootstrap Controller phase" ; @@ -204,7 +204,7 @@ let start_bootstrap_controller ~context:(module Context : CONTEXT) ~trust_system ~context:(module Context) ~trust_system ~verifier ~network ~consensus_local_state ~transition_reader:bootstrap_controller_reader ~persistent_frontier - ~persistent_root ~initial_root_transition ~preferred_peers ~catchup_mode ) + ~persistent_root ~initial_root_transition ~preferred_peers ~catchup_mode ~cache_proof_db) (fun (new_frontier, collected_transitions) -> Strict_pipe.Writer.kill bootstrap_controller_writer ; start_transition_frontier_controller @@ -326,13 +326,13 @@ let download_best_tip ~context:(module Context : CONTEXT) ~notify_online x.data ) ) let load_frontier ~context:(module Context : CONTEXT) ~verifier - ~persistent_frontier ~persistent_root ~consensus_local_state ~catchup_mode = + ~persistent_frontier ~persistent_root ~consensus_local_state ~catchup_mode ~cache_proof_db = let open Context in match%map Transition_frontier.load ~context:(module Context) ~verifier ~consensus_local_state ~persistent_root ~persistent_frontier - ~catchup_mode () + ~catchup_mode ~cache_proof_db () with | Ok frontier -> [%log info] "Successfully loaded frontier" ; @@ -398,7 +398,7 @@ let initialize ~context:(module Context : CONTEXT) ~sync_local_state ~network ~get_completed_work ~frontier_w ~producer_transition_writer_ref ~clear_reader ~verified_transition_writer ~cache_exceptions ~most_recent_valid_block_writer ~persistent_root ~persistent_frontier - ~consensus_local_state ~catchup_mode ~notify_online = + ~consensus_local_state ~catchup_mode ~notify_online ~cache_proof_db = let open Context in [%log info] "Initializing transition router" ; let%bind () = @@ -422,7 +422,7 @@ let initialize ~context:(module Context : CONTEXT) ~sync_local_state ~network (load_frontier ~context:(module Context) ~verifier ~persistent_frontier ~persistent_root ~consensus_local_state - ~catchup_mode ) + ~catchup_mode ~cache_proof_db) with | best_seen_transition, None -> [%log info] "Unable to load frontier; starting bootstrap" ; @@ -440,6 +440,7 @@ let initialize ~context:(module Context : CONTEXT) ~sync_local_state ~network ~initial_root_transition ~catchup_mode ~best_seen_transition: (Option.map ~f:(fun x -> `Block x) best_seen_transition) + ~cache_proof_db | Some best_tip, Some frontier when is_transition_for_bootstrap ~context:(module Consensus_context) @@ -467,8 +468,8 @@ let initialize ~context:(module Context : CONTEXT) ~sync_local_state ~network ~clear_reader ?transition_writer_ref:None ~consensus_local_state ~frontier_w ~initial_root_transition ~persistent_root ~persistent_frontier ~cache_exceptions ~catchup_mode - ~best_seen_transition:(Some (`Block best_tip)) - | best_tip_opt, Some frontier -> + ~best_seen_transition:(Some (`Block best_tip)) ~cache_proof_db + | best_tip_opt, Some frontier -> let collected_transitions = match best_tip_opt with | Some best_tip -> @@ -525,7 +526,7 @@ let initialize ~context:(module Context : CONTEXT) ~sync_local_state ~network ~trust_system ~verifier ~network ~time_controller ~get_completed_work ~producer_transition_writer_ref ~verified_transition_writer ~clear_reader ~collected_transitions ~cache_exceptions - ?transition_writer_ref:None ~frontier_w frontier + 
?transition_writer_ref:None ~frontier_w frontier ~cache_proof_db let wait_till_genesis ~logger ~time_controller ~(precomputed_values : Precomputed_values.t) = @@ -576,7 +577,7 @@ let run ?(sync_local_state = true) ?(cache_exceptions = false) ~get_current_frontier ~frontier_broadcast_writer:frontier_w ~network_transition_reader ~producer_transition_reader ~get_most_recent_valid_block ~most_recent_valid_block_writer - ~get_completed_work ~catchup_mode ~notify_online () = + ~get_completed_work ~catchup_mode ~notify_online ~cache_proof_db () = let open Context in let module Consensus_context = struct include Context @@ -661,7 +662,7 @@ let run ?(sync_local_state = true) ?(cache_exceptions = false) ~get_completed_work ~frontier_w ~catchup_mode ~producer_transition_writer_ref ~clear_reader ~verified_transition_writer ~most_recent_valid_block_writer - ~consensus_local_state ~notify_online + ~consensus_local_state ~notify_online ~cache_proof_db in Ivar.fill_if_empty initialization_finish_signal () ; @@ -722,7 +723,7 @@ let run ?(sync_local_state = true) ?(cache_exceptions = false) ~clear_reader ~transition_writer_ref ~consensus_local_state ~frontier_w ~persistent_root ~persistent_frontier ~initial_root_transition - ~best_seen_transition:(Some b_or_h) ~catchup_mode ) + ~best_seen_transition:(Some b_or_h) ~catchup_mode ~cache_proof_db) else Deferred.unit | None -> Deferred.unit diff --git a/src/lib/uptime_service/uptime_service.ml b/src/lib/uptime_service/uptime_service.ml index d3cdb4c28fd..2bd4dc64636 100644 --- a/src/lib/uptime_service/uptime_service.ml +++ b/src/lib/uptime_service/uptime_service.ml @@ -195,7 +195,7 @@ let send_produced_block_at ~logger ~interruptor ~url ~peer_id ~produced:true block_data let send_block_and_transaction_snark ~logger ~constraint_constants ~interruptor - ~url ~snark_worker ~transition_frontier ~peer_id + ~url ~snark_worker ~transition_frontier ~peer_id ~cache_proof_db ~(submitter_keypair : Keypair.t) ~snark_work_fee ~graphql_control_port ~built_with_commit_sha = match Broadcast_pipe.Reader.peek transition_frontier with @@ -325,7 +325,7 @@ let send_block_and_transaction_snark ~logger ~constraint_constants ~interruptor (Uptime_snark_worker.perform_single snark_worker ( message , Snark_work_lib.Work.Single.Spec.map_proof - ~f:Ledger_proof.Cache_tag.unwrap single_spec ) ) + ~f:(fun proof ->Ledger_proof.Cache_tag.unwrap proof cache_proof_db) single_spec) ) with | Error e -> (* error in submitting to process *) @@ -364,7 +364,7 @@ let send_block_and_transaction_snark ~logger ~constraint_constants ~interruptor ~url ~state_hash ~produced:false block_data ) ) ) let start ~logger ~uptime_url ~snark_worker_opt ~constraint_constants - ~protocol_constants ~transition_frontier ~time_controller + ~protocol_constants ~transition_frontier ~time_controller ~cache_proof_db ~block_produced_bvar ~uptime_submitter_keypair ~get_next_producer_timing ~get_snark_work_fee ~get_peer ~graphql_control_port ~built_with_commit_sha = match uptime_url with @@ -472,7 +472,7 @@ let start ~logger ~uptime_url ~snark_worker_opt ~constraint_constants in match get_next_producer_time_opt () with | None -> - send_block_and_snark_work () + send_block_and_snark_work ~cache_proof_db () | Some next_producer_time -> (* we look for block production within 4 slots of the *desired* iteration start time, so a late iteration won't affect the @@ -485,9 +485,9 @@ let start ~logger ~uptime_url ~snark_worker_opt ~constraint_constants in if Time.( <= ) next_producer_time four_slots_from_start then (* send a block w/ SNARK 
work, then the produced block *) - let%bind () = send_block_and_snark_work () in + let%bind () = send_block_and_snark_work ~cache_proof_db () in send_just_block next_producer_time - else send_block_and_snark_work () ) ) ; + else send_block_and_snark_work ~cache_proof_db ()) ) ; Deferred.return (Block_time.add next_block_tm five_slots_span) in (* sync to slot boundary *)
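(* Editor's note -- illustrative sketch only, not part of the patch. End to end,
   this diff threads a single proof-cache handle from daemon startup down to every
   consumer of [Ledger_proof.Cache_tag.t] (frontier loading, block production, the
   snark and transaction pools, and the uptime service above). A minimal sketch of
   the startup wiring, assuming [Ledger_proof.Cache_tag] re-exports the [Cache]
   module added to the transaction-snark interface in this diff
   ([Cache.initialize : string -> t]); [_sketch_load_frontier] and
   [proof_cache_path] are hypothetical names: *)
let _sketch_load_frontier ~context ~verifier ~consensus_local_state ~persistent_root
    ~persistent_frontier ~catchup_mode ~proof_cache_path =
  (* Open (or create) the on-disk proof cache once, then hand the same handle to
     [Transition_frontier.load], whose signature gains [~cache_proof_db] above. *)
  let cache_proof_db = Ledger_proof.Cache_tag.Cache.initialize proof_cache_path in
  Transition_frontier.load ~context ~verifier ~consensus_local_state ~persistent_root
    ~persistent_frontier ~catchup_mode ~cache_proof_db ()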