From 47f1bb247114ef8b6b3781cfc6f3f39522264a52 Mon Sep 17 00:00:00 2001 From: dkijania Date: Sun, 29 Sep 2024 19:47:29 +0200 Subject: [PATCH 01/29] WIP --- buildkite/scripts/build-artifact.sh | 5 +- buildkite/src/Command/PatchArchiveTest.dhall | 30 ++++ .../src/Jobs/Test/PatchArchiveTest.dhall | 44 +++++ buildkite/src/Jobs/Test/ReplayerTest.dhall | 2 +- scripts/patch-archive-test.sh | 17 ++ scripts/replayer-test.sh | 2 +- src/test/archive/patch_archive_test/dune | 31 ++++ .../patch_archive_test/patch_archive_test.ml | 160 ++++++++++++++++++ src/test/mina_automation/archive_blocks.ml | 2 +- src/test/mina_automation/executor.ml | 27 ++- src/test/mina_automation/extract_blocks.ml | 2 +- .../mina_automation/missing_blocks_auditor.ml | 2 +- .../missing_blocks_guardian.ml | 2 +- src/test/mina_automation/replayer.ml | 2 +- 14 files changed, 305 insertions(+), 23 deletions(-) create mode 100644 buildkite/src/Command/PatchArchiveTest.dhall create mode 100644 buildkite/src/Jobs/Test/PatchArchiveTest.dhall create mode 100755 scripts/patch-archive-test.sh create mode 100644 src/test/archive/patch_archive_test/dune create mode 100644 src/test/archive/patch_archive_test/patch_archive_test.ml diff --git a/buildkite/scripts/build-artifact.sh b/buildkite/scripts/build-artifact.sh index 263a258ef3c..7eb9c125203 100755 --- a/buildkite/scripts/build-artifact.sh +++ b/buildkite/scripts/build-artifact.sh @@ -24,7 +24,7 @@ make -C src/app/libp2p_helper MAINNET_TARGETS="" [[ ${MINA_BUILD_MAINNET} ]] && MAINNET_TARGETS="src/app/cli/src/mina_mainnet_signatures.exe src/app/rosetta/rosetta_mainnet_signatures.exe src/app/rosetta/ocaml-signer/signer_mainnet_signatures.exe" -echo "--- Build all major tagets required for packaging" +echo "--- Build all major targets required for packaging" echo "Building from Commit SHA: ${MINA_COMMIT_SHA1}" echo "Rust Version: $(rustc --version)" dune build "--profile=${DUNE_PROFILE}" $INSTRUMENTED_PARAM \ @@ -45,4 +45,5 @@ dune build "--profile=${DUNE_PROFILE}" 
$INSTRUMENTED_PARAM \ src/app/rosetta/indexer_test/indexer_test.exe \ src/app/rosetta/ocaml-signer/signer_testnet_signatures.exe \ src/app/test_executive/test_executive.exe \ - src/test/command_line_tests/command_line_tests.exe # 2>&1 | tee /tmp/buildocaml.log + src/test/command_line_tests/command_line_tests.exe \ + src/test/archive/patch_archive_test/patch_archive_test.exe diff --git a/buildkite/src/Command/PatchArchiveTest.dhall b/buildkite/src/Command/PatchArchiveTest.dhall new file mode 100644 index 00000000000..aff17f8519b --- /dev/null +++ b/buildkite/src/Command/PatchArchiveTest.dhall @@ -0,0 +1,30 @@ +let Artifacts = ../Constants/Artifacts.dhall + +let Command = ./Base.dhall + +let Size = ./Size.dhall + +let Network = ../Constants/Network.dhall + +let RunWithPostgres = ./RunWithPostgres.dhall + +in { step = + \(dependsOn : List Command.TaggedKey.Type) + -> Command.build + Command.Config::{ + , commands = + [ RunWithPostgres.runInDockerWithPostgresConn + [ "PATCH_ARCHIVE_TEST_APP=mina-patch-archive-test" + , "NETWORK_DATA_FOLDER=/etc/mina/test/archive/sample_db" + ] + "./src/test/archive/sample_db/archive_db.sql" + Artifacts.Type.FunctionalTestSuite + (None Network.Type) + "./scripts/patch-archive-test.sh" + ] + , label = "Archive: Patch Archive test" + , key = "patch-archive-test" + , target = Size.Large + , depends_on = dependsOn + } + } diff --git a/buildkite/src/Jobs/Test/PatchArchiveTest.dhall b/buildkite/src/Jobs/Test/PatchArchiveTest.dhall new file mode 100644 index 00000000000..ccf4580774a --- /dev/null +++ b/buildkite/src/Jobs/Test/PatchArchiveTest.dhall @@ -0,0 +1,44 @@ +let S = ../../Lib/SelectFiles.dhall + +let Pipeline = ../../Pipeline/Dsl.dhall + +let PipelineTag = ../../Pipeline/Tag.dhall + +let JobSpec = ../../Pipeline/JobSpec.dhall + +let PatchArchiveTest = ../../Command/PatchArchiveTest.dhall + +let Profiles = ../../Constants/Profiles.dhall + +let Network = ../../Constants/Network.dhall + +let Artifacts = ../../Constants/Artifacts.dhall + 
+let Dockers = ../../Constants/DockerVersions.dhall + +let dependsOn = + Dockers.dependsOn + Dockers.Type.Bullseye + Network.Type.Devnet + Profiles.Type.Standard + Artifacts.Type.FunctionalTestSuite + +in Pipeline.build + Pipeline.Config::{ + , spec = JobSpec::{ + , dirtyWhen = + [ S.strictlyStart (S.contains "src") + , S.exactly "scripts/patch-archive-test" "sh" + , S.exactly "buildkite/src/Jobs/Test/PatchArchiveTest" "dhall" + , S.exactly "buildkite/src/Command/PatchArchiveTest" "dhall" + ] + , path = "Test" + , name = "PatchArchiveTest" + , tags = + [ PipelineTag.Type.Long + , PipelineTag.Type.Test + , PipelineTag.Type.Stable + ] + } + , steps = [ PatchArchiveTest.step dependsOn ] + } diff --git a/buildkite/src/Jobs/Test/ReplayerTest.dhall b/buildkite/src/Jobs/Test/ReplayerTest.dhall index 0e3d665e2ce..24ce7acf7e1 100644 --- a/buildkite/src/Jobs/Test/ReplayerTest.dhall +++ b/buildkite/src/Jobs/Test/ReplayerTest.dhall @@ -19,7 +19,7 @@ let Artifacts = ../../Constants/Artifacts.dhall let dependsOn = Dockers.dependsOn Dockers.Type.Bullseye - (None Network.Type) + Network.Type.Devnet Profiles.Type.Standard Artifacts.Type.Archive diff --git a/scripts/patch-archive-test.sh b/scripts/patch-archive-test.sh new file mode 100755 index 00000000000..48e1c61861a --- /dev/null +++ b/scripts/patch-archive-test.sh @@ -0,0 +1,17 @@ +#!/bin/bash + +set -x +# test replayer on known archive db + +NETWORK_DATA_FOLDER=src/test/archive/sample_db +PATCH_ARCHIVE_TEST_APP=${PATCH_ARCHIVE_TEST_APP:-_build/default/src/test/archive/patch_archive_test/patch_archive_test.exe} +PG_PORT=${PG_PORT:-5432} +POSTGRES_USER=${POSTGRES_USER:-postgres} +POSTGRES_PASSWORD=${POSTGRES_PASSWORD:-postgres} + +CONN=postgres://${POSTGRES_USER}:${POSTGRES_PASSWORD}@localhost:${PG_PORT} + + +echo "Running patch archive test" +$PATCH_ARCHIVE_TEST_APP --source-uri $CONN \ + --network-data-folder $NETWORK_DATA_FOLDER diff --git a/scripts/replayer-test.sh b/scripts/replayer-test.sh index b75c32f4b08..8b6f511295b 
100755 --- a/scripts/replayer-test.sh +++ b/scripts/replayer-test.sh @@ -5,7 +5,7 @@ set -x INPUT_FILE=src/test/archive/sample_db/replayer_input_file.json REPLAYER_APP=_build/default/src/app/replayer/replayer.exe -PG_CONN=postgres://postgres:postgres@localhost:5433/archive +PG_CONN=postgres://postgres:postgres@localhost:5432/archive while [[ "$#" -gt 0 ]]; do case $1 in -i|--input-file) INPUT_FILE="$2"; shift;; diff --git a/src/test/archive/patch_archive_test/dune b/src/test/archive/patch_archive_test/dune new file mode 100644 index 00000000000..109d27b4a77 --- /dev/null +++ b/src/test/archive/patch_archive_test/dune @@ -0,0 +1,31 @@ +(executable + (package patch_archive_test) + (name patch_archive_test) + (public_name patch_archive_test) + (libraries + async + async.async_command + core_kernel + caqti + caqti-async + caqti-driver-postgresql + integration_test_lib + archive_lib + block_time + mina_numbers + logger + mina_base + uri + base + async_kernel + core + async_unix + stdio + base.caml + result + mina_automation + bounded_types + ) + (preprocessor_deps ../../../config.mlh) + (instrumentation (backend bisect_ppx)) + (preprocess (pps ppx_version ppx_mina ppx_let ppx_hash ppx_compare ppx_sexp_conv h_list.ppx))) diff --git a/src/test/archive/patch_archive_test/patch_archive_test.ml b/src/test/archive/patch_archive_test/patch_archive_test.ml new file mode 100644 index 00000000000..8d0b2f91a67 --- /dev/null +++ b/src/test/archive/patch_archive_test/patch_archive_test.ml @@ -0,0 +1,160 @@ +(* patch_archive_test.ml *) + +(* test patching of archive databases + + test structure: + - import reference database for comparison (for example with 100 blocks) + - create new schema and export blocks from reference db with some missing ones + - patch the database with missing precomputed blocks + - compare original and copy +*) + +module Network_Data = struct + type t = + { init_script : String.t + ; precomputed_blocks_zip : String.t + ; genesis_ledger_file : String.t + ; 
replayer_input_file : String.t + ; folder : String.t + } + + let create folder = + { init_script = "archive_db.sql" + ; genesis_ledger_file = "input.json" + ; precomputed_blocks_zip = "precomputed_blocks.zip" + ; replayer_input_file = "replayer_input_file.json" + ; folder + } +end + +open Core_kernel +open Async +open Mina_automation + +let main ~db_uri ~network_data_folder () = + let open Deferred.Let_syntax in + let missing_blocks_count = 3 in + let network_name = "dummy" in + + let network_data = Network_Data.create network_data_folder in + + let output_folder = Filename.temp_dir_name ^ "/output" in + + let%bind output_folder = Unix.mkdtemp output_folder in + + let connection = Psql.Conn_str db_uri in + + let source_db_name = "patch_archive_test_source" in + let target_db_name = "patch_archive_test_target" in + let%bind _ = Psql.create_empty_db ~connection ~db:source_db_name in + let%bind _ = + Psql.run_script ~connection ~db:source_db_name + (network_data.folder ^ "/" ^ network_data.init_script) + in + let%bind () = Psql.create_mina_db ~connection ~db:target_db_name in + + let source_db = db_uri ^ "/" ^ source_db_name in + let target_db = db_uri ^ "/" ^ target_db_name in + + let extract_blocks = Extract_blocks.of_context Executor.AutoDetect in + let config = + { Extract_blocks.Config.archive_uri = source_db + ; range = Extract_blocks.Config.AllBlocks + ; output_folder = Some output_folder + ; network = Some network_name + ; include_block_height_in_name = true + } + in + let%bind _ = Extract_blocks.run extract_blocks ~config in + + let archive_blocks = Archive_blocks.of_context Executor.AutoDetect in + + let%bind extensional_files = + Sys.ls_dir output_folder + >>= Deferred.List.map ~f:(fun e -> + Deferred.return (output_folder ^ "/" ^ e) ) + in + + let n = + List.init missing_blocks_count ~f:(fun _ -> + Random.int (List.length extensional_files) ) + in + + let unpatched_extensional_files = + List.filteri extensional_files ~f:(fun i _ -> + not (List.mem n i 
~equal:Int.equal) ) + |> List.dedup_and_sort ~compare:(fun left right -> + let scan_height item = + let item = + Filename.basename item |> Str.global_replace (Str.regexp "-") " " + in + Scanf.sscanf item "%s %d %s" (fun _ height _ -> height) + in + + let left_height = scan_height left in + let right_height = scan_height right in + + Int.compare left_height right_height ) + in + + let%bind _ = + Archive_blocks.run archive_blocks ~blocks:unpatched_extensional_files + ~archive_uri:target_db ~format:Extensional + in + + let%bind missing_blocks_auditor_path = + Missing_blocks_auditor.of_context Executor.AutoDetect + |> Missing_blocks_auditor.path + in + + let%bind archive_blocks_path = Archive_blocks.path archive_blocks in + + let config = + { Missing_blocks_guardian.Config.archive_uri = Uri.of_string target_db + ; precomputed_blocks = Uri.make ~scheme:"file" ~path:output_folder () + ; network = network_name + ; run_mode = Run + ; missing_blocks_auditor = missing_blocks_auditor_path + ; archive_blocks = archive_blocks_path + ; block_format = Extensional + } + in + + let missing_blocks_guardian = + Missing_blocks_guardian.of_context Executor.AutoDetect + in + + let%bind _ = Missing_blocks_guardian.run missing_blocks_guardian ~config in + + let replayer = Replayer.of_context Executor.AutoDetect in + + let%bind _ = + Replayer.run replayer ~archive_uri:target_db + ~input_config: + (network_data.folder ^ "/" ^ network_data.replayer_input_file) + ~interval_checkpoint:10 ~output_ledger:"./output_ledger" () + in + + Deferred.unit + +let () = + Command.( + run + (let open Let_syntax in + async ~summary:"Test patching of blocks in an archive database" + (let%map db_uri = + Param.flag "--source-uri" + ~doc: + "URI URI for connecting to the database (e.g., \ + postgres://$USER@localhost:5432)" + Param.(required string) + and network_data_folder = + Param.( + flag "--network-data-folder" ~aliases:[ "network-data-folder" ] + Param.(required string)) + ~doc: + "Path Path to folder 
containing network data. Usually it's sql \ + for db import, genesis ledger and zipped precomputed blocks \ + archive" + in + main ~db_uri ~network_data_folder ))) diff --git a/src/test/mina_automation/archive_blocks.ml b/src/test/mina_automation/archive_blocks.ml index 2c0dc47c89d..31e99b28a0e 100644 --- a/src/test/mina_automation/archive_blocks.ml +++ b/src/test/mina_automation/archive_blocks.ml @@ -9,7 +9,7 @@ include Executor let of_context context = Executor.of_context ~context ~dune_name:"src/app/archive_blocks/archive_blocks.exe" - ~official_name:"/usr/local/bin/mina-archive-blocks" + ~official_name:"mina-archive-blocks" type format = Precomputed | Extensional diff --git a/src/test/mina_automation/executor.ml b/src/test/mina_automation/executor.ml index 2d52f940955..6fba161bf17 100644 --- a/src/test/mina_automation/executor.ml +++ b/src/test/mina_automation/executor.ml @@ -38,13 +38,23 @@ module Executor = struct let built_name t = Printf.sprintf "_build/default/%s" t.dune_name + let paths = + Option.value_map ~f:(String.split ~on:':') ~default:[] (Sys.getenv "PATH") + + let exists_at_path t prefix = + match%bind Sys.file_exists (prefix ^ "/" ^ t.official_name) with + | `Yes -> + Deferred.return (Some prefix) + | _ -> + Deferred.return None + let path t = match%bind Sys.file_exists (built_name t) with | `Yes -> Deferred.return (built_name t) | _ -> ( - match%bind Sys.file_exists t.official_name with - | `Yes -> + match%bind Deferred.List.find_map ~f:(exists_at_path t) paths with + | Some _ -> Deferred.return t.official_name | _ -> Deferred.return t.dune_name ) @@ -60,18 +70,7 @@ module Executor = struct ~metadata:[ ("app", `String (built_name t)) ] ; run_from_local t ~args ?env () | _ -> ( - let paths = - Option.value_map ~f:(String.split ~on:':') ~default:[] - (Sys.getenv "PATH") - in - let exists_at_path prefix = - match%bind Sys.file_exists (prefix ^ "/" ^ t.official_name) with - | `Yes -> - Deferred.return (Some prefix) - | _ -> - Deferred.return None - 
in - match%bind Deferred.List.find_map ~f:exists_at_path paths with + match%bind Deferred.List.find_map ~f:(exists_at_path t) paths with | Some prefix -> [%log debug] "running from %s" prefix ~metadata:[ ("app", `String t.official_name) ] ; diff --git a/src/test/mina_automation/extract_blocks.ml b/src/test/mina_automation/extract_blocks.ml index 7b05e9b00cb..bfb3ec9e2ae 100644 --- a/src/test/mina_automation/extract_blocks.ml +++ b/src/test/mina_automation/extract_blocks.ml @@ -55,6 +55,6 @@ end let of_context context = Executor.of_context ~context ~dune_name:"src/app/extract_blocks/extract_blocks.exe" - ~official_name:"/usr/local/bin/mina-extract-blocks" + ~official_name:"mina-extract-blocks" let run t ~config = run t ~args:(Config.to_args config) () diff --git a/src/test/mina_automation/missing_blocks_auditor.ml b/src/test/mina_automation/missing_blocks_auditor.ml index f28983711bb..4c6116a4c24 100644 --- a/src/test/mina_automation/missing_blocks_auditor.ml +++ b/src/test/mina_automation/missing_blocks_auditor.ml @@ -8,4 +8,4 @@ include Executor let of_context context = Executor.of_context ~context ~dune_name:"src/app/missing_blocks_auditor/missing_blocks_auditor.exe" - ~official_name:"/usr/local/bin/mina-missing-blocks-auditor" + ~official_name:"mina-missing-blocks-auditor" diff --git a/src/test/mina_automation/missing_blocks_guardian.ml b/src/test/mina_automation/missing_blocks_guardian.ml index 42bbf0ec844..b74ea406855 100644 --- a/src/test/mina_automation/missing_blocks_guardian.ml +++ b/src/test/mina_automation/missing_blocks_guardian.ml @@ -45,7 +45,7 @@ end let of_context context = Executor.of_context ~context ~dune_name:"scripts/archive/missing-blocks-guardian.sh" - ~official_name:"/etc/mina/archive/missing-blocks-guardian.sh" + ~official_name:"mina-missing-blocks-guardian" let run t ~config = run t ~args:(Config.to_args config) ~env:(Config.to_envs config) () diff --git a/src/test/mina_automation/replayer.ml b/src/test/mina_automation/replayer.ml index 
049f8071d95..fbdfe133d1c 100644 --- a/src/test/mina_automation/replayer.ml +++ b/src/test/mina_automation/replayer.ml @@ -68,7 +68,7 @@ include Executor let of_context context = Executor.of_context ~context ~dune_name:"src/app/replayer/replayer.exe" - ~official_name:"/usr/local/bin/mina-replayer" + ~official_name:"mina-replayer" let run t ~archive_uri ~input_config ~interval_checkpoint ?checkpoint_output_folder ?checkpoint_file_prefix ~output_ledger = From 7bfdc6afb1ff32e614ea48700b1e5632e63049c1 Mon Sep 17 00:00:00 2001 From: dkijania Date: Sun, 29 Sep 2024 20:23:13 +0200 Subject: [PATCH 02/29] fix PathArchiveTest --- buildkite/src/Jobs/Test/PatchArchiveTest.dhall | 2 +- buildkite/src/Jobs/Test/ReplayerTest.dhall | 2 +- src/dune-project | 1 + 3 files changed, 3 insertions(+), 2 deletions(-) diff --git a/buildkite/src/Jobs/Test/PatchArchiveTest.dhall b/buildkite/src/Jobs/Test/PatchArchiveTest.dhall index ccf4580774a..d9b23cf4659 100644 --- a/buildkite/src/Jobs/Test/PatchArchiveTest.dhall +++ b/buildkite/src/Jobs/Test/PatchArchiveTest.dhall @@ -19,7 +19,7 @@ let Dockers = ../../Constants/DockerVersions.dhall let dependsOn = Dockers.dependsOn Dockers.Type.Bullseye - Network.Type.Devnet + (Some Network.Type.Devnet) Profiles.Type.Standard Artifacts.Type.FunctionalTestSuite diff --git a/buildkite/src/Jobs/Test/ReplayerTest.dhall b/buildkite/src/Jobs/Test/ReplayerTest.dhall index 24ce7acf7e1..0e3d665e2ce 100644 --- a/buildkite/src/Jobs/Test/ReplayerTest.dhall +++ b/buildkite/src/Jobs/Test/ReplayerTest.dhall @@ -19,7 +19,7 @@ let Artifacts = ../../Constants/Artifacts.dhall let dependsOn = Dockers.dependsOn Dockers.Type.Bullseye - Network.Type.Devnet + (None Network.Type) Profiles.Type.Standard Artifacts.Type.Archive diff --git a/src/dune-project b/src/dune-project index 91a68888957..766a8cc514a 100644 --- a/src/dune-project +++ b/src/dune-project @@ -136,6 +136,7 @@ (package (name parallel_scan)) (package (name participating_state)) (package (name pasta_bindings)) 
+(package (name patch_archive_test)) (package (name perf_histograms)) (package (name pickles_base)) (package (name pickles)) From 32a2f845fdfd18b9c2795e8887639d8a09636a9a Mon Sep 17 00:00:00 2001 From: dkijania Date: Sun, 29 Sep 2024 21:34:05 +0200 Subject: [PATCH 03/29] fix deps --- buildkite/src/Command/MinaArtifact.dhall | 8 ++++---- buildkite/src/Jobs/Test/PatchArchiveTest.dhall | 2 +- 2 files changed, 5 insertions(+), 5 deletions(-) diff --git a/buildkite/src/Command/MinaArtifact.dhall b/buildkite/src/Command/MinaArtifact.dhall index 28793fb3d22..52d4e1f7171 100644 --- a/buildkite/src/Command/MinaArtifact.dhall +++ b/buildkite/src/Command/MinaArtifact.dhall @@ -250,10 +250,10 @@ let docker_step , deb_repo = DebianRepo.Type.Local , deb_profile = spec.profile , step_key = - "test-suite-${DebianVersions.lowerName - spec.debVersion}${Profiles.toLabelSegment - spec.profile}${BuildFlags.toLabelSegment - spec.buildFlags}--docker-image" + "functional-test-suite-${DebianVersions.lowerName + spec.debVersion}${Profiles.toLabelSegment + spec.profile}${BuildFlags.toLabelSegment + spec.buildFlags}-docker-image" , network = "berkeley" } ] diff --git a/buildkite/src/Jobs/Test/PatchArchiveTest.dhall b/buildkite/src/Jobs/Test/PatchArchiveTest.dhall index d9b23cf4659..e6b468d843b 100644 --- a/buildkite/src/Jobs/Test/PatchArchiveTest.dhall +++ b/buildkite/src/Jobs/Test/PatchArchiveTest.dhall @@ -19,7 +19,7 @@ let Dockers = ../../Constants/DockerVersions.dhall let dependsOn = Dockers.dependsOn Dockers.Type.Bullseye - (Some Network.Type.Devnet) + (None Network.Type) Profiles.Type.Standard Artifacts.Type.FunctionalTestSuite From 9ede8ea650c003499919636c2f3665bfbd4afd1c Mon Sep 17 00:00:00 2001 From: dkijania Date: Sun, 29 Sep 2024 22:23:43 +0200 Subject: [PATCH 04/29] fix hypen do underscore --- buildkite/src/Command/MinaArtifact.dhall | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/buildkite/src/Command/MinaArtifact.dhall b/buildkite/src/Command/MinaArtifact.dhall 
index 52d4e1f7171..7f8108e15eb 100644 --- a/buildkite/src/Command/MinaArtifact.dhall +++ b/buildkite/src/Command/MinaArtifact.dhall @@ -250,7 +250,7 @@ let docker_step , deb_repo = DebianRepo.Type.Local , deb_profile = spec.profile , step_key = - "functional-test-suite-${DebianVersions.lowerName + "functional_test_suite-${DebianVersions.lowerName spec.debVersion}${Profiles.toLabelSegment spec.profile}${BuildFlags.toLabelSegment spec.buildFlags}-docker-image" From ffe4f33cd4b846735fc7a9b89bfd4d3dc748ae63 Mon Sep 17 00:00:00 2001 From: dkijania Date: Mon, 30 Sep 2024 10:31:50 +0200 Subject: [PATCH 05/29] publish mina-patch-archive-test --- scripts/debian/builder-helpers.sh | 3 ++- 1 file changed, 2 insertions(+), 1 deletion(-) diff --git a/scripts/debian/builder-helpers.sh b/scripts/debian/builder-helpers.sh index 0e34178731d..bb0b03c04dd 100755 --- a/scripts/debian/builder-helpers.sh +++ b/scripts/debian/builder-helpers.sh @@ -241,7 +241,8 @@ build_functional_test_suite_deb() { # Binaries cp ./default/src/test/command_line_tests/command_line_tests.exe "${BUILDDIR}/usr/local/bin/mina-command-line-tests" - + cp ./default/src/test/patch_archive_tests/patch_archive_tests.exe "${BUILDDIR}/usr/local/bin/mina-patch-archive-tests" + build_deb mina-test-suite } From 7013d231850b24b26ce68cfd0f3e411c417647d2 Mon Sep 17 00:00:00 2001 From: dkijania Date: Mon, 30 Sep 2024 14:05:30 +0200 Subject: [PATCH 06/29] fix name for patch_archive_test --- scripts/debian/builder-helpers.sh | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/scripts/debian/builder-helpers.sh b/scripts/debian/builder-helpers.sh index bb0b03c04dd..b98764d0da9 100755 --- a/scripts/debian/builder-helpers.sh +++ b/scripts/debian/builder-helpers.sh @@ -241,7 +241,7 @@ build_functional_test_suite_deb() { # Binaries cp ./default/src/test/command_line_tests/command_line_tests.exe "${BUILDDIR}/usr/local/bin/mina-command-line-tests" - cp ./default/src/test/patch_archive_tests/patch_archive_tests.exe 
"${BUILDDIR}/usr/local/bin/mina-patch-archive-tests" + cp ./default/src/test/archive/patch_archive_test/patch_archive_test.exe "${BUILDDIR}/usr/local/bin/mina-patch-archive-test" build_deb mina-test-suite From ccb0c5ebf8445212768238154d7e62c1904d896c Mon Sep 17 00:00:00 2001 From: dkijania Date: Mon, 30 Sep 2024 17:49:54 +0200 Subject: [PATCH 07/29] Fix missing block guardian --- src/test/mina_automation/missing_blocks_guardian.ml | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/src/test/mina_automation/missing_blocks_guardian.ml b/src/test/mina_automation/missing_blocks_guardian.ml index b74ea406855..42bbf0ec844 100644 --- a/src/test/mina_automation/missing_blocks_guardian.ml +++ b/src/test/mina_automation/missing_blocks_guardian.ml @@ -45,7 +45,7 @@ end let of_context context = Executor.of_context ~context ~dune_name:"scripts/archive/missing-blocks-guardian.sh" - ~official_name:"mina-missing-blocks-guardian" + ~official_name:"/etc/mina/archive/missing-blocks-guardian.sh" let run t ~config = run t ~args:(Config.to_args config) ~env:(Config.to_envs config) () From bc9370c606585e93afac2bf6217881738d9578cf Mon Sep 17 00:00:00 2001 From: dkijania Date: Tue, 1 Oct 2024 23:25:33 +0200 Subject: [PATCH 08/29] use missing_blocks_guardian as binary --- scripts/debian/builder-helpers.sh | 2 +- src/test/mina_automation/missing_blocks_guardian.ml | 2 +- 2 files changed, 2 insertions(+), 2 deletions(-) diff --git a/scripts/debian/builder-helpers.sh b/scripts/debian/builder-helpers.sh index b98764d0da9..e3d88c562d0 100755 --- a/scripts/debian/builder-helpers.sh +++ b/scripts/debian/builder-helpers.sh @@ -365,7 +365,7 @@ build_archive_deb () { cp ./default/src/app/extract_blocks/extract_blocks.exe "${BUILDDIR}/usr/local/bin/mina-extract-blocks" mkdir -p "${BUILDDIR}/etc/mina/archive" - cp ../scripts/archive/missing-blocks-guardian.sh "${BUILDDIR}/etc/mina/archive" + cp ../scripts/archive/missing-blocks-guardian.sh 
"${BUILDDIR}/usr/local/bin/mina-missing-blocks-guardian" cp ./default/src/app/missing_blocks_auditor/missing_blocks_auditor.exe "${BUILDDIR}/usr/local/bin/mina-missing-blocks-auditor" cp ./default/src/app/replayer/replayer.exe "${BUILDDIR}/usr/local/bin/mina-replayer" diff --git a/src/test/mina_automation/missing_blocks_guardian.ml b/src/test/mina_automation/missing_blocks_guardian.ml index 42bbf0ec844..b74ea406855 100644 --- a/src/test/mina_automation/missing_blocks_guardian.ml +++ b/src/test/mina_automation/missing_blocks_guardian.ml @@ -45,7 +45,7 @@ end let of_context context = Executor.of_context ~context ~dune_name:"scripts/archive/missing-blocks-guardian.sh" - ~official_name:"/etc/mina/archive/missing-blocks-guardian.sh" + ~official_name:"mina-missing-blocks-guardian" let run t ~config = run t ~args:(Config.to_args config) ~env:(Config.to_envs config) () From 3cf8a37a715a9e4caadb25562f9c458c22e41326 Mon Sep 17 00:00:00 2001 From: dkijania Date: Wed, 2 Oct 2024 10:41:54 +0200 Subject: [PATCH 09/29] do not remove last block before patching --- src/test/archive/patch_archive_test/patch_archive_test.ml | 5 ++++- 1 file changed, 4 insertions(+), 1 deletion(-) diff --git a/src/test/archive/patch_archive_test/patch_archive_test.ml b/src/test/archive/patch_archive_test/patch_archive_test.ml index 8d0b2f91a67..5ba17608041 100644 --- a/src/test/archive/patch_archive_test/patch_archive_test.ml +++ b/src/test/archive/patch_archive_test/patch_archive_test.ml @@ -77,7 +77,10 @@ let main ~db_uri ~network_data_folder () = let n = List.init missing_blocks_count ~f:(fun _ -> - Random.int (List.length extensional_files) ) + (* never remove last block as missing-block-guardian can have issues when patching it + as it patching only gaps + *) + Random.int (List.length extensional_files - 1) ) in let unpatched_extensional_files = From 91df06cec12b5041b6d2770cbb6c1d381216f666 Mon Sep 17 00:00:00 2001 From: dkijania Date: Wed, 2 Oct 2024 16:58:03 +0200 Subject: [PATCH 10/29] remove 
duplicated package definition --- src/dune-project | 1 - 1 file changed, 1 deletion(-) diff --git a/src/dune-project b/src/dune-project index 67f3aec652f..3c4d5463c4c 100644 --- a/src/dune-project +++ b/src/dune-project @@ -137,7 +137,6 @@ (package (name participating_state)) (package (name patch_archive_test)) (package (name pasta_bindings)) -(package (name patch_archive_test)) (package (name perf_histograms)) (package (name pickles_base)) (package (name pickles)) From 2ba4e3dd6f86f91683b42f5ba12bd4869cb37b89 Mon Sep 17 00:00:00 2001 From: svv232 Date: Tue, 17 Sep 2024 21:24:05 -0400 Subject: [PATCH 11/29] refactoring delete in capnp proto --- src/libp2p_ipc/libp2p_ipc.capnp | 5 +++-- 1 file changed, 3 insertions(+), 2 deletions(-) diff --git a/src/libp2p_ipc/libp2p_ipc.capnp b/src/libp2p_ipc/libp2p_ipc.capnp index 8e603d46178..fca0b6b1139 100644 --- a/src/libp2p_ipc/libp2p_ipc.capnp +++ b/src/libp2p_ipc/libp2p_ipc.capnp @@ -332,7 +332,7 @@ struct Libp2pHelperInterface { result @1 :ValidationResult; } - struct DeleteResource { + struct RemoveResource { ids @0 :List(RootBlockId); } @@ -420,7 +420,7 @@ struct Libp2pHelperInterface { union { validation @1 :Libp2pHelperInterface.Validation; addResource @2 :Libp2pHelperInterface.AddResource; - deleteResource @3 :Libp2pHelperInterface.DeleteResource; + removeResource @3 :Libp2pHelperInterface.RemoveResource; downloadResource @4 :Libp2pHelperInterface.DownloadResource; heartbeatPeer @5 :Libp2pHelperInterface.HeartbeatPeer; } @@ -475,6 +475,7 @@ struct DaemonInterface { struct ResourceUpdate { type @0 :ResourceUpdateType; ids @1 :List(RootBlockId); + tag @2 :UInt8; } struct PushMessage { From 3139f54233aeac01080f7aae36cc3962768bf05c Mon Sep 17 00:00:00 2001 From: svv232 Date: Tue, 17 Sep 2024 21:27:02 -0400 Subject: [PATCH 12/29] new interfaces for bitswap message commands --- src/libp2p_ipc/libp2p_ipc.mli | 7 ++++++- 1 file changed, 6 insertions(+), 1 deletion(-) diff --git a/src/libp2p_ipc/libp2p_ipc.mli 
b/src/libp2p_ipc/libp2p_ipc.mli index bdb960ffaf8..99e9c75acaa 100644 --- a/src/libp2p_ipc/libp2p_ipc.mli +++ b/src/libp2p_ipc/libp2p_ipc.mli @@ -36,7 +36,7 @@ module Subscription_id : sig val create : unit -> t end -val undefined_union : context:string -> int -> unit +val undefined_union : context:string -> int -> 'a val unsafe_parse_peer_id : peer_id -> Peer.Id.t @@ -97,6 +97,11 @@ val create_validation_push_message : val create_add_resource_push_message : tag:int -> data:string -> push_message +val create_download_resource_push_message : + tag:int -> ids:string list -> push_message + +val create_remove_resource_push_message : ids:string list -> push_message + val create_heartbeat_peer_push_message : peer_id:Peer.Id.t -> push_message val push_message_to_outgoing_message : push_message -> outgoing_message From 79c8f569faf437915a22c3ff5555f2086fb26122 Mon Sep 17 00:00:00 2001 From: svv232 Date: Tue, 17 Sep 2024 21:27:43 -0400 Subject: [PATCH 13/29] adding message definitions from bitswap pr --- src/libp2p_ipc/libp2p_ipc.ml | 34 ++++++++++++++++++++++++++++++++++ 1 file changed, 34 insertions(+) diff --git a/src/libp2p_ipc/libp2p_ipc.ml b/src/libp2p_ipc/libp2p_ipc.ml index 6258b436f34..d66bef5b895 100644 --- a/src/libp2p_ipc/libp2p_ipc.ml +++ b/src/libp2p_ipc/libp2p_ipc.ml @@ -265,6 +265,40 @@ let push_message_to_outgoing_message request = Builder.Libp2pHelperInterface.Message.( builder_op push_message_set_builder request) + let create_remove_resource_push_message ~ids = + let ids = + List.map ids ~f:(fun id -> + build' + (module Builder.RootBlockId) + Builder.RootBlockId.(op blake2b_hash_set id) ) + in + build' + (module Builder.Libp2pHelperInterface.PushMessage) + Builder.Libp2pHelperInterface.PushMessage.( + builder_op header_set_builder (create_push_message_header ()) + *> reader_op remove_resource_set_reader + (build + (module Builder.Libp2pHelperInterface.RemoveResource) + Builder.Libp2pHelperInterface.RemoveResource.( + list_op ids_set_list ids) )) + + let 
create_download_resource_push_message ~tag ~ids = + let ids = + List.map ids ~f:(fun id -> + build' + (module Builder.RootBlockId) + Builder.RootBlockId.(op blake2b_hash_set id) ) + in + build' + (module Builder.Libp2pHelperInterface.PushMessage) + Builder.Libp2pHelperInterface.PushMessage.( + builder_op header_set_builder (create_push_message_header ()) + *> reader_op download_resource_set_reader + (build + (module Builder.Libp2pHelperInterface.DownloadResource) + Builder.Libp2pHelperInterface.DownloadResource.( + op tag_set_exn tag *> list_op ids_set_list ids) )) + let create_add_resource_push_message ~tag ~data = build' (module Builder.Libp2pHelperInterface.PushMessage) From c0032ec1ef8acafea7e9100561ba681478272584 Mon Sep 17 00:00:00 2001 From: svv232 Date: Tue, 17 Sep 2024 21:28:21 -0400 Subject: [PATCH 14/29] fmt --- src/libp2p_ipc/libp2p_ipc.ml | 62 ++++++++++++++++++------------------ 1 file changed, 31 insertions(+), 31 deletions(-) diff --git a/src/libp2p_ipc/libp2p_ipc.ml b/src/libp2p_ipc/libp2p_ipc.ml index d66bef5b895..d1f0d40f81b 100644 --- a/src/libp2p_ipc/libp2p_ipc.ml +++ b/src/libp2p_ipc/libp2p_ipc.ml @@ -265,39 +265,39 @@ let push_message_to_outgoing_message request = Builder.Libp2pHelperInterface.Message.( builder_op push_message_set_builder request) - let create_remove_resource_push_message ~ids = - let ids = - List.map ids ~f:(fun id -> - build' - (module Builder.RootBlockId) - Builder.RootBlockId.(op blake2b_hash_set id) ) - in +let create_remove_resource_push_message ~ids = + let ids = + List.map ids ~f:(fun id -> build' - (module Builder.Libp2pHelperInterface.PushMessage) - Builder.Libp2pHelperInterface.PushMessage.( - builder_op header_set_builder (create_push_message_header ()) - *> reader_op remove_resource_set_reader - (build - (module Builder.Libp2pHelperInterface.RemoveResource) - Builder.Libp2pHelperInterface.RemoveResource.( - list_op ids_set_list ids) )) - - let create_download_resource_push_message ~tag ~ids = - let ids = - 
List.map ids ~f:(fun id -> - build' - (module Builder.RootBlockId) - Builder.RootBlockId.(op blake2b_hash_set id) ) - in + (module Builder.RootBlockId) + Builder.RootBlockId.(op blake2b_hash_set id) ) + in + build' + (module Builder.Libp2pHelperInterface.PushMessage) + Builder.Libp2pHelperInterface.PushMessage.( + builder_op header_set_builder (create_push_message_header ()) + *> reader_op remove_resource_set_reader + (build + (module Builder.Libp2pHelperInterface.RemoveResource) + Builder.Libp2pHelperInterface.RemoveResource.( + list_op ids_set_list ids) )) + +let create_download_resource_push_message ~tag ~ids = + let ids = + List.map ids ~f:(fun id -> build' - (module Builder.Libp2pHelperInterface.PushMessage) - Builder.Libp2pHelperInterface.PushMessage.( - builder_op header_set_builder (create_push_message_header ()) - *> reader_op download_resource_set_reader - (build - (module Builder.Libp2pHelperInterface.DownloadResource) - Builder.Libp2pHelperInterface.DownloadResource.( - op tag_set_exn tag *> list_op ids_set_list ids) )) + (module Builder.RootBlockId) + Builder.RootBlockId.(op blake2b_hash_set id) ) + in + build' + (module Builder.Libp2pHelperInterface.PushMessage) + Builder.Libp2pHelperInterface.PushMessage.( + builder_op header_set_builder (create_push_message_header ()) + *> reader_op download_resource_set_reader + (build + (module Builder.Libp2pHelperInterface.DownloadResource) + Builder.Libp2pHelperInterface.DownloadResource.( + op tag_set_exn tag *> list_op ids_set_list ids) )) let create_add_resource_push_message ~tag ~data = build' From 716b250f7c795ed0810712072544d67ebef37559 Mon Sep 17 00:00:00 2001 From: svv232 Date: Tue, 17 Sep 2024 21:37:51 -0400 Subject: [PATCH 15/29] fixing undefined union call after type signature change --- src/lib/mina_net2/libp2p_helper.ml | 3 +-- 1 file changed, 1 insertion(+), 2 deletions(-) diff --git a/src/lib/mina_net2/libp2p_helper.ml b/src/lib/mina_net2/libp2p_helper.ml index 2ef08749016..116a85be3c1 100644 --- 
a/src/lib/mina_net2/libp2p_helper.ml +++ b/src/lib/mina_net2/libp2p_helper.ml @@ -221,8 +221,7 @@ let handle_incoming_message t msg ~handle_push_message = handle_push_message t (DaemonInterface.PushMessage.get push_msg) ) ) | Undefined n -> - Libp2p_ipc.undefined_union ~context:"DaemonInterface.Message" n ; - Deferred.unit + Libp2p_ipc.undefined_union ~context:"DaemonInterface.Message" n let spawn ?(allow_multiple_instances = false) ~logger ~pids ~conf_dir ~handle_push_message () = From b2796f728800a12c6d4eb58bc58b7f32fe71733c Mon Sep 17 00:00:00 2001 From: svv232 Date: Tue, 17 Sep 2024 21:56:31 -0400 Subject: [PATCH 16/29] refactoring delete to remove --- .../src/libp2p_helper/bitswap_msg.go | 16 ++++++++-------- .../src/libp2p_helper/bitswap_test.go | 8 ++++---- .../src/libp2p_helper/incoming_msg.go | 2 +- 3 files changed, 13 insertions(+), 13 deletions(-) diff --git a/src/app/libp2p_helper/src/libp2p_helper/bitswap_msg.go b/src/app/libp2p_helper/src/libp2p_helper/bitswap_msg.go index ab6f18ec140..d41e67a1306 100644 --- a/src/app/libp2p_helper/src/libp2p_helper/bitswap_msg.go +++ b/src/app/libp2p_helper/src/libp2p_helper/bitswap_msg.go @@ -27,12 +27,12 @@ func (m AddResourcePush) handle(app *app) { } } -type DeleteResourcePushT = ipc.Libp2pHelperInterface_DeleteResource -type DeleteResourcePush DeleteResourcePushT +type RemoveResourcePushT = ipc.Libp2pHelperInterface_RemoveResource +type RemoveResourcePush RemoveResourcePushT -func fromDeleteResourcePush(m ipcPushMessage) (pushMessage, error) { - i, err := m.DeleteResource() - return DeleteResourcePush(i), err +func fromRemoveResourcePush(m ipcPushMessage) (pushMessage, error) { + i, err := m.RemoveResource() + return RemoveResourcePush(i), err } func extractRootBlockList(l ipc.RootBlockId_List) ([]root, error) { @@ -52,14 +52,14 @@ func extractRootBlockList(l ipc.RootBlockId_List) ([]root, error) { return ids, nil } -func (m DeleteResourcePush) handle(app *app) { - idsM, err := DeleteResourcePushT(m).Ids() +func 
(m RemoveResourcePush) handle(app *app) { + idsM, err := RemoveResourcePushT(m).Ids() var links []root if err == nil { links, err = extractRootBlockList(idsM) } if err != nil { - app.P2p.Logger.Errorf("DeleteResourcePush.handle: error %s", err) + app.P2p.Logger.Errorf("RemoveResourcePush.handle: error %s", err) return } app.bitswapCtx.deleteCmds <- bitswapDeleteCmd{links} diff --git a/src/app/libp2p_helper/src/libp2p_helper/bitswap_test.go b/src/app/libp2p_helper/src/libp2p_helper/bitswap_test.go index ec96f2ccd67..5af46623601 100644 --- a/src/app/libp2p_helper/src/libp2p_helper/bitswap_test.go +++ b/src/app/libp2p_helper/src/libp2p_helper/bitswap_test.go @@ -71,12 +71,12 @@ func getRootIds(ids ipc.RootBlockId_List) ([]BitswapBlockLink, error) { return links, nil } -func deleteResource(n testNode, root root) error { +func removeResource(n testNode, root root) error { _, seg, err := capnp.NewMessage(capnp.SingleSegment(nil)) if err != nil { return err } - m, err := ipc.NewRootLibp2pHelperInterface_DeleteResource(seg) + m, err := ipc.NewRootLibp2pHelperInterface_RemoveResource(seg) if err != nil { return err } @@ -88,7 +88,7 @@ func deleteResource(n testNode, root root) error { if err != nil { return err } - DeleteResourcePush(m).handle(n.node) + RemoveResourcePush(m).handle(n.node) return nil } @@ -393,7 +393,7 @@ func (conf bitswapTestConfig) execute(nodes []testNode, delayBeforeDownload bool if !resourceReplicated[ni] { continue } - err = deleteResource(nodes[ni], roots[ni]) + err = removeResource(nodes[ni], roots[ni]) if err != nil { return fmt.Errorf("Error removing own resources: %v", err) } diff --git a/src/app/libp2p_helper/src/libp2p_helper/incoming_msg.go b/src/app/libp2p_helper/src/libp2p_helper/incoming_msg.go index a4472c443c4..d7e7c0f88dc 100644 --- a/src/app/libp2p_helper/src/libp2p_helper/incoming_msg.go +++ b/src/app/libp2p_helper/src/libp2p_helper/incoming_msg.go @@ -34,7 +34,7 @@ var rpcRequestExtractors = 
map[ipc.Libp2pHelperInterface_RpcRequest_Which]extrac var pushMesssageExtractors = map[ipc.Libp2pHelperInterface_PushMessage_Which]extractPushMessage{ ipc.Libp2pHelperInterface_PushMessage_Which_addResource: fromAddResourcePush, - ipc.Libp2pHelperInterface_PushMessage_Which_deleteResource: fromDeleteResourcePush, + ipc.Libp2pHelperInterface_PushMessage_Which_removeResource: fromRemoveResourcePush, ipc.Libp2pHelperInterface_PushMessage_Which_downloadResource: fromDownloadResourcePush, ipc.Libp2pHelperInterface_PushMessage_Which_validation: fromValidationPush, ipc.Libp2pHelperInterface_PushMessage_Which_heartbeatPeer: fromHeartbeatPeerPush, From 53bef580dc9f3fcc202a96dbe624eedd3ac778df Mon Sep 17 00:00:00 2001 From: georgeee Date: Mon, 14 Oct 2024 13:45:38 +0000 Subject: [PATCH 17/29] Use tag in Go's part of IPC Fix sending resource update to provide tag accourding to the new Capnproto interface. --- .../src/libp2p_helper/bitswap.go | 71 +++------------ .../src/libp2p_helper/bitswap_delete.go | 87 +++++++++++++++++++ .../src/libp2p_helper/bitswap_downloader.go | 10 +-- .../libp2p_helper/src/libp2p_helper/msg.go | 3 +- 4 files changed, 106 insertions(+), 65 deletions(-) create mode 100644 src/app/libp2p_helper/src/libp2p_helper/bitswap_delete.go diff --git a/src/app/libp2p_helper/src/libp2p_helper/bitswap.go b/src/app/libp2p_helper/src/libp2p_helper/bitswap.go index f12063ed48b..8fd76b2a7aa 100644 --- a/src/app/libp2p_helper/src/libp2p_helper/bitswap.go +++ b/src/app/libp2p_helper/src/libp2p_helper/bitswap.go @@ -12,7 +12,6 @@ import ( blocks "github.com/ipfs/go-block-format" "github.com/ipfs/go-cid" exchange "github.com/ipfs/go-ipfs-exchange-interface" - ipld "github.com/ipfs/go-ipld-format" ) type bitswapDeleteCmd struct { @@ -91,62 +90,14 @@ func announceNewRootBlock(ctx context.Context, engine *bitswap.Bitswap, storage return storage.SetStatus(ctx, root, codanet.Full) } -func (bs *BitswapCtx) deleteRoot(root BitswapBlockLink) error { - if err := 
bs.storage.SetStatus(bs.ctx, root, codanet.Deleting); err != nil { - return err - } - ClearRootDownloadState(bs, root) - allDescendants := []BitswapBlockLink{root} - viewBlockF := func(b []byte) error { - links, _, err := ReadBitswapBlock(b) - if err == nil { - for _, l := range links { - var l2 BitswapBlockLink - copy(l2[:], l[:]) - allDescendants = append(allDescendants, l2) - } - } - return err - } - for _, block := range allDescendants { - if err := bs.storage.ViewBlock(bs.ctx, block, viewBlockF); err != nil && err != (ipld.ErrNotFound{Cid: codanet.BlockHashToCid(block)}) { - return err - } - } - if err := bs.storage.DeleteBlocks(bs.ctx, allDescendants); err != nil { - return err - } - return bs.storage.DeleteStatus(bs.ctx, root) -} - -func ClearRootDownloadState(bs BitswapState, root root) { - rootStates := bs.RootDownloadStates() - state, has := rootStates[root] - if !has { - return - } - nodeParams := bs.NodeDownloadParams() - delete(rootStates, root) - state.allDescendants.ForEach(func(c cid.Cid) error { - np, hasNp := nodeParams[c] - if hasNp { - delete(np, root) - if len(np) == 0 { - delete(nodeParams, c) - } - } - return nil - }) - state.cancelF() +func (bs *BitswapCtx) SendResourceUpdate(type_ ipc.ResourceUpdateType, tag BitswapDataTag, root root) { + bs.SendResourceUpdates(type_, tag, root) } -func (bs *BitswapCtx) SendResourceUpdate(type_ ipc.ResourceUpdateType, root root) { - bs.SendResourceUpdates(type_, root) -} -func (bs *BitswapCtx) SendResourceUpdates(type_ ipc.ResourceUpdateType, roots ...root) { +func (bs *BitswapCtx) SendResourceUpdates(type_ ipc.ResourceUpdateType, tag BitswapDataTag, roots ...root) { // Non-blocking upcall sending select { - case bs.outMsgChan <- mkResourceUpdatedUpcall(type_, roots): + case bs.outMsgChan <- mkResourceUpdatedUpcall(type_, tag, roots): default: for _, root := range roots { bitswapLogger.Errorf("Failed to send resource update of type %d"+ @@ -242,25 +193,27 @@ func (bs *BitswapCtx) Loop() { 
ClearRootDownloadState(bs, root) case cmd := <-bs.addCmds: configuredCheck() - blocks, root := SplitDataToBitswapBlocksLengthPrefixedWithTag(bs.maxBlockSize, cmd.data, BlockBodyTag) + blocks, root := SplitDataToBitswapBlocksLengthPrefixedWithTag(bs.maxBlockSize, cmd.data, cmd.tag) err := announceNewRootBlock(bs.ctx, bs.engine, bs.storage, blocks, root) if err == nil { - bs.SendResourceUpdate(ipc.ResourceUpdateType_added, root) + bs.SendResourceUpdate(ipc.ResourceUpdateType_added, cmd.tag, root) } else { bitswapLogger.Errorf("Failed to announce root cid %s (%s)", codanet.BlockHashToCidSuffix(root), err) } case cmd := <-bs.deleteCmds: configuredCheck() - success := []root{} + success := map[BitswapDataTag][]root{} for _, root := range cmd.rootIds { - err := bs.deleteRoot(root) + tag, err := DeleteRoot(bs, root) if err == nil { - success = append(success, root) + success[tag] = append(success[tag], root) } else { bitswapLogger.Errorf("Error processing delete request for %s: %s", codanet.BlockHashToCidSuffix(root), err) } } - bs.SendResourceUpdates(ipc.ResourceUpdateType_removed, success...) + for tag, roots := range success { + bs.SendResourceUpdates(ipc.ResourceUpdateType_removed, tag, roots...) 
+ } case cmd := <-bs.downloadCmds: configuredCheck() // We put all ids to map to avoid diff --git a/src/app/libp2p_helper/src/libp2p_helper/bitswap_delete.go b/src/app/libp2p_helper/src/libp2p_helper/bitswap_delete.go new file mode 100644 index 00000000000..16660d1ceb5 --- /dev/null +++ b/src/app/libp2p_helper/src/libp2p_helper/bitswap_delete.go @@ -0,0 +1,87 @@ +package main + +import ( + "codanet" + "errors" + + "github.com/ipfs/go-cid" + ipld "github.com/ipfs/go-ipld-format" +) + +func ClearRootDownloadState(bs BitswapState, root root) { + rootStates := bs.RootDownloadStates() + state, has := rootStates[root] + if !has { + return + } + nodeParams := bs.NodeDownloadParams() + delete(rootStates, root) + state.allDescendants.ForEach(func(c cid.Cid) error { + np, hasNp := nodeParams[c] + if hasNp { + delete(np, root) + if len(np) == 0 { + delete(nodeParams, c) + } + } + return nil + }) + state.cancelF() +} + +func DeleteRoot(bs BitswapState, root BitswapBlockLink) (BitswapDataTag, error) { + if err := bs.SetStatus(root, codanet.Deleting); err != nil { + return 255, err + } + var tag BitswapDataTag + { + // Determining tag of root being deleted + state, has := bs.RootDownloadStates()[root] + if has { + tag = state.getTag() + } else { + err := bs.ViewBlock(root, func(b []byte) error { + _, fullBlockData, err := ReadBitswapBlock(b) + if err != nil { + return err + } + if len(fullBlockData) < 5 { + return errors.New("root block is too short") + } + tag = BitswapDataTag(fullBlockData[4]) + return nil + }) + if err != nil { + return 255, err + } + } + } + ClearRootDownloadState(bs, root) + descendantMap := map[[32]byte]struct{}{root: {}} + allDescendants := []BitswapBlockLink{root} + viewBlockF := func(b []byte) error { + links, _, err := ReadBitswapBlock(b) + if err == nil { + for _, l := range links { + var l2 BitswapBlockLink + copy(l2[:], l[:]) + _, has := descendantMap[l2] + if !has { + descendantMap[l2] = struct{}{} + allDescendants = append(allDescendants, l2) + } 
+ } + } + return err + } + for i := 0; i < len(allDescendants); i++ { + block := allDescendants[i] + if err := bs.ViewBlock(block, viewBlockF); err != nil && err != (ipld.ErrNotFound{Cid: codanet.BlockHashToCid(block)}) { + return tag, err + } + } + if err := bs.DeleteBlocks(allDescendants); err != nil { + return tag, err + } + return tag, bs.DeleteStatus(root) +} diff --git a/src/app/libp2p_helper/src/libp2p_helper/bitswap_downloader.go b/src/app/libp2p_helper/src/libp2p_helper/bitswap_downloader.go index 8273e02a5bc..201af79abec 100644 --- a/src/app/libp2p_helper/src/libp2p_helper/bitswap_downloader.go +++ b/src/app/libp2p_helper/src/libp2p_helper/bitswap_downloader.go @@ -86,7 +86,7 @@ type BitswapState interface { DepthIndices() DepthIndices NewSession(downloadTimeout time.Duration) (BlockRequester, context.CancelFunc) RegisterDeadlineTracker(root, time.Duration) - SendResourceUpdate(type_ ipc.ResourceUpdateType, root root) + SendResourceUpdate(type_ ipc.ResourceUpdateType, tag BitswapDataTag, root root) CheckInvariants() } @@ -109,7 +109,7 @@ func kickStartRootDownload(root_ BitswapBlockLink, tag BitswapDataTag, bs Bitswa bitswapLogger.Debugf("Skipping download request for %s due to status: %s", codanet.BlockHashToCidSuffix(root_), err) status, err := bs.GetStatus(root_) if err == nil && status == codanet.Full { - bs.SendResourceUpdate(ipc.ResourceUpdateType_added, root_) + bs.SendResourceUpdate(ipc.ResourceUpdateType_added, tag, root_) } return } @@ -280,8 +280,8 @@ func processDownloadedBlock(block blocks.Block, bs BitswapState) { newParams, malformed := processDownloadedBlockStep(oldPs, block, rps, bs.MaxBlockSize(), depthIndices, bs.DataConfig()) for root, err := range malformed { bitswapLogger.Warnf("Block %s of root %s is malformed: %s", id, codanet.BlockHashToCidSuffix(root), err) - ClearRootDownloadState(bs, root) - bs.SendResourceUpdate(ipc.ResourceUpdateType_broken, root) + DeleteRoot(bs, root) + bs.SendResourceUpdate(ipc.ResourceUpdateType_broken, 
rps[root].getTag(), root) } blocksToProcess := make([]blocks.Block, 0) @@ -338,7 +338,7 @@ func processDownloadedBlock(block blocks.Block, bs BitswapState) { bitswapLogger.Warnf("Failed to update status of fully downloaded root %s: %s", root, err) } ClearRootDownloadState(bs, root) - bs.SendResourceUpdate(ipc.ResourceUpdateType_added, root) + bs.SendResourceUpdate(ipc.ResourceUpdateType_added, rootState.tag, root) } } for _, b := range blocksToProcess { diff --git a/src/app/libp2p_helper/src/libp2p_helper/msg.go b/src/app/libp2p_helper/src/libp2p_helper/msg.go index 053bbd64062..680b76bd487 100644 --- a/src/app/libp2p_helper/src/libp2p_helper/msg.go +++ b/src/app/libp2p_helper/src/libp2p_helper/msg.go @@ -395,7 +395,7 @@ func mkStreamMessageReceivedUpcall(streamIdx uint64, data []byte) *capnp.Message }) } -func mkResourceUpdatedUpcall(type_ ipc.ResourceUpdateType, rootIds []root) *capnp.Message { +func mkResourceUpdatedUpcall(type_ ipc.ResourceUpdateType, tag BitswapDataTag, rootIds []root) *capnp.Message { return mkPushMsg(func(m ipc.DaemonInterface_PushMessage) { im, err := m.NewResourceUpdated() panicOnErr(err) @@ -403,6 +403,7 @@ func mkResourceUpdatedUpcall(type_ ipc.ResourceUpdateType, rootIds []root) *capn panic("too many root ids in a single upcall") } im.SetType(type_) + im.SetTag(uint8(tag)) mIds, err := im.NewIds(int32(len(rootIds))) panicOnErr(err) for i, rootId := range rootIds { From 0a7f1fc98c64c246fcad13a478046f79ede5b0a2 Mon Sep 17 00:00:00 2001 From: dkijania Date: Tue, 15 Oct 2024 09:49:10 +0200 Subject: [PATCH 18/29] WIP --- buildkite/src/Command/Base.dhall | 1 + buildkite/src/Command/Bench/Base.dhall | 96 ++++++++++++++++++++++++ buildkite/src/Command/Size.dhall | 2 +- buildkite/src/Constants/Benchmarks.dhall | 22 ++++++ 4 files changed, 120 insertions(+), 1 deletion(-) create mode 100644 buildkite/src/Command/Bench/Base.dhall create mode 100644 buildkite/src/Constants/Benchmarks.dhall diff --git a/buildkite/src/Command/Base.dhall 
b/buildkite/src/Command/Base.dhall index 32ae7f2a820..deefb5a1742 100644 --- a/buildkite/src/Command/Base.dhall +++ b/buildkite/src/Command/Base.dhall @@ -131,6 +131,7 @@ let targetToAgent = , Integration = toMap { size = "integration" } , QA = toMap { size = "qa" } , Hardfork = toMap { size = "hardfork" } + , Perf = toMap { size = "perf" } , Multi = toMap { size = "generic-multi" } } target diff --git a/buildkite/src/Command/Bench/Base.dhall b/buildkite/src/Command/Bench/Base.dhall new file mode 100644 index 00000000000..3d2431943b6 --- /dev/null +++ b/buildkite/src/Command/Bench/Base.dhall @@ -0,0 +1,96 @@ +let PipelineMode = ../../Pipeline/Mode.dhall + +let PipelineTag = ../../Pipeline/Tag.dhall + +let Pipeline = ../../Pipeline/Dsl.dhall + +let JobSpec = ../../Pipeline/JobSpec.dhall + +let DebianVersions = ../../Constants/DebianVersions.dhall + +let RunInToolchain = ../../Command/RunInToolchain.dhall + +let Profiles = ../../Constants/Profiles.dhall + +let Command = ../../Command/Base.dhall + +let Docker = ../../Command/Docker/Type.dhall + +let Size = ../Size.dhall + +let Benchmarks = ../../Constants/Benchmarks.dhall + +let SelectFiles = ../../Lib/SelectFiles.dhall + +let Spec = + { Type = + { key : Text + , bench : Text + , label : Text + , size : Size + , name : Text + , path : Text + , mode : PipelineMode.Type + , dependsOn : List Command.TaggedKey.Type + , additionalDirtyWhen : List SelectFiles.Type + , yellowThreshold : Double + , redThreshold : Double + } + , default = + { mode = PipelineMode.Type.PullRequest + , size = Size.Medium + , dependsOn = + DebianVersions.dependsOn + DebianVersions.DebVersion.Bullseye + Profiles.Type.Standard + , additionalDirtyWhen = [] : List SelectFiles.Type + , yellowThreshold = 0.1 + , redThreshold = 0.2 + } + } + +let command + : Spec.Type -> Command.Type + = \(spec : Spec.Type) + -> Command.build + Command.Config::{ + , commands = + RunInToolchain.runInToolchain + (Benchmarks.toEnvList Benchmarks.Type::{=}) + 
"./buildkite/scripts/benchmarks.sh ${spec.bench} --red-threshold ${Double/show + spec.redThreshold} --yellow-threshold ${Double/show + spec.yellowThreshold}" + , label = "Perf: ${spec.label}" + , key = spec.key + , target = spec.size + , docker = None Docker.Type + , depends_on = spec.dependsOn + } + +let pipeline + : Spec.Type -> Pipeline.Config.Type + = \(spec : Spec.Type) + -> Pipeline.Config::{ + , spec = JobSpec::{ + , dirtyWhen = + [ SelectFiles.strictlyStart (SelectFiles.contains "src") + , SelectFiles.exactly + "buildkite/src/Command/Bench/Base" + "dhall" + , SelectFiles.contains "scripts/benchmark" + , SelectFiles.contains "buildkite/scripts/benchmark" + ] + # spec.additionalDirtyWhen + , path = spec.path + , name = spec.name + , mode = spec.mode + , tags = + [ PipelineTag.Type.Long + , PipelineTag.Type.Test + , PipelineTag.Type.Stable + ] + } + , steps = [ command spec ] + } + +in { command = command, pipeline = pipeline, Spec = Spec } diff --git a/buildkite/src/Command/Size.dhall b/buildkite/src/Command/Size.dhall index eda37582dc4..a7cadacc02a 100644 --- a/buildkite/src/Command/Size.dhall +++ b/buildkite/src/Command/Size.dhall @@ -1 +1 @@ -< XLarge | Large | Medium | Small | Integration | QA | Hardfork | Multi > +< XLarge | Large | Medium | Small | Integration | QA | Hardfork | Multi | Perf > diff --git a/buildkite/src/Constants/Benchmarks.dhall b/buildkite/src/Constants/Benchmarks.dhall new file mode 100644 index 00000000000..2edf5579bc9 --- /dev/null +++ b/buildkite/src/Constants/Benchmarks.dhall @@ -0,0 +1,22 @@ +let Spec = + { Type = { tokenEnvName : Text, bucket : Text, org : Text, host : Text } + , default = + { tokenEnvName = + "hxEc4COeR59kJXDyQp9g1TB_kKqxDspyBKdl0omdNY615j_2Opf-URZO2NeA3gy4dotlJ8vBrdds_ribgl58dw==" + , bucket = "mina-benchmarks" + , org = "Dev" + , host = "https://eu-central-1-1.aws.cloud2.influxdata.com/" + } + } + +let toEnvList = + \(spec : Spec.Type) + -> [ "INFLUX_HOST=${spec.host}" + , "INFLUX_TOKEN=${spec.tokenEnvName}" 
+ , "INFLUX_ORG=${spec.org}" + , "INFLUX_BUCKET_NAME=${spec.bucket}" + ] + +let mainlineBranches = "[develop,compatible,master]" + +in { Type = Spec, toEnvList = toEnvList, mainlineBranches = mainlineBranches } From 69cc4dc0e236aa4e0ae6faf7e78cee2c185f87ca Mon Sep 17 00:00:00 2001 From: Anne-Laure Date: Wed, 2 Oct 2024 14:28:18 +0200 Subject: [PATCH 19/29] Block_producer: move log_bootstrap_mode outside of run Co-authored-by: georgeee --- src/lib/block_producer/block_producer.ml | 19 +++++++++---------- 1 file changed, 9 insertions(+), 10 deletions(-) diff --git a/src/lib/block_producer/block_producer.ml b/src/lib/block_producer/block_producer.ml index ab54c4cbe31..94a992cea93 100644 --- a/src/lib/block_producer/block_producer.ml +++ b/src/lib/block_producer/block_producer.ml @@ -656,6 +656,9 @@ let validate_genesis_protocol_state_block ~genesis_state_hash (b, v) = |> Result.map ~f:(Fn.flip Validation.with_body (Mina_block.body @@ With_hash.data b)) +let log_bootstrap_mode ~logger () = + [%log info] "Pausing block production while bootstrapping" + let run ~context:(module Context : CONTEXT) ~vrf_evaluator ~prover ~verifier ~trust_system ~get_completed_work ~transaction_resource_pool ~time_controller ~consensus_local_state ~coinbase_receiver ~frontier_reader @@ -701,9 +704,6 @@ let run ~context:(module Context : CONTEXT) ~vrf_evaluator ~prover ~verifier let rejected_blocks_logger = Logger.create ~id:Logger.Logger_id.rejected_blocks () in - let log_bootstrap_mode () = - [%log info] "Pausing block production while bootstrapping" - in let slot_tx_end = Runtime_config.slot_tx_end precomputed_values.runtime_config in @@ -715,7 +715,8 @@ let run ~context:(module Context : CONTEXT) ~vrf_evaluator ~prover ~verifier let open Interruptible.Let_syntax in match Broadcast_pipe.Reader.peek frontier_reader with | None -> - log_bootstrap_mode () ; Interruptible.return () + log_bootstrap_mode ~logger () ; + Interruptible.return () | Some frontier -> ( let global_slot = 
Consensus.Data.Block_data.global_slot_since_genesis block_data @@ -1081,7 +1082,7 @@ let run ~context:(module Context : CONTEXT) ~vrf_evaluator ~prover ~verifier (* Begin checking for the ability to produce a block *) match Broadcast_pipe.Reader.peek frontier_reader with | None -> - log_bootstrap_mode () ; + log_bootstrap_mode ~logger () ; don't_wait_for (let%map () = Broadcast_pipe.Reader.iter_until frontier_reader @@ -1361,9 +1362,6 @@ let run ~context:(module Context : CONTEXT) ~vrf_evaluator ~prover ~verifier let run_precomputed ~context:(module Context : CONTEXT) ~verifier ~trust_system ~time_controller ~frontier_reader ~transition_writer ~precomputed_blocks = let open Context in - let log_bootstrap_mode () = - [%log info] "Pausing block production while bootstrapping" - in let rejected_blocks_logger = Logger.create ~id:Logger.Logger_id.rejected_blocks () in @@ -1393,7 +1391,8 @@ let run_precomputed ~context:(module Context : CONTEXT) ~verifier ~trust_system in match Broadcast_pipe.Reader.peek frontier_reader with | None -> - log_bootstrap_mode () ; return () + log_bootstrap_mode ~logger () ; + return () | Some frontier -> let open Transition_frontier.Extensions in let transition_registry = @@ -1552,7 +1551,7 @@ let run_precomputed ~context:(module Context : CONTEXT) ~verifier ~trust_system (* Begin checking for the ability to produce a block *) match Broadcast_pipe.Reader.peek frontier_reader with | None -> - log_bootstrap_mode () ; + log_bootstrap_mode ~logger () ; let%bind () = Broadcast_pipe.Reader.iter_until frontier_reader ~f:(Fn.compose Deferred.return Option.is_some) From 7b6cc195afd769ce5c9f482cab95f5695142c542 Mon Sep 17 00:00:00 2001 From: Anne-Laure Date: Wed, 2 Oct 2024 14:34:33 +0200 Subject: [PATCH 20/29] Block_producer: move genesis_breadcrumb_creator outside of run Co-authored-by: georgeee --- src/lib/block_producer/block_producer.ml | 64 +++++++++++++----------- 1 file changed, 34 insertions(+), 30 deletions(-) diff --git 
a/src/lib/block_producer/block_producer.ml b/src/lib/block_producer/block_producer.ml index 94a992cea93..799fb327446 100644 --- a/src/lib/block_producer/block_producer.ml +++ b/src/lib/block_producer/block_producer.ml @@ -659,6 +659,39 @@ let validate_genesis_protocol_state_block ~genesis_state_hash (b, v) = let log_bootstrap_mode ~logger () = [%log info] "Pausing block production while bootstrapping" +let genesis_breadcrumb_creator ~context:(module Context : CONTEXT) prover = + let open Context in + let started = ref false in + let genesis_breadcrumb_ivar = Ivar.create () in + fun () -> + if !started then Ivar.read genesis_breadcrumb_ivar + else ( + started := true ; + let max_num_retries = 3 in + let rec go retries = + [%log info] + "Generating genesis proof ($attempts_remaining / $max_attempts)" + ~metadata: + [ ("attempts_remaining", `Int retries) + ; ("max_attempts", `Int max_num_retries) + ] ; + match%bind + Prover.create_genesis_block prover + (Genesis_proof.to_inputs precomputed_values) + with + | Ok res -> + Ivar.fill genesis_breadcrumb_ivar (Ok res) ; + return (Ok res) + | Error err -> + [%log error] "Failed to generate genesis breadcrumb: $error" + ~metadata:[ ("error", Error_json.error_to_yojson err) ] ; + if retries > 0 then go (retries - 1) + else ( + Ivar.fill genesis_breadcrumb_ivar (Error err) ; + return (Error err) ) + in + go max_num_retries ) + let run ~context:(module Context : CONTEXT) ~vrf_evaluator ~prover ~verifier ~trust_system ~get_completed_work ~transaction_resource_pool ~time_controller ~consensus_local_state ~coinbase_receiver ~frontier_reader @@ -670,36 +703,7 @@ let run ~context:(module Context : CONTEXT) ~vrf_evaluator ~prover ~verifier let consensus_constants = precomputed_values.consensus_constants in O1trace.sync_thread "produce_blocks" (fun () -> let genesis_breadcrumb = - let started = ref false in - let genesis_breadcrumb_ivar = Ivar.create () in - fun () -> - if !started then Ivar.read genesis_breadcrumb_ivar - else ( - 
started := true ; - let max_num_retries = 3 in - let rec go retries = - [%log info] - "Generating genesis proof ($attempts_remaining / $max_attempts)" - ~metadata: - [ ("attempts_remaining", `Int retries) - ; ("max_attempts", `Int max_num_retries) - ] ; - match%bind - Prover.create_genesis_block prover - (Genesis_proof.to_inputs precomputed_values) - with - | Ok res -> - Ivar.fill genesis_breadcrumb_ivar (Ok res) ; - return (Ok res) - | Error err -> - [%log error] "Failed to generate genesis breadcrumb: $error" - ~metadata:[ ("error", Error_json.error_to_yojson err) ] ; - if retries > 0 then go (retries - 1) - else ( - Ivar.fill genesis_breadcrumb_ivar (Error err) ; - return (Error err) ) - in - go max_num_retries ) + genesis_breadcrumb_creator ~context:(module Context) prover in let rejected_blocks_logger = Logger.create ~id:Logger.Logger_id.rejected_blocks () From 83fcc076c82dfe686465e60d8dcec5df4f847b7e Mon Sep 17 00:00:00 2001 From: Anne-Laure Date: Wed, 2 Oct 2024 14:52:37 +0200 Subject: [PATCH 21/29] Block_producer: move produce outside of run Co-authored-by: georgeee --- src/lib/block_producer/block_producer.ml | 721 +++++++++++------------ 1 file changed, 353 insertions(+), 368 deletions(-) diff --git a/src/lib/block_producer/block_producer.ml b/src/lib/block_producer/block_producer.ml index 799fb327446..2897dbd8951 100644 --- a/src/lib/block_producer/block_producer.ml +++ b/src/lib/block_producer/block_producer.ml @@ -692,6 +692,350 @@ let genesis_breadcrumb_creator ~context:(module Context : CONTEXT) prover = in go max_num_retries ) +let produce ~genesis_breadcrumb ~context:(module Context : CONTEXT) ~prover + ~verifier ~trust_system ~get_completed_work ~transaction_resource_pool + ~frontier_reader ~time_controller ~transition_writer ~log_block_creation + ~block_reward_threshold ~block_produced_bvar ~slot_tx_end ~slot_chain_end + ~net ~zkapp_cmd_limit_hardcap ivar + (scheduled_time, block_data, winner_pubkey) = + let open Context in + let module 
Breadcrumb = Transition_frontier.Breadcrumb in + let open Interruptible.Let_syntax in + let rejected_blocks_logger = + Logger.create ~id:Logger.Logger_id.rejected_blocks () + in + match Broadcast_pipe.Reader.peek frontier_reader with + | None -> + log_bootstrap_mode ~logger () ; + Interruptible.return () + | Some frontier -> ( + let global_slot = + Consensus.Data.Block_data.global_slot_since_genesis block_data + in + Internal_tracing.with_slot global_slot + @@ fun () -> + [%log internal] "Begin_block_production" ; + let open Transition_frontier.Extensions in + let transition_registry = + get_extension + (Transition_frontier.extensions frontier) + Transition_registry + in + let crumb = Transition_frontier.best_tip frontier in + let crumb = + let crumb_global_slot_since_genesis = + Breadcrumb.protocol_state crumb + |> Protocol_state.consensus_state + |> Consensus.Data.Consensus_state.global_slot_since_genesis + in + let block_global_slot_since_genesis = + Consensus.Proof_of_stake.Data.Block_data.global_slot_since_genesis + block_data + in + if + Mina_numbers.Global_slot_since_genesis.equal + crumb_global_slot_since_genesis block_global_slot_since_genesis + then + (* We received a block for this slot over the network before + attempting to produce our own. Build upon its parent instead + of attempting (and failing) to build upon the block itself. + *) + Transition_frontier.find_exn frontier (Breadcrumb.parent_hash crumb) + else crumb + in + let start = Block_time.now time_controller in + [%log info] + ~metadata: + [ ("parent_hash", Breadcrumb.parent_hash crumb |> State_hash.to_yojson) + ; ( "protocol_state" + , Breadcrumb.protocol_state crumb |> Protocol_state.value_to_yojson + ) + ] + "Producing new block with parent $parent_hash%!" 
; + let previous_transition = Breadcrumb.block_with_hash crumb in + let previous_protocol_state = + Header.protocol_state + @@ Mina_block.header (With_hash.data previous_transition) + in + let%bind previous_protocol_state_proof = + if + Consensus.Data.Consensus_state.is_genesis_state + (Protocol_state.consensus_state previous_protocol_state) + && Option.is_none precomputed_values.proof_data + then ( + match%bind Interruptible.uninterruptible (genesis_breadcrumb ()) with + | Ok block -> + let proof = Blockchain_snark.Blockchain.proof block in + Interruptible.lift (Deferred.return proof) (Deferred.never ()) + | Error err -> + [%log error] + "Aborting block production: cannot generate a genesis proof" + ~metadata:[ ("error", Error_json.error_to_yojson err) ] ; + Interruptible.lift (Deferred.never ()) (Deferred.return ()) ) + else + return + ( Header.protocol_state_proof + @@ Mina_block.header (With_hash.data previous_transition) ) + in + [%log internal] "Get_transactions_from_pool" ; + let transactions = + Network_pool.Transaction_pool.Resource_pool.transactions + transaction_resource_pool + |> Sequence.map + ~f:Transaction_hash.User_command_with_valid_signature.data + in + let%bind () = Interruptible.lift (Deferred.return ()) (Ivar.read ivar) in + [%log internal] "Generate_next_state" ; + let%bind next_state_opt = + generate_next_state ~commit_id ~constraint_constants ~scheduled_time + ~block_data ~previous_protocol_state ~time_controller + ~staged_ledger:(Breadcrumb.staged_ledger crumb) + ~transactions ~get_completed_work ~logger ~log_block_creation + ~winner_pk:winner_pubkey ~block_reward_threshold + ~zkapp_cmd_limit:!zkapp_cmd_limit ~zkapp_cmd_limit_hardcap + ~slot_tx_end ~slot_chain_end + in + [%log internal] "Generate_next_state_done" ; + match next_state_opt with + | None -> + Interruptible.return () + | Some (protocol_state, internal_transition, pending_coinbase_witness) -> + let diff = + Internal_transition.staged_ledger_diff internal_transition + in + let 
commands = Staged_ledger_diff.commands diff in + let transactions_count = List.length commands in + let protocol_state_hashes = Protocol_state.hashes protocol_state in + let consensus_state_with_hashes = + { With_hash.hash = protocol_state_hashes + ; data = Protocol_state.consensus_state protocol_state + } + in + [%log internal] "@produced_block_state_hash" + ~metadata: + [ ( "state_hash" + , `String + (Mina_base.State_hash.to_base58_check + protocol_state_hashes.state_hash ) ) + ] ; + Internal_tracing.with_state_hash protocol_state_hashes.state_hash + @@ fun () -> + Debug_assert.debug_assert (fun () -> + [%test_result: [ `Take | `Keep ]] + (Consensus.Hooks.select + ~context:(module Context) + ~existing: + (With_hash.map ~f:Mina_block.consensus_state + previous_transition ) + ~candidate:consensus_state_with_hashes ) + ~expect:`Take + ~message: + "newly generated consensus states should be selected over \ + their parent" ; + let root_consensus_state_with_hashes = + Transition_frontier.root frontier + |> Breadcrumb.consensus_state_with_hashes + in + [%test_result: [ `Take | `Keep ]] + (Consensus.Hooks.select + ~context:(module Context) + ~existing:root_consensus_state_with_hashes + ~candidate:consensus_state_with_hashes ) + ~expect:`Take + ~message: + "newly generated consensus states should be selected over \ + the tf root" ) ; + Interruptible.uninterruptible + (let open Deferred.Let_syntax in + let emit_breadcrumb () = + let open Deferred.Result.Let_syntax in + [%log internal] + ~metadata:[ ("transactions_count", `Int transactions_count) ] + "Produce_state_transition_proof" ; + let%bind protocol_state_proof = + time ~logger ~time_controller + "Protocol_state_proof proving time(ms)" (fun () -> + O1trace.thread "dispatch_block_proving" (fun () -> + Prover.prove prover ~prev_state:previous_protocol_state + ~prev_state_proof:previous_protocol_state_proof + ~next_state:protocol_state internal_transition + pending_coinbase_witness ) + |> Deferred.Result.map_error ~f:(fun 
err -> + `Prover_error + ( err + , ( previous_protocol_state_proof + , internal_transition + , pending_coinbase_witness ) ) ) ) + in + let staged_ledger_diff = + Internal_transition.staged_ledger_diff internal_transition + in + let previous_state_hash = + (Protocol_state.hashes previous_protocol_state).state_hash + in + [%log internal] "Produce_chain_transition_proof" ; + let delta_block_chain_proof = + Transition_chain_prover.prove + ~length:(Mina_numbers.Length.to_int consensus_constants.delta) + ~frontier previous_state_hash + |> Option.value_exn + in + [%log internal] "Produce_validated_transition" ; + let%bind transition = + let open Result.Let_syntax in + Validation.wrap + { With_hash.hash = protocol_state_hashes + ; data = + (let body = Body.create staged_ledger_diff in + Mina_block.create ~body + ~header: + (Header.create ~protocol_state ~protocol_state_proof + ~delta_block_chain_proof () ) ) + } + |> Validation.skip_time_received_validation + `This_block_was_not_received_via_gossip + |> Validation.skip_protocol_versions_validation + `This_block_has_valid_protocol_versions + |> validate_genesis_protocol_state_block + ~genesis_state_hash: + (Protocol_state.genesis_state_hash + ~state_hash:(Some previous_state_hash) + previous_protocol_state ) + >>| Validation.skip_proof_validation + `This_block_was_generated_internally + >>| Validation.skip_delta_block_chain_validation + `This_block_was_not_received_via_gossip + >>= Validation.validate_frontier_dependencies + ~to_header:Mina_block.header + ~context:(module Context) + ~root_block: + ( Transition_frontier.root frontier + |> Breadcrumb.block_with_hash ) + ~is_block_in_frontier: + (Fn.compose Option.is_some + (Transition_frontier.find frontier) ) + |> Deferred.return + in + let transition_receipt_time = Some (Time.now ()) in + let%bind breadcrumb = + time ~logger ~time_controller + "Build breadcrumb on produced block" (fun () -> + Breadcrumb.build ~logger ~precomputed_values ~verifier + 
~get_completed_work:(Fn.const None) ~trust_system + ~parent:crumb ~transition + ~sender:None (* Consider skipping `All here *) + ~skip_staged_ledger_verification:`Proofs + ~transition_receipt_time () ) + |> Deferred.Result.map_error ~f:(function + | `Invalid_staged_ledger_diff e -> + `Invalid_staged_ledger_diff (e, staged_ledger_diff) + | ( `Fatal_error _ + | `Invalid_genesis_protocol_state + | `Invalid_staged_ledger_hash _ + | `Not_selected_over_frontier_root + | `Parent_missing_from_frontier + | `Prover_error _ ) as err -> + err ) + in + let txs = + Mina_block.transactions ~constraint_constants + (Breadcrumb.block breadcrumb) + |> List.map ~f:Transaction.yojson_summary_with_status + in + [%log internal] "@block_metadata" + ~metadata: + [ ( "blockchain_length" + , Mina_numbers.Length.to_yojson + @@ Mina_block.blockchain_length + @@ Breadcrumb.block breadcrumb ) + ; ("transactions", `List txs) + ] ; + [%str_log info] + ~metadata:[ ("breadcrumb", Breadcrumb.to_yojson breadcrumb) ] + Block_produced ; + (* let uptime service (and any other waiters) know about breadcrumb *) + Bvar.broadcast block_produced_bvar breadcrumb ; + Mina_metrics.(Counter.inc_one Block_producer.blocks_produced) ; + Mina_metrics.Block_producer.( + Block_production_delay_histogram.observe block_production_delay + Time.( + Span.to_ms + @@ diff (now ()) + @@ Block_time.to_time_exn scheduled_time)) ; + [%log internal] "Send_breadcrumb_to_transition_frontier" ; + let%bind.Async.Deferred () = + Strict_pipe.Writer.write transition_writer breadcrumb + in + let metadata = + [ ( "state_hash" + , State_hash.to_yojson protocol_state_hashes.state_hash ) + ] + in + [%log internal] "Wait_for_confirmation" ; + [%log debug] ~metadata + "Waiting for block $state_hash to be inserted into frontier" ; + Deferred.choose + [ Deferred.choice + (Transition_registry.register transition_registry + protocol_state_hashes.state_hash ) + (Fn.const (Ok `Transition_accepted)) + ; Deferred.choice + ( Block_time.Timeout.create 
time_controller + (* We allow up to 20 seconds for the transition + to make its way from the transition_writer to + the frontier. + This value is chosen to be reasonably + generous. In theory, this should not take + terribly long. But long cycles do happen in + our system, and with medium curves those long + cycles can be substantial. + *) + (Block_time.Span.of_ms 20000L) + ~f:(Fn.const ()) + |> Block_time.Timeout.to_deferred ) + (Fn.const (Ok `Timed_out)) + ] + >>= function + | `Transition_accepted -> + [%log internal] "Transition_accepted" ; + [%log info] ~metadata + "Generated transition $state_hash was accepted into \ + transition frontier" ; + Deferred.map ~f:Result.return + (Mina_networking.broadcast_state net + (Breadcrumb.block_with_hash breadcrumb) ) + | `Timed_out -> + (* FIXME #3167: this should be fatal, and more + importantly, shouldn't happen. + *) + [%log internal] "Transition_accept_timeout" ; + let msg : (_, unit, string, unit) format4 = + "Timed out waiting for generated transition $state_hash to \ + enter transition frontier. Continuing to produce new \ + blocks anyway. This may mean your CPU is overloaded. \ + Consider disabling `-run-snark-worker` if it's \ + configured." 
+ in + let span = + Block_time.diff (Block_time.now time_controller) start + in + let metadata = + [ ( "time" + , `Int (Block_time.Span.to_ms span |> Int64.to_int_exn) ) + ; ( "protocol_state" + , Protocol_state.Value.to_yojson protocol_state ) + ] + @ metadata + in + [%log' debug rejected_blocks_logger] ~metadata msg ; + [%log fatal] ~metadata msg ; + return () + in + let%bind res = emit_breadcrumb () in + let span = Block_time.diff (Block_time.now time_controller) start in + handle_block_production_errors ~logger ~rejected_blocks_logger + ~time_taken:span ~previous_protocol_state ~protocol_state res) ) + let run ~context:(module Context : CONTEXT) ~vrf_evaluator ~prover ~verifier ~trust_system ~get_completed_work ~transaction_resource_pool ~time_controller ~consensus_local_state ~coinbase_receiver ~frontier_reader @@ -705,381 +1049,22 @@ let run ~context:(module Context : CONTEXT) ~vrf_evaluator ~prover ~verifier let genesis_breadcrumb = genesis_breadcrumb_creator ~context:(module Context) prover in - let rejected_blocks_logger = - Logger.create ~id:Logger.Logger_id.rejected_blocks () - in let slot_tx_end = Runtime_config.slot_tx_end precomputed_values.runtime_config in let slot_chain_end = Runtime_config.slot_chain_end precomputed_values.runtime_config in - let module Breadcrumb = Transition_frontier.Breadcrumb in - let produce ivar (scheduled_time, block_data, winner_pubkey) = - let open Interruptible.Let_syntax in - match Broadcast_pipe.Reader.peek frontier_reader with - | None -> - log_bootstrap_mode ~logger () ; - Interruptible.return () - | Some frontier -> ( - let global_slot = - Consensus.Data.Block_data.global_slot_since_genesis block_data - in - Internal_tracing.with_slot global_slot - @@ fun () -> - [%log internal] "Begin_block_production" ; - let open Transition_frontier.Extensions in - let transition_registry = - get_extension - (Transition_frontier.extensions frontier) - Transition_registry - in - let crumb = Transition_frontier.best_tip frontier in 
- let crumb = - let crumb_global_slot_since_genesis = - Breadcrumb.protocol_state crumb - |> Protocol_state.consensus_state - |> Consensus.Data.Consensus_state.global_slot_since_genesis - in - let block_global_slot_since_genesis = - Consensus.Proof_of_stake.Data.Block_data - .global_slot_since_genesis block_data - in - if - Mina_numbers.Global_slot_since_genesis.equal - crumb_global_slot_since_genesis - block_global_slot_since_genesis - then - (* We received a block for this slot over the network before - attempting to produce our own. Build upon its parent instead - of attempting (and failing) to build upon the block itself. - *) - Transition_frontier.find_exn frontier - (Breadcrumb.parent_hash crumb) - else crumb - in - let start = Block_time.now time_controller in - [%log info] - ~metadata: - [ ( "parent_hash" - , Breadcrumb.parent_hash crumb |> State_hash.to_yojson ) - ; ( "protocol_state" - , Breadcrumb.protocol_state crumb - |> Protocol_state.value_to_yojson ) - ] - "Producing new block with parent $parent_hash%!" 
; - let previous_transition = Breadcrumb.block_with_hash crumb in - let previous_protocol_state = - Header.protocol_state - @@ Mina_block.header (With_hash.data previous_transition) - in - let%bind previous_protocol_state_proof = - if - Consensus.Data.Consensus_state.is_genesis_state - (Protocol_state.consensus_state previous_protocol_state) - && Option.is_none precomputed_values.proof_data - then ( - match%bind - Interruptible.uninterruptible (genesis_breadcrumb ()) - with - | Ok block -> - let proof = Blockchain_snark.Blockchain.proof block in - Interruptible.lift (Deferred.return proof) - (Deferred.never ()) - | Error err -> - [%log error] - "Aborting block production: cannot generate a genesis \ - proof" - ~metadata:[ ("error", Error_json.error_to_yojson err) ] ; - Interruptible.lift (Deferred.never ()) (Deferred.return ()) - ) - else - return - ( Header.protocol_state_proof - @@ Mina_block.header (With_hash.data previous_transition) ) - in - [%log internal] "Get_transactions_from_pool" ; - let transactions = - Network_pool.Transaction_pool.Resource_pool.transactions - transaction_resource_pool - |> Sequence.map - ~f:Transaction_hash.User_command_with_valid_signature.data - in - let%bind () = - Interruptible.lift (Deferred.return ()) (Ivar.read ivar) - in - [%log internal] "Generate_next_state" ; - let%bind next_state_opt = - generate_next_state ~commit_id ~constraint_constants - ~scheduled_time ~block_data ~previous_protocol_state - ~time_controller - ~staged_ledger:(Breadcrumb.staged_ledger crumb) - ~transactions ~get_completed_work ~logger ~log_block_creation - ~winner_pk:winner_pubkey ~block_reward_threshold - ~zkapp_cmd_limit:!zkapp_cmd_limit ~zkapp_cmd_limit_hardcap - ~slot_tx_end ~slot_chain_end - in - [%log internal] "Generate_next_state_done" ; - match next_state_opt with - | None -> - Interruptible.return () - | Some - (protocol_state, internal_transition, pending_coinbase_witness) - -> - let diff = - Internal_transition.staged_ledger_diff 
internal_transition - in - let commands = Staged_ledger_diff.commands diff in - let transactions_count = List.length commands in - let protocol_state_hashes = - Protocol_state.hashes protocol_state - in - let consensus_state_with_hashes = - { With_hash.hash = protocol_state_hashes - ; data = Protocol_state.consensus_state protocol_state - } - in - [%log internal] "@produced_block_state_hash" - ~metadata: - [ ( "state_hash" - , `String - (Mina_base.State_hash.to_base58_check - protocol_state_hashes.state_hash ) ) - ] ; - Internal_tracing.with_state_hash - protocol_state_hashes.state_hash - @@ fun () -> - Debug_assert.debug_assert (fun () -> - [%test_result: [ `Take | `Keep ]] - (Consensus.Hooks.select - ~context:(module Context) - ~existing: - (With_hash.map ~f:Mina_block.consensus_state - previous_transition ) - ~candidate:consensus_state_with_hashes ) - ~expect:`Take - ~message: - "newly generated consensus states should be selected \ - over their parent" ; - let root_consensus_state_with_hashes = - Transition_frontier.root frontier - |> Breadcrumb.consensus_state_with_hashes - in - [%test_result: [ `Take | `Keep ]] - (Consensus.Hooks.select - ~context:(module Context) - ~existing:root_consensus_state_with_hashes - ~candidate:consensus_state_with_hashes ) - ~expect:`Take - ~message: - "newly generated consensus states should be selected \ - over the tf root" ) ; - Interruptible.uninterruptible - (let open Deferred.Let_syntax in - let emit_breadcrumb () = - let open Deferred.Result.Let_syntax in - [%log internal] - ~metadata: - [ ("transactions_count", `Int transactions_count) ] - "Produce_state_transition_proof" ; - let%bind protocol_state_proof = - time ~logger ~time_controller - "Protocol_state_proof proving time(ms)" (fun () -> - O1trace.thread "dispatch_block_proving" (fun () -> - Prover.prove prover - ~prev_state:previous_protocol_state - ~prev_state_proof:previous_protocol_state_proof - ~next_state:protocol_state internal_transition - 
pending_coinbase_witness ) - |> Deferred.Result.map_error ~f:(fun err -> - `Prover_error - ( err - , ( previous_protocol_state_proof - , internal_transition - , pending_coinbase_witness ) ) ) ) - in - let staged_ledger_diff = - Internal_transition.staged_ledger_diff internal_transition - in - let previous_state_hash = - (Protocol_state.hashes previous_protocol_state).state_hash - in - [%log internal] "Produce_chain_transition_proof" ; - let delta_block_chain_proof = - Transition_chain_prover.prove - ~length: - (Mina_numbers.Length.to_int consensus_constants.delta) - ~frontier previous_state_hash - |> Option.value_exn - in - [%log internal] "Produce_validated_transition" ; - let%bind transition = - let open Result.Let_syntax in - Validation.wrap - { With_hash.hash = protocol_state_hashes - ; data = - (let body = Body.create staged_ledger_diff in - Mina_block.create ~body - ~header: - (Header.create ~protocol_state - ~protocol_state_proof - ~delta_block_chain_proof () ) ) - } - |> Validation.skip_time_received_validation - `This_block_was_not_received_via_gossip - |> Validation.skip_protocol_versions_validation - `This_block_has_valid_protocol_versions - |> validate_genesis_protocol_state_block - ~genesis_state_hash: - (Protocol_state.genesis_state_hash - ~state_hash:(Some previous_state_hash) - previous_protocol_state ) - >>| Validation.skip_proof_validation - `This_block_was_generated_internally - >>| Validation.skip_delta_block_chain_validation - `This_block_was_not_received_via_gossip - >>= Validation.validate_frontier_dependencies - ~to_header:Mina_block.header - ~context:(module Context) - ~root_block: - ( Transition_frontier.root frontier - |> Breadcrumb.block_with_hash ) - ~is_block_in_frontier: - (Fn.compose Option.is_some - (Transition_frontier.find frontier) ) - |> Deferred.return - in - let transition_receipt_time = Some (Time.now ()) in - let%bind breadcrumb = - time ~logger ~time_controller - "Build breadcrumb on produced block" (fun () -> - 
Breadcrumb.build ~logger ~precomputed_values ~verifier - ~get_completed_work:(Fn.const None) ~trust_system - ~parent:crumb ~transition - ~sender:None (* Consider skipping `All here *) - ~skip_staged_ledger_verification:`Proofs - ~transition_receipt_time () ) - |> Deferred.Result.map_error ~f:(function - | `Invalid_staged_ledger_diff e -> - `Invalid_staged_ledger_diff - (e, staged_ledger_diff) - | ( `Fatal_error _ - | `Invalid_genesis_protocol_state - | `Invalid_staged_ledger_hash _ - | `Not_selected_over_frontier_root - | `Parent_missing_from_frontier - | `Prover_error _ ) as err -> - err ) - in - let txs = - Mina_block.transactions ~constraint_constants - (Breadcrumb.block breadcrumb) - |> List.map ~f:Transaction.yojson_summary_with_status - in - [%log internal] "@block_metadata" - ~metadata: - [ ( "blockchain_length" - , Mina_numbers.Length.to_yojson - @@ Mina_block.blockchain_length - @@ Breadcrumb.block breadcrumb ) - ; ("transactions", `List txs) - ] ; - [%str_log info] - ~metadata: - [ ("breadcrumb", Breadcrumb.to_yojson breadcrumb) ] - Block_produced ; - (* let uptime service (and any other waiters) know about breadcrumb *) - Bvar.broadcast block_produced_bvar breadcrumb ; - Mina_metrics.( - Counter.inc_one Block_producer.blocks_produced) ; - Mina_metrics.Block_producer.( - Block_production_delay_histogram.observe - block_production_delay - Time.( - Span.to_ms - @@ diff (now ()) - @@ Block_time.to_time_exn scheduled_time)) ; - [%log internal] "Send_breadcrumb_to_transition_frontier" ; - let%bind.Async.Deferred () = - Strict_pipe.Writer.write transition_writer breadcrumb - in - let metadata = - [ ( "state_hash" - , State_hash.to_yojson protocol_state_hashes.state_hash - ) - ] - in - [%log internal] "Wait_for_confirmation" ; - [%log debug] ~metadata - "Waiting for block $state_hash to be inserted into \ - frontier" ; - Deferred.choose - [ Deferred.choice - (Transition_registry.register transition_registry - protocol_state_hashes.state_hash ) - (Fn.const (Ok 
`Transition_accepted)) - ; Deferred.choice - ( Block_time.Timeout.create time_controller - (* We allow up to 20 seconds for the transition - to make its way from the transition_writer to - the frontier. - This value is chosen to be reasonably - generous. In theory, this should not take - terribly long. But long cycles do happen in - our system, and with medium curves those long - cycles can be substantial. - *) - (Block_time.Span.of_ms 20000L) - ~f:(Fn.const ()) - |> Block_time.Timeout.to_deferred ) - (Fn.const (Ok `Timed_out)) - ] - >>= function - | `Transition_accepted -> - [%log internal] "Transition_accepted" ; - [%log info] ~metadata - "Generated transition $state_hash was accepted into \ - transition frontier" ; - Deferred.map ~f:Result.return - (Mina_networking.broadcast_state net - (Breadcrumb.block_with_hash breadcrumb) ) - | `Timed_out -> - (* FIXME #3167: this should be fatal, and more - importantly, shouldn't happen. - *) - [%log internal] "Transition_accept_timeout" ; - let msg : (_, unit, string, unit) format4 = - "Timed out waiting for generated transition \ - $state_hash to enter transition frontier. \ - Continuing to produce new blocks anyway. This may \ - mean your CPU is overloaded. Consider disabling \ - `-run-snark-worker` if it's configured." 
- in - let span = - Block_time.diff (Block_time.now time_controller) start - in - let metadata = - [ ( "time" - , `Int - (Block_time.Span.to_ms span |> Int64.to_int_exn) - ) - ; ( "protocol_state" - , Protocol_state.Value.to_yojson protocol_state ) - ] - @ metadata - in - [%log' debug rejected_blocks_logger] ~metadata msg ; - [%log fatal] ~metadata msg ; - return () - in - let%bind res = emit_breadcrumb () in - let span = - Block_time.diff (Block_time.now time_controller) start - in - handle_block_production_errors ~logger ~rejected_blocks_logger - ~time_taken:span ~previous_protocol_state ~protocol_state - res) ) + let produce = + produce ~genesis_breadcrumb + ~context:(module Context : CONTEXT) + ~prover ~verifier ~trust_system ~get_completed_work + ~transaction_resource_pool ~frontier_reader ~time_controller + ~transition_writer ~log_block_creation ~block_reward_threshold + ~block_produced_bvar ~slot_tx_end ~slot_chain_end ~net + ~zkapp_cmd_limit_hardcap in + let module Breadcrumb = Transition_frontier.Breadcrumb in let production_supervisor = Singleton_supervisor.create ~task:produce in let scheduler = Singleton_scheduler.create time_controller in let rec check_next_block_timing slot i () = From 13c58c32e2fc29481632612ea02ea795970d3a10 Mon Sep 17 00:00:00 2001 From: Anne-Laure Date: Wed, 2 Oct 2024 15:55:44 +0200 Subject: [PATCH 22/29] Block_producer: move generate_genesis_proof_if_needed outside of run Co-authored-by: georgeee --- src/lib/block_producer/block_producer.ml | 37 ++++++++++++------------ 1 file changed, 19 insertions(+), 18 deletions(-) diff --git a/src/lib/block_producer/block_producer.ml b/src/lib/block_producer/block_producer.ml index 2897dbd8951..c1c39a94a9d 100644 --- a/src/lib/block_producer/block_producer.ml +++ b/src/lib/block_producer/block_producer.ml @@ -1036,6 +1036,19 @@ let produce ~genesis_breadcrumb ~context:(module Context : CONTEXT) ~prover handle_block_production_errors ~logger ~rejected_blocks_logger ~time_taken:span 
~previous_protocol_state ~protocol_state res) ) +let generate_genesis_proof_if_needed ~genesis_breadcrumb ~frontier_reader () = + match Broadcast_pipe.Reader.peek frontier_reader with + | Some transition_frontier -> + let consensus_state = + Transition_frontier.best_tip transition_frontier + |> Transition_frontier.Breadcrumb.consensus_state + in + if Consensus.Data.Consensus_state.is_genesis_state consensus_state then + genesis_breadcrumb () |> Deferred.ignore_m + else Deferred.return () + | None -> + Deferred.return () + let run ~context:(module Context : CONTEXT) ~vrf_evaluator ~prover ~verifier ~trust_system ~get_completed_work ~transaction_resource_pool ~time_controller ~consensus_local_state ~coinbase_receiver ~frontier_reader @@ -1125,22 +1138,6 @@ let run ~context:(module Context : CONTEXT) ~vrf_evaluator ~prover ~verifier "Block producer will begin producing only empty blocks after \ $slot_diff slots" slot_tx_end ; - - let generate_genesis_proof_if_needed () = - match Broadcast_pipe.Reader.peek frontier_reader with - | Some transition_frontier -> - let consensus_state = - Transition_frontier.best_tip transition_frontier - |> Breadcrumb.consensus_state - in - if - Consensus.Data.Consensus_state.is_genesis_state - consensus_state - then genesis_breadcrumb () |> Deferred.ignore_m - else Deferred.return () - | None -> - Deferred.return () - in (* TODO: Re-enable this assertion when it doesn't fail dev demos * (see #5354) * assert ( @@ -1240,7 +1237,10 @@ let run ~context:(module Context : CONTEXT) ~vrf_evaluator ~prover ~verifier (`Produce_now (data, winner_pk)) consensus_state ; Mina_metrics.(Counter.inc_one Block_producer.slots_won) ; - let%map () = generate_genesis_proof_if_needed () in + let%map () = + generate_genesis_proof_if_needed ~genesis_breadcrumb + ~frontier_reader () + in ignore ( Interruptible.finally (Singleton_supervisor.dispatch production_supervisor @@ -1309,7 +1309,8 @@ let run ~context:(module Context : CONTEXT) ~vrf_evaluator ~prover 
~verifier |> Block_time.Span.to_time_span in let%bind () = after span_till_time in - generate_genesis_proof_if_needed () ) ; + generate_genesis_proof_if_needed ~genesis_breadcrumb + ~frontier_reader () ) ; Singleton_scheduler.schedule scheduler scheduled_time ~f:(fun () -> ignore From 8b03a4564d159fd8fdbc7feefb2ed728caa014f6 Mon Sep 17 00:00:00 2001 From: Anne-Laure Date: Wed, 2 Oct 2024 16:38:33 +0200 Subject: [PATCH 23/29] Block_producer: remove useless variables --- src/lib/block_producer/block_producer.ml | 2 -- 1 file changed, 2 deletions(-) diff --git a/src/lib/block_producer/block_producer.ml b/src/lib/block_producer/block_producer.ml index c1c39a94a9d..e4e29de4cd6 100644 --- a/src/lib/block_producer/block_producer.ml +++ b/src/lib/block_producer/block_producer.ml @@ -1056,8 +1056,6 @@ let run ~context:(module Context : CONTEXT) ~vrf_evaluator ~prover ~verifier ~block_reward_threshold ~block_produced_bvar ~vrf_evaluation_state ~net ~zkapp_cmd_limit_hardcap = let open Context in - let constraint_constants = precomputed_values.constraint_constants in - let consensus_constants = precomputed_values.consensus_constants in O1trace.sync_thread "produce_blocks" (fun () -> let genesis_breadcrumb = genesis_breadcrumb_creator ~context:(module Context) prover From adf07f2e7170814b006eb369fb176a5270a5e7ee Mon Sep 17 00:00:00 2001 From: Anne-Laure Date: Wed, 2 Oct 2024 16:42:55 +0200 Subject: [PATCH 24/29] Block_producer: move iteration outside of run Co-authored-by: georgeee --- src/lib/block_producer/block_producer.ml | 366 ++++++++++++----------- 1 file changed, 190 insertions(+), 176 deletions(-) diff --git a/src/lib/block_producer/block_producer.ml b/src/lib/block_producer/block_producer.ml index e4e29de4cd6..b66aca0f2f4 100644 --- a/src/lib/block_producer/block_producer.ml +++ b/src/lib/block_producer/block_producer.ml @@ -1049,6 +1049,166 @@ let generate_genesis_proof_if_needed ~genesis_breadcrumb ~frontier_reader () = | None -> Deferred.return () +let iteration 
~schedule_next_vrf_check ~produce_block_now + ~schedule_block_production ~next_vrf_check_now ~genesis_breadcrumb + ~context:(module Context : CONTEXT) ~vrf_evaluator ~time_controller + ~coinbase_receiver ~frontier_reader ~set_next_producer_timing + ~transition_frontier ~vrf_evaluation_state ~epoch_data_for_vrf + ~ledger_snapshot i slot = + O1trace.thread "block_producer_iteration" + @@ fun () -> + let consensus_state = + Transition_frontier.( + best_tip transition_frontier |> Breadcrumb.consensus_state) + in + let i' = + Mina_numbers.Length.succ + epoch_data_for_vrf.Consensus.Data.Epoch_data_for_vrf.epoch + in + let new_global_slot = epoch_data_for_vrf.global_slot in + let open Context in + let%bind () = + if Mina_numbers.Length.(i' > i) then + Vrf_evaluation_state.update_epoch_data ~vrf_evaluator ~epoch_data_for_vrf + ~logger vrf_evaluation_state ~vrf_poll_interval + else Deferred.unit + in + let%bind () = + (*Poll once every slot if the evaluation for the epoch is not completed or the evaluation is completed*) + if + Mina_numbers.Global_slot_since_hard_fork.(new_global_slot > slot) + && not (Vrf_evaluation_state.finished vrf_evaluation_state) + then + Vrf_evaluation_state.poll ~vrf_evaluator ~logger vrf_evaluation_state + ~vrf_poll_interval + else Deferred.unit + in + match Core.Queue.dequeue vrf_evaluation_state.queue with + | None -> ( + (*Keep trying until we get some slots*) + let poll () = + let%bind () = Async.after vrf_poll_interval in + let%bind () = + Vrf_evaluation_state.poll ~vrf_evaluator ~logger vrf_evaluation_state + ~vrf_poll_interval + in + schedule_next_vrf_check (Block_time.now time_controller) + in + match Vrf_evaluation_state.evaluator_status vrf_evaluation_state with + | Completed -> + let epoch_end_time = + Consensus.Hooks.epoch_end_time ~constants:consensus_constants + epoch_data_for_vrf.epoch + in + set_next_producer_timing (`Check_again epoch_end_time) consensus_state ; + [%log info] "No more slots won in this epoch" ; + 
schedule_next_vrf_check epoch_end_time + | At last_slot -> + set_next_producer_timing (`Evaluating_vrf last_slot) consensus_state ; + poll () + | Start -> + set_next_producer_timing (`Evaluating_vrf new_global_slot) + consensus_state ; + poll () ) + | Some slot_won -> ( + let winning_global_slot = slot_won.global_slot in + let slot, epoch = + let t = + Consensus.Data.Consensus_time.of_global_slot winning_global_slot + ~constants:consensus_constants + in + Consensus.Data.Consensus_time.(slot t, epoch t) + in + [%log info] "Block producer won slot $slot in epoch $epoch" + ~metadata: + [ ( "slot" + , Mina_numbers.Global_slot_since_genesis.( + to_yojson @@ of_uint32 slot) ) + ; ("epoch", Mina_numbers.Length.to_yojson epoch) + ] ; + let now = Block_time.now time_controller in + let curr_global_slot = + Consensus.Data.Consensus_time.( + of_time_exn ~constants:consensus_constants now |> to_global_slot) + in + let winner_pk = fst slot_won.delegator in + let data = + Consensus.Hooks.get_block_data ~slot_won ~ledger_snapshot + ~coinbase_receiver:!coinbase_receiver + in + if + Mina_numbers.Global_slot_since_hard_fork.( + curr_global_slot = winning_global_slot) + then ( + (*produce now*) + [%log info] "Producing a block now" ; + set_next_producer_timing + (`Produce_now (data, winner_pk)) + consensus_state ; + Mina_metrics.(Counter.inc_one Block_producer.slots_won) ; + let%bind () = + generate_genesis_proof_if_needed ~genesis_breadcrumb ~frontier_reader + () + in + produce_block_now (now, data, winner_pk) ) + else + match + Mina_numbers.Global_slot_since_hard_fork.diff winning_global_slot + curr_global_slot + with + | None -> + [%log warn] + "Skipping block production for global slot $slot_won because it \ + has passed. 
Current global slot is $curr_slot" + ~metadata: + [ ( "slot_won" + , Mina_numbers.Global_slot_since_hard_fork.to_yojson + winning_global_slot ) + ; ( "curr_slot" + , Mina_numbers.Global_slot_since_hard_fork.to_yojson + curr_global_slot ) + ] ; + next_vrf_check_now () + | Some slot_diff -> + [%log info] "Producing a block in $slots slots" + ~metadata: + [ ("slots", Mina_numbers.Global_slot_span.to_yojson slot_diff) ] ; + let time = + Consensus.Data.Consensus_time.( + start_time ~constants:consensus_constants + (of_global_slot ~constants:consensus_constants + winning_global_slot )) + |> Block_time.to_span_since_epoch |> Block_time.Span.to_ms + in + set_next_producer_timing + (`Produce (time, data, winner_pk)) + consensus_state ; + Mina_metrics.(Counter.inc_one Block_producer.slots_won) ; + let scheduled_time = time_of_ms time in + don't_wait_for + ((* Attempt to generate a genesis proof in the slot + immediately before we'll actually need it, so that + it isn't limiting our block production time in the + won slot. + This also allows non-genesis blocks to be received + in the meantime and alleviate the need to produce + one at all, if this won't have block height 1. 
+ *) + let scheduled_genesis_time = + time_of_ms + Int64.( + time - of_int constraint_constants.block_window_duration_ms) + in + let span_till_time = + Block_time.diff scheduled_genesis_time + (Block_time.now time_controller) + |> Block_time.Span.to_time_span + in + let%bind () = after span_till_time in + generate_genesis_proof_if_needed ~genesis_breadcrumb + ~frontier_reader () ) ; + schedule_block_production (scheduled_time, data, winner_pk) ) + let run ~context:(module Context : CONTEXT) ~vrf_evaluator ~prover ~verifier ~trust_system ~get_completed_work ~transaction_resource_pool ~time_controller ~consensus_local_state ~coinbase_receiver ~frontier_reader @@ -1136,6 +1296,9 @@ let run ~context:(module Context : CONTEXT) ~vrf_evaluator ~prover ~verifier "Block producer will begin producing only empty blocks after \ $slot_diff slots" slot_tx_end ; + let next_vrf_check_now = + check_next_block_timing new_global_slot i' + in (* TODO: Re-enable this assertion when it doesn't fail dev demos * (see #5354) * assert ( @@ -1143,183 +1306,34 @@ let run ~context:(module Context : CONTEXT) ~vrf_evaluator ~prover ~verifier ~constants:consensus_constants ~consensus_state ~local_state:consensus_local_state = None ) ; *) + let produce_block_now triple = + ignore + ( Interruptible.finally + (Singleton_supervisor.dispatch production_supervisor triple) + ~f:next_vrf_check_now + : (_, _) Interruptible.t ) + in don't_wait_for - (let%bind () = - if Mina_numbers.Length.(i' > i) then - Vrf_evaluation_state.update_epoch_data ~vrf_evaluator - ~epoch_data_for_vrf ~logger vrf_evaluation_state - ~vrf_poll_interval - else Deferred.unit - in - let%bind () = - (*Poll once every slot if the evaluation for the epoch is not completed or the evaluation is completed*) - if - Mina_numbers.Global_slot_since_hard_fork.( - new_global_slot > slot) - && not (Vrf_evaluation_state.finished vrf_evaluation_state) - then - Vrf_evaluation_state.poll ~vrf_evaluator ~logger - vrf_evaluation_state 
~vrf_poll_interval - else Deferred.unit - in - match Core.Queue.dequeue vrf_evaluation_state.queue with - | None -> ( - (*Keep trying until we get some slots*) - let poll () = - let%bind () = Async.after vrf_poll_interval in - let%map () = - Vrf_evaluation_state.poll ~vrf_evaluator ~logger - vrf_evaluation_state ~vrf_poll_interval - in - Singleton_scheduler.schedule scheduler - (Block_time.now time_controller) - ~f:(check_next_block_timing new_global_slot i') - in - match - Vrf_evaluation_state.evaluator_status vrf_evaluation_state - with - | Completed -> - let epoch_end_time = - Consensus.Hooks.epoch_end_time - ~constants:consensus_constants - epoch_data_for_vrf.epoch - in - set_next_producer_timing (`Check_again epoch_end_time) - consensus_state ; - [%log info] "No more slots won in this epoch" ; - return - (Singleton_scheduler.schedule scheduler epoch_end_time - ~f:(check_next_block_timing new_global_slot i') ) - | At last_slot -> - set_next_producer_timing (`Evaluating_vrf last_slot) - consensus_state ; - poll () - | Start -> - set_next_producer_timing - (`Evaluating_vrf new_global_slot) consensus_state ; - poll () ) - | Some slot_won -> ( - let winning_global_slot = slot_won.global_slot in - let slot, epoch = - let t = - Consensus.Data.Consensus_time.of_global_slot - winning_global_slot ~constants:consensus_constants - in - Consensus.Data.Consensus_time.(slot t, epoch t) - in - [%log info] "Block producer won slot $slot in epoch $epoch" - ~metadata: - [ ( "slot" - , Mina_numbers.Global_slot_since_genesis.( - to_yojson @@ of_uint32 slot) ) - ; ("epoch", Mina_numbers.Length.to_yojson epoch) - ] ; - let now = Block_time.now time_controller in - let curr_global_slot = - Consensus.Data.Consensus_time.( - of_time_exn ~constants:consensus_constants now - |> to_global_slot) - in - let winner_pk = fst slot_won.delegator in - let data = - Consensus.Hooks.get_block_data ~slot_won ~ledger_snapshot - ~coinbase_receiver:!coinbase_receiver - in - if - 
Mina_numbers.Global_slot_since_hard_fork.( - curr_global_slot = winning_global_slot) - then ( - (*produce now*) - [%log info] "Producing a block now" ; - set_next_producer_timing - (`Produce_now (data, winner_pk)) - consensus_state ; - Mina_metrics.(Counter.inc_one Block_producer.slots_won) ; - let%map () = - generate_genesis_proof_if_needed ~genesis_breadcrumb - ~frontier_reader () - in - ignore - ( Interruptible.finally - (Singleton_supervisor.dispatch production_supervisor - (now, data, winner_pk) ) - ~f:(check_next_block_timing new_global_slot i') - : (_, _) Interruptible.t ) ) - else - match - Mina_numbers.Global_slot_since_hard_fork.diff - winning_global_slot curr_global_slot - with - | None -> - [%log warn] - "Skipping block production for global slot \ - $slot_won because it has passed. Current global \ - slot is $curr_slot" - ~metadata: - [ ( "slot_won" - , Mina_numbers.Global_slot_since_hard_fork - .to_yojson winning_global_slot ) - ; ( "curr_slot" - , Mina_numbers.Global_slot_since_hard_fork - .to_yojson curr_global_slot ) - ] ; - return (check_next_block_timing new_global_slot i' ()) - | Some slot_diff -> - [%log info] "Producing a block in $slots slots" - ~metadata: - [ ( "slots" - , Mina_numbers.Global_slot_span.to_yojson - slot_diff ) - ] ; - let time = - Consensus.Data.Consensus_time.( - start_time ~constants:consensus_constants - (of_global_slot ~constants:consensus_constants - winning_global_slot )) - |> Block_time.to_span_since_epoch - |> Block_time.Span.to_ms - in - set_next_producer_timing - (`Produce (time, data, winner_pk)) - consensus_state ; - Mina_metrics.(Counter.inc_one Block_producer.slots_won) ; - let scheduled_time = time_of_ms time in - don't_wait_for - ((* Attempt to generate a genesis proof in the slot - immediately before we'll actually need it, so that - it isn't limiting our block production time in the - won slot. 
- This also allows non-genesis blocks to be received - in the meantime and alleviate the need to produce - one at all, if this won't have block height 1. - *) - let scheduled_genesis_time = - time_of_ms - Int64.( - time - - of_int - constraint_constants - .block_window_duration_ms) - in - let span_till_time = - Block_time.diff scheduled_genesis_time - (Block_time.now time_controller) - |> Block_time.Span.to_time_span - in - let%bind () = after span_till_time in - generate_genesis_proof_if_needed ~genesis_breadcrumb - ~frontier_reader () ) ; - Singleton_scheduler.schedule scheduler scheduled_time - ~f:(fun () -> - ignore - ( Interruptible.finally - (Singleton_supervisor.dispatch - production_supervisor - (scheduled_time, data, winner_pk) ) - ~f: - (check_next_block_timing new_global_slot i') - : (_, _) Interruptible.t ) ) ; - Deferred.return () ) ) + ( iteration + ~schedule_next_vrf_check: + (Fn.compose Deferred.return + (Singleton_scheduler.schedule scheduler + ~f:next_vrf_check_now ) ) + ~produce_block_now: + (Fn.compose Deferred.return produce_block_now) + ~schedule_block_production:(fun (time, data, winner) -> + Singleton_scheduler.schedule scheduler time ~f:(fun () -> + produce_block_now (time, data, winner) ) ; + Deferred.unit ) + ~next_vrf_check_now: + (Fn.compose Deferred.return next_vrf_check_now) + ~genesis_breadcrumb + ~context:(module Context) + ~vrf_evaluator ~time_controller ~coinbase_receiver + ~frontier_reader ~set_next_producer_timing + ~transition_frontier ~vrf_evaluation_state ~epoch_data_for_vrf + ~ledger_snapshot i slot + : unit Deferred.t ) in let start () = check_next_block_timing Mina_numbers.Global_slot_since_hard_fork.zero From e249c48c5ee64dfd9950c211ea610ac0bbb39993 Mon Sep 17 00:00:00 2001 From: georgeee Date: Tue, 15 Oct 2024 09:35:28 +0000 Subject: [PATCH 25/29] Add comments to DeleteRoot function Explain the implementation and extract some code out of the function for better readability. 
--- .../src/libp2p_helper/bitswap_delete.go | 65 ++++++++++++------- .../src/libp2p_helper/bitswap_downloader.go | 5 +- .../src/libp2p_helper/bitswap_test.go | 3 +- .../libp2p_helper/src/libp2p_helper/error.go | 6 ++ 4 files changed, 50 insertions(+), 29 deletions(-) diff --git a/src/app/libp2p_helper/src/libp2p_helper/bitswap_delete.go b/src/app/libp2p_helper/src/libp2p_helper/bitswap_delete.go index 16660d1ceb5..78e82fafa54 100644 --- a/src/app/libp2p_helper/src/libp2p_helper/bitswap_delete.go +++ b/src/app/libp2p_helper/src/libp2p_helper/bitswap_delete.go @@ -5,7 +5,6 @@ import ( "errors" "github.com/ipfs/go-cid" - ipld "github.com/ipfs/go-ipld-format" ) func ClearRootDownloadState(bs BitswapState, root root) { @@ -29,35 +28,48 @@ func ClearRootDownloadState(bs BitswapState, root root) { state.cancelF() } +// getTag retrieves root's tag, whether the root is still being processed +// or its processing was completed +func getTag(bs BitswapState, root BitswapBlockLink) (tag BitswapDataTag, err error) { + state, has := bs.RootDownloadStates()[root] + if has { + tag = state.getTag() + } else { + err = bs.ViewBlock(root, func(b []byte) error { + _, fullBlockData, err := ReadBitswapBlock(b) + if err != nil { + return err + } + if len(fullBlockData) < 5 { + return errors.New("root block is too short") + } + tag = BitswapDataTag(fullBlockData[4]) + return nil + }) + } + return +} + func DeleteRoot(bs BitswapState, root BitswapBlockLink) (BitswapDataTag, error) { if err := bs.SetStatus(root, codanet.Deleting); err != nil { return 255, err } - var tag BitswapDataTag - { - // Determining tag of root being deleted - state, has := bs.RootDownloadStates()[root] - if has { - tag = state.getTag() - } else { - err := bs.ViewBlock(root, func(b []byte) error { - _, fullBlockData, err := ReadBitswapBlock(b) - if err != nil { - return err - } - if len(fullBlockData) < 5 { - return errors.New("root block is too short") - } - tag = BitswapDataTag(fullBlockData[4]) - return nil - }) - 
if err != nil { - return 255, err - } - } + tag, err := getTag(bs, root) + if err != nil { + return tag, err } ClearRootDownloadState(bs, root) + + // Performing breadth-first search (BFS) + + // descendantMap is a "visited" set, to ensure we do not + // traverse into nodes we once visited descendantMap := map[[32]byte]struct{}{root: {}} + + // allDescendants is a list of all discovered nodes, + // serving as both "queue" to be iterated over during BFS, + // and as a list of all nodes visited at the end of + // BFS iteration allDescendants := []BitswapBlockLink{root} viewBlockF := func(b []byte) error { links, _, err := ReadBitswapBlock(b) @@ -66,17 +78,22 @@ func DeleteRoot(bs BitswapState, root BitswapBlockLink) (BitswapDataTag, error) var l2 BitswapBlockLink copy(l2[:], l[:]) _, has := descendantMap[l2] + // Checking if the nodes was visited before if !has { descendantMap[l2] = struct{}{} + // Add an item to BFS queue allDescendants = append(allDescendants, l2) } } } return err } + // Iteration is done via index-based loop, because underlying + // array gets extended during iteration, and regular iterator + // wouldn't see these changes for i := 0; i < len(allDescendants); i++ { block := allDescendants[i] - if err := bs.ViewBlock(block, viewBlockF); err != nil && err != (ipld.ErrNotFound{Cid: codanet.BlockHashToCid(block)}) { + if err := bs.ViewBlock(block, viewBlockF); err != nil && !isBlockNotFound(block, err) { return tag, err } } diff --git a/src/app/libp2p_helper/src/libp2p_helper/bitswap_downloader.go b/src/app/libp2p_helper/src/libp2p_helper/bitswap_downloader.go index 201af79abec..0a620fc91c5 100644 --- a/src/app/libp2p_helper/src/libp2p_helper/bitswap_downloader.go +++ b/src/app/libp2p_helper/src/libp2p_helper/bitswap_downloader.go @@ -10,7 +10,6 @@ import ( blocks "github.com/ipfs/go-block-format" "github.com/ipfs/go-cid" - ipld "github.com/ipfs/go-ipld-format" logging "github.com/ipfs/go-log/v2" ) @@ -140,7 +139,7 @@ func kickStartRootDownload(root_ 
BitswapBlockLink, tag BitswapDataTag, bs Bitswa copy(rootBlock, b) return nil } - if err := bs.ViewBlock(root_, rootBlockViewF); err != nil && err != (ipld.ErrNotFound{Cid: codanet.BlockHashToCid(root_)}) { + if err := bs.ViewBlock(root_, rootBlockViewF); err != nil && !isBlockNotFound(root_, err) { handleError(err) return } @@ -316,7 +315,7 @@ func processDownloadedBlock(block blocks.Block, bs BitswapState) { b, _ := blocks.NewBlockWithCid(blockBytes, childId) blocksToProcess = append(blocksToProcess, b) } else { - if err != (ipld.ErrNotFound{Cid: codanet.BlockHashToCid(link)}) { + if !isBlockNotFound(link, err) { // we still schedule blocks for downloading // this case should rarely happen in practice bitswapLogger.Warnf("Failed to retrieve block %s from storage: %s", childId, err) diff --git a/src/app/libp2p_helper/src/libp2p_helper/bitswap_test.go b/src/app/libp2p_helper/src/libp2p_helper/bitswap_test.go index 5af46623601..93b25d85cbc 100644 --- a/src/app/libp2p_helper/src/libp2p_helper/bitswap_test.go +++ b/src/app/libp2p_helper/src/libp2p_helper/bitswap_test.go @@ -15,7 +15,6 @@ import ( capnp "capnproto.org/go/capnp/v3" "github.com/ipfs/go-cid" - ipld "github.com/ipfs/go-ipld-format" multihash "github.com/multiformats/go-multihash" "github.com/stretchr/testify/require" "golang.org/x/crypto/blake2b" @@ -189,7 +188,7 @@ func confirmBlocksNotInStorage(bs *BitswapCtx, resource []byte) error { }) if err == nil { return fmt.Errorf("block %s wasn't deleted", codanet.BlockHashToCidSuffix(h)) - } else if err != (ipld.ErrNotFound{Cid: codanet.BlockHashToCid(h)}) { + } else if !isBlockNotFound(h, err) { return err } } diff --git a/src/app/libp2p_helper/src/libp2p_helper/error.go b/src/app/libp2p_helper/src/libp2p_helper/error.go index c42db326a81..49ffc3a3ef1 100644 --- a/src/app/libp2p_helper/src/libp2p_helper/error.go +++ b/src/app/libp2p_helper/src/libp2p_helper/error.go @@ -1,9 +1,11 @@ package main import ( + "codanet" "fmt" "github.com/go-errors/errors" + ipld 
"github.com/ipfs/go-ipld-format" ) // TODO: wrap these in a new type, encode them differently in the rpc mainloop @@ -48,3 +50,7 @@ func needsConfigure() error { func needsDHT() error { return badRPC(errors.New("helper not yet joined to pubsub")) } + +func isBlockNotFound(block BitswapBlockLink, err error) bool { + return err == ipld.ErrNotFound{Cid: codanet.BlockHashToCid(block)} +} From d432976bcff7284013d9dc0030409184d343bacf Mon Sep 17 00:00:00 2001 From: dkijania Date: Tue, 15 Oct 2024 15:19:27 +0200 Subject: [PATCH 26/29] reference env vars in Benchmarks.dhall --- buildkite/src/Constants/Benchmarks.dhall | 9 ++++----- 1 file changed, 4 insertions(+), 5 deletions(-) diff --git a/buildkite/src/Constants/Benchmarks.dhall b/buildkite/src/Constants/Benchmarks.dhall index 2edf5579bc9..d303dd42499 100644 --- a/buildkite/src/Constants/Benchmarks.dhall +++ b/buildkite/src/Constants/Benchmarks.dhall @@ -1,11 +1,10 @@ let Spec = { Type = { tokenEnvName : Text, bucket : Text, org : Text, host : Text } , default = - { tokenEnvName = - "hxEc4COeR59kJXDyQp9g1TB_kKqxDspyBKdl0omdNY615j_2Opf-URZO2NeA3gy4dotlJ8vBrdds_ribgl58dw==" - , bucket = "mina-benchmarks" - , org = "Dev" - , host = "https://eu-central-1-1.aws.cloud2.influxdata.com/" + { tokenEnvName = "\\\${INFLUX_TOKEN}" + , bucket = "\\\${INFLUX_BUCKET_NAME}" + , org = "\\\${INFLUX_ORG}" + , host = "\\\${INFLUX_HOST}" } } From 317ef7747ff4da50e9ac157dcbba2d665cb5404c Mon Sep 17 00:00:00 2001 From: svv232 Date: Wed, 16 Oct 2024 00:31:08 -0400 Subject: [PATCH 27/29] fixing the type signature in storage test --- .../libp2p_helper/src/libp2p_helper/bitswap_downloader_test.go | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/src/app/libp2p_helper/src/libp2p_helper/bitswap_downloader_test.go b/src/app/libp2p_helper/src/libp2p_helper/bitswap_downloader_test.go index af1b2b5378b..5855a13dde2 100644 --- a/src/app/libp2p_helper/src/libp2p_helper/bitswap_downloader_test.go +++ 
b/src/app/libp2p_helper/src/libp2p_helper/bitswap_downloader_test.go @@ -670,7 +670,7 @@ func (bs *testBitswapState) RegisterDeadlineTracker(root_ root, downloadTimeout downloadTimeout time.Duration }{root: root_, downloadTimeout: downloadTimeout}) } -func (bs *testBitswapState) SendResourceUpdate(type_ ipc.ResourceUpdateType, root root) { +func (bs *testBitswapState) SendResourceUpdate(type_ ipc.ResourceUpdateType, _tag BitswapDataTag, root root) { type1, has := bs.resourceUpdates[root] if has && type1 != type_ { panic("duplicate resource update") From 01b65d90285a3694d7c1f31c95a8f7c07edbe22b Mon Sep 17 00:00:00 2001 From: dkijania Date: Wed, 16 Oct 2024 20:14:36 +0200 Subject: [PATCH 28/29] never remove first block as well --- src/test/archive/patch_archive_test/patch_archive_test.ml | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/src/test/archive/patch_archive_test/patch_archive_test.ml b/src/test/archive/patch_archive_test/patch_archive_test.ml index 5ba17608041..cdeaa635ddf 100644 --- a/src/test/archive/patch_archive_test/patch_archive_test.ml +++ b/src/test/archive/patch_archive_test/patch_archive_test.ml @@ -77,10 +77,10 @@ let main ~db_uri ~network_data_folder () = let n = List.init missing_blocks_count ~f:(fun _ -> - (* never remove last block as missing-block-guardian can have issues when patching it + (* never remove last and first block as missing-block-guardian can have issues when patching it as it patching only gaps *) - Random.int (List.length extensional_files - 1) ) + Random.int (List.length extensional_files - 2) + 1 ) in let unpatched_extensional_files = From 784206a972b80e50b955fb9d84185d392cc766ad Mon Sep 17 00:00:00 2001 From: dkijania Date: Thu, 17 Oct 2024 08:12:57 +0200 Subject: [PATCH 29/29] fix Bench/Base.dhall --- buildkite/src/Command/Bench/Base.dhall | 3 +++ 1 file changed, 3 insertions(+) diff --git a/buildkite/src/Command/Bench/Base.dhall b/buildkite/src/Command/Bench/Base.dhall index 3d2431943b6..48427e6738d 
100644 --- a/buildkite/src/Command/Bench/Base.dhall +++ b/buildkite/src/Command/Bench/Base.dhall @@ -10,6 +10,8 @@ let DebianVersions = ../../Constants/DebianVersions.dhall let RunInToolchain = ../../Command/RunInToolchain.dhall +let Network = ../../Constants/Network.dhall + let Profiles = ../../Constants/Profiles.dhall let Command = ../../Command/Base.dhall @@ -42,6 +44,7 @@ let Spec = , dependsOn = DebianVersions.dependsOn DebianVersions.DebVersion.Bullseye + Network.Type.Devnet Profiles.Type.Standard , additionalDirtyWhen = [] : List SelectFiles.Type , yellowThreshold = 0.1