From 3a2561d10c32cd832b8daae1dbb7ad1eedbfc291 Mon Sep 17 00:00:00 2001 From: dkijania Date: Tue, 30 Jul 2024 11:37:27 +0200 Subject: [PATCH 001/234] update badges and logo for README.md --- README.md | 14 ++++++++++++-- 1 file changed, 12 insertions(+), 2 deletions(-) diff --git a/README.md b/README.md index 49c0cf54099..ee70b7c60c6 100644 --- a/README.md +++ b/README.md @@ -1,8 +1,18 @@ +### Build status + +| Develop | Compatible | Master | +| ------- | ---------- | ---------- | +| [![Build status - develop](https://badge.buildkite.com/0c47452f3ea619d3217d388e0de522b218db28c3e161887a9a.svg?branch=develop)](https://buildkite.com/o-1-labs-2/mina-end-to-end-nightlies) | [![Build status - compatible](https://badge.buildkite.com/0c47452f3ea619d3217d388e0de522b218db28c3e161887a9a.svg?branch=compatible)](https://buildkite.com/o-1-labs-2/mina-end-to-end-nightlies) | [![Build status - master](https://badge.buildkite.com/0c47452f3ea619d3217d388e0de522b218db28c3e161887a9a.svg?branch=master)](https://buildkite.com/o-1-labs-2/mina-end-to-end-nightlies) + +
+
+
+ - Mina logo + Mina logo -# Mina + Mina is the first cryptocurrency with a lightweight, constant-sized blockchain. This is the main source code repository for the Mina project and contains code for the OCaml protocol implementation, the [Mina Protocol website](https://minaprotocol.com), and wallet. Enjoy! From f81a431cdecb24fcf2addcc49c77bc4f452cad59 Mon Sep 17 00:00:00 2001 From: dkijania Date: Tue, 30 Jul 2024 11:54:59 +0200 Subject: [PATCH 002/234] add some spaces after and before logo --- README.md | 4 +--- 1 file changed, 1 insertion(+), 3 deletions(-) diff --git a/README.md b/README.md index ee70b7c60c6..b7c73df3815 100644 --- a/README.md +++ b/README.md @@ -4,15 +4,13 @@ | ------- | ---------- | ---------- | | [![Build status - develop](https://badge.buildkite.com/0c47452f3ea619d3217d388e0de522b218db28c3e161887a9a.svg?branch=develop)](https://buildkite.com/o-1-labs-2/mina-end-to-end-nightlies) | [![Build status - compatible](https://badge.buildkite.com/0c47452f3ea619d3217d388e0de522b218db28c3e161887a9a.svg?branch=compatible)](https://buildkite.com/o-1-labs-2/mina-end-to-end-nightlies) | [![Build status - master](https://badge.buildkite.com/0c47452f3ea619d3217d388e0de522b218db28c3e161887a9a.svg?branch=master)](https://buildkite.com/o-1-labs-2/mina-end-to-end-nightlies) -
-

Mina logo - +
Mina is the first cryptocurrency with a lightweight, constant-sized blockchain. This is the main source code repository for the Mina project and contains code for the OCaml protocol implementation, the [Mina Protocol website](https://minaprotocol.com), and wallet. Enjoy! From 4466aeb535fd6ab2f347e04b28d2faa66a3172f5 Mon Sep 17 00:00:00 2001 From: georgeee Date: Sat, 14 Sep 2024 21:54:52 +0000 Subject: [PATCH 003/234] Remove dead code --- src/lib/mina_base/zkapp_call_forest.ml | 6 +----- src/lib/mina_ledger/ledger.ml | 27 -------------------------- 2 files changed, 1 insertion(+), 32 deletions(-) diff --git a/src/lib/mina_base/zkapp_call_forest.ml b/src/lib/mina_base/zkapp_call_forest.ml index 5c67949c94a..4b041e7451b 100644 --- a/src/lib/mina_base/zkapp_call_forest.ml +++ b/src/lib/mina_base/zkapp_call_forest.ml @@ -25,11 +25,6 @@ let pop_exn : t -> (Account_update.t * t) * t = function | _ -> failwith "pop_exn" -let push ~account_update ~calls t = - Zkapp_command.Call_forest.cons ~calls account_update t - -let hash (t : t) = Zkapp_command.Call_forest.hash t - open Snark_params.Tick.Run module Checked = struct @@ -210,6 +205,7 @@ module Checked = struct } ) : (account_update * t) * t ) ) + (* TODO Consider moving out of mina_base *) let push ~account_update: { account_update = { hash = account_update_hash; data = account_update } diff --git a/src/lib/mina_ledger/ledger.ml b/src/lib/mina_ledger/ledger.ml index d84b7a9afcc..2fe2c10deeb 100644 --- a/src/lib/mina_ledger/ledger.ml +++ b/src/lib/mina_ledger/ledger.ml @@ -341,33 +341,6 @@ module Ledger_inner = struct Debug_assert.debug_assert (fun () -> [%test_eq: Ledger_hash.t] start_hash (merkle_root ledger) ) ; (merkle_path ledger new_loc, Account.empty) - - let _handler t = - let open Snark_params.Tick in - let path_exn idx = - List.map (merkle_path_at_index_exn t idx) ~f:(function - | `Left h -> - h - | `Right h -> - h ) - in - stage (fun (With { request; respond }) -> - match request with - | Ledger_hash.Get_element idx -> - let elt = get_at_index_exn t idx in - let path = (path_exn idx :> Random_oracle.Digest.t list) in - respond (Provide (elt, path)) - | Ledger_hash.Get_path idx -> - let path = (path_exn idx :> Random_oracle.Digest.t list) in - respond (Provide path) - | Ledger_hash.Set (idx, account) -> - set_at_index_exn t idx account ; - respond (Provide ()) - | Ledger_hash.Find_index pk -> - let index = index_of_account_exn t pk in - respond (Provide index) - | _ -> - unhandled ) end include Ledger_inner From 635b3abb0e3f676e6163e37a14e5c35fe1a81401 Mon Sep 17 00:00:00 2001 From: georgeee Date: Tue, 3 Sep 2024 20:16:18 +0000 Subject: [PATCH 004/234] Optimize the non-present zkapp uri hashing --- src/lib/mina_base/zkapp_account.ml | 57 ++++++++++++++---------------- 1 file changed, 27 insertions(+), 30 deletions(-) diff --git a/src/lib/mina_base/zkapp_account.ml b/src/lib/mina_base/zkapp_account.ml index 663df35a545..9052abd8832 100644 --- a/src/lib/mina_base/zkapp_account.ml +++ b/src/lib/mina_base/zkapp_account.ml @@ -297,32 +297,35 @@ end (* This preimage cannot be attained by any string, due to the trailing [true] added below. *) -let zkapp_uri_non_preimage = - lazy (Random_oracle_input.Chunked.field_elements [| Field.zero; Field.zero |]) - -let hash_zkapp_uri_opt (zkapp_uri_opt : string option) = - let input = - match zkapp_uri_opt with - | Some zkapp_uri -> - (* We use [length*8 + 1] to pass a final [true] after the end of the - string, to ensure that trailing null bytes don't alias in the hash - preimage. 
- *) - let bits = Array.create ~len:((String.length zkapp_uri * 8) + 1) true in - String.foldi zkapp_uri ~init:() ~f:(fun i () c -> - let c = Char.to_int c in - (* Insert the bits into [bits], LSB order. *) - for j = 0 to 7 do - (* [Int.test_bit c j] *) - bits.((i * 8) + j) <- Int.bit_and c (1 lsl j) <> 0 - done ) ; +let zkapp_uri_non_preimage_hash = + lazy + ( Random_oracle.pack_input + (Random_oracle_input.Chunked.field_elements + [| Field.zero; Field.zero |] ) + |> Random_oracle.hash ~init:Hash_prefix_states.zkapp_uri ) + +let hash_zkapp_uri_opt = function + | None -> + Lazy.force zkapp_uri_non_preimage_hash + | Some zkapp_uri -> + (* We use [length*8 + 1] to pass a final [true] after the end of the + string, to ensure that trailing null bytes don't alias in the hash + preimage. + *) + let bits = Array.create ~len:((String.length zkapp_uri * 8) + 1) true in + String.foldi zkapp_uri ~init:() ~f:(fun i () c -> + let c = Char.to_int c in + (* Insert the bits into [bits], LSB order. *) + for j = 0 to 7 do + (* [Int.test_bit c j] *) + bits.((i * 8) + j) <- Int.bit_and c (1 lsl j) <> 0 + done ) ; + let input = Random_oracle_input.Chunked.packeds (Array.map ~f:(fun b -> (field_of_bool b, 1)) bits) - | None -> - Lazy.force zkapp_uri_non_preimage - in - Random_oracle.pack_input input - |> Random_oracle.hash ~init:Hash_prefix_states.zkapp_uri + in + Random_oracle.pack_input input + |> Random_oracle.hash ~init:Hash_prefix_states.zkapp_uri let hash_zkapp_uri (zkapp_uri : string) = hash_zkapp_uri_opt (Some zkapp_uri) @@ -389,12 +392,6 @@ let digest (t : t) = let default_digest = lazy (digest default) -let hash_zkapp_account_opt' = function - | None -> - Lazy.force default_digest - | Some (a : t) -> - digest a - let action_state_deriver obj = let open Fields_derivers_zkapps.Derivers in let list_5 = list ~static_length:5 (field @@ o ()) in From 3979aba4705b473b59c5c15ff2fe2fdd2ece4924 Mon Sep 17 00:00:00 2001 From: Dariusz Kijania Date: Wed, 18 Sep 2024 17:15:33 +0200 Subject: [PATCH 005/234] Update README.md added new nicer header and badges --- README.md | 22 +++++++++++++++------- 1 file changed, 15 insertions(+), 7 deletions(-) diff --git a/README.md b/README.md index 56b1e10c554..66e10f111f1 100644 --- a/README.md +++ b/README.md @@ -1,14 +1,18 @@ -### Build status + -| Develop | Compatible | Master | -| ------- | ---------- | ---------- | -| [![Build status - develop](https://badge.buildkite.com/0c47452f3ea619d3217d388e0de522b218db28c3e161887a9a.svg?branch=develop)](https://buildkite.com/o-1-labs-2/mina-end-to-end-nightlies) | [![Build status - compatible](https://badge.buildkite.com/0c47452f3ea619d3217d388e0de522b218db28c3e161887a9a.svg?branch=compatible)](https://buildkite.com/o-1-labs-2/mina-end-to-end-nightlies) | [![Build status - master](https://badge.buildkite.com/0c47452f3ea619d3217d388e0de522b218db28c3e161887a9a.svg?branch=master)](https://buildkite.com/o-1-labs-2/mina-end-to-end-nightlies) +

Mina

-
+
-# Mina + ![GitHub stars](https://img.shields.io/github/stars/minaprotocol/mina)  ![GitHub forks](https://img.shields.io/github/forks/minaprotocol/mina) -
+![GitHub contributors](https://img.shields.io/github/contributors/minaprotocol/mina)  ![GitHub commit activity](https://img.shields.io/github/commit-activity/m/minaprotocol/mina)  ![GitHub last commit](https://img.shields.io/github/last-commit/minaprotocol/mina) + +| Develop[^1] | Compatible[^2] | Master[^3] | +| ------- | ---------- | ---------- | +| [![Build status - develop](https://badge.buildkite.com/0c47452f3ea619d3217d388e0de522b218db28c3e161887a9a.svg?branch=develop)](https://buildkite.com/o-1-labs-2/mina-end-to-end-nightlies) | [![Build status - compatible](https://badge.buildkite.com/0c47452f3ea619d3217d388e0de522b218db28c3e161887a9a.svg?branch=compatible)](https://buildkite.com/o-1-labs-2/mina-end-to-end-nightlies) | [![Build status - master](https://badge.buildkite.com/0c47452f3ea619d3217d388e0de522b218db28c3e161887a9a.svg?branch=master)](https://buildkite.com/o-1-labs-2/mina-end-to-end-nightlies) + +
 Mina is the first cryptocurrency with a lightweight, constant-sized blockchain. This is the main source code repository for the Mina project and contains code for the OCaml protocol implementation, the [Mina Protocol website](https://minaprotocol.com), and wallet. Enjoy!
 
@@ -60,3 +64,7 @@ The [Node Developers](https://docs.minaprotocol.com/node-developers) docs contai
 [Apache 2.0](LICENSE)
 
 Commits older than 2018-10-03 do not have a [LICENSE](LICENSE) file or this notice, but are distributed under the same terms.
+
+[^1]: Develop is a mainline branch containing code that may not be compatible with the current mainnet and may require a major upgrade (hardfork).
+[^2]: Compatible is a mainline branch containing code which does not need a hardfork in order to apply it to mainnet.
+[^3]: Master is the branch which contains the current mainnet code.

From 43ac16a9e60acc5e3a9971549c4a6fa6fce068e9 Mon Sep 17 00:00:00 2001
From: dkijania
Date: Wed, 11 Sep 2024 20:32:03 +0200
Subject: [PATCH 006/234] Patch missing type shapes

---
 ...ersion-linter-patch-missing-type-shapes.sh | 42 ++++++++++++
 buildkite/src/Jobs/Test/VersionLint.dhall     | 16 ++---
 scripts/version-linter.py                     | 65 ++++++++++++-------
 3 files changed, 91 insertions(+), 32 deletions(-)
 create mode 100755 buildkite/scripts/version-linter-patch-missing-type-shapes.sh

diff --git a/buildkite/scripts/version-linter-patch-missing-type-shapes.sh b/buildkite/scripts/version-linter-patch-missing-type-shapes.sh
new file mode 100755
index 00000000000..bf5a6d8a62f
--- /dev/null
+++ b/buildkite/scripts/version-linter-patch-missing-type-shapes.sh
@@ -0,0 +1,42 @@
+#!/bin/bash
+
+set -eox pipefail
+
+if [[ $# -ne 1 ]]; then
+  echo "Usage: $0 "
+  exit 1
+fi
+
+git config --global --add safe.directory /workdir
+
+source buildkite/scripts/handle-fork.sh
+source buildkite/scripts/export-git-env-vars.sh
+
+release_branch=${REMOTE}/$1
+
+RELEASE_BRANCH_COMMIT=$(git log -n 1 --format="%h" --abbrev=7 --no-merges $release_branch)
+
+function checkout_and_dump() {
+    local __commit=$1
+    git checkout $__commit
+    git submodule sync
+    git submodule update --init --recursive
+    eval $(opam config env)
+    dune exec src/app/cli/src/mina.exe internal dump-type-shapes > ${__commit:0:7}-type-shapes.txt
+}
+
+function revert_checkout() {
+  git checkout $BUILDKITE_COMMIT
+  git submodule sync
+  git submodule update --init --recursive
+}
+
+if ! $(gsutil ls gs://mina-type-shapes/$BUILDKITE_COMMIT 2>/dev/null); then
+  checkout_and_dump $BUILDKITE_COMMIT
+  revert_checkout
+fi
+
+if ! 
$(gsutil ls gs://mina-type-shapes/$RELEASE_BRANCH_COMMIT 2>/dev/null); then + checkout_and_dump $RELEASE_BRANCH_COMMIT + revert_checkout +fi diff --git a/buildkite/src/Jobs/Test/VersionLint.dhall b/buildkite/src/Jobs/Test/VersionLint.dhall index bc47db104ad..35eaa69c9a3 100644 --- a/buildkite/src/Jobs/Test/VersionLint.dhall +++ b/buildkite/src/Jobs/Test/VersionLint.dhall @@ -14,12 +14,12 @@ let JobSpec = ../../Pipeline/JobSpec.dhall let Command = ../../Command/Base.dhall -let RunInToolchain = ../../Command/RunInToolchain.dhall - let Docker = ../../Command/Docker/Type.dhall let Size = ../../Command/Size.dhall +let RunInToolchain = ../../Command/RunInToolchain.dhall + let dependsOn = [ { name = "MinaArtifactBullseye", key = "build-deb-pkg" } ] let buildTestCmd @@ -34,15 +34,17 @@ let buildTestCmd RunInToolchain.runInToolchain ([] : List Text) "buildkite/scripts/dump-mina-type-shapes.sh" + # RunInToolchain.runInToolchain + ([] : List Text) + "buildkite/scripts/version-linter-patch-missing-type-shapes.sh ${release_branch}" # [ Cmd.run - "gsutil cp \$(git log -n 1 --format=%h --abbrev=7)-type_shape.txt \$MINA_TYPE_SHAPE gs://mina-type-shapes" + "gsutil cp *-type_shape.txt \$MINA_TYPE_SHAPE gs://mina-type-shapes" ] # RunInToolchain.runInToolchain ([] : List Text) "buildkite/scripts/version-linter.sh ${release_branch}" , label = "Versioned type linter for ${release_branch}" , key = "version-linter-${release_branch}" - , soft_fail = Some soft_fail , target = cmd_target , docker = None Docker.Type , depends_on = dependsOn @@ -62,11 +64,7 @@ in Pipeline.build , dirtyWhen = lintDirtyWhen , path = "Test" , name = "VersionLint" - , tags = - [ PipelineTag.Type.Long - , PipelineTag.Type.Test - , PipelineTag.Type.Stable - ] + , tags = [ PipelineTag.Type.Long, PipelineTag.Type.Test ] } , steps = [ buildTestCmd diff --git a/scripts/version-linter.py b/scripts/version-linter.py index 0274a1f1c3f..ab9c8337b72 100755 --- a/scripts/version-linter.py +++ b/scripts/version-linter.py @@ -32,31 +32,50 @@ def set_error(): global exit_code exit_code=1 -def branch_commit(branch): +def branch_commits(branch,n): print ('Retrieving', branch, 'head commit...') result=subprocess.run(['git','log','-n','1','--format="%h"','--abbrev=7',f'{branch}'], capture_output=True) output=result.stdout.decode('ascii') print ('command stdout:', output) print ('command stderr:', result.stderr.decode('ascii')) - return output.replace('"','').replace('\n','') - -def download_type_shapes(role,branch,sha1) : + return output.replace('"','').splitlines() + +def url_to_type_shape_file(file): + ''' + Return url to mina type shape file + ''' + return f'https://storage.googleapis.com/mina-type-shapes/{file}' + +def sha_exists(sha1): + ''' + Checks if mina type shape with given sha exists + ''' + file = type_shape_file(sha1) + return url_exists(url_to_type_shape_file(file)) + +def url_exists(url): + ''' + Checks if url exists (by sending head and validating that status code is ok) + ''' + return requests.head(url).status_code == 200 + +def find_latest_type_shape_ref_on(branch,n=1): + ''' + Function tries to find best type shape reference commit by retrieving n last commits + and iterate over collection testing if any item points to valid url + ''' + commits = branch_commits(branch, n) + candidates = list(filter(lambda x: sha_exists(x), commits)) + if not any(candidates): + raise Exception(f'Cannot find type shape file for {branch}. 
I tried {n} last commits') + else: + return candidates[0] + +def download_type_shape(role,branch,sha1) : file=type_shape_file(sha1) print ('Downloading type shape file',file,'for',role,'branch',branch,'at commit',sha1) - url = f'https://storage.googleapis.com/mina-type-shapes/{file}' - r = requests.head(url, allow_redirects=True) - if r.status_code != 200: - print ("cannot fetch file reference from non-existing path: ${url}") - print ("looks like you need to generate it. Please use below steps") - print (f"git checkout ${sha1}") - print ("nix develop mina") - print (f"dune exec src/app/cli/src/mina.exe internal dump-type-shape > ${sha1}-type_shape.txt") - print ("gsutil cp gs://mina-type-shapes ${sha1}-type_shape.txt ") - - sys.exit(1) - - result=subprocess.run(['wget','--no-clobber',url]) + result=subprocess.run(['wget','--no-clobber',url_to_type_shape_file(file)]) def type_shape_file(sha1) : # created by buildkite build-artifact script @@ -250,18 +269,18 @@ def assert_commit(commit, desc): subprocess.run(['git','fetch'],capture_output=False) - base_branch_commit=branch_commit(base_branch) - download_type_shapes('base',base_branch,base_branch_commit) + base_branch_commit = find_latest_type_shape_ref_on(base_branch,n=10) + download_type_shape('base',base_branch,base_branch_commit) print('') - release_branch_commit=branch_commit(release_branch) - download_type_shapes('release',release_branch,release_branch_commit) + release_branch_commit=find_latest_type_shape_ref_on(release_branch, n=10) + download_type_shape('release',release_branch,release_branch_commit) print('') - pr_branch_commit=branch_commit(pr_branch) - download_type_shapes('pr',pr_branch,pr_branch_commit) + pr_branch_commit=find_latest_type_shape_ref_on(pr_branch) + download_type_shape('pr',pr_branch,pr_branch_commit) print('') From 24d56e6da337590df255c75330fad25056a7f26e Mon Sep 17 00:00:00 2001 From: dkijania Date: Mon, 16 Sep 2024 12:29:27 +0200 Subject: [PATCH 007/234] use soft_fail in VerisionLint dhall --- buildkite/src/Jobs/Test/VersionLint.dhall | 1 + 1 file changed, 1 insertion(+) diff --git a/buildkite/src/Jobs/Test/VersionLint.dhall b/buildkite/src/Jobs/Test/VersionLint.dhall index 35eaa69c9a3..a951ea817b8 100644 --- a/buildkite/src/Jobs/Test/VersionLint.dhall +++ b/buildkite/src/Jobs/Test/VersionLint.dhall @@ -48,6 +48,7 @@ let buildTestCmd , target = cmd_target , docker = None Docker.Type , depends_on = dependsOn + , soft_fail = Some soft_fail , artifact_paths = [ S.contains "core_dumps/*" ] } From 6a793941fb7beebef3ef346fc882d4b3f6a205cd Mon Sep 17 00:00:00 2001 From: dkijania Date: Wed, 18 Sep 2024 10:44:29 +0200 Subject: [PATCH 008/234] move gsutil upload to docker --- buildkite/scripts/dump-mina-type-shapes.sh | 2 ++ buildkite/scripts/gsutil-upload.sh | 11 +++++++++++ .../version-linter-patch-missing-type-shapes.sh | 4 +++- buildkite/src/Jobs/Test/VersionLint.dhall | 3 --- 4 files changed, 16 insertions(+), 4 deletions(-) create mode 100755 buildkite/scripts/gsutil-upload.sh diff --git a/buildkite/scripts/dump-mina-type-shapes.sh b/buildkite/scripts/dump-mina-type-shapes.sh index 57d3c2b2302..5c1d402e215 100755 --- a/buildkite/scripts/dump-mina-type-shapes.sh +++ b/buildkite/scripts/dump-mina-type-shapes.sh @@ -20,3 +20,5 @@ export TYPE_SHAPE_FILE=${MINA_COMMIT_SHA1}-type_shape.txt echo "--- Create type shapes git note for commit: ${MINA_COMMIT_SHA1}" mina internal dump-type-shapes > ${TYPE_SHAPE_FILE} + +source buildkite/scripts/gsutil-upload.sh ${TYPE_SHAPE_FILE} gs://mina-type-shapes \ No newline at end of 
file diff --git a/buildkite/scripts/gsutil-upload.sh b/buildkite/scripts/gsutil-upload.sh new file mode 100755 index 00000000000..888afc94cf1 --- /dev/null +++ b/buildkite/scripts/gsutil-upload.sh @@ -0,0 +1,11 @@ +#!/bin/bash + +KEY_FILE=/var/secrets/gcloud/key.json + +if [ ! -f $KEY_FILE ]; then + echo "Cannot use gsutil for upload as key file cannot be foud in $KEY_FILE" +fi + +gcloud auth activate-service-account --key-file=$KEY_FILE + +gsutil cp $1 $2 \ No newline at end of file diff --git a/buildkite/scripts/version-linter-patch-missing-type-shapes.sh b/buildkite/scripts/version-linter-patch-missing-type-shapes.sh index bf5a6d8a62f..75590c93a04 100755 --- a/buildkite/scripts/version-linter-patch-missing-type-shapes.sh +++ b/buildkite/scripts/version-linter-patch-missing-type-shapes.sh @@ -22,7 +22,9 @@ function checkout_and_dump() { git submodule sync git submodule update --init --recursive eval $(opam config env) - dune exec src/app/cli/src/mina.exe internal dump-type-shapes > ${__commit:0:7}-type-shapes.txt + TYPE_SHAPE_FILE=${__commit:0:7}-type_shape.txt + dune exec src/app/cli/src/mina.exe internal dump-type-shapes > ${TYPE_SHAPE_FILE} + source buildkite/scripts/gsutil-upload.sh ${TYPE_SHAPE_FILE} gs://mina-type-shapes } function revert_checkout() { diff --git a/buildkite/src/Jobs/Test/VersionLint.dhall b/buildkite/src/Jobs/Test/VersionLint.dhall index a951ea817b8..48dde5bea09 100644 --- a/buildkite/src/Jobs/Test/VersionLint.dhall +++ b/buildkite/src/Jobs/Test/VersionLint.dhall @@ -37,9 +37,6 @@ let buildTestCmd # RunInToolchain.runInToolchain ([] : List Text) "buildkite/scripts/version-linter-patch-missing-type-shapes.sh ${release_branch}" - # [ Cmd.run - "gsutil cp *-type_shape.txt \$MINA_TYPE_SHAPE gs://mina-type-shapes" - ] # RunInToolchain.runInToolchain ([] : List Text) "buildkite/scripts/version-linter.sh ${release_branch}" From 09225829d0e100820e12e2e78eb77eab6957b910 Mon Sep 17 00:00:00 2001 From: dkijania Date: Wed, 18 Sep 2024 11:11:22 +0200 Subject: [PATCH 009/234] dhall make lint --- buildkite/src/Jobs/Test/VersionLint.dhall | 2 -- 1 file changed, 2 deletions(-) diff --git a/buildkite/src/Jobs/Test/VersionLint.dhall b/buildkite/src/Jobs/Test/VersionLint.dhall index 48dde5bea09..833a741519a 100644 --- a/buildkite/src/Jobs/Test/VersionLint.dhall +++ b/buildkite/src/Jobs/Test/VersionLint.dhall @@ -1,5 +1,3 @@ -let Cmd = ../../Lib/Cmds.dhall - let S = ../../Lib/SelectFiles.dhall let B = ../../External/Buildkite.dhall From 38208cbe6598755acb0c563c2fe857ca81a4691c Mon Sep 17 00:00:00 2001 From: dkijania Date: Wed, 18 Sep 2024 13:08:45 +0200 Subject: [PATCH 010/234] do not copy debian key --- buildkite/src/Lib/Cmds.dhall | 10 +++++----- 1 file changed, 5 insertions(+), 5 deletions(-) diff --git a/buildkite/src/Lib/Cmds.dhall b/buildkite/src/Lib/Cmds.dhall index 63d5c3d7950..a64bfc8800c 100644 --- a/buildkite/src/Lib/Cmds.dhall +++ b/buildkite/src/Lib/Cmds.dhall @@ -71,11 +71,11 @@ let module = = if docker.useBash then "/bin/bash" else "/bin/sh" in { line = - "docker run -it --rm --entrypoint ${entrypoint} --init --volume ${sharedDir}:/shared --volume ${outerDir}:/workdir --workdir /workdir${envVars}${ if docker.privileged + "docker run -it --rm --entrypoint ${entrypoint} --init --volume /var/secrets:/var/secrets --volume ${sharedDir}:/shared --volume ${outerDir}:/workdir --workdir /workdir${envVars}${ if docker.privileged - then " --privileged" + then " --privileged" - else ""} ${docker.image} -c '${inner.line}'" + else ""} ${docker.image} -c '${inner.line}'" , readable = 
Optional/map Text @@ -142,7 +142,7 @@ let tests = let dockerExample = assert : { line = - "docker run -it --rm --entrypoint /bin/bash --init --volume /var/buildkite/shared:/shared --volume \\\$BUILDKITE_BUILD_CHECKOUT_PATH:/workdir --workdir /workdir --env ENV1 --env ENV2 --env TEST foo/bar:tag -c 'echo hello'" + "docker run -it --rm --entrypoint /bin/bash --init --volume /var/secrets:/var/secrets --volume /var/buildkite/shared:/shared --volume \\\$BUILDKITE_BUILD_CHECKOUT_PATH:/workdir --workdir /workdir --env ENV1 --env ENV2 --env TEST foo/bar:tag -c 'echo hello'" , readable = Some "Docker@foo/bar:tag ( echo hello )" } === M.inDocker @@ -154,7 +154,7 @@ let tests = let cacheExample = assert - : "./buildkite/scripts/cache-through.sh data.tar \"docker run -it --rm --entrypoint /bin/bash --init --volume /var/buildkite/shared:/shared --volume \\\$BUILDKITE_BUILD_CHECKOUT_PATH:/workdir --workdir /workdir --env ENV1 --env ENV2 --env TEST foo/bar:tag -c 'echo hello > /tmp/data/foo.txt && tar cvf data.tar /tmp/data'\"" + : "./buildkite/scripts/cache-through.sh data.tar \"docker run -it --rm --entrypoint /bin/bash --init --volume /var/secrets:/var/secrets --volume /var/buildkite/shared:/shared --volume \\\$BUILDKITE_BUILD_CHECKOUT_PATH:/workdir --workdir /workdir --env ENV1 --env ENV2 --env TEST foo/bar:tag -c 'echo hello > /tmp/data/foo.txt && tar cvf data.tar /tmp/data'\"" === M.format ( M.cacheThrough M.Docker::{ From 65b6b0980ea1085f3fd7fed95a3faf5cd9c0f926 Mon Sep 17 00:00:00 2001 From: dkijania Date: Wed, 18 Sep 2024 18:06:37 +0200 Subject: [PATCH 011/234] fix path to key.json --- buildkite/scripts/gsutil-upload.sh | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/buildkite/scripts/gsutil-upload.sh b/buildkite/scripts/gsutil-upload.sh index 888afc94cf1..a1e18553f24 100755 --- a/buildkite/scripts/gsutil-upload.sh +++ b/buildkite/scripts/gsutil-upload.sh @@ -1,6 +1,6 @@ #!/bin/bash -KEY_FILE=/var/secrets/gcloud/key.json +KEY_FILE=/var/secrets/google/key.json if [ ! 
-f $KEY_FILE ]; then echo "Cannot use gsutil for upload as key file cannot be foud in $KEY_FILE" From c327264c67cb345887d71829eebeb23520d508dc Mon Sep 17 00:00:00 2001 From: dkijania Date: Wed, 18 Sep 2024 18:34:36 +0200 Subject: [PATCH 012/234] upload file after revert --- ...ersion-linter-patch-missing-type-shapes.sh | 19 +++++++++---------- 1 file changed, 9 insertions(+), 10 deletions(-) diff --git a/buildkite/scripts/version-linter-patch-missing-type-shapes.sh b/buildkite/scripts/version-linter-patch-missing-type-shapes.sh index 75590c93a04..dba2e9cce4b 100755 --- a/buildkite/scripts/version-linter-patch-missing-type-shapes.sh +++ b/buildkite/scripts/version-linter-patch-missing-type-shapes.sh @@ -16,6 +16,12 @@ release_branch=${REMOTE}/$1 RELEASE_BRANCH_COMMIT=$(git log -n 1 --format="%h" --abbrev=7 --no-merges $release_branch) +function revert_checkout() { + git checkout $BUILDKITE_COMMIT + git submodule sync + git submodule update --init --recursive +} + function checkout_and_dump() { local __commit=$1 git checkout $__commit @@ -23,22 +29,15 @@ function checkout_and_dump() { git submodule update --init --recursive eval $(opam config env) TYPE_SHAPE_FILE=${__commit:0:7}-type_shape.txt - dune exec src/app/cli/src/mina.exe internal dump-type-shapes > ${TYPE_SHAPE_FILE} - source buildkite/scripts/gsutil-upload.sh ${TYPE_SHAPE_FILE} gs://mina-type-shapes -} - -function revert_checkout() { - git checkout $BUILDKITE_COMMIT - git submodule sync - git submodule update --init --recursive + dune exec src/app/cli/src/mina.exe internal dump-type-shapes > /tmp/${TYPE_SHAPE_FILE} + revert_checkout + source buildkite/scripts/gsutil-upload.sh /tmp/${TYPE_SHAPE_FILE} gs://mina-type-shapes } if ! $(gsutil ls gs://mina-type-shapes/$BUILDKITE_COMMIT 2>/dev/null); then checkout_and_dump $BUILDKITE_COMMIT - revert_checkout fi if ! $(gsutil ls gs://mina-type-shapes/$RELEASE_BRANCH_COMMIT 2>/dev/null); then checkout_and_dump $RELEASE_BRANCH_COMMIT - revert_checkout fi From 07796e6c797236e1ef71b35ece2d6a85fade3c15 Mon Sep 17 00:00:00 2001 From: dkijania Date: Wed, 18 Sep 2024 21:38:30 +0200 Subject: [PATCH 013/234] remove no-merges from git log and patch version linter for base branch too --- .../version-linter-patch-missing-type-shapes.sh | 10 +++++++++- 1 file changed, 9 insertions(+), 1 deletion(-) diff --git a/buildkite/scripts/version-linter-patch-missing-type-shapes.sh b/buildkite/scripts/version-linter-patch-missing-type-shapes.sh index dba2e9cce4b..d4e10f8212a 100755 --- a/buildkite/scripts/version-linter-patch-missing-type-shapes.sh +++ b/buildkite/scripts/version-linter-patch-missing-type-shapes.sh @@ -14,7 +14,7 @@ source buildkite/scripts/export-git-env-vars.sh release_branch=${REMOTE}/$1 -RELEASE_BRANCH_COMMIT=$(git log -n 1 --format="%h" --abbrev=7 --no-merges $release_branch) +RELEASE_BRANCH_COMMIT=$(git log -n 1 --format="%h" --abbrev=7 $release_branch) function revert_checkout() { git checkout $BUILDKITE_COMMIT @@ -41,3 +41,11 @@ fi if ! $(gsutil ls gs://mina-type-shapes/$RELEASE_BRANCH_COMMIT 2>/dev/null); then checkout_and_dump $RELEASE_BRANCH_COMMIT fi + +if [[ -n "$BUILDKITE_PULL_REQUEST_BASE_BRANCH" ]]; then + BUILDKITE_PULL_REQUEST_BASE_BRANCH_COMMIT=$(git log -n 1 --format="%h" --abbrev=7 $BUILDKITE_PULL_REQUEST_BASE_BRANCH) + if ! 
$(gsutil ls gs://mina-type-shapes/$BUILDKITE_PULL_REQUEST_BASE_BRANCH_COMMIT 2>/dev/null); then + checkout_and_dump $BUILDKITE_PULL_REQUEST_BASE_BRANCH_COMMIT + revert_checkout + fi +fi \ No newline at end of file From ef68bf308ffbb40fc50168acac1afec392a50a33 Mon Sep 17 00:00:00 2001 From: dkijania Date: Thu, 19 Sep 2024 09:34:49 +0200 Subject: [PATCH 014/234] add remote when evaluating commit --- buildkite/scripts/version-linter-patch-missing-type-shapes.sh | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/buildkite/scripts/version-linter-patch-missing-type-shapes.sh b/buildkite/scripts/version-linter-patch-missing-type-shapes.sh index d4e10f8212a..be9fdd36eac 100755 --- a/buildkite/scripts/version-linter-patch-missing-type-shapes.sh +++ b/buildkite/scripts/version-linter-patch-missing-type-shapes.sh @@ -43,7 +43,7 @@ if ! $(gsutil ls gs://mina-type-shapes/$RELEASE_BRANCH_COMMIT 2>/dev/null); then fi if [[ -n "$BUILDKITE_PULL_REQUEST_BASE_BRANCH" ]]; then - BUILDKITE_PULL_REQUEST_BASE_BRANCH_COMMIT=$(git log -n 1 --format="%h" --abbrev=7 $BUILDKITE_PULL_REQUEST_BASE_BRANCH) + BUILDKITE_PULL_REQUEST_BASE_BRANCH_COMMIT=$(git log -n 1 --format="%h" --abbrev=7 ${REMOTE}/${BUILDKITE_PULL_REQUEST_BASE_BRANCH} ) if ! $(gsutil ls gs://mina-type-shapes/$BUILDKITE_PULL_REQUEST_BASE_BRANCH_COMMIT 2>/dev/null); then checkout_and_dump $BUILDKITE_PULL_REQUEST_BASE_BRANCH_COMMIT revert_checkout From ed67a5e2090f085cdd658ef69af24a55e89d000c Mon Sep 17 00:00:00 2001 From: dkijania Date: Thu, 19 Sep 2024 10:51:56 +0200 Subject: [PATCH 015/234] fix variable checking --- buildkite/scripts/version-linter-patch-missing-type-shapes.sh | 3 +-- 1 file changed, 1 insertion(+), 2 deletions(-) diff --git a/buildkite/scripts/version-linter-patch-missing-type-shapes.sh b/buildkite/scripts/version-linter-patch-missing-type-shapes.sh index be9fdd36eac..f030b28a01e 100755 --- a/buildkite/scripts/version-linter-patch-missing-type-shapes.sh +++ b/buildkite/scripts/version-linter-patch-missing-type-shapes.sh @@ -42,10 +42,9 @@ if ! $(gsutil ls gs://mina-type-shapes/$RELEASE_BRANCH_COMMIT 2>/dev/null); then checkout_and_dump $RELEASE_BRANCH_COMMIT fi -if [[ -n "$BUILDKITE_PULL_REQUEST_BASE_BRANCH" ]]; then +if [[ -n "${BUILDKITE_PULL_REQUEST_BASE_BRANCH:-}" ]]; then BUILDKITE_PULL_REQUEST_BASE_BRANCH_COMMIT=$(git log -n 1 --format="%h" --abbrev=7 ${REMOTE}/${BUILDKITE_PULL_REQUEST_BASE_BRANCH} ) if ! 
$(gsutil ls gs://mina-type-shapes/$BUILDKITE_PULL_REQUEST_BASE_BRANCH_COMMIT 2>/dev/null); then checkout_and_dump $BUILDKITE_PULL_REQUEST_BASE_BRANCH_COMMIT - revert_checkout fi fi \ No newline at end of file From 4de35adb0de88591e7ec3c4232a045f151c006ab Mon Sep 17 00:00:00 2001 From: dkijania Date: Wed, 11 Sep 2024 20:32:03 +0200 Subject: [PATCH 016/234] Patch missing type shapes --- ...ersion-linter-patch-missing-type-shapes.sh | 42 +++++++++++++++ buildkite/src/Jobs/Test/VersionLint.dhall | 14 ++--- scripts/version-linter.py | 53 +++++++++++++++---- 3 files changed, 92 insertions(+), 17 deletions(-) create mode 100755 buildkite/scripts/version-linter-patch-missing-type-shapes.sh diff --git a/buildkite/scripts/version-linter-patch-missing-type-shapes.sh b/buildkite/scripts/version-linter-patch-missing-type-shapes.sh new file mode 100755 index 00000000000..bf5a6d8a62f --- /dev/null +++ b/buildkite/scripts/version-linter-patch-missing-type-shapes.sh @@ -0,0 +1,42 @@ +#!/bin/bash + +set -eox pipefail + +if [[ $# -ne 1 ]]; then + echo "Usage: $0 " + exit 1 +fi + +git config --global --add safe.directory /workdir + +source buildkite/scripts/handle-fork.sh +source buildkite/scripts/export-git-env-vars.sh + +release_branch=${REMOTE}/$1 + +RELEASE_BRANCH_COMMIT=$(git log -n 1 --format="%h" --abbrev=7 --no-merges $release_branch) + +function checkout_and_dump() { + local __commit=$1 + git checkout $__commit + git submodule sync + git submodule update --init --recursive + eval $(opam config env) + dune exec src/app/cli/src/mina.exe internal dump-type-shapes > ${__commit:0:7}-type-shapes.txt +} + +function revert_checkout() { + git checkout $BUILDKITE_COMMIT + git submodule sync + git submodule update --init --recursive +} + +if ! $(gsutil ls gs://mina-type-shapes/$BUILDKITE_COMMIT 2>/dev/null); then + checkout_and_dump $BUILDKITE_COMMIT + revert_checkout +fi + +if ! 
$(gsutil ls gs://mina-type-shapes/$RELEASE_BRANCH_COMMIT 2>/dev/null); then + checkout_and_dump $RELEASE_BRANCH_COMMIT + revert_checkout +fi diff --git a/buildkite/src/Jobs/Test/VersionLint.dhall b/buildkite/src/Jobs/Test/VersionLint.dhall index d73db56e928..e9ee9ba1779 100644 --- a/buildkite/src/Jobs/Test/VersionLint.dhall +++ b/buildkite/src/Jobs/Test/VersionLint.dhall @@ -14,12 +14,12 @@ let JobSpec = ../../Pipeline/JobSpec.dhall let Command = ../../Command/Base.dhall -let RunInToolchain = ../../Command/RunInToolchain.dhall - let Docker = ../../Command/Docker/Type.dhall let Size = ../../Command/Size.dhall +let RunInToolchain = ../../Command/RunInToolchain.dhall + let dependsOn = [ { name = "MinaArtifactBullseye", key = "build-deb-pkg" } ] let buildTestCmd @@ -34,15 +34,17 @@ let buildTestCmd RunInToolchain.runInToolchain ([] : List Text) "buildkite/scripts/dump-mina-type-shapes.sh" - # [ Cmd.run - "gsutil cp \$(git log -n 1 --format=%h --abbrev=7 --no-merges)-type_shape.txt \$MINA_TYPE_SHAPE gs://mina-type-shapes" - ] + # RunInToolchain.runInToolchain + ([] : List Text) + "buildkite/scripts/version-linter-patch-missing-type-shapes.sh ${release_branch}" + # RunInToolchain.runInToolchain + ([] : List Text) + "gsutil cp *-type_shape.txt \$MINA_TYPE_SHAPE gs://mina-type-shapes" # RunInToolchain.runInToolchain ([] : List Text) "buildkite/scripts/version-linter.sh ${release_branch}" , label = "Versioned type linter for ${release_branch}" , key = "version-linter-${release_branch}" - , soft_fail = Some soft_fail , target = cmd_target , docker = None Docker.Type , depends_on = dependsOn diff --git a/scripts/version-linter.py b/scripts/version-linter.py index 8664ef6b0e9..29b555c0fe2 100755 --- a/scripts/version-linter.py +++ b/scripts/version-linter.py @@ -31,19 +31,50 @@ def set_error(): global exit_code exit_code=1 -def branch_commit(branch): +def branch_commits(branch,n): print ('Retrieving', branch, 'head commit...') result=subprocess.run(['git','log','-n','1','--format="%h"','--abbrev=7',f'{branch}'], capture_output=True) output=result.stdout.decode('ascii') print ('command stdout:', output) print ('command stderr:', result.stderr.decode('ascii')) - return output.replace('"','').replace('\n','') - -def download_type_shapes(role,branch,sha1) : + return output.replace('"','').splitlines() + +def url_to_type_shape_file(file): + ''' + Return url to mina type shape file + ''' + return f'https://storage.googleapis.com/mina-type-shapes/{file}' + +def sha_exists(sha1): + ''' + Checks if mina type shape with given sha exists + ''' + file = type_shape_file(sha1) + return url_exists(url_to_type_shape_file(file)) + +def url_exists(url): + ''' + Checks if url exists (by sending head and validating that status code is ok) + ''' + return requests.head(url).status_code == 200 + +def find_latest_type_shape_ref_on(branch,n=1): + ''' + Function tries to find best type shape reference commit by retrieving n last commits + and iterate over collection testing if any item points to valid url + ''' + commits = branch_commits(branch, n) + candidates = list(filter(lambda x: sha_exists(x), commits)) + if not any(candidates): + raise Exception(f'Cannot find type shape file for {branch}. 
I tried {n} last commits') + else: + return candidates[0] + +def download_type_shape(role,branch,sha1) : file=type_shape_file(sha1) print ('Downloading type shape file',file,'for',role,'branch',branch,'at commit',sha1) - result=subprocess.run(['wget','--no-clobber',f'https://storage.googleapis.com/mina-type-shapes/{file}']) + result=subprocess.run(['wget','--no-clobber',url_to_type_shape_file(file)]) def type_shape_file(sha1) : # created by buildkite build-artifact script @@ -237,18 +268,18 @@ def assert_commit(commit, desc): subprocess.run(['git','fetch'],capture_output=False) - base_branch_commit=branch_commit(base_branch) - download_type_shapes('base',base_branch,base_branch_commit) + base_branch_commit = find_latest_type_shape_ref_on(base_branch,n=10) + download_type_shape('base',base_branch,base_branch_commit) print('') - release_branch_commit=branch_commit(release_branch) - download_type_shapes('release',release_branch,release_branch_commit) + release_branch_commit=find_latest_type_shape_ref_on(release_branch, n=10) + download_type_shape('release',release_branch,release_branch_commit) print('') - pr_branch_commit=branch_commit(pr_branch) - download_type_shapes('pr',pr_branch,pr_branch_commit) + pr_branch_commit=find_latest_type_shape_ref_on(pr_branch) + download_type_shape('pr',pr_branch,pr_branch_commit) print('') From 36829c6a528ae9ec49202014998df84a3ce17133 Mon Sep 17 00:00:00 2001 From: dkijania Date: Mon, 16 Sep 2024 12:29:27 +0200 Subject: [PATCH 017/234] use soft_fail in VerisionLint dhall --- buildkite/src/Jobs/Test/VersionLint.dhall | 1 + 1 file changed, 1 insertion(+) diff --git a/buildkite/src/Jobs/Test/VersionLint.dhall b/buildkite/src/Jobs/Test/VersionLint.dhall index e9ee9ba1779..74c59b115cf 100644 --- a/buildkite/src/Jobs/Test/VersionLint.dhall +++ b/buildkite/src/Jobs/Test/VersionLint.dhall @@ -48,6 +48,7 @@ let buildTestCmd , target = cmd_target , docker = None Docker.Type , depends_on = dependsOn + , soft_fail = Some soft_fail , artifact_paths = [ S.contains "core_dumps/*" ] } From 72c6d04e716e575e07f345838bfdae7b9e5bbc3b Mon Sep 17 00:00:00 2001 From: dkijania Date: Wed, 18 Sep 2024 10:44:29 +0200 Subject: [PATCH 018/234] move gsutil upload to docker --- buildkite/scripts/dump-mina-type-shapes.sh | 2 ++ buildkite/scripts/gsutil-upload.sh | 11 +++++++++++ .../version-linter-patch-missing-type-shapes.sh | 4 +++- buildkite/src/Jobs/Test/VersionLint.dhall | 3 --- 4 files changed, 16 insertions(+), 4 deletions(-) create mode 100755 buildkite/scripts/gsutil-upload.sh diff --git a/buildkite/scripts/dump-mina-type-shapes.sh b/buildkite/scripts/dump-mina-type-shapes.sh index 57d3c2b2302..5c1d402e215 100755 --- a/buildkite/scripts/dump-mina-type-shapes.sh +++ b/buildkite/scripts/dump-mina-type-shapes.sh @@ -20,3 +20,5 @@ export TYPE_SHAPE_FILE=${MINA_COMMIT_SHA1}-type_shape.txt echo "--- Create type shapes git note for commit: ${MINA_COMMIT_SHA1}" mina internal dump-type-shapes > ${TYPE_SHAPE_FILE} + +source buildkite/scripts/gsutil-upload.sh ${TYPE_SHAPE_FILE} gs://mina-type-shapes \ No newline at end of file diff --git a/buildkite/scripts/gsutil-upload.sh b/buildkite/scripts/gsutil-upload.sh new file mode 100755 index 00000000000..888afc94cf1 --- /dev/null +++ b/buildkite/scripts/gsutil-upload.sh @@ -0,0 +1,11 @@ +#!/bin/bash + +KEY_FILE=/var/secrets/gcloud/key.json + +if [ ! 
-f $KEY_FILE ]; then + echo "Cannot use gsutil for upload as key file cannot be foud in $KEY_FILE" +fi + +gcloud auth activate-service-account --key-file=$KEY_FILE + +gsutil cp $1 $2 \ No newline at end of file diff --git a/buildkite/scripts/version-linter-patch-missing-type-shapes.sh b/buildkite/scripts/version-linter-patch-missing-type-shapes.sh index bf5a6d8a62f..75590c93a04 100755 --- a/buildkite/scripts/version-linter-patch-missing-type-shapes.sh +++ b/buildkite/scripts/version-linter-patch-missing-type-shapes.sh @@ -22,7 +22,9 @@ function checkout_and_dump() { git submodule sync git submodule update --init --recursive eval $(opam config env) - dune exec src/app/cli/src/mina.exe internal dump-type-shapes > ${__commit:0:7}-type-shapes.txt + TYPE_SHAPE_FILE=${__commit:0:7}-type_shape.txt + dune exec src/app/cli/src/mina.exe internal dump-type-shapes > ${TYPE_SHAPE_FILE} + source buildkite/scripts/gsutil-upload.sh ${TYPE_SHAPE_FILE} gs://mina-type-shapes } function revert_checkout() { diff --git a/buildkite/src/Jobs/Test/VersionLint.dhall b/buildkite/src/Jobs/Test/VersionLint.dhall index 74c59b115cf..48dde5bea09 100644 --- a/buildkite/src/Jobs/Test/VersionLint.dhall +++ b/buildkite/src/Jobs/Test/VersionLint.dhall @@ -37,9 +37,6 @@ let buildTestCmd # RunInToolchain.runInToolchain ([] : List Text) "buildkite/scripts/version-linter-patch-missing-type-shapes.sh ${release_branch}" - # RunInToolchain.runInToolchain - ([] : List Text) - "gsutil cp *-type_shape.txt \$MINA_TYPE_SHAPE gs://mina-type-shapes" # RunInToolchain.runInToolchain ([] : List Text) "buildkite/scripts/version-linter.sh ${release_branch}" From dd8dca7e47613e2c1c4a7751f171a0b6078cea9e Mon Sep 17 00:00:00 2001 From: dkijania Date: Wed, 18 Sep 2024 11:11:22 +0200 Subject: [PATCH 019/234] dhall make lint --- buildkite/src/Jobs/Test/VersionLint.dhall | 2 -- 1 file changed, 2 deletions(-) diff --git a/buildkite/src/Jobs/Test/VersionLint.dhall b/buildkite/src/Jobs/Test/VersionLint.dhall index 48dde5bea09..833a741519a 100644 --- a/buildkite/src/Jobs/Test/VersionLint.dhall +++ b/buildkite/src/Jobs/Test/VersionLint.dhall @@ -1,5 +1,3 @@ -let Cmd = ../../Lib/Cmds.dhall - let S = ../../Lib/SelectFiles.dhall let B = ../../External/Buildkite.dhall From 7530ac4288bbb813101d18e5eee52058b55ebd5d Mon Sep 17 00:00:00 2001 From: dkijania Date: Wed, 18 Sep 2024 13:08:45 +0200 Subject: [PATCH 020/234] do not copy debian key --- buildkite/src/Lib/Cmds.dhall | 26 +++++++++++++++++++------- 1 file changed, 19 insertions(+), 7 deletions(-) diff --git a/buildkite/src/Lib/Cmds.dhall b/buildkite/src/Lib/Cmds.dhall index 49e2d8fc432..a64bfc8800c 100644 --- a/buildkite/src/Lib/Cmds.dhall +++ b/buildkite/src/Lib/Cmds.dhall @@ -13,8 +13,16 @@ let module = \(environment : List Text) -> let Docker = { Type = - { image : Text, extraEnv : List Text, privileged : Bool } - , default = { extraEnv = [] : List Text, privileged = False } + { image : Text + , extraEnv : List Text + , privileged : Bool + , useBash : Bool + } + , default = + { extraEnv = [] : List Text + , privileged = False + , useBash = True + } } let Cmd = { line : Text, readable : Optional Text } @@ -58,12 +66,16 @@ let module = : Text = "/var/buildkite/shared" + let entrypoint + : Text + = if docker.useBash then "/bin/bash" else "/bin/sh" + in { line = - "docker run -it --rm --entrypoint /bin/sh --init --volume ${sharedDir}:/shared --volume ${outerDir}:/workdir --workdir /workdir${envVars}${ if docker.privileged + "docker run -it --rm --entrypoint ${entrypoint} --init --volume 
/var/secrets:/var/secrets --volume ${sharedDir}:/shared --volume ${outerDir}:/workdir --workdir /workdir${envVars}${ if docker.privileged - then " --privileged" + then " --privileged" - else ""} ${docker.image} -c '${inner.line}'" + else ""} ${docker.image} -c '${inner.line}'" , readable = Optional/map Text @@ -130,7 +142,7 @@ let tests = let dockerExample = assert : { line = - "docker run -it --rm --entrypoint /bin/sh --init --volume /var/buildkite/shared:/shared --volume \\\$BUILDKITE_BUILD_CHECKOUT_PATH:/workdir --workdir /workdir --env ENV1 --env ENV2 --env TEST foo/bar:tag -c 'echo hello'" + "docker run -it --rm --entrypoint /bin/bash --init --volume /var/secrets:/var/secrets --volume /var/buildkite/shared:/shared --volume \\\$BUILDKITE_BUILD_CHECKOUT_PATH:/workdir --workdir /workdir --env ENV1 --env ENV2 --env TEST foo/bar:tag -c 'echo hello'" , readable = Some "Docker@foo/bar:tag ( echo hello )" } === M.inDocker @@ -142,7 +154,7 @@ let tests = let cacheExample = assert - : "./buildkite/scripts/cache-through.sh data.tar \"docker run -it --rm --entrypoint /bin/sh --init --volume /var/buildkite/shared:/shared --volume \\\$BUILDKITE_BUILD_CHECKOUT_PATH:/workdir --workdir /workdir --env ENV1 --env ENV2 --env TEST foo/bar:tag -c 'echo hello > /tmp/data/foo.txt && tar cvf data.tar /tmp/data'\"" + : "./buildkite/scripts/cache-through.sh data.tar \"docker run -it --rm --entrypoint /bin/bash --init --volume /var/secrets:/var/secrets --volume /var/buildkite/shared:/shared --volume \\\$BUILDKITE_BUILD_CHECKOUT_PATH:/workdir --workdir /workdir --env ENV1 --env ENV2 --env TEST foo/bar:tag -c 'echo hello > /tmp/data/foo.txt && tar cvf data.tar /tmp/data'\"" === M.format ( M.cacheThrough M.Docker::{ From ccf827dbf811534a04e2d93de281b92721c8ae91 Mon Sep 17 00:00:00 2001 From: dkijania Date: Wed, 18 Sep 2024 18:06:37 +0200 Subject: [PATCH 021/234] fix path to key.json --- buildkite/scripts/gsutil-upload.sh | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/buildkite/scripts/gsutil-upload.sh b/buildkite/scripts/gsutil-upload.sh index 888afc94cf1..a1e18553f24 100755 --- a/buildkite/scripts/gsutil-upload.sh +++ b/buildkite/scripts/gsutil-upload.sh @@ -1,6 +1,6 @@ #!/bin/bash -KEY_FILE=/var/secrets/gcloud/key.json +KEY_FILE=/var/secrets/google/key.json if [ ! 
-f $KEY_FILE ]; then echo "Cannot use gsutil for upload as key file cannot be foud in $KEY_FILE" From 42270d3cb36cc244456b4527612cec62f2a03d6a Mon Sep 17 00:00:00 2001 From: dkijania Date: Wed, 18 Sep 2024 18:34:36 +0200 Subject: [PATCH 022/234] upload file after revert --- ...ersion-linter-patch-missing-type-shapes.sh | 19 +++++++++---------- 1 file changed, 9 insertions(+), 10 deletions(-) diff --git a/buildkite/scripts/version-linter-patch-missing-type-shapes.sh b/buildkite/scripts/version-linter-patch-missing-type-shapes.sh index 75590c93a04..dba2e9cce4b 100755 --- a/buildkite/scripts/version-linter-patch-missing-type-shapes.sh +++ b/buildkite/scripts/version-linter-patch-missing-type-shapes.sh @@ -16,6 +16,12 @@ release_branch=${REMOTE}/$1 RELEASE_BRANCH_COMMIT=$(git log -n 1 --format="%h" --abbrev=7 --no-merges $release_branch) +function revert_checkout() { + git checkout $BUILDKITE_COMMIT + git submodule sync + git submodule update --init --recursive +} + function checkout_and_dump() { local __commit=$1 git checkout $__commit @@ -23,22 +29,15 @@ function checkout_and_dump() { git submodule update --init --recursive eval $(opam config env) TYPE_SHAPE_FILE=${__commit:0:7}-type_shape.txt - dune exec src/app/cli/src/mina.exe internal dump-type-shapes > ${TYPE_SHAPE_FILE} - source buildkite/scripts/gsutil-upload.sh ${TYPE_SHAPE_FILE} gs://mina-type-shapes -} - -function revert_checkout() { - git checkout $BUILDKITE_COMMIT - git submodule sync - git submodule update --init --recursive + dune exec src/app/cli/src/mina.exe internal dump-type-shapes > /tmp/${TYPE_SHAPE_FILE} + revert_checkout + source buildkite/scripts/gsutil-upload.sh /tmp/${TYPE_SHAPE_FILE} gs://mina-type-shapes } if ! $(gsutil ls gs://mina-type-shapes/$BUILDKITE_COMMIT 2>/dev/null); then checkout_and_dump $BUILDKITE_COMMIT - revert_checkout fi if ! $(gsutil ls gs://mina-type-shapes/$RELEASE_BRANCH_COMMIT 2>/dev/null); then checkout_and_dump $RELEASE_BRANCH_COMMIT - revert_checkout fi From 33dedec66b76d23e6d7d73da60c1006d8cb31431 Mon Sep 17 00:00:00 2001 From: dkijania Date: Wed, 18 Sep 2024 21:38:30 +0200 Subject: [PATCH 023/234] remove no-merges from git log and patch version linter for base branch too --- .../version-linter-patch-missing-type-shapes.sh | 10 +++++++++- 1 file changed, 9 insertions(+), 1 deletion(-) diff --git a/buildkite/scripts/version-linter-patch-missing-type-shapes.sh b/buildkite/scripts/version-linter-patch-missing-type-shapes.sh index dba2e9cce4b..d4e10f8212a 100755 --- a/buildkite/scripts/version-linter-patch-missing-type-shapes.sh +++ b/buildkite/scripts/version-linter-patch-missing-type-shapes.sh @@ -14,7 +14,7 @@ source buildkite/scripts/export-git-env-vars.sh release_branch=${REMOTE}/$1 -RELEASE_BRANCH_COMMIT=$(git log -n 1 --format="%h" --abbrev=7 --no-merges $release_branch) +RELEASE_BRANCH_COMMIT=$(git log -n 1 --format="%h" --abbrev=7 $release_branch) function revert_checkout() { git checkout $BUILDKITE_COMMIT @@ -41,3 +41,11 @@ fi if ! $(gsutil ls gs://mina-type-shapes/$RELEASE_BRANCH_COMMIT 2>/dev/null); then checkout_and_dump $RELEASE_BRANCH_COMMIT fi + +if [[ -n "$BUILDKITE_PULL_REQUEST_BASE_BRANCH" ]]; then + BUILDKITE_PULL_REQUEST_BASE_BRANCH_COMMIT=$(git log -n 1 --format="%h" --abbrev=7 $BUILDKITE_PULL_REQUEST_BASE_BRANCH) + if ! 
$(gsutil ls gs://mina-type-shapes/$BUILDKITE_PULL_REQUEST_BASE_BRANCH_COMMIT 2>/dev/null); then + checkout_and_dump $BUILDKITE_PULL_REQUEST_BASE_BRANCH_COMMIT + revert_checkout + fi +fi \ No newline at end of file From f6f8e3d9621760cae61aca013f32b4cd6d559253 Mon Sep 17 00:00:00 2001 From: dkijania Date: Thu, 19 Sep 2024 09:34:49 +0200 Subject: [PATCH 024/234] add remote when evaluating commit --- buildkite/scripts/version-linter-patch-missing-type-shapes.sh | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/buildkite/scripts/version-linter-patch-missing-type-shapes.sh b/buildkite/scripts/version-linter-patch-missing-type-shapes.sh index d4e10f8212a..be9fdd36eac 100755 --- a/buildkite/scripts/version-linter-patch-missing-type-shapes.sh +++ b/buildkite/scripts/version-linter-patch-missing-type-shapes.sh @@ -43,7 +43,7 @@ if ! $(gsutil ls gs://mina-type-shapes/$RELEASE_BRANCH_COMMIT 2>/dev/null); then fi if [[ -n "$BUILDKITE_PULL_REQUEST_BASE_BRANCH" ]]; then - BUILDKITE_PULL_REQUEST_BASE_BRANCH_COMMIT=$(git log -n 1 --format="%h" --abbrev=7 $BUILDKITE_PULL_REQUEST_BASE_BRANCH) + BUILDKITE_PULL_REQUEST_BASE_BRANCH_COMMIT=$(git log -n 1 --format="%h" --abbrev=7 ${REMOTE}/${BUILDKITE_PULL_REQUEST_BASE_BRANCH} ) if ! $(gsutil ls gs://mina-type-shapes/$BUILDKITE_PULL_REQUEST_BASE_BRANCH_COMMIT 2>/dev/null); then checkout_and_dump $BUILDKITE_PULL_REQUEST_BASE_BRANCH_COMMIT revert_checkout From 4f16f98100f6038f9a33cbdba41f7aa8357b1427 Mon Sep 17 00:00:00 2001 From: dkijania Date: Thu, 19 Sep 2024 10:51:56 +0200 Subject: [PATCH 025/234] fix variable checking --- buildkite/scripts/version-linter-patch-missing-type-shapes.sh | 3 +-- 1 file changed, 1 insertion(+), 2 deletions(-) diff --git a/buildkite/scripts/version-linter-patch-missing-type-shapes.sh b/buildkite/scripts/version-linter-patch-missing-type-shapes.sh index be9fdd36eac..f030b28a01e 100755 --- a/buildkite/scripts/version-linter-patch-missing-type-shapes.sh +++ b/buildkite/scripts/version-linter-patch-missing-type-shapes.sh @@ -42,10 +42,9 @@ if ! $(gsutil ls gs://mina-type-shapes/$RELEASE_BRANCH_COMMIT 2>/dev/null); then checkout_and_dump $RELEASE_BRANCH_COMMIT fi -if [[ -n "$BUILDKITE_PULL_REQUEST_BASE_BRANCH" ]]; then +if [[ -n "${BUILDKITE_PULL_REQUEST_BASE_BRANCH:-}" ]]; then BUILDKITE_PULL_REQUEST_BASE_BRANCH_COMMIT=$(git log -n 1 --format="%h" --abbrev=7 ${REMOTE}/${BUILDKITE_PULL_REQUEST_BASE_BRANCH} ) if ! 
$(gsutil ls gs://mina-type-shapes/$BUILDKITE_PULL_REQUEST_BASE_BRANCH_COMMIT 2>/dev/null); then checkout_and_dump $BUILDKITE_PULL_REQUEST_BASE_BRANCH_COMMIT - revert_checkout fi fi \ No newline at end of file From 47f1bb247114ef8b6b3781cfc6f3f39522264a52 Mon Sep 17 00:00:00 2001 From: dkijania Date: Sun, 29 Sep 2024 19:47:29 +0200 Subject: [PATCH 026/234] WIP --- buildkite/scripts/build-artifact.sh | 5 +- buildkite/src/Command/PatchArchiveTest.dhall | 30 ++++ .../src/Jobs/Test/PatchArchiveTest.dhall | 44 +++++ buildkite/src/Jobs/Test/ReplayerTest.dhall | 2 +- scripts/patch-archive-test.sh | 17 ++ scripts/replayer-test.sh | 2 +- src/test/archive/patch_archive_test/dune | 31 ++++ .../patch_archive_test/patch_archive_test.ml | 160 ++++++++++++++++++ src/test/mina_automation/archive_blocks.ml | 2 +- src/test/mina_automation/executor.ml | 27 ++- src/test/mina_automation/extract_blocks.ml | 2 +- .../mina_automation/missing_blocks_auditor.ml | 2 +- .../missing_blocks_guardian.ml | 2 +- src/test/mina_automation/replayer.ml | 2 +- 14 files changed, 305 insertions(+), 23 deletions(-) create mode 100644 buildkite/src/Command/PatchArchiveTest.dhall create mode 100644 buildkite/src/Jobs/Test/PatchArchiveTest.dhall create mode 100755 scripts/patch-archive-test.sh create mode 100644 src/test/archive/patch_archive_test/dune create mode 100644 src/test/archive/patch_archive_test/patch_archive_test.ml diff --git a/buildkite/scripts/build-artifact.sh b/buildkite/scripts/build-artifact.sh index 263a258ef3c..7eb9c125203 100755 --- a/buildkite/scripts/build-artifact.sh +++ b/buildkite/scripts/build-artifact.sh @@ -24,7 +24,7 @@ make -C src/app/libp2p_helper MAINNET_TARGETS="" [[ ${MINA_BUILD_MAINNET} ]] && MAINNET_TARGETS="src/app/cli/src/mina_mainnet_signatures.exe src/app/rosetta/rosetta_mainnet_signatures.exe src/app/rosetta/ocaml-signer/signer_mainnet_signatures.exe" -echo "--- Build all major tagets required for packaging" +echo "--- Build all major targets required for packaging" echo "Building from Commit SHA: ${MINA_COMMIT_SHA1}" echo "Rust Version: $(rustc --version)" dune build "--profile=${DUNE_PROFILE}" $INSTRUMENTED_PARAM \ @@ -45,4 +45,5 @@ dune build "--profile=${DUNE_PROFILE}" $INSTRUMENTED_PARAM \ src/app/rosetta/indexer_test/indexer_test.exe \ src/app/rosetta/ocaml-signer/signer_testnet_signatures.exe \ src/app/test_executive/test_executive.exe \ - src/test/command_line_tests/command_line_tests.exe # 2>&1 | tee /tmp/buildocaml.log + src/test/command_line_tests/command_line_tests.exe \ + src/test/archive/patch_archive_test/patch_archive_test.exe diff --git a/buildkite/src/Command/PatchArchiveTest.dhall b/buildkite/src/Command/PatchArchiveTest.dhall new file mode 100644 index 00000000000..aff17f8519b --- /dev/null +++ b/buildkite/src/Command/PatchArchiveTest.dhall @@ -0,0 +1,30 @@ +let Artifacts = ../Constants/Artifacts.dhall + +let Command = ./Base.dhall + +let Size = ./Size.dhall + +let Network = ../Constants/Network.dhall + +let RunWithPostgres = ./RunWithPostgres.dhall + +in { step = + \(dependsOn : List Command.TaggedKey.Type) + -> Command.build + Command.Config::{ + , commands = + [ RunWithPostgres.runInDockerWithPostgresConn + [ "PATCH_ARCHIVE_TEST_APP=mina-patch-archive-test" + , "NETWORK_DATA_FOLDER=/etc/mina/test/archive/sample_db" + ] + "./src/test/archive/sample_db/archive_db.sql" + Artifacts.Type.FunctionalTestSuite + (None Network.Type) + "./scripts/patch-archive-test.sh" + ] + , label = "Archive: Patch Archive test" + , key = "patch-archive-test" + , target = Size.Large + , 
depends_on = dependsOn + } + } diff --git a/buildkite/src/Jobs/Test/PatchArchiveTest.dhall b/buildkite/src/Jobs/Test/PatchArchiveTest.dhall new file mode 100644 index 00000000000..ccf4580774a --- /dev/null +++ b/buildkite/src/Jobs/Test/PatchArchiveTest.dhall @@ -0,0 +1,44 @@ +let S = ../../Lib/SelectFiles.dhall + +let Pipeline = ../../Pipeline/Dsl.dhall + +let PipelineTag = ../../Pipeline/Tag.dhall + +let JobSpec = ../../Pipeline/JobSpec.dhall + +let PatchArchiveTest = ../../Command/PatchArchiveTest.dhall + +let Profiles = ../../Constants/Profiles.dhall + +let Network = ../../Constants/Network.dhall + +let Artifacts = ../../Constants/Artifacts.dhall + +let Dockers = ../../Constants/DockerVersions.dhall + +let dependsOn = + Dockers.dependsOn + Dockers.Type.Bullseye + Network.Type.Devnet + Profiles.Type.Standard + Artifacts.Type.FunctionalTestSuite + +in Pipeline.build + Pipeline.Config::{ + , spec = JobSpec::{ + , dirtyWhen = + [ S.strictlyStart (S.contains "src") + , S.exactly "scripts/path-archive-test" "sh" + , S.exactly "buildkite/src/Jobs/Test/PatchArchiveTest" "dhall" + , S.exactly "buildkite/src/Command/PatchArchiveTest" "dhall" + ] + , path = "Test" + , name = "PatchArchiveTest" + , tags = + [ PipelineTag.Type.Long + , PipelineTag.Type.Test + , PipelineTag.Type.Stable + ] + } + , steps = [ PatchArchiveTest.step dependsOn ] + } diff --git a/buildkite/src/Jobs/Test/ReplayerTest.dhall b/buildkite/src/Jobs/Test/ReplayerTest.dhall index 0e3d665e2ce..24ce7acf7e1 100644 --- a/buildkite/src/Jobs/Test/ReplayerTest.dhall +++ b/buildkite/src/Jobs/Test/ReplayerTest.dhall @@ -19,7 +19,7 @@ let Artifacts = ../../Constants/Artifacts.dhall let dependsOn = Dockers.dependsOn Dockers.Type.Bullseye - (None Network.Type) + Network.Type.Devnet Profiles.Type.Standard Artifacts.Type.Archive diff --git a/scripts/patch-archive-test.sh b/scripts/patch-archive-test.sh new file mode 100755 index 00000000000..48e1c61861a --- /dev/null +++ b/scripts/patch-archive-test.sh @@ -0,0 +1,17 @@ +#!/bin/bash + +set -x +# test replayer on known archive db + +NETWORK_DATA_FOLDER=src/test/archive/sample_db +PATCH_ARCHIVE_TEST_APP=${PATCH_ARCHIVE_TEST_APP:-_build/default/src/test/archive/patch_archive_test/patch_archive_test.exe} +PG_PORT=${PG_PORT:-5432} +POSTGRES_USER=${POSTGRES_USER:-postgres} +POSTGRES_PASSWORD=${POSTGRES_PASSWORD:-postgres} + +CONN=postgres://${POSTGRES_USER}:${POSTGRES_PASSWORD}@localhost:${PG_PORT} + + +echo "Running patch archive test" +$PATCH_ARCHIVE_TEST_APP --source-uri $CONN \ + --network-data-folder $NETWORK_DATA_FOLDER diff --git a/scripts/replayer-test.sh b/scripts/replayer-test.sh index b75c32f4b08..8b6f511295b 100755 --- a/scripts/replayer-test.sh +++ b/scripts/replayer-test.sh @@ -5,7 +5,7 @@ set -x INPUT_FILE=src/test/archive/sample_db/replayer_input_file.json REPLAYER_APP=_build/default/src/app/replayer/replayer.exe -PG_CONN=postgres://postgres:postgres@localhost:5433/archive +PG_CONN=postgres://postgres:postgres@localhost:5432/archive while [[ "$#" -gt 0 ]]; do case $1 in -i|--input-file) INPUT_FILE="$2"; shift;; diff --git a/src/test/archive/patch_archive_test/dune b/src/test/archive/patch_archive_test/dune new file mode 100644 index 00000000000..109d27b4a77 --- /dev/null +++ b/src/test/archive/patch_archive_test/dune @@ -0,0 +1,31 @@ +(executable + (package patch_archive_test) + (name patch_archive_test) + (public_name patch_archive_test) + (libraries + async + async.async_command + core_kernel + caqti + caqti-async + caqti-driver-postgresql + integration_test_lib + archive_lib + 
block_time + mina_numbers + logger + mina_base + uri + base + async_kernel + core + async_unix + stdio + base.caml + result + mina_automation + bounded_types + ) + (preprocessor_deps ../../../config.mlh) + (instrumentation (backend bisect_ppx)) + (preprocess (pps ppx_version ppx_mina ppx_let ppx_hash ppx_compare ppx_sexp_conv h_list.ppx))) diff --git a/src/test/archive/patch_archive_test/patch_archive_test.ml b/src/test/archive/patch_archive_test/patch_archive_test.ml new file mode 100644 index 00000000000..8d0b2f91a67 --- /dev/null +++ b/src/test/archive/patch_archive_test/patch_archive_test.ml @@ -0,0 +1,160 @@ +(* patch_archive_test.ml *) + +(* test patching of archive databases + + test structure: + - import reference database for comparision (for example with 100 blocks) + - create new schema and export blocks from reference db with some missing ones + - patch the database with missing precomputed blocks + - compare original and copy +*) + +module Network_Data = struct + type t = + { init_script : String.t + ; precomputed_blocks_zip : String.t + ; genesis_ledger_file : String.t + ; replayer_input_file : String.t + ; folder : String.t + } + + let create folder = + { init_script = "archive_db.sql" + ; genesis_ledger_file = "input.json" + ; precomputed_blocks_zip = "precomputed_blocks.zip" + ; replayer_input_file = "replayer_input_file.json" + ; folder + } +end + +open Core_kernel +open Async +open Mina_automation + +let main ~db_uri ~network_data_folder () = + let open Deferred.Let_syntax in + let missing_blocks_count = 3 in + let network_name = "dummy" in + + let network_data = Network_Data.create network_data_folder in + + let output_folder = Filename.temp_dir_name ^ "/output" in + + let%bind output_folder = Unix.mkdtemp output_folder in + + let connection = Psql.Conn_str db_uri in + + let source_db_name = "patch_archive_test_source" in + let target_db_name = "patch_archive_test_target" in + let%bind _ = Psql.create_empty_db ~connection ~db:source_db_name in + let%bind _ = + Psql.run_script ~connection ~db:source_db_name + (network_data.folder ^ "/" ^ network_data.init_script) + in + let%bind () = Psql.create_mina_db ~connection ~db:target_db_name in + + let source_db = db_uri ^ "/" ^ source_db_name in + let target_db = db_uri ^ "/" ^ target_db_name in + + let extract_blocks = Extract_blocks.of_context Executor.AutoDetect in + let config = + { Extract_blocks.Config.archive_uri = source_db + ; range = Extract_blocks.Config.AllBlocks + ; output_folder = Some output_folder + ; network = Some network_name + ; include_block_height_in_name = true + } + in + let%bind _ = Extract_blocks.run extract_blocks ~config in + + let archive_blocks = Archive_blocks.of_context Executor.AutoDetect in + + let%bind extensional_files = + Sys.ls_dir output_folder + >>= Deferred.List.map ~f:(fun e -> + Deferred.return (output_folder ^ "/" ^ e) ) + in + + let n = + List.init missing_blocks_count ~f:(fun _ -> + Random.int (List.length extensional_files) ) + in + + let unpatched_extensional_files = + List.filteri extensional_files ~f:(fun i _ -> + not (List.mem n i ~equal:Int.equal) ) + |> List.dedup_and_sort ~compare:(fun left right -> + let scan_height item = + let item = + Filename.basename item |> Str.global_replace (Str.regexp "-") " " + in + Scanf.sscanf item "%s %d %s" (fun _ height _ -> height) + in + + let left_height = scan_height left in + let right_height = scan_height right in + + Int.compare left_height right_height ) + in + + let%bind _ = + Archive_blocks.run archive_blocks 
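      (* import only the blocks that were not dropped above, leaving gaps in the
         target database for missing-blocks-guardian to patch further below *)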
~blocks:unpatched_extensional_files + ~archive_uri:target_db ~format:Extensional + in + + let%bind missing_blocks_auditor_path = + Missing_blocks_auditor.of_context Executor.AutoDetect + |> Missing_blocks_auditor.path + in + + let%bind archive_blocks_path = Archive_blocks.path archive_blocks in + + let config = + { Missing_blocks_guardian.Config.archive_uri = Uri.of_string target_db + ; precomputed_blocks = Uri.make ~scheme:"file" ~path:output_folder () + ; network = network_name + ; run_mode = Run + ; missing_blocks_auditor = missing_blocks_auditor_path + ; archive_blocks = archive_blocks_path + ; block_format = Extensional + } + in + + let missing_blocks_guardian = + Missing_blocks_guardian.of_context Executor.AutoDetect + in + + let%bind _ = Missing_blocks_guardian.run missing_blocks_guardian ~config in + + let replayer = Replayer.of_context Executor.AutoDetect in + + let%bind _ = + Replayer.run replayer ~archive_uri:target_db + ~input_config: + (network_data.folder ^ "/" ^ network_data.replayer_input_file) + ~interval_checkpoint:10 ~output_ledger:"./output_ledger" () + in + + Deferred.unit + +let () = + Command.( + run + (let open Let_syntax in + async ~summary:"Test patching of blocks in an archive database" + (let%map db_uri = + Param.flag "--source-uri" + ~doc: + "URI URI for connecting to the database (e.g., \ + postgres://$USER@localhost:5432)" + Param.(required string) + and network_data_folder = + Param.( + flag "--network-data-folder" ~aliases:[ "network-data-folder" ] + Param.(required string)) + ~doc: + "Path Path to folder containing network data. Usually it's sql \ + for db import, genesis ledger and zipped precomputed blocks \ + archive" + in + main ~db_uri ~network_data_folder ))) diff --git a/src/test/mina_automation/archive_blocks.ml b/src/test/mina_automation/archive_blocks.ml index 2c0dc47c89d..31e99b28a0e 100644 --- a/src/test/mina_automation/archive_blocks.ml +++ b/src/test/mina_automation/archive_blocks.ml @@ -9,7 +9,7 @@ include Executor let of_context context = Executor.of_context ~context ~dune_name:"src/app/archive_blocks/archive_blocks.exe" - ~official_name:"/usr/local/bin/mina-archive-blocks" + ~official_name:"mina-archive-blocks" type format = Precomputed | Extensional diff --git a/src/test/mina_automation/executor.ml b/src/test/mina_automation/executor.ml index 2d52f940955..6fba161bf17 100644 --- a/src/test/mina_automation/executor.ml +++ b/src/test/mina_automation/executor.ml @@ -38,13 +38,23 @@ module Executor = struct let built_name t = Printf.sprintf "_build/default/%s" t.dune_name + let paths = + Option.value_map ~f:(String.split ~on:':') ~default:[] (Sys.getenv "PATH") + + let exists_at_path t prefix = + match%bind Sys.file_exists (prefix ^ "/" ^ t.official_name) with + | `Yes -> + Deferred.return (Some prefix) + | _ -> + Deferred.return None + let path t = match%bind Sys.file_exists (built_name t) with | `Yes -> Deferred.return (built_name t) | _ -> ( - match%bind Sys.file_exists t.official_name with - | `Yes -> + match%bind Deferred.List.find_map ~f:(exists_at_path t) paths with + | Some _ -> Deferred.return t.official_name | _ -> Deferred.return t.dune_name ) @@ -60,18 +70,7 @@ module Executor = struct ~metadata:[ ("app", `String (built_name t)) ] ; run_from_local t ~args ?env () | _ -> ( - let paths = - Option.value_map ~f:(String.split ~on:':') ~default:[] - (Sys.getenv "PATH") - in - let exists_at_path prefix = - match%bind Sys.file_exists (prefix ^ "/" ^ t.official_name) with - | `Yes -> - Deferred.return (Some prefix) - | _ -> - 
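          (* this inline lookup is superseded by the exists_at_path helper
             hoisted to the top of the module above *)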
Deferred.return None - in - match%bind Deferred.List.find_map ~f:exists_at_path paths with + match%bind Deferred.List.find_map ~f:(exists_at_path t) paths with | Some prefix -> [%log debug] "running from %s" prefix ~metadata:[ ("app", `String t.official_name) ] ; diff --git a/src/test/mina_automation/extract_blocks.ml b/src/test/mina_automation/extract_blocks.ml index 7b05e9b00cb..bfb3ec9e2ae 100644 --- a/src/test/mina_automation/extract_blocks.ml +++ b/src/test/mina_automation/extract_blocks.ml @@ -55,6 +55,6 @@ end let of_context context = Executor.of_context ~context ~dune_name:"src/app/extract_blocks/extract_blocks.exe" - ~official_name:"/usr/local/bin/mina-extract-blocks" + ~official_name:"mina-extract-blocks" let run t ~config = run t ~args:(Config.to_args config) () diff --git a/src/test/mina_automation/missing_blocks_auditor.ml b/src/test/mina_automation/missing_blocks_auditor.ml index f28983711bb..4c6116a4c24 100644 --- a/src/test/mina_automation/missing_blocks_auditor.ml +++ b/src/test/mina_automation/missing_blocks_auditor.ml @@ -8,4 +8,4 @@ include Executor let of_context context = Executor.of_context ~context ~dune_name:"src/app/missing_blocks_auditor/missing_blocks_auditor.exe" - ~official_name:"/usr/local/bin/mina-missing-blocks-auditor" + ~official_name:"mina-missing-blocks-auditor" diff --git a/src/test/mina_automation/missing_blocks_guardian.ml b/src/test/mina_automation/missing_blocks_guardian.ml index 42bbf0ec844..b74ea406855 100644 --- a/src/test/mina_automation/missing_blocks_guardian.ml +++ b/src/test/mina_automation/missing_blocks_guardian.ml @@ -45,7 +45,7 @@ end let of_context context = Executor.of_context ~context ~dune_name:"scripts/archive/missing-blocks-guardian.sh" - ~official_name:"/etc/mina/archive/missing-blocks-guardian.sh" + ~official_name:"mina-missing-blocks-guardian" let run t ~config = run t ~args:(Config.to_args config) ~env:(Config.to_envs config) () diff --git a/src/test/mina_automation/replayer.ml b/src/test/mina_automation/replayer.ml index 049f8071d95..fbdfe133d1c 100644 --- a/src/test/mina_automation/replayer.ml +++ b/src/test/mina_automation/replayer.ml @@ -68,7 +68,7 @@ include Executor let of_context context = Executor.of_context ~context ~dune_name:"src/app/replayer/replayer.exe" - ~official_name:"/usr/local/bin/mina-replayer" + ~official_name:"mina-replayer" let run t ~archive_uri ~input_config ~interval_checkpoint ?checkpoint_output_folder ?checkpoint_file_prefix ~output_ledger = From 7bfdc6afb1ff32e614ea48700b1e5632e63049c1 Mon Sep 17 00:00:00 2001 From: dkijania Date: Sun, 29 Sep 2024 20:23:13 +0200 Subject: [PATCH 027/234] fix PathArchiveTest --- buildkite/src/Jobs/Test/PatchArchiveTest.dhall | 2 +- buildkite/src/Jobs/Test/ReplayerTest.dhall | 2 +- src/dune-project | 1 + 3 files changed, 3 insertions(+), 2 deletions(-) diff --git a/buildkite/src/Jobs/Test/PatchArchiveTest.dhall b/buildkite/src/Jobs/Test/PatchArchiveTest.dhall index ccf4580774a..d9b23cf4659 100644 --- a/buildkite/src/Jobs/Test/PatchArchiveTest.dhall +++ b/buildkite/src/Jobs/Test/PatchArchiveTest.dhall @@ -19,7 +19,7 @@ let Dockers = ../../Constants/DockerVersions.dhall let dependsOn = Dockers.dependsOn Dockers.Type.Bullseye - Network.Type.Devnet + (Some Network.Type.Devnet) Profiles.Type.Standard Artifacts.Type.FunctionalTestSuite diff --git a/buildkite/src/Jobs/Test/ReplayerTest.dhall b/buildkite/src/Jobs/Test/ReplayerTest.dhall index 24ce7acf7e1..0e3d665e2ce 100644 --- a/buildkite/src/Jobs/Test/ReplayerTest.dhall +++ b/buildkite/src/Jobs/Test/ReplayerTest.dhall @@ 
-19,7 +19,7 @@ let Artifacts = ../../Constants/Artifacts.dhall let dependsOn = Dockers.dependsOn Dockers.Type.Bullseye - Network.Type.Devnet + (None Network.Type) Profiles.Type.Standard Artifacts.Type.Archive diff --git a/src/dune-project b/src/dune-project index 91a68888957..766a8cc514a 100644 --- a/src/dune-project +++ b/src/dune-project @@ -136,6 +136,7 @@ (package (name parallel_scan)) (package (name participating_state)) (package (name pasta_bindings)) +(package (name patch_archive_test)) (package (name perf_histograms)) (package (name pickles_base)) (package (name pickles)) From e73a9293ff2ec636cf4f6ec98906189dd035954c Mon Sep 17 00:00:00 2001 From: Dariusz Kijania Date: Thu, 19 Sep 2024 00:02:54 +0200 Subject: [PATCH 028/234] Update README.md --- README.md | 20 ++++++++++++-------- 1 file changed, 12 insertions(+), 8 deletions(-) diff --git a/README.md b/README.md index b848104e1d8..f537ee2bed0 100644 --- a/README.md +++ b/README.md @@ -1,14 +1,18 @@ -### Build status + -| Develop | Berkeley | Compatible | -| ------- | -------- | ---------- | -| [![Build status - develop](https://badge.buildkite.com/0c47452f3ea619d3217d388e0de522b218db28c3e161887a9a.svg?branch=develop)](https://buildkite.com/o-1-labs-2/mina-end-to-end-nightlies) | [![Build status - berkeley](https://badge.buildkite.com/0c47452f3ea619d3217d388e0de522b218db28c3e161887a9a.svg?branch=berkeley)](https://buildkite.com/o-1-labs-2/mina-end-to-end-nightlies) | [![Build status - compatible](https://badge.buildkite.com/0c47452f3ea619d3217d388e0de522b218db28c3e161887a9a.svg?branch=compatible)](https://buildkite.com/o-1-labs-2/mina-end-to-end-nightlies) +

Mina

- - Mina logo - +
-# Mina + ![GitHub stars](https://img.shields.io/github/stars/minaprotocol/mina)  ![GitHub forks](https://img.shields.io/github/forks/minaprotocol/mina) + +![GitHub contributors](https://img.shields.io/github/contributors/minaprotocol/mina)  ![GitHub commit activity](https://img.shields.io/github/commit-activity/m/minaprotocol/mina)  ![GitHub last commit](https://img.shields.io/github/last-commit/minaprotocol/mina) + +| Develop[^1] | Compatible[^2] | Master[^3] | +| ------- | ---------- | ---------- | +| [![Build status - develop](https://badge.buildkite.com/0c47452f3ea619d3217d388e0de522b218db28c3e161887a9a.svg?branch=develop)](https://buildkite.com/o-1-labs-2/mina-end-to-end-nightlies) | [![Build status - compatible](https://badge.buildkite.com/0c47452f3ea619d3217d388e0de522b218db28c3e161887a9a.svg?branch=compatible)](https://buildkite.com/o-1-labs-2/mina-end-to-end-nightlies) | [![Build status - master](https://badge.buildkite.com/0c47452f3ea619d3217d388e0de522b218db28c3e161887a9a.svg?branch=master)](https://buildkite.com/o-1-labs-2/mina-end-to-end-nightlies) + +
Mina is the first cryptocurrency with a lightweight, constant-sized blockchain. This is the main source code repository for the Mina project and contains code for the OCaml protocol implementation, the [Mina Protocol website](https://minaprotocol.com), and wallet. Enjoy! From 5f7faa07f2c52e76737d045dc945dbd88731d04b Mon Sep 17 00:00:00 2001 From: Dariusz Kijania Date: Thu, 19 Sep 2024 00:04:21 +0200 Subject: [PATCH 029/234] Update README.md --- README.md | 4 ++++ 1 file changed, 4 insertions(+) diff --git a/README.md b/README.md index f537ee2bed0..66e10f111f1 100644 --- a/README.md +++ b/README.md @@ -64,3 +64,7 @@ The [Node Developers](https://docs.minaprotocol.com/node-developers) docs contai [Apache 2.0](LICENSE) Commits older than 2018-10-03 do not have a [LICENSE](LICENSE) file or this notice, but are distributed under the same terms. + +[^1]: Develop is a mainline branch containig code that may be not compatible with current mainnet and may require major upgrade (hardfork). +[^2]: Compatible is a mainline branch containig code which does not need hardfork in order to apply it to mainnet. +[^3]: Branch which contains current mainnet code. From 32a2f845fdfd18b9c2795e8887639d8a09636a9a Mon Sep 17 00:00:00 2001 From: dkijania Date: Sun, 29 Sep 2024 21:34:05 +0200 Subject: [PATCH 030/234] fix deps --- buildkite/src/Command/MinaArtifact.dhall | 8 ++++---- buildkite/src/Jobs/Test/PatchArchiveTest.dhall | 2 +- 2 files changed, 5 insertions(+), 5 deletions(-) diff --git a/buildkite/src/Command/MinaArtifact.dhall b/buildkite/src/Command/MinaArtifact.dhall index 28793fb3d22..52d4e1f7171 100644 --- a/buildkite/src/Command/MinaArtifact.dhall +++ b/buildkite/src/Command/MinaArtifact.dhall @@ -250,10 +250,10 @@ let docker_step , deb_repo = DebianRepo.Type.Local , deb_profile = spec.profile , step_key = - "test-suite-${DebianVersions.lowerName - spec.debVersion}${Profiles.toLabelSegment - spec.profile}${BuildFlags.toLabelSegment - spec.buildFlags}--docker-image" + "functional-test-suite-${DebianVersions.lowerName + spec.debVersion}${Profiles.toLabelSegment + spec.profile}${BuildFlags.toLabelSegment + spec.buildFlags}-docker-image" , network = "berkeley" } ] diff --git a/buildkite/src/Jobs/Test/PatchArchiveTest.dhall b/buildkite/src/Jobs/Test/PatchArchiveTest.dhall index d9b23cf4659..e6b468d843b 100644 --- a/buildkite/src/Jobs/Test/PatchArchiveTest.dhall +++ b/buildkite/src/Jobs/Test/PatchArchiveTest.dhall @@ -19,7 +19,7 @@ let Dockers = ../../Constants/DockerVersions.dhall let dependsOn = Dockers.dependsOn Dockers.Type.Bullseye - (Some Network.Type.Devnet) + (None Network.Type) Profiles.Type.Standard Artifacts.Type.FunctionalTestSuite From 9ede8ea650c003499919636c2f3665bfbd4afd1c Mon Sep 17 00:00:00 2001 From: dkijania Date: Sun, 29 Sep 2024 22:23:43 +0200 Subject: [PATCH 031/234] fix hypen do underscore --- buildkite/src/Command/MinaArtifact.dhall | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/buildkite/src/Command/MinaArtifact.dhall b/buildkite/src/Command/MinaArtifact.dhall index 52d4e1f7171..7f8108e15eb 100644 --- a/buildkite/src/Command/MinaArtifact.dhall +++ b/buildkite/src/Command/MinaArtifact.dhall @@ -250,7 +250,7 @@ let docker_step , deb_repo = DebianRepo.Type.Local , deb_profile = spec.profile , step_key = - "functional-test-suite-${DebianVersions.lowerName + "functional_test_suite-${DebianVersions.lowerName spec.debVersion}${Profiles.toLabelSegment spec.profile}${BuildFlags.toLabelSegment spec.buildFlags}-docker-image" From ffe4f33cd4b846735fc7a9b89bfd4d3dc748ae63 Mon 
Sep 17 00:00:00 2001 From: dkijania Date: Mon, 30 Sep 2024 10:31:50 +0200 Subject: [PATCH 032/234] publish mina-patch-archive-test --- scripts/debian/builder-helpers.sh | 3 ++- 1 file changed, 2 insertions(+), 1 deletion(-) diff --git a/scripts/debian/builder-helpers.sh b/scripts/debian/builder-helpers.sh index 0e34178731d..bb0b03c04dd 100755 --- a/scripts/debian/builder-helpers.sh +++ b/scripts/debian/builder-helpers.sh @@ -241,7 +241,8 @@ build_functional_test_suite_deb() { # Binaries cp ./default/src/test/command_line_tests/command_line_tests.exe "${BUILDDIR}/usr/local/bin/mina-command-line-tests" - + cp ./default/src/test/patch_archive_tests/patch_archive_tests.exe "${BUILDDIR}/usr/local/bin/mina-patch-archive-tests" + build_deb mina-test-suite } From 7013d231850b24b26ce68cfd0f3e411c417647d2 Mon Sep 17 00:00:00 2001 From: dkijania Date: Mon, 30 Sep 2024 14:05:30 +0200 Subject: [PATCH 033/234] fix name for patch_archive_test --- scripts/debian/builder-helpers.sh | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/scripts/debian/builder-helpers.sh b/scripts/debian/builder-helpers.sh index bb0b03c04dd..b98764d0da9 100755 --- a/scripts/debian/builder-helpers.sh +++ b/scripts/debian/builder-helpers.sh @@ -241,7 +241,7 @@ build_functional_test_suite_deb() { # Binaries cp ./default/src/test/command_line_tests/command_line_tests.exe "${BUILDDIR}/usr/local/bin/mina-command-line-tests" - cp ./default/src/test/patch_archive_tests/patch_archive_tests.exe "${BUILDDIR}/usr/local/bin/mina-patch-archive-tests" + cp ./default/src/test/archive/patch_archive_test/patch_archive_test.exe "${BUILDDIR}/usr/local/bin/mina-patch-archive-test" build_deb mina-test-suite From b027e324d6ff3311a084c4924a2e0ef15552ce31 Mon Sep 17 00:00:00 2001 From: dkijania Date: Mon, 30 Sep 2024 14:22:48 +0200 Subject: [PATCH 034/234] Revert unnecessary changes --- buildkite/src/Jobs/Test/VersionLint.dhall | 4 ++-- buildkite/src/Lib/Cmds.dhall | 26 ++++++----------------- 2 files changed, 9 insertions(+), 21 deletions(-) diff --git a/buildkite/src/Jobs/Test/VersionLint.dhall b/buildkite/src/Jobs/Test/VersionLint.dhall index 833a741519a..ceb6d51ee9e 100644 --- a/buildkite/src/Jobs/Test/VersionLint.dhall +++ b/buildkite/src/Jobs/Test/VersionLint.dhall @@ -12,12 +12,12 @@ let JobSpec = ../../Pipeline/JobSpec.dhall let Command = ../../Command/Base.dhall +let RunInToolchain = ../../Command/RunInToolchain.dhall + let Docker = ../../Command/Docker/Type.dhall let Size = ../../Command/Size.dhall -let RunInToolchain = ../../Command/RunInToolchain.dhall - let dependsOn = [ { name = "MinaArtifactBullseye", key = "build-deb-pkg" } ] let buildTestCmd diff --git a/buildkite/src/Lib/Cmds.dhall b/buildkite/src/Lib/Cmds.dhall index a64bfc8800c..15d7a9a61d4 100644 --- a/buildkite/src/Lib/Cmds.dhall +++ b/buildkite/src/Lib/Cmds.dhall @@ -13,16 +13,8 @@ let module = \(environment : List Text) -> let Docker = { Type = - { image : Text - , extraEnv : List Text - , privileged : Bool - , useBash : Bool - } - , default = - { extraEnv = [] : List Text - , privileged = False - , useBash = True - } + { image : Text, extraEnv : List Text, privileged : Bool } + , default = { extraEnv = [] : List Text, privileged = False } } let Cmd = { line : Text, readable : Optional Text } @@ -66,16 +58,12 @@ let module = : Text = "/var/buildkite/shared" - let entrypoint - : Text - = if docker.useBash then "/bin/bash" else "/bin/sh" - in { line = - "docker run -it --rm --entrypoint ${entrypoint} --init --volume /var/secrets:/var/secrets --volume 
${sharedDir}:/shared --volume ${outerDir}:/workdir --workdir /workdir${envVars}${ if docker.privileged + "docker run -it --rm --entrypoint sh --init --volume /var/secrets:/var/secrets --volume ${sharedDir}:/shared --volume ${outerDir}:/workdir --workdir /workdir${envVars}${ if docker.privileged - then " --privileged" + then " --privileged" - else ""} ${docker.image} -c '${inner.line}'" + else ""} ${docker.image} -c '${inner.line}'" , readable = Optional/map Text @@ -142,7 +130,7 @@ let tests = let dockerExample = assert : { line = - "docker run -it --rm --entrypoint /bin/bash --init --volume /var/secrets:/var/secrets --volume /var/buildkite/shared:/shared --volume \\\$BUILDKITE_BUILD_CHECKOUT_PATH:/workdir --workdir /workdir --env ENV1 --env ENV2 --env TEST foo/bar:tag -c 'echo hello'" + "docker run -it --rm --entrypoint sh --init --volume /var/secrets:/var/secrets --volume /var/buildkite/shared:/shared --volume \\\$BUILDKITE_BUILD_CHECKOUT_PATH:/workdir --workdir /workdir --env ENV1 --env ENV2 --env TEST foo/bar:tag -c 'echo hello'" , readable = Some "Docker@foo/bar:tag ( echo hello )" } === M.inDocker @@ -154,7 +142,7 @@ let tests = let cacheExample = assert - : "./buildkite/scripts/cache-through.sh data.tar \"docker run -it --rm --entrypoint /bin/bash --init --volume /var/secrets:/var/secrets --volume /var/buildkite/shared:/shared --volume \\\$BUILDKITE_BUILD_CHECKOUT_PATH:/workdir --workdir /workdir --env ENV1 --env ENV2 --env TEST foo/bar:tag -c 'echo hello > /tmp/data/foo.txt && tar cvf data.tar /tmp/data'\"" + : "./buildkite/scripts/cache-through.sh data.tar \"docker run -it --rm --entrypoint sh --init --volume /var/secrets:/var/secrets --volume /var/buildkite/shared:/shared --volume \\\$BUILDKITE_BUILD_CHECKOUT_PATH:/workdir --workdir /workdir --env ENV1 --env ENV2 --env TEST foo/bar:tag -c 'echo hello > /tmp/data/foo.txt && tar cvf data.tar /tmp/data'\"" === M.format ( M.cacheThrough M.Docker::{ From 523986de315ba36cd7fc9d8017a2292b445afdca Mon Sep 17 00:00:00 2001 From: dkijania Date: Mon, 30 Sep 2024 14:25:18 +0200 Subject: [PATCH 035/234] more revert --- buildkite/src/Jobs/Test/VersionLint.dhall | 2 +- buildkite/src/Lib/Cmds.dhall | 10 +++++----- 2 files changed, 6 insertions(+), 6 deletions(-) diff --git a/buildkite/src/Jobs/Test/VersionLint.dhall b/buildkite/src/Jobs/Test/VersionLint.dhall index ceb6d51ee9e..934521dd936 100644 --- a/buildkite/src/Jobs/Test/VersionLint.dhall +++ b/buildkite/src/Jobs/Test/VersionLint.dhall @@ -40,10 +40,10 @@ let buildTestCmd "buildkite/scripts/version-linter.sh ${release_branch}" , label = "Versioned type linter for ${release_branch}" , key = "version-linter-${release_branch}" + , soft_fail = Some soft_fail , target = cmd_target , docker = None Docker.Type , depends_on = dependsOn - , soft_fail = Some soft_fail , artifact_paths = [ S.contains "core_dumps/*" ] } diff --git a/buildkite/src/Lib/Cmds.dhall b/buildkite/src/Lib/Cmds.dhall index 15d7a9a61d4..2ce5926a65e 100644 --- a/buildkite/src/Lib/Cmds.dhall +++ b/buildkite/src/Lib/Cmds.dhall @@ -59,11 +59,11 @@ let module = = "/var/buildkite/shared" in { line = - "docker run -it --rm --entrypoint sh --init --volume /var/secrets:/var/secrets --volume ${sharedDir}:/shared --volume ${outerDir}:/workdir --workdir /workdir${envVars}${ if docker.privileged + "docker run -it --rm --entrypoint /bin/sh --init --volume /var/secrets:/var/secrets --volume ${sharedDir}:/shared --volume ${outerDir}:/workdir --workdir /workdir${envVars}${ if docker.privileged - then " --privileged" + then " --privileged" - else 
""} ${docker.image} -c '${inner.line}'" + else ""} ${docker.image} -c '${inner.line}'" , readable = Optional/map Text @@ -130,7 +130,7 @@ let tests = let dockerExample = assert : { line = - "docker run -it --rm --entrypoint sh --init --volume /var/secrets:/var/secrets --volume /var/buildkite/shared:/shared --volume \\\$BUILDKITE_BUILD_CHECKOUT_PATH:/workdir --workdir /workdir --env ENV1 --env ENV2 --env TEST foo/bar:tag -c 'echo hello'" + "docker run -it --rm --entrypoint /bin/sh --init --volume /var/secrets:/var/secrets --volume /var/buildkite/shared:/shared --volume \\\$BUILDKITE_BUILD_CHECKOUT_PATH:/workdir --workdir /workdir --env ENV1 --env ENV2 --env TEST foo/bar:tag -c 'echo hello'" , readable = Some "Docker@foo/bar:tag ( echo hello )" } === M.inDocker @@ -142,7 +142,7 @@ let tests = let cacheExample = assert - : "./buildkite/scripts/cache-through.sh data.tar \"docker run -it --rm --entrypoint sh --init --volume /var/secrets:/var/secrets --volume /var/buildkite/shared:/shared --volume \\\$BUILDKITE_BUILD_CHECKOUT_PATH:/workdir --workdir /workdir --env ENV1 --env ENV2 --env TEST foo/bar:tag -c 'echo hello > /tmp/data/foo.txt && tar cvf data.tar /tmp/data'\"" + : "./buildkite/scripts/cache-through.sh data.tar \"docker run -it --rm --entrypoint /bin/sh --init --volume /var/secrets:/var/secrets --volume /var/buildkite/shared:/shared --volume \\\$BUILDKITE_BUILD_CHECKOUT_PATH:/workdir --workdir /workdir --env ENV1 --env ENV2 --env TEST foo/bar:tag -c 'echo hello > /tmp/data/foo.txt && tar cvf data.tar /tmp/data'\"" === M.format ( M.cacheThrough M.Docker::{ From 7b439902363001fc3ce6adcd5d053e3a266cf341 Mon Sep 17 00:00:00 2001 From: dkijania Date: Mon, 30 Sep 2024 14:31:42 +0200 Subject: [PATCH 036/234] fix Cmds.dhall --- buildkite/src/Lib/Cmds.dhall | 26 +++++++++++++++++++------- 1 file changed, 19 insertions(+), 7 deletions(-) diff --git a/buildkite/src/Lib/Cmds.dhall b/buildkite/src/Lib/Cmds.dhall index 2ce5926a65e..a64bfc8800c 100644 --- a/buildkite/src/Lib/Cmds.dhall +++ b/buildkite/src/Lib/Cmds.dhall @@ -13,8 +13,16 @@ let module = \(environment : List Text) -> let Docker = { Type = - { image : Text, extraEnv : List Text, privileged : Bool } - , default = { extraEnv = [] : List Text, privileged = False } + { image : Text + , extraEnv : List Text + , privileged : Bool + , useBash : Bool + } + , default = + { extraEnv = [] : List Text + , privileged = False + , useBash = True + } } let Cmd = { line : Text, readable : Optional Text } @@ -58,12 +66,16 @@ let module = : Text = "/var/buildkite/shared" + let entrypoint + : Text + = if docker.useBash then "/bin/bash" else "/bin/sh" + in { line = - "docker run -it --rm --entrypoint /bin/sh --init --volume /var/secrets:/var/secrets --volume ${sharedDir}:/shared --volume ${outerDir}:/workdir --workdir /workdir${envVars}${ if docker.privileged + "docker run -it --rm --entrypoint ${entrypoint} --init --volume /var/secrets:/var/secrets --volume ${sharedDir}:/shared --volume ${outerDir}:/workdir --workdir /workdir${envVars}${ if docker.privileged - then " --privileged" + then " --privileged" - else ""} ${docker.image} -c '${inner.line}'" + else ""} ${docker.image} -c '${inner.line}'" , readable = Optional/map Text @@ -130,7 +142,7 @@ let tests = let dockerExample = assert : { line = - "docker run -it --rm --entrypoint /bin/sh --init --volume /var/secrets:/var/secrets --volume /var/buildkite/shared:/shared --volume \\\$BUILDKITE_BUILD_CHECKOUT_PATH:/workdir --workdir /workdir --env ENV1 --env ENV2 --env TEST foo/bar:tag -c 'echo hello'" + "docker 
run -it --rm --entrypoint /bin/bash --init --volume /var/secrets:/var/secrets --volume /var/buildkite/shared:/shared --volume \\\$BUILDKITE_BUILD_CHECKOUT_PATH:/workdir --workdir /workdir --env ENV1 --env ENV2 --env TEST foo/bar:tag -c 'echo hello'" , readable = Some "Docker@foo/bar:tag ( echo hello )" } === M.inDocker @@ -142,7 +154,7 @@ let tests = let cacheExample = assert - : "./buildkite/scripts/cache-through.sh data.tar \"docker run -it --rm --entrypoint /bin/sh --init --volume /var/secrets:/var/secrets --volume /var/buildkite/shared:/shared --volume \\\$BUILDKITE_BUILD_CHECKOUT_PATH:/workdir --workdir /workdir --env ENV1 --env ENV2 --env TEST foo/bar:tag -c 'echo hello > /tmp/data/foo.txt && tar cvf data.tar /tmp/data'\"" + : "./buildkite/scripts/cache-through.sh data.tar \"docker run -it --rm --entrypoint /bin/bash --init --volume /var/secrets:/var/secrets --volume /var/buildkite/shared:/shared --volume \\\$BUILDKITE_BUILD_CHECKOUT_PATH:/workdir --workdir /workdir --env ENV1 --env ENV2 --env TEST foo/bar:tag -c 'echo hello > /tmp/data/foo.txt && tar cvf data.tar /tmp/data'\"" === M.format ( M.cacheThrough M.Docker::{ From 561b2e793853399733b1bc8927aa3956acec4c40 Mon Sep 17 00:00:00 2001 From: dkijania Date: Mon, 30 Sep 2024 14:39:11 +0200 Subject: [PATCH 037/234] revert removing Stable tag --- buildkite/src/Jobs/Test/VersionLint.dhall | 6 +++++- 1 file changed, 5 insertions(+), 1 deletion(-) diff --git a/buildkite/src/Jobs/Test/VersionLint.dhall b/buildkite/src/Jobs/Test/VersionLint.dhall index 934521dd936..1368e8e70bc 100644 --- a/buildkite/src/Jobs/Test/VersionLint.dhall +++ b/buildkite/src/Jobs/Test/VersionLint.dhall @@ -60,7 +60,11 @@ in Pipeline.build , dirtyWhen = lintDirtyWhen , path = "Test" , name = "VersionLint" - , tags = [ PipelineTag.Type.Long, PipelineTag.Type.Test ] + , tags = + [ PipelineTag.Type.Long + , PipelineTag.Type.Test + , PipelineTag.Type.Stable + ] } , steps = [ buildTestCmd From cb9eebbfdfbeb7b59b613061fdc87b3165f95ed7 Mon Sep 17 00:00:00 2001 From: dkijania Date: Mon, 30 Sep 2024 14:58:24 +0200 Subject: [PATCH 038/234] fix typo and use shorter BUILDKITE_COMMIT when looking for reference files --- buildkite/scripts/gsutil-upload.sh | 2 +- buildkite/scripts/version-linter-patch-missing-type-shapes.sh | 2 +- 2 files changed, 2 insertions(+), 2 deletions(-) diff --git a/buildkite/scripts/gsutil-upload.sh b/buildkite/scripts/gsutil-upload.sh index a1e18553f24..347ed3e38bd 100755 --- a/buildkite/scripts/gsutil-upload.sh +++ b/buildkite/scripts/gsutil-upload.sh @@ -3,7 +3,7 @@ KEY_FILE=/var/secrets/google/key.json if [ ! -f $KEY_FILE ]; then - echo "Cannot use gsutil for upload as key file cannot be foud in $KEY_FILE" + echo "Cannot use gsutil for upload as key file cannot be found in $KEY_FILE" fi gcloud auth activate-service-account --key-file=$KEY_FILE diff --git a/buildkite/scripts/version-linter-patch-missing-type-shapes.sh b/buildkite/scripts/version-linter-patch-missing-type-shapes.sh index f030b28a01e..4633b1890c8 100755 --- a/buildkite/scripts/version-linter-patch-missing-type-shapes.sh +++ b/buildkite/scripts/version-linter-patch-missing-type-shapes.sh @@ -34,7 +34,7 @@ function checkout_and_dump() { source buildkite/scripts/gsutil-upload.sh /tmp/${TYPE_SHAPE_FILE} gs://mina-type-shapes } -if ! $(gsutil ls gs://mina-type-shapes/$BUILDKITE_COMMIT 2>/dev/null); then +if ! 
$(gsutil ls gs://mina-type-shapes/${BUILDKITE_COMMIT:0:7} 2>/dev/null); then checkout_and_dump $BUILDKITE_COMMIT fi From ccb0c5ebf8445212768238154d7e62c1904d896c Mon Sep 17 00:00:00 2001 From: dkijania Date: Mon, 30 Sep 2024 17:49:54 +0200 Subject: [PATCH 039/234] Fix missing block guardian --- src/test/mina_automation/missing_blocks_guardian.ml | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/src/test/mina_automation/missing_blocks_guardian.ml b/src/test/mina_automation/missing_blocks_guardian.ml index b74ea406855..42bbf0ec844 100644 --- a/src/test/mina_automation/missing_blocks_guardian.ml +++ b/src/test/mina_automation/missing_blocks_guardian.ml @@ -45,7 +45,7 @@ end let of_context context = Executor.of_context ~context ~dune_name:"scripts/archive/missing-blocks-guardian.sh" - ~official_name:"mina-missing-blocks-guardian" + ~official_name:"/etc/mina/archive/missing-blocks-guardian.sh" let run t ~config = run t ~args:(Config.to_args config) ~env:(Config.to_envs config) () From 1e4eec57f522d546c46b9add065ec931ebd90503 Mon Sep 17 00:00:00 2001 From: dkijania Date: Mon, 30 Sep 2024 18:01:31 +0200 Subject: [PATCH 040/234] add requests import to version-linter.py --- scripts/version-linter.py | 1 + 1 file changed, 1 insertion(+) diff --git a/scripts/version-linter.py b/scripts/version-linter.py index 29b555c0fe2..ab9c8337b72 100755 --- a/scripts/version-linter.py +++ b/scripts/version-linter.py @@ -24,6 +24,7 @@ import sys import re import sexpdata +import requests exit_code=0 From 717c727a5e32af6d827d421f1428811130d9501e Mon Sep 17 00:00:00 2001 From: dkijania Date: Tue, 1 Oct 2024 17:57:03 +0200 Subject: [PATCH 041/234] Fail the build on error match in output --- scripts/snark_transaction_profiler.py | 15 ++++++++------- 1 file changed, 8 insertions(+), 7 deletions(-) diff --git a/scripts/snark_transaction_profiler.py b/scripts/snark_transaction_profiler.py index ce0439c14f9..51a3abe5814 100755 --- a/scripts/snark_transaction_profiler.py +++ b/scripts/snark_transaction_profiler.py @@ -9,13 +9,8 @@ import json import re -exit_code=0 prog = 'mina' -def set_error(): - global exit_code - exit_code=1 - def parse_stats (output) : print(output) @@ -63,13 +58,19 @@ def parse_stats (output) : print(f'running snark transaction profiler: {args}') (process_exit_code,output) = subprocess.getstatusoutput(args) + stats = parse_stats (output) #TODO: add code to check against some threshold print(stats) + errors = ["Error", "Failure", "zkapp failed"] + + if any(x in a_string for x in matches): + print(f'Error detected in output ({" or ".join(error)}). 
Failing the build') + sys.exit(1) if not process_exit_code == 0: print('non-zero exit code from program, failing build') - sys.exit(process_exit_code) + sys.exit(1) else: - sys.exit(exit_code) \ No newline at end of file + sys.exit(0) \ No newline at end of file From e5c129d578ca65b8694002cf432cdf07236312a0 Mon Sep 17 00:00:00 2001 From: dkijania Date: Tue, 1 Oct 2024 19:28:26 +0200 Subject: [PATCH 042/234] Introduce checker for dhall (deps, dirtyWhen) --- buildkite/Makefile | 10 +- buildkite/scripts/dhall/checker.py | 219 ++++++++++++++++++++++++++++ buildkite/scripts/helm-ci.sh | 2 +- buildkite/src/Jobs/Lint/Dhall.dhall | 20 +++ buildkite/src/Monorepo.dhall | 4 +- buildkite/src/Prepare.dhall | 2 +- 6 files changed, 252 insertions(+), 5 deletions(-) create mode 100755 buildkite/scripts/dhall/checker.py diff --git a/buildkite/Makefile b/buildkite/Makefile index 33de08c0af8..4e44b848430 100644 --- a/buildkite/Makefile +++ b/buildkite/Makefile @@ -14,4 +14,12 @@ lint: find ./src/ -name "*.dhall" -print0 | xargs -I{} -0 -n1 bash -c 'echo "{}" && dhall --ascii lint --inplace {} || exit 255' format: - find ./src/ -name "*.dhall" -print0 | xargs -I{} -0 -n1 bash -c 'echo "{}" && dhall --ascii format --inplace {} || exit 255' \ No newline at end of file + find ./src/ -name "*.dhall" -print0 | xargs -I{} -0 -n1 bash -c 'echo "{}" && dhall --ascii format --inplace {} || exit 255' + +check_deps: + python3 scripts/dhall/checker.py --root ./src/Jobs deps + +check_dirty: + python3 scripts/dhall/checker.py --root ./src/Jobs dirty-when + +all: check_syntax lint format check_deps check_dirty \ No newline at end of file diff --git a/buildkite/scripts/dhall/checker.py b/buildkite/scripts/dhall/checker.py new file mode 100755 index 00000000000..ce6d6578d88 --- /dev/null +++ b/buildkite/scripts/dhall/checker.py @@ -0,0 +1,219 @@ +""" + Runs dhall checks like: + + - validate if all dependencies in jobs are covered + + python3 buildkite/scripts/dhall/checker.py --root ./buildkite/src/Jobs deps + + - all dirtyWhen entries relates to existing files + + python3 buildkite/scripts/dhall/checker.py --root ./buildkite/src/Jobs dirty-when + + - print commands for given job + + python3 buildkite/scripts/dhall/checker.py --root ./buildkite/src/Jobs print-cmd --job SingleNodeTest +""" + + +import argparse +import subprocess +import os +from glob import glob +import tempfile +from pathlib import Path +import yaml + + +class CmdColors: + HEADER = '\033[95m' + OKBLUE = '\033[94m' + OKCYAN = '\033[96m' + OKGREEN = '\033[92m' + WARNING = '\033[93m' + FAIL = '\033[91m' + ENDC = '\033[0m' + BOLD = '\033[1m' + UNDERLINE = '\033[4m' + + +class PipelineInfoBuilder: + + def __init__(self, temp, file): + with open(f"{temp}/{file}") as stream: + try: + self.pipeline = yaml.safe_load(stream) + self.file = file + except yaml.YAMLError as exc: + print(f"cannot parse correctly {temp}/{file}, due to {exc}") + exit(1) + + def get_steps(self): + steps = [] + for step in self.pipeline["pipeline"]["steps"]: + key = step["key"] + deps = [] + if "depends_on" in step: + for dependsOn in step["depends_on"]: + deps.append(dependsOn["step"]) + commands = step["commands"] + steps.append(Step(key, deps, commands)) + return steps + + def get_dirty(self): + dirty = [] + for dirtyWhen in self.pipeline["spec"]["dirtyWhen"]: + path = dirtyWhen["dir"][0] if "dir" in dirtyWhen else "" + exts = dirtyWhen["exts"][0] if "exts" in dirtyWhen else "" + strictEnd = bool(dirtyWhen["strictEnd"]) if ( + not "strictEnd" in dirtyWhen) else False + strictStart = 
bool(dirtyWhen["strictStart"]) if ( + not "strictStart" in dirtyWhen) else False + dirty.append(DirtyWhen(path=path, strictStart=strictStart, + strictEnd=strictEnd, extension=exts)) + return dirty + + def build(self): + steps = self.get_steps() + dirty = self.get_dirty() + return PipelineInfo(self.file, self.pipeline, steps, dirty) + + +class DirtyWhen: + + def __init__(self, path, extension, strictStart, strictEnd): + self.path = path + self.extension = extension + self.strictStart = strictStart + self.strictEnd = strictEnd + + def calculate_path(self): + if self.extension and self.path: + return glob(f"{self.path}.{self.extension}") + if self.strictEnd and not self.strictStart: + if not self.extension: + return glob(f"**/*/{self.path}") + else: + return glob(f"*/{self.path}.{self.extension}") + if self.strictStart and self.strictEnd: + if not self.extension: + return glob(f"{self.path}*") + else: + return glob(f"{self.path}.{self.extension}") + if self.strictStart and not self.strictEnd: + return glob(self.path + '.*') + if not self.strictStart and not self.strictEnd: + if not self.extension: + if "." in self.path: + return glob(f"**/*/{self.path}", recursive=True) + else: + return glob(f"{self.path}*") + else: + return glob(f"*.{self.extension}") + raise RuntimeError("invalid state dirty when") + + def __str__(self): + return f"path: '{self.path}', exts: '{self.extension}', startStrict:{self.strictStart}, startEnd:{self.strictEnd}" + + +class Step: + + def __init__(self, key, deps, commands): + self.key = key + self.deps = deps + self.commands = commands + + +class PipelineInfo: + + def __init__(self, file, pipeline, steps, dirty): + self.file = file + self.pipeline = pipeline + self.steps = steps + self.dirty = dirty + + def keys(self): + return [step.key for step in self.steps] + + +parser = argparse.ArgumentParser(description='Executes mina benchmarks') +parser.add_argument("--root", required=True, + help="root folder where all dhall files resides") + +subparsers = parser.add_subparsers(dest="cmd") +subparsers.add_parser('dirty-when') +subparsers.add_parser('deps') +run = subparsers.add_parser('print-cmd') +run.add_argument("--job", required=True, help="job to run") +run.add_argument("--step", required=False, help="job to run") + + +args = parser.parse_args() +tmp = tempfile.mkdtemp() + +print(f"Artifacts are stored in {tmp}") + +for file in [y for x in os.walk(args.root) for y in glob(os.path.join(x[0], '*.dhall'))]: + name = Path(file).stem + with open(f"{tmp}/{name}.yml", "w") as outfile: + subprocess.run(["dhall-to-yaml", "--quoted", "--file", + file], stdout=outfile, check=True) + + +pipelinesInfo = [PipelineInfoBuilder(tmp, file).build() + for file in os.listdir(path=tmp)] + +if args.cmd == "deps": + + keys = [] + for pipeline in pipelinesInfo: + keys.extend(pipeline.keys()) + + failedSteps = [] + + for pipeline in pipelinesInfo: + for step in pipeline.steps: + for dep in step.deps: + if not dep in keys: + failedSteps.append((pipeline, step, dep)) + + if any(failedSteps): + print("Fatal: Missing dependency resolution found:") + for (pipeline, step, dep) in failedSteps: + file = str.replace(pipeline.file, ".yml", ".dhall") + print( + f"\t{CmdColors.FAIL}[FATAL] Unresolved dependency for step '{step.key}' in '{file}' depends on non existing job '{dep}'{CmdColors.ENDC}") + exit(1) + + +if args.cmd == "print-cmd": + pipeline = next(filter(lambda x: args.job in x.file, pipelinesInfo)) + + def get_steps(): + if args.step: + return [next(filter(lambda x: args.step in x.key, 
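                                # picks the first step whose key contains --step;
                                # next() raises StopIteration if nothing matches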
pipeline.steps))] + else: + return pipeline.steps + + steps = get_steps() + + for step in steps: + for command in step.commands: + if not command.startswith("echo"): + print(command) + +if args.cmd == "dirty-when": + + failedSteps = [] + + for pipeline in pipelinesInfo: + for dirty in pipeline.dirty: + if not bool(dirty.calculate_path()): + failedSteps.append((pipeline, dirty)) + + if any(failedSteps): + print("Fatal: Non existing dirtyWhen path detected:") + for (pipeline, dirty) in failedSteps: + file = str.replace(pipeline.file, ".yml", ".dhall") + print( + f"\t{CmdColors.FAIL}[FATAL] Unresolved dirtyWhen path in '{file}' ('{str(dirty)}'){CmdColors.ENDC}") + exit(1) diff --git a/buildkite/scripts/helm-ci.sh b/buildkite/scripts/helm-ci.sh index a4c2508e5d4..0a11adbe0f5 100755 --- a/buildkite/scripts/helm-ci.sh +++ b/buildkite/scripts/helm-ci.sh @@ -4,7 +4,7 @@ set -eou pipefail diff=$( - ./buildkite/scripts/generate-diff.sh + ./buildkite/scripts/git/generate-diff.sh ) echo "--- Generated change DIFF: ${diff}" diff --git a/buildkite/src/Jobs/Lint/Dhall.dhall b/buildkite/src/Jobs/Lint/Dhall.dhall index 0f7d3e081b4..73dd7e55419 100644 --- a/buildkite/src/Jobs/Lint/Dhall.dhall +++ b/buildkite/src/Jobs/Lint/Dhall.dhall @@ -54,5 +54,25 @@ in Pipeline.build , image = (../../Constants/ContainerImages.dhall).toolchainBase } } + , Command.build + Command.Config::{ + , commands = [ Cmd.run "cd buildkite && make check_deps" ] + , label = "Dhall: deps" + , key = "check-dhall-deps" + , target = Size.Multi + , docker = Some Docker::{ + , image = (../../Constants/ContainerImages.dhall).toolchainBase + } + } + , Command.build + Command.Config::{ + , commands = [ Cmd.run "cd buildkite && make check_dirty" ] + , label = "Dhall: dirtyWhen" + , key = "check-dhall-dirty" + , target = Size.Multi + , docker = Some Docker::{ + , image = (../../Constants/ContainerImages.dhall).toolchainBase + } + } ] } diff --git a/buildkite/src/Monorepo.dhall b/buildkite/src/Monorepo.dhall index 7106d8358e5..67867e53f7d 100644 --- a/buildkite/src/Monorepo.dhall +++ b/buildkite/src/Monorepo.dhall @@ -35,8 +35,8 @@ let jobs let prefixCommands = [ Cmd.run "git config --global http.sslCAInfo /etc/ssl/certs/ca-bundle.crt" - , Cmd.run "./buildkite/scripts/refresh_code.sh" - , Cmd.run "./buildkite/scripts/generate-diff.sh > _computed_diff.txt" + , Cmd.run "./buildkite/scripts/git/refresh_code.sh" + , Cmd.run "./buildkite/scripts/git/generate-diff.sh > _computed_diff.txt" ] let commands diff --git a/buildkite/src/Prepare.dhall b/buildkite/src/Prepare.dhall index 82cdb1c4df6..52a0761207e 100644 --- a/buildkite/src/Prepare.dhall +++ b/buildkite/src/Prepare.dhall @@ -33,7 +33,7 @@ let config [ Cmd.run "export BUILDKITE_PIPELINE_MODE=${mode}" , Cmd.run "export BUILDKITE_PIPELINE_FILTER=${filter}" , Cmd.run - "./buildkite/scripts/generate-jobs.sh > buildkite/src/gen/Jobs.dhall" + "./buildkite/scripts/dhall/generate-jobs.sh > buildkite/src/gen/Jobs.dhall" , Cmd.quietly "dhall-to-yaml --quoted <<< '(./buildkite/src/Monorepo.dhall) { mode=(./buildkite/src/Pipeline/Mode.dhall).Type.${mode}, filter=(./buildkite/src/Pipeline/Filter.dhall).Type.${filter} }' | buildkite-agent pipeline upload" ] From 300f2701b64c80d09e68338ac94a4d41d6c33f0a Mon Sep 17 00:00:00 2001 From: dkijania Date: Tue, 1 Oct 2024 20:46:54 +0200 Subject: [PATCH 043/234] Fix usage of checker --- buildkite/Makefile | 2 +- buildkite/scripts/dhall/checker.py | 49 +++++++++++++----------------- 2 files changed, 22 insertions(+), 29 deletions(-) diff --git a/buildkite/Makefile 
b/buildkite/Makefile index 4e44b848430..d163c3bf273 100644 --- a/buildkite/Makefile +++ b/buildkite/Makefile @@ -20,6 +20,6 @@ check_deps: python3 scripts/dhall/checker.py --root ./src/Jobs deps check_dirty: - python3 scripts/dhall/checker.py --root ./src/Jobs dirty-when + python3 scripts/dhall/checker.py --root $(PWD)/src/Jobs dirty-when --repo "$(PWD)/../" all: check_syntax lint format check_deps check_dirty \ No newline at end of file diff --git a/buildkite/scripts/dhall/checker.py b/buildkite/scripts/dhall/checker.py index ce6d6578d88..53de3f872f0 100755 --- a/buildkite/scripts/dhall/checker.py +++ b/buildkite/scripts/dhall/checker.py @@ -65,9 +65,9 @@ def get_dirty(self): path = dirtyWhen["dir"][0] if "dir" in dirtyWhen else "" exts = dirtyWhen["exts"][0] if "exts" in dirtyWhen else "" strictEnd = bool(dirtyWhen["strictEnd"]) if ( - not "strictEnd" in dirtyWhen) else False + "strictEnd" in dirtyWhen) else False strictStart = bool(dirtyWhen["strictStart"]) if ( - not "strictStart" in dirtyWhen) else False + "strictStart" in dirtyWhen) else False dirty.append(DirtyWhen(path=path, strictStart=strictStart, strictEnd=strictEnd, extension=exts)) return dirty @@ -86,30 +86,20 @@ def __init__(self, path, extension, strictStart, strictEnd): self.strictStart = strictStart self.strictEnd = strictEnd - def calculate_path(self): - if self.extension and self.path: - return glob(f"{self.path}.{self.extension}") - if self.strictEnd and not self.strictStart: - if not self.extension: - return glob(f"**/*/{self.path}") - else: - return glob(f"*/{self.path}.{self.extension}") - if self.strictStart and self.strictEnd: - if not self.extension: - return glob(f"{self.path}*") - else: - return glob(f"{self.path}.{self.extension}") - if self.strictStart and not self.strictEnd: - return glob(self.path + '.*') - if not self.strictStart and not self.strictEnd: - if not self.extension: - if "." 
in self.path: - return glob(f"**/*/{self.path}", recursive=True) - else: - return glob(f"{self.path}*") - else: - return glob(f"*.{self.extension}") - raise RuntimeError("invalid state dirty when") + def calculate_path(self,repo): + if not self.path: + return glob(os.path.join(repo,f'**/*{self.extension}')) + if not self.extension: + if self.strictEnd and self.strictStart: + return glob(os.path.join(repo, f'{self.path}')) + if not self.strictEnd and self.strictStart: + return glob(os.path.join(repo, f'{self.path}*')) + if not self.strictStart and self.strictEnd: + return glob(os.path.join(repo, f'**/{self.path}'), recursive= True) + if not self.strictStart and not self.strictEnd: + return glob(os.path.join(repo, f'*{self.path}*')) + return glob(os.path.join(repo, f'{self.path}.{self.extension}')) + #raise RuntimeError("invalid state dirty when") def __str__(self): return f"path: '{self.path}', exts: '{self.extension}', startStrict:{self.strictStart}, startEnd:{self.strictEnd}" @@ -140,7 +130,10 @@ def keys(self): help="root folder where all dhall files resides") subparsers = parser.add_subparsers(dest="cmd") -subparsers.add_parser('dirty-when') +dirty_when = subparsers.add_parser('dirty-when') +dirty_when.add_argument("--repo", required=True, + help="root folder for mina repo") + subparsers.add_parser('deps') run = subparsers.add_parser('print-cmd') run.add_argument("--job", required=True, help="job to run") @@ -207,7 +200,7 @@ def get_steps(): for pipeline in pipelinesInfo: for dirty in pipeline.dirty: - if not bool(dirty.calculate_path()): + if not bool(dirty.calculate_path(args.repo)): failedSteps.append((pipeline, dirty)) if any(failedSteps): From 752a7c6b15e601cf0c3137d7daafb1785932b356 Mon Sep 17 00:00:00 2001 From: dkijania Date: Tue, 1 Oct 2024 20:47:07 +0200 Subject: [PATCH 044/234] Fix jobs (dirtyWhen & dependency) --- buildkite/src/Jobs/Lint/ValidationService.dhall | 5 +---- buildkite/src/Jobs/Lint/Xrefcheck.dhall | 2 +- buildkite/src/Jobs/Release/TestnetAlerts.dhall | 2 -- buildkite/src/Jobs/Test/RosettaIntegrationTests.dhall | 2 +- buildkite/src/Jobs/Test/RosettaIntegrationTestsLong.dhall | 2 +- buildkite/src/Jobs/Test/TerraformNetworkTest.dhall | 4 ++-- buildkite/src/Jobs/Test/TestnetIntegrationTests.dhall | 6 +++--- buildkite/src/Jobs/Test/TestnetIntegrationTestsLong.dhall | 2 +- 8 files changed, 10 insertions(+), 15 deletions(-) diff --git a/buildkite/src/Jobs/Lint/ValidationService.dhall b/buildkite/src/Jobs/Lint/ValidationService.dhall index c9b632ee054..109ec08afed 100644 --- a/buildkite/src/Jobs/Lint/ValidationService.dhall +++ b/buildkite/src/Jobs/Lint/ValidationService.dhall @@ -75,10 +75,7 @@ in Pipeline.build (S.contains "buildkite/src/Jobs/Lint/ValidationService") in JobSpec::{ - , dirtyWhen = - [ dirtyDhallDir - , S.strictlyStart (S.contains ValidationService.rootPath) - ] + , dirtyWhen = [ dirtyDhallDir ] , path = "Lint" , name = "ValidationService" , tags = [ PipelineTag.Type.Fast, PipelineTag.Type.Lint ] diff --git a/buildkite/src/Jobs/Lint/Xrefcheck.dhall b/buildkite/src/Jobs/Lint/Xrefcheck.dhall index 20092e9f4ee..d29e2b02a36 100644 --- a/buildkite/src/Jobs/Lint/Xrefcheck.dhall +++ b/buildkite/src/Jobs/Lint/Xrefcheck.dhall @@ -23,7 +23,7 @@ in Pipeline.build , spec = JobSpec::{ , dirtyWhen = [ SelectFiles.strictly SelectFiles::{ exts = Some [ "md" ] } - , SelectFiles.strictly (SelectFiles.contains ".xrefcheck.yml") + , SelectFiles.strictly (SelectFiles.contains ".xrefcheck.yaml") ] , path = "Lint" , name = "Xrefcheck" diff --git 
a/buildkite/src/Jobs/Release/TestnetAlerts.dhall b/buildkite/src/Jobs/Release/TestnetAlerts.dhall index 83d8b40b045..6bad658b1df 100644 --- a/buildkite/src/Jobs/Release/TestnetAlerts.dhall +++ b/buildkite/src/Jobs/Release/TestnetAlerts.dhall @@ -40,8 +40,6 @@ in Pipeline.build , label = "Deploy Testnet alert rules" , key = "deploy-testnet-alerts" , target = Size.Medium - , depends_on = - [ { name = "TestnetAlerts", key = "lint-testnet-alerts" } ] , docker = None Docker.Type , if = Some "build.env('DEPLOY_ALERTS') == 'true'" } diff --git a/buildkite/src/Jobs/Test/RosettaIntegrationTests.dhall b/buildkite/src/Jobs/Test/RosettaIntegrationTests.dhall index 19dae98ec15..9c8e1b199de 100644 --- a/buildkite/src/Jobs/Test/RosettaIntegrationTests.dhall +++ b/buildkite/src/Jobs/Test/RosettaIntegrationTests.dhall @@ -64,7 +64,7 @@ in Pipeline.build Dockers.Type.Bullseye (Some Network.Type.Berkeley) Profiles.Type.Standard - Artifacts.Type.Rosetta + Artifacts.Type.Daemon } ] } diff --git a/buildkite/src/Jobs/Test/RosettaIntegrationTestsLong.dhall b/buildkite/src/Jobs/Test/RosettaIntegrationTestsLong.dhall index 4f0430533d6..53a94bc327d 100644 --- a/buildkite/src/Jobs/Test/RosettaIntegrationTestsLong.dhall +++ b/buildkite/src/Jobs/Test/RosettaIntegrationTestsLong.dhall @@ -56,7 +56,7 @@ in Pipeline.build Dockers.Type.Bullseye (Some Network.Type.Berkeley) Profiles.Type.Standard - Artifacts.Type.Rosetta + Artifacts.Type.Daemon } ] } diff --git a/buildkite/src/Jobs/Test/TerraformNetworkTest.dhall b/buildkite/src/Jobs/Test/TerraformNetworkTest.dhall index 1a8db62038f..a0f1b8bb2a6 100644 --- a/buildkite/src/Jobs/Test/TerraformNetworkTest.dhall +++ b/buildkite/src/Jobs/Test/TerraformNetworkTest.dhall @@ -35,8 +35,8 @@ in Pipeline.build Pipeline.Config::{ , spec = let unitDirtyWhen = - [ S.strictlyStart (S.contains "src/automation/terraform") - , S.strictlyStart (S.contains "src/helm") + [ S.strictlyStart (S.contains "automation/terraform") + , S.strictlyStart (S.contains "helm") , S.strictlyStart (S.contains "buildkite/src/Jobs/Test/TerraformNetworkTest") , S.strictlyStart diff --git a/buildkite/src/Jobs/Test/TestnetIntegrationTests.dhall b/buildkite/src/Jobs/Test/TestnetIntegrationTests.dhall index d7acd176432..cae92fdbfd2 100644 --- a/buildkite/src/Jobs/Test/TestnetIntegrationTests.dhall +++ b/buildkite/src/Jobs/Test/TestnetIntegrationTests.dhall @@ -39,18 +39,18 @@ in Pipeline.build , S.strictlyStart (S.contains "buildkite/src/Jobs/Test/TestnetIntegrationTest") , S.strictlyStart - (S.contains "buildkite/src/Jobs/Command/TestExecutive") + (S.contains "buildkite/src/Command/TestExecutive") , S.strictlyStart (S.contains "automation/terraform/modules/o1-integration") , S.strictlyStart (S.contains "automation/terraform/modules/kubernetes/testnet") , S.strictlyStart ( S.contains - "automation/buildkite/script/run-test-executive-cloud" + "buildkite/scripts/run-test-executive-cloud" ) , S.strictlyStart ( S.contains - "automation/buildkite/script/run-test-executive-local" + "buildkite/scripts/run-test-executive-local" ) ] , path = "Test" diff --git a/buildkite/src/Jobs/Test/TestnetIntegrationTestsLong.dhall b/buildkite/src/Jobs/Test/TestnetIntegrationTestsLong.dhall index 5cf7f25bdb3..22733be4e00 100644 --- a/buildkite/src/Jobs/Test/TestnetIntegrationTestsLong.dhall +++ b/buildkite/src/Jobs/Test/TestnetIntegrationTestsLong.dhall @@ -39,7 +39,7 @@ in Pipeline.build , S.strictlyStart (S.contains "buildkite/src/Jobs/Test/TestnetIntegrationTest") , S.strictlyStart - (S.contains 
"buildkite/src/Jobs/Command/TestExecutive") + (S.contains "buildkite/src/Command/TestExecutive") , S.strictlyStart (S.contains "automation/terraform/modules/o1-integration") , S.strictlyStart From 3f17bcbe47bf4443ab3dfe90a27222590b37f2e3 Mon Sep 17 00:00:00 2001 From: dkijania Date: Tue, 1 Oct 2024 21:22:12 +0200 Subject: [PATCH 045/234] Revert "Auxiliary commit to revert individual files from e5c129d578ca65b8694002cf432cdf07236312a0" This reverts commit 35f9de53f244c91656d101cb4b523deb6985367c. --- buildkite/src/Monorepo.dhall | 4 ++-- buildkite/src/Prepare.dhall | 2 +- 2 files changed, 3 insertions(+), 3 deletions(-) diff --git a/buildkite/src/Monorepo.dhall b/buildkite/src/Monorepo.dhall index 67867e53f7d..7106d8358e5 100644 --- a/buildkite/src/Monorepo.dhall +++ b/buildkite/src/Monorepo.dhall @@ -35,8 +35,8 @@ let jobs let prefixCommands = [ Cmd.run "git config --global http.sslCAInfo /etc/ssl/certs/ca-bundle.crt" - , Cmd.run "./buildkite/scripts/git/refresh_code.sh" - , Cmd.run "./buildkite/scripts/git/generate-diff.sh > _computed_diff.txt" + , Cmd.run "./buildkite/scripts/refresh_code.sh" + , Cmd.run "./buildkite/scripts/generate-diff.sh > _computed_diff.txt" ] let commands diff --git a/buildkite/src/Prepare.dhall b/buildkite/src/Prepare.dhall index 52a0761207e..82cdb1c4df6 100644 --- a/buildkite/src/Prepare.dhall +++ b/buildkite/src/Prepare.dhall @@ -33,7 +33,7 @@ let config [ Cmd.run "export BUILDKITE_PIPELINE_MODE=${mode}" , Cmd.run "export BUILDKITE_PIPELINE_FILTER=${filter}" , Cmd.run - "./buildkite/scripts/dhall/generate-jobs.sh > buildkite/src/gen/Jobs.dhall" + "./buildkite/scripts/generate-jobs.sh > buildkite/src/gen/Jobs.dhall" , Cmd.quietly "dhall-to-yaml --quoted <<< '(./buildkite/src/Monorepo.dhall) { mode=(./buildkite/src/Pipeline/Mode.dhall).Type.${mode}, filter=(./buildkite/src/Pipeline/Filter.dhall).Type.${filter} }' | buildkite-agent pipeline upload" ] From 2de0853f71015fbfd53d51cc579a0605de579e2d Mon Sep 17 00:00:00 2001 From: dkijania Date: Tue, 1 Oct 2024 21:26:02 +0200 Subject: [PATCH 046/234] remove comment from checker.py --- buildkite/scripts/dhall/checker.py | 1 - 1 file changed, 1 deletion(-) diff --git a/buildkite/scripts/dhall/checker.py b/buildkite/scripts/dhall/checker.py index 53de3f872f0..4e7f49f3900 100755 --- a/buildkite/scripts/dhall/checker.py +++ b/buildkite/scripts/dhall/checker.py @@ -99,7 +99,6 @@ def calculate_path(self,repo): if not self.strictStart and not self.strictEnd: return glob(os.path.join(repo, f'*{self.path}*')) return glob(os.path.join(repo, f'{self.path}.{self.extension}')) - #raise RuntimeError("invalid state dirty when") def __str__(self): return f"path: '{self.path}', exts: '{self.extension}', startStrict:{self.strictStart}, startEnd:{self.strictEnd}" From 416becbaf87be1f33aacb3184128df1fd7056d09 Mon Sep 17 00:00:00 2001 From: dkijania Date: Tue, 1 Oct 2024 21:27:27 +0200 Subject: [PATCH 047/234] Revert "Auxiliary commit to revert individual files from e5c129d578ca65b8694002cf432cdf07236312a0" This reverts commit 561a5b889757092f8d9c4e21605b96d36dc53828. 
--- buildkite/scripts/helm-ci.sh | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/buildkite/scripts/helm-ci.sh b/buildkite/scripts/helm-ci.sh index 0a11adbe0f5..a4c2508e5d4 100755 --- a/buildkite/scripts/helm-ci.sh +++ b/buildkite/scripts/helm-ci.sh @@ -4,7 +4,7 @@ set -eou pipefail diff=$( - ./buildkite/scripts/git/generate-diff.sh + ./buildkite/scripts/generate-diff.sh ) echo "--- Generated change DIFF: ${diff}" From 4608a8ad3050832dbe6ecbfe7d166974191e6f55 Mon Sep 17 00:00:00 2001 From: dkijania Date: Tue, 1 Oct 2024 21:31:10 +0200 Subject: [PATCH 048/234] use toolchain for python check + format & lints --- buildkite/src/Jobs/Lint/Dhall.dhall | 20 +++++++++++-------- .../Jobs/Test/TestnetIntegrationTests.dhall | 11 +++------- .../Test/TestnetIntegrationTestsLong.dhall | 3 +-- 3 files changed, 16 insertions(+), 18 deletions(-) diff --git a/buildkite/src/Jobs/Lint/Dhall.dhall b/buildkite/src/Jobs/Lint/Dhall.dhall index 73dd7e55419..68d5678e684 100644 --- a/buildkite/src/Jobs/Lint/Dhall.dhall +++ b/buildkite/src/Jobs/Lint/Dhall.dhall @@ -12,6 +12,8 @@ let Command = ../../Command/Base.dhall let Docker = ../../Command/Docker/Type.dhall +let RunInToolchain = ../../Command/RunInToolchain.dhall + let Size = ../../Command/Size.dhall in Pipeline.build @@ -56,23 +58,25 @@ in Pipeline.build } , Command.build Command.Config::{ - , commands = [ Cmd.run "cd buildkite && make check_deps" ] + , commands = + RunInToolchain.runInToolchainBullseye + ([] : List Text) + "cd buildkite && make check_deps" , label = "Dhall: deps" , key = "check-dhall-deps" , target = Size.Multi - , docker = Some Docker::{ - , image = (../../Constants/ContainerImages.dhall).toolchainBase - } + , docker = None Docker.Type } , Command.build Command.Config::{ - , commands = [ Cmd.run "cd buildkite && make check_dirty" ] + , commands = + RunInToolchain.runInToolchainBullseye + ([] : List Text) + "cd buildkite && make check_dirty" , label = "Dhall: dirtyWhen" , key = "check-dhall-dirty" , target = Size.Multi - , docker = Some Docker::{ - , image = (../../Constants/ContainerImages.dhall).toolchainBase - } + , docker = None Docker.Type } ] } diff --git a/buildkite/src/Jobs/Test/TestnetIntegrationTests.dhall b/buildkite/src/Jobs/Test/TestnetIntegrationTests.dhall index cae92fdbfd2..6e0a3796207 100644 --- a/buildkite/src/Jobs/Test/TestnetIntegrationTests.dhall +++ b/buildkite/src/Jobs/Test/TestnetIntegrationTests.dhall @@ -38,20 +38,15 @@ in Pipeline.build , S.strictlyStart (S.contains "dockerfiles") , S.strictlyStart (S.contains "buildkite/src/Jobs/Test/TestnetIntegrationTest") - , S.strictlyStart - (S.contains "buildkite/src/Command/TestExecutive") + , S.strictlyStart (S.contains "buildkite/src/Command/TestExecutive") , S.strictlyStart (S.contains "automation/terraform/modules/o1-integration") , S.strictlyStart (S.contains "automation/terraform/modules/kubernetes/testnet") , S.strictlyStart - ( S.contains - "buildkite/scripts/run-test-executive-cloud" - ) + (S.contains "buildkite/scripts/run-test-executive-cloud") , S.strictlyStart - ( S.contains - "buildkite/scripts/run-test-executive-local" - ) + (S.contains "buildkite/scripts/run-test-executive-local") ] , path = "Test" , name = "TestnetIntegrationTests" diff --git a/buildkite/src/Jobs/Test/TestnetIntegrationTestsLong.dhall b/buildkite/src/Jobs/Test/TestnetIntegrationTestsLong.dhall index 22733be4e00..484e128740d 100644 --- a/buildkite/src/Jobs/Test/TestnetIntegrationTestsLong.dhall +++ b/buildkite/src/Jobs/Test/TestnetIntegrationTestsLong.dhall @@ -38,8 +38,7 @@ in 
Pipeline.build , S.strictlyStart (S.contains "dockerfiles") , S.strictlyStart (S.contains "buildkite/src/Jobs/Test/TestnetIntegrationTest") - , S.strictlyStart - (S.contains "buildkite/src/Command/TestExecutive") + , S.strictlyStart (S.contains "buildkite/src/Command/TestExecutive") , S.strictlyStart (S.contains "automation/terraform/modules/o1-integration") , S.strictlyStart From bcbb84c5504ff68f16ef4a640af36489000dbe91 Mon Sep 17 00:00:00 2001 From: dkijania Date: Tue, 1 Oct 2024 22:47:17 +0200 Subject: [PATCH 049/234] do not use inner docker when calling deps and dirtyWhen checks --- buildkite/src/Jobs/Lint/Dhall.dhall | 12 ++---------- 1 file changed, 2 insertions(+), 10 deletions(-) diff --git a/buildkite/src/Jobs/Lint/Dhall.dhall b/buildkite/src/Jobs/Lint/Dhall.dhall index 68d5678e684..b56a1495e74 100644 --- a/buildkite/src/Jobs/Lint/Dhall.dhall +++ b/buildkite/src/Jobs/Lint/Dhall.dhall @@ -12,8 +12,6 @@ let Command = ../../Command/Base.dhall let Docker = ../../Command/Docker/Type.dhall -let RunInToolchain = ../../Command/RunInToolchain.dhall - let Size = ../../Command/Size.dhall in Pipeline.build @@ -58,10 +56,7 @@ in Pipeline.build } , Command.build Command.Config::{ - , commands = - RunInToolchain.runInToolchainBullseye - ([] : List Text) - "cd buildkite && make check_deps" + , commands = [ Cmd.run "cd buildkite && make check_deps" ] , label = "Dhall: deps" , key = "check-dhall-deps" , target = Size.Multi @@ -69,10 +64,7 @@ in Pipeline.build } , Command.build Command.Config::{ - , commands = - RunInToolchain.runInToolchainBullseye - ([] : List Text) - "cd buildkite && make check_dirty" + , commands = [ Cmd.run "cd buildkite && make check_dirty" ] , label = "Dhall: dirtyWhen" , key = "check-dhall-dirty" , target = Size.Multi From bc9370c606585e93afac2bf6217881738d9578cf Mon Sep 17 00:00:00 2001 From: dkijania Date: Tue, 1 Oct 2024 23:25:33 +0200 Subject: [PATCH 050/234] use missing_blocks_guardian as binary --- scripts/debian/builder-helpers.sh | 2 +- src/test/mina_automation/missing_blocks_guardian.ml | 2 +- 2 files changed, 2 insertions(+), 2 deletions(-) diff --git a/scripts/debian/builder-helpers.sh b/scripts/debian/builder-helpers.sh index b98764d0da9..e3d88c562d0 100755 --- a/scripts/debian/builder-helpers.sh +++ b/scripts/debian/builder-helpers.sh @@ -365,7 +365,7 @@ build_archive_deb () { cp ./default/src/app/extract_blocks/extract_blocks.exe "${BUILDDIR}/usr/local/bin/mina-extract-blocks" mkdir -p "${BUILDDIR}/etc/mina/archive" - cp ../scripts/archive/missing-blocks-guardian.sh "${BUILDDIR}/etc/mina/archive" + cp ../scripts/archive/missing-blocks-guardian.sh "${BUILDDIR}/usr/local/bin/mina-missing-blocks-guardian" cp ./default/src/app/missing_blocks_auditor/missing_blocks_auditor.exe "${BUILDDIR}/usr/local/bin/mina-missing-blocks-auditor" cp ./default/src/app/replayer/replayer.exe "${BUILDDIR}/usr/local/bin/mina-replayer" diff --git a/src/test/mina_automation/missing_blocks_guardian.ml b/src/test/mina_automation/missing_blocks_guardian.ml index 42bbf0ec844..b74ea406855 100644 --- a/src/test/mina_automation/missing_blocks_guardian.ml +++ b/src/test/mina_automation/missing_blocks_guardian.ml @@ -45,7 +45,7 @@ end let of_context context = Executor.of_context ~context ~dune_name:"scripts/archive/missing-blocks-guardian.sh" - ~official_name:"/etc/mina/archive/missing-blocks-guardian.sh" + ~official_name:"mina-missing-blocks-guardian" let run t ~config = run t ~args:(Config.to_args config) ~env:(Config.to_envs config) () From a26404b978104f19d08048889511569dcf97de8a Mon Sep 17 
00:00:00 2001 From: dkijania Date: Tue, 1 Oct 2024 23:57:51 +0200 Subject: [PATCH 051/234] use output for error checks --- scripts/snark_transaction_profiler.py | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/scripts/snark_transaction_profiler.py b/scripts/snark_transaction_profiler.py index 51a3abe5814..c1e8d25afff 100755 --- a/scripts/snark_transaction_profiler.py +++ b/scripts/snark_transaction_profiler.py @@ -65,8 +65,8 @@ def parse_stats (output) : errors = ["Error", "Failure", "zkapp failed"] - if any(x in a_string for x in matches): - print(f'Error detected in output ({" or ".join(error)}). Failing the build') + if any(x in output for x in errors): + print(f'Error detected in output ({" or ".join(errors)}). Failing the build') sys.exit(1) if not process_exit_code == 0: From b53580c51b4f194444959653486eed53ae44e483 Mon Sep 17 00:00:00 2001 From: dkijania Date: Wed, 2 Oct 2024 08:52:28 +0200 Subject: [PATCH 052/234] remove duplicated upload attempt --- buildkite/scripts/version-linter-patch-missing-type-shapes.sh | 4 ---- 1 file changed, 4 deletions(-) diff --git a/buildkite/scripts/version-linter-patch-missing-type-shapes.sh b/buildkite/scripts/version-linter-patch-missing-type-shapes.sh index 4633b1890c8..74c60dc8bd5 100755 --- a/buildkite/scripts/version-linter-patch-missing-type-shapes.sh +++ b/buildkite/scripts/version-linter-patch-missing-type-shapes.sh @@ -34,10 +34,6 @@ function checkout_and_dump() { source buildkite/scripts/gsutil-upload.sh /tmp/${TYPE_SHAPE_FILE} gs://mina-type-shapes } -if ! $(gsutil ls gs://mina-type-shapes/${BUILDKITE_COMMIT:0:7} 2>/dev/null); then - checkout_and_dump $BUILDKITE_COMMIT -fi - if ! $(gsutil ls gs://mina-type-shapes/$RELEASE_BRANCH_COMMIT 2>/dev/null); then checkout_and_dump $RELEASE_BRANCH_COMMIT fi From 93fa787ef74e3412729a575a3e88e3c59d53f651 Mon Sep 17 00:00:00 2001 From: dkijania Date: Wed, 2 Oct 2024 08:54:20 +0200 Subject: [PATCH 053/234] remove duplicated check for mina-type-shape which should be assured in dump-mina-type-shape script --- buildkite/scripts/version-linter-patch-missing-type-shapes.sh | 4 ---- 1 file changed, 4 deletions(-) diff --git a/buildkite/scripts/version-linter-patch-missing-type-shapes.sh b/buildkite/scripts/version-linter-patch-missing-type-shapes.sh index 4633b1890c8..74c60dc8bd5 100755 --- a/buildkite/scripts/version-linter-patch-missing-type-shapes.sh +++ b/buildkite/scripts/version-linter-patch-missing-type-shapes.sh @@ -34,10 +34,6 @@ function checkout_and_dump() { source buildkite/scripts/gsutil-upload.sh /tmp/${TYPE_SHAPE_FILE} gs://mina-type-shapes } -if ! $(gsutil ls gs://mina-type-shapes/${BUILDKITE_COMMIT:0:7} 2>/dev/null); then - checkout_and_dump $BUILDKITE_COMMIT -fi - if ! 
$(gsutil ls gs://mina-type-shapes/$RELEASE_BRANCH_COMMIT 2>/dev/null); then checkout_and_dump $RELEASE_BRANCH_COMMIT fi From 3cf8a37a715a9e4caadb25562f9c458c22e41326 Mon Sep 17 00:00:00 2001 From: dkijania Date: Wed, 2 Oct 2024 10:41:54 +0200 Subject: [PATCH 054/234] do not remove last block before patching --- src/test/archive/patch_archive_test/patch_archive_test.ml | 5 ++++- 1 file changed, 4 insertions(+), 1 deletion(-) diff --git a/src/test/archive/patch_archive_test/patch_archive_test.ml b/src/test/archive/patch_archive_test/patch_archive_test.ml index 8d0b2f91a67..5ba17608041 100644 --- a/src/test/archive/patch_archive_test/patch_archive_test.ml +++ b/src/test/archive/patch_archive_test/patch_archive_test.ml @@ -77,7 +77,10 @@ let main ~db_uri ~network_data_folder () = let n = List.init missing_blocks_count ~f:(fun _ -> - Random.int (List.length extensional_files) ) + (* never remove last block as missing-block-guardian can have issues when patching it + as it patching only gaps + *) + Random.int (List.length extensional_files - 1) ) in let unpatched_extensional_files = From aaf80c63313edacf3e311ec16d4798db96222c71 Mon Sep 17 00:00:00 2001 From: dkijania Date: Wed, 2 Oct 2024 11:32:17 +0200 Subject: [PATCH 055/234] Using dhall-to-yaml in toolchain base while python script in toolchain container --- buildkite/Makefile | 9 +++++-- buildkite/scripts/dhall/checker.py | 21 ++++++--------- .../scripts/dhall/dump_dhall_to_pipelines.sh | 24 +++++++++++++++++ buildkite/src/Jobs/Lint/Dhall.dhall | 26 ++++++++++++++++--- 4 files changed, 61 insertions(+), 19 deletions(-) create mode 100755 buildkite/scripts/dhall/dump_dhall_to_pipelines.sh diff --git a/buildkite/Makefile b/buildkite/Makefile index d163c3bf273..b055603295a 100644 --- a/buildkite/Makefile +++ b/buildkite/Makefile @@ -16,10 +16,15 @@ lint: format: find ./src/ -name "*.dhall" -print0 | xargs -I{} -0 -n1 bash -c 'echo "{}" && dhall --ascii format --inplace {} || exit 255' + check_deps: - python3 scripts/dhall/checker.py --root ./src/Jobs deps + $(eval TMP := $(shell mktemp -d)) + scripts/dhall/dump_dhall_to_pipelines.sh src/Jobs "$(TMP)" + python3 scripts/dhall/checker.py --root "$(TMP)" deps check_dirty: - python3 scripts/dhall/checker.py --root $(PWD)/src/Jobs dirty-when --repo "$(PWD)/../" + $(eval TMP := $(shell mktemp -d)) + scripts/dhall/dump_dhall_to_pipelines.sh src/Jobs "$(TMP)" + python3 scripts/dhall/checker.py --root "$(TMP)" dirty-when --repo "$(PWD)/../" all: check_syntax lint format check_deps check_dirty \ No newline at end of file diff --git a/buildkite/scripts/dhall/checker.py b/buildkite/scripts/dhall/checker.py index 4e7f49f3900..df64bc45092 100755 --- a/buildkite/scripts/dhall/checker.py +++ b/buildkite/scripts/dhall/checker.py @@ -134,25 +134,17 @@ def keys(self): help="root folder for mina repo") subparsers.add_parser('deps') + + run = subparsers.add_parser('print-cmd') run.add_argument("--job", required=True, help="job to run") run.add_argument("--step", required=False, help="job to run") args = parser.parse_args() -tmp = tempfile.mkdtemp() - -print(f"Artifacts are stored in {tmp}") -for file in [y for x in os.walk(args.root) for y in glob(os.path.join(x[0], '*.dhall'))]: - name = Path(file).stem - with open(f"{tmp}/{name}.yml", "w") as outfile: - subprocess.run(["dhall-to-yaml", "--quoted", "--file", - file], stdout=outfile, check=True) - - -pipelinesInfo = [PipelineInfoBuilder(tmp, file).build() - for file in os.listdir(path=tmp)] +pipelinesInfo = [PipelineInfoBuilder(args.root, file).build() + for file in 
os.listdir(path=args.root)] if args.cmd == "deps": @@ -175,7 +167,8 @@ def keys(self): print( f"\t{CmdColors.FAIL}[FATAL] Unresolved dependency for step '{step.key}' in '{file}' depends on non existing job '{dep}'{CmdColors.ENDC}") exit(1) - + else: + print('Pipelines definitions correct') if args.cmd == "print-cmd": pipeline = next(filter(lambda x: args.job in x.file, pipelinesInfo)) @@ -209,3 +202,5 @@ def get_steps(): print( f"\t{CmdColors.FAIL}[FATAL] Unresolved dirtyWhen path in '{file}' ('{str(dirty)}'){CmdColors.ENDC}") exit(1) + else: + print('Pipelines definitions correct') diff --git a/buildkite/scripts/dhall/dump_dhall_to_pipelines.sh b/buildkite/scripts/dhall/dump_dhall_to_pipelines.sh new file mode 100755 index 00000000000..84193329b76 --- /dev/null +++ b/buildkite/scripts/dhall/dump_dhall_to_pipelines.sh @@ -0,0 +1,24 @@ +#!/bin/bash + +ROOT=$1 +OUTPUT=$2 + +mkdir -p "$OUTPUT" + +shopt -s globstar nullglob + +echo "Dumping pipelines from '$ROOT' to '$OUTPUT'" + +COUNTER=0 + +for file in "$ROOT"/**/*.dhall +do + filename=$(basename "$file") + filename="${filename%.*}" + + dhall-to-yaml --quoted --file "$file" > "$OUTPUT"/"$filename".yml + + COUNTER=$((COUNTER+1)) +done + +echo "Done. $COUNTER jobs exported" diff --git a/buildkite/src/Jobs/Lint/Dhall.dhall b/buildkite/src/Jobs/Lint/Dhall.dhall index b56a1495e74..c26aa6c8698 100644 --- a/buildkite/src/Jobs/Lint/Dhall.dhall +++ b/buildkite/src/Jobs/Lint/Dhall.dhall @@ -14,6 +14,8 @@ let Docker = ../../Command/Docker/Type.dhall let Size = ../../Command/Size.dhall +let RunInToolchain = ../../Command/RunInToolchain.dhall + in Pipeline.build Pipeline.Config::{ , spec = JobSpec::{ @@ -56,19 +58,35 @@ in Pipeline.build } , Command.build Command.Config::{ - , commands = [ Cmd.run "cd buildkite && make check_deps" ] + , commands = + [ Cmd.run + "buildkite/scripts/dhall/dump_dhall_to_pipelines.sh buildkite/src/Jobs _pipelines" + ] + # RunInToolchain.runInToolchainBullseye + ([] : List Text) + "python3 ./buildkite/scripts/dhall/checker.py --root _pipelines deps" , label = "Dhall: deps" , key = "check-dhall-deps" , target = Size.Multi - , docker = None Docker.Type + , docker = Some Docker::{ + , image = (../../Constants/ContainerImages.dhall).toolchainBase + } } , Command.build Command.Config::{ - , commands = [ Cmd.run "cd buildkite && make check_dirty" ] + , commands = + [ Cmd.run + "buildkite/scripts/dhall/dump_dhall_to_pipelines.sh buildkite/src/Jobs _pipelines" + ] + # RunInToolchain.runInToolchainBullseye + ([] : List Text) + "python3 scripts/dhall/checker.py --root _pipelines dirty-when --repo ." 
, label = "Dhall: dirtyWhen" , key = "check-dhall-dirty" , target = Size.Multi - , docker = None Docker.Type + , docker = Some Docker::{ + , image = (../../Constants/ContainerImages.dhall).toolchainBase + } } ] } From ec996e22a0d695c624452d2ac56f570378806f11 Mon Sep 17 00:00:00 2001 From: dkijania Date: Wed, 2 Oct 2024 16:19:06 +0200 Subject: [PATCH 056/234] run dump pipelines in docker --- buildkite/src/Jobs/Lint/Dhall.dhall | 23 +++++++++++------------ 1 file changed, 11 insertions(+), 12 deletions(-) diff --git a/buildkite/src/Jobs/Lint/Dhall.dhall b/buildkite/src/Jobs/Lint/Dhall.dhall index c26aa6c8698..999c45712f8 100644 --- a/buildkite/src/Jobs/Lint/Dhall.dhall +++ b/buildkite/src/Jobs/Lint/Dhall.dhall @@ -16,6 +16,13 @@ let Size = ../../Command/Size.dhall let RunInToolchain = ../../Command/RunInToolchain.dhall +let dump_pipelines_cmd = + Cmd.runInDocker + Cmd.Docker::{ + , image = (../../Constants/ContainerImages.dhall).toolchainBase + } + "buildkite/scripts/dhall/dump_dhall_to_pipelines.sh buildkite/src/Jobs _pipelines" + in Pipeline.build Pipeline.Config::{ , spec = JobSpec::{ @@ -59,34 +66,26 @@ in Pipeline.build , Command.build Command.Config::{ , commands = - [ Cmd.run - "buildkite/scripts/dhall/dump_dhall_to_pipelines.sh buildkite/src/Jobs _pipelines" - ] + [ dump_pipelines_cmd ] # RunInToolchain.runInToolchainBullseye ([] : List Text) "python3 ./buildkite/scripts/dhall/checker.py --root _pipelines deps" , label = "Dhall: deps" , key = "check-dhall-deps" , target = Size.Multi - , docker = Some Docker::{ - , image = (../../Constants/ContainerImages.dhall).toolchainBase - } + , docker = None Docker.Type } , Command.build Command.Config::{ , commands = - [ Cmd.run - "buildkite/scripts/dhall/dump_dhall_to_pipelines.sh buildkite/src/Jobs _pipelines" - ] + [ dump_pipelines_cmd ] # RunInToolchain.runInToolchainBullseye ([] : List Text) "python3 scripts/dhall/checker.py --root _pipelines dirty-when --repo ." , label = "Dhall: dirtyWhen" , key = "check-dhall-dirty" , target = Size.Multi - , docker = Some Docker::{ - , image = (../../Constants/ContainerImages.dhall).toolchainBase - } + , docker = None Docker.Type } ] } From 16157b4e66bd09c838b67283dc7c7715846171f5 Mon Sep 17 00:00:00 2001 From: dkijania Date: Thu, 3 Oct 2024 09:29:49 +0200 Subject: [PATCH 057/234] fix path to checker.py for dirtyWhen --- buildkite/src/Jobs/Lint/Dhall.dhall | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/buildkite/src/Jobs/Lint/Dhall.dhall b/buildkite/src/Jobs/Lint/Dhall.dhall index 999c45712f8..c813211fb28 100644 --- a/buildkite/src/Jobs/Lint/Dhall.dhall +++ b/buildkite/src/Jobs/Lint/Dhall.dhall @@ -81,7 +81,7 @@ in Pipeline.build [ dump_pipelines_cmd ] # RunInToolchain.runInToolchainBullseye ([] : List Text) - "python3 scripts/dhall/checker.py --root _pipelines dirty-when --repo ." + "python3 ./buildkite/scripts/dhall/checker.py --root _pipelines dirty-when --repo ." 
, label = "Dhall: dirtyWhen" , key = "check-dhall-dirty" , target = Size.Multi From 4bc2436e9eee50fab39dad8143f13565a88f4869 Mon Sep 17 00:00:00 2001 From: dkijania Date: Thu, 3 Oct 2024 10:38:38 +0200 Subject: [PATCH 058/234] fix RosettaIntegration tests dependencies from daemon to rosetta docker --- buildkite/src/Jobs/Test/RosettaIntegrationTests.dhall | 2 +- buildkite/src/Jobs/Test/RosettaIntegrationTestsLong.dhall | 2 +- 2 files changed, 2 insertions(+), 2 deletions(-) diff --git a/buildkite/src/Jobs/Test/RosettaIntegrationTests.dhall b/buildkite/src/Jobs/Test/RosettaIntegrationTests.dhall index 9c8e1b199de..19dae98ec15 100644 --- a/buildkite/src/Jobs/Test/RosettaIntegrationTests.dhall +++ b/buildkite/src/Jobs/Test/RosettaIntegrationTests.dhall @@ -64,7 +64,7 @@ in Pipeline.build Dockers.Type.Bullseye (Some Network.Type.Berkeley) Profiles.Type.Standard - Artifacts.Type.Daemon + Artifacts.Type.Rosetta } ] } diff --git a/buildkite/src/Jobs/Test/RosettaIntegrationTestsLong.dhall b/buildkite/src/Jobs/Test/RosettaIntegrationTestsLong.dhall index 53a94bc327d..4f0430533d6 100644 --- a/buildkite/src/Jobs/Test/RosettaIntegrationTestsLong.dhall +++ b/buildkite/src/Jobs/Test/RosettaIntegrationTestsLong.dhall @@ -56,7 +56,7 @@ in Pipeline.build Dockers.Type.Bullseye (Some Network.Type.Berkeley) Profiles.Type.Standard - Artifacts.Type.Daemon + Artifacts.Type.Rosetta } ] } From 9613e4d9a446d60f09c01a5a265a0141d7727550 Mon Sep 17 00:00:00 2001 From: dkijania Date: Thu, 3 Oct 2024 13:13:44 +0200 Subject: [PATCH 059/234] remove log parsing --- scripts/snark_transaction_profiler.py | 8 -------- 1 file changed, 8 deletions(-) diff --git a/scripts/snark_transaction_profiler.py b/scripts/snark_transaction_profiler.py index c1e8d25afff..2826cec2f4e 100755 --- a/scripts/snark_transaction_profiler.py +++ b/scripts/snark_transaction_profiler.py @@ -63,14 +63,6 @@ def parse_stats (output) : #TODO: add code to check against some threshold print(stats) - errors = ["Error", "Failure", "zkapp failed"] - - if any(x in output for x in errors): - print(f'Error detected in output ({" or ".join(errors)}). Failing the build') - sys.exit(1) - if not process_exit_code == 0: print('non-zero exit code from program, failing build') sys.exit(1) - else: - sys.exit(0) \ No newline at end of file From 101dfff25b457fdce3ca4084a6d2669792f22e4c Mon Sep 17 00:00:00 2001 From: dkijania Date: Thu, 3 Oct 2024 19:26:13 +0200 Subject: [PATCH 060/234] exit early on zkapp error --- src/lib/snark_profiler_lib/snark_profiler_lib.ml | 3 ++- 1 file changed, 2 insertions(+), 1 deletion(-) diff --git a/src/lib/snark_profiler_lib/snark_profiler_lib.ml b/src/lib/snark_profiler_lib/snark_profiler_lib.ml index 53481dd8209..6f5ca831d42 100644 --- a/src/lib/snark_profiler_lib/snark_profiler_lib.ml +++ b/src/lib/snark_profiler_lib/snark_profiler_lib.ml @@ -732,7 +732,8 @@ let profile_zkapps ~verifier ledger zkapp_commands = | Error exn -> (* workaround for SNARK failures *) printf !"Error: %s\n%!" 
(Exn.to_string exn) ; - printf "zkApp failed, continuing ...\n" ; + printf "zkApp failed, exiting ...\n" ; + exit 1 () in let tm_zkapp1 = Core.Unix.gettimeofday () in From b05e6e1c905e5c172b0c42a0e6e3bb4ac53865e0 Mon Sep 17 00:00:00 2001 From: dkijania Date: Thu, 3 Oct 2024 20:28:35 +0200 Subject: [PATCH 061/234] reformat --- src/lib/snark_profiler_lib/snark_profiler_lib.ml | 3 +-- 1 file changed, 1 insertion(+), 2 deletions(-) diff --git a/src/lib/snark_profiler_lib/snark_profiler_lib.ml b/src/lib/snark_profiler_lib/snark_profiler_lib.ml index 6f5ca831d42..ef924a5faf8 100644 --- a/src/lib/snark_profiler_lib/snark_profiler_lib.ml +++ b/src/lib/snark_profiler_lib/snark_profiler_lib.ml @@ -733,8 +733,7 @@ let profile_zkapps ~verifier ledger zkapp_commands = (* workaround for SNARK failures *) printf !"Error: %s\n%!" (Exn.to_string exn) ; printf "zkApp failed, exiting ...\n" ; - exit 1 - () + exit 1 () in let tm_zkapp1 = Core.Unix.gettimeofday () in let zkapp_span = Time.Span.of_sec (tm_zkapp1 -. tm_zkapp0) in From d8bbd270b10743a63baaaa4cb99242d63d54f465 Mon Sep 17 00:00:00 2001 From: dkijania Date: Thu, 3 Oct 2024 20:31:25 +0200 Subject: [PATCH 062/234] remove additional empty line --- buildkite/Makefile | 1 - 1 file changed, 1 deletion(-) diff --git a/buildkite/Makefile b/buildkite/Makefile index b055603295a..5464c602687 100644 --- a/buildkite/Makefile +++ b/buildkite/Makefile @@ -16,7 +16,6 @@ lint: format: find ./src/ -name "*.dhall" -print0 | xargs -I{} -0 -n1 bash -c 'echo "{}" && dhall --ascii format --inplace {} || exit 255' - check_deps: $(eval TMP := $(shell mktemp -d)) scripts/dhall/dump_dhall_to_pipelines.sh src/Jobs "$(TMP)" From 342ba93fdacce28572e7cb20ac6be9cefd057bc5 Mon Sep 17 00:00:00 2001 From: dkijania Date: Thu, 3 Oct 2024 20:47:04 +0200 Subject: [PATCH 063/234] fix compilation --- src/lib/snark_profiler_lib/snark_profiler_lib.ml | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/src/lib/snark_profiler_lib/snark_profiler_lib.ml b/src/lib/snark_profiler_lib/snark_profiler_lib.ml index ef924a5faf8..9071772ce5e 100644 --- a/src/lib/snark_profiler_lib/snark_profiler_lib.ml +++ b/src/lib/snark_profiler_lib/snark_profiler_lib.ml @@ -733,7 +733,7 @@ let profile_zkapps ~verifier ledger zkapp_commands = (* workaround for SNARK failures *) printf !"Error: %s\n%!" (Exn.to_string exn) ; printf "zkApp failed, exiting ...\n" ; - exit 1 () + exit 1 in let tm_zkapp1 = Core.Unix.gettimeofday () in let zkapp_span = Time.Span.of_sec (tm_zkapp1 -. 
tm_zkapp0) in From 64a960e7c14ee81fb59369e70969dddbda6f896f Mon Sep 17 00:00:00 2001 From: dkijania Date: Thu, 3 Oct 2024 20:51:34 +0200 Subject: [PATCH 064/234] removing (faulty) code which tries to find ref file on last 10 commits --- scripts/version-linter.py | 10 +++++----- 1 file changed, 5 insertions(+), 5 deletions(-) diff --git a/scripts/version-linter.py b/scripts/version-linter.py index ab9c8337b72..2f8315bc713 100755 --- a/scripts/version-linter.py +++ b/scripts/version-linter.py @@ -32,7 +32,7 @@ def set_error(): global exit_code exit_code=1 -def branch_commits(branch,n): +def branch_commits(branch): print ('Retrieving', branch, 'head commit...') result=subprocess.run(['git','log','-n','1','--format="%h"','--abbrev=7',f'{branch}'], capture_output=True) @@ -60,12 +60,12 @@ def url_exists(url): ''' return requests.head(url).status_code == 200 -def find_latest_type_shape_ref_on(branch,n=1): +def find_latest_type_shape_ref_on(branch): ''' Function tries to find best type shape reference commit by retrieving n last commits and iterate over collection testing if any item points to valid url ''' - commits = branch_commits(branch, n) + commits = branch_commits(branch) candidates = list(filter(lambda x: sha_exists(x), commits)) if not any(candidates): raise Exception(f'Cannot find type shape file for {branch}. I tried {n} last commits') @@ -269,12 +269,12 @@ def assert_commit(commit, desc): subprocess.run(['git','fetch'],capture_output=False) - base_branch_commit = find_latest_type_shape_ref_on(base_branch,n=10) + base_branch_commit = find_latest_type_shape_ref_on(base_branch) download_type_shape('base',base_branch,base_branch_commit) print('') - release_branch_commit=find_latest_type_shape_ref_on(release_branch, n=10) + release_branch_commit=find_latest_type_shape_ref_on(release_branch) download_type_shape('release',release_branch,release_branch_commit) print('') From 2472c2f55444c736ff7649277412d3e37fb07e31 Mon Sep 17 00:00:00 2001 From: dkijania Date: Fri, 4 Oct 2024 09:29:22 +0200 Subject: [PATCH 065/234] rename function to signal only one commit will be returned --- scripts/version-linter.py | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/scripts/version-linter.py b/scripts/version-linter.py index 2f8315bc713..08fa2fc84a8 100755 --- a/scripts/version-linter.py +++ b/scripts/version-linter.py @@ -32,7 +32,7 @@ def set_error(): global exit_code exit_code=1 -def branch_commits(branch): +def latest_branch_commit(branch): print ('Retrieving', branch, 'head commit...') result=subprocess.run(['git','log','-n','1','--format="%h"','--abbrev=7',f'{branch}'], capture_output=True) @@ -65,7 +65,7 @@ def find_latest_type_shape_ref_on(branch): Function tries to find best type shape reference commit by retrieving n last commits and iterate over collection testing if any item points to valid url ''' - commits = branch_commits(branch) + commits = latest_branch_commit(branch) candidates = list(filter(lambda x: sha_exists(x), commits)) if not any(candidates): raise Exception(f'Cannot find type shape file for {branch}. 
I tried {n} last commits') From 8451dc8fcd4f7bf4d841ebe76331e0034f9addbf Mon Sep 17 00:00:00 2001 From: martyall Date: Fri, 4 Oct 2024 01:50:22 -0700 Subject: [PATCH 066/234] build minimal set with constants and config loader --- src/app/archive/cli/archive_cli.ml | 28 +- src/app/archive/lib/processor.ml | 28 +- src/app/batch_txn_tool/batch_txn_tool.ml | 36 +- .../src/cli_entrypoint/mina_cli_entrypoint.ml | 192 ++++------- src/app/cli/src/init/client.ml | 315 ++++++++++-------- .../delegation_verify/delegation_verify.ml | 84 ++--- src/lib/cli_lib/commands.ml | 40 ++- src/lib/cli_lib/flag.ml | 40 +-- src/lib/cli_lib/flag.mli | 19 +- src/lib/crypto/snarky_tests/dune | 1 + src/lib/crypto/snarky_tests/snarky_tests.ml | 23 +- .../genesis_ledger_helper.ml | 263 ++++++++------- src/lib/genesis_proof/genesis_proof.ml | 4 + src/lib/mina_lib/tests/tests.ml | 14 +- .../precomputed_values/precomputed_values.ml | 1 + src/lib/runtime_config/runtime_config.ml | 261 +++++++++++++++ src/lib/snark_worker/functor.ml | 15 +- src/lib/snark_worker/intf.ml | 8 +- .../standalone/run_snark_worker.ml | 13 +- src/test/command_line_tests/config.ml | 3 +- 20 files changed, 829 insertions(+), 559 deletions(-) diff --git a/src/app/archive/cli/archive_cli.ml b/src/app/archive/cli/archive_cli.ml index d9835087e91..b53c9c63ed7 100644 --- a/src/app/archive/cli/archive_cli.ml +++ b/src/app/archive/cli/archive_cli.ml @@ -38,22 +38,32 @@ let command_run = "int Delete blocks that are more than n blocks lower than the \ maximum seen block." in - let runtime_config_opt = - Option.map runtime_config_file ~f:(fun file -> - Yojson.Safe.from_file file |> Runtime_config.of_yojson - |> Result.ok_or_failwith ) - in fun () -> let logger = Logger.create () in - let genesis_constants = Genesis_constants.Compiled.genesis_constants in - let constraint_constants = - Genesis_constants.Compiled.constraint_constants + let open Deferred.Let_syntax in + let%bind constants = + Runtime_config.Constants.load_constants ~logger + (Option.to_list runtime_config_file) + in + let%bind runtime_config_opt = + match runtime_config_file with + | None -> + return None + | Some file -> + Deferred.Or_error.( + Genesis_ledger_helper.Config_loader.load_config_files ~logger + [ file ] + >>| fun a -> Option.some @@ fst a) + |> Deferred.Or_error.ok_exn in Stdout_log.setup log_json log_level ; [%log info] "Starting archive process; built with commit $commit" ~metadata:[ ("commit", `String Mina_version.commit_id) ] ; Archive_lib.Processor.setup_server ~metrics_server_port ~logger - ~genesis_constants ~constraint_constants + ~genesis_constants: + (Runtime_config.Constants.genesis_constants constants) + ~constraint_constants: + (Runtime_config.Constants.constraint_constants constants) ~postgres_address:postgres.value ~server_port: (Option.value server_port.value ~default:server_port.default) diff --git a/src/app/archive/lib/processor.ml b/src/app/archive/lib/processor.ml index e8452d413da..13ab51c7b46 100644 --- a/src/app/archive/lib/processor.ml +++ b/src/app/archive/lib/processor.ml @@ -4728,26 +4728,12 @@ let run pool reader ~genesis_constants ~constraint_constants ~logger Deferred.unit ) (* [add_genesis_accounts] is called when starting the archive process *) -let add_genesis_accounts ~logger ~(runtime_config_opt : Runtime_config.t option) - ~(genesis_constants : Genesis_constants.t) - ~(constraint_constants : Genesis_constants.Constraint_constants.t) pool = +let add_genesis_accounts ~logger + ~(runtime_config_opt : Precomputed_values.t option) pool = match 
runtime_config_opt with | None -> Deferred.unit - | Some runtime_config -> ( - let%bind precomputed_values = - match%map - Genesis_ledger_helper.init_from_config_file ~logger - ~proof_level:Genesis_constants.Compiled.proof_level - ~genesis_constants ~constraint_constants runtime_config - ~cli_proof_level:None - with - | Ok (precomputed_values, _) -> - precomputed_values - | Error err -> - failwithf "Could not get precomputed values, error: %s" - (Error.to_string_hum err) () - in + | Some precomputed_values -> ( let ledger = Precomputed_values.genesis_ledger precomputed_values |> Lazy.force in @@ -4767,7 +4753,8 @@ let add_genesis_accounts ~logger ~(runtime_config_opt : Runtime_config.t option) let%bind.Deferred.Result genesis_block_id = Block.add_if_doesn't_exist (module Conn) - ~constraint_constants genesis_block + ~constraint_constants:precomputed_values.constraint_constants + genesis_block in let%bind.Deferred.Result { ledger_hash; _ } = Block.load (module Conn) ~id:genesis_block_id @@ -4903,10 +4890,7 @@ let setup_server ~(genesis_constants : Genesis_constants.t) ~metadata:[ ("error", `String (Caqti_error.show e)) ] ; Deferred.unit | Ok pool -> - let%bind () = - add_genesis_accounts pool ~logger ~genesis_constants - ~constraint_constants ~runtime_config_opt - in + let%bind () = add_genesis_accounts pool ~logger ~runtime_config_opt in run ~constraint_constants ~genesis_constants pool reader ~logger ~delete_older_than |> don't_wait_for ; diff --git a/src/app/batch_txn_tool/batch_txn_tool.ml b/src/app/batch_txn_tool/batch_txn_tool.ml index 79741438d56..572e369a571 100644 --- a/src/app/batch_txn_tool/batch_txn_tool.ml +++ b/src/app/batch_txn_tool/batch_txn_tool.ml @@ -154,11 +154,10 @@ let there_and_back_again ~num_txn_per_acct ~txns_per_block ~slot_time ~fill_rate ~origin_sender_secret_key_path ~(origin_sender_secret_key_pw_option : string option) ~returner_secret_key_path ~(returner_secret_key_pw_option : string option) - ~graphql_target_node_option ~minimum_user_command_fee () = + ~graphql_target_node_option ~minimum_user_command_fee ~logger () = let open Deferred.Let_syntax in (* define the rate limiting function *) let open Logger in - let logger = Logger.create () in let limit_level = let slot_limit = Float.( @@ -310,8 +309,6 @@ let there_and_back_again ~num_txn_per_acct ~txns_per_block ~slot_time ~fill_rate return () let output_there_and_back_cmds = - let genesis_constants = Genesis_constants.Compiled.genesis_constants in - let compile_config = Mina_compile_config.Compiled.t in let open Command.Let_syntax in Command.async ~summary: @@ -390,23 +387,32 @@ let output_there_and_back_cmds = transactions, if this is not present then we use the env var \ MINA_PRIVKEY_PASS" (optional string) + and config_file = Cli_lib.Flag.conf_file and graphql_target_node_option = flag "--graphql-target-node" ~aliases:[ "graphql-target-node" ] ~doc: "URL The graphql node to send graphl commands to. must be in \ format `:`. 
default is `127.0.0.1:3085`" (optional string) - and minimum_user_command_fee = - let default = compile_config.default_transaction_fee in - Cli_lib.Flag.fee_common - ~minimum_user_command_fee:genesis_constants.minimum_user_command_fee - ~default_transaction_fee:default - in - there_and_back_again ~num_txn_per_acct ~txns_per_block ~txn_fee_option - ~slot_time ~fill_rate ~rate_limit ~rate_limit_level ~rate_limit_interval - ~origin_sender_secret_key_path ~origin_sender_secret_key_pw_option - ~returner_secret_key_path ~returner_secret_key_pw_option - ~graphql_target_node_option ~minimum_user_command_fee ) + and minimum_user_command_fee_opt = Cli_lib.Flag.fee_common in + fun () -> + let open Deferred.Let_syntax in + let logger = Logger.create () in + let%bind minimum_user_command_fee = + let%map conf = + Runtime_config.Constants.load_constants ~logger config_file + in + Option.value + ~default: + (Runtime_config.Constants.genesis_constants conf) + .minimum_user_command_fee minimum_user_command_fee_opt + in + there_and_back_again ~num_txn_per_acct ~txns_per_block ~txn_fee_option + ~slot_time ~fill_rate ~rate_limit ~rate_limit_level + ~rate_limit_interval ~origin_sender_secret_key_path + ~origin_sender_secret_key_pw_option ~returner_secret_key_path + ~returner_secret_key_pw_option ~graphql_target_node_option + ~minimum_user_command_fee ~logger () ) let () = Command.run diff --git a/src/app/cli/src/cli_entrypoint/mina_cli_entrypoint.ml b/src/app/cli/src/cli_entrypoint/mina_cli_entrypoint.ml index b90833be0a1..7bf9d25e446 100644 --- a/src/app/cli/src/cli_entrypoint/mina_cli_entrypoint.ml +++ b/src/app/cli/src/cli_entrypoint/mina_cli_entrypoint.ml @@ -49,51 +49,7 @@ let plugin_flag = times" else Command.Param.return [] -let load_config_files ~logger ~genesis_constants ~constraint_constants ~conf_dir - ~genesis_dir ~cli_proof_level ~proof_level (config_files : string list) = - let open Deferred.Or_error.Let_syntax in - let genesis_dir = Option.value ~default:(conf_dir ^/ "genesis") genesis_dir in - let%bind config = - Runtime_config.Json_loader.load_config_files ~conf_dir ~logger config_files - in - match%bind.Deferred - Genesis_ledger_helper.init_from_config_file ~cli_proof_level ~genesis_dir - ~logger ~genesis_constants ~constraint_constants ~proof_level config - with - | Ok a -> - return a - | Error err -> - let ( json_config - , `Accounts_omitted - ( `Genesis genesis_accounts_omitted - , `Staking staking_accounts_omitted - , `Next next_accounts_omitted ) ) = - Runtime_config.to_yojson_without_accounts config - in - let append_accounts_omitted s = - Option.value_map - ~f:(fun i -> List.cons (s ^ "_accounts_omitted", `Int i)) - ~default:Fn.id - in - let metadata = - append_accounts_omitted "genesis" genesis_accounts_omitted - @@ append_accounts_omitted "staking" staking_accounts_omitted - @@ append_accounts_omitted "next" next_accounts_omitted [] - @ [ ("config", json_config) - ; ( "name" - , `String - (Option.value ~default:"not provided" - (let%bind.Option ledger = config.ledger in - Option.first_some ledger.name ledger.hash ) ) ) - ; ("error", Error_json.error_to_yojson err) - ] - in - [%log info] - "Initializing with runtime configuration. 
Ledger source: $name" - ~metadata ; - Error.raise err - -let setup_daemon logger ~itn_features ~default_snark_worker_fee = +let setup_daemon logger ~itn_features = let open Command.Let_syntax in let open Cli_lib.Arg_type in let receiver_key_warning = Cli_lib.Default.receiver_key_warning in @@ -262,8 +218,7 @@ let setup_daemon logger ~itn_features ~default_snark_worker_fee = ~doc: (sprintf "FEE Amount a worker wants to get compensated for generating a \ - snark proof (default: %d)" - (Currency.Fee.to_nanomina_int default_snark_worker_fee) ) + snark proof" ) (optional txn_fee) and work_reassignment_wait = flag "--work-reassignment-wait" @@ -686,22 +641,15 @@ let setup_daemon logger ~itn_features ~default_snark_worker_fee = in let pids = Child_processes.Termination.create_pid_table () in let mina_initialization_deferred () = - let genesis_constants = - Genesis_constants.Compiled.genesis_constants - in - let constraint_constants = - Genesis_constants.Compiled.constraint_constants - in - let compile_config = Mina_compile_config.Compiled.t in let%bind precomputed_values, config = - load_config_files ~logger ~conf_dir ~genesis_dir - ~proof_level:Genesis_constants.Compiled.proof_level config_files - ~genesis_constants ~constraint_constants ~cli_proof_level + Genesis_ledger_helper.Config_loader.load_config_files ~logger + ~conf_dir ?genesis_dir ?cli_proof_level ~itn_features config_files |> Deferred.Or_error.ok_exn in - - constraint_constants.block_window_duration_ms |> Float.of_int - |> Time.Span.of_ms |> Mina_metrics.initialize_all ; + let constraint_constants = precomputed_values.consensus_constants in + let compile_config = precomputed_values.compile_config in + constraint_constants.block_window_duration_ms |> Block_time.Span.to_ms + |> Float.of_int64 |> Time.Span.of_ms |> Mina_metrics.initialize_all ; let module DC = Runtime_config.Daemon in (* The explicit typing here is necessary to prevent type inference from specializing according @@ -1378,12 +1326,9 @@ Pass one of -peer, -peer-list-file, -seed, -peer-list-url.|} ; let () = Mina_plugins.init_plugins ~logger mina plugins in return mina ) -let daemon logger = - let compile_config = Mina_compile_config.Compiled.t in +let daemon logger ~itn_features = Command.async ~summary:"Mina daemon" - (Command.Param.map - (setup_daemon logger ~itn_features:compile_config.itn_features - ~default_snark_worker_fee:compile_config.default_snark_worker_fee ) + (Command.Param.map (setup_daemon logger ~itn_features) ~f:(fun setup_daemon () -> (* Immediately disable updating the time offset. *) Block_time.Controller.disable_setting_offset () ; @@ -1392,7 +1337,7 @@ let daemon logger = [%log info] "Daemon ready. Clients can now connect" ; Async.never () ) ) -let replay_blocks logger = +let replay_blocks ~itn_features logger = let replay_flag = let open Command.Param in flag "--blocks-filename" ~aliases:[ "-blocks-filename" ] (required string) @@ -1403,11 +1348,9 @@ let replay_blocks logger = flag "--format" ~aliases:[ "-format" ] (optional string) ~doc:"json|sexp The format to read lines of the file in (default: json)" in - let compile_config = Mina_compile_config.Compiled.t in Command.async ~summary:"Start mina daemon with blocks replayed from a file" (Command.Param.map3 replay_flag read_kind - (setup_daemon logger ~itn_features:compile_config.itn_features - ~default_snark_worker_fee:compile_config.default_snark_worker_fee ) + (setup_daemon logger ~itn_features) ~f:(fun blocks_filename read_kind setup_daemon () -> (* Enable updating the time offset. 
*) Block_time.Controller.enable_setting_offset () ; @@ -1599,34 +1542,38 @@ let snark_hashes = let json = Cli_lib.Flag.json in fun () -> if json then Core.printf "[]\n%!"] -let internal_commands logger = +let internal_commands ~itn_features logger = [ ( Snark_worker.Intf.command_name - , Snark_worker.command ~proof_level:Genesis_constants.Compiled.proof_level - ~constraint_constants:Genesis_constants.Compiled.constraint_constants - ~commit_id:Mina_version.commit_id ) + , Snark_worker.command ~commit_id:Mina_version.commit_id ) ; ("snark-hashes", snark_hashes) ; ( "run-prover" , Command.async ~summary:"Run prover on a sexp provided on a single line of stdin" - (Command.Param.return (fun () -> - let logger = Logger.create () in - let constraint_constants = - Genesis_constants.Compiled.constraint_constants - in - let proof_level = Genesis_constants.Compiled.proof_level in - Parallel.init_master () ; - match%bind Reader.read_sexp (Lazy.force Reader.stdin) with - | `Ok sexp -> - let%bind conf_dir = Unix.mkdtemp "/tmp/mina-prover" in - [%log info] "Prover state being logged to %s" conf_dir ; - let%bind prover = - Prover.create ~commit_id:Mina_version.commit_id ~logger - ~proof_level ~constraint_constants - ~pids:(Pid.Table.create ()) ~conf_dir () - in - Prover.prove_from_input_sexp prover sexp >>| ignore - | `Eof -> - failwith "early EOF while reading sexp" ) ) ) + (let open Command.Let_syntax in + let%map_open config_file = Cli_lib.Flag.conf_file in + fun () -> + let logger = Logger.create () in + let open Deferred.Let_syntax in + let%bind constraint_constants, proof_level = + let%map conf = + Runtime_config.Constants.load_constants ~logger config_file + in + Runtime_config.Constants. + (constraint_constants conf, proof_level conf) + in + Parallel.init_master () ; + match%bind Reader.read_sexp (Lazy.force Reader.stdin) with + | `Ok sexp -> + let%bind conf_dir = Unix.mkdtemp "/tmp/mina-prover" in + [%log info] "Prover state being logged to %s" conf_dir ; + let%bind prover = + Prover.create ~commit_id:Mina_version.commit_id ~logger + ~proof_level ~constraint_constants ~pids:(Pid.Table.create ()) + ~conf_dir () + in + Prover.prove_from_input_sexp prover sexp >>| ignore + | `Eof -> + failwith "early EOF while reading sexp") ) ; ( "run-snark-worker-single" , Command.async ~summary:"Run snark-worker on a sexp provided on a single line of stdin" @@ -1634,14 +1581,18 @@ let internal_commands logger = let%map_open filename = flag "--file" (required string) ~doc:"File containing the s-expression of the snark work to execute" - in + and config_file = Cli_lib.Flag.conf_file in + fun () -> let open Deferred.Let_syntax in let logger = Logger.create () in - let constraint_constants = - Genesis_constants.Compiled.constraint_constants + let%bind constraint_constants, proof_level = + let%map conf = + Runtime_config.Constants.load_constants ~logger config_file + in + Runtime_config.Constants. 
+ (constraint_constants conf, proof_level conf) in - let proof_level = Genesis_constants.Compiled.proof_level in Parallel.init_master () ; match%bind Reader.with_file filename ~f:(fun reader -> @@ -1688,14 +1639,17 @@ let internal_commands logger = and limit = flag "--limit" ~aliases:[ "-limit" ] (optional int) ~doc:"limit the number of proofs taken from the file" - in + and config_file = Cli_lib.Flag.conf_file in fun () -> let open Async in let logger = Logger.create () in - let constraint_constants = - Genesis_constants.Compiled.constraint_constants + let%bind constraint_constants, proof_level = + let%map conf = + Runtime_config.Constants.load_constants ~logger config_file + in + Runtime_config.Constants. + (constraint_constants conf, proof_level conf) in - let proof_level = Genesis_constants.Compiled.proof_level in Parallel.init_master () ; let%bind conf_dir = Unix.mkdtemp "/tmp/mina-verifier" in let mode = @@ -1831,18 +1785,12 @@ let internal_commands logger = () ) ; Deferred.return ()) ) ; ("dump-type-shapes", dump_type_shapes) - ; ("replay-blocks", replay_blocks logger) + ; ("replay-blocks", replay_blocks ~itn_features logger) ; ("audit-type-shapes", audit_type_shapes) ; ( "test-genesis-block-generation" , Command.async ~summary:"Generate a genesis proof" (let open Command.Let_syntax in - let%map_open config_files = - flag "--config-file" ~aliases:[ "config-file" ] - ~doc: - "PATH path to a configuration file (overrides MINA_CONFIG_FILE, \ - default: /daemon.json). Pass multiple times to \ - override fields from earlier config files" - (listed string) + let%map_open config_file = Cli_lib.Flag.conf_file and conf_dir = Cli_lib.Flag.conf_dir and genesis_dir = flag "--genesis-ledger-dir" ~aliases:[ "genesis-ledger-dir" ] @@ -1856,17 +1804,10 @@ let internal_commands logger = Parallel.init_master () ; let logger = Logger.create () in let conf_dir = Mina_lib.Conf_dir.compute_conf_dir conf_dir in - let genesis_constants = - Genesis_constants.Compiled.genesis_constants - in - let constraint_constants = - Genesis_constants.Compiled.constraint_constants - in - let proof_level = Genesis_constants.Proof_level.Full in + let cli_proof_level = Genesis_constants.Proof_level.Full in let%bind precomputed_values, _ = - load_config_files ~logger ~conf_dir ~genesis_dir ~genesis_constants - ~constraint_constants ~proof_level config_files - ~cli_proof_level:None + Genesis_ledger_helper.Config_loader.load_config_files ~logger + ~conf_dir ?genesis_dir ~cli_proof_level ~itn_features config_file |> Deferred.Or_error.ok_exn in let pids = Child_processes.Termination.create_pid_table () in @@ -1875,7 +1816,7 @@ let internal_commands logger = realistic test. 
*) Prover.create ~commit_id:Mina_version.commit_id ~logger ~pids - ~conf_dir ~proof_level + ~conf_dir ~proof_level:precomputed_values.proof_level ~constraint_constants:precomputed_values.constraint_constants () in match%bind @@ -1895,13 +1836,14 @@ let internal_commands logger = let mina_commands logger ~itn_features = [ ("accounts", Client.accounts) - ; ("daemon", daemon logger) + ; ("daemon", daemon ~itn_features logger) ; ("client", Client.client) ; ("advanced", Client.advanced ~itn_features) ; ("ledger", Client.ledger) ; ("libp2p", Client.libp2p) ; ( "internal" - , Command.group ~summary:"Internal commands" (internal_commands logger) ) + , Command.group ~summary:"Internal commands" + (internal_commands ~itn_features logger) ) ; (Parallel.worker_command_name, Parallel.worker_command) ; ("transaction-snark-profiler", Transaction_snark_profiler.command) ] @@ -1939,11 +1881,13 @@ let () = | [| _mina_exe; version |] when is_version_cmd version -> Mina_version.print_version () | _ -> - let compile_config = Mina_compile_config.Compiled.t in + let itn_features = + Sys.getenv "MINA_ITN_FEATURES" + |> Option.value_map ~default:false ~f:bool_of_string + in Command.run (Command.group ~summary:"Mina" ~preserve_subcommand_order:() - (mina_commands logger ~itn_features:compile_config.itn_features) ) - ) ; + (mina_commands logger ~itn_features) ) ) ; Core.exit 0 let linkme = () diff --git a/src/app/cli/src/init/client.ml b/src/app/cli/src/init/client.ml index d4c1e6dbb50..9771267c082 100644 --- a/src/app/cli/src/init/client.ml +++ b/src/app/cli/src/init/client.ml @@ -513,21 +513,31 @@ let send_payment_graphql = flag "--amount" ~aliases:[ "amount" ] ~doc:"VALUE Payment amount you want to send" (required txn_amount) in - let genesis_constants = Genesis_constants.Compiled.genesis_constants in - let compile_config = Mina_compile_config.Compiled.t in + let config_file = Cli_lib.Flag.conf_file in let args = - Args.zip3 - (Cli_lib.Flag.signed_command_common - ~minimum_user_command_fee:genesis_constants.minimum_user_command_fee - ~default_transaction_fee:compile_config.default_transaction_fee ) - receiver_flag amount_flag + Args.zip4 Cli_lib.Flag.signed_command_common receiver_flag amount_flag + config_file in Command.async ~summary:"Send payment to an address" (Cli_lib.Background_daemon.graphql_init args ~f:(fun graphql_endpoint - ({ Cli_lib.Flag.sender; fee; nonce; memo }, receiver, amount) + ( { Cli_lib.Flag.sender; fee; nonce; memo } + , receiver + , amount + , config_file ) -> + let open Deferred.Let_syntax in + let%bind compile_config = + let logger = Logger.create () in + let%map conf = + Runtime_config.Constants.load_constants ~logger config_file + in + Runtime_config.Constants.compile_config conf + in + let fee = + Option.value ~default:compile_config.default_transaction_fee fee + in let%map response = let input = Mina_graphql.Types.Input.SendPaymentInput.make_input ~to_:receiver @@ -548,21 +558,28 @@ let delegate_stake_graphql = ~doc:"PUBLICKEY Public key to which you want to delegate your stake" (required public_key_compressed) in - let genesis_constants = Genesis_constants.Compiled.genesis_constants in - let compile_config = Mina_compile_config.Compiled.t in + let config_file = Cli_lib.Flag.conf_file in let args = - Args.zip2 - (Cli_lib.Flag.signed_command_common - ~minimum_user_command_fee:genesis_constants.minimum_user_command_fee - ~default_transaction_fee:compile_config.default_transaction_fee ) - receiver_flag + Args.zip3 Cli_lib.Flag.signed_command_common receiver_flag config_file in + 
Command.async ~summary:"Delegate your stake to another public key" (Cli_lib.Background_daemon.graphql_init args ~f:(fun graphql_endpoint - ({ Cli_lib.Flag.sender; fee; nonce; memo }, receiver) + ({ Cli_lib.Flag.sender; fee; nonce; memo }, receiver, config_file) -> + let open Deferred.Let_syntax in + let%bind compile_config = + let logger = Logger.create () in + let%map conf = + Runtime_config.Constants.load_constants ~logger config_file + in + Runtime_config.Constants.compile_config conf + in + let fee = + Option.value ~default:compile_config.default_transaction_fee fee + in let%map response = Graphql_client.query_exn Graphql_queries.Send_delegation.( @@ -818,10 +835,16 @@ let hash_ledger = flag "--ledger-file" ~doc:"LEDGER-FILE File containing an exported ledger" (required string)) + and config_file = Cli_lib.Flag.conf_file and plaintext = Cli_lib.Flag.plaintext in fun () -> - let constraint_constants = - Genesis_constants.Compiled.constraint_constants + let open Deferred.Let_syntax in + let%bind constraint_constants = + let logger = Logger.create () in + let%map conf = + Runtime_config.Constants.load_constants ~logger config_file + in + Runtime_config.Constants.constraint_constants conf in let process_accounts accounts = let packed_ledger = @@ -922,22 +945,28 @@ let currency_in_ledger = ignore (exit 1 : 'a Deferred.t) ) let constraint_system_digests = + let open Command.Let_syntax in Command.async ~summary:"Print MD5 digest of each SNARK constraint" - (Command.Param.return (fun () -> - let constraint_constants = - Genesis_constants.Compiled.constraint_constants - in - let proof_level = Genesis_constants.Compiled.proof_level in - let all = - Transaction_snark.constraint_system_digests ~constraint_constants () - @ Blockchain_snark.Blockchain_snark_state.constraint_system_digests - ~proof_level ~constraint_constants () - in - let all = - List.sort ~compare:(fun (k1, _) (k2, _) -> String.compare k1 k2) all + (let%map_open config_file = Cli_lib.Flag.conf_file in + fun () -> + let open Deferred.Let_syntax in + let%bind constraint_constants, proof_level = + let logger = Logger.create () in + let%map conf = + Runtime_config.Constants.load_constants ~logger config_file in - List.iter all ~f:(fun (k, v) -> printf "%s\t%s\n" k (Md5.to_hex v)) ; - Deferred.unit ) ) + Runtime_config.Constants.(constraint_constants conf, proof_level conf) + in + let all = + Transaction_snark.constraint_system_digests ~constraint_constants () + @ Blockchain_snark.Blockchain_snark_state.constraint_system_digests + ~proof_level ~constraint_constants () + in + let all = + List.sort ~compare:(fun (k1, _) (k2, _) -> String.compare k1 k2) all + in + List.iter all ~f:(fun (k, v) -> printf "%s\t%s\n" k (Md5.to_hex v)) ; + Deferred.unit ) let snark_job_list = let open Deferred.Let_syntax in @@ -1605,14 +1634,19 @@ let lock_account = in printf "🔒 Locked account!\nPublic key: %s\n" pk_string ) ) -let generate_libp2p_keypair_do privkey_path = +let generate_libp2p_keypair_do privkey_path ~config_file = Cli_lib.Exceptions.handle_nicely @@ fun () -> Deferred.ignore_m (let open Deferred.Let_syntax in (* FIXME: I'd like to accumulate messages into this logger and only dump them out in failure paths. *) let logger = Logger.null () in - let compile_config = Mina_compile_config.Compiled.t in + let%bind compile_config = + let%map conf = + Runtime_config.Constants.load_constants ~logger config_file + in + Runtime_config.Constants.compile_config conf + in (* Using the helper only for keypair generation requires no state. 
*) File_system.with_temp_dir "mina-generate-libp2p-keypair" ~f:(fun tmpd -> match%bind @@ -1637,16 +1671,23 @@ let generate_libp2p_keypair = Command.async ~summary:"Generate a new libp2p keypair and print out the peer ID" (let open Command.Let_syntax in - let%map_open privkey_path = Cli_lib.Flag.privkey_write_path in - generate_libp2p_keypair_do privkey_path) + let%map_open privkey_path = Cli_lib.Flag.privkey_write_path + and config_file = Cli_lib.Flag.conf_file in + generate_libp2p_keypair_do privkey_path ~config_file) -let dump_libp2p_keypair_do privkey_path = +let dump_libp2p_keypair_do privkey_path ~config_file = Cli_lib.Exceptions.handle_nicely @@ fun () -> Deferred.ignore_m (let open Deferred.Let_syntax in let logger = Logger.null () in - let compile_config = Mina_compile_config.Compiled.t in + let%bind compile_config = + let%map conf = + Runtime_config.Constants.load_constants ~logger config_file + in + Runtime_config.Constants.compile_config conf + in + (* Using the helper only for keypair generation requires no state. *) File_system.with_temp_dir "mina-dump-libp2p-keypair" ~f:(fun tmpd -> match%bind @@ -1667,8 +1708,9 @@ let dump_libp2p_keypair_do privkey_path = let dump_libp2p_keypair = Command.async ~summary:"Print an existing libp2p keypair" (let open Command.Let_syntax in - let%map_open privkey_path = Cli_lib.Flag.privkey_read_path in - dump_libp2p_keypair_do privkey_path) + let%map_open privkey_path = Cli_lib.Flag.privkey_read_path + and config_file = Cli_lib.Flag.conf_file in + dump_libp2p_keypair_do privkey_path ~config_file) let trustlist_ip_flag = Command.Param.( @@ -1795,90 +1837,82 @@ let add_peers_graphql = } ) ) ) ) let compile_time_constants = - let genesis_constants = Genesis_constants.Compiled.genesis_constants in - let constraint_constants = Genesis_constants.Compiled.constraint_constants in - let proof_level = Genesis_constants.Compiled.proof_level in + let open Command.Let_syntax in Command.async ~summary:"Print a JSON map of the compile-time consensus parameters" - (Command.Param.return (fun () -> + (let%map_open config_file = Cli_lib.Flag.conf_file in + fun () -> + let home = Core.Sys.home_directory () in + let conf_dir = home ^/ Cli_lib.Default.conf_dir_name in + let genesis_dir = let home = Core.Sys.home_directory () in - let conf_dir = home ^/ Cli_lib.Default.conf_dir_name in - let genesis_dir = - let home = Core.Sys.home_directory () in - home ^/ Cli_lib.Default.conf_dir_name - in - let config_file = - match Sys.getenv "MINA_CONFIG_FILE" with - | Some config_file -> - config_file - | None -> - conf_dir ^/ "daemon.json" - in - let open Async in + home ^/ Cli_lib.Default.conf_dir_name + in + let open Deferred.Let_syntax in + let%map ({ consensus_constants; _ } as precomputed_values), _ = + (* This is kind of ugly because we are allowing for supplying a runtime_config value directly, rather than force what is read from the environment *) + (* TODO: See if we can initialize consensus_constants without also initializing the ledger *) let logger = Logger.create () in - let%map ({ consensus_constants; _ } as precomputed_values), _ = - let%bind runtime_config = - let%map.Deferred config_file = - Runtime_config.Json_loader.load_config_files ~conf_dir ~logger - [ config_file ] - >>| Or_error.ok - in - let default = - Runtime_config.of_json_layout - { Runtime_config.Json_layout.default with - ledger = - Some - { Runtime_config.Json_layout.Ledger.default with - accounts = Some [] - } - } - |> Result.ok_or_failwith - in - Option.value ~default config_file - in - 
Genesis_ledger_helper.init_from_config_file ~genesis_constants - ~constraint_constants ~logger:(Logger.null ()) ~proof_level - ~cli_proof_level:None ~genesis_dir runtime_config - >>| Or_error.ok_exn + let%bind m_conf = + Runtime_config.Json_loader.load_config_files ~conf_dir ~logger + config_file + >>| Or_error.ok + in + let default = + Runtime_config.of_json_layout + { Runtime_config.Json_layout.default with + ledger = + Some + { Runtime_config.Json_layout.Ledger.default with + accounts = Some [] + } + } + |> Result.ok_or_failwith in - let all_constants = - `Assoc - [ ( "genesis_state_timestamp" - , `String - ( Block_time.to_time_exn - consensus_constants.genesis_state_timestamp - |> Core.Time.to_string_iso8601_basic ~zone:Core.Time.Zone.utc - ) ) - ; ("k", `Int (Unsigned.UInt32.to_int consensus_constants.k)) - ; ( "coinbase" - , `String - (Currency.Amount.to_mina_string - precomputed_values.constraint_constants.coinbase_amount ) - ) - ; ( "block_window_duration_ms" - , `Int - precomputed_values.constraint_constants - .block_window_duration_ms ) - ; ("delta", `Int (Unsigned.UInt32.to_int consensus_constants.delta)) - ; ( "sub_windows_per_window" - , `Int - (Unsigned.UInt32.to_int - consensus_constants.sub_windows_per_window ) ) - ; ( "slots_per_sub_window" - , `Int - (Unsigned.UInt32.to_int - consensus_constants.slots_per_sub_window ) ) - ; ( "slots_per_window" - , `Int - (Unsigned.UInt32.to_int consensus_constants.slots_per_window) - ) - ; ( "slots_per_epoch" - , `Int - (Unsigned.UInt32.to_int consensus_constants.slots_per_epoch) - ) - ] + let runtime_config = Option.value ~default m_conf in + let constants = + Runtime_config.Constants.load_constants' runtime_config in - Core_kernel.printf "%s\n%!" (Yojson.Safe.to_string all_constants) ) ) + Genesis_ledger_helper.Config_loader.init_from_config_file ~genesis_dir + ~logger ~constants runtime_config + |> Deferred.Or_error.ok_exn + in + let all_constants = + `Assoc + [ ( "genesis_state_timestamp" + , `String + ( Block_time.to_time_exn + consensus_constants.genesis_state_timestamp + |> Core.Time.to_string_iso8601_basic ~zone:Core.Time.Zone.utc + ) ) + ; ("k", `Int (Unsigned.UInt32.to_int consensus_constants.k)) + ; ( "coinbase" + , `String + (Currency.Amount.to_mina_string + precomputed_values.constraint_constants.coinbase_amount ) ) + ; ( "block_window_duration_ms" + , `Int + precomputed_values.constraint_constants + .block_window_duration_ms ) + ; ("delta", `Int (Unsigned.UInt32.to_int consensus_constants.delta)) + ; ( "sub_windows_per_window" + , `Int + (Unsigned.UInt32.to_int + consensus_constants.sub_windows_per_window ) ) + ; ( "slots_per_sub_window" + , `Int + (Unsigned.UInt32.to_int + consensus_constants.slots_per_sub_window ) ) + ; ( "slots_per_window" + , `Int + (Unsigned.UInt32.to_int consensus_constants.slots_per_window) + ) + ; ( "slots_per_epoch" + , `Int (Unsigned.UInt32.to_int consensus_constants.slots_per_epoch) + ) + ] + in + Core_kernel.printf "%s\n%!" 
(Yojson.Safe.to_string all_constants) ) let node_status = let open Command.Param in @@ -2322,26 +2356,31 @@ let test_ledger_application = flag "--has-second-partition" ~doc:"Assume there is a second partition (scan state)" no_arg and tracing = flag "--tracing" ~doc:"Wrap test into tracing" no_arg + and config_file = Cli_lib.Flag.conf_file and no_masks = flag "--no-masks" ~doc:"Do not create masks" no_arg in Cli_lib.Exceptions.handle_nicely @@ fun () -> + let open Deferred.Let_syntax in + let%bind genesis_constants, constraint_constants = + let logger = Logger.create () in + let%map conf = + Runtime_config.Constants.load_constants ~logger config_file + in + Runtime_config.Constants. + (genesis_constants conf, constraint_constants conf) + in let first_partition_slots = Option.value ~default:128 first_partition_slots in let num_txs_per_round = Option.value ~default:3 num_txs_per_round in let rounds = Option.value ~default:580 rounds in let max_depth = Option.value ~default:290 max_depth in - let constraint_constants = - Genesis_constants.Compiled.constraint_constants - in - let genesis_constants = Genesis_constants.Compiled.genesis_constants in Test_ledger_application.test ~privkey_path ~ledger_path ?prev_block_path ~first_partition_slots ~no_new_stack ~has_second_partition ~num_txs_per_round ~rounds ~no_masks ~max_depth ~tracing num_txs ~constraint_constants ~genesis_constants ) let itn_create_accounts = - let compile_config = Mina_compile_config.Compiled.t in Command.async ~summary:"Fund new accounts for incentivized testnet" (let open Command.Param in let privkey_path = Cli_lib.Flag.privkey_read_path in @@ -2352,10 +2391,7 @@ let itn_create_accounts = flag "--num-accounts" ~doc:"NN Number of new accounts" (required int) in let fee = - flag "--fee" - ~doc: - (sprintf "NN Fee in nanomina paid to create an account (minimum: %s)" - (Currency.Fee.to_string compile_config.minimum_user_command_fee) ) + flag "--fee" ~doc:"NN Fee in nanomina paid to create an account" (required int) in let amount = @@ -2363,13 +2399,28 @@ let itn_create_accounts = ~doc:"NN Amount in nanomina to be divided among new accounts" (required int) in - let args = Args.zip5 privkey_path key_prefix num_accounts fee amount in - let genesis_constants = Genesis_constants.Compiled.genesis_constants in - let constraint_constants = - Genesis_constants.Compiled.constraint_constants + let config_file = Cli_lib.Flag.conf_file in + let args = + Args.zip6 privkey_path key_prefix num_accounts fee amount config_file in Cli_lib.Background_daemon.rpc_init args - ~f:(Itn.create_accounts ~genesis_constants ~constraint_constants)) + ~f:(fun + port + (privkey_path, key_prefix, num_accounts, fee, amount, config_file) + -> + let open Deferred.Let_syntax in + let%bind genesis_constants, constraint_constants = + let logger = Logger.create () in + let%map conf = + Runtime_config.Constants.load_constants ~logger config_file + in + Runtime_config.Constants. 
+ (genesis_constants conf, constraint_constants conf) + in + let args' = (privkey_path, key_prefix, num_accounts, fee, amount) in + let genesis_constants = genesis_constants in + let constraint_constants = constraint_constants in + Itn.create_accounts ~genesis_constants ~constraint_constants port args' )) module Visualization = struct let create_command (type rpc_response) ~name ~f diff --git a/src/app/delegation_verify/delegation_verify.ml b/src/app/delegation_verify/delegation_verify.ml index f160d4c1234..2cad72cb40a 100644 --- a/src/app/delegation_verify/delegation_verify.ml +++ b/src/app/delegation_verify/delegation_verify.ml @@ -13,9 +13,7 @@ let get_filenames = let verify_snark_work ~verify_transaction_snarks ~proof ~message = verify_transaction_snarks [ (proof, message) ] -let config_flag = - let open Command.Param in - flag "--config-file" ~doc:"FILE config file" (optional string) +let config_flag = Cli_lib.Flag.conf_file let keyspace_flag = let open Command.Param in @@ -44,31 +42,15 @@ let timestamp = let open Command.Param in anon ("timestamp" %: string) -let instantiate_verify_functions ~logger ~genesis_constants - ~constraint_constants ~proof_level ~cli_proof_level = function - | None -> - Deferred.return - (Verifier.verify_functions ~constraint_constants ~proof_level ()) - | Some config_file -> - let%bind.Deferred precomputed_values = - let%bind.Deferred.Or_error config = - Runtime_config.Json_loader.load_config_files ~logger [ config_file ] - in - Genesis_ledger_helper.init_from_config_file ~logger ~proof_level - ~constraint_constants ~genesis_constants config ~cli_proof_level - in - let%map.Deferred precomputed_values = - match precomputed_values with - | Ok (precomputed_values, _) -> - Deferred.return precomputed_values - | Error _ -> - Output.display_error "fail to read config file" ; - exit 4 - in - let constraint_constants = - Precomputed_values.constraint_constants precomputed_values - in - Verifier.verify_functions ~constraint_constants ~proof_level:Full () +let instantiate_verify_functions ~logger ~cli_proof_level config_file = + let open Deferred.Let_syntax in + let%map constants = + Runtime_config.Constants.load_constants ~logger ~cli_proof_level config_file + in + let constraint_constants = + Runtime_config.Constants.constraint_constants constants + in + Verifier.verify_functions ~constraint_constants ~proof_level:Full () module Make_verifier (Source : Submission.Data_source) = struct let verify_transaction_snarks = Source.verify_transaction_snarks @@ -139,7 +121,7 @@ module Make_verifier (Source : Submission.Data_source) = struct |> Deferred.Or_error.all_unit end -let filesystem_command = +let filesystem_command ~logger = Command.async ~summary:"Verify submissions and block read from the filesystem" Command.Let_syntax.( let%map_open block_dir = block_dir_flag @@ -147,16 +129,10 @@ let filesystem_command = and no_checks = no_checks_flag and config_file = config_flag in fun () -> - let logger = Logger.create () in - let genesis_constants = Genesis_constants.Compiled.genesis_constants in - let constraint_constants = - Genesis_constants.Compiled.constraint_constants - in - let proof_level = Genesis_constants.Compiled.proof_level in let%bind.Deferred verify_blockchain_snarks, verify_transaction_snarks = - instantiate_verify_functions ~logger config_file ~genesis_constants - ~constraint_constants ~proof_level ~cli_proof_level:None + instantiate_verify_functions ~logger ~cli_proof_level:None config_file in + let submission_paths = get_filenames inputs in let module V = 
Make_verifier (struct include Submission.Filesystem @@ -175,7 +151,7 @@ let filesystem_command = Output.display_error @@ Error.to_string_hum e ; exit 1) -let cassandra_command = +let cassandra_command ~logger = Command.async ~summary:"Verify submissions and block read from Cassandra" Command.Let_syntax.( let%map_open cqlsh = cassandra_executable_flag @@ -186,15 +162,8 @@ let cassandra_command = and period_end = timestamp in fun () -> let open Deferred.Let_syntax in - let logger = Logger.create () in - let genesis_constants = Genesis_constants.Compiled.genesis_constants in - let constraint_constants = - Genesis_constants.Compiled.constraint_constants - in - let proof_level = Genesis_constants.Compiled.proof_level in let%bind.Deferred verify_blockchain_snarks, verify_transaction_snarks = - instantiate_verify_functions ~logger config_file ~genesis_constants - ~constraint_constants ~proof_level ~cli_proof_level:None + instantiate_verify_functions ~logger ~cli_proof_level:None config_file in let module V = Make_verifier (struct include Submission.Cassandra @@ -217,22 +186,15 @@ let cassandra_command = Output.display_error @@ Error.to_string_hum e ; exit 1) -let stdin_command = +let stdin_command ~logger = Command.async ~summary:"Verify submissions and blocks read from standard input" Command.Let_syntax.( let%map_open config_file = config_flag and no_checks = no_checks_flag in fun () -> let open Deferred.Let_syntax in - let logger = Logger.create () in - let genesis_constants = Genesis_constants.Compiled.genesis_constants in - let constraint_constants = - Genesis_constants.Compiled.constraint_constants - in - let proof_level = Genesis_constants.Compiled.proof_level in let%bind.Deferred verify_blockchain_snarks, verify_transaction_snarks = - instantiate_verify_functions ~logger config_file ~genesis_constants - ~constraint_constants ~proof_level ~cli_proof_level:None + instantiate_verify_functions ~logger ~cli_proof_level:None config_file in let module V = Make_verifier (struct include Submission.Stdin @@ -248,12 +210,14 @@ let stdin_command = Output.display_error @@ Error.to_string_hum e ; exit 1) -let command = +let command ~logger = Command.group ~summary:"A tool for verifying JSON payload submitted by the uptime service" - [ ("fs", filesystem_command) - ; ("cassandra", cassandra_command) - ; ("stdin", stdin_command) + [ ("fs", filesystem_command ~logger) + ; ("cassandra", cassandra_command ~logger) + ; ("stdin", stdin_command ~logger) ] -let () = Async.Command.run command +let () = + let logger = Logger.create () in + Async.Command.run @@ command ~logger diff --git a/src/lib/cli_lib/commands.ml b/src/lib/cli_lib/commands.ml index 375a130d6e7..aff4d661ab9 100644 --- a/src/lib/cli_lib/commands.ml +++ b/src/lib/cli_lib/commands.ml @@ -230,16 +230,20 @@ module Vrf = struct flag "--total-stake" ~doc:"AMOUNT The total balance of all accounts in the epoch ledger" (optional int) - in + and config_file = Flag.conf_file in Exceptions.handle_nicely @@ fun () -> let env = Secrets.Keypair.env in - let constraint_constants = - Genesis_constants.Compiled.constraint_constants + let open Deferred.Let_syntax in + let%bind constraint_constants = + let logger = Logger.create () in + let%map conf = + Runtime_config.Constants.load_constants ~logger config_file + in + Runtime_config.Constants.constraint_constants conf in if Option.is_some (Sys.getenv env) then eprintf "Using password from environment variable %s\n" env ; - let open Deferred.Let_syntax in (* TODO-someday: constraint constants from config file. 
*) let%bind () = let password = @@ -297,17 +301,21 @@ module Vrf = struct \"epochSeed\": _, \"delegatorIndex\": _} JSON message objects read on \ stdin" (let open Command.Let_syntax in - let%map_open privkey_path = Flag.privkey_read_path in + let%map_open privkey_path = Flag.privkey_read_path + and config_file = Flag.conf_file in Exceptions.handle_nicely @@ fun () -> - let constraint_constants = - Genesis_constants.Compiled.constraint_constants - in let env = Secrets.Keypair.env in if Option.is_some (Sys.getenv env) then eprintf "Using password from environment variable %s\n" env ; let open Deferred.Let_syntax in - (* TODO-someday: constraint constants from config file. *) + let%bind constraint_constants = + let logger = Logger.create () in + let%map conf = + Runtime_config.Constants.load_constants ~logger config_file + in + Runtime_config.Constants.constraint_constants conf + in let%bind () = let password = lazy @@ -362,11 +370,17 @@ module Vrf = struct totalStake: 1000000000}. The threshold is not checked against a \ ledger; this should be done manually to confirm whether threshold_met \ in the output corresponds to an actual won block." - ( Command.Param.return @@ Exceptions.handle_nicely + (let open Command.Let_syntax in + let%map_open config_file = Flag.conf_file in + Exceptions.handle_nicely @@ fun () -> let open Deferred.Let_syntax in - let constraint_constants = - Genesis_constants.Compiled.constraint_constants + let%bind constraint_constants = + let logger = Logger.create () in + let%map conf = + Runtime_config.Constants.load_constants ~logger config_file + in + Runtime_config.Constants.constraint_constants conf in (* TODO-someday: constraint constants from config file. *) let lexbuf = Lexing.from_channel In_channel.stdin in @@ -399,7 +413,7 @@ module Vrf = struct (Error_json.error_to_yojson err) ) ; `Repeat () ) in - exit 0 ) + exit 0) let command_group = Command.group ~summary:"Commands for vrf evaluations" diff --git a/src/lib/cli_lib/flag.ml b/src/lib/cli_lib/flag.ml index c96200feb9e..051b2e69ffe 100644 --- a/src/lib/cli_lib/flag.ml +++ b/src/lib/cli_lib/flag.ml @@ -33,6 +33,15 @@ let conf_dir = flag "--config-directory" ~aliases:[ "config-directory" ] ~doc:"DIR Configuration directory" (optional string) +let conf_file = + let open Command.Param in + flag "--config-file" ~aliases:[ "config-file" ] + ~doc: + "PATH path to a configuration file (overrides MINA_CONFIG_FILE, default: \ + /daemon.json). 
Pass multiple times to override fields from \ + earlier config files" + (listed string) + module Doc_builder = struct type 'value t = { type_name : string @@ -343,32 +352,24 @@ end type signed_command_common = { sender : Signature_lib.Public_key.Compressed.t - ; fee : Currency.Fee.t + ; fee : Currency.Fee.t option ; nonce : Mina_base.Account.Nonce.t option ; memo : string option } -let fee_common ~default_transaction_fee ~minimum_user_command_fee : - Currency.Fee.t Command.Param.t = +let fee_common : Currency.Fee.t option Command.Param.t = Command.Param.flag "--fee" ~aliases:[ "fee" ] - ~doc: - (Printf.sprintf - "FEE Amount you are willing to pay to process the transaction \ - (default: %s) (minimum: %s)" - (Currency.Fee.to_mina_string default_transaction_fee) - (Currency.Fee.to_mina_string minimum_user_command_fee) ) - (Command.Param.optional_with_default default_transaction_fee - Arg_type.txn_fee ) - -let signed_command_common ~default_transaction_fee ~minimum_user_command_fee : - signed_command_common Command.Param.t = + ~doc:"FEE Amount you are willing to pay to process the transaction" + (Command.Param.optional Arg_type.txn_fee) + +let signed_command_common : signed_command_common Command.Param.t = let open Command.Let_syntax in let open Arg_type in let%map_open sender = flag "--sender" ~aliases:[ "sender" ] (required public_key_compressed) ~doc:"PUBLICKEY Public key from which you want to send the transaction" - and fee = fee_common ~default_transaction_fee ~minimum_user_command_fee + and fee = fee_common and nonce = flag "--nonce" ~aliases:[ "nonce" ] ~doc: @@ -401,15 +402,10 @@ module Signed_command = struct flag "--amount" ~aliases:[ "amount" ] ~doc:"VALUE Payment amount you want to send" (required txn_amount) - let fee ~default_transaction_fee ~minimum_user_command_fee = + let fee = let open Command.Param in flag "--fee" ~aliases:[ "fee" ] - ~doc: - (Printf.sprintf - "FEE Amount you are willing to pay to process the transaction \ - (default: %s) (minimum: %s)" - (Currency.Fee.to_mina_string default_transaction_fee) - (Currency.Fee.to_mina_string minimum_user_command_fee) ) + ~doc:"FEE Amount you are willing to pay to process the transaction" (optional txn_fee) let valid_until = diff --git a/src/lib/cli_lib/flag.mli b/src/lib/cli_lib/flag.mli index 69ff38f86f3..f95e5e3801a 100644 --- a/src/lib/cli_lib/flag.mli +++ b/src/lib/cli_lib/flag.mli @@ -12,6 +12,8 @@ val privkey_read_path : string Command.Param.t val conf_dir : string option Command.Param.t +val conf_file : string list Command.Param.t + module Types : sig type 'a with_name = { name : string; value : 'a } @@ -81,20 +83,14 @@ end type signed_command_common = { sender : Signature_lib.Public_key.Compressed.t - ; fee : Currency.Fee.t + ; fee : Currency.Fee.t option ; nonce : Mina_base.Account.Nonce.t option ; memo : string option } -val fee_common : - default_transaction_fee:Currency.Fee.t - -> minimum_user_command_fee:Currency.Fee.t - -> Currency.Fee.t Command.Param.t +val fee_common : Currency.Fee.t option Command.Param.t -val signed_command_common : - default_transaction_fee:Currency.Fee.t - -> minimum_user_command_fee:Currency.Fee.t - -> signed_command_common Command.Param.t +val signed_command_common : signed_command_common Command.Param.t module Signed_command : sig val hd_index : Mina_numbers.Hd_index.t Command.Param.t @@ -103,10 +99,7 @@ module Signed_command : sig val amount : Currency.Amount.t Command.Param.t - val fee : - default_transaction_fee:Currency.Fee.t - -> minimum_user_command_fee:Currency.Fee.t - -> 
Currency.Fee.t option Command.Param.t + val fee : Currency.Fee.t option Command.Param.t val valid_until : Mina_numbers.Global_slot_since_genesis.t option Command.Param.t diff --git a/src/lib/crypto/snarky_tests/dune b/src/lib/crypto/snarky_tests/dune index a9710cf679b..5fbfe8499c7 100644 --- a/src/lib/crypto/snarky_tests/dune +++ b/src/lib/crypto/snarky_tests/dune @@ -68,4 +68,5 @@ blockchain_snark transaction_snark genesis_constants + mina_runtime_config core)) diff --git a/src/lib/crypto/snarky_tests/snarky_tests.ml b/src/lib/crypto/snarky_tests/snarky_tests.ml index 51db1241b6d..68297f8468f 100644 --- a/src/lib/crypto/snarky_tests/snarky_tests.ml +++ b/src/lib/crypto/snarky_tests/snarky_tests.ml @@ -604,14 +604,12 @@ module Protocol_circuits = struct (* Full because we want to be sure nothing changes *) let proof_level = Genesis_constants.Proof_level.Full - let constraint_constants = Genesis_constants.Compiled.constraint_constants - let print_hash print expected digest : unit = if print then ( Format.printf "expected:\n%s\n" expected ; Format.printf "obtained:\n%s\n" digest ) - let blockchain () : unit = + let blockchain ~constraint_constants () : unit = let expected = "36786c300e37c2a2f1341ad6374aa113" in let digest = Blockchain_snark.Blockchain_snark_state.constraint_system_digests @@ -626,7 +624,7 @@ module Protocol_circuits = struct assert digests_match ; () - let transaction () : unit = + let transaction ~constraint_constants () : unit = let expected1 = "b8879f677f622a1d86648030701f43e1" in let expected2 = "740db2397b0b01806a48f061a2e2b063" in let digest = @@ -651,9 +649,9 @@ module Protocol_circuits = struct assert check ; () - let tests = - [ ("test blockchain circuit", `Quick, blockchain) - ; ("test transaction circuit", `Quick, transaction) + let tests ~constraint_constants = + [ ("test blockchain circuit", `Quick, blockchain ~constraint_constants) + ; ("test transaction circuit", `Quick, transaction ~constraint_constants) ] end @@ -666,16 +664,25 @@ let api_tests = ] let () = + Async.Thread_safe.block_on_async_exn + @@ fun () -> let range_checks = List.map ~f:QCheck_alcotest.to_alcotest [ RangeCircuits.test_range_gates ] in + let logger = Logger.create () in + let%map.Async.Deferred constraint_constants = + let%map.Async.Deferred config = + Runtime_config.Constants.load_constants ~logger [] + in + Runtime_config.Constants.constraint_constants config + in Alcotest.run "Simple snarky tests" [ ("outside of circuit tests before", outside_circuit_tests) ; ("API tests", api_tests) ; ("circuit tests", circuit_tests) ; ("As_prover tests", As_prover_circuits.as_prover_tests) ; ("range checks", range_checks) - ; ("protocol circuits", Protocol_circuits.tests) + ; ("protocol circuits", Protocol_circuits.tests ~constraint_constants) ; ("improper calls", Improper_calls.tests) (* We run the pure functions before and after other tests, because we've had bugs in the past where it would only work after the global state was initialized by an API function diff --git a/src/lib/genesis_ledger_helper/genesis_ledger_helper.ml b/src/lib/genesis_ledger_helper/genesis_ledger_helper.ml index bd1b74e5df0..d8fcb85be5f 100644 --- a/src/lib/genesis_ledger_helper/genesis_ledger_helper.ml +++ b/src/lib/genesis_ledger_helper/genesis_ledger_helper.ml @@ -666,7 +666,7 @@ module Genesis_proof = struct return None let generate_inputs ~runtime_config ~proof_level ~ledger ~genesis_epoch_data - ~constraint_constants ~blockchain_proof_system_id + ~constraint_constants ~blockchain_proof_system_id ~compile_config 
~(genesis_constants : Genesis_constants.t) = let consensus_constants = Consensus.Constants.create ~constraint_constants @@ -682,6 +682,7 @@ module Genesis_proof = struct { Genesis_proof.Inputs.runtime_config ; constraint_constants ; proof_level + ; compile_config ; blockchain_proof_system_id ; genesis_ledger = ledger ; genesis_epoch_data @@ -708,6 +709,7 @@ module Genesis_proof = struct ; consensus_constants = inputs.consensus_constants ; constraint_constants = inputs.constraint_constants ; genesis_body_reference = inputs.genesis_body_reference + ; compile_config = inputs.compile_config } | _ -> Deferred.return (Genesis_proof.create_values_no_proof inputs) @@ -759,119 +761,148 @@ let print_config ~logger (config : Runtime_config.t) = [%log info] "Initializing with runtime configuration. Ledger name: $name" ~metadata -let inputs_from_config_file ?(genesis_dir = Cache_dir.autogen_path) ~logger - ~cli_proof_level ~(genesis_constants : Genesis_constants.t) - ~(constraint_constants : Genesis_constants.Constraint_constants.t) - ~proof_level:compiled_proof_level ?overwrite_version - (config : Runtime_config.t) = - print_config ~logger config ; - let open Deferred.Or_error.Let_syntax in - let proof_level = - List.find_map_exn ~f:Fn.id - [ cli_proof_level - ; Option.Let_syntax.( - let%bind proof = config.proof in - match%map proof.level with - | Full -> - Genesis_constants.Proof_level.Full - | Check -> - Check - | None -> - None) - ; Some compiled_proof_level - ] - in - let constraint_constants, blockchain_proof_system_id = - match config.proof with - | None -> - [%log info] "Using the compiled constraint constants" ; - (constraint_constants, Some (Pickles.Verification_key.Id.dummy ())) - | Some config -> - [%log info] "Using the constraint constants from the configuration file" ; - let blockchain_proof_system_id = - (* We pass [None] here, which will force the constraint systems to be - set up and their hashes evaluated before we can calculate the - genesis proof's filename. - This adds no overhead if we are generating a genesis proof, since - we will do these evaluations anyway to load the blockchain proving - key. Otherwise, this will in a slight slowdown. 
- *) - None +module type Config_loader_intf = sig + val load_config_files : + ?overwrite_version:Mina_numbers.Txn_version.t + -> ?genesis_dir:string + -> ?itn_features:bool + -> ?cli_proof_level:Genesis_constants.Proof_level.t + -> ?conf_dir:string + -> logger:Logger.t + -> string list + -> (Precomputed_values.t * Runtime_config.t) Deferred.Or_error.t + + val init_from_config_file : + ?overwrite_version:Mina_numbers.Txn_version.t + -> ?genesis_dir:string + -> logger:Logger.t + -> constants:Runtime_config.Constants.constants + -> Runtime_config.t + -> (Precomputed_values.t * Runtime_config.t) Deferred.Or_error.t +end + +module Config_loader : Config_loader_intf = struct + let inputs_from_config_file ?(genesis_dir = Cache_dir.autogen_path) ~logger + ~(constants : Runtime_config.Constants.constants) ?overwrite_version + (config : Runtime_config.t) = + print_config ~logger config ; + let open Deferred.Or_error.Let_syntax in + let blockchain_proof_system_id = None in + let constraint_constants = + Runtime_config.Constants.constraint_constants constants + in + let proof_level = Runtime_config.Constants.proof_level constants in + let compile_config = Runtime_config.Constants.compile_config constants in + let genesis_constants = + Runtime_config.Constants.genesis_constants constants + in + let%bind genesis_ledger, ledger_config, ledger_file = + match config.ledger with + | Some ledger -> + Ledger.load ~proof_level ~genesis_dir ~logger ~constraint_constants + ?overwrite_version ledger + | None -> + [%log fatal] "No ledger was provided in the runtime configuration" ; + Deferred.Or_error.errorf + "No ledger was provided in the runtime configuration" + in + [%log info] "Loaded genesis ledger from $ledger_file" + ~metadata:[ ("ledger_file", `String ledger_file) ] ; + let%bind genesis_epoch_data, genesis_epoch_data_config = + Epoch_data.load ~proof_level ~genesis_dir ~logger ~constraint_constants + config.epoch_data + in + let config = + { config with + ledger = Option.map config.ledger ~f:(fun _ -> ledger_config) + ; epoch_data = genesis_epoch_data_config + } + in + let%map genesis_constants = + Deferred.return + @@ make_genesis_constants ~logger ~default:genesis_constants config + in + let proof_inputs = + Genesis_proof.generate_inputs ~runtime_config:config ~proof_level + ~ledger:genesis_ledger ~constraint_constants ~genesis_constants + ~compile_config ~blockchain_proof_system_id ~genesis_epoch_data + in + (proof_inputs, config) + + let init_from_config_file ?overwrite_version ?genesis_dir ~logger + ~(constants : Runtime_config.Constants.constants) + (config : Runtime_config.t) : + (Precomputed_values.t * Runtime_config.t) Deferred.Or_error.t = + let open Deferred.Or_error.Let_syntax in + let%map inputs, config = + inputs_from_config_file ?genesis_dir ~constants ~logger ?overwrite_version + config + in + let values = Genesis_proof.create_values_no_proof inputs in + (values, config) + + let%test_module "Account config test" = + ( module struct + let%test_unit "Runtime config <=> Account" = + let module Ledger = (val Genesis_ledger.for_unit_tests) in + let accounts = Lazy.force Ledger.accounts in + List.iter accounts ~f:(fun (sk, acc) -> + let acc_config = Accounts.Single.of_account acc sk in + let acc' = + Accounts.Single.to_account_with_pk acc_config |> Or_error.ok_exn + in + [%test_eq: Account.t] acc acc' ) + end ) + + let load_config_files ?overwrite_version ?genesis_dir ?(itn_features = false) + ?cli_proof_level ?conf_dir ~logger (config_files : string list) = + let open 
Deferred.Or_error.Let_syntax in + let genesis_dir = + let%map.Option conf_dir = conf_dir in + Option.value ~default:(conf_dir ^/ "genesis") genesis_dir + in + let%bind.Deferred constants = + Runtime_config.Constants.load_constants ?conf_dir ?cli_proof_level + ~itn_features ~logger config_files + in + let%bind config = + Runtime_config.Json_loader.load_config_files ?conf_dir ~logger + config_files + in + match%bind.Deferred + init_from_config_file ?overwrite_version ?genesis_dir ~logger ~constants + config + with + | Ok a -> + return a + | Error err -> + let ( json_config + , `Accounts_omitted + ( `Genesis genesis_accounts_omitted + , `Staking staking_accounts_omitted + , `Next next_accounts_omitted ) ) = + Runtime_config.to_yojson_without_accounts config in - ( make_constraint_constants ~default:constraint_constants config - , blockchain_proof_system_id ) - in - let%bind () = - match (proof_level, compiled_proof_level) with - | _, Full | (Check | None), _ -> - return () - | Full, ((Check | None) as compiled) -> - let str = Genesis_constants.Proof_level.to_string in - [%log fatal] - "Proof level $proof_level is not compatible with compile-time proof \ - level $compiled_proof_level" - ~metadata: - [ ("proof_level", `String (str proof_level)) - ; ("compiled_proof_level", `String (str compiled)) - ] ; - Deferred.Or_error.errorf - "Proof level %s is not compatible with compile-time proof level %s" - (str proof_level) (str compiled) - in - let%bind genesis_ledger, ledger_config, ledger_file = - match config.ledger with - | Some ledger -> - Ledger.load ~proof_level ~genesis_dir ~logger ~constraint_constants - ?overwrite_version ledger - | None -> - [%log fatal] "No ledger was provided in the runtime configuration" ; - Deferred.Or_error.errorf - "No ledger was provided in the runtime configuration" - in - [%log info] "Loaded genesis ledger from $ledger_file" - ~metadata:[ ("ledger_file", `String ledger_file) ] ; - let%bind genesis_epoch_data, genesis_epoch_data_config = - Epoch_data.load ~proof_level ~genesis_dir ~logger ~constraint_constants - config.epoch_data - in - let config = - { config with - ledger = Option.map config.ledger ~f:(fun _ -> ledger_config) - ; epoch_data = genesis_epoch_data_config - } - in - let%map genesis_constants = - Deferred.return - @@ make_genesis_constants ~logger ~default:genesis_constants config - in - let proof_inputs = - Genesis_proof.generate_inputs ~runtime_config:config ~proof_level - ~ledger:genesis_ledger ~constraint_constants ~genesis_constants - ~blockchain_proof_system_id ~genesis_epoch_data - in - (proof_inputs, config) - -let init_from_config_file ?genesis_dir ~cli_proof_level ~genesis_constants - ~constraint_constants ~logger ~proof_level ?overwrite_version - (config : Runtime_config.t) : - (Precomputed_values.t * Runtime_config.t) Deferred.Or_error.t = - let open Deferred.Or_error.Let_syntax in - let%map inputs, config = - inputs_from_config_file ?genesis_dir ~cli_proof_level ~genesis_constants - ~constraint_constants ~logger ~proof_level ?overwrite_version config - in - let values = Genesis_proof.create_values_no_proof inputs in - (values, config) - -let%test_module "Account config test" = - ( module struct - let%test_unit "Runtime config <=> Account" = - let module Ledger = (val Genesis_ledger.for_unit_tests) in - let accounts = Lazy.force Ledger.accounts in - List.iter accounts ~f:(fun (sk, acc) -> - let acc_config = Accounts.Single.of_account acc sk in - let acc' = - Accounts.Single.to_account_with_pk acc_config |> Or_error.ok_exn - in - [%test_eq: 
Account.t] acc acc' ) - end ) + let append_accounts_omitted s = + Option.value_map + ~f:(fun i -> List.cons (s ^ "_accounts_omitted", `Int i)) + ~default:Fn.id + in + let metadata = + append_accounts_omitted "genesis" genesis_accounts_omitted + @@ append_accounts_omitted "staking" staking_accounts_omitted + @@ append_accounts_omitted "next" next_accounts_omitted [] + @ [ ("config", json_config) + ; ( "name" + , `String + (Option.value ~default:"not provided" + (let%bind.Option ledger = config.ledger in + Option.first_some ledger.name ledger.hash ) ) ) + ; ("error", Error_json.error_to_yojson err) + ] + in + [%log info] + "Initializing with runtime configuration. Ledger source: $name" + ~metadata ; + Error.raise err +end diff --git a/src/lib/genesis_proof/genesis_proof.ml b/src/lib/genesis_proof/genesis_proof.ml index 9d3c74d6a18..db2aabbeccb 100644 --- a/src/lib/genesis_proof/genesis_proof.ml +++ b/src/lib/genesis_proof/genesis_proof.ml @@ -8,6 +8,7 @@ module Inputs = struct ; constraint_constants : Genesis_constants.Constraint_constants.t ; proof_level : Genesis_constants.Proof_level.t ; genesis_constants : Genesis_constants.t + ; compile_config : Mina_compile_config.t ; genesis_ledger : Genesis_ledger.Packed.t ; genesis_epoch_data : Consensus.Genesis_epoch_data.t ; genesis_body_reference : Consensus.Body_reference.t @@ -85,6 +86,7 @@ module T = struct ; constraint_constants : Genesis_constants.Constraint_constants.t ; genesis_constants : Genesis_constants.t ; proof_level : Genesis_constants.Proof_level.t + ; compile_config : Mina_compile_config.t ; genesis_ledger : Genesis_ledger.Packed.t ; genesis_epoch_data : Consensus.Genesis_epoch_data.t ; genesis_body_reference : Consensus.Body_reference.t @@ -223,6 +225,7 @@ let create_values_no_proof (t : Inputs.t) = ; constraint_constants = t.constraint_constants ; proof_level = t.proof_level ; genesis_constants = t.genesis_constants + ; compile_config = t.compile_config ; genesis_ledger = t.genesis_ledger ; genesis_epoch_data = t.genesis_epoch_data ; genesis_body_reference = t.genesis_body_reference @@ -240,6 +243,7 @@ let to_inputs (t : t) : Inputs.t = ; constraint_constants = t.constraint_constants ; proof_level = t.proof_level ; genesis_constants = t.genesis_constants + ; compile_config = t.compile_config ; genesis_ledger = t.genesis_ledger ; genesis_epoch_data = t.genesis_epoch_data ; genesis_body_reference = t.genesis_body_reference diff --git a/src/lib/mina_lib/tests/tests.ml b/src/lib/mina_lib/tests/tests.ml index d41de09099d..688ab9c89f6 100644 --- a/src/lib/mina_lib/tests/tests.ml +++ b/src/lib/mina_lib/tests/tests.ml @@ -36,11 +36,6 @@ let%test_module "Epoch ledger sync tests" = let dir_prefix = "sync_test_data" - let genesis_constants = Genesis_constants.For_unit_tests.t - - let constraint_constants = - Genesis_constants.For_unit_tests.Constraint_constants.t - let make_dirname s = let open Core in let uuid = Uuid_unix.create () |> Uuid.to_string in @@ -51,7 +46,7 @@ let%test_module "Epoch ledger sync tests" = let runtime_config : Runtime_config.t = { daemon = None ; genesis = None - ; proof = None + ; proof = Some { Runtime_config.Proof_keys.default with level = None } ; ledger = Some { base = Named "test" @@ -66,10 +61,11 @@ let%test_module "Epoch ledger sync tests" = } in match%map - Genesis_ledger_helper.init_from_config_file + Genesis_ledger_helper.Config_loader.init_from_config_file ~genesis_dir:(make_dirname "genesis_dir") - ~constraint_constants ~genesis_constants ~logger ~proof_level:None - runtime_config ~cli_proof_level:None 
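+          (* constants here are built from the For_unit_tests defaults overlaid
+             with the in-memory runtime config above, not loaded from config files *)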
+ ~constants: + (Runtime_config.Constants.magic_for_unit_tests runtime_config) + ~logger runtime_config with | Ok (precomputed_values, _) -> precomputed_values diff --git a/src/lib/precomputed_values/precomputed_values.ml b/src/lib/precomputed_values/precomputed_values.ml index fd97c4ec317..c5d03d0020e 100644 --- a/src/lib/precomputed_values/precomputed_values.ml +++ b/src/lib/precomputed_values/precomputed_values.ml @@ -43,4 +43,5 @@ let for_unit_tests = ; protocol_state_with_hashes ; constraint_system_digests = hashes ; proof_data = None + ; compile_config = Mina_compile_config.For_unit_tests.t }) diff --git a/src/lib/runtime_config/runtime_config.ml b/src/lib/runtime_config/runtime_config.ml index 461a16e57fc..b8ad91af694 100644 --- a/src/lib/runtime_config/runtime_config.ml +++ b/src/lib/runtime_config/runtime_config.ml @@ -978,6 +978,14 @@ module Proof_keys = struct type t = Log_2 of int | Txns_per_second_x10 of int [@@deriving bin_io_unversioned] + let log2 = function Log_2 i -> Some i | Txns_per_second_x10 _ -> None + + let txns_per_second_x10 = function + | Log_2 _ -> + None + | Txns_per_second_x10 i -> + Some i + let to_json_layout : t -> Json_layout.Proof_keys.Transaction_capacity.t = function | Log_2 i -> @@ -1039,6 +1047,19 @@ module Proof_keys = struct ; fork } + let default = + { level = None + ; sub_windows_per_window = None + ; ledger_depth = None + ; work_delay = None + ; block_window_duration_ms = None + ; transaction_capacity = None + ; coinbase_amount = None + ; supercharged_coinbase_factor = None + ; account_creation_fee = None + ; fork = None + } + let to_json_layout { level ; sub_windows_per_window @@ -1710,3 +1731,243 @@ module Json_loader : Json_loader_intf = struct ] ; failwithf "Could not parse configuration file: %s" err () ) end + +module type Constants_intf = sig + type constants + + val load_constants : + ?conf_dir:string + -> ?commit_id_short:string + -> ?itn_features:bool + -> ?cli_proof_level:Genesis_constants.Proof_level.t + -> logger:Logger.t + -> string list + -> constants Deferred.t + + val load_constants' : + ?itn_features:bool + -> ?cli_proof_level:Genesis_constants.Proof_level.t + -> t + -> constants + + val genesis_constants : constants -> Genesis_constants.t + + val constraint_constants : + constants -> Genesis_constants.Constraint_constants.t + + val proof_level : constants -> Genesis_constants.Proof_level.t + + val compile_config : constants -> Mina_compile_config.t + + val magic_for_unit_tests : t -> constants +end + +module Constants : Constants_intf = struct + type constants = + { genesis_constants : Genesis_constants.t + ; constraint_constants : Genesis_constants.Constraint_constants.t + ; proof_level : Genesis_constants.Proof_level.t + ; compile_config : Mina_compile_config.t + } + + let genesis_constants t = t.genesis_constants + + let constraint_constants t = t.constraint_constants + + let proof_level t = t.proof_level + + let compile_config t = t.compile_config + + let combine (a : constants) (b : t) : constants = + let genesis_constants = + { Genesis_constants.protocol = + { k = + Option.value ~default:a.genesis_constants.protocol.k + Option.(b.genesis >>= fun g -> g.k) + ; delta = + Option.value ~default:a.genesis_constants.protocol.delta + Option.(b.genesis >>= fun g -> g.delta) + ; slots_per_epoch = + Option.value ~default:a.genesis_constants.protocol.slots_per_epoch + Option.(b.genesis >>= fun g -> g.slots_per_epoch) + ; slots_per_sub_window = + Option.value + ~default:a.genesis_constants.protocol.slots_per_sub_window + 
Option.(b.genesis >>= fun g -> g.slots_per_sub_window) + ; grace_period_slots = + Option.value + ~default:a.genesis_constants.protocol.grace_period_slots + Option.(b.genesis >>= fun g -> g.grace_period_slots) + ; genesis_state_timestamp = + Option.value + ~default:a.genesis_constants.protocol.genesis_state_timestamp + Option.( + b.genesis + >>= fun g -> + g.genesis_state_timestamp + >>| Genesis_constants.genesis_timestamp_of_string + >>| Genesis_constants.of_time) + } + ; txpool_max_size = + Option.value ~default:a.genesis_constants.txpool_max_size + Option.(b.daemon >>= fun d -> d.txpool_max_size) + ; num_accounts = + Option.first_some + Option.(b.ledger >>= fun l -> l.num_accounts) + a.genesis_constants.num_accounts + ; zkapp_proof_update_cost = + Option.value ~default:a.genesis_constants.zkapp_proof_update_cost + Option.(b.daemon >>= fun d -> d.zkapp_proof_update_cost) + ; zkapp_signed_single_update_cost = + Option.value + ~default:a.genesis_constants.zkapp_signed_single_update_cost + Option.(b.daemon >>= fun d -> d.zkapp_signed_single_update_cost) + ; zkapp_signed_pair_update_cost = + Option.value + ~default:a.genesis_constants.zkapp_signed_pair_update_cost + Option.(b.daemon >>= fun d -> d.zkapp_signed_pair_update_cost) + ; zkapp_transaction_cost_limit = + Option.value ~default:a.genesis_constants.zkapp_transaction_cost_limit + Option.(b.daemon >>= fun d -> d.zkapp_transaction_cost_limit) + ; max_event_elements = + Option.value ~default:a.genesis_constants.max_event_elements + Option.(b.daemon >>= fun d -> d.max_event_elements) + ; max_action_elements = + Option.value ~default:a.genesis_constants.max_action_elements + Option.(b.daemon >>= fun d -> d.max_action_elements) + ; zkapp_cmd_limit_hardcap = + Option.value ~default:a.genesis_constants.zkapp_cmd_limit_hardcap + Option.(b.daemon >>= fun d -> d.zkapp_cmd_limit_hardcap) + ; minimum_user_command_fee = + Option.value ~default:a.genesis_constants.minimum_user_command_fee + Option.(b.daemon >>= fun d -> d.minimum_user_command_fee) + } + in + let constraint_constants = + let fork = + let a = a.constraint_constants.fork in + let b = + let%map.Option f = Option.(b.proof >>= fun x -> x.fork) in + { Genesis_constants.Fork_constants.state_hash = + Pickles.Backend.Tick.Field.of_string f.state_hash + ; blockchain_length = Mina_numbers.Length.of_int f.blockchain_length + ; global_slot_since_genesis = + Mina_numbers.Global_slot_since_genesis.of_int + f.global_slot_since_genesis + } + in + Option.first_some b a + in + { a.constraint_constants with + sub_windows_per_window = + Option.value ~default:a.constraint_constants.sub_windows_per_window + Option.(b.proof >>= fun p -> p.sub_windows_per_window) + ; ledger_depth = + Option.value ~default:a.constraint_constants.ledger_depth + Option.(b.proof >>= fun p -> p.ledger_depth) + ; work_delay = + Option.value ~default:a.constraint_constants.work_delay + Option.(b.proof >>= fun p -> p.work_delay) + ; block_window_duration_ms = + Option.value ~default:a.constraint_constants.block_window_duration_ms + Option.(b.proof >>= fun p -> p.block_window_duration_ms) + ; transaction_capacity_log_2 = + Option.value + ~default:a.constraint_constants.transaction_capacity_log_2 + Option.( + b.proof + >>= fun p -> + p.transaction_capacity >>= Proof_keys.Transaction_capacity.log2) + ; coinbase_amount = + Option.value ~default:a.constraint_constants.coinbase_amount + Option.(b.proof >>= fun p -> p.coinbase_amount) + ; supercharged_coinbase_factor = + Option.value + 
~default:a.constraint_constants.supercharged_coinbase_factor + Option.(b.proof >>= fun p -> p.supercharged_coinbase_factor) + ; account_creation_fee = + Option.value ~default:a.constraint_constants.account_creation_fee + Option.(b.proof >>= fun p -> p.account_creation_fee) + ; fork + } + in + let proof_level = + let coerce_proof_level = function + | Proof_keys.Level.Full -> + Genesis_constants.Proof_level.Full + | Check -> + Genesis_constants.Proof_level.Check + | None -> + Genesis_constants.Proof_level.None + in + Option.value ~default:Genesis_constants.Proof_level.None + Option.(b.proof >>= fun p -> p.level >>| coerce_proof_level) + in + let compile_config = + { a.compile_config with + block_window_duration = + constraint_constants.block_window_duration_ms |> Float.of_int + |> Time.Span.of_ms + ; zkapp_proof_update_cost = genesis_constants.zkapp_proof_update_cost + ; zkapp_signed_single_update_cost = + genesis_constants.zkapp_signed_single_update_cost + ; zkapp_signed_pair_update_cost = + genesis_constants.zkapp_signed_pair_update_cost + ; zkapp_transaction_cost_limit = + genesis_constants.zkapp_transaction_cost_limit + ; max_event_elements = genesis_constants.max_event_elements + ; max_action_elements = genesis_constants.max_action_elements + ; zkapp_cmd_limit_hardcap = genesis_constants.zkapp_cmd_limit_hardcap + ; minimum_user_command_fee = genesis_constants.minimum_user_command_fee + ; network_id = + Option.value ~default:a.compile_config.network_id + Option.(b.daemon >>= fun d -> d.network_id) + } + in + { genesis_constants; constraint_constants; proof_level; compile_config } + + let load_constants' ?itn_features ?cli_proof_level runtime_config = + let constants = + let compile_constants = + { genesis_constants = Genesis_constants.Compiled.genesis_constants + ; constraint_constants = + Genesis_constants.Compiled.constraint_constants + ; proof_level = Genesis_constants.Compiled.proof_level + ; compile_config = Mina_compile_config.Compiled.t + } + in + let cs = combine compile_constants runtime_config in + { cs with + proof_level = Option.value ~default:cs.proof_level cli_proof_level + ; compile_config = + { cs.compile_config with + itn_features = + Option.value ~default:cs.compile_config.itn_features itn_features + } + } + in + constants + + (* Use this function if you don't need/want the ledger configuration *) + let load_constants ?conf_dir ?commit_id_short ?itn_features ?cli_proof_level + ~logger config_files = + Deferred.Or_error.ok_exn + @@ + let open Deferred.Or_error.Let_syntax in + let%map runtime_config = + Json_loader.load_config_files ?conf_dir ?commit_id_short ~logger + config_files + in + load_constants' ?itn_features ?cli_proof_level runtime_config + + let magic_for_unit_tests t = + let compile_constants = + { genesis_constants = Genesis_constants.For_unit_tests.t + ; constraint_constants = + Genesis_constants.For_unit_tests.Constraint_constants.t + ; proof_level = Genesis_constants.For_unit_tests.Proof_level.t + ; compile_config = Mina_compile_config.For_unit_tests.t + } + in + combine compile_constants t +end diff --git a/src/lib/snark_worker/functor.ml b/src/lib/snark_worker/functor.ml index a62453e2877..aa60e19bbff 100644 --- a/src/lib/snark_worker/functor.ml +++ b/src/lib/snark_worker/functor.ml @@ -340,8 +340,7 @@ module Make (Inputs : Intf.Inputs_intf) : in go () - let command_from_rpcs ~commit_id ~proof_level:default_proof_level - ~constraint_constants + let command_from_rpcs ~commit_id (module Rpcs_versioned : Intf.Rpcs_versioned_S with type Work.ledger_proof = 
Inputs.Ledger_proof.t ) = Command.async ~summary:"Snark worker" @@ -350,7 +349,7 @@ module Make (Inputs : Intf.Inputs_intf) : flag "--daemon-address" ~aliases:[ "daemon-address" ] (required (Arg_type.create Host_and_port.of_string)) ~doc:"HOST-AND-PORT address daemon is listening on" - and proof_level = + and cli_proof_level = flag "--proof-level" ~aliases:[ "proof-level" ] (optional (Arg_type.create Genesis_constants.Proof_level.of_string)) ~doc:"full|check|none" @@ -360,13 +359,19 @@ module Make (Inputs : Intf.Inputs_intf) : (optional bool) ~doc: "true|false Shutdown when disconnected from daemon (default:true)" + and config_file = Cli_lib.Flag.conf_file and conf_dir = Cli_lib.Flag.conf_dir in fun () -> let logger = Logger.create () ~metadata:[ ("process", `String "Snark Worker") ] in - let proof_level = - Option.value ~default:default_proof_level proof_level + let%bind.Deferred constraint_constants, proof_level = + let%map.Deferred config = + Runtime_config.Constants.load_constants ?conf_dir ?cli_proof_level + ~logger config_file + in + Runtime_config.Constants. + (constraint_constants config, proof_level config) in Option.value_map ~default:() conf_dir ~f:(fun conf_dir -> let logrotate_max_size = 1024 * 10 in diff --git a/src/lib/snark_worker/intf.ml b/src/lib/snark_worker/intf.ml index 9173d066d9c..2a9d93bb03c 100644 --- a/src/lib/snark_worker/intf.ml +++ b/src/lib/snark_worker/intf.ml @@ -154,8 +154,6 @@ module type S0 = sig val command_from_rpcs : commit_id:string - -> proof_level:Genesis_constants.Proof_level.t - -> constraint_constants:Genesis_constants.Constraint_constants.t -> (module Rpcs_versioned_S with type Work.ledger_proof = ledger_proof) -> Command.t @@ -173,9 +171,5 @@ module type S = sig module Rpcs_versioned : Rpcs_versioned_S with type Work.ledger_proof = ledger_proof - val command : - commit_id:string - -> proof_level:Genesis_constants.Proof_level.t - -> constraint_constants:Genesis_constants.Constraint_constants.t - -> Command.t + val command : commit_id:string -> Command.t end diff --git a/src/lib/snark_worker/standalone/run_snark_worker.ml b/src/lib/snark_worker/standalone/run_snark_worker.ml index 771f647c6c2..ee7ec476d4a 100644 --- a/src/lib/snark_worker/standalone/run_snark_worker.ml +++ b/src/lib/snark_worker/standalone/run_snark_worker.ml @@ -8,7 +8,8 @@ let command = (let%map_open spec = flag "--spec-sexp" ~doc:"" (required (sexp_conv Prod.single_spec_of_sexp)) - and proof_level = + and config_file = Cli_lib.Flag.conf_file + and cli_proof_level = flag "--proof-level" ~doc:"" (optional_with_default Genesis_constants.Proof_level.Full (Command.Arg_type.of_alist_exn @@ -19,8 +20,14 @@ let command = in fun () -> let open Async in - let constraint_constants = - Genesis_constants.Compiled.constraint_constants + let open Deferred.Let_syntax in + let%bind constraint_constants, proof_level = + let logger = Logger.create () in + let%map conf = + Runtime_config.Constants.load_constants ~cli_proof_level ~logger + config_file + in + Runtime_config.Constants.(constraint_constants conf, proof_level conf) in let%bind worker_state = Prod.Worker_state.create ~constraint_constants ~proof_level () diff --git a/src/test/command_line_tests/config.ml b/src/test/command_line_tests/config.ml index 2ed907ba406..ecf6950abdb 100644 --- a/src/test/command_line_tests/config.ml +++ b/src/test/command_line_tests/config.ml @@ -29,7 +29,8 @@ module ConfigDirs = struct let generate_keys t = let open Deferred.Let_syntax in let%map () = - Init.Client.generate_libp2p_keypair_do 
(libp2p_keypair_folder t) () + Init.Client.generate_libp2p_keypair_do (libp2p_keypair_folder t) + ~config_file:[] () in () From 4579f667469d34e63c68cc80b4425a555a391ee2 Mon Sep 17 00:00:00 2001 From: dkijania Date: Fri, 4 Oct 2024 12:42:52 +0200 Subject: [PATCH 067/234] replace xrefcheck docker with awesome_bot --- .xrefcheck.yaml | 63 ------------------- buildkite/src/Constants/ContainerImages.dhall | 2 +- buildkite/src/Jobs/Lint/Xrefcheck.dhall | 18 +++++- 3 files changed, 16 insertions(+), 67 deletions(-) delete mode 100644 .xrefcheck.yaml diff --git a/.xrefcheck.yaml b/.xrefcheck.yaml deleted file mode 100644 index a1c87cb6104..00000000000 --- a/.xrefcheck.yaml +++ /dev/null @@ -1,63 +0,0 @@ -# Parameters of repository traversal. -traversal: - # Files and folders which we pretend do not exist - # (so they are neither analyzed nor can be referenced). - ignored: - # Git files - - .git - # Build artifacts - - _build - - _opam - # Git submodules - - src/external - - src/lib/marlin - - src/lib/crypto/proof-systems - - src/lib/snarky - - frontend/wallet/tablecloth - # Unsure of the relevance anymore - - frontend/wallet/README.md - -# Verification parameters. -verification: - # On 'anchor not found' error, how much similar anchors should be displayed as - # hint. Number should be between 0 and 1, larger value means stricter filter. - anchorSimilarityThreshold: 0.5 - - # When checking external references, how long to wait on request before - # declaring "Response timeout". - externalRefCheckTimeout: 10s - - # Prefixes of files, references in which should not be analyzed. - notScanned: - - .github/pull_request_template.md - - .github/issue_template.md - - .github/PULL_REQUEST_TEMPLATE - - .github/ISSUE_TEMPLATE - - # Glob patterns describing the files which do not physically exist in the - # repository but should be treated as existing nevertheless. - virtualFiles: - - ../../../issues - - ../../../issues/* - - ../../../pulls - - ../../../pulls/* - - # POSIX extended regular expressions that match external references - # that have to be ignored (not verified). - # It is an optional parameter, so it can be omitted. - ignoreRefs: - - "https://github.com/.*" # Otherwise Resource unavailable (429 too many requests) - - # Check localhost links. - checkLocalhost: false - - # Skip links which return 403 or 401 code. - ignoreAuthFailures: true - -# Parameters of scanners for various file types. -scanners: - markdown: - # Flavor of markdown, e.g. GitHub-flavor. - # - # This affects which anchors are generated for headers. 
- flavor: GitHub diff --git a/buildkite/src/Constants/ContainerImages.dhall b/buildkite/src/Constants/ContainerImages.dhall index ff3cd4d0d58..144942f3a16 100644 --- a/buildkite/src/Constants/ContainerImages.dhall +++ b/buildkite/src/Constants/ContainerImages.dhall @@ -18,6 +18,6 @@ , ubuntu2004 = "ubuntu:20.04" , postgres = "postgres:12.4-alpine" , xrefcheck = - "serokell/xrefcheck@sha256:8fbb35a909abc353364f1bd3148614a1160ef3c111c0c4ae84e58fdf16019eeb" + "dkhamsing/awesome_bot@sha256:sha256:a8adaeb3b3bd5745304743e4d8a6d512127646e420544a6d22d9f58a07f35884" , nixos = "gcr.io/o1labs-192920/nix-unstable:1.0.0" } diff --git a/buildkite/src/Jobs/Lint/Xrefcheck.dhall b/buildkite/src/Jobs/Lint/Xrefcheck.dhall index 20092e9f4ee..2ce8f18330d 100644 --- a/buildkite/src/Jobs/Lint/Xrefcheck.dhall +++ b/buildkite/src/Jobs/Lint/Xrefcheck.dhall @@ -23,7 +23,8 @@ in Pipeline.build , spec = JobSpec::{ , dirtyWhen = [ SelectFiles.strictly SelectFiles::{ exts = Some [ "md" ] } - , SelectFiles.strictly (SelectFiles.contains ".xrefcheck.yml") + , SelectFiles.strictlyStart + (SelectFiles.contains "buildkite/src/Jobs/Lint/Xrefcheck.dhall") ] , path = "Lint" , name = "Xrefcheck" @@ -32,14 +33,25 @@ in Pipeline.build , steps = [ Command.build Command.Config::{ - , commands = [] : List Cmd.Type + , commands = + [ Cmd.run + ( "-allow-dupe" + ++ "--allow-redirect" + ++ "--allow 403,401" + ++ "--skip-save-results" + ++ "`find . -name \"*.md\"" + ++ "! -path \"./src/lib/crypto/kimchi_bindings/*\" " + ++ "! -path \"./src/lib/crypto/proof-systems/*\" " + ++ "! -path \"./src/external/*\" " + ++ "` " + ) + ] , label = "Verifies references in markdown" , key = "xrefcheck" , target = Size.Small , soft_fail = Some (B/SoftFail.Boolean True) , docker = Some Docker::{ , image = (../../Constants/ContainerImages.dhall).xrefcheck - , shell = None (List Text) } } ] From a999601248dae001eb6910017b95d9eb397c1fee Mon Sep 17 00:00:00 2001 From: dkijania Date: Fri, 4 Oct 2024 13:16:49 +0200 Subject: [PATCH 068/234] limiting correctly publishing deb attempts --- scripts/publish-deb.sh | 17 ++++++++++++++++- 1 file changed, 16 insertions(+), 1 deletion(-) diff --git a/scripts/publish-deb.sh b/scripts/publish-deb.sh index b00a094817d..7fd31ccef16 100755 --- a/scripts/publish-deb.sh +++ b/scripts/publish-deb.sh @@ -77,8 +77,23 @@ do deb="${deb_split[0]}" deb=$(basename $deb) - for i in {1..10}; do (verify_o1test_repo_has_package $deb) && break || sleep 60; done + for i in {1..10}; do + LAST_VERIFY_STATUS=verify_o1test_repo_has_package $deb + + if [[ $LAST_VERIFY_STATUS == 0 ]]; then + echo "succesfully validated that package is uploaded to deb-s3" + break + fi + + sleep 60 + i=$((i+1)) + done + if [[ $LAST_VERIFY_STATUS != 0 ]]; then + echo "Cannot locate '$deb' in debian repo. failing job..." 
+ echo "You may still try to rerun job as debian repository is known from imperfect performance" + exit 1 + fi done From 4c6c904cf32fe183c5f2cf0ae8c7f4d324a99e7b Mon Sep 17 00:00:00 2001 From: dkijania Date: Fri, 4 Oct 2024 14:25:23 +0200 Subject: [PATCH 069/234] revert unnecessary changes & apply shell checks --- scripts/publish-deb.sh | 10 ++++++---- 1 file changed, 6 insertions(+), 4 deletions(-) diff --git a/scripts/publish-deb.sh b/scripts/publish-deb.sh index 7fd31ccef16..ecde329d42b 100755 --- a/scripts/publish-deb.sh +++ b/scripts/publish-deb.sh @@ -62,7 +62,7 @@ for i in {1..10}; do ( # Verify integrity of debs on remote repo function verify_o1test_repo_has_package { sudo apt-get update - ${DEBS3_SHOW} ${1} ${DEB_VERSION} $ARCH -c $DEB_CODENAME -m $DEB_RELEASE + ${DEBS3_SHOW} "${1}" "${DEB_VERSION}" $ARCH -c "$DEB_CODENAME" -m "$DEB_RELEASE" return $? } @@ -73,9 +73,11 @@ do DEBS3_SHOW="deb-s3 show $BUCKET_ARG $S3_REGION_ARG" - deb_split=(${deb//_/ }) - deb="${deb_split[0]}" - deb=$(basename $deb) + # extracting name from debian package path. E.g: + # _build/mina-archive_3.0.1-develop-a2a872a.deb -> mina-archive + deb=$(basename "$deb") + deb="${deb%_*}" + for i in {1..10}; do LAST_VERIFY_STATUS=verify_o1test_repo_has_package $deb From dfb144f372bd5f7325eb86d0630f6de95a99f887 Mon Sep 17 00:00:00 2001 From: dkijania Date: Fri, 4 Oct 2024 14:30:55 +0200 Subject: [PATCH 070/234] fix docker format --- buildkite/src/Constants/ContainerImages.dhall | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/buildkite/src/Constants/ContainerImages.dhall b/buildkite/src/Constants/ContainerImages.dhall index 144942f3a16..6078b52f2d8 100644 --- a/buildkite/src/Constants/ContainerImages.dhall +++ b/buildkite/src/Constants/ContainerImages.dhall @@ -18,6 +18,6 @@ , ubuntu2004 = "ubuntu:20.04" , postgres = "postgres:12.4-alpine" , xrefcheck = - "dkhamsing/awesome_bot@sha256:sha256:a8adaeb3b3bd5745304743e4d8a6d512127646e420544a6d22d9f58a07f35884" + "dkhamsing/awesome_bot@sha256:a8adaeb3b3bd5745304743e4d8a6d512127646e420544a6d22d9f58a07f35884" , nixos = "gcr.io/o1labs-192920/nix-unstable:1.0.0" } From 8fcbfdcccecce6cbba682133bf97a2be23bd7922 Mon Sep 17 00:00:00 2001 From: dkijania Date: Fri, 4 Oct 2024 15:16:02 +0200 Subject: [PATCH 071/234] fix function call --- scripts/publish-deb.sh | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/scripts/publish-deb.sh b/scripts/publish-deb.sh index ecde329d42b..57923cdc2d8 100755 --- a/scripts/publish-deb.sh +++ b/scripts/publish-deb.sh @@ -80,7 +80,7 @@ do for i in {1..10}; do - LAST_VERIFY_STATUS=verify_o1test_repo_has_package $deb + LAST_VERIFY_STATUS=$(verify_o1test_repo_has_package "$deb") if [[ $LAST_VERIFY_STATUS == 0 ]]; then echo "succesfully validated that package is uploaded to deb-s3" From d87b348c0ff9c57a32477419325be4285f9b2919 Mon Sep 17 00:00:00 2001 From: dkijania Date: Fri, 4 Oct 2024 15:25:19 +0200 Subject: [PATCH 072/234] do not use shell --- buildkite/src/Jobs/Lint/Xrefcheck.dhall | 1 + 1 file changed, 1 insertion(+) diff --git a/buildkite/src/Jobs/Lint/Xrefcheck.dhall b/buildkite/src/Jobs/Lint/Xrefcheck.dhall index 2ce8f18330d..d20d2eb5f98 100644 --- a/buildkite/src/Jobs/Lint/Xrefcheck.dhall +++ b/buildkite/src/Jobs/Lint/Xrefcheck.dhall @@ -52,6 +52,7 @@ in Pipeline.build , soft_fail = Some (B/SoftFail.Boolean True) , docker = Some Docker::{ , image = (../../Constants/ContainerImages.dhall).xrefcheck + , shell = None (List Text) } } ] From 6d1081353f91abd213e9d9d9ae3cd32dca14ab18 Mon Sep 17 00:00:00 2001 From: 
martyall Date: Fri, 4 Oct 2024 07:38:37 -0700 Subject: [PATCH 073/234] fix state_hash parser --- src/lib/runtime_config/runtime_config.ml | 5 ++--- 1 file changed, 2 insertions(+), 3 deletions(-) diff --git a/src/lib/runtime_config/runtime_config.ml b/src/lib/runtime_config/runtime_config.ml index b8ad91af694..fb2bc7e338f 100644 --- a/src/lib/runtime_config/runtime_config.ml +++ b/src/lib/runtime_config/runtime_config.ml @@ -1849,7 +1849,7 @@ module Constants : Constants_intf = struct let b = let%map.Option f = Option.(b.proof >>= fun x -> x.fork) in { Genesis_constants.Fork_constants.state_hash = - Pickles.Backend.Tick.Field.of_string f.state_hash + Mina_base.State_hash.of_base58_check_exn f.state_hash ; blockchain_length = Mina_numbers.Length.of_int f.blockchain_length ; global_slot_since_genesis = Mina_numbers.Global_slot_since_genesis.of_int @@ -1930,8 +1930,7 @@ module Constants : Constants_intf = struct let constants = let compile_constants = { genesis_constants = Genesis_constants.Compiled.genesis_constants - ; constraint_constants = - Genesis_constants.Compiled.constraint_constants + ; constraint_constants = Genesis_constants.Compiled.constraint_constants ; proof_level = Genesis_constants.Compiled.proof_level ; compile_config = Mina_compile_config.Compiled.t } From d13de59b6f2447430313bdf79e9e518e0ac983f5 Mon Sep 17 00:00:00 2001 From: dkijania Date: Fri, 4 Oct 2024 17:53:07 +0200 Subject: [PATCH 074/234] remove verify function --- scripts/publish-deb.sh | 11 ++++------- 1 file changed, 4 insertions(+), 7 deletions(-) diff --git a/scripts/publish-deb.sh b/scripts/publish-deb.sh index 57923cdc2d8..00c088cf551 100755 --- a/scripts/publish-deb.sh +++ b/scripts/publish-deb.sh @@ -59,12 +59,6 @@ for i in {1..10}; do ( "${DEB_NAMES}" ) && break || scripts/clear-deb-s3-lockfile.sh; done -# Verify integrity of debs on remote repo -function verify_o1test_repo_has_package { - sudo apt-get update - ${DEBS3_SHOW} "${1}" "${DEB_VERSION}" $ARCH -c "$DEB_CODENAME" -m "$DEB_RELEASE" - return $? -} for deb in $DEB_NAMES do @@ -80,7 +74,10 @@ do for i in {1..10}; do - LAST_VERIFY_STATUS=$(verify_o1test_repo_has_package "$deb") + + sudo apt-get update + ${DEBS3_SHOW} "$deb" "${DEB_VERSION}" "${ARCH}" -c "${DEB_CODENAME}" -m "${DEB_RELEASE}" + LAST_VERIFY_STATUS=$? 
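+    # a non-zero exit from deb-s3 show means the package is not yet visible in the repo; the enclosing loop retries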
if [[ $LAST_VERIFY_STATUS == 0 ]]; then echo "succesfully validated that package is uploaded to deb-s3" From 214715566396c0932506a257daa44f7549b8aa0b Mon Sep 17 00:00:00 2001 From: dkijania Date: Fri, 4 Oct 2024 18:36:39 +0200 Subject: [PATCH 075/234] use RunInDocker construct rather than docker field --- buildkite/src/Jobs/Lint/Xrefcheck.dhall | 20 +++++++++----------- 1 file changed, 9 insertions(+), 11 deletions(-) diff --git a/buildkite/src/Jobs/Lint/Xrefcheck.dhall b/buildkite/src/Jobs/Lint/Xrefcheck.dhall index d20d2eb5f98..6f9771f4df1 100644 --- a/buildkite/src/Jobs/Lint/Xrefcheck.dhall +++ b/buildkite/src/Jobs/Lint/Xrefcheck.dhall @@ -12,8 +12,6 @@ let Cmd = ../../Lib/Cmds.dhall let Command = ../../Command/Base.dhall -let Docker = ../../Command/Docker/Type.dhall - let Size = ../../Command/Size.dhall let B/SoftFail = B.definitions/commandStep/properties/soft_fail/Type @@ -34,11 +32,15 @@ in Pipeline.build [ Command.build Command.Config::{ , commands = - [ Cmd.run - ( "-allow-dupe" - ++ "--allow-redirect" - ++ "--allow 403,401" - ++ "--skip-save-results" + [ Cmd.runInDocker + Cmd.Docker::{ + , image = (../../Constants/ContainerImages.dhall).xrefcheck + } + ( "awesome_bot -allow-dupe " + ++ "--allow-redirect " + ++ "--allow 403,401 " + ++ "--skip-save-results " + ++ "--files " ++ "`find . -name \"*.md\"" ++ "! -path \"./src/lib/crypto/kimchi_bindings/*\" " ++ "! -path \"./src/lib/crypto/proof-systems/*\" " @@ -50,10 +52,6 @@ in Pipeline.build , key = "xrefcheck" , target = Size.Small , soft_fail = Some (B/SoftFail.Boolean True) - , docker = Some Docker::{ - , image = (../../Constants/ContainerImages.dhall).xrefcheck - , shell = None (List Text) - } } ] } From f4bcc9c6fe1468f74756a22ee6622a452cdb8522 Mon Sep 17 00:00:00 2001 From: dkijania Date: Fri, 4 Oct 2024 19:30:05 +0200 Subject: [PATCH 076/234] add space in find parameters --- buildkite/src/Jobs/Lint/Xrefcheck.dhall | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/buildkite/src/Jobs/Lint/Xrefcheck.dhall b/buildkite/src/Jobs/Lint/Xrefcheck.dhall index 6f9771f4df1..63a2767bba8 100644 --- a/buildkite/src/Jobs/Lint/Xrefcheck.dhall +++ b/buildkite/src/Jobs/Lint/Xrefcheck.dhall @@ -41,7 +41,7 @@ in Pipeline.build ++ "--allow 403,401 " ++ "--skip-save-results " ++ "--files " - ++ "`find . -name \"*.md\"" + ++ "`find . -name \"*.md\" " ++ "! -path \"./src/lib/crypto/kimchi_bindings/*\" " ++ "! -path \"./src/lib/crypto/proof-systems/*\" " ++ "! 
-path \"./src/external/*\" " From f1c142d625959cbb4014c65021acaa4c4f40c78a Mon Sep 17 00:00:00 2001 From: dkijania Date: Mon, 7 Oct 2024 18:07:01 +0200 Subject: [PATCH 077/234] Added more linters to Size = Multi category --- buildkite/src/Jobs/Lint/Fast.dhall | 2 +- buildkite/src/Jobs/Lint/HelmChart.dhall | 2 +- buildkite/src/Jobs/Lint/Merge.dhall | 6 +++--- 3 files changed, 5 insertions(+), 5 deletions(-) diff --git a/buildkite/src/Jobs/Lint/Fast.dhall b/buildkite/src/Jobs/Lint/Fast.dhall index 6a326a9588f..66ee356019c 100644 --- a/buildkite/src/Jobs/Lint/Fast.dhall +++ b/buildkite/src/Jobs/Lint/Fast.dhall @@ -42,7 +42,7 @@ in Pipeline.build , label = "Fast lint steps; CODEOWNERs, RFCs, Check Snarky & Proof-Systems submodules, Preprocessor Deps" , key = "lint" - , target = Size.Small + , target = Size.Multi , docker = Some Docker::{ , image = (../../Constants/ContainerImages.dhall).toolchainBase } diff --git a/buildkite/src/Jobs/Lint/HelmChart.dhall b/buildkite/src/Jobs/Lint/HelmChart.dhall index 8c7ea461349..02bbc31f508 100644 --- a/buildkite/src/Jobs/Lint/HelmChart.dhall +++ b/buildkite/src/Jobs/Lint/HelmChart.dhall @@ -35,7 +35,7 @@ in Pipeline.build [ Cmd.run "HELM_LINT=true buildkite/scripts/helm-ci.sh" ] , label = "Helm chart lint steps" , key = "lint-helm-chart" - , target = Size.Small + , target = Size.Multi , docker = None Docker.Type } ] diff --git a/buildkite/src/Jobs/Lint/Merge.dhall b/buildkite/src/Jobs/Lint/Merge.dhall index 57c2ecc0099..a03ae66b395 100644 --- a/buildkite/src/Jobs/Lint/Merge.dhall +++ b/buildkite/src/Jobs/Lint/Merge.dhall @@ -33,7 +33,7 @@ in Pipeline.build [ Cmd.run "buildkite/scripts/merges-cleanly.sh compatible" ] , label = "Check merges cleanly into compatible" , key = "clean-merge-compatible" - , target = Size.Small + , target = Size.Multi , docker = Some Docker::{ , image = (../../Constants/ContainerImages.dhall).toolchainBase } @@ -44,7 +44,7 @@ in Pipeline.build [ Cmd.run "buildkite/scripts/merges-cleanly.sh develop" ] , label = "Check merges cleanly into develop" , key = "clean-merge-develop" - , target = Size.Small + , target = Size.Multi , docker = Some Docker::{ , image = (../../Constants/ContainerImages.dhall).toolchainBase } @@ -55,7 +55,7 @@ in Pipeline.build [ Cmd.run "buildkite/scripts/merges-cleanly.sh master" ] , label = "Check merges cleanly into master" , key = "clean-merge-master" - , target = Size.Small + , target = Size.Multi , docker = Some Docker::{ , image = (../../Constants/ContainerImages.dhall).toolchainBase } From 38d4d9a9d19502be6863443ef0b541567db77317 Mon Sep 17 00:00:00 2001 From: martyall Date: Mon, 7 Oct 2024 13:55:44 -0700 Subject: [PATCH 078/234] remove unconfigurable constants module --- .../genesis_constants/genesis_constants.ml | 15 +++-- .../mina_compile_config.ml | 59 ++----------------- src/lib/node_config/dune | 2 +- src/lib/node_config/for_unit_tests/dune | 2 +- .../node_config_for_unit_tests.ml | 38 +++++++++++- .../node_config_for_unit_tests.mli | 22 +++++++ src/lib/node_config/intf/node_config_intf.mli | 27 --------- src/lib/node_config/node_config.ml | 1 - .../node_config/unconfigurable_constants/dune | 7 --- .../node_config_unconfigurable_constants.ml | 38 ------------ .../node_config_unconfigurable_constants.mli | 1 - 11 files changed, 73 insertions(+), 139 deletions(-) delete mode 100644 src/lib/node_config/unconfigurable_constants/dune delete mode 100644 src/lib/node_config/unconfigurable_constants/node_config_unconfigurable_constants.ml delete mode 100644 
src/lib/node_config/unconfigurable_constants/node_config_unconfigurable_constants.mli diff --git a/src/lib/genesis_constants/genesis_constants.ml b/src/lib/genesis_constants/genesis_constants.ml index 35259dd34ba..5128450b183 100644 --- a/src/lib/genesis_constants/genesis_constants.ml +++ b/src/lib/genesis_constants/genesis_constants.ml @@ -419,14 +419,13 @@ module Make (Node_config : Node_config_intf.S) : S = struct } ; txpool_max_size = pool_max_size ; num_accounts = None - ; zkapp_proof_update_cost = Node_config.zkapp_proof_update_cost - ; zkapp_signed_single_update_cost = - Node_config.zkapp_signed_single_update_cost - ; zkapp_signed_pair_update_cost = Node_config.zkapp_signed_pair_update_cost - ; zkapp_transaction_cost_limit = Node_config.zkapp_transaction_cost_limit - ; max_event_elements = Node_config.max_event_elements - ; max_action_elements = Node_config.max_action_elements - ; zkapp_cmd_limit_hardcap = Node_config.zkapp_cmd_limit_hardcap + ; zkapp_proof_update_cost = 10.26 + ; zkapp_signed_pair_update_cost = 10.08 + ; zkapp_signed_single_update_cost = 9.14 + ; zkapp_transaction_cost_limit = 69.45 + ; max_event_elements = 100 + ; max_action_elements = 100 + ; zkapp_cmd_limit_hardcap = 128 ; minimum_user_command_fee = Currency.Fee.of_mina_string_exn Node_config.minimum_user_command_fee } diff --git a/src/lib/mina_compile_config/mina_compile_config.ml b/src/lib/mina_compile_config/mina_compile_config.ml index 067bc5b7bac..bc5f7c30363 100644 --- a/src/lib/mina_compile_config/mina_compile_config.ml +++ b/src/lib/mina_compile_config/mina_compile_config.ml @@ -21,13 +21,6 @@ module Inputs = struct ; rpc_handshake_timeout_sec : float ; rpc_heartbeat_timeout_sec : float ; rpc_heartbeat_send_every_sec : float - ; zkapp_proof_update_cost : float - ; zkapp_signed_pair_update_cost : float - ; zkapp_signed_single_update_cost : float - ; zkapp_transaction_cost_limit : float - ; max_event_elements : int - ; max_action_elements : int - ; zkapp_cmd_limit_hardcap : int ; zkapps_disabled : bool } [@@deriving yojson] @@ -47,15 +40,9 @@ type t = ; rpc_handshake_timeout : Time.Span.t ; rpc_heartbeat_timeout : Time.Span.t ; rpc_heartbeat_send_every : Time.Span.t - ; zkapp_proof_update_cost : float - ; zkapp_signed_pair_update_cost : float - ; zkapp_signed_single_update_cost : float - ; zkapp_transaction_cost_limit : float - ; max_event_elements : int - ; max_action_elements : int - ; zkapp_cmd_limit_hardcap : int ; zkapps_disabled : bool } +[@@deriving sexp_of] let make (inputs : Inputs.t) = { curve_size = inputs.curve_size @@ -78,15 +65,8 @@ let make (inputs : Inputs.t) = ; rpc_heartbeat_timeout = Time.Span.of_sec inputs.rpc_heartbeat_timeout_sec ; rpc_heartbeat_send_every = Time.Span.of_sec inputs.rpc_heartbeat_send_every_sec - ; zkapp_proof_update_cost = inputs.zkapp_proof_update_cost - ; zkapp_signed_pair_update_cost = inputs.zkapp_signed_pair_update_cost - ; zkapp_signed_single_update_cost = inputs.zkapp_signed_single_update_cost - ; zkapp_transaction_cost_limit = inputs.zkapp_transaction_cost_limit - ; max_event_elements = inputs.max_event_elements - ; max_action_elements = inputs.max_action_elements ; network_id = inputs.network_id ; zkapp_cmd_limit = inputs.zkapp_cmd_limit - ; zkapp_cmd_limit_hardcap = inputs.zkapp_cmd_limit_hardcap ; zkapps_disabled = inputs.zkapps_disabled } @@ -112,18 +92,10 @@ let to_yojson t = , `Float (Time.Span.to_sec t.rpc_heartbeat_timeout) ) ; ( "rpc_heartbeat_send_every" , `Float (Time.Span.to_sec t.rpc_heartbeat_send_every) ) - ; ("zkapp_proof_update_cost", `Float 
t.zkapp_proof_update_cost) - ; ("zkapp_signed_pair_update_cost", `Float t.zkapp_signed_pair_update_cost) - ; ( "zkapp_signed_single_update_cost" - , `Float t.zkapp_signed_single_update_cost ) - ; ("zkapp_transaction_cost_limit", `Float t.zkapp_transaction_cost_limit) - ; ("max_event_elements", `Int t.max_event_elements) - ; ("max_action_elements", `Int t.max_action_elements) ; ("network_id", `String t.network_id) ; ( "zkapp_cmd_limit" , Option.value_map ~default:`Null ~f:(fun x -> `Int x) t.zkapp_cmd_limit ) - ; ("zkapp_cmd_limit_hardcap", `Int t.zkapp_cmd_limit_hardcap) ; ("zkapps_disabled", `Bool t.zkapps_disabled) ] @@ -139,21 +111,12 @@ module Compiled = struct ; compaction_interval_ms = Node_config.compaction_interval ; block_window_duration_ms = Node_config.block_window_duration ; vrf_poll_interval_ms = Node_config.vrf_poll_interval - ; rpc_handshake_timeout_sec = Node_config.rpc_handshake_timeout_sec - ; rpc_heartbeat_timeout_sec = Node_config.rpc_heartbeat_timeout_sec - ; rpc_heartbeat_send_every_sec = Node_config.rpc_heartbeat_send_every_sec - ; zkapp_proof_update_cost = Node_config.zkapp_proof_update_cost - ; zkapp_signed_pair_update_cost = - Node_config.zkapp_signed_pair_update_cost - ; zkapp_signed_single_update_cost = - Node_config.zkapp_signed_single_update_cost - ; zkapp_transaction_cost_limit = Node_config.zkapp_transaction_cost_limit - ; max_event_elements = Node_config.max_event_elements - ; max_action_elements = Node_config.max_action_elements ; network_id = Node_config.network ; zkapp_cmd_limit = Node_config.zkapp_cmd_limit - ; zkapp_cmd_limit_hardcap = Node_config.zkapp_cmd_limit_hardcap - ; zkapps_disabled = Node_config.zkapps_disabled + ; rpc_handshake_timeout_sec = 60.0 + ; rpc_heartbeat_timeout_sec = 60.0 + ; rpc_heartbeat_send_every_sec = 10.0 + ; zkapps_disabled = false } in make inputs @@ -180,20 +143,8 @@ module For_unit_tests = struct Node_config_for_unit_tests.rpc_heartbeat_timeout_sec ; rpc_heartbeat_send_every_sec = Node_config_for_unit_tests.rpc_heartbeat_send_every_sec - ; zkapp_proof_update_cost = - Node_config_for_unit_tests.zkapp_proof_update_cost - ; zkapp_signed_pair_update_cost = - Node_config_for_unit_tests.zkapp_signed_pair_update_cost - ; zkapp_signed_single_update_cost = - Node_config_for_unit_tests.zkapp_signed_single_update_cost - ; zkapp_transaction_cost_limit = - Node_config_for_unit_tests.zkapp_transaction_cost_limit - ; max_event_elements = Node_config_for_unit_tests.max_event_elements - ; max_action_elements = Node_config_for_unit_tests.max_action_elements ; network_id = Node_config_for_unit_tests.network ; zkapp_cmd_limit = Node_config_for_unit_tests.zkapp_cmd_limit - ; zkapp_cmd_limit_hardcap = - Node_config_for_unit_tests.zkapp_cmd_limit_hardcap ; zkapps_disabled = Node_config_for_unit_tests.zkapps_disabled } in diff --git a/src/lib/node_config/dune b/src/lib/node_config/dune index 3d3eebb8b77..019dba43c42 100644 --- a/src/lib/node_config/dune +++ b/src/lib/node_config/dune @@ -4,7 +4,7 @@ (libraries node_config_intf node_config_version - node_config_unconfigurable_constants) + ) (preprocessor_deps ../../config.mlh) (instrumentation (backend bisect_ppx)) (preprocess (pps ppx_version ppx_base ppx_optcomp)) diff --git a/src/lib/node_config/for_unit_tests/dune b/src/lib/node_config/for_unit_tests/dune index 460efc1f009..003ed5d630e 100644 --- a/src/lib/node_config/for_unit_tests/dune +++ b/src/lib/node_config/for_unit_tests/dune @@ -4,7 +4,7 @@ (libraries node_config_intf node_config_version - node_config_unconfigurable_constants) + ) 
(instrumentation (backend bisect_ppx)) (preprocess (pps ppx_version ppx_base ppx_optcomp)) ) diff --git a/src/lib/node_config/for_unit_tests/node_config_for_unit_tests.ml b/src/lib/node_config/for_unit_tests/node_config_for_unit_tests.ml index 95c880e9142..80f4c4fc397 100644 --- a/src/lib/node_config/for_unit_tests/node_config_for_unit_tests.ml +++ b/src/lib/node_config/for_unit_tests/node_config_for_unit_tests.ml @@ -4,7 +4,6 @@ *) include Node_config_version -include Node_config_unconfigurable_constants let (ledger_depth : int) = (10 : int) @@ -64,3 +63,40 @@ let (vrf_poll_interval : int) = (0 : int) let zkapp_cmd_limit = None let scan_state_tps_goal_x10 : int option = None + +(** limits on Zkapp_command.t size + 10.26*np + 10.08*n2 + 9.14*n1 < 69.45 + where np: number of single proof updates + n2: number of pairs of signed/no-auth update + n1: number of single signed/no-auth update + and their coefficients representing the cost + The formula was generated based on benchmarking data conducted on bare + metal i9 processor with room to include lower spec. + 69.45 was the total time for a combination of updates that was considered + acceptable. + The method used to estimate the cost was linear least squares. +*) + +let zkapp_proof_update_cost = 10.26 + +let zkapp_signed_pair_update_cost = 10.08 + +let zkapp_signed_single_update_cost = 9.14 + +let zkapp_transaction_cost_limit = 69.45 + +let max_event_elements = 100 + +let max_action_elements = 100 + +let zkapp_cmd_limit_hardcap = 128 + +(* These are fine to be non-configurable *) + +let zkapps_disabled = false + +let rpc_handshake_timeout_sec = 60.0 + +let rpc_heartbeat_timeout_sec = 60.0 + +let rpc_heartbeat_send_every_sec = 10.0 (*same as the default*) diff --git a/src/lib/node_config/for_unit_tests/node_config_for_unit_tests.mli b/src/lib/node_config/for_unit_tests/node_config_for_unit_tests.mli index eb996f25855..bbe3b4300d0 100644 --- a/src/lib/node_config/for_unit_tests/node_config_for_unit_tests.mli +++ b/src/lib/node_config/for_unit_tests/node_config_for_unit_tests.mli @@ -1 +1,23 @@ include Node_config_intf.S + +val zkapp_proof_update_cost : float + +val zkapp_signed_pair_update_cost : float + +val zkapp_signed_single_update_cost : float + +val zkapp_transaction_cost_limit : float + +val max_event_elements : int + +val max_action_elements : int + +val zkapp_cmd_limit_hardcap : int + +val zkapps_disabled : bool + +val rpc_handshake_timeout_sec : float + +val rpc_heartbeat_timeout_sec : float + +val rpc_heartbeat_send_every_sec : float diff --git a/src/lib/node_config/intf/node_config_intf.mli b/src/lib/node_config/intf/node_config_intf.mli index 3d2960cc380..b7fb1629fc6 100644 --- a/src/lib/node_config/intf/node_config_intf.mli +++ b/src/lib/node_config/intf/node_config_intf.mli @@ -6,36 +6,9 @@ module type Version = sig val protocol_version_patch : int end -(* It's stupid that this exists. TODO: Remove and make configurable. 
*) -module type Unconfigurable_constants = sig - val zkapp_proof_update_cost : float - - val zkapp_signed_pair_update_cost : float - - val zkapp_signed_single_update_cost : float - - val zkapp_transaction_cost_limit : float - - val max_event_elements : int - - val max_action_elements : int - - val zkapp_cmd_limit_hardcap : int - - val zkapps_disabled : bool - - val rpc_handshake_timeout_sec : float - - val rpc_heartbeat_timeout_sec : float - - val rpc_heartbeat_send_every_sec : float -end - module type S = sig include Version - include Unconfigurable_constants - val ledger_depth : int val curve_size : int diff --git a/src/lib/node_config/node_config.ml b/src/lib/node_config/node_config.ml index 78485f82a17..25f20aa97ba 100644 --- a/src/lib/node_config/node_config.ml +++ b/src/lib/node_config/node_config.ml @@ -7,7 +7,6 @@ *) include Node_config_version -include Node_config_unconfigurable_constants [%%inject "ledger_depth", ledger_depth] diff --git a/src/lib/node_config/unconfigurable_constants/dune b/src/lib/node_config/unconfigurable_constants/dune deleted file mode 100644 index 6bcb95d8668..00000000000 --- a/src/lib/node_config/unconfigurable_constants/dune +++ /dev/null @@ -1,7 +0,0 @@ -(library - (name node_config_unconfigurable_constants) - (public_name mina_node_config.unconfigurable_constants) - (libraries node_config_intf) - (instrumentation (backend bisect_ppx)) - (preprocess (pps ppx_version ppx_base ppx_optcomp)) -) diff --git a/src/lib/node_config/unconfigurable_constants/node_config_unconfigurable_constants.ml b/src/lib/node_config/unconfigurable_constants/node_config_unconfigurable_constants.ml deleted file mode 100644 index 1097f348655..00000000000 --- a/src/lib/node_config/unconfigurable_constants/node_config_unconfigurable_constants.ml +++ /dev/null @@ -1,38 +0,0 @@ -(* FIXME: These should be configurable. *) - -(** limits on Zkapp_command.t size - 10.26*np + 10.08*n2 + 9.14*n1 < 69.45 - where np: number of single proof updates - n2: number of pairs of signed/no-auth update - n1: number of single signed/no-auth update - and their coefficients representing the cost - The formula was generated based on benchmarking data conducted on bare - metal i9 processor with room to include lower spec. - 69.45 was the total time for a combination of updates that was considered - acceptable. - The method used to estimate the cost was linear least squares. 
-*) - -let zkapp_proof_update_cost = 10.26 - -let zkapp_signed_pair_update_cost = 10.08 - -let zkapp_signed_single_update_cost = 9.14 - -let zkapp_transaction_cost_limit = 69.45 - -let max_event_elements = 100 - -let max_action_elements = 100 - -let zkapp_cmd_limit_hardcap = 128 - -(* These are fine to be non-configurable *) - -let zkapps_disabled = false - -let rpc_handshake_timeout_sec = 60.0 - -let rpc_heartbeat_timeout_sec = 60.0 - -let rpc_heartbeat_send_every_sec = 10.0 (*same as the default*) diff --git a/src/lib/node_config/unconfigurable_constants/node_config_unconfigurable_constants.mli b/src/lib/node_config/unconfigurable_constants/node_config_unconfigurable_constants.mli deleted file mode 100644 index 51efc504085..00000000000 --- a/src/lib/node_config/unconfigurable_constants/node_config_unconfigurable_constants.mli +++ /dev/null @@ -1 +0,0 @@ -include Node_config_intf.Unconfigurable_constants From cec3f90acf64e8eba478839c3b836dbd9d81b954 Mon Sep 17 00:00:00 2001 From: martyall Date: Mon, 7 Oct 2024 13:56:14 -0700 Subject: [PATCH 079/234] update itn_logger and logger --- src/lib/itn_logger/dune | 2 -- src/lib/itn_logger/itn_logger.ml | 39 +++++++++++++++-------------- src/lib/logger/fake/logger.ml | 10 +++++++- src/lib/logger/logger.mli | 12 ++++++++- src/lib/logger/native/logger.ml | 43 ++++++++++++++++++++------------ 5 files changed, 67 insertions(+), 39 deletions(-) diff --git a/src/lib/itn_logger/dune b/src/lib/itn_logger/dune index bf9a03dbc9d..6f9c8936a26 100644 --- a/src/lib/itn_logger/dune +++ b/src/lib/itn_logger/dune @@ -14,8 +14,6 @@ core core_kernel yojson - ;; local libraries - mina_node_config.unconfigurable_constants ) (instrumentation (backend bisect_ppx)) (preprocess (pps ppx_version ppx_mina ppx_jane))) diff --git a/src/lib/itn_logger/itn_logger.ml b/src/lib/itn_logger/itn_logger.ml index 11e5f50c997..ad5a5d73f18 100644 --- a/src/lib/itn_logger/itn_logger.ml +++ b/src/lib/itn_logger/itn_logger.ml @@ -67,7 +67,16 @@ module Submit_internal_log = struct ~bin_response end -let dispatch_remote_log log = +type config = + { rpc_handshake_timeout : Time.Span.t + ; rpc_heartbeat_timeout : Time_ns.Span.t + ; rpc_heartbeat_send_every : Time_ns.Span.t + } +[@@deriving bin_io_unversioned] + +(* dispatch log to daemon *) + +let dispatch_remote_log config log = let open Async.Deferred.Let_syntax in let rpc = Submit_internal_log.rpc in match daemon_where_to_connect () with @@ -80,21 +89,11 @@ let dispatch_remote_log log = | Some where_to_connect -> ( let%map res = Async.Rpc.Connection.with_client - ~handshake_timeout: - (Time.Span.of_sec - Node_config_unconfigurable_constants.rpc_handshake_timeout_sec ) + ~handshake_timeout:config.rpc_handshake_timeout ~heartbeat_config: (Async.Rpc.Connection.Heartbeat_config.create - ~timeout: - (Time_ns.Span.of_sec - Node_config_unconfigurable_constants - .rpc_heartbeat_timeout_sec ) - ~send_every: - (Time_ns.Span.of_sec - Node_config_unconfigurable_constants - .rpc_heartbeat_send_every_sec ) - () ) - where_to_connect + ~timeout:config.rpc_heartbeat_timeout + ~send_every:config.rpc_heartbeat_send_every () ) where_to_connect (fun conn -> Async.Rpc.Rpc.dispatch rpc conn log) in (* not ideal that errors are not themselves logged *) @@ -110,13 +109,13 @@ let dispatch_remote_log log = (* Used to ensure that no more than one log message is in-flight at a time to guarantee sequential processing. 
*) -let sequential_dispatcher_loop () = +let sequential_dispatcher_loop config () = let open Async in let pipe_r, pipe_w = Pipe.create () in - don't_wait_for (Pipe.iter pipe_r ~f:dispatch_remote_log) ; + don't_wait_for (Pipe.iter pipe_r ~f:(dispatch_remote_log config)) ; pipe_w -let sequential_log_writer_pipe = sequential_dispatcher_loop () +let sequential_log_writer_pipe config = sequential_dispatcher_loop config () (* this function can be called: (1) by the logging process (daemon, verifier, or prover) from the logger in Logger, or @@ -125,7 +124,7 @@ let sequential_log_writer_pipe = sequential_dispatcher_loop () for (1), if the process is the verifier or prover, the log is forwarded by RPC to the daemon, resulting in a recursive call of type (2) *) -let log ?process ~timestamp ~message ~metadata () = +let log ?process ~timestamp ~message ~metadata ~config () = match get_process_kind () with | Some process -> (* prover or verifier, send log to daemon @@ -136,7 +135,9 @@ let log ?process ~timestamp ~message ~metadata () = List.map metadata ~f:(fun (s, json) -> (s, Yojson.Safe.to_string json)) in let remote_log = { timestamp; message; metadata; process } in - Async.Pipe.write_without_pushback sequential_log_writer_pipe remote_log + Async.Pipe.write_without_pushback + (sequential_log_writer_pipe config) + remote_log | None -> (* daemon *) (* convert JSON to Basic.t in queue, so we don't have to in GraphQL response *) diff --git a/src/lib/logger/fake/logger.ml b/src/lib/logger/fake/logger.ml index 959a7a6c567..5f74dc578c6 100644 --- a/src/lib/logger/fake/logger.ml +++ b/src/lib/logger/fake/logger.ml @@ -134,7 +134,15 @@ type t = Metadata.Stable.Latest.t [@@deriving bin_io_unversioned] let metadata = Fn.id -let create ?metadata:_ ?id:_ ?itn_features:_ () = Metadata.empty +type itn_logger_config = unit + +let make_itn_logger_config ~rpc_handshake_timeout:_ ~rpc_heartbeat_timeout:_ + ~rpc_heartbeat_send_every:_ = + () + +let create ?metadata:_ ?id:_ ?itn_config:_ () = Metadata.empty + +let with_itn _ = Fn.id let null () = Metadata.empty diff --git a/src/lib/logger/logger.mli b/src/lib/logger/logger.mli index c2cb406e29e..c6eff3bb4a7 100644 --- a/src/lib/logger/logger.mli +++ b/src/lib/logger/logger.mli @@ -135,10 +135,20 @@ type 'a log_function = -> ('a, unit, string, unit) format4 -> 'a +type itn_logger_config + +val make_itn_logger_config : + rpc_handshake_timeout:Time.Span.t + -> rpc_heartbeat_timeout:Time_ns.Span.t + -> rpc_heartbeat_send_every:Time_ns.Span.t + -> itn_logger_config + +val with_itn : itn_logger_config -> t -> t + val create : ?metadata:(string, Yojson.Safe.t) List.Assoc.t -> ?id:string - -> ?itn_features:bool + -> ?itn_config:itn_logger_config -> unit -> t diff --git a/src/lib/logger/native/logger.ml b/src/lib/logger/native/logger.ml index da36f253cda..dcbc3c90b93 100644 --- a/src/lib/logger/native/logger.ml +++ b/src/lib/logger/native/logger.ml @@ -333,25 +333,33 @@ type t = { null : bool ; metadata : Metadata.Stable.Latest.t ; id : Bounded_types.String.Stable.V1.t - ; itn_features : bool + ; itn_config : Itn_logger.config option } [@@deriving bin_io_unversioned] let metadata t = t.metadata -let create ?(metadata = []) ?(id = "default") ?(itn_features = false) () = +type itn_logger_config = Itn_logger.config + +let make_itn_logger_config ~rpc_handshake_timeout ~rpc_heartbeat_timeout + ~rpc_heartbeat_send_every = + { Itn_logger.rpc_handshake_timeout + ; rpc_heartbeat_timeout + ; rpc_heartbeat_send_every + } + +let create ?(metadata = []) ?(id = "default") ?itn_config () = { 
null = false ; metadata = Metadata.extend Metadata.empty metadata ; id - ; itn_features + ; itn_config } +let with_itn itn_logger_config t = + { t with itn_config = Some itn_logger_config } + let null () = - { null = true - ; metadata = Metadata.empty - ; id = "default" - ; itn_features = false - } + { null = true; metadata = Metadata.empty; id = "default"; itn_config = None } let extend t metadata = { t with metadata = Metadata.extend t.metadata metadata } @@ -413,14 +421,17 @@ let log t ~level ~module_ ~location ?(metadata = []) ?event_id fmt = in raw t message' ; match level with - | Internal -> - if t.itn_features then - let timestamp = message'.timestamp in - let entries = - Itn_logger.postprocess_message ~timestamp ~message ~metadata - in - List.iter entries ~f:(fun (timestamp, message, metadata) -> - Itn_logger.log ~timestamp ~message ~metadata () ) + | Internal -> ( + match t.itn_config with + | Some config -> + let timestamp = message'.timestamp in + let entries = + Itn_logger.postprocess_message ~timestamp ~message ~metadata + in + List.iter entries ~f:(fun (timestamp, message, metadata) -> + Itn_logger.log ~timestamp ~message ~metadata ~config () ) + | None -> + () ) | _ -> () in From a0d9c1b4e03f18234fdbee28c51e87b4d9e40464 Mon Sep 17 00:00:00 2001 From: martyall Date: Mon, 7 Oct 2024 13:56:28 -0700 Subject: [PATCH 080/234] update runtime_config --- src/lib/runtime_config/runtime_config.ml | 11 ----------- 1 file changed, 11 deletions(-) diff --git a/src/lib/runtime_config/runtime_config.ml b/src/lib/runtime_config/runtime_config.ml index fb2bc7e338f..b6af6b10e56 100644 --- a/src/lib/runtime_config/runtime_config.ml +++ b/src/lib/runtime_config/runtime_config.ml @@ -1908,17 +1908,6 @@ module Constants : Constants_intf = struct block_window_duration = constraint_constants.block_window_duration_ms |> Float.of_int |> Time.Span.of_ms - ; zkapp_proof_update_cost = genesis_constants.zkapp_proof_update_cost - ; zkapp_signed_single_update_cost = - genesis_constants.zkapp_signed_single_update_cost - ; zkapp_signed_pair_update_cost = - genesis_constants.zkapp_signed_pair_update_cost - ; zkapp_transaction_cost_limit = - genesis_constants.zkapp_transaction_cost_limit - ; max_event_elements = genesis_constants.max_event_elements - ; max_action_elements = genesis_constants.max_action_elements - ; zkapp_cmd_limit_hardcap = genesis_constants.zkapp_cmd_limit_hardcap - ; minimum_user_command_fee = genesis_constants.minimum_user_command_fee ; network_id = Option.value ~default:a.compile_config.network_id Option.(b.daemon >>= fun d -> d.network_id) From 1eb7ff362e60e54ded209bd5782c9e520cf66f61 Mon Sep 17 00:00:00 2001 From: martyall Date: Mon, 7 Oct 2024 13:56:49 -0700 Subject: [PATCH 081/234] src/lib --- src/lib/daemon_rpcs/client.ml | 32 ++++++++--------- src/lib/daemon_rpcs/dune | 1 - src/lib/mina_base/dune | 1 + .../test/verification_key_permission_test.ml | 2 ++ src/lib/mina_base/test/zero_vesting_period.ml | 2 ++ src/lib/mina_base/user_command.ml | 9 ++--- src/lib/mina_graphql/mina_graphql.ml | 12 ++++--- src/lib/mina_lib/archive_client.ml | 12 ++++--- src/lib/mina_lib/archive_client.mli | 6 ++-- src/lib/mina_lib/mina_lib.ml | 8 +++-- src/lib/mina_lib/tests/tests.ml | 3 +- src/lib/network_pool/intf.ml | 1 + src/lib/network_pool/transaction_pool.ml | 11 ++++-- src/lib/snark_worker/dune | 1 - src/lib/snark_worker/functor.ml | 36 ++++++++++--------- src/lib/transaction/transaction.ml | 4 +-- .../transaction_inclusion_status.ml | 2 +- src/lib/transition_handler/block_sink.ml | 7 +++- 
src/lib/transition_handler/block_sink.mli | 1 + 19 files changed, 89 insertions(+), 62 deletions(-) diff --git a/src/lib/daemon_rpcs/client.ml b/src/lib/daemon_rpcs/client.ml index 1c95b95a6bc..050a3491894 100644 --- a/src/lib/daemon_rpcs/client.ml +++ b/src/lib/daemon_rpcs/client.ml @@ -6,27 +6,23 @@ open Async let print_rpc_error error = eprintf "RPC connection error: %s\n" (Error.to_string_hum error) -let dispatch rpc query (host_and_port : Host_and_port.t) = +let dispatch ~(compile_config : Mina_compile_config.t) rpc query + (host_and_port : Host_and_port.t) = Deferred.Or_error.try_with_join ~here:[%here] (fun () -> Tcp.with_connection (Tcp.Where_to_connect.of_host_and_port host_and_port) ~timeout:(Time.Span.of_sec 1.) (fun _ r w -> let open Deferred.Let_syntax in match%bind Rpc.Connection.create - ~handshake_timeout: - (Time.Span.of_sec - Node_config_unconfigurable_constants - .rpc_handshake_timeout_sec ) + ~handshake_timeout:compile_config.rpc_handshake_timeout ~heartbeat_config: (Rpc.Connection.Heartbeat_config.create ~timeout: - (Time_ns.Span.of_sec - Node_config_unconfigurable_constants - .rpc_heartbeat_timeout_sec ) + ( compile_config.rpc_heartbeat_timeout |> Time.Span.to_sec + |> Time_ns.Span.of_sec ) ~send_every: - (Time_ns.Span.of_sec - Node_config_unconfigurable_constants - .rpc_heartbeat_send_every_sec ) + ( compile_config.rpc_heartbeat_send_every + |> Time.Span.to_sec |> Time_ns.Span.of_sec ) () ) r w ~connection_state:(fun _ -> ()) @@ -40,19 +36,19 @@ let dispatch rpc query (host_and_port : Host_and_port.t) = | Ok conn -> Rpc.Rpc.dispatch rpc conn query ) ) -let dispatch_join_errors rpc query port = +let dispatch_join_errors ~compile_config rpc query port = let open Deferred.Let_syntax in - let%map res = dispatch rpc query port in + let%map res = dispatch ~compile_config rpc query port in Or_error.join res (** Call an RPC, passing handlers for a successful call and a failing one. Note that a successful *call* may have failed on the server side and returned a failing result. To deal with that, the success handler returns an Or_error. *) -let dispatch_with_message rpc query port ~success ~error +let dispatch_with_message ~compile_config rpc query port ~success ~error ~(join_error : 'a Or_error.t -> 'b Or_error.t) = let fail err = eprintf "%s\n%!" 
err ; exit 18 in - let%bind res = dispatch rpc query port in + let%bind res = dispatch ~compile_config rpc query port in match join_error res with | Ok x -> printf "%s\n" (success x) ; @@ -62,8 +58,8 @@ let dispatch_with_message rpc query port ~success ~error let dispatch_pretty_message (type t) (module Print : Cli_lib.Render.Printable_intf with type t = t) - ?(json = true) ~(join_error : 'a Or_error.t -> t Or_error.t) ~error_ctx rpc - query port = - let%bind res = dispatch rpc query port in + ?(json = true) ~compile_config ~(join_error : 'a Or_error.t -> t Or_error.t) + ~error_ctx rpc query port = + let%bind res = dispatch ~compile_config rpc query port in Cli_lib.Render.print (module Print) json (join_error res) ~error_ctx |> Deferred.return diff --git a/src/lib/daemon_rpcs/dune b/src/lib/daemon_rpcs/dune index 86876a493e8..a9abbb47f89 100644 --- a/src/lib/daemon_rpcs/dune +++ b/src/lib/daemon_rpcs/dune @@ -36,7 +36,6 @@ perf_histograms sync_status node_addrs_and_ports - mina_node_config.unconfigurable_constants logger network_pool data_hash_lib diff --git a/src/lib/mina_base/dune b/src/lib/mina_base/dune index e362e42f2bb..5324ac38daf 100644 --- a/src/lib/mina_base/dune +++ b/src/lib/mina_base/dune @@ -73,6 +73,7 @@ snark_bits error_json ppx_version.runtime + mina_compile_config ) (preprocess (pps ppx_annot ppx_snarky ppx_here ppx_mina ppx_version ppx_compare ppx_deriving.enum ppx_deriving.ord ppx_deriving.make diff --git a/src/lib/mina_base/test/verification_key_permission_test.ml b/src/lib/mina_base/test/verification_key_permission_test.ml index e7c8559722c..ba624fb295e 100644 --- a/src/lib/mina_base/test/verification_key_permission_test.ml +++ b/src/lib/mina_base/test/verification_key_permission_test.ml @@ -39,6 +39,7 @@ let update_vk_perm_with_different_version () = match User_command.check_well_formedness ~genesis_constants:Genesis_constants.For_unit_tests.t + ~compile_config:Mina_compile_config.For_unit_tests.t (Zkapp_command (update_vk_perm_to_be ~auth:(auth, different_version))) with | Ok _ -> @@ -52,6 +53,7 @@ let update_vk_perm_with_current_version () = Quickcheck.test ~trials:10 auth_gen ~f:(fun auth -> match User_command.check_well_formedness + ~compile_config:Mina_compile_config.For_unit_tests.t ~genesis_constants:Genesis_constants.For_unit_tests.t (Zkapp_command (update_vk_perm_to_be diff --git a/src/lib/mina_base/test/zero_vesting_period.ml b/src/lib/mina_base/test/zero_vesting_period.ml index bc717be8fc6..41aee5aca98 100644 --- a/src/lib/mina_base/test/zero_vesting_period.ml +++ b/src/lib/mina_base/test/zero_vesting_period.ml @@ -282,6 +282,7 @@ let zero_vesting_period_is_error () = match User_command.check_well_formedness ~genesis_constants:Genesis_constants.For_unit_tests.t + ~compile_config:Mina_compile_config.For_unit_tests.t (Zkapp_command zkapp_zero_vesting_period) with | Error [ Zero_vesting_period ] -> @@ -294,6 +295,7 @@ let zkapp_nonzero_vesting_period = mk_zkapp_with_vesting_period 1 let nonzero_vesting_period_ok () = match User_command.check_well_formedness + ~compile_config:Mina_compile_config.For_unit_tests.t ~genesis_constants:Genesis_constants.For_unit_tests.t (Zkapp_command zkapp_nonzero_vesting_period) with diff --git a/src/lib/mina_base/user_command.ml b/src/lib/mina_base/user_command.ml index 909fb53deb6..952d290ee74 100644 --- a/src/lib/mina_base/user_command.ml +++ b/src/lib/mina_base/user_command.ml @@ -230,9 +230,9 @@ let fee : t -> Currency.Fee.t = function let has_insufficient_fee ~minimum_fee t = Currency.Fee.(fee t < minimum_fee) -let 
is_disabled = function +let is_disabled ~(compile_config : Mina_compile_config.t) = function | Zkapp_command _ -> - Node_config_unconfigurable_constants.zkapps_disabled + compile_config.zkapps_disabled | _ -> false @@ -430,7 +430,8 @@ module Well_formedness_error = struct "Transaction type disabled" end -let check_well_formedness ~(genesis_constants : Genesis_constants.t) t : +let check_well_formedness ~(genesis_constants : Genesis_constants.t) + ~(compile_config : Mina_compile_config.t) t : (unit, Well_formedness_error.t list) result = let preds = let open Well_formedness_error in @@ -439,7 +440,7 @@ let check_well_formedness ~(genesis_constants : Genesis_constants.t) t : , Insufficient_fee ) ; (has_zero_vesting_period, Zero_vesting_period) ; (is_incompatible_version, Incompatible_version) - ; (is_disabled, Transaction_type_disabled) + ; (is_disabled ~compile_config, Transaction_type_disabled) ; (has_invalid_call_forest, Zkapp_invalid_call_forest) ] in diff --git a/src/lib/mina_graphql/mina_graphql.ml b/src/lib/mina_graphql/mina_graphql.ml index f39abf50342..cc1f8da66c5 100644 --- a/src/lib/mina_graphql/mina_graphql.ml +++ b/src/lib/mina_graphql/mina_graphql.ml @@ -936,8 +936,10 @@ module Mutations = struct "Could not find an archive process to connect to" in let%map () = - Mina_lib.Archive_client.dispatch_precomputed_block archive_location - block + Mina_lib.Archive_client.dispatch_precomputed_block + ~compile_config: + (Mina_lib.config mina).precomputed_values.compile_config + archive_location block |> Deferred.Result.map_error ~f:Error.to_string_hum in () ) @@ -967,8 +969,10 @@ module Mutations = struct "Could not find an archive process to connect to" in let%map () = - Mina_lib.Archive_client.dispatch_extensional_block archive_location - block + Mina_lib.Archive_client.dispatch_extensional_block + ~compile_config: + (Mina_lib.config mina).precomputed_values.compile_config + archive_location block |> Deferred.Result.map_error ~f:Error.to_string_hum in () ) diff --git a/src/lib/mina_lib/archive_client.ml b/src/lib/mina_lib/archive_client.ml index 7f7e49790e8..396f878ddfe 100644 --- a/src/lib/mina_lib/archive_client.ml +++ b/src/lib/mina_lib/archive_client.ml @@ -2,7 +2,7 @@ open Core_kernel open Async_kernel open Pipe_lib -let dispatch ?(max_tries = 5) ~logger +let dispatch ?(max_tries = 5) ~logger ~compile_config (archive_location : Host_and_port.t Cli_lib.Flag.Types.with_name) diff = let rec go tries_left errs = if Int.( <= ) tries_left 0 then @@ -20,7 +20,7 @@ let dispatch ?(max_tries = 5) ~logger [%sexp_of: (string * Host_and_port.t) * (string * string)] ) ) else match%bind - Daemon_rpcs.Client.dispatch Archive_lib.Rpc.t diff + Daemon_rpcs.Client.dispatch ~compile_config Archive_lib.Rpc.t diff archive_location.value with | Ok () -> @@ -33,7 +33,7 @@ let dispatch ?(max_tries = 5) ~logger in go max_tries [] -let make_dispatch_block rpc ?(max_tries = 5) +let make_dispatch_block ~compile_config rpc ?(max_tries = 5) (archive_location : Host_and_port.t Cli_lib.Flag.Types.with_name) block = let rec go tries_left errs = if Int.( <= ) tries_left 0 then @@ -51,7 +51,8 @@ let make_dispatch_block rpc ?(max_tries = 5) [%sexp_of: (string * Host_and_port.t) * (string * string)] ) ) else match%bind - Daemon_rpcs.Client.dispatch rpc block archive_location.value + Daemon_rpcs.Client.dispatch ~compile_config rpc block + archive_location.value with | Ok () -> return (Ok ()) @@ -111,7 +112,8 @@ let run ~logger ~precomputed_values , `Float (Time.Span.to_ms (Time.diff diff_time start)) ) ] ; match%map - 
dispatch archive_location ~logger (Transition_frontier diff) + dispatch ~compile_config:precomputed_values.compile_config + archive_location ~logger (Transition_frontier diff) with | Ok () -> [%log debug] diff --git a/src/lib/mina_lib/archive_client.mli b/src/lib/mina_lib/archive_client.mli index 612e04859be..0cebd18f41b 100644 --- a/src/lib/mina_lib/archive_client.mli +++ b/src/lib/mina_lib/archive_client.mli @@ -2,13 +2,15 @@ open Core open Pipe_lib val dispatch_precomputed_block : - ?max_tries:int + compile_config:Mina_compile_config.t + -> ?max_tries:int -> Host_and_port.t Cli_lib.Flag.Types.with_name -> Mina_block.Precomputed.t -> unit Async.Deferred.Or_error.t val dispatch_extensional_block : - ?max_tries:int + compile_config:Mina_compile_config.t + -> ?max_tries:int -> Host_and_port.t Cli_lib.Flag.Types.with_name -> Archive_lib.Extensional.Block.t -> unit Async.Deferred.Or_error.t diff --git a/src/lib/mina_lib/mina_lib.ml b/src/lib/mina_lib/mina_lib.ml index 84255950c33..c4fde594bb2 100644 --- a/src/lib/mina_lib/mina_lib.ml +++ b/src/lib/mina_lib/mina_lib.ml @@ -944,7 +944,8 @@ let add_full_transactions t user_commands = List.find_map user_commands ~f:(fun cmd -> match User_command.check_well_formedness - ~genesis_constants:t.config.precomputed_values.genesis_constants cmd + ~genesis_constants:t.config.precomputed_values.genesis_constants + ~compile_config:t.config.precomputed_values.compile_config cmd with | Ok () -> None @@ -976,6 +977,7 @@ let add_zkapp_transactions t (zkapp_commands : Zkapp_command.t list) = match User_command.check_well_formedness ~genesis_constants:t.config.precomputed_values.genesis_constants + ~compile_config:t.config.precomputed_values.compile_config (Zkapp_command cmd) with | Ok () -> @@ -1498,6 +1500,7 @@ let create ~commit_id ?wallets (config : Config.t) = let catchup_mode = if config.super_catchup then `Super else `Normal in let constraint_constants = config.precomputed_values.constraint_constants in let consensus_constants = config.precomputed_values.consensus_constants in + let compile_config = config.precomputed_values.compile_config in let block_window_duration = config.compile_config.block_window_duration in let monitor = Option.value ~default:(Monitor.create ()) config.monitor in Async.Scheduler.within' ~monitor (fun () -> @@ -1811,7 +1814,7 @@ let create ~commit_id ?wallets (config : Config.t) = ~pool_max_size: config.precomputed_values.genesis_constants.txpool_max_size ~genesis_constants:config.precomputed_values.genesis_constants - ~slot_tx_end + ~slot_tx_end ~compile_config in let first_received_message_signal = Ivar.create () in let online_status, notify_online_impl = @@ -1868,6 +1871,7 @@ let create ~commit_id ?wallets (config : Config.t) = ; genesis_constants = config.precomputed_values.genesis_constants ; constraint_constants ; block_window_duration + ; compile_config } in let sinks = (block_sink, tx_remote_sink, snark_remote_sink) in diff --git a/src/lib/mina_lib/tests/tests.ml b/src/lib/mina_lib/tests/tests.ml index 688ab9c89f6..1243f3e28fa 100644 --- a/src/lib/mina_lib/tests/tests.ml +++ b/src/lib/mina_lib/tests/tests.ml @@ -182,6 +182,7 @@ let%test_module "Epoch ledger sync tests" = ; genesis_constants = precomputed_values.genesis_constants ; constraint_constants ; block_window_duration = compile_config.block_window_duration + ; compile_config } in let _transaction_pool, tx_remote_sink, _tx_local_sink = @@ -190,7 +191,7 @@ let%test_module "Epoch ledger sync tests" = ~trust_system 
~pool_max_size:precomputed_values.genesis_constants.txpool_max_size ~genesis_constants:precomputed_values.genesis_constants - ~slot_tx_end:None + ~slot_tx_end:None ~compile_config in Network_pool.Transaction_pool.create ~config ~constraint_constants ~consensus_constants ~time_controller ~logger diff --git a/src/lib/network_pool/intf.ml b/src/lib/network_pool/intf.ml index c587756a216..63af3e4b0cc 100644 --- a/src/lib/network_pool/intf.ml +++ b/src/lib/network_pool/intf.ml @@ -384,6 +384,7 @@ module type Transaction_resource_pool_intf = sig -> verifier:Verifier.t -> genesis_constants:Genesis_constants.t -> slot_tx_end:Mina_numbers.Global_slot_since_hard_fork.t option + -> compile_config:Mina_compile_config.t -> Config.t val member : t -> Transaction_hash.User_command_with_valid_signature.t -> bool diff --git a/src/lib/network_pool/transaction_pool.ml b/src/lib/network_pool/transaction_pool.ml index e99224ecdb8..00ce1b5120b 100644 --- a/src/lib/network_pool/transaction_pool.ml +++ b/src/lib/network_pool/transaction_pool.ml @@ -289,18 +289,20 @@ struct ; verifier : (Verifier.t[@sexp.opaque]) ; genesis_constants : Genesis_constants.t ; slot_tx_end : Mina_numbers.Global_slot_since_hard_fork.t option + ; compile_config : Mina_compile_config.t } [@@deriving sexp_of] (* remove next line if there's a way to force [@@deriving make] write a named parameter instead of an optional parameter *) let make ~trust_system ~pool_max_size ~verifier ~genesis_constants - ~slot_tx_end = + ~slot_tx_end ~compile_config = { trust_system ; pool_max_size ; verifier ; genesis_constants ; slot_tx_end + ; compile_config } end @@ -1090,7 +1092,8 @@ struct ~f:(fun acc user_cmd -> match User_command.check_well_formedness - ~genesis_constants:t.config.genesis_constants user_cmd + ~genesis_constants:t.config.genesis_constants + ~compile_config:t.config.compile_config user_cmd with | Ok () -> acc @@ -1658,6 +1661,8 @@ let%test_module _ = let genesis_constants = precomputed_values.genesis_constants + let compile_config = precomputed_values.compile_config + let minimum_fee = Currency.Fee.to_nanomina_int genesis_constants.minimum_user_command_fee @@ -1919,7 +1924,7 @@ let%test_module _ = let trust_system = Trust_system.null () in let config = Test.Resource_pool.make_config ~trust_system ~pool_max_size ~verifier - ~genesis_constants ~slot_tx_end + ~genesis_constants ~slot_tx_end ~compile_config in let pool_, _, _ = Test.create ~config ~logger ~constraint_constants ~consensus_constants diff --git a/src/lib/snark_worker/dune b/src/lib/snark_worker/dune index 35f564ad95a..db1f2151555 100644 --- a/src/lib/snark_worker/dune +++ b/src/lib/snark_worker/dune @@ -42,7 +42,6 @@ mina_ledger transaction_snark_work error_json - mina_node_config.unconfigurable_constants mina_state transaction_protocol_state ppx_version.runtime diff --git a/src/lib/snark_worker/functor.ml b/src/lib/snark_worker/functor.ml index aa60e19bbff..f4ea0d862f5 100644 --- a/src/lib/snark_worker/functor.ml +++ b/src/lib/snark_worker/functor.ml @@ -118,21 +118,19 @@ module Make (Inputs : Intf.Inputs_intf) : ; prover = public_key } ) - let dispatch rpc shutdown_on_disconnect query address = + let dispatch ~(compile_config : Mina_compile_config.t) rpc + shutdown_on_disconnect query address = let%map res = Rpc.Connection.with_client - ~handshake_timeout: - (Time.Span.of_sec - Node_config_unconfigurable_constants.rpc_handshake_timeout_sec ) + ~handshake_timeout:compile_config.rpc_handshake_timeout ~heartbeat_config: (Rpc.Connection.Heartbeat_config.create ~timeout: - 
(Time_ns.Span.of_sec - Node_config_unconfigurable_constants.rpc_heartbeat_timeout_sec ) + ( compile_config.rpc_heartbeat_timeout |> Time.Span.to_sec + |> Time_ns.Span.of_sec ) ~send_every: - (Time_ns.Span.of_sec - Node_config_unconfigurable_constants - .rpc_heartbeat_send_every_sec ) + ( compile_config.rpc_heartbeat_send_every |> Time.Span.to_sec + |> Time_ns.Span.of_sec ) () ) (Tcp.Where_to_connect.of_host_and_port address) (fun conn -> Rpc.Rpc.dispatch rpc conn query) @@ -228,7 +226,8 @@ module Make (Inputs : Intf.Inputs_intf) : let main (module Rpcs_versioned : Intf.Rpcs_versioned_S with type Work.ledger_proof = Inputs.Ledger_proof.t ) ~logger - ~proof_level ~constraint_constants daemon_address shutdown_on_disconnect = + ~proof_level ~constraint_constants ~compile_config daemon_address + shutdown_on_disconnect = let%bind state = Worker_state.create ~constraint_constants ~proof_level () in @@ -270,8 +269,8 @@ module Make (Inputs : Intf.Inputs_intf) : !"Snark worker using daemon $addr" ~metadata:[ ("addr", `String (Host_and_port.to_string daemon_address)) ] ; match%bind - dispatch Rpcs_versioned.Get_work.Latest.rpc shutdown_on_disconnect () - daemon_address + dispatch Rpcs_versioned.Get_work.Latest.rpc shutdown_on_disconnect + ~compile_config () daemon_address with | Error e -> log_and_retry "getting work" e (retry_pause 10.) go @@ -303,7 +302,8 @@ module Make (Inputs : Intf.Inputs_intf) : let%bind () = match%map dispatch Rpcs_versioned.Failed_to_generate_snark.Latest.rpc - shutdown_on_disconnect (e, work, public_key) daemon_address + ~compile_config shutdown_on_disconnect (e, work, public_key) + daemon_address with | Error e -> [%log error] @@ -327,7 +327,7 @@ module Make (Inputs : Intf.Inputs_intf) : ] ; let rec submit_work () = match%bind - dispatch Rpcs_versioned.Submit_work.Latest.rpc + dispatch ~compile_config Rpcs_versioned.Submit_work.Latest.rpc shutdown_on_disconnect result daemon_address with | Error e -> @@ -365,13 +365,15 @@ module Make (Inputs : Intf.Inputs_intf) : let logger = Logger.create () ~metadata:[ ("process", `String "Snark Worker") ] in - let%bind.Deferred constraint_constants, proof_level = + let%bind.Deferred constraint_constants, proof_level, compile_config = let%map.Deferred config = Runtime_config.Constants.load_constants ?conf_dir ?cli_proof_level ~logger config_file in Runtime_config.Constants. 
- (constraint_constants config, proof_level config) + ( constraint_constants config + , proof_level config + , compile_config config ) in Option.value_map ~default:() conf_dir ~f:(fun conf_dir -> let logrotate_max_size = 1024 * 10 in @@ -390,7 +392,7 @@ module Make (Inputs : Intf.Inputs_intf) : Core.exit 0 ) ; main (module Rpcs_versioned) - ~logger ~proof_level ~constraint_constants daemon_port + ~logger ~proof_level ~constraint_constants ~compile_config daemon_port (Option.value ~default:true shutdown_on_disconnect)) let arguments ~proof_level ~daemon_address ~shutdown_on_disconnect = diff --git a/src/lib/transaction/transaction.ml b/src/lib/transaction/transaction.ml index 5fa84bc6181..b45b0361c3e 100644 --- a/src/lib/transaction/transaction.ml +++ b/src/lib/transaction/transaction.ml @@ -144,10 +144,10 @@ let valid_size ~genesis_constants (t : t) = | Fee_transfer _ | Coinbase _ -> Ok () -let check_well_formedness ~genesis_constants (t : t) = +let check_well_formedness ~genesis_constants ~compile_config (t : t) = match t with | Command cmd -> - User_command.check_well_formedness ~genesis_constants cmd + User_command.check_well_formedness ~genesis_constants ~compile_config cmd | Fee_transfer _ | Coinbase _ -> Ok () diff --git a/src/lib/transaction_inclusion_status/transaction_inclusion_status.ml b/src/lib/transaction_inclusion_status/transaction_inclusion_status.ml index 555bf91eeb6..ec3e3f1c6f0 100644 --- a/src/lib/transaction_inclusion_status/transaction_inclusion_status.ml +++ b/src/lib/transaction_inclusion_status/transaction_inclusion_status.ml @@ -119,7 +119,7 @@ let%test_module "transaction_status" = let config = Transaction_pool.Resource_pool.make_config ~trust_system ~pool_max_size ~verifier ~genesis_constants:precomputed_values.genesis_constants - ~slot_tx_end:None + ~slot_tx_end:None ~compile_config:precomputed_values.compile_config in let transaction_pool, _, local_sink = Transaction_pool.create ~config diff --git a/src/lib/transition_handler/block_sink.ml b/src/lib/transition_handler/block_sink.ml index dd9ac11e3a7..712f8df15a5 100644 --- a/src/lib/transition_handler/block_sink.ml +++ b/src/lib/transition_handler/block_sink.ml @@ -20,6 +20,7 @@ type block_sink_config = ; genesis_constants : Genesis_constants.t ; constraint_constants : Genesis_constants.Constraint_constants.t ; block_window_duration : Time.Span.t + ; compile_config : Mina_compile_config.t } type t = @@ -34,6 +35,7 @@ type t = ; genesis_constants : Genesis_constants.t ; constraint_constants : Genesis_constants.Constraint_constants.t ; block_window_duration : Time.Span.t + ; compile_config : Mina_compile_config.t } | Void @@ -56,6 +58,7 @@ let push sink (`Transition e, `Time_received tm, `Valid_cb cb) = ; genesis_constants ; constraint_constants ; block_window_duration + ; compile_config } -> O1trace.sync_thread "handle_block_gossip" @@ fun () -> @@ -150,7 +153,7 @@ let push sink (`Transition e, `Time_received tm, `Valid_cb cb) = List.exists transactions ~f:(fun txn -> match Mina_transaction.Transaction.check_well_formedness - ~genesis_constants txn.data + ~genesis_constants ~compile_config txn.data with | Ok () -> false @@ -213,6 +216,7 @@ let create ; genesis_constants ; constraint_constants ; block_window_duration + ; compile_config } = let rate_limiter = Network_pool.Rate_limiter.create @@ -235,6 +239,7 @@ let create ; genesis_constants ; constraint_constants ; block_window_duration + ; compile_config } ) let void = Void diff --git a/src/lib/transition_handler/block_sink.mli 
b/src/lib/transition_handler/block_sink.mli index e3586a01cd3..7941657ed80 100644 --- a/src/lib/transition_handler/block_sink.mli +++ b/src/lib/transition_handler/block_sink.mli @@ -23,6 +23,7 @@ type block_sink_config = ; genesis_constants : Genesis_constants.t ; constraint_constants : Genesis_constants.Constraint_constants.t ; block_window_duration : Time.Span.t + ; compile_config : Mina_compile_config.t } val create : From 16119d17775dfa7aa33ecee828f8d94befdaceb2 Mon Sep 17 00:00:00 2001 From: martyall Date: Mon, 7 Oct 2024 13:58:33 -0700 Subject: [PATCH 082/234] cli --- .../src/cli_entrypoint/mina_cli_entrypoint.ml | 40 +- src/app/cli/src/init/client.ml | 426 +++++++++++++----- src/app/cli/src/init/itn.ml | 17 +- src/app/cli/src/init/mina_run.ml | 14 +- 4 files changed, 373 insertions(+), 124 deletions(-) diff --git a/src/app/cli/src/cli_entrypoint/mina_cli_entrypoint.ml b/src/app/cli/src/cli_entrypoint/mina_cli_entrypoint.ml index 7bf9d25e446..9844eeeefd1 100644 --- a/src/app/cli/src/cli_entrypoint/mina_cli_entrypoint.ml +++ b/src/app/cli/src/cli_entrypoint/mina_cli_entrypoint.ml @@ -49,6 +49,22 @@ let plugin_flag = times" else Command.Param.return [] +let with_itn_logger ~itn_features ~(compile_config : Mina_compile_config.t) + ~logger = + if itn_features then + let conf = + Logger.make_itn_logger_config + ~rpc_handshake_timeout:compile_config.rpc_handshake_timeout + ~rpc_heartbeat_timeout: + ( compile_config.rpc_heartbeat_timeout |> Time.Span.to_sec + |> Time_ns.Span.of_sec ) + ~rpc_heartbeat_send_every: + ( compile_config.rpc_heartbeat_send_every |> Time.Span.to_sec + |> Time_ns.Span.of_sec ) + in + Logger.with_itn conf logger + else logger + let setup_daemon logger ~itn_features = let open Command.Let_syntax in let open Cli_lib.Arg_type in @@ -648,6 +664,7 @@ let setup_daemon logger ~itn_features = in let constraint_constants = precomputed_values.consensus_constants in let compile_config = precomputed_values.compile_config in + let logger = with_itn_logger ~itn_features ~compile_config ~logger in constraint_constants.block_window_duration_ms |> Block_time.Span.to_ms |> Float.of_int64 |> Time.Span.of_ms |> Mina_metrics.initialize_all ; @@ -1552,15 +1569,15 @@ let internal_commands ~itn_features logger = (let open Command.Let_syntax in let%map_open config_file = Cli_lib.Flag.conf_file in fun () -> - let logger = Logger.create () in let open Deferred.Let_syntax in - let%bind constraint_constants, proof_level = + let%bind constraint_constants, proof_level, compile_config = let%map conf = Runtime_config.Constants.load_constants ~logger config_file in Runtime_config.Constants. - (constraint_constants conf, proof_level conf) + (constraint_constants conf, proof_level conf, compile_config conf) in + let logger = with_itn_logger ~itn_features ~compile_config ~logger in Parallel.init_master () ; match%bind Reader.read_sexp (Lazy.force Reader.stdin) with | `Ok sexp -> @@ -1585,14 +1602,14 @@ let internal_commands ~itn_features logger = fun () -> let open Deferred.Let_syntax in - let logger = Logger.create () in - let%bind constraint_constants, proof_level = + let%bind constraint_constants, proof_level, compile_config = let%map conf = Runtime_config.Constants.load_constants ~logger config_file in Runtime_config.Constants. 
- (constraint_constants conf, proof_level conf) + (constraint_constants conf, proof_level conf, compile_config conf) in + let logger = with_itn_logger ~itn_features ~compile_config ~logger in Parallel.init_master () ; match%bind Reader.with_file filename ~f:(fun reader -> @@ -1642,14 +1659,14 @@ let internal_commands ~itn_features logger = and config_file = Cli_lib.Flag.conf_file in fun () -> let open Async in - let logger = Logger.create () in - let%bind constraint_constants, proof_level = + let%bind constraint_constants, proof_level, compile_config = let%map conf = Runtime_config.Constants.load_constants ~logger config_file in Runtime_config.Constants. - (constraint_constants conf, proof_level conf) + (constraint_constants conf, proof_level conf, compile_config conf) in + let logger = with_itn_logger ~itn_features ~compile_config ~logger in Parallel.init_master () ; let%bind conf_dir = Unix.mkdtemp "/tmp/mina-verifier" in let mode = @@ -1802,7 +1819,6 @@ let internal_commands ~itn_features logger = fun () -> let open Deferred.Let_syntax in Parallel.init_master () ; - let logger = Logger.create () in let conf_dir = Mina_lib.Conf_dir.compute_conf_dir conf_dir in let cli_proof_level = Genesis_constants.Proof_level.Full in let%bind precomputed_values, _ = @@ -1810,6 +1826,10 @@ let internal_commands ~itn_features logger = ~conf_dir ?genesis_dir ~cli_proof_level ~itn_features config_file |> Deferred.Or_error.ok_exn in + let logger = + with_itn_logger ~itn_features + ~compile_config:precomputed_values.compile_config ~logger + in let pids = Child_processes.Termination.create_pid_table () in let%bind prover = (* We create a prover process (unnecessarily) here, to have a more diff --git a/src/app/cli/src/init/client.ml b/src/app/cli/src/init/client.ml index 9771267c082..ea361b4ae25 100644 --- a/src/app/cli/src/init/client.ml +++ b/src/app/cli/src/init/client.ml @@ -42,10 +42,19 @@ let or_error_str ~f_ok ~error = function let stop_daemon = let open Deferred.Let_syntax in let open Daemon_rpcs in - let open Command.Param in Command.async ~summary:"Stop the daemon" - (Cli_lib.Background_daemon.rpc_init (return ()) ~f:(fun port () -> - let%map res = Daemon_rpcs.Client.dispatch Stop_daemon.rpc () port in + (Cli_lib.Background_daemon.rpc_init Cli_lib.Flag.conf_file + ~f:(fun port config_file -> + let%bind compile_config = + let logger = Logger.create () in + let%map conf = + Runtime_config.Constants.load_constants ~logger config_file + in + Runtime_config.Constants.compile_config conf + in + let%map res = + Daemon_rpcs.Client.dispatch ~compile_config Stop_daemon.rpc () port + in printf "%s" (or_error_str res ~f_ok:(fun _ -> "Daemon stopping\n") @@ -168,12 +177,21 @@ let get_trust_status = (required Cli_lib.Arg_type.ip_address) in let json_flag = Cli_lib.Flag.json in - let flags = Args.zip2 address_flag json_flag in + let config_file = Cli_lib.Flag.conf_file in + let flags = Args.zip3 config_file address_flag json_flag in Command.async ~summary:"Get the trust status associated with an IP address" - (Cli_lib.Background_daemon.rpc_init flags ~f:(fun port (ip_address, json) -> + (Cli_lib.Background_daemon.rpc_init flags + ~f:(fun port (config_file, ip_address, json) -> + let%bind compile_config = + let logger = Logger.create () in + let%map conf = + Runtime_config.Constants.load_constants ~logger config_file + in + Runtime_config.Constants.compile_config conf + in match%map - Daemon_rpcs.Client.dispatch Daemon_rpcs.Get_trust_status.rpc - ip_address port + Daemon_rpcs.Client.dispatch ~compile_config + 
Daemon_rpcs.Get_trust_status.rpc ip_address port with | Ok statuses -> print_trust_statuses @@ -203,13 +221,22 @@ let get_trust_status_all = ~doc:"Only show trust statuses whose trust score is nonzero" in let json_flag = Cli_lib.Flag.json in - let flags = Args.zip2 nonzero_flag json_flag in + let config_file = Cli_lib.Flag.conf_file in + let flags = Args.zip3 config_file nonzero_flag json_flag in Command.async ~summary:"Get trust statuses for all peers known to the trust system" - (Cli_lib.Background_daemon.rpc_init flags ~f:(fun port (nonzero, json) -> + (Cli_lib.Background_daemon.rpc_init flags + ~f:(fun port (config_file, nonzero, json) -> + let%bind compile_config = + let logger = Logger.create () in + let%map conf = + Runtime_config.Constants.load_constants ~logger config_file + in + Runtime_config.Constants.compile_config conf + in match%map - Daemon_rpcs.Client.dispatch Daemon_rpcs.Get_trust_status_all.rpc () - port + Daemon_rpcs.Client.dispatch ~compile_config + Daemon_rpcs.Get_trust_status_all.rpc () port with | Ok ip_trust_statuses -> (* always round the trust scores for display *) @@ -240,12 +267,21 @@ let reset_trust_status = (required Cli_lib.Arg_type.ip_address) in let json_flag = Cli_lib.Flag.json in - let flags = Args.zip2 address_flag json_flag in + let config_file = Cli_lib.Flag.conf_file in + let flags = Args.zip3 config_file address_flag json_flag in Command.async ~summary:"Reset the trust status associated with an IP address" - (Cli_lib.Background_daemon.rpc_init flags ~f:(fun port (ip_address, json) -> + (Cli_lib.Background_daemon.rpc_init flags + ~f:(fun port (config_file, ip_address, json) -> + let%bind compile_config = + let logger = Logger.create () in + let%map conf = + Runtime_config.Constants.load_constants ~logger config_file + in + Runtime_config.Constants.compile_config conf + in match%map - Daemon_rpcs.Client.dispatch Daemon_rpcs.Reset_trust_status.rpc - ip_address port + Daemon_rpcs.Client.dispatch ~compile_config + Daemon_rpcs.Reset_trust_status.rpc ip_address port with | Ok status -> print_trust_statuses status json @@ -261,17 +297,25 @@ let get_public_keys = ~doc:"Show extra details (eg. 
balance, nonce) in addition to public keys" in let error_ctx = "Failed to get public-keys" in + let config_file = Cli_lib.Flag.conf_file in Command.async ~summary:"Get public keys" (Cli_lib.Background_daemon.rpc_init - (Args.zip2 with_details_flag Cli_lib.Flag.json) - ~f:(fun port (is_balance_included, json) -> + (Args.zip3 config_file with_details_flag Cli_lib.Flag.json) + ~f:(fun port (config_file, is_balance_included, json) -> + let%bind compile_config = + let logger = Logger.create () in + let%map conf = + Runtime_config.Constants.load_constants ~logger config_file + in + Runtime_config.Constants.compile_config conf + in if is_balance_included then - Daemon_rpcs.Client.dispatch_pretty_message ~json + Daemon_rpcs.Client.dispatch_pretty_message ~compile_config ~json ~join_error:Or_error.join ~error_ctx (module Cli_lib.Render.Public_key_with_details) Get_public_keys_with_details.rpc () port else - Daemon_rpcs.Client.dispatch_pretty_message ~json + Daemon_rpcs.Client.dispatch_pretty_message ~compile_config ~json ~join_error:Or_error.join ~error_ctx (module Cli_lib.Render.String_list_formatter) Get_public_keys.rpc () port ) ) @@ -314,10 +358,19 @@ let verify_receipt = ~doc:"TOKEN_ID The token ID for the account" (optional_with_default Token_id.default Cli_lib.Arg_type.token_id) in + let config_file = Cli_lib.Flag.conf_file in Command.async ~summary:"Verify a receipt of a sent payment" (Cli_lib.Background_daemon.rpc_init - (Args.zip4 payment_path_flag proof_path_flag address_flag token_flag) - ~f:(fun port (payment_path, proof_path, pk, token_id) -> + (Args.zip5 config_file payment_path_flag proof_path_flag address_flag + token_flag ) + ~f:(fun port (config_file, payment_path, proof_path, pk, token_id) -> + let%bind compile_config = + let logger = Logger.create () in + let%map conf = + Runtime_config.Constants.load_constants ~logger config_file + in + Runtime_config.Constants.compile_config conf + in let account_id = Account_id.create pk token_id in let dispatch_result = let open Deferred.Or_error.Let_syntax in @@ -342,7 +395,7 @@ let verify_receipt = ~error: (sprintf "Proof file %s has invalid json format" proof_path) in - Daemon_rpcs.Client.dispatch Verify_proof.rpc + Daemon_rpcs.Client.dispatch ~compile_config Verify_proof.rpc (account_id, payment, proof) port in @@ -354,13 +407,16 @@ let verify_receipt = ) let get_nonce : - rpc:(Account_id.t, Account.Nonce.t option Or_error.t) Rpc.Rpc.t + compile_config:Mina_compile_config.t + -> rpc:(Account_id.t, Account.Nonce.t option Or_error.t) Rpc.Rpc.t -> Account_id.t -> Host_and_port.t -> (Account.Nonce.t, string) Deferred.Result.t = - fun ~rpc account_id port -> + fun ~compile_config ~rpc account_id port -> let open Deferred.Let_syntax in - let%map res = Daemon_rpcs.Client.dispatch rpc account_id port in + let%map res = + Daemon_rpcs.Client.dispatch ~compile_config rpc account_id port + in match Or_error.join res with | Ok (Some n) -> Ok n @@ -382,12 +438,22 @@ let get_nonce_cmd = ~doc:"TOKEN_ID The token ID for the account" (optional_with_default Token_id.default Cli_lib.Arg_type.token_id) in - let flags = Args.zip2 address_flag token_flag in + let config_file = Cli_lib.Flag.conf_file in + let flags = Args.zip3 config_file address_flag token_flag in Command.async ~summary:"Get the current nonce for an account" - (Cli_lib.Background_daemon.rpc_init flags ~f:(fun port (pk, token_flag) -> + (Cli_lib.Background_daemon.rpc_init flags + ~f:(fun port (config_file, pk, token_flag) -> let account_id = Account_id.create pk token_flag in + let%bind 
compile_config = + let logger = Logger.create () in + let%map conf = + Runtime_config.Constants.load_constants ~logger config_file + in + Runtime_config.Constants.compile_config conf + in match%bind - get_nonce ~rpc:Daemon_rpcs.Get_nonce.rpc account_id port + get_nonce ~compile_config ~rpc:Daemon_rpcs.Get_nonce.rpc account_id + port with | Error e -> eprintf "Failed to get nonce\n%s\n" e ; @@ -398,11 +464,21 @@ let get_nonce_cmd = let status = let open Daemon_rpcs in - let flag = Args.zip2 Cli_lib.Flag.json Cli_lib.Flag.performance in + let flag = + Args.zip3 Cli_lib.Flag.conf_file Cli_lib.Flag.json Cli_lib.Flag.performance + in Command.async ~summary:"Get running daemon status" - (Cli_lib.Background_daemon.rpc_init flag ~f:(fun port (json, performance) -> - Daemon_rpcs.Client.dispatch_pretty_message ~json ~join_error:Fn.id - ~error_ctx:"Failed to get status" + (Cli_lib.Background_daemon.rpc_init flag + ~f:(fun port (config_file, json, performance) -> + let%bind compile_config = + let logger = Logger.create () in + let%map conf = + Runtime_config.Constants.load_constants ~logger config_file + in + Runtime_config.Constants.compile_config conf + in + Daemon_rpcs.Client.dispatch_pretty_message ~compile_config ~json + ~join_error:Fn.id ~error_ctx:"Failed to get status" (module Daemon_rpcs.Types.Status) Get_status.rpc (if performance then `Performance else `None) @@ -410,18 +486,29 @@ let status = let status_clear_hist = let open Daemon_rpcs in - let flag = Args.zip2 Cli_lib.Flag.json Cli_lib.Flag.performance in + let flag = + Args.zip3 Cli_lib.Flag.conf_file Cli_lib.Flag.json Cli_lib.Flag.performance + in Command.async ~summary:"Clear histograms reported in status" - (Cli_lib.Background_daemon.rpc_init flag ~f:(fun port (json, performance) -> - Daemon_rpcs.Client.dispatch_pretty_message ~json ~join_error:Fn.id + (Cli_lib.Background_daemon.rpc_init flag + ~f:(fun port (config_file, json, performance) -> + let%bind compile_config = + let logger = Logger.create () in + let%map conf = + Runtime_config.Constants.load_constants ~logger config_file + in + Runtime_config.Constants.compile_config conf + in + Daemon_rpcs.Client.dispatch_pretty_message ~compile_config ~json + ~join_error:Fn.id ~error_ctx:"Failed to clear histograms reported in status" (module Daemon_rpcs.Types.Status) Clear_hist_status.rpc (if performance then `Performance else `None) port ) ) -let get_nonce_exn ~rpc public_key port = - match%bind get_nonce ~rpc public_key port with +let get_nonce_exn ~compile_config ~rpc public_key port = + match%bind get_nonce ~compile_config ~rpc public_key port with | Error e -> eprintf "Failed to get nonce\n%s\n" e ; exit 3 @@ -470,8 +557,15 @@ let batch_send_payments = (List.init 3 ~f:(fun _ -> sample_info ())) ) ) ; exit 5 in - let main port (privkey_path, payments_path) = + let main port (config_file, privkey_path, payments_path) = let open Deferred.Let_syntax in + let%bind compile_config = + let logger = Logger.create () in + let%map conf = + Runtime_config.Constants.load_constants ~logger config_file + in + Runtime_config.Constants.compile_config conf + in let%bind keypair = Secrets.Keypair.Terminal_stdin.read_exn ~which:"Mina keypair" privkey_path and infos = get_infos payments_path in @@ -486,8 +580,8 @@ let batch_send_payments = ~body:(Payment { receiver_pk; amount }) ~sign_choice:(User_command_input.Sign_choice.Keypair keypair) () ) in - Daemon_rpcs.Client.dispatch_with_message Daemon_rpcs.Send_user_commands.rpc - ts port + Daemon_rpcs.Client.dispatch_with_message ~compile_config + 
Daemon_rpcs.Send_user_commands.rpc ts port ~success:(fun _ -> "Successfully enqueued payments in pool") ~error:(fun e -> sprintf "Failed to send payments %s" (Error.to_string_hum e) ) @@ -495,7 +589,8 @@ let batch_send_payments = in Command.async ~summary:"Send multiple payments from a file" (Cli_lib.Background_daemon.rpc_init - (Args.zip2 Cli_lib.Flag.privkey_read_path payment_path_flag) + (Args.zip3 Cli_lib.Flag.conf_file Cli_lib.Flag.privkey_read_path + payment_path_flag ) ~f:main ) let transaction_id_to_string id = @@ -781,14 +876,24 @@ let export_ledger = Command.Param.(anon (ledger_args %: t)) in let plaintext_flag = Cli_lib.Flag.plaintext in - let flags = Args.zip3 state_hash_flag plaintext_flag ledger_kind in + let flags = + Args.zip4 Cli_lib.Flag.conf_file state_hash_flag plaintext_flag ledger_kind + in Command.async ~summary: "Print the specified ledger (default: staged ledger at the best tip). \ Note: Exporting snarked ledger is an expensive operation and can take a \ few seconds" (Cli_lib.Background_daemon.rpc_init flags - ~f:(fun port (state_hash, plaintext, ledger_kind) -> + ~f:(fun port (config_file, state_hash, plaintext, ledger_kind) -> + let open Deferred.Let_syntax in + let%bind compile_config = + let logger = Logger.create () in + let%map conf = + Runtime_config.Constants.load_constants ~logger config_file + in + Runtime_config.Constants.compile_config conf + in let check_for_state_hash () = if Option.is_some state_hash then ( Format.eprintf "A state hash should not be given for %s@." @@ -801,23 +906,25 @@ let export_ledger = let state_hash = Option.map ~f:State_hash.of_base58_check_exn state_hash in - Daemon_rpcs.Client.dispatch Daemon_rpcs.Get_ledger.rpc state_hash - port + Daemon_rpcs.Client.dispatch ~compile_config + Daemon_rpcs.Get_ledger.rpc state_hash port | "snarked-ledger" -> let state_hash = Option.map ~f:State_hash.of_base58_check_exn state_hash in printf "Generating snarked ledger(this may take a few seconds)...\n" ; - Daemon_rpcs.Client.dispatch Daemon_rpcs.Get_snarked_ledger.rpc - state_hash port + Daemon_rpcs.Client.dispatch ~compile_config + Daemon_rpcs.Get_snarked_ledger.rpc state_hash port | "staking-epoch-ledger" -> check_for_state_hash () ; - Daemon_rpcs.Client.dispatch Daemon_rpcs.Get_staking_ledger.rpc + Daemon_rpcs.Client.dispatch ~compile_config + Daemon_rpcs.Get_staking_ledger.rpc Daemon_rpcs.Get_staking_ledger.Current port | "next-epoch-ledger" -> check_for_state_hash () ; - Daemon_rpcs.Client.dispatch Daemon_rpcs.Get_staking_ledger.rpc + Daemon_rpcs.Client.dispatch ~compile_config + Daemon_rpcs.Get_staking_ledger.rpc Daemon_rpcs.Get_staking_ledger.Next port | _ -> (* unreachable *) @@ -969,15 +1076,21 @@ let constraint_system_digests = Deferred.unit ) let snark_job_list = - let open Deferred.Let_syntax in - let open Command.Param in Command.async ~summary: "List of snark jobs in JSON format that are yet to be included in the \ blocks" - (Cli_lib.Background_daemon.rpc_init (return ()) ~f:(fun port () -> + (Cli_lib.Background_daemon.rpc_init Cli_lib.Flag.conf_file + ~f:(fun port config_file -> + let%bind compile_config = + let logger = Logger.create () in + let%map conf = + Runtime_config.Constants.load_constants ~logger config_file + in + Runtime_config.Constants.compile_config conf + in match%map - Daemon_rpcs.Client.dispatch_join_errors + Daemon_rpcs.Client.dispatch_join_errors ~compile_config Daemon_rpcs.Snark_job_list.rpc () port with | Ok str -> @@ -1098,13 +1211,20 @@ let pending_snark_work = print_string (Yojson.Safe.to_string lst) ) ) ) 
let start_tracing = - let open Deferred.Let_syntax in - let open Command.Param in Command.async ~summary:"Start async tracing to $config-directory/trace/$pid.trace" - (Cli_lib.Background_daemon.rpc_init (return ()) ~f:(fun port () -> + (Cli_lib.Background_daemon.rpc_init Cli_lib.Flag.conf_file + ~f:(fun port config_file -> + let%bind compile_config = + let logger = Logger.create () in + let%map conf = + Runtime_config.Constants.load_constants ~logger config_file + in + Runtime_config.Constants.compile_config conf + in match%map - Daemon_rpcs.Client.dispatch Daemon_rpcs.Start_tracing.rpc () port + Daemon_rpcs.Client.dispatch ~compile_config + Daemon_rpcs.Start_tracing.rpc () port with | Ok () -> print_endline "Daemon started tracing!" @@ -1112,12 +1232,19 @@ let start_tracing = Daemon_rpcs.Client.print_rpc_error e ) ) let stop_tracing = - let open Deferred.Let_syntax in - let open Command.Param in Command.async ~summary:"Stop async tracing" - (Cli_lib.Background_daemon.rpc_init (return ()) ~f:(fun port () -> + (Cli_lib.Background_daemon.rpc_init Cli_lib.Flag.conf_file + ~f:(fun port config_file -> + let%bind compile_config = + let logger = Logger.create () in + let%map conf = + Runtime_config.Constants.load_constants ~logger config_file + in + Runtime_config.Constants.compile_config conf + in match%map - Daemon_rpcs.Client.dispatch Daemon_rpcs.Stop_tracing.rpc () port + Daemon_rpcs.Client.dispatch ~compile_config + Daemon_rpcs.Stop_tracing.rpc () port with | Ok () -> print_endline "Daemon stopped printing!" @@ -1125,16 +1252,22 @@ let stop_tracing = Daemon_rpcs.Client.print_rpc_error e ) ) let start_internal_tracing = - let open Deferred.Let_syntax in - let open Command.Param in Command.async ~summary: "Start internal tracing to \ $config-directory/internal-tracing/internal-trace.jsonl" - (Cli_lib.Background_daemon.rpc_init (return ()) ~f:(fun port () -> + (Cli_lib.Background_daemon.rpc_init Cli_lib.Flag.conf_file + ~f:(fun port config_file -> + let%bind compile_config = + let logger = Logger.create () in + let%map conf = + Runtime_config.Constants.load_constants ~logger config_file + in + Runtime_config.Constants.compile_config conf + in match%map - Daemon_rpcs.Client.dispatch Daemon_rpcs.Start_internal_tracing.rpc () - port + Daemon_rpcs.Client.dispatch ~compile_config + Daemon_rpcs.Start_internal_tracing.rpc () port with | Ok () -> print_endline "Daemon internal started tracing!" @@ -1142,13 +1275,19 @@ let start_internal_tracing = Daemon_rpcs.Client.print_rpc_error e ) ) let stop_internal_tracing = - let open Deferred.Let_syntax in - let open Command.Param in Command.async ~summary:"Stop internal tracing" - (Cli_lib.Background_daemon.rpc_init (return ()) ~f:(fun port () -> + (Cli_lib.Background_daemon.rpc_init Cli_lib.Flag.conf_file + ~f:(fun port config_file -> + let%bind compile_config = + let logger = Logger.create () in + let%map conf = + Runtime_config.Constants.load_constants ~logger config_file + in + Runtime_config.Constants.compile_config conf + in match%map - Daemon_rpcs.Client.dispatch Daemon_rpcs.Stop_internal_tracing.rpc () - port + Daemon_rpcs.Client.dispatch ~compile_config + Daemon_rpcs.Stop_internal_tracing.rpc () port with | Ok () -> print_endline "Daemon internal tracing stopped!" 
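Every hunk in this file so far repeats the same boilerplate: read the runtime config named by --config-file, project out the compile config with Runtime_config.Constants.compile_config, and only then dispatch the RPC with ~compile_config. A later patch in this series ("Address George's PR comments") factors that repetition into a small helper at the top of client.ml; a minimal sketch of that helper, using the same Runtime_config.Constants API these hunks rely on:

    let load_compile_config ?(logger = Logger.create ()) config_file =
      (* read the runtime config files and keep only the compile-time constants *)
      let%map conf = Runtime_config.Constants.load_constants ~logger config_file in
      Runtime_config.Constants.compile_config conf

With it, each call site reduces to a single "let%bind compile_config = load_compile_config config_file in" before the corresponding Daemon_rpcs.Client.dispatch ~compile_config call.
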
@@ -1255,7 +1394,6 @@ let import_key = ] and privkey_path = Cli_lib.Flag.privkey_read_path in fun () -> - let open Deferred.Let_syntax in let initial_password = ref None in let do_graphql graphql_endpoint = let%bind password = @@ -1385,7 +1523,6 @@ let export_key = (key will be exported using the same password)." (Cli_lib.Background_daemon.graphql_init flags ~f:(fun _ (export_path, pk, conf_dir) -> - let open Deferred.Let_syntax in let%bind home = Sys.home_directory () in let conf_dir = Option.value @@ -1403,7 +1540,6 @@ let export_key = "Password for exported account: " ~env:Secrets.Keypair.env ) in let%bind account = - let open Deferred.Result.Let_syntax in let%bind _ = Secrets.Wallets.unlock wallets ~needle:pk ~password in Secrets.Wallets.find_identity wallets ~needle:pk |> Result.of_option ~error:`Not_found @@ -1722,10 +1858,20 @@ let trustlist_add = let open Deferred.Let_syntax in let open Daemon_rpcs in Command.async ~summary:"Add an IP to the trustlist" - (Cli_lib.Background_daemon.rpc_init trustlist_ip_flag - ~f:(fun port trustlist_ip -> + (Cli_lib.Background_daemon.rpc_init + (Args.zip2 Cli_lib.Flag.conf_file trustlist_ip_flag) + ~f:(fun port (config_file, trustlist_ip) -> let trustlist_ip_string = Unix.Cidr.to_string trustlist_ip in - match%map Client.dispatch Add_trustlist.rpc trustlist_ip port with + let%bind compile_config = + let logger = Logger.create () in + let%map conf = + Runtime_config.Constants.load_constants ~logger config_file + in + Runtime_config.Constants.compile_config conf + in + match%map + Client.dispatch ~compile_config Add_trustlist.rpc trustlist_ip port + with | Ok (Ok ()) -> printf "Added %s to client trustlist" trustlist_ip_string | Ok (Error e) -> @@ -1739,10 +1885,21 @@ let trustlist_remove = let open Deferred.Let_syntax in let open Daemon_rpcs in Command.async ~summary:"Remove a CIDR mask from the trustlist" - (Cli_lib.Background_daemon.rpc_init trustlist_ip_flag - ~f:(fun port trustlist_ip -> + (Cli_lib.Background_daemon.rpc_init + (Args.zip2 Cli_lib.Flag.conf_file trustlist_ip_flag) + ~f:(fun port (config_file, trustlist_ip) -> let trustlist_ip_string = Unix.Cidr.to_string trustlist_ip in - match%map Client.dispatch Remove_trustlist.rpc trustlist_ip port with + let%bind compile_config = + let logger = Logger.create () in + let%map conf = + Runtime_config.Constants.load_constants ~logger config_file + in + Runtime_config.Constants.compile_config conf + in + match%map + Client.dispatch ~compile_config Remove_trustlist.rpc trustlist_ip + port + with | Ok (Ok ()) -> printf "Removed %s to client trustlist" trustlist_ip_string | Ok (Error e) -> @@ -1753,12 +1910,20 @@ let trustlist_remove = (Error.to_string_hum e) ) ) let trustlist_list = - let open Deferred.Let_syntax in let open Daemon_rpcs in - let open Command.Param in Command.async ~summary:"List the CIDR masks in the trustlist" - (Cli_lib.Background_daemon.rpc_init (return ()) ~f:(fun port () -> - match%map Client.dispatch Get_trustlist.rpc () port with + (Cli_lib.Background_daemon.rpc_init Cli_lib.Flag.conf_file + ~f:(fun port config_file -> + let%bind compile_config = + let logger = Logger.create () in + let%map conf = + Runtime_config.Constants.load_constants ~logger config_file + in + Runtime_config.Constants.compile_config conf + in + match%map + Client.dispatch ~compile_config Get_trustlist.rpc () port + with | Ok ips -> printf "The following IPs are permitted to connect to the daemon \ @@ -1930,10 +2095,13 @@ let node_status = flag "--show-errors" ~aliases:[ "show-errors" ] no_arg 
~doc:"Include error responses in output" in - let flags = Args.zip3 daemon_peers_flag peers_flag show_errors_flag in + let flags = + Args.zip4 Cli_lib.Flag.conf_file daemon_peers_flag peers_flag + show_errors_flag + in Command.async ~summary:"Get node statuses for a set of peers" (Cli_lib.Background_daemon.rpc_init flags - ~f:(fun port (daemon_peers, peers, show_errors) -> + ~f:(fun port (config_file, daemon_peers, peers, show_errors) -> if (Option.is_none peers && not daemon_peers) || (Option.is_some peers && daemon_peers) @@ -1945,9 +2113,16 @@ let node_status = Option.map peers ~f:(fun peers -> List.map peers ~f:Mina_net2.Multiaddr.of_string ) in + let%bind compile_config = + let logger = Logger.create () in + let%map conf = + Runtime_config.Constants.load_constants ~logger config_file + in + Runtime_config.Constants.compile_config conf + in match%map - Daemon_rpcs.Client.dispatch Daemon_rpcs.Get_node_status.rpc - peer_ids_opt port + Daemon_rpcs.Client.dispatch ~compile_config + Daemon_rpcs.Get_node_status.rpc peer_ids_opt port with | Ok all_status_data -> let all_status_data = @@ -1967,11 +2142,19 @@ let node_status = let object_lifetime_statistics = let open Daemon_rpcs in - let open Command.Param in Command.async ~summary:"Dump internal object lifetime statistics to JSON" - (Cli_lib.Background_daemon.rpc_init (return ()) ~f:(fun port () -> + (Cli_lib.Background_daemon.rpc_init Cli_lib.Flag.conf_file + ~f:(fun port config_file -> + let%bind compile_config = + let logger = Logger.create () in + let%map conf = + Runtime_config.Constants.load_constants ~logger config_file + in + Runtime_config.Constants.compile_config conf + in match%map - Client.dispatch Get_object_lifetime_statistics.rpc () port + Client.dispatch ~compile_config Get_object_lifetime_statistics.rpc () + port with | Ok stats -> print_endline stats @@ -2006,14 +2189,15 @@ let archive_blocks = and extensional_flag = Command.Param.flag "--extensional" ~aliases:[ "extensional" ] no_arg ~doc:"Blocks are in extensional JSON format" - in + and config_file = Cli_lib.Flag.conf_file in ( files , success_file , failure_file , log_successes , archive_process_location , precomputed_flag - , extensional_flag ) + , extensional_flag + , config_file ) in Command.async ~summary: @@ -2030,7 +2214,8 @@ let archive_blocks = , log_successes , archive_process_location , precomputed_flag - , extensional_flag ) + , extensional_flag + , config_file ) -> if Bool.equal precomputed_flag extensional_flag then failwith @@ -2078,13 +2263,21 @@ let archive_blocks = in let add_to_success_file = output_file_line success_file in let add_to_failure_file = output_file_line failure_file in + let%bind compile_config = + let logger = Logger.create () in + let%map conf = + Runtime_config.Constants.load_constants ~logger config_file + in + Runtime_config.Constants.compile_config conf + in let send_precomputed_block = make_send_block ~graphql_make:(fun block -> Graphql_queries.Archive_precomputed_block.( make @@ makeVariables ~block ()) ) ~archive_dispatch: - Mina_lib.Archive_client.dispatch_precomputed_block + (Mina_lib.Archive_client.dispatch_precomputed_block + ~compile_config ) in let send_extensional_block = make_send_block @@ -2092,7 +2285,8 @@ let archive_blocks = Graphql_queries.Archive_extensional_block.( make @@ makeVariables ~block ()) ) ~archive_dispatch: - Mina_lib.Archive_client.dispatch_extensional_block + (Mina_lib.Archive_client.dispatch_extensional_block + ~compile_config ) in Deferred.List.iter files ~f:(fun path -> match%map @@ -2196,10 +2390,19 
@@ let receipt_chain_hash = let chain_id_inputs = let open Deferred.Let_syntax in Command.async ~summary:"Print the inputs that yield the current chain id" - (Cli_lib.Background_daemon.rpc_init (Command.Param.all_unit []) - ~f:(fun port () -> + (Cli_lib.Background_daemon.rpc_init Cli_lib.Flag.conf_file + ~f:(fun port config_file -> let open Daemon_rpcs in - match%map Client.dispatch Chain_id_inputs.rpc () port with + let%bind compile_config = + let logger = Logger.create () in + let%map conf = + Runtime_config.Constants.load_constants ~logger config_file + in + Runtime_config.Constants.compile_config conf + in + match%map + Client.dispatch ~compile_config Chain_id_inputs.rpc () port + with | Ok ( genesis_state_hash , genesis_constants @@ -2409,18 +2612,21 @@ let itn_create_accounts = (privkey_path, key_prefix, num_accounts, fee, amount, config_file) -> let open Deferred.Let_syntax in - let%bind genesis_constants, constraint_constants = + let%bind genesis_constants, constraint_constants, compile_config = let logger = Logger.create () in let%map conf = Runtime_config.Constants.load_constants ~logger config_file in Runtime_config.Constants. - (genesis_constants conf, constraint_constants conf) + ( genesis_constants conf + , constraint_constants conf + , compile_config conf ) in let args' = (privkey_path, key_prefix, num_accounts, fee, amount) in let genesis_constants = genesis_constants in let constraint_constants = constraint_constants in - Itn.create_accounts ~genesis_constants ~constraint_constants port args' )) + Itn.create_accounts ~genesis_constants ~constraint_constants + ~compile_config port args' )) module Visualization = struct let create_command (type rpc_response) ~name ~f @@ -2429,10 +2635,20 @@ module Visualization = struct Command.async ~summary:(sprintf !"Produce a visualization of the %s" name) (Cli_lib.Background_daemon.rpc_init - Command.Param.(anon @@ ("output-filepath" %: string)) - ~f:(fun port filename -> + (Args.zip2 Cli_lib.Flag.conf_file + Command.Param.(anon @@ ("output-filepath" %: string)) ) + ~f:(fun port (config_file, filename) -> + let%bind compile_config = + let logger = Logger.create () in + let%map conf = + Runtime_config.Constants.load_constants ~logger config_file + in + Runtime_config.Constants.compile_config conf + in let%map message = - match%map Daemon_rpcs.Client.dispatch rpc filename port with + match%map + Daemon_rpcs.Client.dispatch ~compile_config rpc filename port + with | Ok response -> f filename response | Error e -> diff --git a/src/app/cli/src/init/itn.ml b/src/app/cli/src/init/itn.ml index d02c987f0b0..2b689266e19 100644 --- a/src/app/cli/src/init/itn.ml +++ b/src/app/cli/src/init/itn.ml @@ -7,7 +7,8 @@ open Mina_base open Mina_transaction let create_accounts ~(genesis_constants : Genesis_constants.t) - ~(constraint_constants : Genesis_constants.Constraint_constants.t) port + ~(constraint_constants : Genesis_constants.Constraint_constants.t) + ~(compile_config : Mina_compile_config.t) port (privkey_path, key_prefix, num_accounts, fee, amount) = let keys_per_zkapp = 8 in let zkapps_per_block = 10 in @@ -37,7 +38,7 @@ let create_accounts ~(genesis_constants : Genesis_constants.t) in let%bind fee_payer_balance = match%bind - Daemon_rpcs.Client.dispatch Daemon_rpcs.Get_balance.rpc + Daemon_rpcs.Client.dispatch ~compile_config Daemon_rpcs.Get_balance.rpc fee_payer_account_id port with | Ok (Ok (Some balance)) -> @@ -60,8 +61,8 @@ let create_accounts ~(genesis_constants : Genesis_constants.t) let%bind fee_payer_initial_nonce = (* inferred nonce 
considers txns in pool, in addition to ledger *) match%map - Daemon_rpcs.Client.dispatch Daemon_rpcs.Get_inferred_nonce.rpc - fee_payer_account_id port + Daemon_rpcs.Client.dispatch ~compile_config + Daemon_rpcs.Get_inferred_nonce.rpc fee_payer_account_id port with | Ok (Ok (Some nonce)) -> Account.Nonce.of_uint32 nonce @@ -218,8 +219,8 @@ let create_accounts ~(genesis_constants : Genesis_constants.t) Format.printf " Public key: %s Balance change: %s%s@." pk sgn balance_change_str ) ) ; let%bind res = - Daemon_rpcs.Client.dispatch Daemon_rpcs.Send_zkapp_commands.rpc - zkapps_batch port + Daemon_rpcs.Client.dispatch ~compile_config + Daemon_rpcs.Send_zkapp_commands.rpc zkapps_batch port in ( match res with | Ok res_inner -> ( @@ -253,8 +254,8 @@ let create_accounts ~(genesis_constants : Genesis_constants.t) Deferred.List.for_all batch_pks ~f:(fun pk -> let account_id = Account_id.create pk Token_id.default in let%map res = - Daemon_rpcs.Client.dispatch Daemon_rpcs.Get_balance.rpc account_id - port + Daemon_rpcs.Client.dispatch ~compile_config + Daemon_rpcs.Get_balance.rpc account_id port in match res with | Ok (Ok (Some balance)) when Currency.Balance.(balance > zero) -> diff --git a/src/app/cli/src/init/mina_run.ml b/src/app/cli/src/init/mina_run.ml index 62063088c4a..a3b58a81b21 100644 --- a/src/app/cli/src/init/mina_run.ml +++ b/src/app/cli/src/init/mina_run.ml @@ -364,7 +364,19 @@ let setup_local_server ?(client_trustlist = []) ?rest_server_port List.map metadata ~f:(fun (s, value) -> (s, Yojson.Safe.from_string value) ) in - return @@ Itn_logger.log ~process ~timestamp ~message ~metadata () ) + let config = + { Itn_logger.rpc_handshake_timeout = + compile_config.rpc_handshake_timeout + ; rpc_heartbeat_timeout = + compile_config.rpc_heartbeat_timeout |> Time.Span.to_sec + |> Time_ns.Span.of_sec + ; rpc_heartbeat_send_every = + compile_config.rpc_heartbeat_send_every |> Time.Span.to_sec + |> Time_ns.Span.of_sec + } + in + return + @@ Itn_logger.log ~process ~timestamp ~message ~metadata ~config () ) ] in let log_snark_work_metrics (work : Snark_worker.Work.Result.t) = From 433490fe425f064333f69bc8929ba7ad297183f2 Mon Sep 17 00:00:00 2001 From: martyall Date: Mon, 7 Oct 2024 14:15:29 -0700 Subject: [PATCH 083/234] Trigger Build From 2c8af2c86262884f636fc92e36e554b3ca86e41b Mon Sep 17 00:00:00 2001 From: martyall Date: Mon, 7 Oct 2024 14:25:35 -0700 Subject: [PATCH 084/234] remove mina_node_config as dep --- src/lib/mina_metrics/prometheus_metrics/dune | 1 - 1 file changed, 1 deletion(-) diff --git a/src/lib/mina_metrics/prometheus_metrics/dune b/src/lib/mina_metrics/prometheus_metrics/dune index 6ce3904ca3d..97833ca2ae2 100644 --- a/src/lib/mina_metrics/prometheus_metrics/dune +++ b/src/lib/mina_metrics/prometheus_metrics/dune @@ -22,7 +22,6 @@ ;; local libraries logger o1trace - mina_node_config ) (instrumentation (backend bisect_ppx)) (preprocess (pps ppx_mina ppx_let ppx_version ppx_pipebang ppx_custom_printf ppx_here)) From 48603dfae00bc6b081a575f329285dd7c6c34ba0 Mon Sep 17 00:00:00 2001 From: martyall Date: Tue, 8 Oct 2024 20:49:47 -0700 Subject: [PATCH 085/234] Address George's PR comments --- src/app/archive/cli/archive_cli.ml | 18 +++-- src/app/archive/lib/processor.ml | 8 +- src/app/batch_txn_tool/batch_txn_tool.ml | 2 +- .../src/cli_entrypoint/mina_cli_entrypoint.ml | 17 ++--- src/app/cli/src/init/client.ml | 18 ++--- .../delegation_verify/delegation_verify.ml | 2 +- src/lib/cli_lib/commands.ml | 7 +- src/lib/cli_lib/flag.ml | 2 +- src/lib/cli_lib/flag.mli | 2 +- 
.../genesis_ledger_helper.ml | 30 ++++---- .../lib/genesis_ledger_helper_lib.ml | 23 +----- src/lib/runtime_config/runtime_config.ml | 76 ++++++++++++------- src/lib/snark_worker/functor.ml | 2 +- .../standalone/run_snark_worker.ml | 2 +- 14 files changed, 105 insertions(+), 104 deletions(-) diff --git a/src/app/archive/cli/archive_cli.ml b/src/app/archive/cli/archive_cli.ml index b53c9c63ed7..5ff9c6fa9f3 100644 --- a/src/app/archive/cli/archive_cli.ml +++ b/src/app/archive/cli/archive_cli.ml @@ -41,19 +41,21 @@ let command_run = fun () -> let logger = Logger.create () in let open Deferred.Let_syntax in - let%bind constants = - Runtime_config.Constants.load_constants ~logger + let%bind config = + Runtime_config.Json_loader.load_config_files ~logger (Option.to_list runtime_config_file) + |> Deferred.Or_error.ok_exn in - let%bind runtime_config_opt = + let constants = Runtime_config.Constants.load_constants' config in + let%bind precomputed_values_opt = match runtime_config_file with | None -> return None - | Some file -> + | Some _ -> Deferred.Or_error.( - Genesis_ledger_helper.Config_loader.load_config_files ~logger - [ file ] - >>| fun a -> Option.some @@ fst a) + Genesis_ledger_helper.Config_loader.init_from_config_file ~logger + ~constants config + >>| fun (a, _) -> Option.some a) |> Deferred.Or_error.ok_exn in Stdout_log.setup log_json log_level ; @@ -67,7 +69,7 @@ let command_run = ~postgres_address:postgres.value ~server_port: (Option.value server_port.value ~default:server_port.default) - ~delete_older_than ~runtime_config_opt ~missing_blocks_width ) + ~delete_older_than ~precomputed_values_opt ~missing_blocks_width ) let time_arg = (* Same timezone as Genesis_constants.genesis_state_timestamp. *) diff --git a/src/app/archive/lib/processor.ml b/src/app/archive/lib/processor.ml index 13ab51c7b46..9396084b92b 100644 --- a/src/app/archive/lib/processor.ml +++ b/src/app/archive/lib/processor.ml @@ -4729,8 +4729,8 @@ let run pool reader ~genesis_constants ~constraint_constants ~logger (* [add_genesis_accounts] is called when starting the archive process *) let add_genesis_accounts ~logger - ~(runtime_config_opt : Precomputed_values.t option) pool = - match runtime_config_opt with + ~(precomputed_values_opt : Precomputed_values.t option) pool = + match precomputed_values_opt with | None -> Deferred.unit | Some precomputed_values -> ( @@ -4861,7 +4861,7 @@ let create_metrics_server ~logger ~metrics_server_port ~missing_blocks_width let setup_server ~(genesis_constants : Genesis_constants.t) ~(constraint_constants : Genesis_constants.Constraint_constants.t) ~metrics_server_port ~logger ~postgres_address ~server_port - ~delete_older_than ~runtime_config_opt ~missing_blocks_width = + ~delete_older_than ~precomputed_values_opt ~missing_blocks_width = let where_to_listen = Async.Tcp.Where_to_listen.bind_to All_addresses (On_port server_port) in @@ -4890,7 +4890,7 @@ let setup_server ~(genesis_constants : Genesis_constants.t) ~metadata:[ ("error", `String (Caqti_error.show e)) ] ; Deferred.unit | Ok pool -> - let%bind () = add_genesis_accounts pool ~logger ~runtime_config_opt in + let%bind () = add_genesis_accounts pool ~logger ~precomputed_values_opt in run ~constraint_constants ~genesis_constants pool reader ~logger ~delete_older_than |> don't_wait_for ; diff --git a/src/app/batch_txn_tool/batch_txn_tool.ml b/src/app/batch_txn_tool/batch_txn_tool.ml index 572e369a571..9331c5a4207 100644 --- a/src/app/batch_txn_tool/batch_txn_tool.ml +++ b/src/app/batch_txn_tool/batch_txn_tool.ml @@ -387,7 
+387,7 @@ let output_there_and_back_cmds = transactions, if this is not present then we use the env var \ MINA_PRIVKEY_PASS" (optional string) - and config_file = Cli_lib.Flag.conf_file + and config_file = Cli_lib.Flag.config_files and graphql_target_node_option = flag "--graphql-target-node" ~aliases:[ "graphql-target-node" ] ~doc: diff --git a/src/app/cli/src/cli_entrypoint/mina_cli_entrypoint.ml b/src/app/cli/src/cli_entrypoint/mina_cli_entrypoint.ml index 7bf9d25e446..22babea423c 100644 --- a/src/app/cli/src/cli_entrypoint/mina_cli_entrypoint.ml +++ b/src/app/cli/src/cli_entrypoint/mina_cli_entrypoint.ml @@ -1550,7 +1550,7 @@ let internal_commands ~itn_features logger = , Command.async ~summary:"Run prover on a sexp provided on a single line of stdin" (let open Command.Let_syntax in - let%map_open config_file = Cli_lib.Flag.conf_file in + let%map_open config_file = Cli_lib.Flag.config_files in fun () -> let logger = Logger.create () in let open Deferred.Let_syntax in @@ -1581,7 +1581,7 @@ let internal_commands ~itn_features logger = let%map_open filename = flag "--file" (required string) ~doc:"File containing the s-expression of the snark work to execute" - and config_file = Cli_lib.Flag.conf_file in + and config_file = Cli_lib.Flag.config_files in fun () -> let open Deferred.Let_syntax in @@ -1639,7 +1639,7 @@ let internal_commands ~itn_features logger = and limit = flag "--limit" ~aliases:[ "-limit" ] (optional int) ~doc:"limit the number of proofs taken from the file" - and config_file = Cli_lib.Flag.conf_file in + and config_file = Cli_lib.Flag.config_files in fun () -> let open Async in let logger = Logger.create () in @@ -1790,7 +1790,7 @@ let internal_commands ~itn_features logger = ; ( "test-genesis-block-generation" , Command.async ~summary:"Generate a genesis proof" (let open Command.Let_syntax in - let%map_open config_file = Cli_lib.Flag.conf_file + let%map_open config_file = Cli_lib.Flag.config_files and conf_dir = Cli_lib.Flag.conf_dir and genesis_dir = flag "--genesis-ledger-dir" ~aliases:[ "genesis-ledger-dir" ] @@ -1804,10 +1804,10 @@ let internal_commands ~itn_features logger = Parallel.init_master () ; let logger = Logger.create () in let conf_dir = Mina_lib.Conf_dir.compute_conf_dir conf_dir in - let cli_proof_level = Genesis_constants.Proof_level.Full in let%bind precomputed_values, _ = Genesis_ledger_helper.Config_loader.load_config_files ~logger - ~conf_dir ?genesis_dir ~cli_proof_level ~itn_features config_file + ~conf_dir ?genesis_dir ~cli_proof_level:Full ~itn_features + config_file |> Deferred.Or_error.ok_exn in let pids = Child_processes.Termination.create_pid_table () in @@ -1881,10 +1881,7 @@ let () = | [| _mina_exe; version |] when is_version_cmd version -> Mina_version.print_version () | _ -> - let itn_features = - Sys.getenv "MINA_ITN_FEATURES" - |> Option.value_map ~default:false ~f:bool_of_string - in + let itn_features = Mina_compile_config.Compiled.t.itn_features in Command.run (Command.group ~summary:"Mina" ~preserve_subcommand_order:() (mina_commands logger ~itn_features) ) ) ; diff --git a/src/app/cli/src/init/client.ml b/src/app/cli/src/init/client.ml index 9771267c082..bab6ad2bf70 100644 --- a/src/app/cli/src/init/client.ml +++ b/src/app/cli/src/init/client.ml @@ -513,7 +513,7 @@ let send_payment_graphql = flag "--amount" ~aliases:[ "amount" ] ~doc:"VALUE Payment amount you want to send" (required txn_amount) in - let config_file = Cli_lib.Flag.conf_file in + let config_file = Cli_lib.Flag.config_files in let args = Args.zip4 
Cli_lib.Flag.signed_command_common receiver_flag amount_flag config_file @@ -558,7 +558,7 @@ let delegate_stake_graphql = ~doc:"PUBLICKEY Public key to which you want to delegate your stake" (required public_key_compressed) in - let config_file = Cli_lib.Flag.conf_file in + let config_file = Cli_lib.Flag.config_files in let args = Args.zip3 Cli_lib.Flag.signed_command_common receiver_flag config_file in @@ -835,7 +835,7 @@ let hash_ledger = flag "--ledger-file" ~doc:"LEDGER-FILE File containing an exported ledger" (required string)) - and config_file = Cli_lib.Flag.conf_file + and config_file = Cli_lib.Flag.config_files and plaintext = Cli_lib.Flag.plaintext in fun () -> let open Deferred.Let_syntax in @@ -947,7 +947,7 @@ let currency_in_ledger = let constraint_system_digests = let open Command.Let_syntax in Command.async ~summary:"Print MD5 digest of each SNARK constraint" - (let%map_open config_file = Cli_lib.Flag.conf_file in + (let%map_open config_file = Cli_lib.Flag.config_files in fun () -> let open Deferred.Let_syntax in let%bind constraint_constants, proof_level = @@ -1672,7 +1672,7 @@ let generate_libp2p_keypair = ~summary:"Generate a new libp2p keypair and print out the peer ID" (let open Command.Let_syntax in let%map_open privkey_path = Cli_lib.Flag.privkey_write_path - and config_file = Cli_lib.Flag.conf_file in + and config_file = Cli_lib.Flag.config_files in generate_libp2p_keypair_do privkey_path ~config_file) let dump_libp2p_keypair_do privkey_path ~config_file = @@ -1709,7 +1709,7 @@ let dump_libp2p_keypair = Command.async ~summary:"Print an existing libp2p keypair" (let open Command.Let_syntax in let%map_open privkey_path = Cli_lib.Flag.privkey_read_path - and config_file = Cli_lib.Flag.conf_file in + and config_file = Cli_lib.Flag.config_files in dump_libp2p_keypair_do privkey_path ~config_file) let trustlist_ip_flag = @@ -1840,7 +1840,7 @@ let compile_time_constants = let open Command.Let_syntax in Command.async ~summary:"Print a JSON map of the compile-time consensus parameters" - (let%map_open config_file = Cli_lib.Flag.conf_file in + (let%map_open config_file = Cli_lib.Flag.config_files in fun () -> let home = Core.Sys.home_directory () in let conf_dir = home ^/ Cli_lib.Default.conf_dir_name in @@ -2356,7 +2356,7 @@ let test_ledger_application = flag "--has-second-partition" ~doc:"Assume there is a second partition (scan state)" no_arg and tracing = flag "--tracing" ~doc:"Wrap test into tracing" no_arg - and config_file = Cli_lib.Flag.conf_file + and config_file = Cli_lib.Flag.config_files and no_masks = flag "--no-masks" ~doc:"Do not create masks" no_arg in Cli_lib.Exceptions.handle_nicely @@ fun () -> @@ -2399,7 +2399,7 @@ let itn_create_accounts = ~doc:"NN Amount in nanomina to be divided among new accounts" (required int) in - let config_file = Cli_lib.Flag.conf_file in + let config_file = Cli_lib.Flag.config_files in let args = Args.zip6 privkey_path key_prefix num_accounts fee amount config_file in diff --git a/src/app/delegation_verify/delegation_verify.ml b/src/app/delegation_verify/delegation_verify.ml index bb338c442b1..ac643b1db67 100644 --- a/src/app/delegation_verify/delegation_verify.ml +++ b/src/app/delegation_verify/delegation_verify.ml @@ -13,7 +13,7 @@ let get_filenames = let verify_snark_work ~verify_transaction_snarks ~proof ~message = verify_transaction_snarks [ (proof, message) ] -let config_flag = Cli_lib.Flag.conf_file +let config_flag = Cli_lib.Flag.config_files let keyspace_flag = let open Command.Param in diff --git 
a/src/lib/cli_lib/commands.ml b/src/lib/cli_lib/commands.ml index aff4d661ab9..4467e4cb915 100644 --- a/src/lib/cli_lib/commands.ml +++ b/src/lib/cli_lib/commands.ml @@ -230,7 +230,7 @@ module Vrf = struct flag "--total-stake" ~doc:"AMOUNT The total balance of all accounts in the epoch ledger" (optional int) - and config_file = Flag.conf_file in + and config_file = Flag.config_files in Exceptions.handle_nicely @@ fun () -> let env = Secrets.Keypair.env in @@ -302,7 +302,7 @@ module Vrf = struct stdin" (let open Command.Let_syntax in let%map_open privkey_path = Flag.privkey_read_path - and config_file = Flag.conf_file in + and config_file = Flag.config_files in Exceptions.handle_nicely @@ fun () -> let env = Secrets.Keypair.env in @@ -371,7 +371,7 @@ module Vrf = struct ledger; this should be done manually to confirm whether threshold_met \ in the output corresponds to an actual won block." (let open Command.Let_syntax in - let%map_open config_file = Flag.conf_file in + let%map_open config_file = Flag.config_files in Exceptions.handle_nicely @@ fun () -> let open Deferred.Let_syntax in @@ -382,7 +382,6 @@ module Vrf = struct in Runtime_config.Constants.constraint_constants conf in - (* TODO-someday: constraint constants from config file. *) let lexbuf = Lexing.from_channel In_channel.stdin in let lexer = Yojson.init_lexer () in let%bind () = diff --git a/src/lib/cli_lib/flag.ml b/src/lib/cli_lib/flag.ml index 051b2e69ffe..64b279757d6 100644 --- a/src/lib/cli_lib/flag.ml +++ b/src/lib/cli_lib/flag.ml @@ -33,7 +33,7 @@ let conf_dir = flag "--config-directory" ~aliases:[ "config-directory" ] ~doc:"DIR Configuration directory" (optional string) -let conf_file = +let config_files = let open Command.Param in flag "--config-file" ~aliases:[ "config-file" ] ~doc: diff --git a/src/lib/cli_lib/flag.mli b/src/lib/cli_lib/flag.mli index f95e5e3801a..dda9c630ce5 100644 --- a/src/lib/cli_lib/flag.mli +++ b/src/lib/cli_lib/flag.mli @@ -12,7 +12,7 @@ val privkey_read_path : string Command.Param.t val conf_dir : string option Command.Param.t -val conf_file : string list Command.Param.t +val config_files : string list Command.Param.t module Types : sig type 'a with_name = { name : string; value : 'a } diff --git a/src/lib/genesis_ledger_helper/genesis_ledger_helper.ml b/src/lib/genesis_ledger_helper/genesis_ledger_helper.ml index d8fcb85be5f..e3c4b62cbb3 100644 --- a/src/lib/genesis_ledger_helper/genesis_ledger_helper.ml +++ b/src/lib/genesis_ledger_helper/genesis_ledger_helper.ml @@ -772,6 +772,7 @@ module type Config_loader_intf = sig -> string list -> (Precomputed_values.t * Runtime_config.t) Deferred.Or_error.t + (* Mostly loads genesis ledger and epoch data *) val init_from_config_file : ?overwrite_version:Mina_numbers.Txn_version.t -> ?genesis_dir:string @@ -787,7 +788,6 @@ module Config_loader : Config_loader_intf = struct (config : Runtime_config.t) = print_config ~logger config ; let open Deferred.Or_error.Let_syntax in - let blockchain_proof_system_id = None in let constraint_constants = Runtime_config.Constants.constraint_constants constants in @@ -825,7 +825,7 @@ module Config_loader : Config_loader_intf = struct let proof_inputs = Genesis_proof.generate_inputs ~runtime_config:config ~proof_level ~ledger:genesis_ledger ~constraint_constants ~genesis_constants - ~compile_config ~blockchain_proof_system_id ~genesis_epoch_data + ~compile_config ~blockchain_proof_system_id:None ~genesis_epoch_data in (proof_inputs, config) @@ -841,19 +841,6 @@ module Config_loader : Config_loader_intf = struct let 
values = Genesis_proof.create_values_no_proof inputs in (values, config) - let%test_module "Account config test" = - ( module struct - let%test_unit "Runtime config <=> Account" = - let module Ledger = (val Genesis_ledger.for_unit_tests) in - let accounts = Lazy.force Ledger.accounts in - List.iter accounts ~f:(fun (sk, acc) -> - let acc_config = Accounts.Single.of_account acc sk in - let acc' = - Accounts.Single.to_account_with_pk acc_config |> Or_error.ok_exn - in - [%test_eq: Account.t] acc acc' ) - end ) - let load_config_files ?overwrite_version ?genesis_dir ?(itn_features = false) ?cli_proof_level ?conf_dir ~logger (config_files : string list) = let open Deferred.Or_error.Let_syntax in @@ -906,3 +893,16 @@ module Config_loader : Config_loader_intf = struct ~metadata ; Error.raise err end + +let%test_module "Account config test" = + ( module struct + let%test_unit "Runtime config <=> Account" = + let module Ledger = (val Genesis_ledger.for_unit_tests) in + let accounts = Lazy.force Ledger.accounts in + List.iter accounts ~f:(fun (sk, acc) -> + let acc_config = Accounts.Single.of_account acc sk in + let acc' = + Accounts.Single.to_account_with_pk acc_config |> Or_error.ok_exn + in + [%test_eq: Account.t] acc acc' ) + end ) diff --git a/src/lib/genesis_ledger_helper/lib/genesis_ledger_helper_lib.ml b/src/lib/genesis_ledger_helper/lib/genesis_ledger_helper_lib.ml index 6ca2c3465b5..7f886539d1d 100644 --- a/src/lib/genesis_ledger_helper/lib/genesis_ledger_helper_lib.ml +++ b/src/lib/genesis_ledger_helper/lib/genesis_ledger_helper_lib.ml @@ -425,25 +425,10 @@ let make_constraint_constants in let transaction_capacity_log_2 = match config.transaction_capacity with - | Some (Log_2 i) -> - i - | Some (Txns_per_second_x10 tps_goal_x10) -> - let max_coinbases = 2 in - let max_user_commands_per_block = - (* block_window_duration is in milliseconds, so divide by 1000 divide - by 10 again because we have tps * 10 - *) - tps_goal_x10 * block_window_duration_ms / (1000 * 10) - in - (* Log of the capacity of transactions per transition. - - 1 will only work if we don't have prover fees. - - 2 will work with prover fees, but not if we want a transaction - included in every block. - - At least 3 ensures a transaction per block and the staged-ledger - unit tests pass. 
- *) - 1 - + Core_kernel.Int.ceil_log2 (max_user_commands_per_block + max_coinbases) + | Some transaction_capacity -> + Runtime_config.Proof_keys.Transaction_capacity + .to_transaction_capacity_log_2 ~block_window_duration_ms + ~transaction_capacity | None -> default.transaction_capacity_log_2 in diff --git a/src/lib/runtime_config/runtime_config.ml b/src/lib/runtime_config/runtime_config.ml index e7560ce6eea..a31d67ad622 100644 --- a/src/lib/runtime_config/runtime_config.ml +++ b/src/lib/runtime_config/runtime_config.ml @@ -1001,14 +1001,6 @@ module Proof_keys = struct type t = Log_2 of int | Txns_per_second_x10 of int [@@deriving bin_io_unversioned] - let log2 = function Log_2 i -> Some i | Txns_per_second_x10 _ -> None - - let txns_per_second_x10 = function - | Log_2 _ -> - None - | Txns_per_second_x10 i -> - Some i - let to_json_layout : t -> Json_layout.Proof_keys.Transaction_capacity.t = function | Log_2 i -> @@ -1039,6 +1031,30 @@ module Proof_keys = struct let small : t = Log_2 2 let medium : t = Log_2 3 + + let to_transaction_capacity_log_2 ~block_window_duration_ms + ~transaction_capacity = + match transaction_capacity with + | Log_2 i -> + i + | Txns_per_second_x10 tps_goal_x10 -> + let max_coinbases = 2 in + let max_user_commands_per_block = + (* block_window_duration is in milliseconds, so divide by 1000 divide + by 10 again because we have tps * 10 + *) + tps_goal_x10 * block_window_duration_ms / (1000 * 10) + in + (* Log of the capacity of transactions per transition. + - 1 will only work if we don't have prover fees. + - 2 will work with prover fees, but not if we want a transaction + included in every block. + - At least 3 ensures a transaction per block and the staged-ledger + unit tests pass. + *) + 1 + + Core_kernel.Int.ceil_log2 + (max_user_commands_per_block + max_coinbases) end type t = @@ -1881,6 +1897,10 @@ module Constants : Constants_intf = struct in Option.first_some b a in + let block_window_duration_ms = + Option.value ~default:a.constraint_constants.block_window_duration_ms + Option.(b.proof >>= fun p -> p.block_window_duration_ms) + in { a.constraint_constants with sub_windows_per_window = Option.value ~default:a.constraint_constants.sub_windows_per_window @@ -1891,16 +1911,17 @@ module Constants : Constants_intf = struct ; work_delay = Option.value ~default:a.constraint_constants.work_delay Option.(b.proof >>= fun p -> p.work_delay) - ; block_window_duration_ms = - Option.value ~default:a.constraint_constants.block_window_duration_ms - Option.(b.proof >>= fun p -> p.block_window_duration_ms) + ; block_window_duration_ms ; transaction_capacity_log_2 = Option.value ~default:a.constraint_constants.transaction_capacity_log_2 Option.( b.proof >>= fun p -> - p.transaction_capacity >>= Proof_keys.Transaction_capacity.log2) + p.transaction_capacity + >>| fun transaction_capacity -> + Proof_keys.Transaction_capacity.to_transaction_capacity_log_2 + ~block_window_duration_ms ~transaction_capacity) ; coinbase_amount = Option.value ~default:a.constraint_constants.coinbase_amount Option.(b.proof >>= fun p -> p.coinbase_amount) @@ -1950,25 +1971,22 @@ module Constants : Constants_intf = struct { genesis_constants; constraint_constants; proof_level; compile_config } let load_constants' ?itn_features ?cli_proof_level runtime_config = - let constants = - let compile_constants = - { genesis_constants = Genesis_constants.Compiled.genesis_constants - ; constraint_constants = Genesis_constants.Compiled.constraint_constants - ; proof_level = Genesis_constants.Compiled.proof_level 
- ; compile_config = Mina_compile_config.Compiled.t - } - in - let cs = combine compile_constants runtime_config in - { cs with - proof_level = Option.value ~default:cs.proof_level cli_proof_level - ; compile_config = - { cs.compile_config with - itn_features = - Option.value ~default:cs.compile_config.itn_features itn_features - } + let compile_constants = + { genesis_constants = Genesis_constants.Compiled.genesis_constants + ; constraint_constants = Genesis_constants.Compiled.constraint_constants + ; proof_level = Genesis_constants.Compiled.proof_level + ; compile_config = Mina_compile_config.Compiled.t } in - constants + let cs = combine compile_constants runtime_config in + { cs with + proof_level = Option.value ~default:cs.proof_level cli_proof_level + ; compile_config = + { cs.compile_config with + itn_features = + Option.value ~default:cs.compile_config.itn_features itn_features + } + } (* Use this function if you don't need/want the ledger configuration *) let load_constants ?conf_dir ?commit_id_short ?itn_features ?cli_proof_level diff --git a/src/lib/snark_worker/functor.ml b/src/lib/snark_worker/functor.ml index aa60e19bbff..1dfa6f06dc6 100644 --- a/src/lib/snark_worker/functor.ml +++ b/src/lib/snark_worker/functor.ml @@ -359,7 +359,7 @@ module Make (Inputs : Intf.Inputs_intf) : (optional bool) ~doc: "true|false Shutdown when disconnected from daemon (default:true)" - and config_file = Cli_lib.Flag.conf_file + and config_file = Cli_lib.Flag.config_files and conf_dir = Cli_lib.Flag.conf_dir in fun () -> let logger = diff --git a/src/lib/snark_worker/standalone/run_snark_worker.ml b/src/lib/snark_worker/standalone/run_snark_worker.ml index c0c426fef8d..929f51b7b8d 100644 --- a/src/lib/snark_worker/standalone/run_snark_worker.ml +++ b/src/lib/snark_worker/standalone/run_snark_worker.ml @@ -8,7 +8,7 @@ let command = (let%map_open spec = flag "--spec-sexp" ~doc:"" (required (sexp_conv Prod.single_spec_of_sexp)) - and config_file = Cli_lib.Flag.conf_file + and config_file = Cli_lib.Flag.config_files and cli_proof_level = flag "--proof-level" ~doc:"" (optional_with_default Genesis_constants.Proof_level.Full From 9cd7d89cd241e49eeffef9344fdc04ae1b2ede8d Mon Sep 17 00:00:00 2001 From: dkijania Date: Wed, 9 Oct 2024 13:32:20 +0200 Subject: [PATCH 086/234] update toolchain with influx cli installed --- buildkite/src/Constants/ContainerImages.dhall | 6 +++--- dockerfiles/stages/3-toolchain | 9 +++++++++ 2 files changed, 12 insertions(+), 3 deletions(-) diff --git a/buildkite/src/Constants/ContainerImages.dhall b/buildkite/src/Constants/ContainerImages.dhall index f4916e06773..51d266b64fe 100644 --- a/buildkite/src/Constants/ContainerImages.dhall +++ b/buildkite/src/Constants/ContainerImages.dhall @@ -4,11 +4,11 @@ -- NOTE: minaToolchainBookworm is also used for building Ubuntu Jammy packages in CI { toolchainBase = "codaprotocol/ci-toolchain-base:v3" , minaToolchainBullseye = - "gcr.io/o1labs-192920/mina-toolchain@sha256:a1f60d69f3657060d6e7289dc770fd7c36fc5a067853019c2f3f6247cb4b6673" + "gcr.io/o1labs-192920/mina-toolchain@sha256:1e4b429fdf6a06e88abe9b4c9d54a80a3e818011a94806d070433a95d6af1229" , minaToolchainBookworm = - "gcr.io/o1labs-192920/mina-toolchain@sha256:a1f60d69f3657060d6e7289dc770fd7c36fc5a067853019c2f3f6247cb4b6673" + "gcr.io/o1labs-192920/mina-toolchain@sha256:1e4b429fdf6a06e88abe9b4c9d54a80a3e818011a94806d070433a95d6af1229" , minaToolchain = - "gcr.io/o1labs-192920/mina-toolchain@sha256:a1f60d69f3657060d6e7289dc770fd7c36fc5a067853019c2f3f6247cb4b6673" + 
"gcr.io/o1labs-192920/mina-toolchain@sha256:1e4b429fdf6a06e88abe9b4c9d54a80a3e818011a94806d070433a95d6af1229" , elixirToolchain = "elixir:1.10-alpine" , nodeToolchain = "node:14.13.1-stretch-slim" , ubuntu2004 = "ubuntu:20.04" diff --git a/dockerfiles/stages/3-toolchain b/dockerfiles/stages/3-toolchain index 3f66574da27..f3f13255ffc 100644 --- a/dockerfiles/stages/3-toolchain +++ b/dockerfiles/stages/3-toolchain @@ -12,6 +12,7 @@ ARG DEBS3_VERSION=0.11.6 ARG DHALL_VERSION=1.41.1 ARG DHALL_JSON_VERSION=1.7.10 ARG DHALL_BASH_VERSION=1.0.40 +ARG INFLUXDB_CLI_VERSION=2.7.5 USER root @@ -70,6 +71,14 @@ RUN curl -sLO https://github.com/MinaProtocol/deb-s3/releases/download/${DEBS3_V && gem install deb-s3-${DEBS3_VERSION}.gem \ && rm -f deb-s3-${DEBS3_VERSION}.gem +# --- deb-s3 tool +# Custom version, with lock only on manifest upload +RUN wget https://download.influxdata.com/influxdb/releases/influxdb2-client-${INFLUXDB_CLI_VERSION}-linux-amd64.tar.gz \ + && mkdir -p "influx_dir" && tar xvzf influxdb2-client-${INFLUXDB_CLI_VERSION}-linux-amd64.tar.gz -C influx_dir \ + && sudo cp influx_dir/influx /usr/local/bin/ \ + && rm influxdb2-client-${INFLUXDB_CLI_VERSION}-linux-amd64.tar.gz \ + && rm -rf influx_dir + # --- Docker Daemon RUN curl -sL https://download.docker.com/linux/static/stable/x86_64/docker-${DOCKER_VERSION}.tgz \ | tar --extract --gzip --strip-components 1 --directory=/usr/bin --file=- From 2a8ef4fdab168522be4180ee863e56974b6149ae Mon Sep 17 00:00:00 2001 From: martyall Date: Wed, 9 Oct 2024 10:06:06 -0700 Subject: [PATCH 087/234] Address George's PR comments --- .../src/cli_entrypoint/mina_cli_entrypoint.ml | 8 +- src/app/cli/src/init/client.ml | 226 +++--------------- src/app/cli/src/init/mina_run.ml | 19 +- src/lib/daemon_rpcs/client.ml | 10 +- .../mina_compile_config.ml | 12 +- .../node_config_for_unit_tests.ml | 2 - src/lib/snark_worker/functor.ml | 13 +- 7 files changed, 50 insertions(+), 240 deletions(-) diff --git a/src/app/cli/src/cli_entrypoint/mina_cli_entrypoint.ml b/src/app/cli/src/cli_entrypoint/mina_cli_entrypoint.ml index 9844eeeefd1..f521344d32b 100644 --- a/src/app/cli/src/cli_entrypoint/mina_cli_entrypoint.ml +++ b/src/app/cli/src/cli_entrypoint/mina_cli_entrypoint.ml @@ -55,12 +55,8 @@ let with_itn_logger ~itn_features ~(compile_config : Mina_compile_config.t) let conf = Logger.make_itn_logger_config ~rpc_handshake_timeout:compile_config.rpc_handshake_timeout - ~rpc_heartbeat_timeout: - ( compile_config.rpc_heartbeat_timeout |> Time.Span.to_sec - |> Time_ns.Span.of_sec ) - ~rpc_heartbeat_send_every: - ( compile_config.rpc_heartbeat_send_every |> Time.Span.to_sec - |> Time_ns.Span.of_sec ) + ~rpc_heartbeat_timeout:compile_config.rpc_heartbeat_timeout + ~rpc_heartbeat_send_every:compile_config.rpc_heartbeat_send_every in Logger.with_itn conf logger else logger diff --git a/src/app/cli/src/init/client.ml b/src/app/cli/src/init/client.ml index ea361b4ae25..d2747cb5342 100644 --- a/src/app/cli/src/init/client.ml +++ b/src/app/cli/src/init/client.ml @@ -39,19 +39,17 @@ let or_error_str ~f_ok ~error = function | Error e -> sprintf "%s\n%s\n" error (Error.to_string_hum e) +let load_compile_config ?(logger = Logger.create ()) config_file = + let%map conf = Runtime_config.Constants.load_constants ~logger config_file in + Runtime_config.Constants.compile_config conf + let stop_daemon = let open Deferred.Let_syntax in let open Daemon_rpcs in Command.async ~summary:"Stop the daemon" (Cli_lib.Background_daemon.rpc_init Cli_lib.Flag.conf_file ~f:(fun port config_file -> - 
let%bind compile_config = - let logger = Logger.create () in - let%map conf = - Runtime_config.Constants.load_constants ~logger config_file - in - Runtime_config.Constants.compile_config conf - in + let%bind compile_config = load_compile_config config_file in let%map res = Daemon_rpcs.Client.dispatch ~compile_config Stop_daemon.rpc () port in @@ -182,13 +180,7 @@ let get_trust_status = Command.async ~summary:"Get the trust status associated with an IP address" (Cli_lib.Background_daemon.rpc_init flags ~f:(fun port (config_file, ip_address, json) -> - let%bind compile_config = - let logger = Logger.create () in - let%map conf = - Runtime_config.Constants.load_constants ~logger config_file - in - Runtime_config.Constants.compile_config conf - in + let%bind compile_config = load_compile_config config_file in match%map Daemon_rpcs.Client.dispatch ~compile_config Daemon_rpcs.Get_trust_status.rpc ip_address port @@ -227,13 +219,7 @@ let get_trust_status_all = ~summary:"Get trust statuses for all peers known to the trust system" (Cli_lib.Background_daemon.rpc_init flags ~f:(fun port (config_file, nonzero, json) -> - let%bind compile_config = - let logger = Logger.create () in - let%map conf = - Runtime_config.Constants.load_constants ~logger config_file - in - Runtime_config.Constants.compile_config conf - in + let%bind compile_config = load_compile_config config_file in match%map Daemon_rpcs.Client.dispatch ~compile_config Daemon_rpcs.Get_trust_status_all.rpc () port @@ -272,13 +258,7 @@ let reset_trust_status = Command.async ~summary:"Reset the trust status associated with an IP address" (Cli_lib.Background_daemon.rpc_init flags ~f:(fun port (config_file, ip_address, json) -> - let%bind compile_config = - let logger = Logger.create () in - let%map conf = - Runtime_config.Constants.load_constants ~logger config_file - in - Runtime_config.Constants.compile_config conf - in + let%bind compile_config = load_compile_config config_file in match%map Daemon_rpcs.Client.dispatch ~compile_config Daemon_rpcs.Reset_trust_status.rpc ip_address port @@ -302,13 +282,7 @@ let get_public_keys = (Cli_lib.Background_daemon.rpc_init (Args.zip3 config_file with_details_flag Cli_lib.Flag.json) ~f:(fun port (config_file, is_balance_included, json) -> - let%bind compile_config = - let logger = Logger.create () in - let%map conf = - Runtime_config.Constants.load_constants ~logger config_file - in - Runtime_config.Constants.compile_config conf - in + let%bind compile_config = load_compile_config config_file in if is_balance_included then Daemon_rpcs.Client.dispatch_pretty_message ~compile_config ~json ~join_error:Or_error.join ~error_ctx @@ -364,13 +338,7 @@ let verify_receipt = (Args.zip5 config_file payment_path_flag proof_path_flag address_flag token_flag ) ~f:(fun port (config_file, payment_path, proof_path, pk, token_id) -> - let%bind compile_config = - let logger = Logger.create () in - let%map conf = - Runtime_config.Constants.load_constants ~logger config_file - in - Runtime_config.Constants.compile_config conf - in + let%bind compile_config = load_compile_config config_file in let account_id = Account_id.create pk token_id in let dispatch_result = let open Deferred.Or_error.Let_syntax in @@ -444,13 +412,7 @@ let get_nonce_cmd = (Cli_lib.Background_daemon.rpc_init flags ~f:(fun port (config_file, pk, token_flag) -> let account_id = Account_id.create pk token_flag in - let%bind compile_config = - let logger = Logger.create () in - let%map conf = - Runtime_config.Constants.load_constants ~logger config_file - in - 
Runtime_config.Constants.compile_config conf - in + let%bind compile_config = load_compile_config config_file in match%bind get_nonce ~compile_config ~rpc:Daemon_rpcs.Get_nonce.rpc account_id port @@ -470,13 +432,7 @@ let status = Command.async ~summary:"Get running daemon status" (Cli_lib.Background_daemon.rpc_init flag ~f:(fun port (config_file, json, performance) -> - let%bind compile_config = - let logger = Logger.create () in - let%map conf = - Runtime_config.Constants.load_constants ~logger config_file - in - Runtime_config.Constants.compile_config conf - in + let%bind compile_config = load_compile_config config_file in Daemon_rpcs.Client.dispatch_pretty_message ~compile_config ~json ~join_error:Fn.id ~error_ctx:"Failed to get status" (module Daemon_rpcs.Types.Status) @@ -492,13 +448,7 @@ let status_clear_hist = Command.async ~summary:"Clear histograms reported in status" (Cli_lib.Background_daemon.rpc_init flag ~f:(fun port (config_file, json, performance) -> - let%bind compile_config = - let logger = Logger.create () in - let%map conf = - Runtime_config.Constants.load_constants ~logger config_file - in - Runtime_config.Constants.compile_config conf - in + let%bind compile_config = load_compile_config config_file in Daemon_rpcs.Client.dispatch_pretty_message ~compile_config ~json ~join_error:Fn.id ~error_ctx:"Failed to clear histograms reported in status" @@ -559,13 +509,7 @@ let batch_send_payments = in let main port (config_file, privkey_path, payments_path) = let open Deferred.Let_syntax in - let%bind compile_config = - let logger = Logger.create () in - let%map conf = - Runtime_config.Constants.load_constants ~logger config_file - in - Runtime_config.Constants.compile_config conf - in + let%bind compile_config = load_compile_config config_file in let%bind keypair = Secrets.Keypair.Terminal_stdin.read_exn ~which:"Mina keypair" privkey_path and infos = get_infos payments_path in @@ -623,13 +567,7 @@ let send_payment_graphql = , config_file ) -> let open Deferred.Let_syntax in - let%bind compile_config = - let logger = Logger.create () in - let%map conf = - Runtime_config.Constants.load_constants ~logger config_file - in - Runtime_config.Constants.compile_config conf - in + let%bind compile_config = load_compile_config config_file in let fee = Option.value ~default:compile_config.default_transaction_fee fee in @@ -665,13 +603,7 @@ let delegate_stake_graphql = ({ Cli_lib.Flag.sender; fee; nonce; memo }, receiver, config_file) -> let open Deferred.Let_syntax in - let%bind compile_config = - let logger = Logger.create () in - let%map conf = - Runtime_config.Constants.load_constants ~logger config_file - in - Runtime_config.Constants.compile_config conf - in + let%bind compile_config = load_compile_config config_file in let fee = Option.value ~default:compile_config.default_transaction_fee fee in @@ -887,13 +819,7 @@ let export_ledger = (Cli_lib.Background_daemon.rpc_init flags ~f:(fun port (config_file, state_hash, plaintext, ledger_kind) -> let open Deferred.Let_syntax in - let%bind compile_config = - let logger = Logger.create () in - let%map conf = - Runtime_config.Constants.load_constants ~logger config_file - in - Runtime_config.Constants.compile_config conf - in + let%bind compile_config = load_compile_config config_file in let check_for_state_hash () = if Option.is_some state_hash then ( Format.eprintf "A state hash should not be given for %s@." 
@@ -1082,13 +1008,7 @@ let snark_job_list = blocks" (Cli_lib.Background_daemon.rpc_init Cli_lib.Flag.conf_file ~f:(fun port config_file -> - let%bind compile_config = - let logger = Logger.create () in - let%map conf = - Runtime_config.Constants.load_constants ~logger config_file - in - Runtime_config.Constants.compile_config conf - in + let%bind compile_config = load_compile_config config_file in match%map Daemon_rpcs.Client.dispatch_join_errors ~compile_config Daemon_rpcs.Snark_job_list.rpc () port @@ -1215,13 +1135,7 @@ let start_tracing = ~summary:"Start async tracing to $config-directory/trace/$pid.trace" (Cli_lib.Background_daemon.rpc_init Cli_lib.Flag.conf_file ~f:(fun port config_file -> - let%bind compile_config = - let logger = Logger.create () in - let%map conf = - Runtime_config.Constants.load_constants ~logger config_file - in - Runtime_config.Constants.compile_config conf - in + let%bind compile_config = load_compile_config config_file in match%map Daemon_rpcs.Client.dispatch ~compile_config Daemon_rpcs.Start_tracing.rpc () port @@ -1235,13 +1149,7 @@ let stop_tracing = Command.async ~summary:"Stop async tracing" (Cli_lib.Background_daemon.rpc_init Cli_lib.Flag.conf_file ~f:(fun port config_file -> - let%bind compile_config = - let logger = Logger.create () in - let%map conf = - Runtime_config.Constants.load_constants ~logger config_file - in - Runtime_config.Constants.compile_config conf - in + let%bind compile_config = load_compile_config config_file in match%map Daemon_rpcs.Client.dispatch ~compile_config Daemon_rpcs.Stop_tracing.rpc () port @@ -1258,13 +1166,7 @@ let start_internal_tracing = $config-directory/internal-tracing/internal-trace.jsonl" (Cli_lib.Background_daemon.rpc_init Cli_lib.Flag.conf_file ~f:(fun port config_file -> - let%bind compile_config = - let logger = Logger.create () in - let%map conf = - Runtime_config.Constants.load_constants ~logger config_file - in - Runtime_config.Constants.compile_config conf - in + let%bind compile_config = load_compile_config config_file in match%map Daemon_rpcs.Client.dispatch ~compile_config Daemon_rpcs.Start_internal_tracing.rpc () port @@ -1278,13 +1180,7 @@ let stop_internal_tracing = Command.async ~summary:"Stop internal tracing" (Cli_lib.Background_daemon.rpc_init Cli_lib.Flag.conf_file ~f:(fun port config_file -> - let%bind compile_config = - let logger = Logger.create () in - let%map conf = - Runtime_config.Constants.load_constants ~logger config_file - in - Runtime_config.Constants.compile_config conf - in + let%bind compile_config = load_compile_config config_file in match%map Daemon_rpcs.Client.dispatch ~compile_config Daemon_rpcs.Stop_internal_tracing.rpc () port @@ -1777,12 +1673,7 @@ let generate_libp2p_keypair_do privkey_path ~config_file = (let open Deferred.Let_syntax in (* FIXME: I'd like to accumulate messages into this logger and only dump them out in failure paths. *) let logger = Logger.null () in - let%bind compile_config = - let%map conf = - Runtime_config.Constants.load_constants ~logger config_file - in - Runtime_config.Constants.compile_config conf - in + let%bind compile_config = load_compile_config config_file in (* Using the helper only for keypair generation requires no state. 
*) File_system.with_temp_dir "mina-generate-libp2p-keypair" ~f:(fun tmpd -> match%bind @@ -1817,12 +1708,7 @@ let dump_libp2p_keypair_do privkey_path ~config_file = Deferred.ignore_m (let open Deferred.Let_syntax in let logger = Logger.null () in - let%bind compile_config = - let%map conf = - Runtime_config.Constants.load_constants ~logger config_file - in - Runtime_config.Constants.compile_config conf - in + let%bind compile_config = load_compile_config config_file in (* Using the helper only for keypair generation requires no state. *) File_system.with_temp_dir "mina-dump-libp2p-keypair" ~f:(fun tmpd -> @@ -1862,13 +1748,7 @@ let trustlist_add = (Args.zip2 Cli_lib.Flag.conf_file trustlist_ip_flag) ~f:(fun port (config_file, trustlist_ip) -> let trustlist_ip_string = Unix.Cidr.to_string trustlist_ip in - let%bind compile_config = - let logger = Logger.create () in - let%map conf = - Runtime_config.Constants.load_constants ~logger config_file - in - Runtime_config.Constants.compile_config conf - in + let%bind compile_config = load_compile_config config_file in match%map Client.dispatch ~compile_config Add_trustlist.rpc trustlist_ip port with @@ -1889,13 +1769,7 @@ let trustlist_remove = (Args.zip2 Cli_lib.Flag.conf_file trustlist_ip_flag) ~f:(fun port (config_file, trustlist_ip) -> let trustlist_ip_string = Unix.Cidr.to_string trustlist_ip in - let%bind compile_config = - let logger = Logger.create () in - let%map conf = - Runtime_config.Constants.load_constants ~logger config_file - in - Runtime_config.Constants.compile_config conf - in + let%bind compile_config = load_compile_config config_file in match%map Client.dispatch ~compile_config Remove_trustlist.rpc trustlist_ip port @@ -1914,13 +1788,7 @@ let trustlist_list = Command.async ~summary:"List the CIDR masks in the trustlist" (Cli_lib.Background_daemon.rpc_init Cli_lib.Flag.conf_file ~f:(fun port config_file -> - let%bind compile_config = - let logger = Logger.create () in - let%map conf = - Runtime_config.Constants.load_constants ~logger config_file - in - Runtime_config.Constants.compile_config conf - in + let%bind compile_config = load_compile_config config_file in match%map Client.dispatch ~compile_config Get_trustlist.rpc () port with @@ -2113,13 +1981,7 @@ let node_status = Option.map peers ~f:(fun peers -> List.map peers ~f:Mina_net2.Multiaddr.of_string ) in - let%bind compile_config = - let logger = Logger.create () in - let%map conf = - Runtime_config.Constants.load_constants ~logger config_file - in - Runtime_config.Constants.compile_config conf - in + let%bind compile_config = load_compile_config config_file in match%map Daemon_rpcs.Client.dispatch ~compile_config Daemon_rpcs.Get_node_status.rpc peer_ids_opt port @@ -2145,13 +2007,7 @@ let object_lifetime_statistics = Command.async ~summary:"Dump internal object lifetime statistics to JSON" (Cli_lib.Background_daemon.rpc_init Cli_lib.Flag.conf_file ~f:(fun port config_file -> - let%bind compile_config = - let logger = Logger.create () in - let%map conf = - Runtime_config.Constants.load_constants ~logger config_file - in - Runtime_config.Constants.compile_config conf - in + let%bind compile_config = load_compile_config config_file in match%map Client.dispatch ~compile_config Get_object_lifetime_statistics.rpc () port @@ -2263,13 +2119,7 @@ let archive_blocks = in let add_to_success_file = output_file_line success_file in let add_to_failure_file = output_file_line failure_file in - let%bind compile_config = - let logger = Logger.create () in - let%map conf = - 
Runtime_config.Constants.load_constants ~logger config_file - in - Runtime_config.Constants.compile_config conf - in + let%bind compile_config = load_compile_config config_file in let send_precomputed_block = make_send_block ~graphql_make:(fun block -> @@ -2393,13 +2243,7 @@ let chain_id_inputs = (Cli_lib.Background_daemon.rpc_init Cli_lib.Flag.conf_file ~f:(fun port config_file -> let open Daemon_rpcs in - let%bind compile_config = - let logger = Logger.create () in - let%map conf = - Runtime_config.Constants.load_constants ~logger config_file - in - Runtime_config.Constants.compile_config conf - in + let%bind compile_config = load_compile_config config_file in match%map Client.dispatch ~compile_config Chain_id_inputs.rpc () port with @@ -2638,13 +2482,7 @@ module Visualization = struct (Args.zip2 Cli_lib.Flag.conf_file Command.Param.(anon @@ ("output-filepath" %: string)) ) ~f:(fun port (config_file, filename) -> - let%bind compile_config = - let logger = Logger.create () in - let%map conf = - Runtime_config.Constants.load_constants ~logger config_file - in - Runtime_config.Constants.compile_config conf - in + let%bind compile_config = load_compile_config config_file in let%map message = match%map Daemon_rpcs.Client.dispatch ~compile_config rpc filename port diff --git a/src/app/cli/src/init/mina_run.ml b/src/app/cli/src/init/mina_run.ml index a3b58a81b21..94851641819 100644 --- a/src/app/cli/src/init/mina_run.ml +++ b/src/app/cli/src/init/mina_run.ml @@ -367,12 +367,8 @@ let setup_local_server ?(client_trustlist = []) ?rest_server_port let config = { Itn_logger.rpc_handshake_timeout = compile_config.rpc_handshake_timeout - ; rpc_heartbeat_timeout = - compile_config.rpc_heartbeat_timeout |> Time.Span.to_sec - |> Time_ns.Span.of_sec - ; rpc_heartbeat_send_every = - compile_config.rpc_heartbeat_send_every |> Time.Span.to_sec - |> Time_ns.Span.of_sec + ; rpc_heartbeat_timeout = compile_config.rpc_heartbeat_timeout + ; rpc_heartbeat_send_every = compile_config.rpc_heartbeat_send_every } in return @@ -612,15 +608,8 @@ let setup_local_server ?(client_trustlist = []) ?rest_server_port ~handshake_timeout:compile_config.rpc_handshake_timeout ~heartbeat_config: (Rpc.Connection.Heartbeat_config.create - ~timeout: - (Time_ns.Span.of_sec - (Time.Span.to_sec - compile_config.rpc_heartbeat_timeout ) ) - ~send_every: - (Time_ns.Span.of_sec - (Time.Span.to_sec - compile_config.rpc_heartbeat_send_every ) ) - () ) + ~timeout:compile_config.rpc_heartbeat_timeout + ~send_every:compile_config.rpc_heartbeat_send_every () ) reader writer ~implementations: (Rpc.Implementations.create_exn diff --git a/src/lib/daemon_rpcs/client.ml b/src/lib/daemon_rpcs/client.ml index 050a3491894..cb84061f574 100644 --- a/src/lib/daemon_rpcs/client.ml +++ b/src/lib/daemon_rpcs/client.ml @@ -17,14 +17,8 @@ let dispatch ~(compile_config : Mina_compile_config.t) rpc query ~handshake_timeout:compile_config.rpc_handshake_timeout ~heartbeat_config: (Rpc.Connection.Heartbeat_config.create - ~timeout: - ( compile_config.rpc_heartbeat_timeout |> Time.Span.to_sec - |> Time_ns.Span.of_sec ) - ~send_every: - ( compile_config.rpc_heartbeat_send_every - |> Time.Span.to_sec |> Time_ns.Span.of_sec ) - () ) - r w + ~timeout:compile_config.rpc_heartbeat_timeout + ~send_every:compile_config.rpc_heartbeat_send_every () ) r w ~connection_state:(fun _ -> ()) with | Error exn -> diff --git a/src/lib/mina_compile_config/mina_compile_config.ml b/src/lib/mina_compile_config/mina_compile_config.ml index bc5f7c30363..64b1cba2351 100644 --- 
a/src/lib/mina_compile_config/mina_compile_config.ml +++ b/src/lib/mina_compile_config/mina_compile_config.ml @@ -38,8 +38,8 @@ type t = ; network_id : string ; zkapp_cmd_limit : int option ; rpc_handshake_timeout : Time.Span.t - ; rpc_heartbeat_timeout : Time.Span.t - ; rpc_heartbeat_send_every : Time.Span.t + ; rpc_heartbeat_timeout : Time_ns.Span.t + ; rpc_heartbeat_send_every : Time_ns.Span.t ; zkapps_disabled : bool } [@@deriving sexp_of] @@ -62,9 +62,9 @@ let make (inputs : Inputs.t) = ; vrf_poll_interval = Float.of_int inputs.vrf_poll_interval_ms |> Time.Span.of_ms ; rpc_handshake_timeout = Time.Span.of_sec inputs.rpc_handshake_timeout_sec - ; rpc_heartbeat_timeout = Time.Span.of_sec inputs.rpc_heartbeat_timeout_sec + ; rpc_heartbeat_timeout = Time_ns.Span.of_sec inputs.rpc_heartbeat_timeout_sec ; rpc_heartbeat_send_every = - Time.Span.of_sec inputs.rpc_heartbeat_send_every_sec + Time_ns.Span.of_sec inputs.rpc_heartbeat_send_every_sec ; network_id = inputs.network_id ; zkapp_cmd_limit = inputs.zkapp_cmd_limit ; zkapps_disabled = inputs.zkapps_disabled @@ -89,9 +89,9 @@ let to_yojson t = ; ( "rpc_handshake_timeout" , `Float (Time.Span.to_sec t.rpc_handshake_timeout) ) ; ( "rpc_heartbeat_timeout" - , `Float (Time.Span.to_sec t.rpc_heartbeat_timeout) ) + , `Float (Time_ns.Span.to_sec t.rpc_heartbeat_timeout) ) ; ( "rpc_heartbeat_send_every" - , `Float (Time.Span.to_sec t.rpc_heartbeat_send_every) ) + , `Float (Time_ns.Span.to_sec t.rpc_heartbeat_send_every) ) ; ("network_id", `String t.network_id) ; ( "zkapp_cmd_limit" , Option.value_map ~default:`Null ~f:(fun x -> `Int x) t.zkapp_cmd_limit diff --git a/src/lib/node_config/for_unit_tests/node_config_for_unit_tests.ml b/src/lib/node_config/for_unit_tests/node_config_for_unit_tests.ml index 80f4c4fc397..78569141460 100644 --- a/src/lib/node_config/for_unit_tests/node_config_for_unit_tests.ml +++ b/src/lib/node_config/for_unit_tests/node_config_for_unit_tests.ml @@ -91,8 +91,6 @@ let max_action_elements = 100 let zkapp_cmd_limit_hardcap = 128 -(* These are fine to be non-configurable *) - let zkapps_disabled = false let rpc_handshake_timeout_sec = 60.0 diff --git a/src/lib/snark_worker/functor.ml b/src/lib/snark_worker/functor.ml index f4ea0d862f5..eedf2305eee 100644 --- a/src/lib/snark_worker/functor.ml +++ b/src/lib/snark_worker/functor.ml @@ -125,15 +125,10 @@ module Make (Inputs : Intf.Inputs_intf) : ~handshake_timeout:compile_config.rpc_handshake_timeout ~heartbeat_config: (Rpc.Connection.Heartbeat_config.create - ~timeout: - ( compile_config.rpc_heartbeat_timeout |> Time.Span.to_sec - |> Time_ns.Span.of_sec ) - ~send_every: - ( compile_config.rpc_heartbeat_send_every |> Time.Span.to_sec - |> Time_ns.Span.of_sec ) - () ) - (Tcp.Where_to_connect.of_host_and_port address) - (fun conn -> Rpc.Rpc.dispatch rpc conn query) + ~timeout:compile_config.rpc_heartbeat_timeout + ~send_every:compile_config.rpc_heartbeat_send_every () ) + (Tcp.Where_to_connect.of_host_and_port address) (fun conn -> + Rpc.Rpc.dispatch rpc conn query ) in match res with | Error exn -> From 2b5922a2a885cc8591b368265f9256ab86c7a9f8 Mon Sep 17 00:00:00 2001 From: dkijania Date: Thu, 10 Oct 2024 18:52:23 +0200 Subject: [PATCH 088/234] adjust port --- buildkite/src/Jobs/Test/ArchiveNodeUnitTest.dhall | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/buildkite/src/Jobs/Test/ArchiveNodeUnitTest.dhall b/buildkite/src/Jobs/Test/ArchiveNodeUnitTest.dhall index 2cd73c0090d..7ed13279f08 100644 --- a/buildkite/src/Jobs/Test/ArchiveNodeUnitTest.dhall +++ 
b/buildkite/src/Jobs/Test/ArchiveNodeUnitTest.dhall @@ -50,7 +50,7 @@ in Pipeline.build [ "POSTGRES_PASSWORD=${password}" , "POSTGRES_USER=${user}" , "POSTGRES_DB=${db}" - , "MINA_TEST_POSTGRES=postgres://${user}:${password}@localhost:5434/${db}" + , "MINA_TEST_POSTGRES=postgres://${user}:${password}@localhost:5433/${db}" , "GO=/usr/lib/go/bin/go" , "DUNE_INSTRUMENT_WITH=bisect_ppx" , "COVERALLS_TOKEN" From 15fd7ca55bd471fa0894735af07ddaf66a541f61 Mon Sep 17 00:00:00 2001 From: dkijania Date: Thu, 10 Oct 2024 19:29:07 +0200 Subject: [PATCH 089/234] Build rosetta mainnet/devnet docker --- buildkite/src/Command/MinaArtifact.dhall | 37 +++++++++++-------- .../Jobs/Test/RosettaIntegrationTests.dhall | 2 +- .../Test/RosettaIntegrationTestsLong.dhall | 2 +- scripts/debian/build.sh | 3 ++ 4 files changed, 27 insertions(+), 17 deletions(-) diff --git a/buildkite/src/Command/MinaArtifact.dhall b/buildkite/src/Command/MinaArtifact.dhall index 28793fb3d22..5b91eb3fec5 100644 --- a/buildkite/src/Command/MinaArtifact.dhall +++ b/buildkite/src/Command/MinaArtifact.dhall @@ -209,21 +209,28 @@ let docker_step } ] , Rosetta = - [ DockerImage.ReleaseSpec::{ - , deps = deps - , service = "mina-rosetta" - , network = "berkeley" - , build_flags = spec.buildFlags - , deb_repo = DebianRepo.Type.Local - , deb_profile = spec.profile - , deb_codename = - "${DebianVersions.lowerName spec.debVersion}" - , step_key = - "rosetta-${DebianVersions.lowerName - spec.debVersion}${BuildFlags.toLabelSegment - spec.buildFlags}-docker-image" - } - ] + Prelude.List.map + Network.Type + DockerImage.ReleaseSpec.Type + ( \(n : Network.Type) + -> DockerImage.ReleaseSpec::{ + , deps = deps + , service = + Artifacts.dockerName Artifacts.Type.Rosetta + , network = Network.lowerName n + , deb_codename = + "${DebianVersions.lowerName spec.debVersion}" + , deb_profile = spec.profile + , build_flags = spec.buildFlags + , deb_repo = DebianRepo.Type.Local + , step_key = + "rosetta-${Network.lowerName + n}-${DebianVersions.lowerName + spec.debVersion}${BuildFlags.toLabelSegment + spec.buildFlags}-docker-image" + } + ) + spec.networks , ZkappTestTransaction = [ DockerImage.ReleaseSpec::{ , deps = deps diff --git a/buildkite/src/Jobs/Test/RosettaIntegrationTests.dhall b/buildkite/src/Jobs/Test/RosettaIntegrationTests.dhall index 4d7ae4b9c3b..21397b34be7 100644 --- a/buildkite/src/Jobs/Test/RosettaIntegrationTests.dhall +++ b/buildkite/src/Jobs/Test/RosettaIntegrationTests.dhall @@ -71,7 +71,7 @@ in Pipeline.build , depends_on = Dockers.dependsOn Dockers.Type.Bullseye - (None Network.Type) + (Some Network.Type.Berkeley) Profiles.Type.Standard Artifacts.Type.Rosetta } diff --git a/buildkite/src/Jobs/Test/RosettaIntegrationTestsLong.dhall b/buildkite/src/Jobs/Test/RosettaIntegrationTestsLong.dhall index 10ff09fea4e..9fb8d631225 100644 --- a/buildkite/src/Jobs/Test/RosettaIntegrationTestsLong.dhall +++ b/buildkite/src/Jobs/Test/RosettaIntegrationTestsLong.dhall @@ -66,7 +66,7 @@ in Pipeline.build , depends_on = Dockers.dependsOn Dockers.Type.Bullseye - (None Network.Type) + (Some Network.Type.Berkeley) Profiles.Type.Standard Artifacts.Type.Rosetta } diff --git a/scripts/debian/build.sh b/scripts/debian/build.sh index 1df887ce9d9..1c1fff73126 100755 --- a/scripts/debian/build.sh +++ b/scripts/debian/build.sh @@ -28,6 +28,9 @@ if [ $# -eq 0 ] build_daemon_berkeley_deb build_daemon_mainnet_deb build_daemon_devnet_deb + build_rosetta_berkeley_deb + build_rosetta_mainnet_deb + build_rosetta_devnet_deb build_test_executive_deb 
build_functional_test_suite_deb build_zkapp_test_transaction_deb From 5faff120896ec8f8c5371d38557d2acd4a1cfaf8 Mon Sep 17 00:00:00 2001 From: dkijania Date: Thu, 10 Oct 2024 21:20:12 +0200 Subject: [PATCH 090/234] fix port again --- buildkite/src/Jobs/Test/ArchiveNodeUnitTest.dhall | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/buildkite/src/Jobs/Test/ArchiveNodeUnitTest.dhall b/buildkite/src/Jobs/Test/ArchiveNodeUnitTest.dhall index 7ed13279f08..041ce56bbd7 100644 --- a/buildkite/src/Jobs/Test/ArchiveNodeUnitTest.dhall +++ b/buildkite/src/Jobs/Test/ArchiveNodeUnitTest.dhall @@ -50,7 +50,7 @@ in Pipeline.build [ "POSTGRES_PASSWORD=${password}" , "POSTGRES_USER=${user}" , "POSTGRES_DB=${db}" - , "MINA_TEST_POSTGRES=postgres://${user}:${password}@localhost:5433/${db}" + , "MINA_TEST_POSTGRES=postgres://${user}:${password}@localhost:5432/${db}" , "GO=/usr/lib/go/bin/go" , "DUNE_INSTRUMENT_WITH=bisect_ppx" , "COVERALLS_TOKEN" From 2ba4e3dd6f86f91683b42f5ba12bd4869cb37b89 Mon Sep 17 00:00:00 2001 From: svv232 Date: Tue, 17 Sep 2024 21:24:05 -0400 Subject: [PATCH 091/234] refactoring delete in capnp proto --- src/libp2p_ipc/libp2p_ipc.capnp | 5 +++-- 1 file changed, 3 insertions(+), 2 deletions(-) diff --git a/src/libp2p_ipc/libp2p_ipc.capnp b/src/libp2p_ipc/libp2p_ipc.capnp index 8e603d46178..fca0b6b1139 100644 --- a/src/libp2p_ipc/libp2p_ipc.capnp +++ b/src/libp2p_ipc/libp2p_ipc.capnp @@ -332,7 +332,7 @@ struct Libp2pHelperInterface { result @1 :ValidationResult; } - struct DeleteResource { + struct RemoveResource { ids @0 :List(RootBlockId); } @@ -420,7 +420,7 @@ struct Libp2pHelperInterface { union { validation @1 :Libp2pHelperInterface.Validation; addResource @2 :Libp2pHelperInterface.AddResource; - deleteResource @3 :Libp2pHelperInterface.DeleteResource; + removeResource @3 :Libp2pHelperInterface.RemoveResource; downloadResource @4 :Libp2pHelperInterface.DownloadResource; heartbeatPeer @5 :Libp2pHelperInterface.HeartbeatPeer; } @@ -475,6 +475,7 @@ struct DaemonInterface { struct ResourceUpdate { type @0 :ResourceUpdateType; ids @1 :List(RootBlockId); + tag @2 :UInt8; } struct PushMessage { From 3139f54233aeac01080f7aae36cc3962768bf05c Mon Sep 17 00:00:00 2001 From: svv232 Date: Tue, 17 Sep 2024 21:27:02 -0400 Subject: [PATCH 092/234] new interfaces for bitswap message commands --- src/libp2p_ipc/libp2p_ipc.mli | 7 ++++++- 1 file changed, 6 insertions(+), 1 deletion(-) diff --git a/src/libp2p_ipc/libp2p_ipc.mli b/src/libp2p_ipc/libp2p_ipc.mli index bdb960ffaf8..99e9c75acaa 100644 --- a/src/libp2p_ipc/libp2p_ipc.mli +++ b/src/libp2p_ipc/libp2p_ipc.mli @@ -36,7 +36,7 @@ module Subscription_id : sig val create : unit -> t end -val undefined_union : context:string -> int -> unit +val undefined_union : context:string -> int -> 'a val unsafe_parse_peer_id : peer_id -> Peer.Id.t @@ -97,6 +97,11 @@ val create_validation_push_message : val create_add_resource_push_message : tag:int -> data:string -> push_message +val create_download_resource_push_message : + tag:int -> ids:string list -> push_message + +val create_remove_resource_push_message : ids:string list -> push_message + val create_heartbeat_peer_push_message : peer_id:Peer.Id.t -> push_message val push_message_to_outgoing_message : push_message -> outgoing_message From 79c8f569faf437915a22c3ff5555f2086fb26122 Mon Sep 17 00:00:00 2001 From: svv232 Date: Tue, 17 Sep 2024 21:27:43 -0400 Subject: [PATCH 093/234] adding message definitions from bitswap pr --- src/libp2p_ipc/libp2p_ipc.ml | 34 ++++++++++++++++++++++++++++++++++ 
1 file changed, 34 insertions(+) diff --git a/src/libp2p_ipc/libp2p_ipc.ml b/src/libp2p_ipc/libp2p_ipc.ml index 6258b436f34..d66bef5b895 100644 --- a/src/libp2p_ipc/libp2p_ipc.ml +++ b/src/libp2p_ipc/libp2p_ipc.ml @@ -265,6 +265,40 @@ let push_message_to_outgoing_message request = Builder.Libp2pHelperInterface.Message.( builder_op push_message_set_builder request) + let create_remove_resource_push_message ~ids = + let ids = + List.map ids ~f:(fun id -> + build' + (module Builder.RootBlockId) + Builder.RootBlockId.(op blake2b_hash_set id) ) + in + build' + (module Builder.Libp2pHelperInterface.PushMessage) + Builder.Libp2pHelperInterface.PushMessage.( + builder_op header_set_builder (create_push_message_header ()) + *> reader_op remove_resource_set_reader + (build + (module Builder.Libp2pHelperInterface.RemoveResource) + Builder.Libp2pHelperInterface.RemoveResource.( + list_op ids_set_list ids) )) + + let create_download_resource_push_message ~tag ~ids = + let ids = + List.map ids ~f:(fun id -> + build' + (module Builder.RootBlockId) + Builder.RootBlockId.(op blake2b_hash_set id) ) + in + build' + (module Builder.Libp2pHelperInterface.PushMessage) + Builder.Libp2pHelperInterface.PushMessage.( + builder_op header_set_builder (create_push_message_header ()) + *> reader_op download_resource_set_reader + (build + (module Builder.Libp2pHelperInterface.DownloadResource) + Builder.Libp2pHelperInterface.DownloadResource.( + op tag_set_exn tag *> list_op ids_set_list ids) )) + let create_add_resource_push_message ~tag ~data = build' (module Builder.Libp2pHelperInterface.PushMessage) From c0032ec1ef8acafea7e9100561ba681478272584 Mon Sep 17 00:00:00 2001 From: svv232 Date: Tue, 17 Sep 2024 21:28:21 -0400 Subject: [PATCH 094/234] fmt --- src/libp2p_ipc/libp2p_ipc.ml | 62 ++++++++++++++++++------------------ 1 file changed, 31 insertions(+), 31 deletions(-) diff --git a/src/libp2p_ipc/libp2p_ipc.ml b/src/libp2p_ipc/libp2p_ipc.ml index d66bef5b895..d1f0d40f81b 100644 --- a/src/libp2p_ipc/libp2p_ipc.ml +++ b/src/libp2p_ipc/libp2p_ipc.ml @@ -265,39 +265,39 @@ let push_message_to_outgoing_message request = Builder.Libp2pHelperInterface.Message.( builder_op push_message_set_builder request) - let create_remove_resource_push_message ~ids = - let ids = - List.map ids ~f:(fun id -> - build' - (module Builder.RootBlockId) - Builder.RootBlockId.(op blake2b_hash_set id) ) - in +let create_remove_resource_push_message ~ids = + let ids = + List.map ids ~f:(fun id -> build' - (module Builder.Libp2pHelperInterface.PushMessage) - Builder.Libp2pHelperInterface.PushMessage.( - builder_op header_set_builder (create_push_message_header ()) - *> reader_op remove_resource_set_reader - (build - (module Builder.Libp2pHelperInterface.RemoveResource) - Builder.Libp2pHelperInterface.RemoveResource.( - list_op ids_set_list ids) )) - - let create_download_resource_push_message ~tag ~ids = - let ids = - List.map ids ~f:(fun id -> - build' - (module Builder.RootBlockId) - Builder.RootBlockId.(op blake2b_hash_set id) ) - in + (module Builder.RootBlockId) + Builder.RootBlockId.(op blake2b_hash_set id) ) + in + build' + (module Builder.Libp2pHelperInterface.PushMessage) + Builder.Libp2pHelperInterface.PushMessage.( + builder_op header_set_builder (create_push_message_header ()) + *> reader_op remove_resource_set_reader + (build + (module Builder.Libp2pHelperInterface.RemoveResource) + Builder.Libp2pHelperInterface.RemoveResource.( + list_op ids_set_list ids) )) + +let create_download_resource_push_message ~tag ~ids = + let ids = + 
List.map ids ~f:(fun id -> build' - (module Builder.Libp2pHelperInterface.PushMessage) - Builder.Libp2pHelperInterface.PushMessage.( - builder_op header_set_builder (create_push_message_header ()) - *> reader_op download_resource_set_reader - (build - (module Builder.Libp2pHelperInterface.DownloadResource) - Builder.Libp2pHelperInterface.DownloadResource.( - op tag_set_exn tag *> list_op ids_set_list ids) )) + (module Builder.RootBlockId) + Builder.RootBlockId.(op blake2b_hash_set id) ) + in + build' + (module Builder.Libp2pHelperInterface.PushMessage) + Builder.Libp2pHelperInterface.PushMessage.( + builder_op header_set_builder (create_push_message_header ()) + *> reader_op download_resource_set_reader + (build + (module Builder.Libp2pHelperInterface.DownloadResource) + Builder.Libp2pHelperInterface.DownloadResource.( + op tag_set_exn tag *> list_op ids_set_list ids) )) let create_add_resource_push_message ~tag ~data = build' From 716b250f7c795ed0810712072544d67ebef37559 Mon Sep 17 00:00:00 2001 From: svv232 Date: Tue, 17 Sep 2024 21:37:51 -0400 Subject: [PATCH 095/234] fixing undefined union call after type signature change --- src/lib/mina_net2/libp2p_helper.ml | 3 +-- 1 file changed, 1 insertion(+), 2 deletions(-) diff --git a/src/lib/mina_net2/libp2p_helper.ml b/src/lib/mina_net2/libp2p_helper.ml index 2ef08749016..116a85be3c1 100644 --- a/src/lib/mina_net2/libp2p_helper.ml +++ b/src/lib/mina_net2/libp2p_helper.ml @@ -221,8 +221,7 @@ let handle_incoming_message t msg ~handle_push_message = handle_push_message t (DaemonInterface.PushMessage.get push_msg) ) ) | Undefined n -> - Libp2p_ipc.undefined_union ~context:"DaemonInterface.Message" n ; - Deferred.unit + Libp2p_ipc.undefined_union ~context:"DaemonInterface.Message" n let spawn ?(allow_multiple_instances = false) ~logger ~pids ~conf_dir ~handle_push_message () = From b2796f728800a12c6d4eb58bc58b7f32fe71733c Mon Sep 17 00:00:00 2001 From: svv232 Date: Tue, 17 Sep 2024 21:56:31 -0400 Subject: [PATCH 096/234] refactoring delete to remove --- .../src/libp2p_helper/bitswap_msg.go | 16 ++++++++-------- .../src/libp2p_helper/bitswap_test.go | 8 ++++---- .../src/libp2p_helper/incoming_msg.go | 2 +- 3 files changed, 13 insertions(+), 13 deletions(-) diff --git a/src/app/libp2p_helper/src/libp2p_helper/bitswap_msg.go b/src/app/libp2p_helper/src/libp2p_helper/bitswap_msg.go index ab6f18ec140..d41e67a1306 100644 --- a/src/app/libp2p_helper/src/libp2p_helper/bitswap_msg.go +++ b/src/app/libp2p_helper/src/libp2p_helper/bitswap_msg.go @@ -27,12 +27,12 @@ func (m AddResourcePush) handle(app *app) { } } -type DeleteResourcePushT = ipc.Libp2pHelperInterface_DeleteResource -type DeleteResourcePush DeleteResourcePushT +type RemoveResourcePushT = ipc.Libp2pHelperInterface_RemoveResource +type RemoveResourcePush RemoveResourcePushT -func fromDeleteResourcePush(m ipcPushMessage) (pushMessage, error) { - i, err := m.DeleteResource() - return DeleteResourcePush(i), err +func fromRemoveResourcePush(m ipcPushMessage) (pushMessage, error) { + i, err := m.RemoveResource() + return RemoveResourcePush(i), err } func extractRootBlockList(l ipc.RootBlockId_List) ([]root, error) { @@ -52,14 +52,14 @@ func extractRootBlockList(l ipc.RootBlockId_List) ([]root, error) { return ids, nil } -func (m DeleteResourcePush) handle(app *app) { - idsM, err := DeleteResourcePushT(m).Ids() +func (m RemoveResourcePush) handle(app *app) { + idsM, err := RemoveResourcePushT(m).Ids() var links []root if err == nil { links, err = extractRootBlockList(idsM) } if err != nil { - 
app.P2p.Logger.Errorf("DeleteResourcePush.handle: error %s", err) + app.P2p.Logger.Errorf("RemoveResourcePush.handle: error %s", err) return } app.bitswapCtx.deleteCmds <- bitswapDeleteCmd{links} diff --git a/src/app/libp2p_helper/src/libp2p_helper/bitswap_test.go b/src/app/libp2p_helper/src/libp2p_helper/bitswap_test.go index ec96f2ccd67..5af46623601 100644 --- a/src/app/libp2p_helper/src/libp2p_helper/bitswap_test.go +++ b/src/app/libp2p_helper/src/libp2p_helper/bitswap_test.go @@ -71,12 +71,12 @@ func getRootIds(ids ipc.RootBlockId_List) ([]BitswapBlockLink, error) { return links, nil } -func deleteResource(n testNode, root root) error { +func removeResource(n testNode, root root) error { _, seg, err := capnp.NewMessage(capnp.SingleSegment(nil)) if err != nil { return err } - m, err := ipc.NewRootLibp2pHelperInterface_DeleteResource(seg) + m, err := ipc.NewRootLibp2pHelperInterface_RemoveResource(seg) if err != nil { return err } @@ -88,7 +88,7 @@ func deleteResource(n testNode, root root) error { if err != nil { return err } - DeleteResourcePush(m).handle(n.node) + RemoveResourcePush(m).handle(n.node) return nil } @@ -393,7 +393,7 @@ func (conf bitswapTestConfig) execute(nodes []testNode, delayBeforeDownload bool if !resourceReplicated[ni] { continue } - err = deleteResource(nodes[ni], roots[ni]) + err = removeResource(nodes[ni], roots[ni]) if err != nil { return fmt.Errorf("Error removing own resources: %v", err) } diff --git a/src/app/libp2p_helper/src/libp2p_helper/incoming_msg.go b/src/app/libp2p_helper/src/libp2p_helper/incoming_msg.go index a4472c443c4..d7e7c0f88dc 100644 --- a/src/app/libp2p_helper/src/libp2p_helper/incoming_msg.go +++ b/src/app/libp2p_helper/src/libp2p_helper/incoming_msg.go @@ -34,7 +34,7 @@ var rpcRequestExtractors = map[ipc.Libp2pHelperInterface_RpcRequest_Which]extrac var pushMesssageExtractors = map[ipc.Libp2pHelperInterface_PushMessage_Which]extractPushMessage{ ipc.Libp2pHelperInterface_PushMessage_Which_addResource: fromAddResourcePush, - ipc.Libp2pHelperInterface_PushMessage_Which_deleteResource: fromDeleteResourcePush, + ipc.Libp2pHelperInterface_PushMessage_Which_removeResource: fromRemoveResourcePush, ipc.Libp2pHelperInterface_PushMessage_Which_downloadResource: fromDownloadResourcePush, ipc.Libp2pHelperInterface_PushMessage_Which_validation: fromValidationPush, ipc.Libp2pHelperInterface_PushMessage_Which_heartbeatPeer: fromHeartbeatPeerPush, From 53bef580dc9f3fcc202a96dbe624eedd3ac778df Mon Sep 17 00:00:00 2001 From: georgeee Date: Mon, 14 Oct 2024 13:45:38 +0000 Subject: [PATCH 097/234] Use tag in Go's part of IPC Fix sending resource update to provide tag accourding to the new Capnproto interface. 
--- .../src/libp2p_helper/bitswap.go | 71 +++------------ .../src/libp2p_helper/bitswap_delete.go | 87 +++++++++++++++++++ .../src/libp2p_helper/bitswap_downloader.go | 10 +-- .../libp2p_helper/src/libp2p_helper/msg.go | 3 +- 4 files changed, 106 insertions(+), 65 deletions(-) create mode 100644 src/app/libp2p_helper/src/libp2p_helper/bitswap_delete.go diff --git a/src/app/libp2p_helper/src/libp2p_helper/bitswap.go b/src/app/libp2p_helper/src/libp2p_helper/bitswap.go index f12063ed48b..8fd76b2a7aa 100644 --- a/src/app/libp2p_helper/src/libp2p_helper/bitswap.go +++ b/src/app/libp2p_helper/src/libp2p_helper/bitswap.go @@ -12,7 +12,6 @@ import ( blocks "github.com/ipfs/go-block-format" "github.com/ipfs/go-cid" exchange "github.com/ipfs/go-ipfs-exchange-interface" - ipld "github.com/ipfs/go-ipld-format" ) type bitswapDeleteCmd struct { @@ -91,62 +90,14 @@ func announceNewRootBlock(ctx context.Context, engine *bitswap.Bitswap, storage return storage.SetStatus(ctx, root, codanet.Full) } -func (bs *BitswapCtx) deleteRoot(root BitswapBlockLink) error { - if err := bs.storage.SetStatus(bs.ctx, root, codanet.Deleting); err != nil { - return err - } - ClearRootDownloadState(bs, root) - allDescendants := []BitswapBlockLink{root} - viewBlockF := func(b []byte) error { - links, _, err := ReadBitswapBlock(b) - if err == nil { - for _, l := range links { - var l2 BitswapBlockLink - copy(l2[:], l[:]) - allDescendants = append(allDescendants, l2) - } - } - return err - } - for _, block := range allDescendants { - if err := bs.storage.ViewBlock(bs.ctx, block, viewBlockF); err != nil && err != (ipld.ErrNotFound{Cid: codanet.BlockHashToCid(block)}) { - return err - } - } - if err := bs.storage.DeleteBlocks(bs.ctx, allDescendants); err != nil { - return err - } - return bs.storage.DeleteStatus(bs.ctx, root) -} - -func ClearRootDownloadState(bs BitswapState, root root) { - rootStates := bs.RootDownloadStates() - state, has := rootStates[root] - if !has { - return - } - nodeParams := bs.NodeDownloadParams() - delete(rootStates, root) - state.allDescendants.ForEach(func(c cid.Cid) error { - np, hasNp := nodeParams[c] - if hasNp { - delete(np, root) - if len(np) == 0 { - delete(nodeParams, c) - } - } - return nil - }) - state.cancelF() +func (bs *BitswapCtx) SendResourceUpdate(type_ ipc.ResourceUpdateType, tag BitswapDataTag, root root) { + bs.SendResourceUpdates(type_, tag, root) } -func (bs *BitswapCtx) SendResourceUpdate(type_ ipc.ResourceUpdateType, root root) { - bs.SendResourceUpdates(type_, root) -} -func (bs *BitswapCtx) SendResourceUpdates(type_ ipc.ResourceUpdateType, roots ...root) { +func (bs *BitswapCtx) SendResourceUpdates(type_ ipc.ResourceUpdateType, tag BitswapDataTag, roots ...root) { // Non-blocking upcall sending select { - case bs.outMsgChan <- mkResourceUpdatedUpcall(type_, roots): + case bs.outMsgChan <- mkResourceUpdatedUpcall(type_, tag, roots): default: for _, root := range roots { bitswapLogger.Errorf("Failed to send resource update of type %d"+ @@ -242,25 +193,27 @@ func (bs *BitswapCtx) Loop() { ClearRootDownloadState(bs, root) case cmd := <-bs.addCmds: configuredCheck() - blocks, root := SplitDataToBitswapBlocksLengthPrefixedWithTag(bs.maxBlockSize, cmd.data, BlockBodyTag) + blocks, root := SplitDataToBitswapBlocksLengthPrefixedWithTag(bs.maxBlockSize, cmd.data, cmd.tag) err := announceNewRootBlock(bs.ctx, bs.engine, bs.storage, blocks, root) if err == nil { - bs.SendResourceUpdate(ipc.ResourceUpdateType_added, root) + bs.SendResourceUpdate(ipc.ResourceUpdateType_added, cmd.tag, root) 
} else { bitswapLogger.Errorf("Failed to announce root cid %s (%s)", codanet.BlockHashToCidSuffix(root), err) } case cmd := <-bs.deleteCmds: configuredCheck() - success := []root{} + success := map[BitswapDataTag][]root{} for _, root := range cmd.rootIds { - err := bs.deleteRoot(root) + tag, err := DeleteRoot(bs, root) if err == nil { - success = append(success, root) + success[tag] = append(success[tag], root) } else { bitswapLogger.Errorf("Error processing delete request for %s: %s", codanet.BlockHashToCidSuffix(root), err) } } - bs.SendResourceUpdates(ipc.ResourceUpdateType_removed, success...) + for tag, roots := range success { + bs.SendResourceUpdates(ipc.ResourceUpdateType_removed, tag, roots...) + } case cmd := <-bs.downloadCmds: configuredCheck() // We put all ids to map to avoid diff --git a/src/app/libp2p_helper/src/libp2p_helper/bitswap_delete.go b/src/app/libp2p_helper/src/libp2p_helper/bitswap_delete.go new file mode 100644 index 00000000000..16660d1ceb5 --- /dev/null +++ b/src/app/libp2p_helper/src/libp2p_helper/bitswap_delete.go @@ -0,0 +1,87 @@ +package main + +import ( + "codanet" + "errors" + + "github.com/ipfs/go-cid" + ipld "github.com/ipfs/go-ipld-format" +) + +func ClearRootDownloadState(bs BitswapState, root root) { + rootStates := bs.RootDownloadStates() + state, has := rootStates[root] + if !has { + return + } + nodeParams := bs.NodeDownloadParams() + delete(rootStates, root) + state.allDescendants.ForEach(func(c cid.Cid) error { + np, hasNp := nodeParams[c] + if hasNp { + delete(np, root) + if len(np) == 0 { + delete(nodeParams, c) + } + } + return nil + }) + state.cancelF() +} + +func DeleteRoot(bs BitswapState, root BitswapBlockLink) (BitswapDataTag, error) { + if err := bs.SetStatus(root, codanet.Deleting); err != nil { + return 255, err + } + var tag BitswapDataTag + { + // Determining tag of root being deleted + state, has := bs.RootDownloadStates()[root] + if has { + tag = state.getTag() + } else { + err := bs.ViewBlock(root, func(b []byte) error { + _, fullBlockData, err := ReadBitswapBlock(b) + if err != nil { + return err + } + if len(fullBlockData) < 5 { + return errors.New("root block is too short") + } + tag = BitswapDataTag(fullBlockData[4]) + return nil + }) + if err != nil { + return 255, err + } + } + } + ClearRootDownloadState(bs, root) + descendantMap := map[[32]byte]struct{}{root: {}} + allDescendants := []BitswapBlockLink{root} + viewBlockF := func(b []byte) error { + links, _, err := ReadBitswapBlock(b) + if err == nil { + for _, l := range links { + var l2 BitswapBlockLink + copy(l2[:], l[:]) + _, has := descendantMap[l2] + if !has { + descendantMap[l2] = struct{}{} + allDescendants = append(allDescendants, l2) + } + } + } + return err + } + for i := 0; i < len(allDescendants); i++ { + block := allDescendants[i] + if err := bs.ViewBlock(block, viewBlockF); err != nil && err != (ipld.ErrNotFound{Cid: codanet.BlockHashToCid(block)}) { + return tag, err + } + } + if err := bs.DeleteBlocks(allDescendants); err != nil { + return tag, err + } + return tag, bs.DeleteStatus(root) +} diff --git a/src/app/libp2p_helper/src/libp2p_helper/bitswap_downloader.go b/src/app/libp2p_helper/src/libp2p_helper/bitswap_downloader.go index 8273e02a5bc..201af79abec 100644 --- a/src/app/libp2p_helper/src/libp2p_helper/bitswap_downloader.go +++ b/src/app/libp2p_helper/src/libp2p_helper/bitswap_downloader.go @@ -86,7 +86,7 @@ type BitswapState interface { DepthIndices() DepthIndices NewSession(downloadTimeout time.Duration) (BlockRequester, context.CancelFunc) 
RegisterDeadlineTracker(root, time.Duration) - SendResourceUpdate(type_ ipc.ResourceUpdateType, root root) + SendResourceUpdate(type_ ipc.ResourceUpdateType, tag BitswapDataTag, root root) CheckInvariants() } @@ -109,7 +109,7 @@ func kickStartRootDownload(root_ BitswapBlockLink, tag BitswapDataTag, bs Bitswa bitswapLogger.Debugf("Skipping download request for %s due to status: %s", codanet.BlockHashToCidSuffix(root_), err) status, err := bs.GetStatus(root_) if err == nil && status == codanet.Full { - bs.SendResourceUpdate(ipc.ResourceUpdateType_added, root_) + bs.SendResourceUpdate(ipc.ResourceUpdateType_added, tag, root_) } return } @@ -280,8 +280,8 @@ func processDownloadedBlock(block blocks.Block, bs BitswapState) { newParams, malformed := processDownloadedBlockStep(oldPs, block, rps, bs.MaxBlockSize(), depthIndices, bs.DataConfig()) for root, err := range malformed { bitswapLogger.Warnf("Block %s of root %s is malformed: %s", id, codanet.BlockHashToCidSuffix(root), err) - ClearRootDownloadState(bs, root) - bs.SendResourceUpdate(ipc.ResourceUpdateType_broken, root) + DeleteRoot(bs, root) + bs.SendResourceUpdate(ipc.ResourceUpdateType_broken, rps[root].getTag(), root) } blocksToProcess := make([]blocks.Block, 0) @@ -338,7 +338,7 @@ func processDownloadedBlock(block blocks.Block, bs BitswapState) { bitswapLogger.Warnf("Failed to update status of fully downloaded root %s: %s", root, err) } ClearRootDownloadState(bs, root) - bs.SendResourceUpdate(ipc.ResourceUpdateType_added, root) + bs.SendResourceUpdate(ipc.ResourceUpdateType_added, rootState.tag, root) } } for _, b := range blocksToProcess { diff --git a/src/app/libp2p_helper/src/libp2p_helper/msg.go b/src/app/libp2p_helper/src/libp2p_helper/msg.go index 053bbd64062..680b76bd487 100644 --- a/src/app/libp2p_helper/src/libp2p_helper/msg.go +++ b/src/app/libp2p_helper/src/libp2p_helper/msg.go @@ -395,7 +395,7 @@ func mkStreamMessageReceivedUpcall(streamIdx uint64, data []byte) *capnp.Message }) } -func mkResourceUpdatedUpcall(type_ ipc.ResourceUpdateType, rootIds []root) *capnp.Message { +func mkResourceUpdatedUpcall(type_ ipc.ResourceUpdateType, tag BitswapDataTag, rootIds []root) *capnp.Message { return mkPushMsg(func(m ipc.DaemonInterface_PushMessage) { im, err := m.NewResourceUpdated() panicOnErr(err) @@ -403,6 +403,7 @@ func mkResourceUpdatedUpcall(type_ ipc.ResourceUpdateType, rootIds []root) *capn panic("too many root ids in a single upcall") } im.SetType(type_) + im.SetTag(uint8(tag)) mIds, err := im.NewIds(int32(len(rootIds))) panicOnErr(err) for i, rootId := range rootIds { From 9cfeeeafeab81d2c2fdd6e298064c35ad525fa4a Mon Sep 17 00:00:00 2001 From: dkijania Date: Thu, 10 Oct 2024 21:23:38 +0200 Subject: [PATCH 098/234] Remove duplicated proof loading from verifier process --- opam.export | 2 + src/app/archive/lib/test.ml | 6 +- .../src/cli_entrypoint/mina_cli_entrypoint.ml | 11 +++- .../src/init/transaction_snark_profiler.ml | 7 ++- .../bootstrap_controller.ml | 7 ++- src/lib/ledger_catchup/normal_catchup.ml | 6 +- src/lib/ledger_catchup/super_catchup.ml | 6 +- src/lib/mina_lib/mina_lib.ml | 7 +-- src/lib/mina_lib/tests/tests.ml | 2 +- src/lib/network_pool/batcher.ml | 7 ++- src/lib/network_pool/snark_pool.ml | 6 +- src/lib/network_pool/test.ml | 6 +- src/lib/network_pool/transaction_pool.ml | 11 +++- src/lib/prover/intf.ml | 3 + src/lib/prover/prover.ml | 26 ++++++++- src/lib/staged_ledger/staged_ledger.ml | 16 ++++- .../transaction_inclusion_status.ml | 7 ++- .../full_frontier/full_frontier.ml | 7 ++- 
.../transition_handler/catchup_scheduler.ml | 5 +- src/lib/transition_handler/processor.ml | 6 +- src/lib/verifier/dummy.ml | 27 ++------- src/lib/verifier/for_test.ml | 18 ++++++ src/lib/verifier/for_test.mli | 2 + src/lib/verifier/prod.ml | 58 +++++-------------- src/lib/verifier/verifier.ml | 1 + src/lib/verifier/verifier.mli | 2 + src/lib/verifier/verifier_intf.ml | 2 +- 27 files changed, 162 insertions(+), 102 deletions(-) create mode 100644 src/lib/verifier/for_test.ml create mode 100644 src/lib/verifier/for_test.mli diff --git a/opam.export b/opam.export index 6896719355c..6f18c3e6057 100644 --- a/opam.export +++ b/opam.export @@ -21,6 +21,7 @@ roots: [ "js_of_ocaml-toplevel.4.0.0" "lmdb.1.0" "menhir.20210419" + "memtrace_viewer.0.16.0" "merlin.4.5-414" "ocaml-base-compiler.4.14.0" "ocamlformat.0.20.1" @@ -153,6 +154,7 @@ installed: [ "merlin.4.5-414" "merlin-extend.0.6.1" "mew.0.1.0" + "memtrace.0.2.3" "mew_vi.0.5.0" "minicli.5.0.2" "mirage-crypto-ec.0.11.0" diff --git a/src/app/archive/lib/test.ml b/src/app/archive/lib/test.ml index 8600c231ecf..4d7d9afd509 100644 --- a/src/app/archive/lib/test.ml +++ b/src/app/archive/lib/test.ml @@ -20,10 +20,12 @@ let%test_module "Archive node unit tests" = let verifier = Async.Thread_safe.block_on_async_exn (fun () -> - Verifier.create ~logger ~proof_level ~constraint_constants + let%bind verification_key = Lazy.force (Verifier.For_test.get_blockchain_verification_key ~constraint_constants ~proof_level) in + Verifier.create ~logger ~proof_level ~conf_dir:None ~pids:(Child_processes.Termination.create_pid_table ()) - ~commit_id:"not specified for unit tests" () ) + ~commit_id:"not specified for unit tests" () + ~verification_key ) module Genesis_ledger = (val Genesis_ledger.for_unit_tests) diff --git a/src/app/cli/src/cli_entrypoint/mina_cli_entrypoint.ml b/src/app/cli/src/cli_entrypoint/mina_cli_entrypoint.ml index b90833be0a1..a201ec7a8c9 100644 --- a/src/app/cli/src/cli_entrypoint/mina_cli_entrypoint.ml +++ b/src/app/cli/src/cli_entrypoint/mina_cli_entrypoint.ml @@ -1770,10 +1770,17 @@ let internal_commands logger = | Error err -> failwithf "Could not parse JSON: %s" err () ) ) in + + + + + let%bind verification_key = Lazy.force (Verifier.For_test.get_blockchain_verification_key ~constraint_constants ~proof_level) in let%bind verifier = Verifier.create ~commit_id:Mina_version.commit_id ~logger - ~proof_level ~constraint_constants ~pids:(Pid.Table.create ()) - ~conf_dir:(Some conf_dir) () + ~proof_level ~pids:(Pid.Table.create ()) + ~conf_dir:(Some conf_dir) + ~verification_key + () in let%bind result = let cap lst = diff --git a/src/app/cli/src/init/transaction_snark_profiler.ml b/src/app/cli/src/init/transaction_snark_profiler.ml index ef4e26d284b..bbcdeccb762 100644 --- a/src/app/cli/src/init/transaction_snark_profiler.ml +++ b/src/app/cli/src/init/transaction_snark_profiler.ml @@ -15,11 +15,16 @@ let run ~genesis_constants ~constraint_constants ~proof_level ?min_num_updates ~max_num_updates () ) in Parallel.init_master () ; + + let verifier = Async.Thread_safe.block_on_async_exn (fun () -> + let open Async.Deferred.Let_syntax in + let%bind verification_key = Lazy.force (Verifier.For_test.get_blockchain_verification_key ~constraint_constants ~proof_level) in Verifier.create ~commit_id:Mina_version.commit_id ~logger ~proof_level - ~constraint_constants ~conf_dir:None + ~conf_dir:None ~pids:(Child_processes.Termination.create_pid_table ()) + ~verification_key () ) in let rec go n = diff --git 
a/src/lib/bootstrap_controller/bootstrap_controller.ml b/src/lib/bootstrap_controller/bootstrap_controller.ml index ffdc8fb8c2f..1653990b841 100644 --- a/src/lib/bootstrap_controller/bootstrap_controller.ml +++ b/src/lib/bootstrap_controller/bootstrap_controller.ml @@ -743,12 +743,15 @@ let%test_module "Bootstrap_controller tests" = let compile_config = compile_config end + let verifier = Async.Thread_safe.block_on_async_exn (fun () -> - Verifier.create ~logger ~proof_level ~constraint_constants + let%bind verification_key = Lazy.force (Verifier.For_test.get_blockchain_verification_key ~constraint_constants ~proof_level) in + Verifier.create ~logger ~proof_level ~conf_dir:None ~pids:(Child_processes.Termination.create_pid_table ()) - ~commit_id:"not specified for unit tests" () ) + ~commit_id:"not specified for unit tests" + ~verification_key () ) module Genesis_ledger = (val precomputed_values.genesis_ledger) diff --git a/src/lib/ledger_catchup/normal_catchup.ml b/src/lib/ledger_catchup/normal_catchup.ml index 99f0b38e200..ee7ce4fea8d 100644 --- a/src/lib/ledger_catchup/normal_catchup.ml +++ b/src/lib/ledger_catchup/normal_catchup.ml @@ -908,10 +908,12 @@ let%test_module "Ledger_catchup tests" = let verifier = Async.Thread_safe.block_on_async_exn (fun () -> - Verifier.create ~logger ~proof_level ~constraint_constants + let%bind verification_key = Lazy.force (Verifier.For_test.get_blockchain_verification_key ~constraint_constants ~proof_level) in + Verifier.create ~logger ~proof_level ~conf_dir:None ~pids:(Child_processes.Termination.create_pid_table ()) - ~commit_id:"not specified for unit tests" () ) + ~commit_id:"not specified for unit tests" + ~verification_key () ) module Context = struct let logger = logger diff --git a/src/lib/ledger_catchup/super_catchup.ml b/src/lib/ledger_catchup/super_catchup.ml index 4ca6305617c..268d77e4505 100644 --- a/src/lib/ledger_catchup/super_catchup.ml +++ b/src/lib/ledger_catchup/super_catchup.ml @@ -1453,10 +1453,12 @@ let%test_module "Ledger_catchup tests" = let verifier = Async.Thread_safe.block_on_async_exn (fun () -> - Verifier.create ~logger ~proof_level ~constraint_constants + let%bind verification_key = Lazy.force (Verifier.For_test.get_blockchain_verification_key ~constraint_constants ~proof_level) in + Verifier.create ~logger ~proof_level ~conf_dir:None ~pids:(Child_processes.Termination.create_pid_table ()) - ~commit_id:"not specified for unit tests" () ) + ~commit_id:"not specified for unit tests" + ~verification_key () ) module Context = struct let logger = logger diff --git a/src/lib/mina_lib/mina_lib.ml b/src/lib/mina_lib/mina_lib.ml index 972c8ec1375..089d3ed9376 100644 --- a/src/lib/mina_lib/mina_lib.ml +++ b/src/lib/mina_lib/mina_lib.ml @@ -1561,15 +1561,14 @@ let create ~commit_id ?wallets (config : Config.t) = ~metadata:[ ("exn", Error_json.error_to_yojson err) ] ) ) (fun () -> O1trace.thread "manage_verifier_subprocess" (fun () -> - let%bind verifier = + let%bind verification_key = Prover.get_blockchain_verification_key prover >>| Or_error.ok_exn in + let%bind verifier = Verifier.create ~commit_id ~logger:config.logger ~enable_internal_tracing: (Internal_tracing.is_enabled ()) ~internal_trace_filename:"verifier-internal-trace.jsonl" ~proof_level:config.precomputed_values.proof_level - ~constraint_constants: - config.precomputed_values.constraint_constants - ~pids:config.pids ~conf_dir:(Some config.conf_dir) () + ~pids:config.pids ~conf_dir:(Some config.conf_dir) ~verification_key () in let%map () = set_itn_data (module Verifier) 
verifier in verifier ) ) diff --git a/src/lib/mina_lib/tests/tests.ml b/src/lib/mina_lib/tests/tests.ml index 4897b8a9b35..426e365cc71 100644 --- a/src/lib/mina_lib/tests/tests.ml +++ b/src/lib/mina_lib/tests/tests.ml @@ -143,7 +143,7 @@ let%test_module "Epoch ledger sync tests" = let make_verifier (module Context : CONTEXT) = let open Context in Verifier.create ~logger ~proof_level:precomputed_values.proof_level - ~constraint_constants:precomputed_values.constraint_constants ~pids + ~pids ~conf_dir:(Some (make_dirname "verifier")) ~commit_id:"not specified for unit tests" () diff --git a/src/lib/network_pool/batcher.ml b/src/lib/network_pool/batcher.ml index c2a9a147165..7e451af373a 100644 --- a/src/lib/network_pool/batcher.ml +++ b/src/lib/network_pool/batcher.ml @@ -498,10 +498,13 @@ module Snark_pool = struct let verifier = Async.Thread_safe.block_on_async_exn (fun () -> - Verifier.create ~logger ~proof_level ~constraint_constants + let%bind verification_key = Lazy.force (Verifier.For_test.get_blockchain_verification_key ~constraint_constants ~proof_level) in + Verifier.create ~logger ~proof_level ~conf_dir:None ~pids:(Child_processes.Termination.create_pid_table ()) - ~commit_id:"not specified for unit tests" () ) + ~commit_id:"not specified for unit tests" + ~verification_key + () ) let gen_proofs = let open Quickcheck.Generator.Let_syntax in diff --git a/src/lib/network_pool/snark_pool.ml b/src/lib/network_pool/snark_pool.ml index 6ede322b927..e7dda2e5cf5 100644 --- a/src/lib/network_pool/snark_pool.ml +++ b/src/lib/network_pool/snark_pool.ml @@ -591,10 +591,12 @@ let%test_module "random set test" = let verifier = Async.Thread_safe.block_on_async_exn (fun () -> - Verifier.create ~logger ~proof_level ~constraint_constants + let%bind verification_key = Lazy.force (Verifier.For_test.get_blockchain_verification_key ~constraint_constants ~proof_level) in + Verifier.create ~logger ~proof_level ~conf_dir:None ~pids:(Child_processes.Termination.create_pid_table ()) - ~commit_id:"not specified for unit tests" () ) + ~commit_id:"not specified for unit tests" + ~verification_key () ) module Mock_snark_pool = Make (Mocks.Base_ledger) (Mocks.Staged_ledger) (Mocks.Transition_frontier) diff --git a/src/lib/network_pool/test.ml b/src/lib/network_pool/test.ml index b0ddac4e0a4..0e4a398e0d8 100644 --- a/src/lib/network_pool/test.ml +++ b/src/lib/network_pool/test.ml @@ -27,10 +27,12 @@ let%test_module "network pool test" = let verifier = Async.Thread_safe.block_on_async_exn (fun () -> - Verifier.create ~logger ~proof_level ~constraint_constants + let%bind verification_key = Lazy.force (Verifier.For_test.get_blockchain_verification_key ~constraint_constants ~proof_level) in + Verifier.create ~logger ~proof_level ~conf_dir:None ~pids:(Child_processes.Termination.create_pid_table ()) - ~commit_id:"not specified for unit tests" () ) + ~commit_id:"not specified for unit tests" () + ~verification_key ) module Mock_snark_pool = Snark_pool.Make (Mocks.Base_ledger) (Mocks.Staged_ledger) diff --git a/src/lib/network_pool/transaction_pool.ml b/src/lib/network_pool/transaction_pool.ml index e99224ecdb8..3156345d58a 100644 --- a/src/lib/network_pool/transaction_pool.ml +++ b/src/lib/network_pool/transaction_pool.ml @@ -1667,10 +1667,13 @@ let%test_module _ = let verifier = Async.Thread_safe.block_on_async_exn (fun () -> - Verifier.create ~logger ~proof_level ~constraint_constants + let%bind verification_key = Lazy.force (Verifier.For_test.get_blockchain_verification_key ~constraint_constants ~proof_level) in + 
Verifier.create ~logger ~proof_level ~conf_dir:None ~pids:(Child_processes.Termination.create_pid_table ()) - ~commit_id:"not specified for unit tests" () ) + ~commit_id:"not specified for unit tests" () + ~verification_key + ) let `VK vk, `Prover prover = Transaction_snark.For_tests.create_trivial_snapp ~constraint_constants () @@ -3084,11 +3087,13 @@ let%test_module _ = let%test "account update with a different network id that uses proof \ authorization would be rejected" = Thread_safe.block_on_async_exn (fun () -> + let%bind verification_key = Lazy.force (Verifier.For_test.get_blockchain_verification_key ~constraint_constants ~proof_level) in let%bind verifier_full = - Verifier.create ~logger ~proof_level:Full ~constraint_constants + Verifier.create ~logger ~proof_level:Full ~conf_dir:None ~pids:(Child_processes.Termination.create_pid_table ()) ~commit_id:"not specified for unit tests" () + ~verification_key in let%bind test = setup_test ~verifier:verifier_full diff --git a/src/lib/prover/intf.ml b/src/lib/prover/intf.ml index 0f6964f140e..e2778f542ab 100644 --- a/src/lib/prover/intf.ml +++ b/src/lib/prover/intf.ml @@ -57,4 +57,7 @@ module type S = sig sets the process kind for the Itn logger to "prover" *) val set_itn_logger_data : t -> daemon_port:int -> unit Deferred.Or_error.t + + val get_blockchain_verification_key : + t -> Pickles.Verification_key.t Deferred.Or_error.t end diff --git a/src/lib/prover/prover.ml b/src/lib/prover/prover.ml index 09e386d036c..5b913a1eabb 100644 --- a/src/lib/prover/prover.ml +++ b/src/lib/prover/prover.ml @@ -56,6 +56,9 @@ module Worker_state = struct val toggle_internal_tracing : bool -> unit val set_itn_logger_data : daemon_port:int -> unit + + val get_blockchain_verification_key : + unit -> Pickles.Verification_key.t Deferred.t end (* bin_io required by rpc_parallel *) @@ -103,7 +106,6 @@ module Worker_state = struct Pickles.Cache_handle.generate_or_load B.cache_handle |> Promise.to_deferred in - ( module struct module T = T module B = B @@ -161,6 +163,9 @@ module Worker_state = struct let set_itn_logger_data ~daemon_port = Itn_logger.set_data ~process_kind:"prover" ~daemon_port + + let get_blockchain_verification_key () = + Lazy.force B.Proof.verification_key end : S ) | Check -> Deferred.return @@ -202,6 +207,9 @@ module Worker_state = struct let toggle_internal_tracing _ = () let set_itn_logger_data ~daemon_port:_ = () + + let get_blockchain_verification_key () = + Deferred.return (Lazy.force Pickles.Verification_key.dummy) end : S ) | No_check -> Deferred.return @@ -221,6 +229,9 @@ module Worker_state = struct let toggle_internal_tracing _ = () let set_itn_logger_data ~daemon_port:_ = () + + let get_blockchain_verification_key () = + Deferred.return (Lazy.force Pickles.Verification_key.dummy) end : S ) let get = Fn.id @@ -275,6 +286,12 @@ module Functions = struct let (module M) = Worker_state.get w in M.set_itn_logger_data ~daemon_port ; Deferred.unit ) + + let get_blockchain_verification_key = + create bin_unit [%bin_type_class: Pickles.Verification_key.Stable.Latest.t] + (fun w () -> + let (module M) = Worker_state.get w in + M.get_blockchain_verification_key () ) end module Worker = struct @@ -288,6 +305,8 @@ module Worker = struct ; verify_blockchain : ('w, Blockchain.t, unit Or_error.t) F.t ; toggle_internal_tracing : ('w, bool, unit) F.t ; set_itn_logger_data : ('w, int, unit) F.t + ; get_blockchain_verification_key : + ('w, unit, Pickles.Verification_key.t) F.t } module Worker_state = Worker_state @@ -316,6 +335,7 @@ module Worker = 
struct ; verify_blockchain = f verify_blockchain ; toggle_internal_tracing = f toggle_internal_tracing ; set_itn_logger_data = f set_itn_logger_data + ; get_blockchain_verification_key = f get_blockchain_verification_key } let init_worker_state @@ -566,3 +586,7 @@ let toggle_internal_tracing { connection; _ } enabled = let set_itn_logger_data { connection; _ } ~daemon_port = Worker.Connection.run connection ~f:Worker.functions.set_itn_logger_data ~arg:daemon_port + +let get_blockchain_verification_key { connection; _ } = + Worker.Connection.run connection + ~f:Worker.functions.get_blockchain_verification_key ~arg:() diff --git a/src/lib/staged_ledger/staged_ledger.ml b/src/lib/staged_ledger/staged_ledger.ml index af202350850..c2694e0fb14 100644 --- a/src/lib/staged_ledger/staged_ledger.ml +++ b/src/lib/staged_ledger/staged_ledger.ml @@ -2389,10 +2389,12 @@ let%test_module "staged ledger tests" = let verifier = Async.Thread_safe.block_on_async_exn (fun () -> - Verifier.create ~logger ~proof_level ~constraint_constants + let%bind verification_key = Lazy.force (Verifier.For_test.get_blockchain_verification_key ~constraint_constants ~proof_level) in + Verifier.create ~logger ~proof_level ~conf_dir:None ~pids:(Child_processes.Termination.create_pid_table ()) - ~commit_id:"not specified for unit tests" () ) + ~commit_id:"not specified for unit tests" () + ~verification_key ) let find_vk ledger = Zkapp_command.Verifiable.load_vk_from_ledger ~get:(Ledger.get ledger) @@ -5187,12 +5189,20 @@ let%test_module "staged ledger tests" = (Staged_ledger_diff.With_valid_signatures_and_proofs .commands diff ) = 1 ) ; + let%bind verifier_full = + + let%bind verification_key = Lazy.force (Verifier.For_test.get_blockchain_verification_key + ~constraint_constants + ~proof_level:Full) + + in Verifier.create ~logger ~proof_level:Full - ~constraint_constants ~conf_dir:None + ~conf_dir:None ~pids: (Child_processes.Termination.create_pid_table ()) ~commit_id:"not specified for unit tests" () + ~verification_key in match%map Sl.apply ~constraint_constants ~global_slot !sl diff --git a/src/lib/transaction_inclusion_status/transaction_inclusion_status.ml b/src/lib/transaction_inclusion_status/transaction_inclusion_status.ml index 555bf91eeb6..4b5f926e809 100644 --- a/src/lib/transaction_inclusion_status/transaction_inclusion_status.ml +++ b/src/lib/transaction_inclusion_status/transaction_inclusion_status.ml @@ -90,12 +90,15 @@ let%test_module "transaction_status" = let block_window_duration = Mina_compile_config.For_unit_tests.t.block_window_duration + let verifier = Async.Thread_safe.block_on_async_exn (fun () -> - Verifier.create ~logger ~proof_level ~constraint_constants + let%bind verification_key = Lazy.force (Verifier.For_test.get_blockchain_verification_key ~constraint_constants ~proof_level) in + Verifier.create ~logger ~proof_level ~conf_dir:None ~pids:(Child_processes.Termination.create_pid_table ()) - ~commit_id:"not specified for unit tests" () ) + ~commit_id:"not specified for unit tests" + ~verification_key () ) let key_gen = let open Quickcheck.Generator in diff --git a/src/lib/transition_frontier/full_frontier/full_frontier.ml b/src/lib/transition_frontier/full_frontier/full_frontier.ml index 9d152fa7d8a..9981fca9197 100644 --- a/src/lib/transition_frontier/full_frontier/full_frontier.ml +++ b/src/lib/transition_frontier/full_frontier/full_frontier.ml @@ -962,10 +962,13 @@ module For_tests = struct let verifier () = Async.Thread_safe.block_on_async_exn (fun () -> - Verifier.create ~logger ~proof_level 
~constraint_constants + let open Async.Deferred.Let_syntax in + let%bind verification_key = Lazy.force (Verifier.For_test.get_blockchain_verification_key ~constraint_constants ~proof_level) in + Verifier.create ~logger ~proof_level ~conf_dir:None ~pids:(Child_processes.Termination.create_pid_table ()) - ~commit_id:"not specified for unit tests" () ) + ~commit_id:"not specified for unit tests" () + ~verification_key ) module Genesis_ledger = (val precomputed_values.genesis_ledger) diff --git a/src/lib/transition_handler/catchup_scheduler.ml b/src/lib/transition_handler/catchup_scheduler.ml index 6dd6693e9e8..bda1c3c4031 100644 --- a/src/lib/transition_handler/catchup_scheduler.ml +++ b/src/lib/transition_handler/catchup_scheduler.ml @@ -372,8 +372,9 @@ let%test_module "Transition_handler.Catchup_scheduler tests" = let verifier = Async.Thread_safe.block_on_async_exn (fun () -> - Verifier.create ~logger ~proof_level ~constraint_constants - ~conf_dir:None ~pids ~commit_id:"not specified for unit tests" () ) + let%bind verification_key = Lazy.force (Verifier.For_test.get_blockchain_verification_key ~constraint_constants ~proof_level) in + Verifier.create ~logger ~proof_level + ~conf_dir:None ~pids ~commit_id:"not specified for unit tests" ~verification_key () ) (* cast a breadcrumb into a cached, enveloped, partially validated transition *) let downcast_breadcrumb breadcrumb = diff --git a/src/lib/transition_handler/processor.ml b/src/lib/transition_handler/processor.ml index 78655b431c7..ada993b6f3c 100644 --- a/src/lib/transition_handler/processor.ml +++ b/src/lib/transition_handler/processor.ml @@ -498,10 +498,12 @@ let%test_module "Transition_handler.Processor tests" = let verifier = Async.Thread_safe.block_on_async_exn (fun () -> - Verifier.create ~logger ~proof_level ~constraint_constants + let%bind verification_key = Lazy.force (Verifier.For_test.get_blockchain_verification_key ~constraint_constants ~proof_level) in + Verifier.create ~logger ~proof_level ~conf_dir:None ~pids:(Child_processes.Termination.create_pid_table ()) - ~commit_id:"not specified for unit tests" () ) + ~commit_id:"not specified for unit tests" () + ~verification_key ) module Context = struct let logger = logger diff --git a/src/lib/verifier/dummy.ml b/src/lib/verifier/dummy.ml index 249e7423240..d826f50f27b 100644 --- a/src/lib/verifier/dummy.ml +++ b/src/lib/verifier/dummy.ml @@ -4,11 +4,10 @@ open Mina_base type t = { proof_level : Genesis_constants.Proof_level.t - ; constraint_constants : Genesis_constants.Constraint_constants.t ; verify_blockchain_snarks : Blockchain_snark.Blockchain.t list -> unit Or_error.t Or_error.t Deferred.t - ; verification_key : Pickles.Verification_key.t Deferred.t Lazy.t + ; verification_key : Pickles.Verification_key.t ; verify_transaction_snarks : (Ledger_proof.Prod.t * Mina_base.Sok_message.t) list -> unit Or_error.t Or_error.t Deferred.t @@ -21,23 +20,11 @@ let invalid_to_error = Common.invalid_to_error type ledger_proof = Ledger_proof.t let create ~logger:_ ?enable_internal_tracing:_ ?internal_trace_filename:_ - ~proof_level ~constraint_constants ~pids:_ ~conf_dir:_ ~commit_id:_ () = - let module T = Transaction_snark.Make (struct - let constraint_constants = constraint_constants - - let proof_level = proof_level - end) in - let module B = Blockchain_snark.Blockchain_snark_state.Make (struct - let tag = T.tag - - let constraint_constants = constraint_constants - - let proof_level = proof_level - end) in + ~proof_level ~pids:_ ~conf_dir:_ ~commit_id:_ ~verification_key () = let 
verify_blockchain_snarks chains = match proof_level with | Genesis_constants.Proof_level.Full -> - B.Proof.verify + Blockchain_snark.Blockchain_snark_state.verify ~key:verification_key (List.map chains ~f:(fun snark -> ( Blockchain_snark.Blockchain.state snark , Blockchain_snark.Blockchain.proof snark ) ) ) @@ -48,7 +35,7 @@ let create ~logger:_ ?enable_internal_tracing:_ ?internal_trace_filename:_ let verify_transaction_snarks ts = match proof_level with | Full -> ( - match Or_error.try_with (fun () -> T.verify ts) with + match Or_error.try_with (fun () -> Transaction_snark.verify ~key:verification_key ts) with | Ok result -> result |> Deferred.map ~f:Or_error.return | Error e -> @@ -75,9 +62,8 @@ let create ~logger:_ ?enable_internal_tracing:_ ?internal_trace_filename:_ Deferred.return { proof_level - ; constraint_constants ; verify_blockchain_snarks - ; verification_key = B.Proof.verification_key + ; verification_key ; verify_transaction_snarks } @@ -162,8 +148,7 @@ let verify_transaction_snarks { verify_transaction_snarks; _ } ts = verify_transaction_snarks ts let get_blockchain_verification_key { verification_key; _ } = - Deferred.Or_error.try_with ~here:[%here] (fun () -> - Lazy.force verification_key ) + Deferred.Or_error.return verification_key let toggle_internal_tracing _ _ = Deferred.Or_error.ok_unit diff --git a/src/lib/verifier/for_test.ml b/src/lib/verifier/for_test.ml new file mode 100644 index 00000000000..d75b437ccec --- /dev/null +++ b/src/lib/verifier/for_test.ml @@ -0,0 +1,18 @@ + +let get_blockchain_verification_key ~constraint_constants ~proof_level = + let module T = Transaction_snark.Make (struct + let constraint_constants = constraint_constants + + let proof_level = proof_level + end) + + in + let module B = Blockchain_snark.Blockchain_snark_state.Make (struct + let tag = T.tag + + let constraint_constants = constraint_constants + + let proof_level = proof_level + end) + in + B.Proof.verification_key diff --git a/src/lib/verifier/for_test.mli b/src/lib/verifier/for_test.mli new file mode 100644 index 00000000000..1c87755cfc6 --- /dev/null +++ b/src/lib/verifier/for_test.mli @@ -0,0 +1,2 @@ + +val get_blockchain_verification_key : constraint_constants:Genesis_constants.Constraint_constants.t -> proof_level:Genesis_constants.Proof_level.t -> Pickles.Verification_key.t Async.Deferred.t Lazy.t \ No newline at end of file diff --git a/src/lib/verifier/prod.ml b/src/lib/verifier/prod.ml index d05ebe733d1..caed7c7a916 100644 --- a/src/lib/verifier/prod.ml +++ b/src/lib/verifier/prod.ml @@ -68,34 +68,21 @@ module Worker_state = struct ; internal_trace_filename : string option ; logger : Logger.t ; proof_level : Genesis_constants.Proof_level.t - ; constraint_constants : Genesis_constants.Constraint_constants.t ; commit_id : string + ; verification_key: Pickles.Verification_key.Stable.Latest.t } [@@deriving bin_io_unversioned] type t = (module S) - let create { logger; proof_level; constraint_constants; commit_id; _ } : + let create { logger; proof_level; commit_id; verification_key; _ } : t Deferred.t = match proof_level with | Full -> Pickles.Side_loaded.srs_precomputation () ; Deferred.return (let module M = struct - module T = Transaction_snark.Make (struct - let constraint_constants = constraint_constants - - let proof_level = proof_level - end) - - module B = Blockchain_snark_state.Make (struct - let tag = T.tag - - let constraint_constants = constraint_constants - - let proof_level = proof_level - end) - + let verify_commands (cs : User_command.Verifiable.t 
With_status.t With_id_tag.t list ) @@ -157,7 +144,7 @@ module Worker_state = struct [%log internal] "Verifier_verify_commands_done" ; result - let verify_blockchain_snarks = B.Proof.verify + let verify_blockchain_snarks bs = Context_logger.with_logger (Some logger) @@ -165,12 +152,12 @@ module Worker_state = struct Internal_tracing.Context_call.with_call_id @@ fun () -> [%log internal] "Verifier_verify_blockchain_snarks" ; - let%map result = verify_blockchain_snarks bs in + let%map result = Blockchain_snark_state.verify ~key:verification_key bs in [%log internal] "Verifier_verify_blockchain_snarks_done" ; result let verify_transaction_snarks ts = - match Or_error.try_with (fun () -> T.verify ts) with + match Or_error.try_with (fun () -> Transaction_snark.verify ts ~key:verification_key) with | Ok result -> result | Error e -> @@ -191,7 +178,7 @@ module Worker_state = struct result let get_blockchain_verification_key () = - Lazy.force B.Proof.verification_key + Deferred.return verification_key let toggle_internal_tracing enabled = don't_wait_for @@ -233,23 +220,7 @@ module Worker_state = struct let verify_transaction_snarks _ = Deferred.return (Ok ()) - let vk = - lazy - (let module T = Transaction_snark.Make (struct - let constraint_constants = constraint_constants - - let proof_level = proof_level - end) in - let module B = Blockchain_snark_state.Make (struct - let tag = T.tag - - let constraint_constants = constraint_constants - - let proof_level = proof_level - end) in - Lazy.force B.Proof.verification_key ) - - let get_blockchain_verification_key () = Lazy.force vk + let get_blockchain_verification_key () = Deferred.return verification_key let toggle_internal_tracing _ = () @@ -281,7 +252,7 @@ module Worker = struct list ) F.t ; get_blockchain_verification_key : - ('w, unit, Pickles.Verification_key.t) F.t + ('w, unit, Pickles.Verification_key.t ) F.t ; toggle_internal_tracing : ('w, bool, unit) F.t ; set_itn_logger_data : ('w, int, unit) F.t } @@ -390,8 +361,8 @@ module Worker = struct ; internal_trace_filename ; logger ; proof_level - ; constraint_constants ; commit_id + ; verification_key } = if Option.is_some conf_dir then ( let max_size = 256 * 1024 * 512 in @@ -424,8 +395,8 @@ module Worker = struct ; internal_trace_filename ; logger ; proof_level - ; constraint_constants ; commit_id + ; verification_key } let init_connection_state ~connection:_ ~worker_state:_ () = Deferred.unit @@ -445,7 +416,7 @@ type t = { worker : worker Ivar.t ref; logger : Logger.t } (* TODO: investigate why conf_dir wasn't being used *) let create ~logger ?(enable_internal_tracing = false) ?internal_trace_filename - ~proof_level ~constraint_constants ~pids ~conf_dir ~commit_id () : + ~proof_level ~pids ~conf_dir ~commit_id ~verification_key () : t Deferred.t = let on_failure err = [%log error] "Verifier process failed with error $err" @@ -483,8 +454,8 @@ let create ~logger ?(enable_internal_tracing = false) ?internal_trace_filename ; internal_trace_filename ; logger ; proof_level - ; constraint_constants ; commit_id + ; verification_key } ) |> Deferred.Result.map_error ~f:Error.of_exn in @@ -756,7 +727,7 @@ let get_blockchain_verification_key { worker; logger } = Worker.Connection.run connection ~f:Worker.functions.get_blockchain_verification_key ~arg:() |> Deferred.Or_error.map ~f:(fun x -> `Continue x) ) ) - + let toggle_internal_tracing { worker; logger } enabled = with_retry ~logger (fun () -> let%bind { connection; _ } = Ivar.read !worker in @@ -770,3 +741,4 @@ let set_itn_logger_data { worker; 
logger } ~daemon_port = Worker.Connection.run connection ~f:Worker.functions.set_itn_logger_data ~arg:daemon_port |> Deferred.Or_error.map ~f:(fun x -> `Continue x) ) + diff --git a/src/lib/verifier/verifier.ml b/src/lib/verifier/verifier.ml index b747900a221..0ce00970669 100644 --- a/src/lib/verifier/verifier.ml +++ b/src/lib/verifier/verifier.ml @@ -1,6 +1,7 @@ module Failure = Verification_failure module Prod = Prod module Dummy = Dummy +module For_test = For_test let m = if Base__Import.am_testing then diff --git a/src/lib/verifier/verifier.mli b/src/lib/verifier/verifier.mli index 99f2bb645de..4a80161594b 100644 --- a/src/lib/verifier/verifier.mli +++ b/src/lib/verifier/verifier.mli @@ -1,5 +1,7 @@ module Failure = Verification_failure +module For_test = For_test + module Dummy : module type of Dummy module Prod : module type of Prod diff --git a/src/lib/verifier/verifier_intf.ml b/src/lib/verifier/verifier_intf.ml index 9b293f07f88..518636ebd05 100644 --- a/src/lib/verifier/verifier_intf.ml +++ b/src/lib/verifier/verifier_intf.ml @@ -65,10 +65,10 @@ module type S = sig -> ?enable_internal_tracing:bool -> ?internal_trace_filename:string -> proof_level:Genesis_constants.Proof_level.t - -> constraint_constants:Genesis_constants.Constraint_constants.t -> pids:Child_processes.Termination.t -> conf_dir:string option -> commit_id:string + -> verification_key:Pickles.Verification_key.t -> unit -> t Deferred.t end From 5cd4d646631d28ae03df62c500eb104cda0ab4bd Mon Sep 17 00:00:00 2001 From: dkijania Date: Mon, 14 Oct 2024 15:48:20 +0200 Subject: [PATCH 099/234] Add script to dump memory usage for local network --- scripts/mina-local-network/monitor_memory.py | 165 +++++++++++++++++++ 1 file changed, 165 insertions(+) create mode 100644 scripts/mina-local-network/monitor_memory.py diff --git a/scripts/mina-local-network/monitor_memory.py b/scripts/mina-local-network/monitor_memory.py new file mode 100644 index 00000000000..2b35b00fd8d --- /dev/null +++ b/scripts/mina-local-network/monitor_memory.py @@ -0,0 +1,165 @@ +import collections +import os +import re +import time +import csv +import psutil +import signal +import sys +import argparse + +import math + +class MinaProcess: + def __init__(self,node_name): + self.node_name = node_name + self.mina_process = "mina.exe" + self.metrics = { + "main":[], + "prover":[], + "verifier":[], + "vrf":[] + } + + def headers(self): + return [self.node_name,f"{self.node_name}_prover", f"{self.node_name}_verifier", f"{self.node_name}_vrf"] + + def get_node_name_process(self,p): + process_name = None + for arg in p.cmdline(): + if m := re.match(f".*/nodes/(.*)/.*", arg): + process_name = m.group(1) + return process_name + return process_name + + def is_mina_process(self,p): + try: + return ((self.mina_process in p.name() ) and + (self.node_name == self.get_node_name_process(p)) and + ("daemon" in p.cmdline())) + except: + return False + + + def append_vrf(self,value): + self.metrics["vrf"].append(value) + def append_verifier(self,value): + self.metrics["verifier"].append(value) + def append_prover(self,value): + self.metrics["prover"].append(value) + + def append(self,value): + self.metrics["main"].append(value) + + def append_zeroes(self): + self.append_vrf(0) + self.append_verifier(0) + self.append_prover(0) + self.append(0) + + def metrics_values(self,row): + row_values = [] + for values in self.metrics.values(): + row_values.append(values[row]) + return row_values + + def len(self): + assert len(self.metrics["vrf"]) == len(self.metrics["prover"]) == 
len(self.metrics["verifier"]) == len(self.metrics["main"]) + return len(self.metrics["vrf"]) + + +def convert_size(size_bytes): + return str(size_bytes/ 1024 / 1024) + +def processes(whales,fishes,nodes): + processes = ["seed","snark_coordinator"] + processes.extend([f"whale_{i}" for i in range(0,whales)]) + processes.extend([f"fish_{i}" for i in range(0,fishes)]) + processes.extend([f"node_{i}" for i in range(0,nodes)]) + + return list([MinaProcess(x) for x in processes]) + + +def write_header(file,columns): + with open(file, 'w') as csvfile: + writer = csv.writer(csvfile, delimiter=',',) + writer.writerow(columns) + +def write_line(file,columns): + with open(file, 'a') as csvfile: + writer = csv.writer(csvfile, delimiter=',',) + writer.writerow(columns) + +def main(whales,fishes,nodes,file,interval): + + mina_processes = processes(whales,fishes,nodes) + headers = [] + + for x in mina_processes: + headers.extend(x.headers()) + + write_header(file, headers) + + print("Press Ctrl +c to finish") + + while True: + for x in mina_processes: + matches = list(filter(lambda p: x.is_mina_process(p), psutil.process_iter())) + if len(matches) == 0 : + x.append_zeroes() + else: + p = matches[0] + try: + children = sorted(p.children(), key=lambda x: x.create_time()) + + def get_mem_for_child_or_default(child): + if not child: + return 0 + else: + return convert_size(child.memory_info()[0]) + + if len(children) > 2 : + x.append_vrf(get_mem_for_child_or_default(children[2])) + x.append_verifier(get_mem_for_child_or_default(children[1])) + x.append_prover(get_mem_for_child_or_default(children[0])) + elif len(children) > 1: + x.append_vrf(0) + x.append_verifier(get_mem_for_child_or_default(children[1])) + x.append_prover(get_mem_for_child_or_default(children[0])) + elif len(children) > 0: + x.append_vrf(0) + x.append_verifier(0) + x.append_prover(get_mem_for_child_or_default(children[0])) + else: + x.append_vrf(0) + x.append_verifier(0) + x.append_prover(0) + + x.append(convert_size(p.memory_info()[0])) + + except (psutil.NoSuchProcess, psutil.ZombieProcess): + pass + + last_row = mina_processes[0].len() + row = [] + for proc in mina_processes: + row.extend(proc.metrics_values(last_row -1)) + write_line(file, row) + + time.sleep(interval) + + +if __name__ == '__main__': + parser = argparse.ArgumentParser( + prog='local network metrics', + description='Program to measure local network mem usage') + + parser.add_argument('-o', '--output-file', default="metrics.csv") + parser.add_argument('-w', '--whales', default=2, type=int) + parser.add_argument('-f', '--fishes', default=1, type=int) + parser.add_argument('-n', '--nodes', default=1, type=int) + parser.add_argument('-i', '--interval', type=float, default=0.5) + + args = parser.parse_args() + + main(args.whales,args.fishes,args.nodes,args.output_file,args.interval) \ No newline at end of file From 5a5884d42023a4fe779e812b8bde99bbb2b16b1d Mon Sep 17 00:00:00 2001 From: dkijania Date: Mon, 14 Oct 2024 15:56:43 +0200 Subject: [PATCH 100/234] clean up code --- opam.export | 2 -- src/app/cli/src/init/transaction_snark_profiler.ml | 2 -- src/lib/bootstrap_controller/bootstrap_controller.ml | 1 - 3 files changed, 5 deletions(-) diff --git a/opam.export b/opam.export index 6f18c3e6057..6896719355c 100644 --- a/opam.export +++ b/opam.export @@ -21,7 +21,6 @@ roots: [ "js_of_ocaml-toplevel.4.0.0" "lmdb.1.0" "menhir.20210419" - "memtrace_viewer.0.16.0" "merlin.4.5-414" "ocaml-base-compiler.4.14.0" "ocamlformat.0.20.1" @@ -154,7 +153,6 @@ installed: [ "merlin.4.5-414" 
"merlin-extend.0.6.1" "mew.0.1.0" - "memtrace.0.2.3" "mew_vi.0.5.0" "minicli.5.0.2" "mirage-crypto-ec.0.11.0" diff --git a/src/app/cli/src/init/transaction_snark_profiler.ml b/src/app/cli/src/init/transaction_snark_profiler.ml index bbcdeccb762..79565a7852b 100644 --- a/src/app/cli/src/init/transaction_snark_profiler.ml +++ b/src/app/cli/src/init/transaction_snark_profiler.ml @@ -15,8 +15,6 @@ let run ~genesis_constants ~constraint_constants ~proof_level ?min_num_updates ~max_num_updates () ) in Parallel.init_master () ; - - let verifier = Async.Thread_safe.block_on_async_exn (fun () -> let open Async.Deferred.Let_syntax in diff --git a/src/lib/bootstrap_controller/bootstrap_controller.ml b/src/lib/bootstrap_controller/bootstrap_controller.ml index 1653990b841..d08b496d131 100644 --- a/src/lib/bootstrap_controller/bootstrap_controller.ml +++ b/src/lib/bootstrap_controller/bootstrap_controller.ml @@ -743,7 +743,6 @@ let%test_module "Bootstrap_controller tests" = let compile_config = compile_config end - let verifier = Async.Thread_safe.block_on_async_exn (fun () -> let%bind verification_key = Lazy.force (Verifier.For_test.get_blockchain_verification_key ~constraint_constants ~proof_level) in From f0df1d2008e3a14d74090f840f975ad78f2a2d2f Mon Sep 17 00:00:00 2001 From: svv232 Date: Tue, 17 Sep 2024 23:56:01 -0400 Subject: [PATCH 101/234] adding codanet comment --- src/app/libp2p_helper/src/codanet.go | 2 ++ 1 file changed, 2 insertions(+) diff --git a/src/app/libp2p_helper/src/codanet.go b/src/app/libp2p_helper/src/codanet.go index df4c732d47d..4349af9f08e 100644 --- a/src/app/libp2p_helper/src/codanet.go +++ b/src/app/libp2p_helper/src/codanet.go @@ -758,6 +758,8 @@ func MakeHelper(ctx context.Context, listenOn []ma.Multiaddr, externalAddr ma.Mu return nil, err } bitswapNetwork := bitnet.NewFromIpfsHost(host, kad, bitnet.Prefix(BitSwapExchange)) + // Block store is provided, but only read-only methods are used + // TODO update Bitswap libraries to require only read-only methods bs := bitswap.New(context.Background(), bitswapNetwork, bstore.Blockstore()) // nil fields are initialized by beginAdvertising From eda4d2cb079044b6848216ad1c392c6e7dd9ed66 Mon Sep 17 00:00:00 2001 From: georgeee Date: Mon, 14 Oct 2024 12:37:59 +0000 Subject: [PATCH 102/234] Add CidToBlockHash function --- src/app/libp2p_helper/src/bitswap_storage.go | 11 +++++++++++ 1 file changed, 11 insertions(+) diff --git a/src/app/libp2p_helper/src/bitswap_storage.go b/src/app/libp2p_helper/src/bitswap_storage.go index 5a54e36eb2f..8e0ffb6cbb8 100644 --- a/src/app/libp2p_helper/src/bitswap_storage.go +++ b/src/app/libp2p_helper/src/bitswap_storage.go @@ -2,6 +2,7 @@ package codanet import ( "context" + "errors" "fmt" "github.com/ipfs/boxo/blockstore" @@ -131,6 +132,16 @@ func (bs *BitswapStorageLmdb) DeleteBlocks(ctx context.Context, keys [][32]byte) return bs.blockstore.DeleteMany(ctx, cids) } +func CidToBlockHash(id cid.Cid) ([32]byte, error) { + mh, err := multihash.Decode(id.Hash()) + var res [32]byte + if err == nil && mh.Code == MULTI_HASH_CODE && id.Prefix().Codec == cid.Raw && len(mh.Digest) == 32 { + copy(res[:], mh.Digest) + return res, nil + } + return res, errors.New("unexpected format of cid") +} + const ( BS_BLOCK_PREFIX byte = iota BS_STATUS_PREFIX From 66ff73b3554e58eef27debdbc56e75b8b248cd7a Mon Sep 17 00:00:00 2001 From: georgeee Date: Mon, 14 Oct 2024 14:22:03 +0000 Subject: [PATCH 103/234] Test bitswap with lower block size Motivation: block size that we';ll be using in production is too large for effective 
bug searching via property tests. --- src/app/libp2p_helper/src/libp2p_helper/bitswap.go | 4 ++++ src/app/libp2p_helper/src/libp2p_helper/util_test.go | 2 +- 2 files changed, 5 insertions(+), 1 deletion(-) diff --git a/src/app/libp2p_helper/src/libp2p_helper/bitswap.go b/src/app/libp2p_helper/src/libp2p_helper/bitswap.go index 8fd76b2a7aa..18c0c4b48d7 100644 --- a/src/app/libp2p_helper/src/libp2p_helper/bitswap.go +++ b/src/app/libp2p_helper/src/libp2p_helper/bitswap.go @@ -47,6 +47,10 @@ type BitswapCtx struct { func NewBitswapCtx(ctx context.Context, outMsgChan chan<- *capnp.Message) *BitswapCtx { maxBlockSize := 1 << 18 // 256 KiB + return NewBitswapCtxWithMaxBlockSize(maxBlockSize, ctx, outMsgChan) +} + +func NewBitswapCtxWithMaxBlockSize(maxBlockSize int, ctx context.Context, outMsgChan chan<- *capnp.Message) *BitswapCtx { return &BitswapCtx{ downloadCmds: make(chan bitswapDownloadCmd, 100), addCmds: make(chan bitswapAddCmd, 100), diff --git a/src/app/libp2p_helper/src/libp2p_helper/util_test.go b/src/app/libp2p_helper/src/libp2p_helper/util_test.go index 6047e490b0c..acd95262c3e 100644 --- a/src/app/libp2p_helper/src/libp2p_helper/util_test.go +++ b/src/app/libp2p_helper/src/libp2p_helper/util_test.go @@ -82,7 +82,7 @@ func newTestAppWithMaxConnsAndCtxAndGrace(t *testing.T, privkey crypto.PrivKey, panicOnErr(helper.Host.Close()) }) outChan := make(chan *capnp.Message, 64) - bitswapCtx := NewBitswapCtx(ctx, outChan) + bitswapCtx := NewBitswapCtxWithMaxBlockSize(1<<9, ctx, outChan) bitswapCtx.engine = helper.Bitswap bitswapCtx.storage = helper.BitswapStorage From e9767d34cba954d61403abd1b02dd82fc392e911 Mon Sep 17 00:00:00 2001 From: svv232 Date: Tue, 17 Sep 2024 23:57:40 -0400 Subject: [PATCH 104/234] Bump bitswap lmdb version (doesn't compile) Commit doesn't compile. Commit that comes next in the branch is going to fix it. Separation is made for greater clarity during review. Integrates commit https://github.com/o1-labs/go-bs-lmdb/commit/1dc365f4b1733209015f274409740e3af5a569f8 which introduces a method to atomically delete blocks after checking a predicate (`DeleteBlocksIf`). 
--- src/app/libp2p_helper/src/go.mod | 2 +- src/app/libp2p_helper/src/go.sum | 4 ++-- 2 files changed, 3 insertions(+), 3 deletions(-) diff --git a/src/app/libp2p_helper/src/go.mod b/src/app/libp2p_helper/src/go.mod index 18a7f3b41b2..6cd6d662ec4 100644 --- a/src/app/libp2p_helper/src/go.mod +++ b/src/app/libp2p_helper/src/go.mod @@ -20,7 +20,7 @@ require ( github.com/libp2p/go-libp2p-record v0.2.0 github.com/multiformats/go-multiaddr v0.9.0 github.com/multiformats/go-multihash v0.2.3 - github.com/o1-labs/go-bs-lmdb v1.1.0 + github.com/o1-labs/go-bs-lmdb v1.2.1 github.com/o1-labs/go-libp2p-kad-dht-patcher v1.1.0 github.com/prometheus/client_golang v1.14.0 github.com/shirou/gopsutil/v3 v3.22.7 diff --git a/src/app/libp2p_helper/src/go.sum b/src/app/libp2p_helper/src/go.sum index f5b51bef0b4..4cc48c38f22 100644 --- a/src/app/libp2p_helper/src/go.sum +++ b/src/app/libp2p_helper/src/go.sum @@ -413,8 +413,8 @@ github.com/multiformats/go-varint v0.0.7/go.mod h1:r8PUYw/fD/SjBCiKOoDlGF6QawOEL github.com/neelance/astrewrite v0.0.0-20160511093645-99348263ae86/go.mod h1:kHJEU3ofeGjhHklVoIGuVj85JJwZ6kWPaJwCIxgnFmo= github.com/neelance/sourcemap v0.0.0-20151028013722-8c68805598ab/go.mod h1:Qr6/a/Q4r9LP1IltGz7tA7iOK1WonHEYhu1HRBA7ZiM= github.com/nxadm/tail v1.4.4/go.mod h1:kenIhsEOeOJmVchQTgglprH7qJGnHDVpk1VPCcaMI8A= -github.com/o1-labs/go-bs-lmdb v1.1.0 h1:qfZNYs5tMJ5Ym23avKIbpubfWJBtSJ1REhu51qb9U6M= -github.com/o1-labs/go-bs-lmdb v1.1.0/go.mod h1:MGOPzEutRw24iN0qrSSbqOIzbgnyFQST/cKlnVSBmnI= +github.com/o1-labs/go-bs-lmdb v1.2.1 h1:vweOC9utt/eUiaJvoREMuTvfR/uAej86nXopudKmzgU= +github.com/o1-labs/go-bs-lmdb v1.2.1/go.mod h1:MGOPzEutRw24iN0qrSSbqOIzbgnyFQST/cKlnVSBmnI= github.com/o1-labs/go-bs-tests v0.0.6 h1:MytWPo5kOMgxK29UkU6ycWRJrMhuILUSQAtYpn5ek0g= github.com/o1-labs/go-libp2p-kad-dht-patcher v1.1.0 h1:YXmSCpS/VADts1gJbyEbFLsveun8t5HVfrRQbUHm7ds= github.com/o1-labs/go-libp2p-kad-dht-patcher v1.1.0/go.mod h1:ANWfn2GqDHigP/bw2boP1PnUG2WL3UNdfnfckVfJOIc= From 00e73745857ba909494db3ca1a8d3c39b291fb69 Mon Sep 17 00:00:00 2001 From: svv232 Date: Wed, 18 Sep 2024 01:10:07 -0400 Subject: [PATCH 105/234] Condition deletion of bitswap blocks on references New behavior introduced by the commit: if a bitswap block references is used by some root not fully deleted from the storage, it won't be deleted. --- src/app/libp2p_helper/src/bitswap_storage.go | 64 +++++++++++++++++-- .../src/libp2p_helper/bitswap.go | 9 +++ .../src/libp2p_helper/bitswap_delete.go | 3 + .../src/libp2p_helper/bitswap_downloader.go | 20 ++++-- 4 files changed, 86 insertions(+), 10 deletions(-) diff --git a/src/app/libp2p_helper/src/bitswap_storage.go b/src/app/libp2p_helper/src/bitswap_storage.go index 8e0ffb6cbb8..753c181e0a2 100644 --- a/src/app/libp2p_helper/src/bitswap_storage.go +++ b/src/app/libp2p_helper/src/bitswap_storage.go @@ -1,6 +1,7 @@ package codanet import ( + "bytes" "context" "errors" "fmt" @@ -25,14 +26,23 @@ type BitswapStorage interface { GetStatus(ctx context.Context, key [32]byte) (RootBlockStatus, error) SetStatus(ctx context.Context, key [32]byte, value RootBlockStatus) error DeleteStatus(ctx context.Context, key [32]byte) error + // Delete blocks for which no reference exist DeleteBlocks(ctx context.Context, keys [][32]byte) error ViewBlock(ctx context.Context, key [32]byte, callback func([]byte) error) error StoreBlocks(ctx context.Context, blocks []blocks.Block) error + // Reference (when exists=true) or dereference (when exists=false) + // blocks related to the specified root. 
+ // Blocks with references are protected from deletion. + UpdateReferences(ctx context.Context, root [32]byte, exists bool, keys ...[32]byte) error } type BitswapStorageLmdb struct { blockstore *lmdbbs.Blockstore statusDB lmdb.DBI + // Reference DB: maps a composite key `` to empty bytes, + // functioning as a set. Querying reference DB by the `` prefix + // allows to determine whether some root is still referencing the key + refsDB lmdb.DBI } func OpenBitswapStorageLmdb(path string) (*BitswapStorageLmdb, error) { @@ -52,13 +62,30 @@ func OpenBitswapStorageLmdb(path string) (*BitswapStorageLmdb, error) { if err != nil { return nil, fmt.Errorf("failed to create/open lmdb status database: %s", err) } - return &BitswapStorageLmdb{blockstore: blockstore, statusDB: statusDB}, nil + refsDB, err := blockstore.OpenDB("refs") + if err != nil { + return nil, fmt.Errorf("failed to create/open lmdb refs database: %s", err) + } + return &BitswapStorageLmdb{blockstore: blockstore, statusDB: statusDB, refsDB: refsDB}, nil } func (b *BitswapStorageLmdb) Blockstore() blockstore.Blockstore { return b.blockstore } +func (bs *BitswapStorageLmdb) UpdateReferences(ctx context.Context, root [32]byte, exists bool, keys ...[32]byte) error { + for _, key := range keys { + compositeKey := append(key[:], root[:]...) + err := bs.blockstore.PutData(ctx, bs.refsDB, compositeKey, func([]byte, bool) ([]byte, bool, error) { + return nil, exists, nil + }) + if err != nil { + return err + } + } + return nil +} + func UnmarshalRootBlockStatus(r []byte) (res RootBlockStatus, err error) { err = fmt.Errorf("wrong root block status retrieved: %v", r) if len(r) != 1 { @@ -119,17 +146,42 @@ func (bs *BitswapStorageLmdb) SetStatus(ctx context.Context, key [32]byte, newSt } } if !isStatusTransitionAllowed(exists, prev, newStatus) { - return nil, false, fmt.Errorf("wrong status transition: from %d to %d", prev, newStatus) + return nil, false, fmt.Errorf("wrong status transition: from %d to %d (exists: %v)", prev, newStatus, exists) } return []byte{byte(newStatus)}, true, nil }) } + +// hasKeyWithPrefix checks whether there is at least one key in the DB +// with the given prefix +func hasKeyWithPrefix(db lmdb.DBI, txn *lmdb.Txn, prefix []byte) (bool, error) { + cur, err := txn.OpenCursor(db) + if err != nil { + return false, err + } + defer cur.Close() + // Set cursor to return keys that are greater-than prefix + // (using ascending order) and return the first such key + k, _, err := cur.Get(prefix, nil, lmdb.SetRange) + if lmdb.IsNotFound(err) { + return false, nil + } else if err != nil { + return false, err + } + // Check whether the key contains is actually prefixed or merely greater + return bytes.HasPrefix(k, prefix), nil +} + func (bs *BitswapStorageLmdb) DeleteBlocks(ctx context.Context, keys [][32]byte) error { - cids := make([]cid.Cid, len(keys)) - for i, key := range keys { - cids[i] = BlockHashToCid(key) + keys_ := make([][]byte, len(keys)) + for i := range keys { + keys_[i] = keys[i][:] } - return bs.blockstore.DeleteMany(ctx, cids) + return bs.blockstore.DeleteBlocksIf(ctx, keys_, func(txn *lmdb.Txn, key []byte) (bool, error) { + // Delete a block from blocksDB if it has no references in the refsDB + hasPrefix, err := hasKeyWithPrefix(bs.refsDB, txn, key) + return !hasPrefix, err + }) } func CidToBlockHash(id cid.Cid) ([32]byte, error) { diff --git a/src/app/libp2p_helper/src/libp2p_helper/bitswap.go b/src/app/libp2p_helper/src/libp2p_helper/bitswap.go index 18c0c4b48d7..dd0b6d98e1f 100644 --- 
a/src/app/libp2p_helper/src/libp2p_helper/bitswap.go +++ b/src/app/libp2p_helper/src/libp2p_helper/bitswap.go @@ -78,10 +78,16 @@ func announceNewRootBlock(ctx context.Context, engine *bitswap.Bitswap, storage return err } bs := make([]blocks.Block, 0, len(blockMap)) + keys := make([][32]byte, 0, len(blockMap)) for h, b := range blockMap { bitswapLogger.Debugf("Publishing block %s (%d bytes)", codanet.BlockHashToCidSuffix(h), len(b)) block, _ := blocks.NewBlockWithCid(b, codanet.BlockHashToCid(h)) bs = append(bs, block) + keys = append(keys, h) + } + err = storage.UpdateReferences(ctx, root, true, keys...) + if err != nil { + return err } err = storage.StoreBlocks(ctx, bs) if err != nil { @@ -142,6 +148,9 @@ func (bs *BitswapCtx) RegisterDeadlineTracker(root_ root, downloadTimeout time.D func (bs *BitswapCtx) GetStatus(key [32]byte) (codanet.RootBlockStatus, error) { return bs.storage.GetStatus(bs.ctx, key) } +func (bs *BitswapCtx) UpdateReferences(root [32]byte, exists bool, keys ...[32]byte) error { + return bs.storage.UpdateReferences(bs.ctx, root, exists, keys...) +} func (bs *BitswapCtx) SetStatus(key [32]byte, value codanet.RootBlockStatus) error { return bs.storage.SetStatus(bs.ctx, key, value) } diff --git a/src/app/libp2p_helper/src/libp2p_helper/bitswap_delete.go b/src/app/libp2p_helper/src/libp2p_helper/bitswap_delete.go index 16660d1ceb5..039305f901f 100644 --- a/src/app/libp2p_helper/src/libp2p_helper/bitswap_delete.go +++ b/src/app/libp2p_helper/src/libp2p_helper/bitswap_delete.go @@ -80,6 +80,9 @@ func DeleteRoot(bs BitswapState, root BitswapBlockLink) (BitswapDataTag, error) return tag, err } } + if err := bs.UpdateReferences(root, false, allDescendants...); err != nil { + return tag, err + } if err := bs.DeleteBlocks(allDescendants); err != nil { return tag, err } diff --git a/src/app/libp2p_helper/src/libp2p_helper/bitswap_downloader.go b/src/app/libp2p_helper/src/libp2p_helper/bitswap_downloader.go index 201af79abec..9a1f15e6968 100644 --- a/src/app/libp2p_helper/src/libp2p_helper/bitswap_downloader.go +++ b/src/app/libp2p_helper/src/libp2p_helper/bitswap_downloader.go @@ -76,7 +76,11 @@ type BitswapState interface { GetStatus(key [32]byte) (codanet.RootBlockStatus, error) SetStatus(key [32]byte, value codanet.RootBlockStatus) error DeleteStatus(key [32]byte) error + // Delete blocks for which no reference exist DeleteBlocks(keys [][32]byte) error + // Reference or dereference blocks related to the root. + // Blocks with references are protected from deletion. 
+ UpdateReferences(root [32]byte, exists bool, keys ...[32]byte) error ViewBlock(key [32]byte, callback func([]byte) error) error StoreDownloadedBlock(block blocks.Block) error NodeDownloadParams() map[cid.Cid]map[root][]NodeIndex @@ -251,10 +255,6 @@ func processDownloadedBlockStep(params map[root][]NodeIndex, block blocks.Block, func processDownloadedBlock(block blocks.Block, bs BitswapState) { bs.CheckInvariants() id := block.Cid() - err := bs.StoreDownloadedBlock(block) - if err != nil { - bitswapLogger.Errorf("Failed to store block %s", id) - } nodeDownloadParams := bs.NodeDownloadParams() rootDownloadStates := bs.RootDownloadStates() depthIndices := bs.DepthIndices() @@ -276,6 +276,18 @@ func processDownloadedBlock(block blocks.Block, bs BitswapState) { } rootState.remainingNodeCounter = rootState.remainingNodeCounter - len(ixs) rps[root] = rootState + blockHash, err := codanet.CidToBlockHash(id) + if err == nil { + err = bs.UpdateReferences(root, true, blockHash) + } + if err != nil { + bitswapLogger.Errorf("Failed to strore reference for block %s (to root %s)", + id, codanet.BlockHashToCidSuffix(root)) + } + } + err := bs.StoreDownloadedBlock(block) + if err != nil { + bitswapLogger.Errorf("Failed to store block %s", id) } newParams, malformed := processDownloadedBlockStep(oldPs, block, rps, bs.MaxBlockSize(), depthIndices, bs.DataConfig()) for root, err := range malformed { From 98bf2ea6bc403f2cf0c01dff79f88511249c40b9 Mon Sep 17 00:00:00 2001 From: svv232 Date: Wed, 18 Sep 2024 02:41:26 -0400 Subject: [PATCH 106/234] Fix bitswap downloader tests Update the test after reference tracking change. --- .../libp2p_helper/bitswap_downloader_test.go | 34 +++++++++++++++++-- 1 file changed, 31 insertions(+), 3 deletions(-) diff --git a/src/app/libp2p_helper/src/libp2p_helper/bitswap_downloader_test.go b/src/app/libp2p_helper/src/libp2p_helper/bitswap_downloader_test.go index af1b2b5378b..84a787d3f5b 100644 --- a/src/app/libp2p_helper/src/libp2p_helper/bitswap_downloader_test.go +++ b/src/app/libp2p_helper/src/libp2p_helper/bitswap_downloader_test.go @@ -610,6 +610,7 @@ func TestProcessDownloadedBlockStep(t *testing.T) { type testBitswapState struct { r *rand.Rand statuses map[BitswapBlockLink]codanet.RootBlockStatus + refs map[BitswapBlockLink]map[root]struct{} blocks map[cid.Cid][]byte nodeDownloadParams map[cid.Cid]map[root][]NodeIndex rootDownloadStates map[root]*RootDownloadState @@ -670,7 +671,7 @@ func (bs *testBitswapState) RegisterDeadlineTracker(root_ root, downloadTimeout downloadTimeout time.Duration }{root: root_, downloadTimeout: downloadTimeout}) } -func (bs *testBitswapState) SendResourceUpdate(type_ ipc.ResourceUpdateType, root root) { +func (bs *testBitswapState) SendResourceUpdate(type_ ipc.ResourceUpdateType, tag BitswapDataTag, root root) { type1, has := bs.resourceUpdates[root] if has && type1 != type_ { panic("duplicate resource update") @@ -688,12 +689,37 @@ func (bs *testBitswapState) DeleteStatus(key [32]byte) error { delete(bs.statuses, BitswapBlockLink(key)) return nil } + func (bs *testBitswapState) DeleteBlocks(keys [][32]byte) error { for _, key := range keys { - delete(bs.blocks, codanet.BlockHashToCid(key)) + if len(bs.refs[key]) == 0 { + delete(bs.blocks, codanet.BlockHashToCid(key)) + } } return nil } + +func (bs *testBitswapState) UpdateReferences(root_ [32]byte, exists bool, keys ...[32]byte) error { + for _, key := range keys { + keyRefs, hasKeyRefs := bs.refs[key] + if exists { + if !hasKeyRefs { + keyRefs = make(map[root]struct{}) + bs.refs[key] = 
keyRefs + } + keyRefs[root_] = struct{}{} + } else { + if hasKeyRefs { + delete(keyRefs, root_) + if len(keyRefs) == 0 { + delete(bs.refs, key) + } + } + } + } + return nil +} + func (bs *testBitswapState) ViewBlock(key [32]byte, callback func([]byte) error) error { cid := codanet.BlockHashToCid(key) b, has := bs.blocks[cid] @@ -734,7 +760,7 @@ func (bs *testBitswapState) CheckInvariants() { } } -func testBitswapDownloadDo(t *testing.T, r *rand.Rand, bg blockGroup, prepopulatedBlocks *cid.Set, removedBlocks map[cid.Cid]root, expectedToFail []root) { +func testBitswapDownloadDo(t *testing.T, r *rand.Rand, bg blockGroup, prepopulatedBlocks *cid.Set, removedBlocks map[cid.Cid]root, expectedToFail []root) *testBitswapState { expectedToTimeout := map[root]bool{} for _, b := range removedBlocks { expectedToTimeout[b] = true @@ -749,6 +775,7 @@ func testBitswapDownloadDo(t *testing.T, r *rand.Rand, bg blockGroup, prepopulat bs := &testBitswapState{ r: r, statuses: map[BitswapBlockLink]codanet.RootBlockStatus{}, + refs: map[BitswapBlockLink]map[root]struct{}{}, blocks: initBlocks, nodeDownloadParams: map[cid.Cid]map[root][]NodeIndex{}, rootDownloadStates: map[root]*RootDownloadState{}, @@ -857,6 +884,7 @@ loop: if expectedToTimeoutTotal != len(bs.rootDownloadStates) { t.Error("Unexpected number of root download states") } + return bs } func genLargeBlockGroup(r *rand.Rand) (blockGroup, map[cid.Cid]root, []root) { From 3eef6b28717a8b589f70b89e2d82c892152b18eb Mon Sep 17 00:00:00 2001 From: svv232 Date: Wed, 18 Sep 2024 03:08:42 -0400 Subject: [PATCH 107/234] fixing nix vendor hash --- nix/libp2p_helper.json | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/nix/libp2p_helper.json b/nix/libp2p_helper.json index 7a0a9b7e34a..783913efe5a 100644 --- a/nix/libp2p_helper.json +++ b/nix/libp2p_helper.json @@ -1 +1 @@ -{"go.mod":"d5de7e35a76f5c9ce7d6c98f0da39c763961e77b8c94761b1e89ab4bdfdc2a97","go.sum":"586fd920114d3875ec3e1d739921d77d30ad8e2f297b67781ca41d25a81b65a9","vendorSha256":"sha256-vyKrKi5bqm8Mf2rUOojSY0IXHcuNpcVNvd1Iu1RBxDo="} \ No newline at end of file +{"go.mod":"6c45e03ccef1f79541f021cf358fa69bf80cb69b58ae92c776bc09cbb1cc8096","go.sum":"d0f40cfc7b2dc7000cd0a0be051c6a832bdbf880fee88550f2b409690cc18774","vendorSha256":"sha256-x/ZReaHGNsDshohcF4+p9Xj/JTK3gMUyeTgJkaN/eUc="} \ No newline at end of file From adab594d1657ce0e3f7213da65bfaf213d6fe68d Mon Sep 17 00:00:00 2001 From: svv232 Date: Wed, 18 Sep 2024 23:18:41 -0400 Subject: [PATCH 108/234] Increase short test timeout Commentary from Sai: The 40 minute timeout wasn't enough for these changes with some runs. I found relatively low variance with a 60 minute timeout. I wasn't sure what the problem was initially on remote until i got the test passing locally after waiting longer. We run the new downloader test as well as the util test. It seems justified to me, because the downloader was not required before. 
--- src/app/libp2p_helper/Makefile | 2 +- .../libp2p_helper/src/libp2p_helper/bitswap_downloader_test.go | 2 +- 2 files changed, 2 insertions(+), 2 deletions(-) diff --git a/src/app/libp2p_helper/Makefile b/src/app/libp2p_helper/Makefile index f7b59a4f466..9292677edda 100644 --- a/src/app/libp2p_helper/Makefile +++ b/src/app/libp2p_helper/Makefile @@ -14,7 +14,7 @@ libp2p_helper: ../../libp2p_ipc/libp2p_ipc.capnp.go test: ../../libp2p_ipc/libp2p_ipc.capnp.go cd src/libp2p_helper \ && (ulimit -n 65536 || true) \ - && $(GO) test -short -timeout 40m + && $(GO) test -short -timeout 60m test-bs-qc: ../../libp2p_ipc/libp2p_ipc.capnp.go cd src/libp2p_helper \ diff --git a/src/app/libp2p_helper/src/libp2p_helper/bitswap_downloader_test.go b/src/app/libp2p_helper/src/libp2p_helper/bitswap_downloader_test.go index 84a787d3f5b..03d63839135 100644 --- a/src/app/libp2p_helper/src/libp2p_helper/bitswap_downloader_test.go +++ b/src/app/libp2p_helper/src/libp2p_helper/bitswap_downloader_test.go @@ -967,7 +967,7 @@ func TestBitswapDownload(t *testing.T) { } } -func TestBitswapDownloadPrepoluated(t *testing.T) { +func TestBitswapDownloadPrepopulated(t *testing.T) { seed := time.Now().Unix() t.Logf("Seed: %d", seed) r := rand.New(rand.NewSource(seed)) From 637a9dc190804ba78abe830ad5b7cbeec7164d11 Mon Sep 17 00:00:00 2001 From: svv232 Date: Wed, 18 Sep 2024 23:18:55 -0400 Subject: [PATCH 109/234] Change local network interface in unit test --- src/app/libp2p_helper/src/libp2p_helper/config_msg_test.go | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/src/app/libp2p_helper/src/libp2p_helper/config_msg_test.go b/src/app/libp2p_helper/src/libp2p_helper/config_msg_test.go index 2b8070932ae..099572f1106 100644 --- a/src/app/libp2p_helper/src/libp2p_helper/config_msg_test.go +++ b/src/app/libp2p_helper/src/libp2p_helper/config_msg_test.go @@ -253,7 +253,7 @@ func TestGetListeningAddrs(t *testing.T) { } func TestListen(t *testing.T) { - addrStr := "/ip4/127.0.0.2/tcp/8000" + addrStr := "/ip4/127.0.0.1/tcp/8000" testApp, _ := newTestApp(t, nil, true) From 80538e200b837cbe799d8955858dce5c06cf7b35 Mon Sep 17 00:00:00 2001 From: dkijania Date: Mon, 14 Oct 2024 21:06:52 +0200 Subject: [PATCH 110/234] pass port to setup-database-for-archive-node.sh as argument and pass it in dhall --- buildkite/scripts/setup-database-for-archive-node.sh | 3 ++- buildkite/src/Jobs/Test/ArchiveNodeUnitTest.dhall | 6 ++++-- 2 files changed, 6 insertions(+), 3 deletions(-) diff --git a/buildkite/scripts/setup-database-for-archive-node.sh b/buildkite/scripts/setup-database-for-archive-node.sh index 9aa9062b223..cf494a1ffaa 100755 --- a/buildkite/scripts/setup-database-for-archive-node.sh +++ b/buildkite/scripts/setup-database-for-archive-node.sh @@ -5,6 +5,7 @@ set -euo pipefail user=$1 password=$2 db=$3 +port=$4 sudo service postgresql start @@ -12,4 +13,4 @@ sudo -u postgres psql -c "CREATE USER ${user} WITH LOGIN SUPERUSER PASSWORD '${p sudo pg_isready service postgresql status sudo -u postgres createdb -O $user $db -PGPASSWORD=$password psql -h localhost -p 5434 -U $user -d $db -a -f src/app/archive/create_schema.sql +PGPASSWORD=$password psql -h localhost -p $port -U $user -d $db -a -f src/app/archive/create_schema.sql diff --git a/buildkite/src/Jobs/Test/ArchiveNodeUnitTest.dhall b/buildkite/src/Jobs/Test/ArchiveNodeUnitTest.dhall index 041ce56bbd7..1e79bacaa22 100644 --- a/buildkite/src/Jobs/Test/ArchiveNodeUnitTest.dhall +++ b/buildkite/src/Jobs/Test/ArchiveNodeUnitTest.dhall @@ -24,6 +24,8 @@ let password = "codarules" 
let db = "archiver" +let port = "5432" + let command_key = "archive-unit-tests" in Pipeline.build @@ -50,14 +52,14 @@ in Pipeline.build [ "POSTGRES_PASSWORD=${password}" , "POSTGRES_USER=${user}" , "POSTGRES_DB=${db}" - , "MINA_TEST_POSTGRES=postgres://${user}:${password}@localhost:5432/${db}" + , "MINA_TEST_POSTGRES=postgres://${user}:${password}@localhost:${port}/${db}" , "GO=/usr/lib/go/bin/go" , "DUNE_INSTRUMENT_WITH=bisect_ppx" , "COVERALLS_TOKEN" ] ( Prelude.Text.concatSep " && " - [ "bash buildkite/scripts/setup-database-for-archive-node.sh ${user} ${password} ${db}" + [ "bash buildkite/scripts/setup-database-for-archive-node.sh ${user} ${password} ${db} ${port}" , WithCargo.withCargo "eval \\\$(opam config env) && dune runtest src/app/archive && buildkite/scripts/upload-partial-coverage-data.sh ${command_key} dev" ] From 45385b2375e72ad917f3b858e70e64066fe72a83 Mon Sep 17 00:00:00 2001 From: dkijania Date: Mon, 14 Oct 2024 21:21:59 +0200 Subject: [PATCH 111/234] adjust port --- buildkite/src/Jobs/Test/ArchiveNodeUnitTest.dhall | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/buildkite/src/Jobs/Test/ArchiveNodeUnitTest.dhall b/buildkite/src/Jobs/Test/ArchiveNodeUnitTest.dhall index 1e79bacaa22..b3799a03535 100644 --- a/buildkite/src/Jobs/Test/ArchiveNodeUnitTest.dhall +++ b/buildkite/src/Jobs/Test/ArchiveNodeUnitTest.dhall @@ -24,7 +24,7 @@ let password = "codarules" let db = "archiver" -let port = "5432" +let port = "5433" let command_key = "archive-unit-tests" From 3ffbc4a051413612db12b3604c6db940d12abd10 Mon Sep 17 00:00:00 2001 From: dkijania Date: Mon, 14 Oct 2024 21:43:24 +0200 Subject: [PATCH 112/234] reformat --- src/app/archive/lib/test.ml | 12 ++++---- .../src/cli_entrypoint/mina_cli_entrypoint.ml | 15 +++++----- .../src/init/transaction_snark_profiler.ml | 11 +++++--- .../bootstrap_controller.ml | 12 ++++---- src/lib/ledger_catchup/normal_catchup.ml | 12 ++++---- src/lib/ledger_catchup/super_catchup.ml | 12 ++++---- src/lib/mina_lib/mina_lib.ml | 10 +++++-- src/lib/mina_lib/tests/tests.ml | 3 +- src/lib/network_pool/batcher.ml | 13 +++++---- src/lib/network_pool/snark_pool.ml | 12 ++++---- src/lib/network_pool/test.ml | 12 ++++---- src/lib/network_pool/transaction_pool.ml | 25 +++++++++-------- src/lib/staged_ledger/staged_ledger.ml | 25 +++++++++-------- .../transaction_inclusion_status.ml | 13 +++++---- .../full_frontier/full_frontier.ml | 14 ++++++---- .../transition_handler/catchup_scheduler.ml | 10 +++++-- src/lib/transition_handler/processor.ml | 12 ++++---- src/lib/verifier/dummy.ml | 9 ++++-- src/lib/verifier/for_test.ml | 22 ++++++--------- src/lib/verifier/for_test.mli | 6 ++-- src/lib/verifier/prod.ml | 28 ++++++++++--------- src/lib/verifier/verifier.mli | 1 - 22 files changed, 161 insertions(+), 128 deletions(-) diff --git a/src/app/archive/lib/test.ml b/src/app/archive/lib/test.ml index 4d7d9afd509..83108bf299f 100644 --- a/src/app/archive/lib/test.ml +++ b/src/app/archive/lib/test.ml @@ -20,12 +20,14 @@ let%test_module "Archive node unit tests" = let verifier = Async.Thread_safe.block_on_async_exn (fun () -> - let%bind verification_key = Lazy.force (Verifier.For_test.get_blockchain_verification_key ~constraint_constants ~proof_level) in - Verifier.create ~logger ~proof_level - ~conf_dir:None + let%bind verification_key = + Lazy.force + (Verifier.For_test.get_blockchain_verification_key + ~constraint_constants ~proof_level ) + in + Verifier.create ~logger ~proof_level ~conf_dir:None ~pids:(Child_processes.Termination.create_pid_table ()) - 
~commit_id:"not specified for unit tests" () - ~verification_key ) + ~commit_id:"not specified for unit tests" () ~verification_key ) module Genesis_ledger = (val Genesis_ledger.for_unit_tests) diff --git a/src/app/cli/src/cli_entrypoint/mina_cli_entrypoint.ml b/src/app/cli/src/cli_entrypoint/mina_cli_entrypoint.ml index a201ec7a8c9..a55e72c8d0c 100644 --- a/src/app/cli/src/cli_entrypoint/mina_cli_entrypoint.ml +++ b/src/app/cli/src/cli_entrypoint/mina_cli_entrypoint.ml @@ -1771,16 +1771,15 @@ let internal_commands logger = failwithf "Could not parse JSON: %s" err () ) ) in - - - - let%bind verification_key = Lazy.force (Verifier.For_test.get_blockchain_verification_key ~constraint_constants ~proof_level) in + let%bind verification_key = + Lazy.force + (Verifier.For_test.get_blockchain_verification_key + ~constraint_constants ~proof_level ) + in let%bind verifier = Verifier.create ~commit_id:Mina_version.commit_id ~logger - ~proof_level ~pids:(Pid.Table.create ()) - ~conf_dir:(Some conf_dir) - ~verification_key - () + ~proof_level ~pids:(Pid.Table.create ()) ~conf_dir:(Some conf_dir) + ~verification_key () in let%bind result = let cap lst = diff --git a/src/app/cli/src/init/transaction_snark_profiler.ml b/src/app/cli/src/init/transaction_snark_profiler.ml index 79565a7852b..f0b94b01bd4 100644 --- a/src/app/cli/src/init/transaction_snark_profiler.ml +++ b/src/app/cli/src/init/transaction_snark_profiler.ml @@ -17,13 +17,16 @@ let run ~genesis_constants ~constraint_constants ~proof_level Parallel.init_master () ; let verifier = Async.Thread_safe.block_on_async_exn (fun () -> - let open Async.Deferred.Let_syntax in - let%bind verification_key = Lazy.force (Verifier.For_test.get_blockchain_verification_key ~constraint_constants ~proof_level) in + let open Async.Deferred.Let_syntax in + let%bind verification_key = + Lazy.force + (Verifier.For_test.get_blockchain_verification_key + ~constraint_constants ~proof_level ) + in Verifier.create ~commit_id:Mina_version.commit_id ~logger ~proof_level ~conf_dir:None ~pids:(Child_processes.Termination.create_pid_table ()) - ~verification_key - () ) + ~verification_key () ) in let rec go n = if n <= 0 then () diff --git a/src/lib/bootstrap_controller/bootstrap_controller.ml b/src/lib/bootstrap_controller/bootstrap_controller.ml index d08b496d131..a191d045d6d 100644 --- a/src/lib/bootstrap_controller/bootstrap_controller.ml +++ b/src/lib/bootstrap_controller/bootstrap_controller.ml @@ -745,12 +745,14 @@ let%test_module "Bootstrap_controller tests" = let verifier = Async.Thread_safe.block_on_async_exn (fun () -> - let%bind verification_key = Lazy.force (Verifier.For_test.get_blockchain_verification_key ~constraint_constants ~proof_level) in - Verifier.create ~logger ~proof_level - ~conf_dir:None + let%bind verification_key = + Lazy.force + (Verifier.For_test.get_blockchain_verification_key + ~constraint_constants ~proof_level ) + in + Verifier.create ~logger ~proof_level ~conf_dir:None ~pids:(Child_processes.Termination.create_pid_table ()) - ~commit_id:"not specified for unit tests" - ~verification_key () ) + ~commit_id:"not specified for unit tests" ~verification_key () ) module Genesis_ledger = (val precomputed_values.genesis_ledger) diff --git a/src/lib/ledger_catchup/normal_catchup.ml b/src/lib/ledger_catchup/normal_catchup.ml index ee7ce4fea8d..00396a235f5 100644 --- a/src/lib/ledger_catchup/normal_catchup.ml +++ b/src/lib/ledger_catchup/normal_catchup.ml @@ -908,12 +908,14 @@ let%test_module "Ledger_catchup tests" = let verifier = 
Async.Thread_safe.block_on_async_exn (fun () -> - let%bind verification_key = Lazy.force (Verifier.For_test.get_blockchain_verification_key ~constraint_constants ~proof_level) in - Verifier.create ~logger ~proof_level - ~conf_dir:None + let%bind verification_key = + Lazy.force + (Verifier.For_test.get_blockchain_verification_key + ~constraint_constants ~proof_level ) + in + Verifier.create ~logger ~proof_level ~conf_dir:None ~pids:(Child_processes.Termination.create_pid_table ()) - ~commit_id:"not specified for unit tests" - ~verification_key () ) + ~commit_id:"not specified for unit tests" ~verification_key () ) module Context = struct let logger = logger diff --git a/src/lib/ledger_catchup/super_catchup.ml b/src/lib/ledger_catchup/super_catchup.ml index 268d77e4505..9e1b91d8007 100644 --- a/src/lib/ledger_catchup/super_catchup.ml +++ b/src/lib/ledger_catchup/super_catchup.ml @@ -1453,12 +1453,14 @@ let%test_module "Ledger_catchup tests" = let verifier = Async.Thread_safe.block_on_async_exn (fun () -> - let%bind verification_key = Lazy.force (Verifier.For_test.get_blockchain_verification_key ~constraint_constants ~proof_level) in - Verifier.create ~logger ~proof_level - ~conf_dir:None + let%bind verification_key = + Lazy.force + (Verifier.For_test.get_blockchain_verification_key + ~constraint_constants ~proof_level ) + in + Verifier.create ~logger ~proof_level ~conf_dir:None ~pids:(Child_processes.Termination.create_pid_table ()) - ~commit_id:"not specified for unit tests" - ~verification_key () ) + ~commit_id:"not specified for unit tests" ~verification_key () ) module Context = struct let logger = logger diff --git a/src/lib/mina_lib/mina_lib.ml b/src/lib/mina_lib/mina_lib.ml index 089d3ed9376..3e4fa067548 100644 --- a/src/lib/mina_lib/mina_lib.ml +++ b/src/lib/mina_lib/mina_lib.ml @@ -1561,14 +1561,18 @@ let create ~commit_id ?wallets (config : Config.t) = ~metadata:[ ("exn", Error_json.error_to_yojson err) ] ) ) (fun () -> O1trace.thread "manage_verifier_subprocess" (fun () -> - let%bind verification_key = Prover.get_blockchain_verification_key prover >>| Or_error.ok_exn in - let%bind verifier = + let%bind verification_key = + Prover.get_blockchain_verification_key prover + >>| Or_error.ok_exn + in + let%bind verifier = Verifier.create ~commit_id ~logger:config.logger ~enable_internal_tracing: (Internal_tracing.is_enabled ()) ~internal_trace_filename:"verifier-internal-trace.jsonl" ~proof_level:config.precomputed_values.proof_level - ~pids:config.pids ~conf_dir:(Some config.conf_dir) ~verification_key () + ~pids:config.pids ~conf_dir:(Some config.conf_dir) + ~verification_key () in let%map () = set_itn_data (module Verifier) verifier in verifier ) ) diff --git a/src/lib/mina_lib/tests/tests.ml b/src/lib/mina_lib/tests/tests.ml index 426e365cc71..f5b3ec23b68 100644 --- a/src/lib/mina_lib/tests/tests.ml +++ b/src/lib/mina_lib/tests/tests.ml @@ -142,8 +142,7 @@ let%test_module "Epoch ledger sync tests" = let make_verifier (module Context : CONTEXT) = let open Context in - Verifier.create ~logger ~proof_level:precomputed_values.proof_level - ~pids + Verifier.create ~logger ~proof_level:precomputed_values.proof_level ~pids ~conf_dir:(Some (make_dirname "verifier")) ~commit_id:"not specified for unit tests" () diff --git a/src/lib/network_pool/batcher.ml b/src/lib/network_pool/batcher.ml index 7e451af373a..252a866a7a7 100644 --- a/src/lib/network_pool/batcher.ml +++ b/src/lib/network_pool/batcher.ml @@ -498,13 +498,14 @@ module Snark_pool = struct let verifier = 
Async.Thread_safe.block_on_async_exn (fun () -> - let%bind verification_key = Lazy.force (Verifier.For_test.get_blockchain_verification_key ~constraint_constants ~proof_level) in - Verifier.create ~logger ~proof_level - ~conf_dir:None + let%bind verification_key = + Lazy.force + (Verifier.For_test.get_blockchain_verification_key + ~constraint_constants ~proof_level ) + in + Verifier.create ~logger ~proof_level ~conf_dir:None ~pids:(Child_processes.Termination.create_pid_table ()) - ~commit_id:"not specified for unit tests" - ~verification_key - () ) + ~commit_id:"not specified for unit tests" ~verification_key () ) let gen_proofs = let open Quickcheck.Generator.Let_syntax in diff --git a/src/lib/network_pool/snark_pool.ml b/src/lib/network_pool/snark_pool.ml index e7dda2e5cf5..a4fa0dde0ee 100644 --- a/src/lib/network_pool/snark_pool.ml +++ b/src/lib/network_pool/snark_pool.ml @@ -591,12 +591,14 @@ let%test_module "random set test" = let verifier = Async.Thread_safe.block_on_async_exn (fun () -> - let%bind verification_key = Lazy.force (Verifier.For_test.get_blockchain_verification_key ~constraint_constants ~proof_level) in - Verifier.create ~logger ~proof_level - ~conf_dir:None + let%bind verification_key = + Lazy.force + (Verifier.For_test.get_blockchain_verification_key + ~constraint_constants ~proof_level ) + in + Verifier.create ~logger ~proof_level ~conf_dir:None ~pids:(Child_processes.Termination.create_pid_table ()) - ~commit_id:"not specified for unit tests" - ~verification_key () ) + ~commit_id:"not specified for unit tests" ~verification_key () ) module Mock_snark_pool = Make (Mocks.Base_ledger) (Mocks.Staged_ledger) (Mocks.Transition_frontier) diff --git a/src/lib/network_pool/test.ml b/src/lib/network_pool/test.ml index 0e4a398e0d8..f14efa466e7 100644 --- a/src/lib/network_pool/test.ml +++ b/src/lib/network_pool/test.ml @@ -27,12 +27,14 @@ let%test_module "network pool test" = let verifier = Async.Thread_safe.block_on_async_exn (fun () -> - let%bind verification_key = Lazy.force (Verifier.For_test.get_blockchain_verification_key ~constraint_constants ~proof_level) in - Verifier.create ~logger ~proof_level - ~conf_dir:None + let%bind verification_key = + Lazy.force + (Verifier.For_test.get_blockchain_verification_key + ~constraint_constants ~proof_level ) + in + Verifier.create ~logger ~proof_level ~conf_dir:None ~pids:(Child_processes.Termination.create_pid_table ()) - ~commit_id:"not specified for unit tests" () - ~verification_key ) + ~commit_id:"not specified for unit tests" () ~verification_key ) module Mock_snark_pool = Snark_pool.Make (Mocks.Base_ledger) (Mocks.Staged_ledger) diff --git a/src/lib/network_pool/transaction_pool.ml b/src/lib/network_pool/transaction_pool.ml index 3156345d58a..7640dd91f65 100644 --- a/src/lib/network_pool/transaction_pool.ml +++ b/src/lib/network_pool/transaction_pool.ml @@ -1667,13 +1667,14 @@ let%test_module _ = let verifier = Async.Thread_safe.block_on_async_exn (fun () -> - let%bind verification_key = Lazy.force (Verifier.For_test.get_blockchain_verification_key ~constraint_constants ~proof_level) in - Verifier.create ~logger ~proof_level - ~conf_dir:None + let%bind verification_key = + Lazy.force + (Verifier.For_test.get_blockchain_verification_key + ~constraint_constants ~proof_level ) + in + Verifier.create ~logger ~proof_level ~conf_dir:None ~pids:(Child_processes.Termination.create_pid_table ()) - ~commit_id:"not specified for unit tests" () - ~verification_key - ) + ~commit_id:"not specified for unit tests" () ~verification_key ) 
let `VK vk, `Prover prover = Transaction_snark.For_tests.create_trivial_snapp ~constraint_constants () @@ -3087,13 +3088,15 @@ let%test_module _ = let%test "account update with a different network id that uses proof \ authorization would be rejected" = Thread_safe.block_on_async_exn (fun () -> - let%bind verification_key = Lazy.force (Verifier.For_test.get_blockchain_verification_key ~constraint_constants ~proof_level) in + let%bind verification_key = + Lazy.force + (Verifier.For_test.get_blockchain_verification_key + ~constraint_constants ~proof_level ) + in let%bind verifier_full = - Verifier.create ~logger ~proof_level:Full - ~conf_dir:None + Verifier.create ~logger ~proof_level:Full ~conf_dir:None ~pids:(Child_processes.Termination.create_pid_table ()) - ~commit_id:"not specified for unit tests" () - ~verification_key + ~commit_id:"not specified for unit tests" () ~verification_key in let%bind test = setup_test ~verifier:verifier_full diff --git a/src/lib/staged_ledger/staged_ledger.ml b/src/lib/staged_ledger/staged_ledger.ml index c2694e0fb14..e87d30ca7e5 100644 --- a/src/lib/staged_ledger/staged_ledger.ml +++ b/src/lib/staged_ledger/staged_ledger.ml @@ -2389,12 +2389,14 @@ let%test_module "staged ledger tests" = let verifier = Async.Thread_safe.block_on_async_exn (fun () -> - let%bind verification_key = Lazy.force (Verifier.For_test.get_blockchain_verification_key ~constraint_constants ~proof_level) in - Verifier.create ~logger ~proof_level - ~conf_dir:None + let%bind verification_key = + Lazy.force + (Verifier.For_test.get_blockchain_verification_key + ~constraint_constants ~proof_level ) + in + Verifier.create ~logger ~proof_level ~conf_dir:None ~pids:(Child_processes.Termination.create_pid_table ()) - ~commit_id:"not specified for unit tests" () - ~verification_key ) + ~commit_id:"not specified for unit tests" () ~verification_key ) let find_vk ledger = Zkapp_command.Verifiable.load_vk_from_ledger ~get:(Ledger.get ledger) @@ -5191,14 +5193,13 @@ let%test_module "staged ledger tests" = = 1 ) ; let%bind verifier_full = - - let%bind verification_key = Lazy.force (Verifier.For_test.get_blockchain_verification_key - ~constraint_constants - ~proof_level:Full) - + let%bind verification_key = + Lazy.force + (Verifier.For_test.get_blockchain_verification_key + ~constraint_constants ~proof_level:Full ) in - Verifier.create ~logger ~proof_level:Full - ~conf_dir:None + + Verifier.create ~logger ~proof_level:Full ~conf_dir:None ~pids: (Child_processes.Termination.create_pid_table ()) ~commit_id:"not specified for unit tests" () diff --git a/src/lib/transaction_inclusion_status/transaction_inclusion_status.ml b/src/lib/transaction_inclusion_status/transaction_inclusion_status.ml index 4b5f926e809..3a71ce3572c 100644 --- a/src/lib/transaction_inclusion_status/transaction_inclusion_status.ml +++ b/src/lib/transaction_inclusion_status/transaction_inclusion_status.ml @@ -90,15 +90,16 @@ let%test_module "transaction_status" = let block_window_duration = Mina_compile_config.For_unit_tests.t.block_window_duration - let verifier = Async.Thread_safe.block_on_async_exn (fun () -> - let%bind verification_key = Lazy.force (Verifier.For_test.get_blockchain_verification_key ~constraint_constants ~proof_level) in - Verifier.create ~logger ~proof_level - ~conf_dir:None + let%bind verification_key = + Lazy.force + (Verifier.For_test.get_blockchain_verification_key + ~constraint_constants ~proof_level ) + in + Verifier.create ~logger ~proof_level ~conf_dir:None ~pids:(Child_processes.Termination.create_pid_table 
()) - ~commit_id:"not specified for unit tests" - ~verification_key () ) + ~commit_id:"not specified for unit tests" ~verification_key () ) let key_gen = let open Quickcheck.Generator in diff --git a/src/lib/transition_frontier/full_frontier/full_frontier.ml b/src/lib/transition_frontier/full_frontier/full_frontier.ml index 9981fca9197..311546ec628 100644 --- a/src/lib/transition_frontier/full_frontier/full_frontier.ml +++ b/src/lib/transition_frontier/full_frontier/full_frontier.ml @@ -962,13 +962,15 @@ module For_tests = struct let verifier () = Async.Thread_safe.block_on_async_exn (fun () -> - let open Async.Deferred.Let_syntax in - let%bind verification_key = Lazy.force (Verifier.For_test.get_blockchain_verification_key ~constraint_constants ~proof_level) in - Verifier.create ~logger ~proof_level - ~conf_dir:None + let open Async.Deferred.Let_syntax in + let%bind verification_key = + Lazy.force + (Verifier.For_test.get_blockchain_verification_key + ~constraint_constants ~proof_level ) + in + Verifier.create ~logger ~proof_level ~conf_dir:None ~pids:(Child_processes.Termination.create_pid_table ()) - ~commit_id:"not specified for unit tests" () - ~verification_key ) + ~commit_id:"not specified for unit tests" () ~verification_key ) module Genesis_ledger = (val precomputed_values.genesis_ledger) diff --git a/src/lib/transition_handler/catchup_scheduler.ml b/src/lib/transition_handler/catchup_scheduler.ml index bda1c3c4031..c4950997f15 100644 --- a/src/lib/transition_handler/catchup_scheduler.ml +++ b/src/lib/transition_handler/catchup_scheduler.ml @@ -372,9 +372,13 @@ let%test_module "Transition_handler.Catchup_scheduler tests" = let verifier = Async.Thread_safe.block_on_async_exn (fun () -> - let%bind verification_key = Lazy.force (Verifier.For_test.get_blockchain_verification_key ~constraint_constants ~proof_level) in - Verifier.create ~logger ~proof_level - ~conf_dir:None ~pids ~commit_id:"not specified for unit tests" ~verification_key () ) + let%bind verification_key = + Lazy.force + (Verifier.For_test.get_blockchain_verification_key + ~constraint_constants ~proof_level ) + in + Verifier.create ~logger ~proof_level ~conf_dir:None ~pids + ~commit_id:"not specified for unit tests" ~verification_key () ) (* cast a breadcrumb into a cached, enveloped, partially validated transition *) let downcast_breadcrumb breadcrumb = diff --git a/src/lib/transition_handler/processor.ml b/src/lib/transition_handler/processor.ml index ada993b6f3c..f725a165d0e 100644 --- a/src/lib/transition_handler/processor.ml +++ b/src/lib/transition_handler/processor.ml @@ -498,12 +498,14 @@ let%test_module "Transition_handler.Processor tests" = let verifier = Async.Thread_safe.block_on_async_exn (fun () -> - let%bind verification_key = Lazy.force (Verifier.For_test.get_blockchain_verification_key ~constraint_constants ~proof_level) in - Verifier.create ~logger ~proof_level - ~conf_dir:None + let%bind verification_key = + Lazy.force + (Verifier.For_test.get_blockchain_verification_key + ~constraint_constants ~proof_level ) + in + Verifier.create ~logger ~proof_level ~conf_dir:None ~pids:(Child_processes.Termination.create_pid_table ()) - ~commit_id:"not specified for unit tests" () - ~verification_key ) + ~commit_id:"not specified for unit tests" () ~verification_key ) module Context = struct let logger = logger diff --git a/src/lib/verifier/dummy.ml b/src/lib/verifier/dummy.ml index d826f50f27b..40c9291062f 100644 --- a/src/lib/verifier/dummy.ml +++ b/src/lib/verifier/dummy.ml @@ -24,7 +24,7 @@ let create 
~logger:_ ?enable_internal_tracing:_ ?internal_trace_filename:_ let verify_blockchain_snarks chains = match proof_level with | Genesis_constants.Proof_level.Full -> - Blockchain_snark.Blockchain_snark_state.verify ~key:verification_key + Blockchain_snark.Blockchain_snark_state.verify ~key:verification_key (List.map chains ~f:(fun snark -> ( Blockchain_snark.Blockchain.state snark , Blockchain_snark.Blockchain.proof snark ) ) ) @@ -35,7 +35,10 @@ let create ~logger:_ ?enable_internal_tracing:_ ?internal_trace_filename:_ let verify_transaction_snarks ts = match proof_level with | Full -> ( - match Or_error.try_with (fun () -> Transaction_snark.verify ~key:verification_key ts) with + match + Or_error.try_with (fun () -> + Transaction_snark.verify ~key:verification_key ts ) + with | Ok result -> result |> Deferred.map ~f:Or_error.return | Error e -> @@ -148,7 +151,7 @@ let verify_transaction_snarks { verify_transaction_snarks; _ } ts = verify_transaction_snarks ts let get_blockchain_verification_key { verification_key; _ } = - Deferred.Or_error.return verification_key + Deferred.Or_error.return verification_key let toggle_internal_tracing _ _ = Deferred.Or_error.ok_unit diff --git a/src/lib/verifier/for_test.ml b/src/lib/verifier/for_test.ml index d75b437ccec..5524ef05b08 100644 --- a/src/lib/verifier/for_test.ml +++ b/src/lib/verifier/for_test.ml @@ -1,18 +1,14 @@ - let get_blockchain_verification_key ~constraint_constants ~proof_level = let module T = Transaction_snark.Make (struct - let constraint_constants = constraint_constants - - let proof_level = proof_level - end) + let constraint_constants = constraint_constants - in - let module B = Blockchain_snark.Blockchain_snark_state.Make (struct - let tag = T.tag + let proof_level = proof_level + end) in + let module B = Blockchain_snark.Blockchain_snark_state.Make (struct + let tag = T.tag - let constraint_constants = constraint_constants + let constraint_constants = constraint_constants - let proof_level = proof_level - end) - in - B.Proof.verification_key + let proof_level = proof_level + end) in + B.Proof.verification_key diff --git a/src/lib/verifier/for_test.mli b/src/lib/verifier/for_test.mli index 1c87755cfc6..00f3be59bcf 100644 --- a/src/lib/verifier/for_test.mli +++ b/src/lib/verifier/for_test.mli @@ -1,2 +1,4 @@ - -val get_blockchain_verification_key : constraint_constants:Genesis_constants.Constraint_constants.t -> proof_level:Genesis_constants.Proof_level.t -> Pickles.Verification_key.t Async.Deferred.t Lazy.t \ No newline at end of file +val get_blockchain_verification_key : + constraint_constants:Genesis_constants.Constraint_constants.t + -> proof_level:Genesis_constants.Proof_level.t + -> Pickles.Verification_key.t Async.Deferred.t Lazy.t diff --git a/src/lib/verifier/prod.ml b/src/lib/verifier/prod.ml index caed7c7a916..2241e6e61e0 100644 --- a/src/lib/verifier/prod.ml +++ b/src/lib/verifier/prod.ml @@ -69,7 +69,7 @@ module Worker_state = struct ; logger : Logger.t ; proof_level : Genesis_constants.Proof_level.t ; commit_id : string - ; verification_key: Pickles.Verification_key.Stable.Latest.t + ; verification_key : Pickles.Verification_key.Stable.Latest.t } [@@deriving bin_io_unversioned] @@ -82,7 +82,6 @@ module Worker_state = struct Pickles.Side_loaded.srs_precomputation () ; Deferred.return (let module M = struct - let verify_commands (cs : User_command.Verifiable.t With_status.t With_id_tag.t list ) @@ -144,20 +143,23 @@ module Worker_state = struct [%log internal] "Verifier_verify_commands_done" ; result - - let 
verify_blockchain_snarks bs = Context_logger.with_logger (Some logger) @@ fun () -> Internal_tracing.Context_call.with_call_id @@ fun () -> [%log internal] "Verifier_verify_blockchain_snarks" ; - let%map result = Blockchain_snark_state.verify ~key:verification_key bs in + let%map result = + Blockchain_snark_state.verify ~key:verification_key bs + in [%log internal] "Verifier_verify_blockchain_snarks_done" ; result let verify_transaction_snarks ts = - match Or_error.try_with (fun () -> Transaction_snark.verify ts ~key:verification_key) with + match + Or_error.try_with (fun () -> + Transaction_snark.verify ts ~key:verification_key ) + with | Ok result -> result | Error e -> @@ -178,7 +180,7 @@ module Worker_state = struct result let get_blockchain_verification_key () = - Deferred.return verification_key + Deferred.return verification_key let toggle_internal_tracing enabled = don't_wait_for @@ -220,7 +222,8 @@ module Worker_state = struct let verify_transaction_snarks _ = Deferred.return (Ok ()) - let get_blockchain_verification_key () = Deferred.return verification_key + let get_blockchain_verification_key () = + Deferred.return verification_key let toggle_internal_tracing _ = () @@ -252,7 +255,7 @@ module Worker = struct list ) F.t ; get_blockchain_verification_key : - ('w, unit, Pickles.Verification_key.t ) F.t + ('w, unit, Pickles.Verification_key.t) F.t ; toggle_internal_tracing : ('w, bool, unit) F.t ; set_itn_logger_data : ('w, int, unit) F.t } @@ -416,8 +419,8 @@ type t = { worker : worker Ivar.t ref; logger : Logger.t } (* TODO: investigate why conf_dir wasn't being used *) let create ~logger ?(enable_internal_tracing = false) ?internal_trace_filename - ~proof_level ~pids ~conf_dir ~commit_id ~verification_key () : - t Deferred.t = + ~proof_level ~pids ~conf_dir ~commit_id ~verification_key () : t Deferred.t + = let on_failure err = [%log error] "Verifier process failed with error $err" ~metadata:[ ("err", Error_json.error_to_yojson err) ] ; @@ -727,7 +730,7 @@ let get_blockchain_verification_key { worker; logger } = Worker.Connection.run connection ~f:Worker.functions.get_blockchain_verification_key ~arg:() |> Deferred.Or_error.map ~f:(fun x -> `Continue x) ) ) - + let toggle_internal_tracing { worker; logger } enabled = with_retry ~logger (fun () -> let%bind { connection; _ } = Ivar.read !worker in @@ -741,4 +744,3 @@ let set_itn_logger_data { worker; logger } ~daemon_port = Worker.Connection.run connection ~f:Worker.functions.set_itn_logger_data ~arg:daemon_port |> Deferred.Or_error.map ~f:(fun x -> `Continue x) ) - diff --git a/src/lib/verifier/verifier.mli b/src/lib/verifier/verifier.mli index 4a80161594b..49b204f2c33 100644 --- a/src/lib/verifier/verifier.mli +++ b/src/lib/verifier/verifier.mli @@ -1,5 +1,4 @@ module Failure = Verification_failure - module For_test = For_test module Dummy : module type of Dummy From 91c177bf495a2aa547f22dde3750c117bf7ef841 Mon Sep 17 00:00:00 2001 From: mrmr1993 Date: Mon, 14 Oct 2024 21:28:19 +0100 Subject: [PATCH 113/234] Remove build of libp2p_helper that we never use --- buildkite/src/Command/Libp2pHelperBuild.dhall | 41 ------------------- 1 file changed, 41 deletions(-) delete mode 100644 buildkite/src/Command/Libp2pHelperBuild.dhall diff --git a/buildkite/src/Command/Libp2pHelperBuild.dhall b/buildkite/src/Command/Libp2pHelperBuild.dhall deleted file mode 100644 index c00c51aa07f..00000000000 --- a/buildkite/src/Command/Libp2pHelperBuild.dhall +++ /dev/null @@ -1,41 +0,0 @@ -let Command = ./Base.dhall - -let Size = ./Size.dhall - -let 
Toolchain = ../Constants/Toolchain.dhall - -let BuildFlags = ../Constants/BuildFlags.dhall - -let Cmd = ../Lib/Cmds.dhall - -let DebianVersions = ../Constants/DebianVersions.dhall - -let commands = - \(debVersion : DebianVersions.DebVersion) - -> [ Cmd.run "chmod -R 777 src/app/libp2p_helper" - , Cmd.run "chmod -R 777 src/libp2p_ipc" - , Cmd.runInDocker - Cmd.Docker::{ - , image = Toolchain.image debVersion - , extraEnv = [ "GO=/usr/lib/go/bin/go" ] - } - "make libp2p_helper" - , Cmd.run - "cp src/app/libp2p_helper/result/bin/libp2p_helper . && buildkite/scripts/buildkite-artifact-helper.sh libp2p_helper" - ] - -let cmdConfig = - \(debVersion : DebianVersions.DebVersion) - -> \(buildFlags : BuildFlags.Type) - -> Command.build - Command.Config::{ - , commands = commands debVersion - , label = - "Build Libp2p helper for ${DebianVersions.capitalName - debVersion} ${BuildFlags.toSuffixUppercase - buildFlags}" - , key = "libp2p-helper${BuildFlags.toLabelSegment buildFlags}" - , target = Size.Multi - } - -in { step = cmdConfig } From eb7ccc27045118139a0ace884b832ce70dbbaeed Mon Sep 17 00:00:00 2001 From: dkijania Date: Mon, 14 Oct 2024 22:37:05 +0200 Subject: [PATCH 114/234] clean up code. remove unused functions --- scripts/version-linter.py | 40 +++++++++++++-------------------------- 1 file changed, 13 insertions(+), 27 deletions(-) diff --git a/scripts/version-linter.py b/scripts/version-linter.py index 08fa2fc84a8..e7a4ba9c6c3 100755 --- a/scripts/version-linter.py +++ b/scripts/version-linter.py @@ -19,8 +19,6 @@ """ import subprocess -import os -import io import sys import re import sexpdata @@ -33,49 +31,37 @@ def set_error(): exit_code=1 def latest_branch_commit(branch): + ''' + Retrieves latest commit on branch + ''' print ('Retrieving', branch, 'head commit...') result=subprocess.run(['git','log','-n','1','--format="%h"','--abbrev=7',f'{branch}'], capture_output=True) output=result.stdout.decode('ascii') print ('command stdout:', output) print ('command stderr:', result.stderr.decode('ascii')) - return output.replace('"','').splitlines() - + return output.replace('"','') + def url_to_type_shape_file(file): ''' Return url to mina type shape file ''' return f'https://storage.googleapis.com/mina-type-shapes/{file}' -def sha_exists(sha1): - ''' - Checks if mina type shape with given sha exists - ''' - file = type_shape_file(sha1) - return url_exists(url_to_type_shape_file(file)) - def url_exists(url): ''' Checks if url exists (by sending head and validating that status code is ok) ''' return requests.head(url).status_code == 200 -def find_latest_type_shape_ref_on(branch): - ''' - Function tries to find best type shape reference commit by retrieving n last commits - and iterate over collection testing if any item points to valid url - ''' - commits = latest_branch_commit(branch) - candidates = list(filter(lambda x: sha_exists(x), commits)) - if not any(candidates): - raise Exception(f'Cannot find type shape file for {branch}. I tried {n} last commits') - else: - return candidates[0] - def download_type_shape(role,branch,sha1) : file=type_shape_file(sha1) + url=url_to_type_shape_file(file) + if not url_exists(url): + raise Exception(f"reference file for '{sha1}' commit does not exists. 
Url does not exists {url} ") + print ('Downloading type shape file',file,'for',role,'branch',branch,'at commit',sha1) - result=subprocess.run(['wget','--no-clobber',url_to_type_shape_file(file)]) + subprocess.run(['wget','--no-clobber',url], check=True) def type_shape_file(sha1) : # created by buildkite build-artifact script @@ -269,17 +255,17 @@ def assert_commit(commit, desc): subprocess.run(['git','fetch'],capture_output=False) - base_branch_commit = find_latest_type_shape_ref_on(base_branch) + base_branch_commit = latest_branch_commit(base_branch) download_type_shape('base',base_branch,base_branch_commit) print('') - release_branch_commit=find_latest_type_shape_ref_on(release_branch) + release_branch_commit=latest_branch_commit(release_branch) download_type_shape('release',release_branch,release_branch_commit) print('') - pr_branch_commit=find_latest_type_shape_ref_on(pr_branch) + pr_branch_commit=latest_branch_commit(pr_branch) download_type_shape('pr',pr_branch,pr_branch_commit) print('') From 4b7a01b679cbc67dc3f3511955a4347e45b61a07 Mon Sep 17 00:00:00 2001 From: mrmr1993 Date: Mon, 14 Oct 2024 21:53:21 +0100 Subject: [PATCH 115/234] Stop attempting to use removed step --- buildkite/src/Command/MinaArtifact.dhall | 5 +---- 1 file changed, 1 insertion(+), 4 deletions(-) diff --git a/buildkite/src/Command/MinaArtifact.dhall b/buildkite/src/Command/MinaArtifact.dhall index 5e6968df95f..8ebb33d097c 100644 --- a/buildkite/src/Command/MinaArtifact.dhall +++ b/buildkite/src/Command/MinaArtifact.dhall @@ -16,8 +16,6 @@ let JobSpec = ../Pipeline/JobSpec.dhall let Size = ./Size.dhall -let Libp2p = ./Libp2pHelperBuild.dhall - let DockerImage = ./DockerImage.dhall let DebianVersions = ../Constants/DebianVersions.dhall @@ -319,8 +317,7 @@ let pipeline : MinaBuildSpec.Type -> Pipeline.Config.Type = \(spec : MinaBuildSpec.Type) -> let steps = - [ Libp2p.step spec.debVersion spec.buildFlags - , build_artifacts spec + [ build_artifacts spec , publish_to_debian_repo spec ] From 0dcedf7709044c679d571cdfdfe9a5413e35bb82 Mon Sep 17 00:00:00 2001 From: mrmr1993 Date: Mon, 14 Oct 2024 22:15:36 +0100 Subject: [PATCH 116/234] Remove TODO that we'll never do --- buildkite/scripts/build-artifact.sh | 3 +-- 1 file changed, 1 insertion(+), 2 deletions(-) diff --git a/buildkite/scripts/build-artifact.sh b/buildkite/scripts/build-artifact.sh index 0a6aa47ae29..a2d83970416 100755 --- a/buildkite/scripts/build-artifact.sh +++ b/buildkite/scripts/build-artifact.sh @@ -17,8 +17,7 @@ else fi -# TODO: Stop building lib_p2p multiple times by pulling from buildkite-agent artifacts or docker or somewhere -echo "--- Build libp2p_helper TODO: use the previously uploaded build artifact" +echo "--- Build libp2p_helper" make -C src/app/libp2p_helper MAINNET_TARGETS="" From 57f7b368e27373066ceee7ecef44bd16cd746d0a Mon Sep 17 00:00:00 2001 From: mrmr1993 Date: Tue, 15 Oct 2024 00:12:24 +0100 Subject: [PATCH 117/234] Reformat --- buildkite/src/Command/MinaArtifact.dhall | 5 +---- 1 file changed, 1 insertion(+), 4 deletions(-) diff --git a/buildkite/src/Command/MinaArtifact.dhall b/buildkite/src/Command/MinaArtifact.dhall index 8ebb33d097c..6367f3f4006 100644 --- a/buildkite/src/Command/MinaArtifact.dhall +++ b/buildkite/src/Command/MinaArtifact.dhall @@ -316,10 +316,7 @@ let publish_to_debian_repo = let pipeline : MinaBuildSpec.Type -> Pipeline.Config.Type = \(spec : MinaBuildSpec.Type) - -> let steps = - [ build_artifacts spec - , publish_to_debian_repo spec - ] + -> let steps = [ build_artifacts spec, publish_to_debian_repo 
spec ] in Pipeline.Config::{ , spec = JobSpec::{ From e63e130e01779500d0355390d9cbb0061f1bd6d8 Mon Sep 17 00:00:00 2001 From: mrmr1993 Date: Tue, 15 Oct 2024 00:26:26 +0100 Subject: [PATCH 118/234] Remove hard-coded assumption that test runners have 16 cores --- buildkite/scripts/fuzzy-zkapp-test.sh | 2 +- buildkite/scripts/unit-test.sh | 8 ++++---- 2 files changed, 5 insertions(+), 5 deletions(-) diff --git a/buildkite/scripts/fuzzy-zkapp-test.sh b/buildkite/scripts/fuzzy-zkapp-test.sh index 04b2ebb7e78..bc62bcd4a79 100755 --- a/buildkite/scripts/fuzzy-zkapp-test.sh +++ b/buildkite/scripts/fuzzy-zkapp-test.sh @@ -21,7 +21,7 @@ export LIBP2P_NIXLESS=1 PATH=/usr/lib/go/bin:$PATH GO=/usr/lib/go/bin/go # skip running all of the tests that have already succeeded, since dune will # only retry those tests that failed. echo "--- Run fuzzy zkapp tests" -time dune exec "${path}" --profile="${profile}" -j16 -- --timeout "${timeout}" --individual-test-timeout "${individual_test_timeout}" --seed "${RANDOM}" +time dune exec "${path}" --profile="${profile}" -- --timeout "${timeout}" --individual-test-timeout "${individual_test_timeout}" --seed "${RANDOM}" STATUS=$? if [ "$STATUS" -ne 0 ]; then ./scripts/link-coredumps.sh && exit "$STATUS" diff --git a/buildkite/scripts/unit-test.sh b/buildkite/scripts/unit-test.sh index 35d697d7b39..4b02c1c9e5e 100755 --- a/buildkite/scripts/unit-test.sh +++ b/buildkite/scripts/unit-test.sh @@ -20,10 +20,10 @@ export LIBP2P_NIXLESS=1 PATH=/usr/lib/go/bin:$PATH GO=/usr/lib/go/bin/go time make build echo "--- Build all targets" -dune build "${path}" --profile="${profile}" -j16 +dune build "${path}" --profile="${profile}" echo "--- Check for changes to verification keys" -time dune runtest "src/app/print_blockchain_snark_vk" --profile="${profile}" -j16 +time dune runtest "src/app/print_blockchain_snark_vk" --profile="${profile}" # Turn on the proof-cache assertion, so that CI will fail if the proofs need to # be updated. @@ -33,8 +33,8 @@ export ERROR_ON_PROOF=true # skip running all of the tests that have already succeeded, since dune will # only retry those tests that failed. 
echo "--- Run unit tests" -time dune runtest "${path}" --profile="${profile}" -j16 || \ +time dune runtest "${path}" --profile="${profile}" || \ (./scripts/link-coredumps.sh && \ echo "--- Retrying failed unit tests" && \ - time dune runtest "${path}" --profile="${profile}" -j16 || \ + time dune runtest "${path}" --profile="${profile}" || \ (./scripts/link-coredumps.sh && false)) From 19ceb744bdb45bc79232f82714af001127ccd90f Mon Sep 17 00:00:00 2001 From: mrmr1993 Date: Tue, 15 Oct 2024 00:29:32 +0100 Subject: [PATCH 119/234] Remove additional use of Libp2p build --- buildkite/src/Command/MinaArtifact.dhall | 5 +---- 1 file changed, 1 insertion(+), 4 deletions(-) diff --git a/buildkite/src/Command/MinaArtifact.dhall b/buildkite/src/Command/MinaArtifact.dhall index da9d468c1d4..301a8dfe2e7 100644 --- a/buildkite/src/Command/MinaArtifact.dhall +++ b/buildkite/src/Command/MinaArtifact.dhall @@ -318,10 +318,7 @@ let onlyDebianPipeline = \(spec : MinaBuildSpec.Type) -> pipelineBuilder spec - [ Libp2p.step spec.debVersion spec.buildFlags - , build_artifacts spec - , publish_to_debian_repo spec - ] + [ build_artifacts spec, publish_to_debian_repo spec ] let pipeline : MinaBuildSpec.Type -> Pipeline.Config.Type From 20a08ebbc352c1f13b2dc096d763871e90edca48 Mon Sep 17 00:00:00 2001 From: mrmr1993 Date: Tue, 15 Oct 2024 00:48:46 +0100 Subject: [PATCH 120/234] Use the appropriate logger for snark pool batcher --- src/lib/network_pool/batcher.ml | 5 ++--- src/lib/network_pool/batcher.mli | 2 +- src/lib/network_pool/snark_pool.ml | 2 +- 3 files changed, 4 insertions(+), 5 deletions(-) diff --git a/src/lib/network_pool/batcher.ml b/src/lib/network_pool/batcher.ml index c2a9a147165..16759fd478e 100644 --- a/src/lib/network_pool/batcher.ml +++ b/src/lib/network_pool/batcher.ml @@ -431,8 +431,7 @@ module Snark_pool = struct let open Deferred.Or_error.Let_syntax in match%map verify t p with Ok () -> true | Error _ -> false - let create verifier : t = - let logger = Logger.create () in + let create ~logger verifier : t = create (* TODO: Make this a proper config detail once we have data on what a good default would be. 
@@ -539,7 +538,7 @@ module Snark_pool = struct Envelope.Incoming.gen data_gen let run_test proof_lists = - let batcher = create verifier in + let batcher = create ~logger verifier in Deferred.List.iter proof_lists ~f:(fun (invalid_proofs, proof_list) -> let%map r = verify' batcher proof_list in let (`Invalid ps) = Or_error.ok_exn r in diff --git a/src/lib/network_pool/batcher.mli b/src/lib/network_pool/batcher.mli index e08d8ed30d0..268b5b6bee9 100644 --- a/src/lib/network_pool/batcher.mli +++ b/src/lib/network_pool/batcher.mli @@ -9,7 +9,7 @@ module Snark_pool : sig type t [@@deriving sexp] - val create : Verifier.t -> t + val create : logger:Logger.t -> Verifier.t -> t val verify : t -> proof_envelope -> bool Deferred.Or_error.t end diff --git a/src/lib/network_pool/snark_pool.ml b/src/lib/network_pool/snark_pool.ml index 6ede322b927..8343d8ead0c 100644 --- a/src/lib/network_pool/snark_pool.ml +++ b/src/lib/network_pool/snark_pool.ml @@ -256,7 +256,7 @@ struct } ; frontier = (fun () -> Broadcast_pipe.Reader.peek frontier_broadcast_pipe) - ; batcher = Batcher.Snark_pool.create config.verifier + ; batcher = Batcher.Snark_pool.create ~logger config.verifier ; logger ; config ; account_creation_fee = From 87f72147a72f0c98df761006c8f5604f90231f5d Mon Sep 17 00:00:00 2001 From: mrmr1993 Date: Tue, 15 Oct 2024 00:53:10 +0100 Subject: [PATCH 121/234] Show log output for batcher tests --- src/lib/network_pool/batcher.ml | 3 --- 1 file changed, 3 deletions(-) diff --git a/src/lib/network_pool/batcher.ml b/src/lib/network_pool/batcher.ml index 16759fd478e..010a36f8eb0 100644 --- a/src/lib/network_pool/batcher.ml +++ b/src/lib/network_pool/batcher.ml @@ -2,9 +2,6 @@ open Core_kernel open Async_kernel open Network_peer -(* Only show stdout for failed inline tests. *) -open Inline_test_quiet_logs - module Id = Unique_id.Int () type ('init, 'result) elt = From 13b5c833f2d3edd5aa0fbd635859879eed028212 Mon Sep 17 00:00:00 2001 From: mrmr1993 Date: Tue, 15 Oct 2024 01:45:41 +0100 Subject: [PATCH 122/234] Don't randomly print out SCHNORR BACKTRACE every 1000 hash verifications --- src/lib/network_pool/batcher.ml | 9 ++++----- src/lib/network_pool/batcher.mli | 4 ++-- src/lib/network_pool/transaction_pool.ml | 4 ++-- src/lib/signature_lib/schnorr.ml | 3 --- 4 files changed, 8 insertions(+), 12 deletions(-) diff --git a/src/lib/network_pool/batcher.ml b/src/lib/network_pool/batcher.ml index 010a36f8eb0..9f42bbc4dfd 100644 --- a/src/lib/network_pool/batcher.ml +++ b/src/lib/network_pool/batcher.ml @@ -42,7 +42,7 @@ type ('init, 'partially_validated, 'result) t = } [@@deriving sexp] -let create ?(how_to_add = `Enqueue_back) ?logger ?compare_init +let create ?(how_to_add = `Enqueue_back) ~logger ?compare_init ?(weight = fun _ -> 1) ?max_weight_per_call verifier = { state = Waiting ; queue = Q.create () @@ -51,7 +51,7 @@ let create ?(how_to_add = `Enqueue_back) ?logger ?compare_init ; verifier ; weight ; max_weight_per_call - ; logger = Option.value logger ~default:(Logger.create ()) + ; logger } let call_verifier t (ps : 'proof list) = t.verifier ps @@ -75,7 +75,7 @@ let rec determine_outcome : (* First separate out all the known results. That information will definitely be included in the outcome. 
*) - let logger = Logger.create () in + let logger = v.logger in let potentially_invalid = List.filter_map (List.zip_exn ps res) ~f:(fun (elt, r) -> match r with @@ -300,8 +300,7 @@ module Transaction_pool = struct (Array.to_list (Array.map a ~f:(function `Valid c -> Some c | _ -> None)) ) - let create verifier : t = - let logger = Logger.create () in + let create ~logger verifier : t = create ~compare_init:compare_envelope ~logger (fun (ds : input list) -> O1trace.thread "dispatching_transaction_pool_batcher_verification" (fun () -> diff --git a/src/lib/network_pool/batcher.mli b/src/lib/network_pool/batcher.mli index 268b5b6bee9..c46bcb0b2e3 100644 --- a/src/lib/network_pool/batcher.mli +++ b/src/lib/network_pool/batcher.mli @@ -18,7 +18,7 @@ type ('initial, 'partially_validated, 'result) t val create : ?how_to_add:[ `Insert | `Enqueue_back ] - -> ?logger:Logger.t + -> logger:Logger.t -> ?compare_init:('init -> 'init -> int) -> ?weight:('init -> int) -> ?max_weight_per_call:int @@ -42,7 +42,7 @@ module Transaction_pool : sig type t [@@deriving sexp] - val create : Verifier.t -> t + val create : logger:Logger.t -> Verifier.t -> t val verify : t diff --git a/src/lib/network_pool/transaction_pool.ml b/src/lib/network_pool/transaction_pool.ml index e99224ecdb8..9cf697e2c06 100644 --- a/src/lib/network_pool/transaction_pool.ml +++ b/src/lib/network_pool/transaction_pool.ml @@ -831,7 +831,7 @@ struct ; remaining_in_batch = max_per_15_seconds ; config ; logger - ; batcher = Batcher.create config.verifier + ; batcher = Batcher.create ~logger config.verifier ; best_tip_diff_relay = None ; best_tip_ledger = None ; verification_key_table = Vk_refcount_table.create () @@ -1661,7 +1661,7 @@ let%test_module _ = let minimum_fee = Currency.Fee.to_nanomina_int genesis_constants.minimum_user_command_fee - let logger = Logger.create () + let logger = Logger.null () let time_controller = Block_time.Controller.basic ~logger diff --git a/src/lib/signature_lib/schnorr.ml b/src/lib/signature_lib/schnorr.ml index f31d64ee927..a1ef7c23443 100644 --- a/src/lib/signature_lib/schnorr.ml +++ b/src/lib/signature_lib/schnorr.ml @@ -240,9 +240,6 @@ module Make let verify ?signature_kind ((r, s) : Signature.t) (pk : Public_key.t) (m : Message.t) = - if Random.int 1000 = 0 then ( - print_endline "SCHNORR BACKTRACE:" ; - Printexc.print_backtrace stdout ) ; let hash = Message.hash ?signature_kind in let e = hash ~public_key:pk ~r m in let r_pt = Curve.(scale one s + negate (scale pk e)) in From e3da5c15f0f2c74bb6326577dabe8eb43a91bf5e Mon Sep 17 00:00:00 2001 From: mrmr1993 Date: Tue, 15 Oct 2024 01:49:18 +0100 Subject: [PATCH 123/234] Turn off debug printing --- src/lib/network_pool/transaction_pool.ml | 19 +++++++++++-------- 1 file changed, 11 insertions(+), 8 deletions(-) diff --git a/src/lib/network_pool/transaction_pool.ml b/src/lib/network_pool/transaction_pool.ml index 9cf697e2c06..3a9dd48bcad 100644 --- a/src/lib/network_pool/transaction_pool.ml +++ b/src/lib/network_pool/transaction_pool.ml @@ -2203,19 +2203,22 @@ let%test_module _ = let tm1 = Time.now () in [%log' info test.txn_pool.logger] "Time for add_commands: %0.04f sec" (Time.diff tm1 tm0 |> Time.Span.to_sec) ; + let debug = false in ( match result with | Ok (`Accept, _, rejects) -> - List.iter rejects ~f:(fun (cmd, err) -> - Core.Printf.printf - !"command was rejected because %s: %{Yojson.Safe}\n%!" 
- (Diff_versioned.Diff_error.to_string_name err) - (User_command.to_yojson cmd) ) + if debug then + List.iter rejects ~f:(fun (cmd, err) -> + Core.Printf.printf + !"command was rejected because %s: %{Yojson.Safe}\n%!" + (Diff_versioned.Diff_error.to_string_name err) + (User_command.to_yojson cmd) ) | Ok (`Reject, _, _) -> failwith "diff was rejected during application" | Error (`Other err) -> - Core.Printf.printf - !"failed to apply diff to pool: %s\n%!" - (Error.to_string_hum err) ) ; + if debug then + Core.Printf.printf + !"failed to apply diff to pool: %s\n%!" + (Error.to_string_hum err) ) ; result let add_commands' ?local test cs = From 92329e9834bf4ba0b3a0be372b9ae4483cca8233 Mon Sep 17 00:00:00 2001 From: mrmr1993 Date: Tue, 15 Oct 2024 01:55:10 +0100 Subject: [PATCH 124/234] Stop using Inline_test_quiet_logs for Network_pool tests --- src/lib/network_pool/snark_pool.ml | 3 --- src/lib/network_pool/test.ml | 3 --- src/lib/network_pool/transaction_pool.ml | 2 -- 3 files changed, 8 deletions(-) diff --git a/src/lib/network_pool/snark_pool.ml b/src/lib/network_pool/snark_pool.ml index 8343d8ead0c..5b152112a75 100644 --- a/src/lib/network_pool/snark_pool.ml +++ b/src/lib/network_pool/snark_pool.ml @@ -559,9 +559,6 @@ module Diff_versioned = struct [@@deriving compare, sexp, to_yojson, hash] end -(* Only show stdout for failed inline tests. *) -open Inline_test_quiet_logs - let%test_module "random set test" = ( module struct open Mina_base diff --git a/src/lib/network_pool/test.ml b/src/lib/network_pool/test.ml index b0ddac4e0a4..5181b7aabc8 100644 --- a/src/lib/network_pool/test.ml +++ b/src/lib/network_pool/test.ml @@ -3,9 +3,6 @@ open Core_kernel open Pipe_lib open Network_peer -(* Only show stdout for failed inline tests. *) -open Inline_test_quiet_logs - let%test_module "network pool test" = ( module struct let trust_system = Mocks.trust_system diff --git a/src/lib/network_pool/transaction_pool.ml b/src/lib/network_pool/transaction_pool.ml index 3a9dd48bcad..fe4e4bf2b8a 100644 --- a/src/lib/network_pool/transaction_pool.ml +++ b/src/lib/network_pool/transaction_pool.ml @@ -3,8 +3,6 @@ transactions (user commands) and providing them to the block producer code. *) -(* Only show stdout for failed inline tests.*) -open Inline_test_quiet_logs open Core open Async open Mina_base From 39ebd835538b3c1a2e236a71630fa350067c31c0 Mon Sep 17 00:00:00 2001 From: svv232 Date: Tue, 15 Oct 2024 00:50:44 -0400 Subject: [PATCH 125/234] remove dead block storage library as it was no longer being used and failing unit tests --- src/lib/block_storage/block_storage.ml | 223 ------------------------ src/lib/block_storage/block_storage.mli | 12 -- src/lib/block_storage/dune | 48 ----- 3 files changed, 283 deletions(-) delete mode 100644 src/lib/block_storage/block_storage.ml delete mode 100644 src/lib/block_storage/block_storage.mli delete mode 100644 src/lib/block_storage/dune diff --git a/src/lib/block_storage/block_storage.ml b/src/lib/block_storage/block_storage.ml deleted file mode 100644 index 21cb2e1b05d..00000000000 --- a/src/lib/block_storage/block_storage.ml +++ /dev/null @@ -1,223 +0,0 @@ -(* Only show stdout for failed inline tests. 
*) -open Inline_test_quiet_logs -open Core_kernel - -type t = - { (* statuses is a map from 32-byte key to a 1-byte value representing the status of a root bitswap block *) - statuses : (Consensus.Body_reference.t, int, [ `Uni ]) Lmdb.Map.t - ; blocks : (Blake2.t, Bigstring.t, [ `Uni ]) Lmdb.Map.t - ; logger : Logger.t - ; env : Lmdb.Env.t - } - -module Root_block_status = struct - type t = Partial | Full | Deleting [@@deriving enum] -end - -let body_tag = Staged_ledger_diff.Body.Tag.(to_enum Body) - -let full_status = Root_block_status.to_enum Full - -let uint8_conv = - Lmdb.Conv.make - ~flags:Lmdb.Conv.Flags.(integer_key + integer_dup + dup_fixed) - ~serialise:(fun alloc x -> - let a = alloc 1 in - Bigstring.set_uint8_exn a ~pos:0 x ; - a ) - ~deserialise:(Bigstring.get_uint8 ~pos:0) - () - -let blake2_conv = - Lmdb.Conv.( - make - ~serialise:(fun alloc x -> - let str = Blake2.to_raw_string x in - serialise string alloc str ) - ~deserialise:(fun s -> deserialise string s |> Blake2.of_raw_string) - ()) - -let open_ ~logger dir = - let env = Lmdb.Env.create ~max_maps:1 Ro dir in - (* Env. *) - let blocks = - Lmdb.Map.open_existing ~key:blake2_conv ~value:Lmdb.Conv.bigstring Nodup env - in - let statuses = - Lmdb.Map.open_existing ~key:blake2_conv ~value:uint8_conv ~name:"status" - Nodup env - in - { blocks; statuses; logger; env } - -let get_status { statuses; logger; _ } body_ref = - try - let raw_status = Lmdb.Map.get statuses body_ref in - match Root_block_status.of_enum raw_status with - | None -> - [%log error] "Unexpected status $status for $body_reference" - ~metadata: - [ ("status", `Int raw_status) - ; ("body_reference", Consensus.Body_reference.to_yojson body_ref) - ] ; - None - | Some x -> - Some x - with Lmdb.Not_found -> None - -let read_body_impl blocks txn root_ref = - let find_block ref = - try Lmdb.Map.get ~txn blocks ref |> Some with Lmdb.Not_found -> None - in - let%bind.Or_error raw_root_block = - Option.value_map - ~f:(fun x -> Ok x) - ~default: - (Or_error.error_string - (sprintf "root block %s not found" @@ Blake2.to_hex root_ref) ) - (find_block root_ref) - in - let%bind.Or_error root_links, root_data = - Staged_ledger_diff.Bitswap_block.parse_block ~hash:root_ref raw_root_block - in - let%bind.Or_error () = - if Bigstring.length root_data < 5 then - Or_error.error_string - @@ sprintf "Couldn't read root block for %s: data section is too short" - @@ Consensus.Body_reference.to_hex root_ref - else Ok () - in - let len = Bigstring.get_uint32_le root_data ~pos:0 - 1 in - let%bind.Or_error () = - let raw_tag = Bigstring.get_uint8 root_data ~pos:4 in - if body_tag = raw_tag then Ok () - else - Or_error.error_string - @@ sprintf "Unexpected tag %s for block %s" (Int.to_string raw_tag) - (Consensus.Body_reference.to_hex root_ref) - in - let buf = Bigstring.create len in - let pos = ref (Bigstring.length root_data - 5) in - Bigstring.blit ~src:root_data ~src_pos:5 ~dst:buf ~dst_pos:0 ~len:!pos ; - let q = Queue.create () in - Queue.enqueue_all q root_links ; - let%map.Or_error () = - Staged_ledger_diff.Bitswap_block.iter_links q - ~report_chunk:(fun data -> - Bigstring.blit ~src:data ~src_pos:0 ~dst:buf ~dst_pos:!pos - ~len:(Bigstring.length data) ; - pos := !pos + Bigstring.length data ) - ~find_block - in - Staged_ledger_diff.Body.Stable.Latest.bin_read_t buf ~pos_ref:(ref 0) - -let read_body { statuses; logger; blocks; env } body_ref = - let impl txn = - try - if Lmdb.Map.get ~txn statuses body_ref = full_status then ( - match read_body_impl blocks txn body_ref with - | 
Ok r -> - Some r - | Error e -> - [%log error] - "Couldn't read body for $body_reference with Full status: $error" - ~metadata: - [ ("body_reference", Consensus.Body_reference.to_yojson body_ref) - ; ("error", `String (Error.to_string_hum e)) - ] ; - None ) - else None - with Lmdb.Not_found -> None - in - match Lmdb.Txn.go Ro env impl with - | None -> - [%log error] - "LMDB transaction failed unexpectedly while reading block \ - $body_reference" - ~metadata: - [ ("body_reference", Consensus.Body_reference.to_yojson body_ref) ] ; - None - | Some x -> - x - -let%test_module "Block storage tests" = - ( module struct - open Full_frontier.For_tests - open Async_kernel - open Frontier_base - - let () = - Backtrace.elide := false ; - Async.Scheduler.set_record_backtraces true - - let logger = Logger.create () - - let verifier = verifier () - - let%test_unit "Write a block to db and read it" = - Quickcheck.test (gen_breadcrumb ~verifier ()) ~trials:4 - ~f:(fun make_breadcrumb -> - let frontier = create_frontier () in - let root = Full_frontier.root frontier in - let open Mina_net2.For_tests in - let res_updated_ivar = Ivar.create () in - let handle_push_message _ msg = - ( match msg with - | Libp2p_ipc.Reader.DaemonInterface.PushMessage.ResourceUpdated m - -> ( - let open Libp2p_ipc.Reader.DaemonInterface.ResourceUpdate in - match (type_get m, ids_get_list m) with - | Added, [ id_ ] -> - let id = - Libp2p_ipc.Reader.RootBlockId.blake2b_hash_get id_ - in - Ivar.fill_if_empty res_updated_ivar id - | _ -> - () ) - | _ -> - () ) ; - Deferred.unit - in - Helper.test_with_libp2p_helper ~logger ~handle_push_message - (fun conf_dir helper -> - let%bind me = generate_random_keypair helper in - let maddr = - multiaddr_to_libp2p_ipc - @@ Mina_net2.Multiaddr.of_string "/ip4/127.0.0.1/tcp/12878" - in - let libp2p_config = - Libp2p_ipc.create_libp2p_config - ~private_key:(Mina_net2.Keypair.secret me) - ~statedir:conf_dir ~listen_on:[ maddr ] - ~external_multiaddr:maddr ~network_id:"s" - ~unsafe_no_trust_ip:true ~flood:false ~direct_peers:[] - ~seed_peers:[] ~known_private_ip_nets:[] ~peer_exchange:true - ~peer_protection_ratio:0.2 ~min_connections:20 - ~max_connections:40 ~validation_queue_size:250 - ~gating_config:empty_libp2p_ipc_gating_config - ?metrics_port:None ~topic_config:[] () - in - let%bind _ = - Helper.do_rpc helper - (module Libp2p_ipc.Rpcs.Configure) - (Libp2p_ipc.Rpcs.Configure.create_request ~libp2p_config) - >>| Or_error.ok_exn - in - let%bind breadcrumb = make_breadcrumb root in - let body = Breadcrumb.block breadcrumb |> Mina_block.body in - let body_ref = Staged_ledger_diff.Body.compute_reference body in - [%log info] "Sending add resource" ; - Helper.send_add_resource ~tag:Staged_ledger_diff.Body.Tag.Body - ~body helper ; - [%log info] "Waiting for push message" ; - let%map id = Ivar.read res_updated_ivar in - [%log info] "Push message received" ; - [%test_eq: String.t] - (Consensus.Body_reference.to_raw_string body_ref) - id ; - let db = - open_ ~logger (String.concat ~sep:"/" [ conf_dir; "block-db" ]) - in - [%test_eq: Staged_ledger_diff.Body.t option] (Some body) - (read_body db body_ref) ) ; - clean_up_persistent_root ~frontier ) - end ) diff --git a/src/lib/block_storage/block_storage.mli b/src/lib/block_storage/block_storage.mli deleted file mode 100644 index 8a85f59e83b..00000000000 --- a/src/lib/block_storage/block_storage.mli +++ /dev/null @@ -1,12 +0,0 @@ -type t - -module Root_block_status : sig - type t = Partial | Full | Deleting [@@deriving enum] -end - -val open_ : 
logger:Logger.t -> string -> t - -val get_status : t -> Consensus.Body_reference.t -> Root_block_status.t option - -val read_body : - t -> Consensus.Body_reference.t -> Staged_ledger_diff.Body.t option diff --git a/src/lib/block_storage/dune b/src/lib/block_storage/dune deleted file mode 100644 index d9a2f0f14e0..00000000000 --- a/src/lib/block_storage/dune +++ /dev/null @@ -1,48 +0,0 @@ -(library - (name block_storage) - (public_name block_storage) - (libraries - ;; opam libraries - async - base58 - base64 - capnp - digestif - stdio - core - libp2p_ipc - yojson - async_kernel - core_kernel - bin_prot.shape - ppx_inline_test.config - async_unix - sexplib0 - base.caml - base.base_internalhash_types - splittable_random - lmdb - integers - ;; local libraries - blake2 - error_json - child_processes - file_system - logger - network_peer - pipe_lib - timeout_lib - mina_metrics - o1trace - staged_ledger_diff - consensus - mina_net2 - ;; test deps - inline_test_quiet_logs - transition_frontier_base - mina_block - transition_frontier_full_frontier - ) - (inline_tests (flags -verbose -show-counts)) - (instrumentation (backend bisect_ppx)) - (preprocess (pps ppx_mina ppx_version ppx_jane ppx_deriving.std ppx_let ppx_deriving_yojson))) From d16bb8681ccb969555c048e19f27e67cc7eea984 Mon Sep 17 00:00:00 2001 From: svv232 Date: Tue, 15 Oct 2024 01:33:11 -0400 Subject: [PATCH 126/234] importing validation callback from minarpc branch --- src/lib/mina_net2/validation_callback.ml | 30 ++++++++++------------- src/lib/mina_net2/validation_callback.mli | 6 ++--- 2 files changed, 15 insertions(+), 21 deletions(-) diff --git a/src/lib/mina_net2/validation_callback.ml b/src/lib/mina_net2/validation_callback.ml index 7688067a7e7..a7da7b1ecec 100644 --- a/src/lib/mina_net2/validation_callback.ml +++ b/src/lib/mina_net2/validation_callback.ml @@ -72,7 +72,7 @@ let record_timeout_metrics cb = Mina_metrics.Counter.inc_one M.validations_timed_out let record_validation_metrics message_type (result : validation_result) - validation_time processing_time ~block_window_duration:_ (*TODO remove*) = + validation_time processing_time = match metrics_of_message_type message_type with | None -> () @@ -81,14 +81,11 @@ let record_validation_metrics message_type (result : validation_result) | `Ignore -> Mina_metrics.Counter.inc_one M.ignored | `Accept -> - let module Validation_time = M.Validation_time in - Validation_time.update validation_time ; - let module Processing_time = M.Processing_time in - Processing_time.update processing_time + M.Validation_time.update validation_time ; + M.Processing_time.update processing_time | `Reject -> Mina_metrics.Counter.inc_one M.rejected ; - let module Rejection_time = M.Rejection_time in - Rejection_time.update processing_time ) + M.Rejection_time.update processing_time ) let await_timeout cb = if is_expired cb then Deferred.return () @@ -101,7 +98,7 @@ let await_timeout cb = ( Time_ns.Span.to_span_float_round_nearest @@ Time_ns.diff expires_at (Time_ns.now ()) ) -let await ~block_window_duration cb = +let await cb = if is_expired cb then (record_timeout_metrics cb ; Deferred.return None) else match cb.expiration with @@ -122,20 +119,19 @@ let await ~block_window_duration cb = Time_ns.abs_diff (Time_ns.now ()) cb.created_at |> Time_ns.Span.to_ms |> Time.Span.of_ms in - record_validation_metrics ~block_window_duration cb.message_type - result validation_time processing_time ; + record_validation_metrics cb.message_type result validation_time + processing_time ; Some result | `Timeout -> 
record_timeout_metrics cb ; None ) -let await_exn ~block_window_duration cb = - match%map await ~block_window_duration cb with - | None -> - failwith "timeout" - | Some result -> - result +let await_exn cb = + match%map await cb with None -> failwith "timeout" | Some result -> result let fire_if_not_already_fired cb result = - if not (is_expired cb) then Ivar.fill_if_empty cb.signal result + if not (is_expired cb) then + if Ivar.is_full cb.signal then + [%log' error (Logger.create ())] "Ivar.fill bug is here!" + else Ivar.fill cb.signal result let set_message_type t x = t.message_type <- x diff --git a/src/lib/mina_net2/validation_callback.mli b/src/lib/mina_net2/validation_callback.mli index 352dd161642..ac87f3a8b48 100644 --- a/src/lib/mina_net2/validation_callback.mli +++ b/src/lib/mina_net2/validation_callback.mli @@ -11,11 +11,9 @@ val create_without_expiration : unit -> t val is_expired : t -> bool -val await : - block_window_duration:Time.Span.t -> t -> validation_result option Deferred.t +val await : t -> validation_result option Deferred.t -val await_exn : - block_window_duration:Time.Span.t -> t -> validation_result Deferred.t +val await_exn : t -> validation_result Deferred.t (** May return a deferred that never resolves, in the case of callbacks without expiration. *) val await_timeout : t -> unit Deferred.t From 96c8b6c6a8c74133a68286e14a54c566b56d2740 Mon Sep 17 00:00:00 2001 From: svv232 Date: Tue, 15 Oct 2024 01:33:43 -0400 Subject: [PATCH 127/234] removing block window duration from top level --- src/app/cli/src/cli_entrypoint/mina_cli_entrypoint.ml | 1 - src/app/cli/src/init/client.ml | 8 ++------ 2 files changed, 2 insertions(+), 7 deletions(-) diff --git a/src/app/cli/src/cli_entrypoint/mina_cli_entrypoint.ml b/src/app/cli/src/cli_entrypoint/mina_cli_entrypoint.ml index b90833be0a1..ae9424b3ca2 100644 --- a/src/app/cli/src/cli_entrypoint/mina_cli_entrypoint.ml +++ b/src/app/cli/src/cli_entrypoint/mina_cli_entrypoint.ml @@ -1205,7 +1205,6 @@ Pass one of -peer, -peer-list-file, -seed, -peer-list-url.|} ; ; time_controller ; pubsub_v1 ; pubsub_v0 - ; block_window_duration = compile_config.block_window_duration } in let net_config = diff --git a/src/app/cli/src/init/client.ml b/src/app/cli/src/init/client.ml index ba7d52dccec..409413d0f00 100644 --- a/src/app/cli/src/init/client.ml +++ b/src/app/cli/src/init/client.ml @@ -1612,14 +1612,12 @@ let generate_libp2p_keypair_do privkey_path = (let open Deferred.Let_syntax in (* FIXME: I'd like to accumulate messages into this logger and only dump them out in failure paths. *) let logger = Logger.null () in - let compile_config = Mina_compile_config.Compiled.t in (* Using the helper only for keypair generation requires no state. *) File_system.with_temp_dir "mina-generate-libp2p-keypair" ~f:(fun tmpd -> match%bind Mina_net2.create ~logger ~conf_dir:tmpd ~all_peers_seen_metric:false ~pids:(Child_processes.Termination.create_pid_table ()) - ~on_peer_connected:ignore ~on_peer_disconnected:ignore - ~block_window_duration:compile_config.block_window_duration () + ~on_peer_connected:ignore ~on_peer_disconnected:ignore () with | Ok net -> let%bind me = Mina_net2.generate_random_keypair net in @@ -1646,14 +1644,12 @@ let dump_libp2p_keypair_do privkey_path = Deferred.ignore_m (let open Deferred.Let_syntax in let logger = Logger.null () in - let compile_config = Mina_compile_config.Compiled.t in (* Using the helper only for keypair generation requires no state. 
*) File_system.with_temp_dir "mina-dump-libp2p-keypair" ~f:(fun tmpd -> match%bind Mina_net2.create ~logger ~conf_dir:tmpd ~all_peers_seen_metric:false ~pids:(Child_processes.Termination.create_pid_table ()) - ~on_peer_connected:ignore ~on_peer_disconnected:ignore - ~block_window_duration:compile_config.block_window_duration () + ~on_peer_connected:ignore ~on_peer_disconnected:ignore () with | Ok net -> let%bind () = Mina_net2.shutdown net in From 7fe09919d9d88041dd822acc737985bff87d3718 Mon Sep 17 00:00:00 2001 From: svv232 Date: Tue, 15 Oct 2024 01:35:09 -0400 Subject: [PATCH 128/234] removing block duration window in mina networking modules and lipbp2p layer --- src/lib/gossip_net/libp2p.ml | 7 ++----- src/lib/mina_lib/mina_lib.ml | 1 - src/lib/mina_lib/tests/tests.ml | 2 -- src/lib/mina_net2/mina_net2.ml | 8 ++------ src/lib/mina_net2/mina_net2.mli | 1 - src/lib/mina_net2/subscription.ml | 6 ++---- src/lib/mina_net2/subscription.mli | 1 - src/lib/mina_net2/tests/all_ipc.ml | 3 +-- src/lib/mina_net2/tests/tests.ml | 9 +++------ src/lib/network_pool/network_pool_base.ml | 2 -- src/lib/network_pool/pool_sink.ml | 11 ++--------- 11 files changed, 12 insertions(+), 39 deletions(-) diff --git a/src/lib/gossip_net/libp2p.ml b/src/lib/gossip_net/libp2p.ml index b0855a7742c..4de2c63250c 100644 --- a/src/lib/gossip_net/libp2p.ml +++ b/src/lib/gossip_net/libp2p.ml @@ -52,7 +52,6 @@ module Config = struct ; mutable keypair : Mina_net2.Keypair.t option ; all_peers_seen_metric : bool ; known_private_ip_nets : Core.Unix.Cidr.t list - ; block_window_duration : Time.Span.t } [@@deriving make] end @@ -220,8 +219,7 @@ module Make (Rpc_interface : RPC_INTERFACE) : ctx first_peer_ivar high_connectivity_ivar ~added_seeds ~pids ~on_unexpected_termination ~sinks: - (Message.Any_sinks (sinksM, (sink_block, sink_tx, sink_snark_work))) - ~block_window_duration = + (Message.Any_sinks (sinksM, (sink_block, sink_tx, sink_snark_work))) = let module Sinks = (val sinksM) in let ctr = ref 0 in let record_peer_connection () = @@ -258,7 +256,7 @@ module Make (Rpc_interface : RPC_INTERFACE) : ~all_peers_seen_metric:config.all_peers_seen_metric ~on_peer_connected:(fun _ -> record_peer_connection ()) ~on_peer_disconnected:ignore ~logger:config.logger ~conf_dir - ~pids ~block_window_duration () ) ) + ~pids () ) ) with | Ok (Ok net2) -> ( let open Mina_net2 in @@ -629,7 +627,6 @@ module Make (Rpc_interface : RPC_INTERFACE) : create_libp2p ~allow_multiple_instances config rpc_handlers first_peer_ivar high_connectivity_ivar ~added_seeds ~pids ~on_unexpected_termination:restart_libp2p ~sinks - ~block_window_duration:config.block_window_duration in on_libp2p_create libp2p ; Deferred.ignore_m libp2p and restart_libp2p () = don't_wait_for (start_libp2p ()) in diff --git a/src/lib/mina_lib/mina_lib.ml b/src/lib/mina_lib/mina_lib.ml index 972c8ec1375..6110923f018 100644 --- a/src/lib/mina_lib/mina_lib.ml +++ b/src/lib/mina_lib/mina_lib.ml @@ -1866,7 +1866,6 @@ let create ~commit_id ?wallets (config : Config.t) = ; consensus_constants ; genesis_constants = config.precomputed_values.genesis_constants ; constraint_constants - ; block_window_duration } in let sinks = (block_sink, tx_remote_sink, snark_remote_sink) in diff --git a/src/lib/mina_lib/tests/tests.ml b/src/lib/mina_lib/tests/tests.ml index 4897b8a9b35..b3a36d5a2a4 100644 --- a/src/lib/mina_lib/tests/tests.ml +++ b/src/lib/mina_lib/tests/tests.ml @@ -185,7 +185,6 @@ let%test_module "Epoch ledger sync tests" = ; consensus_constants ; genesis_constants = 
precomputed_values.genesis_constants ; constraint_constants - ; block_window_duration = compile_config.block_window_duration } in let _transaction_pool, tx_remote_sink, _tx_local_sink = @@ -273,7 +272,6 @@ let%test_module "Epoch ledger sync tests" = ; time_controller ; pubsub_v1 ; pubsub_v0 - ; block_window_duration = compile_config.block_window_duration } in Mina_networking.Gossip_net.( diff --git a/src/lib/mina_net2/mina_net2.ml b/src/lib/mina_net2/mina_net2.ml index d2db2d0bbad..a649a5d978c 100644 --- a/src/lib/mina_net2/mina_net2.ml +++ b/src/lib/mina_net2/mina_net2.ml @@ -94,7 +94,6 @@ type t = ; mutable banned_ips : Unix.Inet_addr.t list ; peer_connected_callback : string -> unit ; peer_disconnected_callback : string -> unit - ; block_window_duration : Time.Span.t } let banned_ips t = t.banned_ips @@ -383,8 +382,7 @@ let handle_push_message t push_message = upon (O1trace.thread "validate_libp2p_gossip" (fun () -> Subscription.handle_and_validate sub ~validation_expiration - ~sender ~data - ~block_window_duration:t.block_window_duration ) ) + ~sender ~data ) ) (function | `Validation_timeout -> [%log' warn t.logger] @@ -545,8 +543,7 @@ let handle_push_message t push_message = Libp2p_ipc.undefined_union ~context:"DaemonInterface.PushMessage" n let create ?(allow_multiple_instances = false) ~all_peers_seen_metric ~logger - ~pids ~conf_dir ~on_peer_connected ~on_peer_disconnected - ~block_window_duration () = + ~pids ~conf_dir ~on_peer_connected ~on_peer_disconnected () = let open Deferred.Or_error.Let_syntax in let push_message_handler = ref (fun _msg -> @@ -577,7 +574,6 @@ let create ?(allow_multiple_instances = false) ~all_peers_seen_metric ~logger ; peer_disconnected_callback = (fun peer_id -> on_peer_disconnected (Peer.Id.unsafe_of_string peer_id)) ; protocol_handlers = Hashtbl.create (module String) - ; block_window_duration } in (push_message_handler := fun msg -> handle_push_message t msg) ; diff --git a/src/lib/mina_net2/mina_net2.mli b/src/lib/mina_net2/mina_net2.mli index f6f8cfd927b..6b76e6a7049 100644 --- a/src/lib/mina_net2/mina_net2.mli +++ b/src/lib/mina_net2/mina_net2.mli @@ -139,7 +139,6 @@ val create : -> conf_dir:string -> on_peer_connected:(Peer.Id.t -> unit) -> on_peer_disconnected:(Peer.Id.t -> unit) - -> block_window_duration:Time.Span.t -> unit -> t Deferred.Or_error.t diff --git a/src/lib/mina_net2/subscription.ml b/src/lib/mina_net2/subscription.ml index 61b15d96af5..69ca2a4959e 100644 --- a/src/lib/mina_net2/subscription.ml +++ b/src/lib/mina_net2/subscription.ml @@ -50,7 +50,7 @@ let unsubscribe ~helper sub = else Deferred.Or_error.error_string "already unsubscribed" let handle_and_validate sub ~validation_expiration ~(sender : Peer.t) - ~data:raw_data ~block_window_duration = + ~data:raw_data = let open Libp2p_ipc.Reader.ValidationResult in let wrap_message data = if @@ -65,9 +65,7 @@ let handle_and_validate sub ~validation_expiration ~(sender : Peer.t) Validation_callback.create validation_expiration in let%bind () = sub.validator (wrap_message data) validation_callback in - match%map - Validation_callback.await ~block_window_duration validation_callback - with + match%map Validation_callback.await validation_callback with | Some `Accept -> `Validation_result Accept | Some `Reject -> diff --git a/src/lib/mina_net2/subscription.mli b/src/lib/mina_net2/subscription.mli index 022763618e3..6809c782440 100644 --- a/src/lib/mina_net2/subscription.mli +++ b/src/lib/mina_net2/subscription.mli @@ -29,7 +29,6 @@ val handle_and_validate : -> 
validation_expiration:Time_ns.t -> sender:Peer.t -> data:string - -> block_window_duration:Time.Span.t -> [ `Validation_result of Libp2p_ipc.validation_result | `Validation_timeout | `Decoding_error of Error.t ] diff --git a/src/lib/mina_net2/tests/all_ipc.ml b/src/lib/mina_net2/tests/all_ipc.ml index de778d0f125..cb9c6c3438f 100644 --- a/src/lib/mina_net2/tests/all_ipc.ml +++ b/src/lib/mina_net2/tests/all_ipc.ml @@ -543,8 +543,7 @@ let%test_module "all-ipc test" = let%bind node = create ~all_peers_seen_metric:false ~logger:(Logger.extend logger [ ("name", `String local_name) ]) - ~conf_dir ~pids ~on_peer_connected ~on_peer_disconnected - ~block_window_duration () + ~conf_dir ~pids ~on_peer_connected ~on_peer_disconnected () >>| Or_error.ok_exn in let%bind kp_a = diff --git a/src/lib/mina_net2/tests/tests.ml b/src/lib/mina_net2/tests/tests.ml index 3bfefc9c951..865392609c1 100644 --- a/src/lib/mina_net2/tests/tests.ml +++ b/src/lib/mina_net2/tests/tests.ml @@ -11,9 +11,6 @@ let%test_module "Mina network tests" = let pids = Child_processes.Termination.create_pid_table () - let block_window_duration = - Mina_compile_config.For_unit_tests.t.block_window_duration - let setup_two_nodes network_id = let%bind a_tmp = Unix.mkdtemp "p2p_helper_test_a" in let%bind b_tmp = Unix.mkdtemp "p2p_helper_test_b" in @@ -22,21 +19,21 @@ let%test_module "Mina network tests" = create ~all_peers_seen_metric:false ~logger:(Logger.extend logger [ ("name", `String "a") ]) ~conf_dir:a_tmp ~pids ~on_peer_connected:Fn.ignore - ~on_peer_disconnected:Fn.ignore ~block_window_duration () + ~on_peer_disconnected:Fn.ignore () >>| Or_error.ok_exn in let%bind b = create ~all_peers_seen_metric:false ~logger:(Logger.extend logger [ ("name", `String "b") ]) ~conf_dir:b_tmp ~pids ~on_peer_connected:Fn.ignore - ~on_peer_disconnected:Fn.ignore ~block_window_duration () + ~on_peer_disconnected:Fn.ignore () >>| Or_error.ok_exn in let%bind c = create ~all_peers_seen_metric:false ~logger:(Logger.extend logger [ ("name", `String "c") ]) ~conf_dir:c_tmp ~pids ~on_peer_connected:Fn.ignore - ~on_peer_disconnected:Fn.ignore ~block_window_duration () + ~on_peer_disconnected:Fn.ignore () >>| Or_error.ok_exn in let%bind kp_a = generate_random_keypair a in diff --git a/src/lib/network_pool/network_pool_base.ml b/src/lib/network_pool/network_pool_base.ml index cf4c1e36674..1fbb36be82d 100644 --- a/src/lib/network_pool/network_pool_base.ml +++ b/src/lib/network_pool/network_pool_base.ml @@ -185,7 +185,6 @@ end) ~unwrap:(function | Diff m -> m | _ -> failwith "unexpected message type" ) ~trace_label:Resource_pool.label ~logger resource_pool - ~block_window_duration in let local_r, local_w, _ = Local_sink.create @@ -193,7 +192,6 @@ end) ~unwrap:(function | Diff m -> m | _ -> failwith "unexpected message type" ) ~trace_label:Resource_pool.label ~logger resource_pool - ~block_window_duration in log_rate_limiter_occasionally network_pool remote_rl ; (*priority: Transition frontier diffs > local diffs > incoming diffs*) diff --git a/src/lib/network_pool/pool_sink.ml b/src/lib/network_pool/pool_sink.ml index 50ca9371593..8be8f96c3b6 100644 --- a/src/lib/network_pool/pool_sink.ml +++ b/src/lib/network_pool/pool_sink.ml @@ -26,7 +26,6 @@ module type Pool_sink = sig -> trace_label:string -> logger:Logger.t -> pool - -> block_window_duration:Time.Span.t -> 'wrapped_t Strict_pipe.Reader.t * t * Rate_limiter.t end @@ -72,7 +71,6 @@ module Base ; throttle : unit Throttle.t ; on_push : unit -> unit Deferred.t ; log_gossip_heard : bool - ; block_window_duration 
: Time.Span.t } -> t | Void @@ -145,7 +143,6 @@ module Base ; throttle ; on_push ; log_gossip_heard - ; block_window_duration } -> O1trace.sync_thread (sprintf "handle_%s_gossip" trace_label) @@ fun () -> @@ -157,10 +154,7 @@ module Base | BC.External cb'' -> Diff.update_metrics env' cb'' ~log_gossip_heard ~logger ; don't_wait_for - ( match%map - Mina_net2.Validation_callback.await ~block_window_duration - cb'' - with + ( match%map Mina_net2.Validation_callback.await cb'' with | None -> let diff = Envelope.Incoming.data env' in [%log error] @@ -197,7 +191,7 @@ module Base Deferred.unit let create ?(on_push = Fn.const Deferred.unit) ?(log_gossip_heard = false) - ~wrap ~unwrap ~trace_label ~logger pool ~block_window_duration = + ~wrap ~unwrap ~trace_label ~logger pool = let r, writer = Strict_pipe.create ~name:"verified network pool diffs" (Buffered @@ -223,7 +217,6 @@ module Base ; throttle ; on_push ; log_gossip_heard - ; block_window_duration } , rate_limiter ) From 9d78d4a5a82642a0c5ec4f623d0a75615fc0b0d7 Mon Sep 17 00:00:00 2001 From: svv232 Date: Tue, 15 Oct 2024 01:35:50 -0400 Subject: [PATCH 129/234] remove bwd in transition handler --- src/lib/transition_handler/block_sink.ml | 9 +----- src/lib/transition_handler/block_sink.mli | 2 -- .../transition_handler/catchup_scheduler.ml | 30 +++++++------------ src/lib/transition_handler/processor.ml | 4 +-- 4 files changed, 12 insertions(+), 33 deletions(-) diff --git a/src/lib/transition_handler/block_sink.ml b/src/lib/transition_handler/block_sink.ml index d345e3f58c3..ecad7d1d2d7 100644 --- a/src/lib/transition_handler/block_sink.ml +++ b/src/lib/transition_handler/block_sink.ml @@ -23,7 +23,6 @@ type block_sink_config = ; consensus_constants : Consensus.Constants.t ; genesis_constants : Genesis_constants.t ; constraint_constants : Genesis_constants.Constraint_constants.t - ; block_window_duration : Time.Span.t } type t = @@ -37,7 +36,6 @@ type t = ; consensus_constants : Consensus.Constants.t ; genesis_constants : Genesis_constants.t ; constraint_constants : Genesis_constants.Constraint_constants.t - ; block_window_duration : Time.Span.t } | Void @@ -59,7 +57,6 @@ let push sink (b_or_h, `Time_received tm, `Valid_cb cb) = ; consensus_constants ; genesis_constants ; constraint_constants - ; block_window_duration } -> O1trace.sync_thread "handle_block_gossip" @@ fun () -> @@ -105,9 +102,7 @@ let push sink (b_or_h, `Time_received tm, `Valid_cb cb) = :: txs_meta ) ; [%log internal] "External_block_received" ; don't_wait_for - ( match%map - Mina_net2.Validation_callback.await ~block_window_duration cb - with + ( match%map Mina_net2.Validation_callback.await cb with | Some `Accept -> let processing_time_span = Time.diff @@ -238,7 +233,6 @@ let create ; consensus_constants ; genesis_constants ; constraint_constants - ; block_window_duration } = let rate_limiter = Network_pool.Rate_limiter.create @@ -260,7 +254,6 @@ let create ; consensus_constants ; genesis_constants ; constraint_constants - ; block_window_duration } ) let void = Void diff --git a/src/lib/transition_handler/block_sink.mli b/src/lib/transition_handler/block_sink.mli index 17d59ff72a6..7757d381dd4 100644 --- a/src/lib/transition_handler/block_sink.mli +++ b/src/lib/transition_handler/block_sink.mli @@ -1,6 +1,5 @@ open Network_peer open Mina_base -open Core_kernel type Structured_log_events.t += | Block_received of { state_hash : State_hash.t; sender : Envelope.Sender.t } @@ -26,7 +25,6 @@ type block_sink_config = ; consensus_constants : Consensus.Constants.t ; genesis_constants 
: Genesis_constants.t ; constraint_constants : Genesis_constants.Constraint_constants.t - ; block_window_duration : Time.Span.t } val create : diff --git a/src/lib/transition_handler/catchup_scheduler.ml b/src/lib/transition_handler/catchup_scheduler.ml index 6dd6693e9e8..0c70d5ce17e 100644 --- a/src/lib/transition_handler/catchup_scheduler.ml +++ b/src/lib/transition_handler/catchup_scheduler.ml @@ -234,29 +234,26 @@ let make_timeout t transition_with_hash duration = Existing code is safe as long as header-only gossip topic isn't actually used in the code.logger I.e. this TODO has to be resolved before bit-catchup work fully lands. *) -let register_validation_callback ~hash ~valid_cb ~block_window_duration t = +let register_validation_callback ~hash ~valid_cb t = Option.value_map valid_cb ~default:() ~f:(fun data -> match Hashtbl.add t.validation_callbacks ~key:hash ~data with | `Ok -> (* Clean up entry upon callback resolution *) upon - ( Deferred.ignore_m - @@ Mina_net2.Validation_callback.await ~block_window_duration data - ) + (Deferred.ignore_m @@ Mina_net2.Validation_callback.await data) (fun () -> Hashtbl.remove t.validation_callbacks hash) | `Duplicate -> [%log' warn t.logger] "Double validation callback for $state_hash" ~metadata:[ ("state_hash", Mina_base.State_hash.to_yojson hash) ] ) -let watch t ~timeout_duration ~cached_transition ~valid_cb - ~block_window_duration = +let watch t ~timeout_duration ~cached_transition ~valid_cb = let transition_with_hash, _ = Envelope.Incoming.data (Cached.peek cached_transition) in let hash = State_hash.With_state_hashes.state_hash transition_with_hash in let parent_hash = get_parent_hash transition_with_hash in log_block_metadata ~logger:t.logger ~parent_hash hash ; - register_validation_callback ~hash ~valid_cb ~block_window_duration t ; + register_validation_callback ~hash ~valid_cb t ; match Hashtbl.find t.collected_transitions parent_hash with | None -> let remaining_time = cancel_timeout t hash in @@ -303,7 +300,7 @@ let watch t ~timeout_duration ~cached_transition ~valid_cb for it is triggered (and validation callback is registered to be resolved when catchup receives corresponding block). 
*) -let watch_header t ~header_with_hash ~valid_cb ~block_window_duration = +let watch_header t ~header_with_hash ~valid_cb = let hash = State_hash.With_state_hashes.state_hash header_with_hash in let parent_hash = With_hash.data header_with_hash @@ -312,7 +309,7 @@ let watch_header t ~header_with_hash ~valid_cb ~block_window_duration = log_block_metadata ~logger:t.logger ~parent_hash hash ; match Hashtbl.find t.collected_transitions hash with | None -> - register_validation_callback ~hash ~valid_cb ~block_window_duration t ; + register_validation_callback ~hash ~valid_cb t ; if Writer.is_closed t.catchup_job_writer then [%log' trace t.logger] "catchup job pipe was closed; attempt to write to closed pipe" @@ -367,9 +364,6 @@ let%test_module "Transition_handler.Catchup_scheduler tests" = let create = create ~logger ~trust_system ~time_controller - let block_window_duration = - Mina_compile_config.For_unit_tests.t.block_window_duration - let verifier = Async.Thread_safe.block_on_async_exn (fun () -> Verifier.create ~logger ~proof_level ~constraint_constants @@ -407,8 +401,7 @@ let%test_module "Transition_handler.Catchup_scheduler tests" = in watch scheduler ~timeout_duration ~valid_cb:None ~cached_transition: - (Cached.pure @@ downcast_breadcrumb disjoint_breadcrumb) - ~block_window_duration ; + (Cached.pure @@ downcast_breadcrumb disjoint_breadcrumb) ; Async.Thread_safe.block_on_async_exn (fun () -> match%map Block_time.Timeout.await @@ -468,8 +461,7 @@ let%test_module "Transition_handler.Catchup_scheduler tests" = in watch scheduler ~timeout_duration ~valid_cb:None ~cached_transition: - (Cached.transform ~f:downcast_breadcrumb breadcrumb_2) - ~block_window_duration ; + (Cached.transform ~f:downcast_breadcrumb breadcrumb_2) ; Async.Thread_safe.block_on_async_exn (fun () -> Transition_frontier.add_breadcrumb_exn frontier (Cached.peek breadcrumb_1) ) ; @@ -548,8 +540,7 @@ let%test_module "Transition_handler.Catchup_scheduler tests" = in watch scheduler ~timeout_duration ~valid_cb:None ~cached_transition: - (Cached.pure @@ downcast_breadcrumb oldest_breadcrumb) - ~block_window_duration ; + (Cached.pure @@ downcast_breadcrumb oldest_breadcrumb) ; assert ( has_timeout_parent_hash scheduler (Transition_frontier.Breadcrumb.parent_hash oldest_breadcrumb) ) ; @@ -558,8 +549,7 @@ let%test_module "Transition_handler.Catchup_scheduler tests" = ~f:(fun prev_breadcrumb curr_breadcrumb -> watch scheduler ~timeout_duration ~valid_cb:None ~cached_transition: - (Cached.pure @@ downcast_breadcrumb curr_breadcrumb) - ~block_window_duration ; + (Cached.pure @@ downcast_breadcrumb curr_breadcrumb) ; assert ( not @@ has_timeout_parent_hash scheduler diff --git a/src/lib/transition_handler/processor.ml b/src/lib/transition_handler/processor.ml index 78655b431c7..97d197ba2f2 100644 --- a/src/lib/transition_handler/processor.ml +++ b/src/lib/transition_handler/processor.ml @@ -172,7 +172,6 @@ let process_transition ~context:(module Context : CONTEXT) ~trust_system | Ok _ | Error `Parent_missing_from_frontier -> [%log internal] "Schedule_catchup" ; Catchup_scheduler.watch_header catchup_scheduler ~valid_cb - ~block_window_duration:compile_config.block_window_duration ~header_with_hash ; return () | Error `Not_selected_over_frontier_root -> @@ -240,8 +239,7 @@ let process_transition ~context:(module Context : CONTEXT) ~trust_system in Catchup_scheduler.watch catchup_scheduler ~timeout_duration ~cached_transition:cached_initially_validated_transition - ~valid_cb - 
~block_window_duration:compile_config.block_window_duration ; + ~valid_cb ; return (Error ()) ) in (* TODO: only access parent in transition frontier once (already done in call to validate dependencies) #2485 *) From 7867c2b4767bb70d82d2a7ff9a43aed91b2503d6 Mon Sep 17 00:00:00 2001 From: svv232 Date: Tue, 15 Oct 2024 02:25:38 -0400 Subject: [PATCH 130/234] removing dead code in ipc test for mina net 2 --- src/lib/mina_net2/tests/all_ipc.ml | 3 --- 1 file changed, 3 deletions(-) diff --git a/src/lib/mina_net2/tests/all_ipc.ml b/src/lib/mina_net2/tests/all_ipc.ml index cb9c6c3438f..4027c6cea31 100644 --- a/src/lib/mina_net2/tests/all_ipc.ml +++ b/src/lib/mina_net2/tests/all_ipc.ml @@ -78,9 +78,6 @@ let%test_module "all-ipc test" = let bob_status = "This is major Tom to ground control\nI'm stepping through the door" - let block_window_duration = - Mina_compile_config.For_unit_tests.t.block_window_duration - type messages = { topic_a_msg_1 : string ; topic_a_msg_2 : string From 62aa4dfcc0a627ca501fc6bfb90825fb070a3d83 Mon Sep 17 00:00:00 2001 From: dkijania Date: Tue, 15 Oct 2024 09:47:11 +0200 Subject: [PATCH 131/234] consolidated benchmarks execution into python app --- buildkite/scripts/benchmarks.sh | 36 ++ buildkite/scripts/build-artifact.sh | 5 + .../scripts/run-snark-transaction-profiler.sh | 23 - scripts/benchmarks.sh | 19 - scripts/benchmarks/.gitignore | 4 + scripts/benchmarks/README.md | 97 ++++ scripts/benchmarks/__main__.py | 143 +++++ scripts/benchmarks/lib/__init__.py | 3 + scripts/benchmarks/lib/bench.py | 522 ++++++++++++++++++ scripts/benchmarks/lib/influx.py | 156 ++++++ scripts/benchmarks/lib/utils.py | 54 ++ scripts/benchmarks/requirements.txt | 2 + scripts/benchmarks/result_comparator.py | 27 + scripts/benchmarks/result_parser.py | 218 ++++++++ scripts/snark_transaction_profiler.py | 68 --- scripts/zkapp_metrics.sh | 16 - 16 files changed, 1267 insertions(+), 126 deletions(-) create mode 100755 buildkite/scripts/benchmarks.sh delete mode 100755 buildkite/scripts/run-snark-transaction-profiler.sh delete mode 100755 scripts/benchmarks.sh create mode 100644 scripts/benchmarks/.gitignore create mode 100644 scripts/benchmarks/README.md create mode 100644 scripts/benchmarks/__main__.py create mode 100644 scripts/benchmarks/lib/__init__.py create mode 100644 scripts/benchmarks/lib/bench.py create mode 100644 scripts/benchmarks/lib/influx.py create mode 100644 scripts/benchmarks/lib/utils.py create mode 100644 scripts/benchmarks/requirements.txt create mode 100644 scripts/benchmarks/result_comparator.py create mode 100755 scripts/benchmarks/result_parser.py delete mode 100755 scripts/snark_transaction_profiler.py delete mode 100755 scripts/zkapp_metrics.sh diff --git a/buildkite/scripts/benchmarks.sh b/buildkite/scripts/benchmarks.sh new file mode 100755 index 00000000000..3514b0b5e73 --- /dev/null +++ b/buildkite/scripts/benchmarks.sh @@ -0,0 +1,36 @@ +#!/bin/bash + +set -eox pipefail + +# Don't prompt for answers during apt-get install +export DEBIAN_FRONTEND=noninteractive +YELLOW_THRESHOLD="0.1" +RED_THRESHOLD="0.3" + +sudo apt-get update +sudo apt-get install -y git apt-transport-https ca-certificates tzdata curl python3 + +git config --global --add safe.directory /workdir + +source buildkite/scripts/export-git-env-vars.sh + +source buildkite/scripts/debian/install.sh "mina-berkeley,mina-test-suite" 1 + +pip install parse +pip install -r ./scripts/benchmarks/requirements.txt + +MAINLINE_BRANCHES="-m develop -m compatile -m master -m dkijania/build_performance_tooling_in_ci" 
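+# Results are uploaded to InfluxDB only when the current branch is one of the MAINLINE_BRANCHES above.
+# EXTRA_ARGS (--genesis-ledger-path) is currently only consumed by the ledger-export benchmark.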
+EXTRA_ARGS="--genesis-ledger-path ./genesis_ledgers/devnet.json" + +while [[ "$#" -gt 0 ]]; do case $1 in + heap-usage) BENCHMARK="heap-usage"; ;; + mina-base) BENCHMARK="mina-base"; ;; + ledger-export) BENCHMARK="ledger-export"; ;; + snark) BENCHMARK="snark"; ;; + zkapp) BENCHMARK="zkapp"; ;; + --yellow-threshold) YELLOW_THRESHOLD="$2"; shift;; + --red-threshold) RED_THRESHOLD="$2"; shift;; + *) echo "Unknown parameter passed: $1"; exit 1;; +esac; shift; done + +python3 ./scripts/benchmarks test --benchmark ${BENCHMARK} --branch ${BUILDKITE_BRANCH} --tmpfile ${BENCHMARK}.csv --yellow-threshold $YELLOW_THRESHOLD --red-threshold $RED_THRESHOLD $MAINLINE_BRANCHES $EXTRA_ARGS \ No newline at end of file diff --git a/buildkite/scripts/build-artifact.sh b/buildkite/scripts/build-artifact.sh index 263a258ef3c..ba7ba2349ef 100755 --- a/buildkite/scripts/build-artifact.sh +++ b/buildkite/scripts/build-artifact.sh @@ -45,4 +45,9 @@ dune build "--profile=${DUNE_PROFILE}" $INSTRUMENTED_PARAM \ src/app/rosetta/indexer_test/indexer_test.exe \ src/app/rosetta/ocaml-signer/signer_testnet_signatures.exe \ src/app/test_executive/test_executive.exe \ + src/app/benchmarks/benchmarks.exe \ + src/app/ledger_export_bench/ledger_export_benchmark.exe \ + src/app/disk_caching_stats/disk_caching_stats.exe \ + src/app/heap_usage/heap_usage.exe \ + src/app/zkapp_limits/zkapp_limits.exe \ src/test/command_line_tests/command_line_tests.exe # 2>&1 | tee /tmp/buildocaml.log diff --git a/buildkite/scripts/run-snark-transaction-profiler.sh b/buildkite/scripts/run-snark-transaction-profiler.sh deleted file mode 100755 index 802cd730632..00000000000 --- a/buildkite/scripts/run-snark-transaction-profiler.sh +++ /dev/null @@ -1,23 +0,0 @@ -#!/bin/bash - -set -eo pipefail - -# Don't prompt for answers during apt-get install -export DEBIAN_FRONTEND=noninteractive - -sudo apt-get update -sudo apt-get install -y git apt-transport-https ca-certificates tzdata curl python3 - -TESTNET_NAME="berkeley" - -git config --global --add safe.directory /workdir -source buildkite/scripts/export-git-env-vars.sh - -source buildkite/scripts/debian/install.sh "mina-${TESTNET_NAME}" 1 - -K=1 -MAX_NUM_UPDATES=4 -MIN_NUM_UPDATES=2 - -echo "-- Run Snark Transaction Profiler with parameters: --zkapps --k ${K} --max-num-updates ${MAX_NUM_UPDATES} --min-num-updates ${MIN_NUM_UPDATES}" -python3 ./scripts/snark_transaction_profiler.py ${K} ${MAX_NUM_UPDATES} ${MIN_NUM_UPDATES} diff --git a/scripts/benchmarks.sh b/scripts/benchmarks.sh deleted file mode 100755 index b072c9983a4..00000000000 --- a/scripts/benchmarks.sh +++ /dev/null @@ -1,19 +0,0 @@ -#!/bin/sh - -# runs inline benchmarks -# requires that app/benchmarks/benchmarks.exe is built -# run with -help to see available flags - -export BENCHMARKS_RUNNER=TRUE -export X_LIBRARY_INLINING=true - -GIT_ROOT="`git rev-parse --show-toplevel`" - -BENCHMARK_EXE=$GIT_ROOT/_build/default/src/app/benchmarks/benchmarks.exe - -if [ ! 
-f "$BENCHMARK_EXE" ]; then - echo "Please run 'make benchmarks' before running this script"; - exit 1 -fi - -exec $BENCHMARK_EXE "$@" -run-without-cross-library-inlining -suppress-warnings diff --git a/scripts/benchmarks/.gitignore b/scripts/benchmarks/.gitignore new file mode 100644 index 00000000000..749ccdafd4f --- /dev/null +++ b/scripts/benchmarks/.gitignore @@ -0,0 +1,4 @@ +# Byte-compiled / optimized / DLL files +__pycache__/ +*.py[cod] +*$py.class diff --git a/scripts/benchmarks/README.md b/scripts/benchmarks/README.md new file mode 100644 index 00000000000..afeda98a98d --- /dev/null +++ b/scripts/benchmarks/README.md @@ -0,0 +1,97 @@ +# Benchmarks + +Python app for running all major mina benchmarks of various type + +- mina-benchmarks +- snark-profiler +- heap-usage +- zkapp-limits +- ledger-export + +It requires all underlying app to be present on os By default app uses +official name (like mina, mina-heap-usage etc.). + +In order to upload files to influx db all 4 influx env vars need to be defined: +- INFLUX_BUCKET_NAME +- INFLUX_ORG +- INFLUX_TOKEN +- INFLUX_HOST + +More details here: +https://docs.influxdata.com/influxdb/cloud/reference/cli/influx/#credential-precedence + +## Installation + +Project depends on Python in version 3+ + + +```commandline +pip install -r ./scripts/benchmarks/requirements.txt +``` + +## Usage + +python3 ./scripts/benchmarks run --benchmark mina-base --path _build/default/src/app/benchmarks/benchmarks.exe --influx --branch compatible --format csv --outfile mina_base.csv + +## Commands + +### ls + +Prints all supported benchmarks + +```commandline + python3 scripts/benchmarks ls +``` + +### run + +runs benchmark. + +INFO: each benchmark can have its own set of additional parameters + +example: +```commandline +python3 scripts/benchmarks run --benchmark snark --path _build/default/src/app/cli/src/mina.exe --branch compatible --outfile zkap_limits.csv +``` + +### parse + +Parses textual output of benchmark to csv + +```commandline +python3 scripts/benchmarks parse --benchmark mina-base --influx --branch compatible --infile output.out --outfile mina_base.csv +``` + + +### compare + +Compare result against moving average from influx db + +```commandline +python3 scripts/benchmarks compare --infile vrf_lib_tests_mina_base.csv --yellow-threshold 0.1 --red-threshold 0.2 +``` + +### upload + +Uploads data to influx db + +```commandline +python3 scripts/benchmarks upload --infile mina_base_mina_base.csv +``` + +### test + +Aggregates all above commands with logic to only upload data if branch is amongst mainline branches + +```commandline +python3 scripts/benchmarks test --benchmark snark --path _build/default/src/app/cli/src/mina.exe --branch compatible --tmpfile zkap_limits.csv +``` + + +## Further work + +Application is meant to be run in CI. Currently it exits when values exceeds moving average. +Some process need to be agreed how to handle situation where increase in value is expected and values should be uploaded to +influx db. 
+
+## Further work
+
+The application is meant to be run in CI. Currently it exits with an error whenever a value exceeds the moving average.
+A process still needs to be agreed on for the case where an increase is expected and the values should nevertheless be uploaded to
+influx db. One proposal is to add an env var which can bypass the comparison, plus additional logic which accepts a value that exceeds
+the moving average but does not exceed the highest recorded one
+(otherwise the moving average would keep rejecting subsequent runs until it catches up with the expected increase).
diff --git a/scripts/benchmarks/__main__.py b/scripts/benchmarks/__main__.py
new file mode 100644
index 00000000000..b9a7e8571ed
--- /dev/null
+++ b/scripts/benchmarks/__main__.py
@@ -0,0 +1,143 @@
+"""
+    Mina benchmark runner
+
+    Capable of running, parsing to csv, comparing with historical data stored in influx and uploading to influx.
+
+    Requirements:
+
+    all INFLUX_* env vars need to be defined (INFLUX_HOST,INFLUX_TOKEN,INFLUX_BUCKET_NAME,INFLUX_ORG)
+
+"""
+
+import argparse
+from pathlib import Path
+
+from lib import *
+
+parser = argparse.ArgumentParser(description='Executes mina benchmarks')
+subparsers = parser.add_subparsers(dest="cmd")
+run_bench = subparsers.add_parser('run')
+run_bench.add_argument("--outfile", required=True, help="output file")
+run_bench.add_argument("--benchmark", type= BenchmarkType, help="benchmark to run")
+run_bench.add_argument("--influx", action='store_true', help = "Required only if --format=csv. Makes csv compliant with influx csv ")
+run_bench.add_argument("--format", type=Format, help="output file format [text,csv]", default=Format.text)
+run_bench.add_argument("--path", help="override path to benchmark")
+run_bench.add_argument("--branch", default="test", help="Required only if --format=csv. Add branch name to csv file")
+run_bench.add_argument("--genesis-ledger-path")
+
+parse_bench = subparsers.add_parser('parse',help="parse textual benchmark output to csv")
+parse_bench.add_argument("--benchmark", type= BenchmarkType, help="benchmark to run")
+parse_bench.add_argument("--infile",help="input file")
+parse_bench.add_argument("--influx", action='store_true', help="assure output file is compliant with influx schema")
+parse_bench.add_argument("--branch", help="adds additional column in csv with branch from which benchmarks were built")
+parse_bench.add_argument("--outfile", help="output file")
+
+compare_bench = subparsers.add_parser('compare', help="compare current data with historical data downloaded from influx db")
+compare_bench.add_argument("--benchmark", type= BenchmarkType, help="benchmark to run")
+compare_bench.add_argument("--infile", help="input file")
+compare_bench.add_argument("--yellow-threshold",help="defines how many percent current measurement can exceed average so app will trigger warning",
+                           type=float,
+                           choices=[Range(0.0, 1.0)],
+                           default=0.1)
+compare_bench.add_argument("--red-threshold",help="defines how many percent current measurement can exceed average so app will exit with error",
+                           type=float,
+                           choices=[Range(0.0, 1.0)],
+                           default=0.2)
+
+upload_bench = subparsers.add_parser('upload')
+upload_bench.add_argument("--infile")
+
+test_bench = subparsers.add_parser('test', help="Performs entire cycle of operations from run till upload")
+test_bench.add_argument("--benchmark", type=BenchmarkType, help="benchmark to test")
+test_bench.add_argument("--tmpfile", help="temporary location of result file")
+test_bench.add_argument("--path")
+test_bench.add_argument("--yellow-threshold",
+                        help="defines how many percent current measurement can exceed average so app will trigger warning",
+                        type=float,
+                        choices=[Range(0.0, 1.0)],
+                        default=0.1)
+test_bench.add_argument("--red-threshold",
+                        help="defines how many 
percent current measurement can exceed average so app will exit with error", + type=float, + choices=[Range(0.0, 1.0)], + default=0.2) +test_bench.add_argument("--branch", help="branch which was used in tests") +test_bench.add_argument("--genesis-ledger-path", help="Applicable only for ledger-export benchmark. Location of genesis config file") +test_bench.add_argument('-m','--mainline-branches', action='append', help='Defines mainline branch. If values of \'--branch\' parameter is among mainline branches then result will be uploaded') + + +upload_bench = subparsers.add_parser('ls') + +args = parser.parse_args() + +logging.basicConfig(level=logging.DEBUG) + +default_mainline_branches = ["develop", "compatible", "master"] + + +def select_benchmark(kind): + if kind == BenchmarkType.mina_base: + return MinaBaseBenchmark() + elif kind == BenchmarkType.zkapp: + return ZkappLimitsBenchmark() + elif kind == BenchmarkType.heap_usage: + return HeapUsageBenchmark() + elif kind == BenchmarkType.snark: + return SnarkBenchmark() + elif kind == BenchmarkType.ledger_export: + if args.genesis_ledger_path is None: + print( + "--genesis-ledger-path need to be provided when running ledger export benchmark" + ) + exit(1) + return LedgerExportBenchmark(args.genesis_ledger_path) + +if args.cmd == "ls": + benches = [str(b) for b in BenchmarkType] + print("\n".join(benches)) + exit(0) + +if args.benchmark is None: + print("benchmark not selected") + exit(1) + +bench = select_benchmark(args.benchmark) + +if args.cmd == "run": + output = bench.run(path=args.path) + if args.format == "text": + with open(args.outfile, 'w') as file: + file.write(output) + else: + files = ",".join( + bench.parse(output, args.outfile, args.influx, args.branch)) + print(f"produced files: {files}") + +if args.cmd == "parse": + files = bench.parse(Path(args.infile).read_text(), args.outfile, args.influx, args.branch) + print(f'Parsed files: \n{",".join(files)}') + + +if args.cmd == "compare": + bench.compare(args.infile, args.yellow_threshold, args.red_threshold) + +if args.cmd == "upload": + bench.upload(args.infile) + +if args.cmd == "test": + output = bench.run(path=args.path) + files = bench.parse(output, + args.tmpfile, + influxdb=True, + branch=args.branch) + + [ + bench.compare(file, args.yellow_threshold, args.red_threshold) + for file in files + ] + + mainline_branches = default_mainline_branches if args.mainline_branches is None else args.mainline_branches + + if args.branch in mainline_branches: + for file in files: + bench.upload(file) \ No newline at end of file diff --git a/scripts/benchmarks/lib/__init__.py b/scripts/benchmarks/lib/__init__.py new file mode 100644 index 00000000000..d4e612aaf46 --- /dev/null +++ b/scripts/benchmarks/lib/__init__.py @@ -0,0 +1,3 @@ +from .influx import * +from .bench import * +from .utils import * diff --git a/scripts/benchmarks/lib/bench.py b/scripts/benchmarks/lib/bench.py new file mode 100644 index 00000000000..47e45da64c3 --- /dev/null +++ b/scripts/benchmarks/lib/bench.py @@ -0,0 +1,522 @@ +import re +from abc import ABC + +import parse +from pathlib import Path +import io +import os +from enum import Enum +import logging +from lib.utils import isclose, assert_cmd +from lib.influx import * + +import csv +import abc + +logger = logging.getLogger(__name__) + + +class Benchmark(abc.ABC): + """ + Abstract class which aggregate all necessary operations + (run,parse) which then are implemented by children. 
+ Moreover, for all general and common operations like upload it has concrete implementation + + """ + + def __init__(self, kind): + self.kind = kind + self.influx_client = Influx() + + def headers_to_influx(self, headers): + return "#datatype " + ",".join( + [header.influx_kind for header in headers]) + + @abc.abstractmethod + def default_path(self): + pass + + @abc.abstractmethod + def name_header(self): + pass + + @abc.abstractmethod + def branch_header(self): + pass + + def headers_to_name(self, headers): + return list(map(lambda x: x.name, headers)) + + @abc.abstractmethod + def headers(self): + pass + + @abc.abstractmethod + def fields(self): + pass + + @abc.abstractmethod + def run(self, path): + pass + + @abc.abstractmethod + def parse(self, content, output_filename, influxdb, branch): + pass + + def compare(self, result_file, yellow_threshold, red_threshold): + with open(result_file, newline='') as csvfile: + reader = csv.reader(csvfile, delimiter=',') + for i in range(2): + next(reader) + for row in reader: + for field in self.fields(): + value = float(row[field.pos]) + name = row[self.name_header().pos] + branch = row[self.branch_header().pos] + result = self.influx_client.query_moving_average( + name, branch, str(field), self.branch_header()) + + if not any(result): + logger.warning( + f"Skipping comparison for {name} as there are no historical data available yet" + ) + else: + average = float(result[-1].records[-1]["_value"]) + + current_red_threshold = average * red_threshold + current_yellow_threshold = average * yellow_threshold + + logger.debug( + f"calculated thresholds: [red={current_red_threshold},yellow={current_yellow_threshold}]" + ) + + if isclose(value + red_threshold, average): + logger.error( + f"{name} measurement exceeds time greatly ({value + current_red_threshold} against {average}). failing the build" + ) + exit(1) + elif isclose(value + yellow_threshold, average): + logger.warning( + f"WARNING: {name} measurement exceeds expected time ({value + current_yellow_threshold} against {average})" + ) + else: + logger.info( + f"comparison succesful for {name}. 
{value} is less than threshold [yellow={average + current_yellow_threshold},red={average + current_red_threshold}]" + ) + + def upload(self, file): + self.influx_client.upload_csv(file) + + +class BenchmarkType(Enum): + mina_base = 'mina-base' + snark = 'snark' + heap_usage = 'heap-usage' + zkapp = 'zkapp' + ledger_export = 'ledger-export' + + def __str__(self): + return self.value + + +class JaneStreetBenchmark(Benchmark, ABC): + """ + Abstract class for native ocaml benchmarks which has the same format + + """ + name = MeasurementColumn("Name", 0) + time_per_runs = FieldColumn("Time/Run", 1, "us") + cycles_per_runs = FieldColumn("Cycls/Run", 2, "kc") + minor_words_per_runs = FieldColumn("mWd/Run", 3, "w") + major_words_per_runs = FieldColumn("mjWd/Run", 4, "w") + promotions_per_runs = FieldColumn("Prom/Run", 5, "w") + branch = TagColumn("gitbranch", 6) + + def __init__(self, kind): + Benchmark.__init__(self, kind) + + def headers(self): + return [ + MinaBaseBenchmark.name, MinaBaseBenchmark.time_per_runs, + MinaBaseBenchmark.cycles_per_runs, + MinaBaseBenchmark.minor_words_per_runs, + MinaBaseBenchmark.major_words_per_runs, + MinaBaseBenchmark.promotions_per_runs, MinaBaseBenchmark.branch + ] + + def fields(self): + return [ + MinaBaseBenchmark.time_per_runs, MinaBaseBenchmark.cycles_per_runs, + MinaBaseBenchmark.minor_words_per_runs, + MinaBaseBenchmark.major_words_per_runs, + MinaBaseBenchmark.promotions_per_runs + ] + + def name_header(self): + return self.name + + def branch_header(self): + return self.branch + + def export_to_csv(self, lines, filename, influxdb, branch): + with open(filename, 'w') as csvfile: + + csvwriter = csv.writer(csvfile) + + if influxdb: + csvfile.write(self.headers_to_influx(self.headers()) + "\n") + + for line in lines: + if line.startswith('│'): + + rows = list(map(lambda x: x.strip(), line.split('│'))) + rows = list(filter(lambda x: x, rows)) + + if rows[0].startswith(MinaBaseBenchmark.name.name): + rows[ + 1] += " " + MinaBaseBenchmark.time_per_runs.format_unit( + ) + rows[ + 2] += " " + MinaBaseBenchmark.cycles_per_runs.format_unit( + ) + rows[ + 3] += " " + MinaBaseBenchmark.minor_words_per_runs.format_unit( + ) + rows[ + 4] += " " + MinaBaseBenchmark.major_words_per_runs.format_unit( + ) + rows[ + 5] += " " + MinaBaseBenchmark.promotions_per_runs.format_unit( + ) + rows.append("gitbranch") + + else: + # remove [.*] from name + rows[0] = re.sub('\[.*?\]', '', rows[0]).strip() + time = rows[1] + # remove units from values + if not time.endswith("us"): + if time.endswith("ns"): + time = float(time[:-2]) * 1_000 + rows[1] = time + else: + raise Exception( + "Time can be expressed only in us or ns") + else: + # us + rows[1] = time[:-2] + # kc + rows[2] = rows[2][:-2] + # w + rows[3] = rows[3][:-1] + # w + rows[4] = rows[4][:-1] + # w + rows[5] = rows[5][:-1] + rows.append(branch) + + csvwriter.writerow(rows[:]) + + def parse(self, content, output_filename, influxdb, branch): + buf = io.StringIO(content) + lines = buf.readlines() + + starts = [] + ends = [] + files = [] + for i, e in enumerate(lines): + if "Running" in e: + starts.append(i) + + if not any(starts): + self.export_to_csv(lines, output_filename, influxdb, branch) + else: + for start in starts[1:]: + ends.append(start) + + ends.append(len(lines) - 1) + + for start, end in zip(starts, ends): + name = parse.parse('Running inline tests in library "{}"', + lines[start].strip())[0] + file = f'{name}_{output_filename}' + logger.info(f"exporting {file}..") + self.export_to_csv(lines[start:end], 
f'{file}', influxdb, + branch) + files.append(file) + + return files + + +class MinaBaseBenchmark(JaneStreetBenchmark): + + def __init__(self): + JaneStreetBenchmark.__init__(self, BenchmarkType.mina_base) + + def run(self, path=None): + path = self.default_path() if path is None else path + cmd = [ + path, "time", "cycles", "alloc", "-clear-columns", "-all-values", + "-width", "1000", "-run-without-cross-library-inlining", + "-suppress-warnings" + ] + envs = os.environ.copy() + envs["BENCHMARKS_RUNNER"] = "TRUE" + envs["X_LIBRARY_INLINING"] = "true" + + return assert_cmd(cmd, envs) + + def default_path(self): + return "mina-benchmarks" + + +class LedgerExportBenchmark(JaneStreetBenchmark): + + def __init__(self, genesis_ledger_path): + JaneStreetBenchmark.__init__(self, BenchmarkType.ledger_export) + self.genesis_ledger_path = genesis_ledger_path + + def run(self, path=None): + path = self.default_path() if path is None else path + cmd = [ + path, "time", "cycles", "alloc", "-clear-columns", "-all-values", + "-width", "1000" + ] + envs = os.environ.copy() + envs["RUNTIME_CONFIG"] = self.genesis_ledger_path + + return assert_cmd(cmd, envs) + + def default_path(self): + return "mina-ledger-export-benchmark" + + +class ZkappLimitsBenchmark(Benchmark): + + name = MeasurementColumn("Name", 0) + proofs_updates = FieldColumn("proofs updates", 1, "") + signed_updates = FieldColumn("signed updates", 2, "") + pairs_of_signed = FieldColumn("pairs of signed", 3, "") + total_account_updates = FieldColumn("total account updates", 4, "") + cost = FieldColumn("cost", 5, "") + category = TagColumn("category", 6) + branch = TagColumn("gitbranch", 7) + + def __init__(self): + Benchmark.__init__(self, BenchmarkType.zkapp) + + def default_path(self): + return "mina-zkapp-limits" + + def fields(self): + return [ + self.proofs_updates, self.pairs_of_signed, + self.total_account_updates, self.cost + ] + + def name_header(self): + return self.name + + def branch_header(self): + return self.branch + + def headers(self): + return [ + ZkappLimitsBenchmark.name, ZkappLimitsBenchmark.proofs_updates, + ZkappLimitsBenchmark.signed_updates, + ZkappLimitsBenchmark.pairs_of_signed, + ZkappLimitsBenchmark.total_account_updates, + ZkappLimitsBenchmark.cost, ZkappLimitsBenchmark.category, + ZkappLimitsBenchmark.branch + ] + + def parse(self, content, output_filename, influxdb, branch): + + buf = io.StringIO(content) + lines = buf.readlines() + + stats = [list(map(lambda x: x.name, self.headers()))] + + for line in lines: + if line == '': + continue + + syntax = "Proofs updates=(?P\d+) Signed/None updates=(?P\d+) Pairs of Signed/None updates=(?P\d+): Total account updates: (?P\d+) Cost: (?P[0-9]*[.]?[0-9]+)" + + match = re.match(syntax, line) + + if match: + proofs_updates = int(match.group("proofs_updates")) + signed_updates = int(match.group("signed_updates")) + pairs_of_signed_updates = int( + match.group("pairs_of_signed_updates")) + total_account_updates = int( + match.group("total_account_updates")) + cost = float(match.group(ZkappLimitsBenchmark.cost.name)) + name = f"P{proofs_updates}S{signed_updates}PS{pairs_of_signed_updates}TA{total_account_updates}" + tag = "zkapp" + stats.append((name, proofs_updates, signed_updates, + pairs_of_signed_updates, total_account_updates, + cost, tag, branch)) + + with open(output_filename, 'w') as csvfile: + if influxdb: + csvfile.write( + self.headers_to_influx(self.headers()) + "\n") + csvwriter = csv.writer(csvfile) + csvwriter.writerows(stats) + + return [output_filename] + + def 
run(self, path=None): + path = self.default_path() if path is None else path + return assert_cmd([path]) + + +class SnarkBenchmark(Benchmark): + + name = MeasurementColumn("name", 0) + proofs_updates = FieldColumn("proofs updates", 1, "") + nonproofs_pairs = FieldColumn("non-proof pairs", 2, "") + nonproofs_singles = FieldColumn("non-proof singles", 3, "") + verification_time = FieldColumn("verification time", 4, "[s]") + proving_time = FieldColumn("value", 5, "[s]") + category = TagColumn("category", 6) + branch = TagColumn("gitbranch", 7) + + k = 1 + max_num_updates = 4 + min_num_updates = 2 + + def name_header(self): + return self.name + + def branch_header(self): + return self.branch + + def __init__(self): + Benchmark.__init__(self, BenchmarkType.snark) + + def headers(self): + return [ + SnarkBenchmark.name, SnarkBenchmark.proofs_updates, + SnarkBenchmark.nonproofs_pairs, SnarkBenchmark.nonproofs_singles, + SnarkBenchmark.verification_time, SnarkBenchmark.proving_time, + SnarkBenchmark.category, SnarkBenchmark.branch + ] + + def fields(self): + return [ + SnarkBenchmark.proofs_updates, SnarkBenchmark.nonproofs_pairs, + SnarkBenchmark.nonproofs_singles, SnarkBenchmark.verification_time, SnarkBenchmark.proving_time + ] + + def parse(self, content, output_filename, influxdb, branch): + buf = io.StringIO(content) + lines = buf.readlines() + rows = [] + category = "snark" + rows.append(list(map(lambda x: x.name, self.headers()))) + + for line in lines: + if line.startswith("|"): + if "--" in line: + continue + elif line.startswith("| No.|"): + continue + else: + cols = line.split("|") + cols = list(map(lambda x: x.strip(), cols)) + cols = list(filter(lambda x: x, cols)) + + #| No.| Proof updates| Non-proof pairs| Non-proof singles| Mempool verification time (sec)| Transaction proving time (sec)|Permutation| + proof_update = cols[1] + non_proof_pairs = cols[2] + non_proof_singles = cols[3] + verification_time = cols[4] + proving_time = cols[5] + name = cols[6] + + rows.append((name,proof_update,non_proof_pairs,non_proof_singles,verification_time,proving_time, + category,branch)) + + with open(output_filename, 'w') as csvfile: + if influxdb: + csvfile.write(self.headers_to_influx(self.headers()) + "\n") + + csvwriter = csv.writer(csvfile) + csvwriter.writerows(rows) + + return [ output_filename ] + + def default_path(self): + return "mina" + + def run(self, path=None): + path = self.default_path() if path is None else path + return assert_cmd([ + path, "transaction-snark-profiler", "--zkapps", "--k", + str(self.k), "--max-num-updates", + str(self.max_num_updates), "--min-num-updates", + str(self.min_num_updates) + ]) + + +class HeapUsageBenchmark(Benchmark): + + name = MeasurementColumn("Name", 0) + heap_words = FieldColumn("heap words", 1, "") + bytes = FieldColumn("bytes", 2, "") + category = TagColumn("category", 3) + branch = TagColumn("gitbranch", 4) + + def __init__(self): + Benchmark.__init__(self, BenchmarkType.heap_usage) + + def name_header(self): + return self.name + + def branch_header(self): + return self.branch + + def headers(self): + return [ + HeapUsageBenchmark.name, HeapUsageBenchmark.heap_words, + HeapUsageBenchmark.bytes, HeapUsageBenchmark.category, + HeapUsageBenchmark.branch + ] + + def fields(self): + return [ + HeapUsageBenchmark.heap_words, + HeapUsageBenchmark.bytes + ] + + def parse(self, content, output_filename, influxdb, branch): + buf = io.StringIO(content) + lines = buf.readlines() + rows = [] + rows.append(self.headers_to_name(self.headers())) + + for i, 
line in enumerate(lines): + if line.startswith("Data of type"): + sanitized_line = line.replace(" ", "").strip() + row = list( + parse.parse("Dataoftype{}uses{}heapwords={}bytes", + sanitized_line)) + row.extend(("heap_usage", branch)) + rows.append(row) + + with open(output_filename, 'w') as csvfile: + if influxdb: + csvfile.write(self.headers_to_influx(self.headers()) + "\n") + csvwriter = csv.writer(csvfile) + csvwriter.writerows(rows) + return [output_filename] + + def default_path(self): + return "mina-heap-usage" + + def run(self, path=None): + path = self.default_path() if path is None else path + return assert_cmd([path]) diff --git a/scripts/benchmarks/lib/influx.py b/scripts/benchmarks/lib/influx.py new file mode 100644 index 00000000000..c8f92dca485 --- /dev/null +++ b/scripts/benchmarks/lib/influx.py @@ -0,0 +1,156 @@ +import logging +import os +import subprocess +import time +from pathlib import Path + +import influxdb_client + +logger = logging.getLogger(__name__) + + +class HeaderColumn: + """ + Specialized column class for influx upload. + It accepts influx_kind [string,double,tag..] and pos which helps find it in csv when parsing + """ + + def __init__(self, name, influx_kind, pos): + self.name = name + self.influx_kind = influx_kind + self.pos = pos + + +class MeasurementColumn(HeaderColumn): + """ + Column header which represents influx measurement header + """ + + def __init__(self, name, pos): + HeaderColumn.__init__(self, name, influx_kind="measurement", pos=pos) + + +class FieldColumn(HeaderColumn): + """ + Column header which represents influx field header. + It has additional unit field which can be formatted as part of name + Currently field is always a double (there was no need so far for different type) + """ + + def __init__(self, name, pos, unit=None): + HeaderColumn.__init__(self, name, influx_kind="double", pos=pos) + self.unit = unit + + def __str__(self): + if self.unit: + return f"{self.name} [{self.unit}]" + else: + return f"{self.name}" + + def format_unit(self): + return f"[{self.unit}]" + + +class TagColumn(HeaderColumn): + """ + Specialized header for inglux tag + """ + + def __init__(self, name, pos): + HeaderColumn.__init__(self, name, influx_kind="tag", pos=pos) + + +class Influx: + """ + Influx helper which wraps influx cli and python api + It requires INFLUX_* env vars to be set + and raises RuntimeException if they are not defined + """ + + host = "INFLUX_HOST" + token = "INFLUX_TOKEN" + org = "INFLUX_ORG" + bucket = "INFLUX_BUCKET_NAME" + + @staticmethod + def check_envs(): + if Influx.host not in os.environ: + raise RuntimeError(f"{Influx.host} env var not defined") + if Influx.token not in os.environ: + raise RuntimeError(f"{Influx.token} env var not defined") + if Influx.org not in os.environ: + raise RuntimeError(f"{Influx.org} env var not defined") + if Influx.bucket not in os.environ: + raise RuntimeError(f"{Influx.bucket} env var not defined") + + def __init__(self, moving_average_size=10): + Influx.check_envs() + self.client = influxdb_client.InfluxDBClient( + url=os.environ[Influx.host], + token=os.environ[Influx.token], + org=os.environ[Influx.org], + bucket=os.environ[Influx.bucket]) + self.moving_average_size = moving_average_size + + def __get_moving_average_query(self, name, branch, field, branch_header): + """ + Constructs moving average query from influx for comparison purposes + """ + + bucket = os.environ[Influx.bucket] + return f"from(bucket: \"{bucket}\") \ + |> range(start: -10d) \ + |> filter (fn: (r) => 
(r[\"{branch_header.name}\"] == \"{branch}\" ) \ + and r._measurement == \"{name}\" \ + and r._field == \"{field}\" ) \ + |> keep(columns: [\"_value\"]) \ + |> movingAverage(n:{self.moving_average_size}) " + + def query_moving_average(self, name, branch, field, branch_header): + """ + Retrieves moving average from influx db for particular + branch and field + """ + + query = self.__get_moving_average_query(name, branch, field, + branch_header) + logger.debug(f"running influx query: {query}") + query_api = self.client.query_api() + return query_api.query(query) + + def upload_csv(self, file): + """ + Uploads csv to influx db. File need to be formatter according to influx requirements: + https://docs.influxdata.com/influxdb/cloud/reference/syntax/annotated-csv/ + + WARNING: InfluxDb write api is not very friendly with csv which contains more than measurement + in csv file (which is our case). I decided to use influx cli as it supports multiple measurements in + single csv file. + Unfortunately influx cli has nasty issue when calling from python similar to: + (similar to hanging queries problem: https://community.influxdata.com/t/influxdb-hanging-queries/1522). + My workaround is to use --http-debug flag, then read output of command and if there is 204 status code + returned i kill influx cli + """ + + if not Path(file).is_file(): + raise RuntimeError(f"cannot find {file}") + + if not open(file).readline().rstrip().startswith("#datatype"): + raise RuntimeError( + f"{file} is badly formatted and not eligible for uploading to influx db. " + f"see more at https://docs.influxdata.com/influxdb/cloud/reference/syntax/annotated-csv/" + ) + + process = subprocess.Popen([ + "influx", "write", "--http-debug", "--format=csv", f"--file={file}" + ], + stderr=subprocess.PIPE) + + timeout = time.time() + 60 # 1 minute + while True: + line = process.stderr.readline() + if b"HTTP/2.0 204 No Content" in line or time.time() > timeout: + process.kill() + break + + logger.info(f"{file} uploaded to influx db") diff --git a/scripts/benchmarks/lib/utils.py b/scripts/benchmarks/lib/utils.py new file mode 100644 index 00000000000..382dce31d04 --- /dev/null +++ b/scripts/benchmarks/lib/utils.py @@ -0,0 +1,54 @@ +import subprocess +import logging +from enum import Enum + +logger = logging.getLogger(__name__) + + +def isclose(a, b, rel_tol=1e-09, abs_tol=0.0): + return abs(a - b) <= max(rel_tol * max(abs(a), abs(b)), abs_tol) + + +def assert_cmd(cmd, envs=None): + logger.debug(f"running command {cmd}") + result = subprocess.run(cmd, + stdout=subprocess.PIPE, + stderr=subprocess.PIPE, + env=envs) + + if result.returncode != 0: + err = result.stderr.decode("UTF-8") + logger.error( + f"{cmd} resulted in errorcode {result.returncode} with message {err}" + ) + raise RuntimeError(f"cmd failed: {cmd} with stderr: {err}") + + output = result.stdout.decode("UTF-8") + logger.debug(f"command output: {output}") + return output + +class Range(object): + + def __init__(self, start, end): + self.start = start + self.end = end + + def __eq__(self, other): + return self.start <= other <= self.end + + def __contains__(self, item): + return self.__eq__(item) + + def __iter__(self): + yield self + + def __str__(self): + return '[{0},{1}]'.format(self.start, self.end) + + +class Format(Enum): + csv = 'csv' + text = 'text' + + def __str__(self): + return self.value diff --git a/scripts/benchmarks/requirements.txt b/scripts/benchmarks/requirements.txt new file mode 100644 index 00000000000..22ccbdce44d --- /dev/null +++ 
b/scripts/benchmarks/requirements.txt @@ -0,0 +1,2 @@ +influxdb_client==1.46.0 +parse==1.20.1 diff --git a/scripts/benchmarks/result_comparator.py b/scripts/benchmarks/result_comparator.py new file mode 100644 index 00000000000..783b325af2d --- /dev/null +++ b/scripts/benchmarks/result_comparator.py @@ -0,0 +1,27 @@ +import csv +import argparse +import subprocess + + +parser = argparse.ArgumentParser(description='Calculate actual benchmark values against influx db') +parser.add_argument('--infile', + help='input csv file with actual benchmark') +parser.add_argument('--red-threshold', + help='value above which app return exit 1') +parser.add_argument('--yellow-threshold', + help='value above which app return warning', + ) +args = parser.parse_args() + +with open(args.infile, newline='') as csvfile: + rows = list(csv.reader(csvfile)) + + headers_rows = rows[1] + name_pos = [ i for i,x in enumerate(headers_rows) if x == "Name"][0] + branch_pos = [ i for i,x in enumerate(headers_rows) if x == "gitbranch"][0] + + for items in rows[2:]: + name = items[name_pos] + branch = items[branch_pos] + output = subprocess.run(["influx", "query", f'from(bucket: "mina-benchmarks") |> range(start: -10d) |> filter (fn: (r) => (r._tag["gitbranch"] == "{branch}" ) and r._measurement == "{name}") |> keep(columns: ["_value"]) |> movingAverage(n:1) ']).stdout.read() + print(output) \ No newline at end of file diff --git a/scripts/benchmarks/result_parser.py b/scripts/benchmarks/result_parser.py new file mode 100755 index 00000000000..d5d0e5e0f09 --- /dev/null +++ b/scripts/benchmarks/result_parser.py @@ -0,0 +1,218 @@ +import csv +import argparse +import re +from parse import * +from pathlib import Path + +from enum import Enum + + +class Benchmark(Enum): + tabular = 'tabular' + snark = 'snark' + heap_usage = 'heap-usage' + zkapp = 'zkapp' + + def __str__(self): + return self.value + + +def export_to_csv(lines, filename, influxdb, branch): + with open(filename, 'w') as csvfile: + + csvwriter = csv.writer(csvfile) + + if influxdb: + csvfile.write("#datatype measurement,double,double,double,double,tag\n") + + for line in lines: + if line.startswith('│'): + + rows = list(map(lambda x: x.strip(), line.split('│'))) + rows = list(filter(lambda x: x, rows)) + + if rows[0].startswith("Name"): + rows[1] += " [us]" + rows[2] += " [kc]" + rows[3] += " [w]" + rows[4] += " [w]" + rows.append("gitbranch") + + else: + # remove [.*] from name + rows[0] = re.sub('\[.*?\]', '', rows[0]).strip() + time = rows[1] + # remove units from values + if not time.endswith("us"): + if time.endswith("ns"): + time = float(time[:-2])* 1_000 + rows[1] = time + else: + raise Exception("Time can be expressed only in us or ns") + else: + # us + rows[1] = time[:-2] + # kc + rows[2] = rows[2][:-2] + # w + rows[3] = rows[3][:-1] + # w + rows[4] = rows[4][:-1] + + rows.append(branch) + + csvwriter.writerow(rows[:]) + + +def parse_zkapp_limits(input_filename, output_filename, influxdb, branch): + with open(input_filename, 'r', encoding='UTF-8') as file: + lines = file.readlines() + stats = [] + header = ["proofs updates", "signed updates", "pairs of signed", "total account updates", "cost" , "gitbranch"] + stats.append(header) + + for line in lines: + if line == '': + continue + + syntax = "Proofs updates=(?P\d+) Signed/None updates=(?P\d+) Pairs of Signed/None updates=(?P\d+): Total account updates: (?P\d+) Cost: (?P[0-9]*[.]?[0-9]+)" + + match = re.match(syntax, line) + + if match: + proofs_updates = int(match.group('proofs_updates')) + signed_updates = 
int(match.group('signed_updates')) + pairs_of_signed_updates = int(match.group('pairs_of_signed_updates')) + total_account_updates = int(match.group('total_account_updates')) + cost = float(match.group('cost')) + name = f"P{proofs_updates}S{signed_updates}PS{pairs_of_signed_updates}TA{total_account_updates}" + tag = "zkapp" + stats.append((name,proofs_updates, signed_updates, pairs_of_signed_updates, total_account_updates, cost, tag, branch)) + + with open(output_filename, 'w') as csvfile: + if influxdb: + csvfile.write("#datatype measurement,double,double,double,double,double,tag\n") + csvwriter = csv.writer(csvfile) + csvwriter.writerows(stats) + + +def parse_snark_format(input_filename, output_filename, influxdb, branch): + with open(input_filename, 'r', encoding='UTF-8') as file: + lines = file.readlines() + stats = [] + zkapps = [] + + header = ["measurement", "proof updates", "nonproofs", "value", "tag", "gitbranch"] + stats.append(header) + + for line in lines: + if line == '': + continue + + syntax = 'Generated zkapp transactions with (?P\d+) updates and (?P\d+) proof updates in (?P