diff --git a/.xrefcheck.yaml b/.xrefcheck.yaml
deleted file mode 100644
index a1c87cb6104..00000000000
--- a/.xrefcheck.yaml
+++ /dev/null
@@ -1,63 +0,0 @@
-# Parameters of repository traversal.
-traversal:
- # Files and folders which we pretend do not exist
- # (so they are neither analyzed nor can be referenced).
- ignored:
- # Git files
- - .git
- # Build artifacts
- - _build
- - _opam
- # Git submodules
- - src/external
- - src/lib/marlin
- - src/lib/crypto/proof-systems
- - src/lib/snarky
- - frontend/wallet/tablecloth
- # Unsure of the relevance anymore
- - frontend/wallet/README.md
-
-# Verification parameters.
-verification:
- # On 'anchor not found' error, how much similar anchors should be displayed as
- # hint. Number should be between 0 and 1, larger value means stricter filter.
- anchorSimilarityThreshold: 0.5
-
- # When checking external references, how long to wait on request before
- # declaring "Response timeout".
- externalRefCheckTimeout: 10s
-
- # Prefixes of files, references in which should not be analyzed.
- notScanned:
- - .github/pull_request_template.md
- - .github/issue_template.md
- - .github/PULL_REQUEST_TEMPLATE
- - .github/ISSUE_TEMPLATE
-
- # Glob patterns describing the files which do not physically exist in the
- # repository but should be treated as existing nevertheless.
- virtualFiles:
- - ../../../issues
- - ../../../issues/*
- - ../../../pulls
- - ../../../pulls/*
-
- # POSIX extended regular expressions that match external references
- # that have to be ignored (not verified).
- # It is an optional parameter, so it can be omitted.
- ignoreRefs:
- - "https://github.com/.*" # Otherwise Resource unavailable (429 too many requests)
-
- # Check localhost links.
- checkLocalhost: false
-
- # Skip links which return 403 or 401 code.
- ignoreAuthFailures: true
-
-# Parameters of scanners for various file types.
-scanners:
- markdown:
- # Flavor of markdown, e.g. GitHub-flavor.
- #
- # This affects which anchors are generated for headers.
- flavor: GitHub
diff --git a/README.md b/README.md
index b848104e1d8..66a29467441 100644
--- a/README.md
+++ b/README.md
@@ -1,14 +1,18 @@
-### Build status
+
-| Develop | Berkeley | Compatible |
-| ------- | -------- | ---------- |
-| [![Build status - develop](https://badge.buildkite.com/0c47452f3ea619d3217d388e0de522b218db28c3e161887a9a.svg?branch=develop)](https://buildkite.com/o-1-labs-2/mina-end-to-end-nightlies) | [![Build status - berkeley](https://badge.buildkite.com/0c47452f3ea619d3217d388e0de522b218db28c3e161887a9a.svg?branch=berkeley)](https://buildkite.com/o-1-labs-2/mina-end-to-end-nightlies) | [![Build status - compatible](https://badge.buildkite.com/0c47452f3ea619d3217d388e0de522b218db28c3e161887a9a.svg?branch=compatible)](https://buildkite.com/o-1-labs-2/mina-end-to-end-nightlies)
+
Mina
-
-
-
+
-# Mina
+ ![GitHub stars](https://img.shields.io/github/stars/minaprotocol/mina) ![GitHub forks](https://img.shields.io/github/forks/minaprotocol/mina)
+
+![GitHub contributors](https://img.shields.io/github/contributors/minaprotocol/mina) ![GitHub commit activity](https://img.shields.io/github/commit-activity/m/minaprotocol/mina) ![GitHub last commit](https://img.shields.io/github/last-commit/minaprotocol/mina)
+
+| Develop[^1] | Compatible[^2] | Master[^3] |
+| ------- | ---------- | ---------- |
+| [![Build status - develop](https://badge.buildkite.com/0c47452f3ea619d3217d388e0de522b218db28c3e161887a9a.svg?branch=develop)](https://buildkite.com/o-1-labs-2/mina-end-to-end-nightlies) | [![Build status - compatible](https://badge.buildkite.com/0c47452f3ea619d3217d388e0de522b218db28c3e161887a9a.svg?branch=compatible)](https://buildkite.com/o-1-labs-2/mina-end-to-end-nightlies) | [![Build status - master](https://badge.buildkite.com/0c47452f3ea619d3217d388e0de522b218db28c3e161887a9a.svg?branch=master)](https://buildkite.com/o-1-labs-2/mina-end-to-end-nightlies)
+
+
Mina is the first cryptocurrency with a lightweight, constant-sized blockchain. This is the main source code repository for the Mina project and contains code for the OCaml protocol implementation, the [Mina Protocol website](https://minaprotocol.com), and wallet. Enjoy!
@@ -60,3 +64,7 @@ The [Node Developers](https://docs.minaprotocol.com/node-developers) docs contai
[Apache 2.0](LICENSE)
Commits older than 2018-10-03 do not have a [LICENSE](LICENSE) file or this notice, but are distributed under the same terms.
+
+[^1]: Develop is a mainline branch containing code that may be incompatible with the current mainnet and may require a major upgrade (hardfork).
+[^2]: Compatible is a mainline branch containing code that can be applied to the current mainnet without a hardfork.
+[^3]: Master is the branch that contains the current mainnet code.
diff --git a/automation/services/mina-bp-stats/.gitignore b/automation/services/mina-bp-stats/.gitignore
deleted file mode 100644
index 62c893550ad..00000000000
--- a/automation/services/mina-bp-stats/.gitignore
+++ /dev/null
@@ -1 +0,0 @@
-.idea/
\ No newline at end of file
diff --git a/automation/services/mina-bp-stats/ingest-lambda/README.md b/automation/services/mina-bp-stats/ingest-lambda/README.md
deleted file mode 100644
index 15ccc99c224..00000000000
--- a/automation/services/mina-bp-stats/ingest-lambda/README.md
+++ /dev/null
@@ -1,10 +0,0 @@
-# Mina Block Producer Ingest Lambda
-
-This is a simple ingestion lambda that tags incoming stats data and lands things in a GCS bucket.
-
-## Configuration
-
-This lambda takes in 2 environment variables that should be configured in the google console.
-
-- `TOKEN` - The token used to authenticate incoming requests
-- `GOOGLE_STORAGE_BUCKET` - The GCS bucket to store incoming data in
diff --git a/automation/services/mina-bp-stats/ingest-lambda/index.js b/automation/services/mina-bp-stats/ingest-lambda/index.js
deleted file mode 100644
index 4ddfbe466d9..00000000000
--- a/automation/services/mina-bp-stats/ingest-lambda/index.js
+++ /dev/null
@@ -1,44 +0,0 @@
-const {Storage} = require('@google-cloud/storage');
-
-exports.handleRequest = async (req, res) => {
- if (process.env.TOKEN === undefined){
- return res.status(500).send("TOKEN envar not set")
- }
- if (process.env.GOOGLE_STORAGE_BUCKET === undefined){
- return res.status(500).send("GOOGLE_STORAGE_BUCKET envar not set")
- }
-
- if (!req.query.token || req.query.token !== process.env.TOKEN){
- return res.status(401).send("Bad token")
- }
-
- const now = new Date()
- const dateStamp = now.toISOString().split('T')[0]
-
- const ipAddress = req.headers['x-forwarded-for'] || req.connection.remoteAddress
- const receivedAt = now.getTime()
-
- const recvPayload = req.body
-
- const bpKeys = recvPayload.daemonStatus.blockProductionKeys
-
- if (bpKeys.length === 0){
- return res.status(400).send("Invalid block production keys")
- }
-
- const payload = {
- receivedAt,
- receivedFrom: ipAddress,
- blockProducerKey: bpKeys[0],
- nodeData: recvPayload
- }
-
- // Upload to gstorage
- const storage = new Storage()
- const myBucket = storage.bucket(process.env.GOOGLE_STORAGE_BUCKET)
- const file = myBucket.file(`${dateStamp}.${now.getTime()}.${recvPayload.blockHeight}.json`)
- const contents = JSON.stringify(payload, null, 2)
- await file.save(contents, {contentType: "application/json"})
-
- return res.status(200).send("OK")
-};
diff --git a/automation/services/mina-bp-stats/ingest-lambda/package.json b/automation/services/mina-bp-stats/ingest-lambda/package.json
deleted file mode 100644
index e07d3274999..00000000000
--- a/automation/services/mina-bp-stats/ingest-lambda/package.json
+++ /dev/null
@@ -1,7 +0,0 @@
-{
- "name": "mina-bp-ingest",
- "version": "1.0.0",
- "dependencies": {
- "@google-cloud/storage": "^5.8.1"
- }
-}
diff --git a/automation/services/mina-bp-stats/sidecar/.gitignore b/automation/services/mina-bp-stats/sidecar/.gitignore
deleted file mode 100644
index 12d6a9c3220..00000000000
--- a/automation/services/mina-bp-stats/sidecar/.gitignore
+++ /dev/null
@@ -1,2 +0,0 @@
-*.deb
-deb_build
diff --git a/automation/services/mina-bp-stats/sidecar/Dockerfile b/automation/services/mina-bp-stats/sidecar/Dockerfile
deleted file mode 100644
index c87006499ff..00000000000
--- a/automation/services/mina-bp-stats/sidecar/Dockerfile
+++ /dev/null
@@ -1,5 +0,0 @@
-FROM python:alpine
-
-COPY sidecar.py /opt/sidecar.py
-
-CMD python3 /opt/sidecar.py
\ No newline at end of file
diff --git a/automation/services/mina-bp-stats/sidecar/README.md b/automation/services/mina-bp-stats/sidecar/README.md
deleted file mode 100644
index c18ad9a270f..00000000000
--- a/automation/services/mina-bp-stats/sidecar/README.md
+++ /dev/null
@@ -1,155 +0,0 @@
-# Mina Block Producer Metrics Sidecar
-
-This is a simple sidecar that communicates with Mina nodes to ship off uptime data for analysis.
-
-Unless you're a founding block producer, you shouldn't need to run this sidecar, and you'll need to talk with the Mina team to get a special URL to make it work properly.
-
-## Configuration
-
-The sidecar takes 2 approaches to configuration, a pair of envars, or a configuration file.
-
-**Note**: Environment variables always take precedence, even if the config file is available and valid.
-
-#### Envars
-- `MINA_BP_UPLOAD_URL` - The URL to upload block producer statistics to
-- `MINA_NODE_URL` - The URL that the sidecar will reach out to to get statistics from
-
-#### Config File
-[config-file]: #config-file
-The mina metrics sidecar will also look at `/etc/mina-sidecar.json` for its configuration variables, and the file should look like this:
-
-```
-{
- "uploadURL": "https://your.upload.url.here?token=someToken",
- "nodeURL": "https://your.mina.node.here:4321"
-}
-```
-
-The `uploadURL` parameter should be given to you by the Mina engineers
-
-## Running with Docker
-Running in docker should be as straight forward as running any other docker image.
-
-#### Pulling from dockerhub
-We push updates to `minaprotocol/mina-bp-stats-sidecar:latest` so you can simply run the following to pull the image down:
-
-```
-$ docker pull minaprotocol/mina-bp-stats-sidecar:latest
-```
-
-#### Building locally
-This is un-necessary if you use the version from dockerhub (which is recommended).
-
-If you want to build this image yourself though, you can run `docker build -t mina-sidecar .` in this folder to build the image while naming it "mina-sidecar".
-
-You should then substitute that in lieu of `minaprotocol/mina-bp-stats-sidecar:latest` for the rest of the commands below.
-
-#### Running with envars
-```bash
-$ docker run --rm -it -e MINA_BP_UPLOAD_URL=https://some-url-here -e MINA_NODE_URL=https://localhost:4321 minaprotocol/mina-bp-stats-sidecar:latest
-```
-
-#### Running with a config file
-```bash
-$ docker run --rm -it -v $(pwd)/mina-sidecar.json:/etc/mina-sidecar.json minaprotocol/mina-bp-stats-sidecar:latest
-```
-#### You can even bake your own docker image with the config file already in it
-```bash
-# Copy the example and make edits
-$ cp mina-sidecar-example.json mina-sidecar.json
-$ vim mina-sidecar.json # Make edits to the config
-# Create custom Dockerfile
-$ cat < Dockerfile.custom
-FROM minaprotocol/mina-bp-stats-sidecar:latest
-COPY your_custom_config.conf /etc/mina-sidecar.json
-EOF
-$ docker build -t your-custom-sidecar -f Dockerfile.custom .
-$ docker run --rm -it your-custom-sidecar
-```
-
-## Running with debian package
-
-Running the sidecar as a debian package is as simple as installing the package, editing the config file, and enabling the service.
-
-#### Installing the package
-
-This package will install 3 files:
-
-- `/usr/local/bin/mina-bp-stats-sidecar` (the mina sidecar program)
-- `/etc/mina-sidecar.json` (the config file for the mina sidecar)
-- `/etc/systemd/system/mina-bp-stats-sidecar.service` (the systemd config to run it as a service)
-
-Installing the deb directly should be done with `apt install`, which will install the dependencies along side the service:
-
-```
-$ apt install ./mina-bp-stats-sidecar.deb
-```
-
-If you prefer to use `dpkg`, you can do so after installing the dependencies:
-
-```
-$ apt-get update && apt-get install python3 python3-certifi
-$ dpkg -i ./mina-bp-stats-sidecar.deb
-```
-
-#### Configuring and Running
-
-See the [Config File](#config-file) section above for what should be in the `/etc/mina-sidecar.json` file.
-
-To (optionally) enable the service to run on reboot you can use:
-
-```
-$ systemctl enable mina-bp-stats-sidecar
-```
-
-Then to start the service itself:
-
-```
-$ service mina-bp-stats-sidecar start
-```
-
-From there you can check that it's running and see the most recent logs with `service mina-bp-stats-sidecar status`:
-
-```
-$ service mina-bp-stats-sidecar status
-● mina-bp-stats-sidecar.service - Mina Block Producer Stats Sidecar
- Loaded: loaded (/etc/systemd/system/mina-bp-stats-sidecar.service; disabled; vendor preset: enabled)
- Active: active (running) since Fri 2021-03-12 02:43:37 CET; 3s ago
- Main PID: 1906 (python3)
- Tasks: 1 (limit: 2300)
- CGroup: /system.slice/mina-bp-stats-sidecar.service
- └─1906 python3 /usr/local/bin/mina-bp-stats-sidecar
-
-INFO:root:Found /etc/mina-sidecar.json on the filesystem, using config file
-INFO:root:Starting Mina Block Producer Sidecar
-INFO:root:Fetching block 2136...
-INFO:root:Got block data
-INFO:root:Finished! New tip 2136...
-```
-
-#### Monitoring/Logging
-
-If you want to get logs from the sidecar service, you can use `journalctl`:
-
-```
-# Similar to "tail -f" for the sidecar service
-$ journalctl -f -u mina-bp-stats-sidecar.service
-```
-
-## Issues
-
-#### HTTP error 400
-
-If you get a 400 while running your sidecar:
-
-```
-INFO:root:Fetching block 2136...
-INFO:root:Got block data
-ERROR:root:HTTP Error 400: Bad Request
-
--- TRACEBACK --
-
-ERROR:root:Sleeping for 30s and trying again
-```
-
-It likely means you're shipping off data to the ingest pipeline without any block producer key configured on your Mina node - since your BP key is your identity we can't accept node data since we don't know who is submitting it!
diff --git a/automation/services/mina-bp-stats/sidecar/build.sh b/automation/services/mina-bp-stats/sidecar/build.sh
deleted file mode 100755
index 0c243741da9..00000000000
--- a/automation/services/mina-bp-stats/sidecar/build.sh
+++ /dev/null
@@ -1,39 +0,0 @@
-#!/usr/bin/env bash
-
-BUILDDIR="${BUILDDIR:-deb_build}"
-
-# Get CWD if run locally or run through "source"
-CURRENT_DIR="$( cd "$( dirname "${BASH_SOURCE[0]}" )" &> /dev/null && pwd )"
-
-rm -rf "${BUILDDIR}"
-
-mkdir -p "${BUILDDIR}/DEBIAN"
-
-cat << EOF > "${BUILDDIR}/DEBIAN/control"
-Package: mina-bp-stats-sidecar
-Version: ${MINA_DEB_VERSION}
-License: Apache-2.0
-Vendor: none
-Architecture: all
-Maintainer: o(1)Labs
-Installed-Size:
-Depends: python3, python3-certifi
-Section: base
-Priority: optional
-Homepage: https://minaprotocol.com/
-Description: A telemetry sidecar that ships stats about node status
- back to Mina HQ for analysis.
- Built from ${GITHASH} by ${BUILD_URL}
-EOF
-
-mkdir -p "${BUILDDIR}/usr/local/bin"
-mkdir -p "${BUILDDIR}/etc"
-mkdir -p "${BUILDDIR}/etc/systemd/system/"
-
-cp "${CURRENT_DIR}/sidecar.py" "${BUILDDIR}/usr/local/bin/mina-bp-stats-sidecar"
-cp "${CURRENT_DIR}/mina-sidecar-example.json" "${BUILDDIR}/etc/mina-sidecar.json"
-cp "${CURRENT_DIR}/mina-bp-stats-sidecar.service" "${BUILDDIR}/etc/systemd/system/mina-bp-stats-sidecar.service"
-
-fakeroot dpkg-deb --build "${BUILDDIR}" "mina-sidecar_${MINA_DEB_VERSION}.deb"
-
-rm -rf "${BUILDDIR}"
diff --git a/automation/services/mina-bp-stats/sidecar/mina-bp-stats-sidecar.service b/automation/services/mina-bp-stats/sidecar/mina-bp-stats-sidecar.service
deleted file mode 100644
index d7fc212813c..00000000000
--- a/automation/services/mina-bp-stats/sidecar/mina-bp-stats-sidecar.service
+++ /dev/null
@@ -1,7 +0,0 @@
-[Unit]
-Description=Mina Block Producer Stats Sidecar
-[Service]
-ExecStart=/usr/local/bin/mina-bp-stats-sidecar
-SuccessExitStatus=143
-[Install]
-WantedBy=multi-user.target
\ No newline at end of file
diff --git a/automation/services/mina-bp-stats/sidecar/mina-sidecar-example.json b/automation/services/mina-bp-stats/sidecar/mina-sidecar-example.json
deleted file mode 100644
index 179a5c8c708..00000000000
--- a/automation/services/mina-bp-stats/sidecar/mina-sidecar-example.json
+++ /dev/null
@@ -1,4 +0,0 @@
-{
- "uploadURL": "https://some-host.somewhere/some-endpoing?token=some-token",
- "nodeURL": "https://some.node.somewhere:3085"
-}
diff --git a/automation/services/mina-bp-stats/sidecar/sidecar.py b/automation/services/mina-bp-stats/sidecar/sidecar.py
deleted file mode 100755
index 2b356ca9329..00000000000
--- a/automation/services/mina-bp-stats/sidecar/sidecar.py
+++ /dev/null
@@ -1,182 +0,0 @@
-#!/usr/bin/env python3
-
-import os
-import json
-import logging
-import time
-import math
-import urllib.request
-import urllib.parse
-
-logging.basicConfig(level=logging.INFO)
-
-MINA_CONFIG_FILE = '/etc/mina-sidecar.json'
-MINA_BLOCK_PRODUCER_URL_ENVAR = 'MINA_BP_UPLOAD_URL'
-MINA_NODE_URL_ENVAR = 'MINA_NODE_URL'
-
-FETCH_INTERVAL = 60 * 3 # Fetch updates every 3 mins
-ERROR_SLEEP_INTERVAL = 30 # On errors, sleep for 30s before trying again
-FINALIZATION_THRESHOLD = 12 # 12 blocks back is considered "finalized"
-
-SYNC_STATUS_GRAPHQL = '''
-query SyncStatus {
- daemonStatus {
- syncStatus
- blockchainLength
- }
-}
-'''
-
-FETCH_BLOCK_GRAPHQL = '''
-query FetchBlockData($blockID: Int!) {
- version
- daemonStatus {
- blockchainLength
- syncStatus
- chainId
- commitId
- highestBlockLengthReceived
- highestUnvalidatedBlockLengthReceived
- stateHash
- blockProductionKeys
- uptimeSecs
- }
- block(height: $blockID) {
- stateHash
- }
-}
-'''
-
-upload_url, node_url = (None, None)
-
-if os.path.exists(MINA_CONFIG_FILE):
- logging.info("Found {} on the filesystem, using config file".format(MINA_CONFIG_FILE))
- with open(MINA_CONFIG_FILE) as f:
- config_file = f.read().strip()
- parsed_config_file = json.loads(config_file)
- upload_url = parsed_config_file['uploadURL'].rstrip('/')
- node_url = parsed_config_file['nodeURL'].rstrip('/')
-
-if MINA_BLOCK_PRODUCER_URL_ENVAR in os.environ:
- logging.info("Found {} in the environment, using envar".format(MINA_BLOCK_PRODUCER_URL_ENVAR))
- upload_url = os.environ[MINA_BLOCK_PRODUCER_URL_ENVAR]
-
-if MINA_NODE_URL_ENVAR in os.environ:
- logging.info("Found {} in the environment, using envar".format(MINA_NODE_URL_ENVAR))
- node_url = os.environ[MINA_NODE_URL_ENVAR]
-
-if upload_url is None:
- raise Exception("Could not find {} or {} environment variable is not set.".format(MINA_CONFIG_FILE, MINA_BLOCK_PRODUCER_URL_ENVAR))
-
-if node_url is None:
- raise Exception("Could not find {} or {} environment variable is not set.".format(MINA_CONFIG_FILE, MINA_NODE_URL_ENVAR))
-
-def fetch_mina_status():
- url = node_url + '/graphql'
- request = urllib.request.Request(
- url,
- headers={'Content-Type': 'application/json'},
- data=json.dumps({
- "query": SYNC_STATUS_GRAPHQL,
- "variables": {},
- "operationName": "SyncStatus"
- }).encode()
- )
- response = urllib.request.urlopen(request)
- response_body = response.read().decode('utf-8')
- parsed_body = json.loads(response_body)['data']
-
- return parsed_body['daemonStatus']['syncStatus'], parsed_body['daemonStatus']['blockchainLength']
-
-def fetch_block(block_id):
- url = node_url + '/graphql'
- request = urllib.request.Request(
- url,
- headers={'Content-Type': 'application/json'},
- data=json.dumps({
- "query": FETCH_BLOCK_GRAPHQL,
- "variables": {'blockID': block_id},
- "operationName": "FetchBlockData"
- }).encode()
- )
-
- response = urllib.request.urlopen(request)
- response_body = response.read().decode('utf-8')
- response_data = json.loads(response_body)['data']
- if response_data is None:
- raise Exception("Response seems to be an error! {}".format(response_body))
-
- return response_data
-
-def send_update(block_data, block_height):
- block_data.update({
- "retrievedAt": math.floor(time.time() * 1000),
- "blockHeight": block_height
- })
- request = urllib.request.Request(
- upload_url,
- headers={'Content-Type': 'application/json'},
- data=json.dumps(block_data).encode()
- )
-
- response = urllib.request.urlopen(request)
-
- assert response.getcode() == 200, "Non-200 from BP flush endpoint! [{}] - ".format(response.getcode(), response.read())
-
-def check_mina_node_sync_state_and_fetch_head():
- while True:
- try:
- mina_sync_status, current_head = fetch_mina_status()
- if mina_sync_status == "SYNCED":
- logging.debug("Mina sync status is acceptable ({}), continuing!".format(mina_sync_status))
- break
- logging.info("Mina sync status is {}. Sleeping for 5s and trying again".format(mina_sync_status))
- except Exception as fetch_exception:
- logging.exception(fetch_exception)
-
- time.sleep(5)
-
- return current_head
-
-if __name__ == '__main__':
- logging.info("Starting Mina Block Producer Sidecar")
-
- # On init ensure our node is synced and happy
- head_block_id = check_mina_node_sync_state_and_fetch_head()
-
- # Go back FINALIZATION_THRESHOLD blocks from the tip to have a finalized block
- current_finalized_tip = head_block_id - FINALIZATION_THRESHOLD
-
- # We're done with init to the point where we can start shipping off data
- while True:
- try:
- logging.info("Fetching block {}...".format(current_finalized_tip))
-
- block_data = fetch_block(current_finalized_tip)
-
- logging.info("Got block data ", block_data)
-
- send_update(block_data, current_finalized_tip)
-
- current_finalized_tip = block_data['daemonStatus']['blockchainLength'] - FINALIZATION_THRESHOLD # Go set a new finalized block
-
- logging.info("Finished! New tip {}...".format(current_finalized_tip))
-
- time.sleep(FETCH_INTERVAL)
- except Exception as e:
- # If we encounter an error at all, log it, sleep, and then kick
- # off the init process to go fetch the current tip/head to ensure
- # we never try to fetch past 290 blocks (k=290)
- logging.exception(e)
-
- logging.error("Sleeping for {}s and trying again".format(ERROR_SLEEP_INTERVAL))
-
- time.sleep(ERROR_SLEEP_INTERVAL)
-
- head_block_id = check_mina_node_sync_state_and_fetch_head()
-
- logging.info("Found new head at {}".format(head_block_id))
-
- current_finalized_tip = head_block_id - FINALIZATION_THRESHOLD
-
- logging.info("Continuing with finalized tip block of {}".format(current_finalized_tip))
diff --git a/buildkite/Makefile b/buildkite/Makefile
index 33de08c0af8..eff56f84029 100644
--- a/buildkite/Makefile
+++ b/buildkite/Makefile
@@ -14,4 +14,22 @@ lint:
find ./src/ -name "*.dhall" -print0 | xargs -I{} -0 -n1 bash -c 'echo "{}" && dhall --ascii lint --inplace {} || exit 255'
format:
- find ./src/ -name "*.dhall" -print0 | xargs -I{} -0 -n1 bash -c 'echo "{}" && dhall --ascii format --inplace {} || exit 255'
\ No newline at end of file
+ find ./src/ -name "*.dhall" -print0 | xargs -I{} -0 -n1 bash -c 'echo "{}" && dhall --ascii format --inplace {} || exit 255'
+
+dump_pipelines:
+ $(eval TMP := $(shell mktemp -d))
+ scripts/dhall/dump_dhall_to_pipelines.sh src/Jobs "$(TMP)"
+
+check_deps: dump_pipelines
+ python3 scripts/dhall/checker.py --root "$(TMP)" deps
+
+check_dirty: dump_pipelines
+ python3 scripts/dhall/checker.py --root "$(TMP)" dirty-when --repo "$(PWD)/../"
+
+check_dups: dump_pipelines
+ python3 scripts/dhall/checker.py --root "$(TMP)" dups
+
+check_names: dump_pipelines
+ python3 scripts/dhall/checker.py --root "$(TMP)" names
+
+all: check_syntax lint format check_deps check_dirty check_dups check_names
\ No newline at end of file
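For reviewers who want to try the new checks locally, a minimal sketch (assuming `dhall-to-yaml` and a `python3` with PyYAML are on PATH; each `check_*` target re-runs `dump_pipelines`, which renders every `src/Jobs/**/*.dhall` into a fresh temp directory):

```bash
cd buildkite
make check_deps     # step dependency resolution
make check_dirty    # dirtyWhen paths exist in the repo
make check_dups check_names
```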
diff --git a/buildkite/scripts/bench/install.sh b/buildkite/scripts/bench/install.sh
new file mode 100644
index 00000000000..ea06e6ee02b
--- /dev/null
+++ b/buildkite/scripts/bench/install.sh
@@ -0,0 +1,18 @@
+#!/bin/bash
+
+set -eo pipefail
+
+# Don't prompt for answers during apt-get install
+export DEBIAN_FRONTEND=noninteractive
+
+sudo apt-get update
+sudo apt-get install -y git apt-transport-https ca-certificates tzdata curl python3
+
+TESTNET_NAME="berkeley"
+
+git config --global --add safe.directory /workdir
+source buildkite/scripts/export-git-env-vars.sh
+
+source buildkite/scripts/debian/install.sh "mina-test-suite,mina-$TESTNET_NAME" 1
+
+pip3 install -r scripts/benchmarks/requirements.txt
\ No newline at end of file
diff --git a/buildkite/scripts/bench/snark_transaction_profiler.sh b/buildkite/scripts/bench/snark_transaction_profiler.sh
new file mode 100755
index 00000000000..2436efb92d4
--- /dev/null
+++ b/buildkite/scripts/bench/snark_transaction_profiler.sh
@@ -0,0 +1,12 @@
+#!/bin/bash
+
+set -eo pipefail
+
+source buildkite/scripts/bench/install.sh
+
+K=1
+MAX_NUM_UPDATES=4
+MIN_NUM_UPDATES=2
+
+echo "-- Run Snark Transaction Profiler with parameters: --k ${K} --max-num-updates ${MAX_NUM_UPDATES} --min-num-updates ${MIN_NUM_UPDATES}"
+python3 ./scripts/benchmarks run --benchmark snark --k ${K} --max-num-updates ${MAX_NUM_UPDATES} --min-num-updates ${MIN_NUM_UPDATES} --outfile snark.out
diff --git a/buildkite/scripts/bench/zkapp_metrics.sh b/buildkite/scripts/bench/zkapp_metrics.sh
new file mode 100755
index 00000000000..829ece87cb1
--- /dev/null
+++ b/buildkite/scripts/bench/zkapp_metrics.sh
@@ -0,0 +1,9 @@
+#!/bin/bash
+
+set -eo pipefail
+
+source buildkite/scripts/bench/install.sh
+
+python3 ./scripts/benchmarks run --benchmark zkapp --outfile zkapp.out
+
+python3 ./scripts/benchmarks run --benchmark heap-usage --outfile heap-usage.out
\ No newline at end of file
diff --git a/buildkite/scripts/build-artifact.sh b/buildkite/scripts/build-artifact.sh
index 263a258ef3c..eddb5f159c0 100755
--- a/buildkite/scripts/build-artifact.sh
+++ b/buildkite/scripts/build-artifact.sh
@@ -17,14 +17,13 @@ else
fi
-# TODO: Stop building lib_p2p multiple times by pulling from buildkite-agent artifacts or docker or somewhere
-echo "--- Build libp2p_helper TODO: use the previously uploaded build artifact"
+echo "--- Build libp2p_helper"
make -C src/app/libp2p_helper
MAINNET_TARGETS=""
[[ ${MINA_BUILD_MAINNET} ]] && MAINNET_TARGETS="src/app/cli/src/mina_mainnet_signatures.exe src/app/rosetta/rosetta_mainnet_signatures.exe src/app/rosetta/ocaml-signer/signer_mainnet_signatures.exe"
-echo "--- Build all major tagets required for packaging"
+echo "--- Build all major targets required for packaging"
echo "Building from Commit SHA: ${MINA_COMMIT_SHA1}"
echo "Rust Version: $(rustc --version)"
dune build "--profile=${DUNE_PROFILE}" $INSTRUMENTED_PARAM \
@@ -45,4 +44,10 @@ dune build "--profile=${DUNE_PROFILE}" $INSTRUMENTED_PARAM \
src/app/rosetta/indexer_test/indexer_test.exe \
src/app/rosetta/ocaml-signer/signer_testnet_signatures.exe \
src/app/test_executive/test_executive.exe \
- src/test/command_line_tests/command_line_tests.exe # 2>&1 | tee /tmp/buildocaml.log
+ src/app/benchmarks/benchmarks.exe \
+ src/app/ledger_export_bench/ledger_export_benchmark.exe \
+ src/app/disk_caching_stats/disk_caching_stats.exe \
+ src/app/heap_usage/heap_usage.exe \
+ src/app/zkapp_limits/zkapp_limits.exe \
+ src/test/command_line_tests/command_line_tests.exe \
+ src/test/archive/patch_archive_test/patch_archive_test.exe
diff --git a/buildkite/scripts/dhall/checker.py b/buildkite/scripts/dhall/checker.py
new file mode 100755
index 00000000000..50ad95acd8c
--- /dev/null
+++ b/buildkite/scripts/dhall/checker.py
@@ -0,0 +1,252 @@
+"""
+ Runs dhall checks like:
+
+ - validate that all dependencies in jobs are covered
+
+ python3 buildkite/scripts/dhall/checker.py --root ./buildkite/src/Jobs deps
+
+ - validate that all dirtyWhen entries refer to existing files
+
+ python3 buildkite/scripts/dhall/checker.py --root ./buildkite/src/Jobs dirty-when
+
+ - print commands for a given job
+
+ python3 buildkite/scripts/dhall/checker.py --root ./buildkite/src/Jobs print-cmd --job SingleNodeTest
+"""
+
+
+import argparse
+import subprocess
+import os
+from glob import glob
+import tempfile
+from pathlib import Path
+import yaml
+
+
+class CmdColors:
+ HEADER = '\033[95m'
+ OKBLUE = '\033[94m'
+ OKCYAN = '\033[96m'
+ OKGREEN = '\033[92m'
+ WARNING = '\033[93m'
+ FAIL = '\033[91m'
+ ENDC = '\033[0m'
+ BOLD = '\033[1m'
+ UNDERLINE = '\033[4m'
+
+
+class PipelineInfoBuilder:
+
+ def __init__(self, temp, file):
+ with open(f"{temp}/{file}") as stream:
+ try:
+ self.pipeline = yaml.safe_load(stream)
+ self.file = file
+ except yaml.YAMLError as exc:
+ print(f"cannot parse correctly {temp}/{file}, due to {exc}")
+ exit(1)
+
+ def get_steps(self):
+ steps = []
+ for step in self.pipeline["pipeline"]["steps"]:
+ key = step["key"]
+ deps = []
+ if "depends_on" in step:
+ for dependsOn in step["depends_on"]:
+ deps.append(dependsOn["step"])
+ commands = step["commands"]
+ steps.append(Step(key, deps, commands))
+ return steps
+
+ def get_dirty(self):
+ dirty = []
+ for dirtyWhen in self.pipeline["spec"]["dirtyWhen"]:
+ path = dirtyWhen["dir"][0] if "dir" in dirtyWhen else ""
+ exts = dirtyWhen["exts"][0] if "exts" in dirtyWhen else ""
+ strictEnd = bool(dirtyWhen["strictEnd"]) if (
+ "strictEnd" in dirtyWhen) else False
+ strictStart = bool(dirtyWhen["strictStart"]) if (
+ "strictStart" in dirtyWhen) else False
+ dirty.append(DirtyWhen(path=path, strictStart=strictStart,
+ strictEnd=strictEnd, extension=exts))
+ return dirty
+
+ def build(self):
+ name = self.pipeline["spec"]["name"]
+ steps = self.get_steps()
+ dirty = self.get_dirty()
+ return PipelineInfo(self.file, self.pipeline, name, steps, dirty)
+
+
+class DirtyWhen:
+
+ def __init__(self, path, extension, strictStart, strictEnd):
+ self.path = path
+ self.extension = extension
+ self.strictStart = strictStart
+ self.strictEnd = strictEnd
+
+ def calculate_path(self,repo):
+ if not self.path:
+ return glob(os.path.join(repo,f'**/*{self.extension}'))
+ if not self.extension:
+ if self.strictEnd and self.strictStart:
+ return glob(os.path.join(repo, f'{self.path}'))
+ if not self.strictEnd and self.strictStart:
+ return glob(os.path.join(repo, f'{self.path}*'))
+ if not self.strictStart and self.strictEnd:
+ return glob(os.path.join(repo, f'**/{self.path}'), recursive= True)
+ if not self.strictStart and not self.strictEnd:
+ return glob(os.path.join(repo, f'*{self.path}*'))
+ return glob(os.path.join(repo, f'{self.path}.{self.extension}'))
+
+ def __str__(self):
+ return f"path: '{self.path}', exts: '{self.extension}', startStrict:{self.strictStart}, startEnd:{self.strictEnd}"
+
+
+class Step:
+
+ def __init__(self, key, deps, commands):
+ self.key = key
+ self.deps = deps
+ self.commands = commands
+
+
+class PipelineInfo:
+
+ def __init__(self, file, pipeline, name, steps, dirty):
+ self.file = file
+ self.name = name
+ self.pipeline = pipeline
+ self.steps = steps
+ self.dirty = dirty
+
+ def keys(self):
+ return [step.key for step in self.steps]
+
+
+parser = argparse.ArgumentParser(description='Runs sanity checks on dumped Dhall pipelines')
+parser.add_argument("--root", required=True,
+ help="root folder where all dhall files reside")
+
+subparsers = parser.add_subparsers(dest="cmd")
+dirty_when = subparsers.add_parser('dirty-when')
+dirty_when.add_argument("--repo", required=True,
+ help="root folder for mina repo")
+
+subparsers.add_parser('deps')
+
+
+run = subparsers.add_parser('print-cmd')
+run.add_argument("--job", required=True, help="job to run")
+run.add_argument("--step", required=False, help="job to run")
+
+subparsers.add_parser('dups')
+
+subparsers.add_parser('names')
+
+args = parser.parse_args()
+
+pipelinesInfo = [PipelineInfoBuilder(args.root, file).build()
+ for file in os.listdir(path=args.root)]
+
+if args.cmd == "deps":
+
+ keys = []
+ for pipeline in pipelinesInfo:
+ keys.extend(pipeline.keys())
+
+ failedSteps = []
+
+ for pipeline in pipelinesInfo:
+ for step in pipeline.steps:
+ for dep in step.deps:
+ if dep not in keys:
+ failedSteps.append((pipeline, step, dep))
+
+ if any(failedSteps):
+ print("Fatal: Missing dependency resolution found:")
+ for (pipeline, step, dep) in failedSteps:
+ file = str.replace(pipeline.file, ".yml", ".dhall")
+ print(
+ f"\t{CmdColors.FAIL}[FATAL] Unresolved dependency for step '{step.key}' in '{file}' depends on non existing job '{dep}'{CmdColors.ENDC}")
+ exit(1)
+ else:
+ print('Pipelines definitions correct')
+
+if args.cmd == "print-cmd":
+ pipeline = next(filter(lambda x: args.job in x.file, pipelinesInfo))
+
+ def get_steps():
+ if args.step:
+ return [next(filter(lambda x: args.step in x.key, pipeline.steps))]
+ else:
+ return pipeline.steps
+
+ steps = get_steps()
+
+ for step in steps:
+ for command in step.commands:
+ if not command.startswith("echo"):
+ print(command)
+
+if args.cmd == "dirty-when":
+
+ failedSteps = []
+
+ for pipeline in pipelinesInfo:
+ for dirty in pipeline.dirty:
+ if not bool(dirty.calculate_path(args.repo)):
+ failedSteps.append((pipeline, dirty))
+
+ if any(failedSteps):
+ print("Fatal: Non existing dirtyWhen path detected:")
+ for (pipeline, dirty) in failedSteps:
+ file = str.replace(pipeline.file, ".yml", ".dhall")
+ print(
+ f"\t{CmdColors.FAIL}[FATAL] Unresolved dirtyWhen path in '{file}' ('{str(dirty)}'){CmdColors.ENDC}")
+ exit(1)
+ else:
+ print('Pipeline definitions correct')
+
+if args.cmd == "dups":
+
+ unique_names = set()
+ dups = []
+
+ for pipeline in pipelinesInfo:
+ for step in pipeline.steps:
+ before = len(unique_names)
+ unique_names.add(step.key)
+ if len(unique_names) == before:
+ dups.append((pipeline,step.key))
+
+ if any(dups):
+ print("Fatal: Step name duplication detected:")
+ for pipeline,step in dups:
+ file = str.replace(pipeline.file, ".yml", ".dhall")
+ print(
+ f"\t{CmdColors.FAIL}[FATAL] Step with name '{step}' in '{file}' is defined more than once{CmdColors.ENDC}")
+ exit(1)
+ else:
+ print('Pipeline definitions correct')
+
+if args.cmd == "names":
+ invalid = []
+
+ for pipeline in pipelinesInfo:
+ stem = str.replace(pipeline.file, ".yml", "")
+ if pipeline.name != stem:
+ invalid.append(pipeline)
+
+ if any(invalid):
+ print("Fatal: Invalid pipeline name detected:")
+ for pipeline in invalid:
+ file = str.replace(pipeline.file, ".yml", ".dhall")
+ print(
+ f"\t{CmdColors.FAIL}[FATAL] Job name '{pipeline.name}' in '{file}' is incorrect. "
+ f"Pipeline name (spec.name) and pipeline filename should match {CmdColors.ENDC}")
+ exit(1)
+ else:
+ print('Pipeline definitions correct')
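To make the expected input shape concrete, here is a hand-written stand-in for a dumped pipeline plus a `deps` run against it; the field names (`spec.name`, `spec.dirtyWhen`, `pipeline.steps[].key`/`commands`/`depends_on[].step`) are taken from the code above, while the job content itself is illustrative:

```bash
mkdir -p /tmp/pipelines
cat > /tmp/pipelines/SampleJob.yml <<'EOF'
spec:
  name: SampleJob
  dirtyWhen:
    - dir: [ "buildkite/src" ]
pipeline:
  steps:
    - key: sample-step
      commands: [ "echo hello" ]
      depends_on:
        - step: missing-step
EOF
python3 buildkite/scripts/dhall/checker.py --root /tmp/pipelines deps
# -> [FATAL] Unresolved dependency: step 'sample-step' in 'SampleJob.dhall'
#    depends on non-existing job 'missing-step'
```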
diff --git a/buildkite/scripts/dhall/dump_dhall_to_pipelines.sh b/buildkite/scripts/dhall/dump_dhall_to_pipelines.sh
new file mode 100755
index 00000000000..84193329b76
--- /dev/null
+++ b/buildkite/scripts/dhall/dump_dhall_to_pipelines.sh
@@ -0,0 +1,24 @@
+#!/bin/bash
+
+ROOT=$1
+OUTPUT=$2
+
+mkdir -p "$OUTPUT"
+
+shopt -s globstar nullglob
+
+echo "Dumping pipelines from '$ROOT' to '$OUTPUT'"
+
+COUNTER=0
+
+for file in "$ROOT"/**/*.dhall
+do
+ filename=$(basename "$file")
+ filename="${filename%.*}"
+
+ dhall-to-yaml --quoted --file "$file" > "$OUTPUT"/"$filename".yml
+
+ COUNTER=$((COUNTER+1))
+done
+
+echo "Done. $COUNTER jobs exported"
diff --git a/buildkite/scripts/dump-mina-type-shapes.sh b/buildkite/scripts/dump-mina-type-shapes.sh
index 57d3c2b2302..5c1d402e215 100755
--- a/buildkite/scripts/dump-mina-type-shapes.sh
+++ b/buildkite/scripts/dump-mina-type-shapes.sh
@@ -20,3 +20,5 @@ export TYPE_SHAPE_FILE=${MINA_COMMIT_SHA1}-type_shape.txt
echo "--- Create type shapes git note for commit: ${MINA_COMMIT_SHA1}"
mina internal dump-type-shapes > ${TYPE_SHAPE_FILE}
+
+source buildkite/scripts/gsutil-upload.sh ${TYPE_SHAPE_FILE} gs://mina-type-shapes
\ No newline at end of file
diff --git a/buildkite/scripts/fuzzy-zkapp-test.sh b/buildkite/scripts/fuzzy-zkapp-test.sh
index 04b2ebb7e78..bc62bcd4a79 100755
--- a/buildkite/scripts/fuzzy-zkapp-test.sh
+++ b/buildkite/scripts/fuzzy-zkapp-test.sh
@@ -21,7 +21,7 @@ export LIBP2P_NIXLESS=1 PATH=/usr/lib/go/bin:$PATH GO=/usr/lib/go/bin/go
# skip running all of the tests that have already succeeded, since dune will
# only retry those tests that failed.
echo "--- Run fuzzy zkapp tests"
-time dune exec "${path}" --profile="${profile}" -j16 -- --timeout "${timeout}" --individual-test-timeout "${individual_test_timeout}" --seed "${RANDOM}"
+time dune exec "${path}" --profile="${profile}" -- --timeout "${timeout}" --individual-test-timeout "${individual_test_timeout}" --seed "${RANDOM}"
STATUS=$?
if [ "$STATUS" -ne 0 ]; then
./scripts/link-coredumps.sh && exit "$STATUS"
diff --git a/buildkite/scripts/gsutil-upload.sh b/buildkite/scripts/gsutil-upload.sh
new file mode 100755
index 00000000000..347ed3e38bd
--- /dev/null
+++ b/buildkite/scripts/gsutil-upload.sh
@@ -0,0 +1,11 @@
+#!/bin/bash
+
+KEY_FILE=/var/secrets/google/key.json
+
+if [ ! -f $KEY_FILE ]; then
+ echo "Cannot use gsutil for upload: key file not found at $KEY_FILE"
+ exit 1
+fi
+
+gcloud auth activate-service-account --key-file=$KEY_FILE
+
+gsutil cp $1 $2
\ No newline at end of file
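The helper is meant to be `source`d, so a missing key file fails the calling script; usage reduces to the following (file name illustrative, bucket taken from this diff):

```bash
source buildkite/scripts/gsutil-upload.sh "${MINA_COMMIT_SHA1}-type_shape.txt" gs://mina-type-shapes
```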
diff --git a/buildkite/scripts/promote-docker.sh b/buildkite/scripts/promote-docker.sh
index 683bbc28acc..b59e9a61043 100755
--- a/buildkite/scripts/promote-docker.sh
+++ b/buildkite/scripts/promote-docker.sh
@@ -4,6 +4,7 @@ set -eo pipefail
CLEAR='\033[0m'
RED='\033[0;31m'
+PUBLISH=0
while [[ "$#" -gt 0 ]]; do case $1 in
-n|--name) NAME="$2"; shift;;
@@ -45,7 +46,7 @@ docker pull ${GCR_REPO}/${NAME}:${VERSION}
source buildkite/scripts/export-git-env-vars.sh
-if [[ -v PUBLISH ]]; then
+if [[ $PUBLISH == 1 ]]; then
TARGET_REPO=docker.io/minaprotocol
docker tag ${GCR_REPO}/${NAME}:${VERSION} ${TARGET_REPO}/${NAME}:${TAG}
docker push "${TARGET_REPO}/${NAME}:${TAG}"
diff --git a/buildkite/scripts/run-snark-transaction-profiler.sh b/buildkite/scripts/run-snark-transaction-profiler.sh
deleted file mode 100755
index 802cd730632..00000000000
--- a/buildkite/scripts/run-snark-transaction-profiler.sh
+++ /dev/null
@@ -1,23 +0,0 @@
-#!/bin/bash
-
-set -eo pipefail
-
-# Don't prompt for answers during apt-get install
-export DEBIAN_FRONTEND=noninteractive
-
-sudo apt-get update
-sudo apt-get install -y git apt-transport-https ca-certificates tzdata curl python3
-
-TESTNET_NAME="berkeley"
-
-git config --global --add safe.directory /workdir
-source buildkite/scripts/export-git-env-vars.sh
-
-source buildkite/scripts/debian/install.sh "mina-${TESTNET_NAME}" 1
-
-K=1
-MAX_NUM_UPDATES=4
-MIN_NUM_UPDATES=2
-
-echo "-- Run Snark Transaction Profiler with parameters: --zkapps --k ${K} --max-num-updates ${MAX_NUM_UPDATES} --min-num-updates ${MIN_NUM_UPDATES}"
-python3 ./scripts/snark_transaction_profiler.py ${K} ${MAX_NUM_UPDATES} ${MIN_NUM_UPDATES}
diff --git a/buildkite/scripts/run_verify_promoted_build_job.sh b/buildkite/scripts/run_verify_promoted_build_job.sh
index da2a3789d3f..713749f642f 100755
--- a/buildkite/scripts/run_verify_promoted_build_job.sh
+++ b/buildkite/scripts/run_verify_promoted_build_job.sh
@@ -107,4 +107,4 @@ if [[ "${REMOVE_PROFILE_FROM_NAME}" -eq 0 ]]; then
else
REMOVE_PROFILE_FROM_NAME="True"
fi
-echo $PROMOTE_PACKAGE_DHALL_DEF'.verify_artifacts '"$DHALL_DEBIANS"' '"$DHALL_DOCKERS"' "'"${NEW_VERSION}"'" '$PROFILES_DHALL_DEF'.Type.'"${PROFILE}"' '$NETWORK_DHALL_DEF'.Type.'"${NETWORK}"' '"${DHALL_CODENAMES}"' '$DEBIAN_CHANNEL_DHALL_DEF'.Type.'"${TO_CHANNEL}"' "'"${TAG}"'" '${REMOVE_PROFILE_FROM_NAME}' '${DHALL_PUBLISH}' ' | dhall-to-yaml --quoted
+echo $PROMOTE_PACKAGE_DHALL_DEF'.verify_artifacts '"$DHALL_DEBIANS"' '"$DHALL_DOCKERS"' "'"${NEW_VERSION}"'" '$PROFILES_DHALL_DEF'.Type.'"${PROFILE}"' '$NETWORK_DHALL_DEF'.Type.'"${NETWORK}"' '"${DHALL_CODENAMES}"' '$DEBIAN_CHANNEL_DHALL_DEF'.Type.'"${TO_CHANNEL}"' "'"${NEW_VERSION}"'" '${REMOVE_PROFILE_FROM_NAME}' '${DHALL_PUBLISH}' ' | dhall-to-yaml --quoted
diff --git a/buildkite/scripts/setup-database-for-archive-node.sh b/buildkite/scripts/setup-database-for-archive-node.sh
index 9aa9062b223..cf494a1ffaa 100755
--- a/buildkite/scripts/setup-database-for-archive-node.sh
+++ b/buildkite/scripts/setup-database-for-archive-node.sh
@@ -5,6 +5,7 @@ set -euo pipefail
user=$1
password=$2
db=$3
+port=$4
sudo service postgresql start
@@ -12,4 +13,4 @@ sudo -u postgres psql -c "CREATE USER ${user} WITH LOGIN SUPERUSER PASSWORD '${p
sudo pg_isready
service postgresql status
sudo -u postgres createdb -O $user $db
-PGPASSWORD=$password psql -h localhost -p 5434 -U $user -d $db -a -f src/app/archive/create_schema.sql
+PGPASSWORD=$password psql -h localhost -p $port -U $user -d $db -a -f src/app/archive/create_schema.sql
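Callers now pass the port explicitly as the fourth positional argument; a sketch using the previously hard-coded value (credentials illustrative):

```bash
buildkite/scripts/setup-database-for-archive-node.sh pguser pgpass archive 5434
```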
diff --git a/buildkite/scripts/unit-test.sh b/buildkite/scripts/unit-test.sh
index de885193086..4cb4c5d29bd 100755
--- a/buildkite/scripts/unit-test.sh
+++ b/buildkite/scripts/unit-test.sh
@@ -21,10 +21,10 @@ export LIBP2P_NIXLESS=1 PATH=/usr/lib/go/bin:$PATH GO=/usr/lib/go/bin/go
time make build
echo "--- Build all targets"
-dune build "${path}" --profile="${profile}" -j16
+dune build "${path}" --profile="${profile}"
echo "--- Check for changes to verification keys"
-time dune runtest "src/app/print_blockchain_snark_vk" --profile="${profile}" -j16
+time dune runtest "src/app/print_blockchain_snark_vk" --profile="${profile}"
# Turn on the proof-cache assertion, so that CI will fail if the proofs need to
# be updated.
@@ -35,8 +35,8 @@ export ERROR_ON_PROOF=true
# skip running all of the tests that have already succeeded, since dune will
# only retry those tests that failed.
echo "--- Run unit tests"
-time dune runtest "${path}" --profile="${profile}" -j16 || \
+time dune runtest "${path}" --profile="${profile}" || \
(./scripts/link-coredumps.sh && \
echo "--- Retrying failed unit tests" && \
- time dune runtest "${path}" --profile="${profile}" -j16 || \
+ time dune runtest "${path}" --profile="${profile}" || \
(./scripts/link-coredumps.sh && false))
diff --git a/buildkite/scripts/version-linter-patch-missing-type-shapes.sh b/buildkite/scripts/version-linter-patch-missing-type-shapes.sh
new file mode 100755
index 00000000000..74c60dc8bd5
--- /dev/null
+++ b/buildkite/scripts/version-linter-patch-missing-type-shapes.sh
@@ -0,0 +1,46 @@
+#!/bin/bash
+
+set -eox pipefail
+
+if [[ $# -ne 1 ]]; then
+ echo "Usage: $0 "
+ exit 1
+fi
+
+git config --global --add safe.directory /workdir
+
+source buildkite/scripts/handle-fork.sh
+source buildkite/scripts/export-git-env-vars.sh
+
+release_branch=${REMOTE}/$1
+
+RELEASE_BRANCH_COMMIT=$(git log -n 1 --format="%h" --abbrev=7 $release_branch)
+
+function revert_checkout() {
+ git checkout $BUILDKITE_COMMIT
+ git submodule sync
+ git submodule update --init --recursive
+}
+
+function checkout_and_dump() {
+ local __commit=$1
+ git checkout $__commit
+ git submodule sync
+ git submodule update --init --recursive
+ eval $(opam config env)
+ TYPE_SHAPE_FILE=${__commit:0:7}-type_shape.txt
+ dune exec src/app/cli/src/mina.exe internal dump-type-shapes > /tmp/${TYPE_SHAPE_FILE}
+ revert_checkout
+ source buildkite/scripts/gsutil-upload.sh /tmp/${TYPE_SHAPE_FILE} gs://mina-type-shapes
+}
+
+if ! gsutil ls gs://mina-type-shapes/$RELEASE_BRANCH_COMMIT 2>/dev/null; then
+ checkout_and_dump $RELEASE_BRANCH_COMMIT
+fi
+
+if [[ -n "${BUILDKITE_PULL_REQUEST_BASE_BRANCH:-}" ]]; then
+ BUILDKITE_PULL_REQUEST_BASE_BRANCH_COMMIT=$(git log -n 1 --format="%h" --abbrev=7 ${REMOTE}/${BUILDKITE_PULL_REQUEST_BASE_BRANCH})
+ if ! gsutil ls gs://mina-type-shapes/$BUILDKITE_PULL_REQUEST_BASE_BRANCH_COMMIT 2>/dev/null; then
+ checkout_and_dump $BUILDKITE_PULL_REQUEST_BASE_BRANCH_COMMIT
+ fi
+fi
\ No newline at end of file
diff --git a/buildkite/scripts/zkapp_metrics.sh b/buildkite/scripts/zkapp_metrics.sh
deleted file mode 100755
index b943465c96d..00000000000
--- a/buildkite/scripts/zkapp_metrics.sh
+++ /dev/null
@@ -1,5 +0,0 @@
-#!/bin/bash
-
-set -eou pipefail
-
-eval $(opam config env) && export PATH=$HOME/.cargo/bin:$PATH && ./scripts/zkapp_metrics.sh
diff --git a/buildkite/src/Command/Base.dhall b/buildkite/src/Command/Base.dhall
index 32ae7f2a820..deefb5a1742 100644
--- a/buildkite/src/Command/Base.dhall
+++ b/buildkite/src/Command/Base.dhall
@@ -131,6 +131,7 @@ let targetToAgent =
, Integration = toMap { size = "integration" }
, QA = toMap { size = "qa" }
, Hardfork = toMap { size = "hardfork" }
+ , Perf = toMap { size = "perf" }
, Multi = toMap { size = "generic-multi" }
}
target
diff --git a/buildkite/src/Command/Bench/Base.dhall b/buildkite/src/Command/Bench/Base.dhall
new file mode 100644
index 00000000000..3d2431943b6
--- /dev/null
+++ b/buildkite/src/Command/Bench/Base.dhall
@@ -0,0 +1,96 @@
+let PipelineMode = ../../Pipeline/Mode.dhall
+
+let PipelineTag = ../../Pipeline/Tag.dhall
+
+let Pipeline = ../../Pipeline/Dsl.dhall
+
+let JobSpec = ../../Pipeline/JobSpec.dhall
+
+let DebianVersions = ../../Constants/DebianVersions.dhall
+
+let RunInToolchain = ../../Command/RunInToolchain.dhall
+
+let Profiles = ../../Constants/Profiles.dhall
+
+let Command = ../../Command/Base.dhall
+
+let Docker = ../../Command/Docker/Type.dhall
+
+let Size = ../Size.dhall
+
+let Benchmarks = ../../Constants/Benchmarks.dhall
+
+let SelectFiles = ../../Lib/SelectFiles.dhall
+
+let Spec =
+ { Type =
+ { key : Text
+ , bench : Text
+ , label : Text
+ , size : Size
+ , name : Text
+ , path : Text
+ , mode : PipelineMode.Type
+ , dependsOn : List Command.TaggedKey.Type
+ , additionalDirtyWhen : List SelectFiles.Type
+ , yellowThreshold : Double
+ , redThreshold : Double
+ }
+ , default =
+ { mode = PipelineMode.Type.PullRequest
+ , size = Size.Medium
+ , dependsOn =
+ DebianVersions.dependsOn
+ DebianVersions.DebVersion.Bullseye
+ Profiles.Type.Standard
+ , additionalDirtyWhen = [] : List SelectFiles.Type
+ , yellowThreshold = 0.1
+ , redThreshold = 0.2
+ }
+ }
+
+let command
+ : Spec.Type -> Command.Type
+ = \(spec : Spec.Type)
+ -> Command.build
+ Command.Config::{
+ , commands =
+ RunInToolchain.runInToolchain
+ (Benchmarks.toEnvList Benchmarks.Type::{=})
+ "./buildkite/scripts/benchmarks.sh ${spec.bench} --red-threshold ${Double/show
+ spec.redThreshold} --yellow-threshold ${Double/show
+ spec.yellowThreshold}"
+ , label = "Perf: ${spec.label}"
+ , key = spec.key
+ , target = spec.size
+ , docker = None Docker.Type
+ , depends_on = spec.dependsOn
+ }
+
+let pipeline
+ : Spec.Type -> Pipeline.Config.Type
+ = \(spec : Spec.Type)
+ -> Pipeline.Config::{
+ , spec = JobSpec::{
+ , dirtyWhen =
+ [ SelectFiles.strictlyStart (SelectFiles.contains "src")
+ , SelectFiles.exactly
+ "buildkite/src/Command/Bench/Base"
+ "dhall"
+ , SelectFiles.contains "scripts/benchmark"
+ , SelectFiles.contains "buildkite/scripts/benchmark"
+ ]
+ # spec.additionalDirtyWhen
+ , path = spec.path
+ , name = spec.name
+ , mode = spec.mode
+ , tags =
+ [ PipelineTag.Type.Long
+ , PipelineTag.Type.Test
+ , PipelineTag.Type.Stable
+ ]
+ }
+ , steps = [ command spec ]
+ }
+
+in { command = command, pipeline = pipeline, Spec = Spec }
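Ignoring the `RunInToolchain` wrapping, the step that `command` builds boils down to a `benchmarks.sh` invocation of this shape (thresholds are the `Spec` defaults; `snark` stands in for a concrete `spec.bench`, and the `INFLUX_*` variables come from `Benchmarks.toEnvList`):

```bash
INFLUX_HOST=... INFLUX_TOKEN=... INFLUX_ORG=... INFLUX_BUCKET_NAME=... \
  ./buildkite/scripts/benchmarks.sh snark --red-threshold 0.2 --yellow-threshold 0.1
```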
diff --git a/buildkite/src/Command/FuzzyZkappTest.dhall b/buildkite/src/Command/FuzzyZkappTest.dhall
deleted file mode 100644
index e3d7ab3bc73..00000000000
--- a/buildkite/src/Command/FuzzyZkappTest.dhall
+++ /dev/null
@@ -1,96 +0,0 @@
-let S = ../Lib/SelectFiles.dhall
-
-let Pipeline = ../Pipeline/Dsl.dhall
-
-let PipelineMode = ../Pipeline/Mode.dhall
-
-let PipelineTag = ../Pipeline/Tag.dhall
-
-let JobSpec = ../Pipeline/JobSpec.dhall
-
-let Command = ../Command/Base.dhall
-
-let RunInToolchain = ../Command/RunInToolchain.dhall
-
-let Docker = ../Command/Docker/Type.dhall
-
-let Size = ../Command/Size.dhall
-
-let Profiles = ../Constants/Profiles.dhall
-
-let Spec =
- { Type =
- { profile : Profiles.Type
- , test_app_path : Text
- , timeout : Natural
- , individual_test_timeout : Natural
- , cmd_target : Size
- , job_path : Text
- , job_name : Text
- , tags : List PipelineTag.Type
- , mode : PipelineMode.Type
- , additional_dirty_when : List S.Type
- }
- , default =
- { profile = Profiles.Type.Dev
- , test_app_path =
- "src/lib/transaction_snark/test/zkapp_fuzzy/zkapp_fuzzy.exe"
- , timeout = 1200
- , individual_test_timeout = 300
- , cmd_target = Size.Small
- , additional_dirty_when = [] : List S.Type
- }
- }
-
-let buildTestCmd
- : Spec.Type -> Command.Type
- = \(spec : Spec.Type)
- -> let timeout = Natural/show spec.timeout
-
- let individual_test_timeout =
- Natural/show spec.individual_test_timeout
-
- let key = "fuzzy-zkapp-unit-test-${Profiles.duneProfile spec.profile}"
-
- in Command.build
- Command.Config::{
- , commands =
- RunInToolchain.runInToolchain
- [ "DUNE_INSTRUMENT_WITH=bisect_ppx", "COVERALLS_TOKEN" ]
- "buildkite/scripts/fuzzy-zkapp-test.sh ${Profiles.duneProfile
- spec.profile} ${spec.test_app_path} ${timeout} ${individual_test_timeout} && buildkite/scripts/upload-partial-coverage-data.sh ${key} dev"
- , label = "Fuzzy zkapp unit tests"
- , key = key
- , target = spec.cmd_target
- , docker = None Docker.Type
- , artifact_paths = [ S.contains "core_dumps/*" ]
- , flake_retry_limit = Some 0
- }
-
-let pipeline
- : Spec.Type -> Pipeline.Config.Type
- = \(spec : Spec.Type)
- -> Pipeline.Config::{
- , spec =
- let unitDirtyWhen =
- [ S.strictlyStart (S.contains "src/lib")
- , S.strictlyStart
- ( S.contains
- "src/lib/transaction_snark/test/zkapp_fuzzy"
- )
- , S.exactly "buildkite/src/Command/FuzzyZkappTest" "dhall"
- , S.exactly "buildkite/scripts/fuzzy-zkapp-test" "sh"
- ]
- # spec.additional_dirty_when
-
- in JobSpec::{
- , dirtyWhen = unitDirtyWhen
- , path = spec.job_path
- , name = spec.job_name
- , tags = spec.tags
- , mode = spec.mode
- }
- , steps = [ buildTestCmd spec ]
- }
-
-in { pipeline = pipeline, Spec = Spec }
diff --git a/buildkite/src/Command/Libp2pHelperBuild.dhall b/buildkite/src/Command/Libp2pHelperBuild.dhall
deleted file mode 100644
index c00c51aa07f..00000000000
--- a/buildkite/src/Command/Libp2pHelperBuild.dhall
+++ /dev/null
@@ -1,41 +0,0 @@
-let Command = ./Base.dhall
-
-let Size = ./Size.dhall
-
-let Toolchain = ../Constants/Toolchain.dhall
-
-let BuildFlags = ../Constants/BuildFlags.dhall
-
-let Cmd = ../Lib/Cmds.dhall
-
-let DebianVersions = ../Constants/DebianVersions.dhall
-
-let commands =
- \(debVersion : DebianVersions.DebVersion)
- -> [ Cmd.run "chmod -R 777 src/app/libp2p_helper"
- , Cmd.run "chmod -R 777 src/libp2p_ipc"
- , Cmd.runInDocker
- Cmd.Docker::{
- , image = Toolchain.image debVersion
- , extraEnv = [ "GO=/usr/lib/go/bin/go" ]
- }
- "make libp2p_helper"
- , Cmd.run
- "cp src/app/libp2p_helper/result/bin/libp2p_helper . && buildkite/scripts/buildkite-artifact-helper.sh libp2p_helper"
- ]
-
-let cmdConfig =
- \(debVersion : DebianVersions.DebVersion)
- -> \(buildFlags : BuildFlags.Type)
- -> Command.build
- Command.Config::{
- , commands = commands debVersion
- , label =
- "Build Libp2p helper for ${DebianVersions.capitalName
- debVersion} ${BuildFlags.toSuffixUppercase
- buildFlags}"
- , key = "libp2p-helper${BuildFlags.toLabelSegment buildFlags}"
- , target = Size.Multi
- }
-
-in { step = cmdConfig }
diff --git a/buildkite/src/Command/MinaArtifact.dhall b/buildkite/src/Command/MinaArtifact.dhall
index 5b91eb3fec5..ddc915278da 100644
--- a/buildkite/src/Command/MinaArtifact.dhall
+++ b/buildkite/src/Command/MinaArtifact.dhall
@@ -16,8 +16,6 @@ let JobSpec = ../Pipeline/JobSpec.dhall
let Size = ./Size.dhall
-let Libp2p = ./Libp2pHelperBuild.dhall
-
let DockerImage = ./DockerImage.dhall
let DebianVersions = ../Constants/DebianVersions.dhall
@@ -257,10 +255,10 @@ let docker_step
, deb_repo = DebianRepo.Type.Local
, deb_profile = spec.profile
, step_key =
- "test-suite-${DebianVersions.lowerName
- spec.debVersion}${Profiles.toLabelSegment
- spec.profile}${BuildFlags.toLabelSegment
- spec.buildFlags}--docker-image"
+ "functional_test_suite-${DebianVersions.lowerName
+ spec.debVersion}${Profiles.toLabelSegment
+ spec.profile}${BuildFlags.toLabelSegment
+ spec.buildFlags}-docker-image"
, network = "berkeley"
}
]
@@ -320,20 +318,14 @@ let onlyDebianPipeline
= \(spec : MinaBuildSpec.Type)
-> pipelineBuilder
spec
- [ Libp2p.step spec.debVersion spec.buildFlags
- , build_artifacts spec
- , publish_to_debian_repo spec
- ]
+ [ build_artifacts spec, publish_to_debian_repo spec ]
let pipeline
: MinaBuildSpec.Type -> Pipeline.Config.Type
= \(spec : MinaBuildSpec.Type)
-> pipelineBuilder
spec
- ( [ Libp2p.step spec.debVersion spec.buildFlags
- , build_artifacts spec
- , publish_to_debian_repo spec
- ]
+ ( [ build_artifacts spec, publish_to_debian_repo spec ]
# docker_commands spec
)
diff --git a/buildkite/src/Command/PatchArchiveTest.dhall b/buildkite/src/Command/PatchArchiveTest.dhall
new file mode 100644
index 00000000000..e220106c236
--- /dev/null
+++ b/buildkite/src/Command/PatchArchiveTest.dhall
@@ -0,0 +1,32 @@
+let Artifacts = ../Constants/Artifacts.dhall
+
+let Command = ./Base.dhall
+
+let Size = ./Size.dhall
+
+let Network = ../Constants/Network.dhall
+
+let RunWithPostgres = ./RunWithPostgres.dhall
+
+let key = "patch-archive-test"
+
+in { step =
+ \(dependsOn : List Command.TaggedKey.Type)
+ -> Command.build
+ Command.Config::{
+ , commands =
+ [ RunWithPostgres.runInDockerWithPostgresConn
+ [ "PATCH_ARCHIVE_TEST_APP=mina-patch-archive-test"
+ , "NETWORK_DATA_FOLDER=/etc/mina/test/archive/sample_db"
+ ]
+ "./src/test/archive/sample_db/archive_db.sql"
+ Artifacts.Type.FunctionalTestSuite
+ (None Network.Type)
+ "./scripts/patch-archive-test.sh && buildkite/scripts/upload-partial-coverage-data.sh ${key}"
+ ]
+ , label = "Archive: Patch Archive test"
+ , key = key
+ , target = Size.Large
+ , depends_on = dependsOn
+ }
+ }
diff --git a/buildkite/src/Command/ReplayerTest.dhall b/buildkite/src/Command/ReplayerTest.dhall
index 6a742c7e7fd..1e031edd4e2 100644
--- a/buildkite/src/Command/ReplayerTest.dhall
+++ b/buildkite/src/Command/ReplayerTest.dhall
@@ -8,6 +8,8 @@ let RunWithPostgres = ./RunWithPostgres.dhall
let Network = ../Constants/Network.dhall
+let key = "replayer-test"
+
in { step =
\(dependsOn : List Command.TaggedKey.Type)
-> Command.build
@@ -16,12 +18,12 @@ in { step =
[ RunWithPostgres.runInDockerWithPostgresConn
([] : List Text)
"./src/test/archive/sample_db/archive_db.sql"
- Artifacts.Type.Archive
+ Artifacts.Type.FunctionalTestSuite
(None Network.Type)
- "./buildkite/scripts/replayer-test.sh"
+ "./buildkite/scripts/replayer-test.sh && buildkite/scripts/upload-partial-coverage-data.sh ${key}"
]
, label = "Archive: Replayer test"
- , key = "replayer-test"
+ , key = key
, target = Size.Large
, depends_on = dependsOn
}
diff --git a/buildkite/src/Command/Size.dhall b/buildkite/src/Command/Size.dhall
index eda37582dc4..a7cadacc02a 100644
--- a/buildkite/src/Command/Size.dhall
+++ b/buildkite/src/Command/Size.dhall
@@ -1 +1 @@
-< XLarge | Large | Medium | Small | Integration | QA | Hardfork | Multi >
+< XLarge | Large | Medium | Small | Integration | QA | Hardfork | Multi | Perf >
diff --git a/buildkite/src/Constants/Benchmarks.dhall b/buildkite/src/Constants/Benchmarks.dhall
new file mode 100644
index 00000000000..d303dd42499
--- /dev/null
+++ b/buildkite/src/Constants/Benchmarks.dhall
@@ -0,0 +1,21 @@
+let Spec =
+ { Type = { tokenEnvName : Text, bucket : Text, org : Text, host : Text }
+ , default =
+ { tokenEnvName = "\\\${INFLUX_TOKEN}"
+ , bucket = "\\\${INFLUX_BUCKET_NAME}"
+ , org = "\\\${INFLUX_ORG}"
+ , host = "\\\${INFLUX_HOST}"
+ }
+ }
+
+let toEnvList =
+ \(spec : Spec.Type)
+ -> [ "INFLUX_HOST=${spec.host}"
+ , "INFLUX_TOKEN=${spec.tokenEnvName}"
+ , "INFLUX_ORG=${spec.org}"
+ , "INFLUX_BUCKET_NAME=${spec.bucket}"
+ ]
+
+let mainlineBranches = "[develop,compatible,master]"
+
+in { Type = Spec, toEnvList = toEnvList, mainlineBranches = mainlineBranches }
diff --git a/buildkite/src/Constants/ContainerImages.dhall b/buildkite/src/Constants/ContainerImages.dhall
index f4916e06773..b42dfd5ecae 100644
--- a/buildkite/src/Constants/ContainerImages.dhall
+++ b/buildkite/src/Constants/ContainerImages.dhall
@@ -4,16 +4,16 @@
-- NOTE: minaToolchainBookworm is also used for building Ubuntu Jammy packages in CI
{ toolchainBase = "codaprotocol/ci-toolchain-base:v3"
, minaToolchainBullseye =
- "gcr.io/o1labs-192920/mina-toolchain@sha256:a1f60d69f3657060d6e7289dc770fd7c36fc5a067853019c2f3f6247cb4b6673"
+ "gcr.io/o1labs-192920/mina-toolchain@sha256:fee11e64a54fd8f026c4632fed7b7b9835b8262a037cdb156deb61d3d0aac8b2"
, minaToolchainBookworm =
- "gcr.io/o1labs-192920/mina-toolchain@sha256:a1f60d69f3657060d6e7289dc770fd7c36fc5a067853019c2f3f6247cb4b6673"
+ "gcr.io/o1labs-192920/mina-toolchain@sha256:fee11e64a54fd8f026c4632fed7b7b9835b8262a037cdb156deb61d3d0aac8b2"
, minaToolchain =
- "gcr.io/o1labs-192920/mina-toolchain@sha256:a1f60d69f3657060d6e7289dc770fd7c36fc5a067853019c2f3f6247cb4b6673"
+ "gcr.io/o1labs-192920/mina-toolchain@sha256:fee11e64a54fd8f026c4632fed7b7b9835b8262a037cdb156deb61d3d0aac8b2"
, elixirToolchain = "elixir:1.10-alpine"
, nodeToolchain = "node:14.13.1-stretch-slim"
, ubuntu2004 = "ubuntu:20.04"
, postgres = "postgres:12.4-alpine"
, xrefcheck =
- "serokell/xrefcheck@sha256:8fbb35a909abc353364f1bd3148614a1160ef3c111c0c4ae84e58fdf16019eeb"
+ "dkhamsing/awesome_bot@sha256:a8adaeb3b3bd5745304743e4d8a6d512127646e420544a6d22d9f58a07f35884"
, nixos = "gcr.io/o1labs-192920/nix-unstable:1.0.0"
}
diff --git a/buildkite/src/Constants/DebianVersions.dhall b/buildkite/src/Constants/DebianVersions.dhall
index f3042f5f067..138a0533803 100644
--- a/buildkite/src/Constants/DebianVersions.dhall
+++ b/buildkite/src/Constants/DebianVersions.dhall
@@ -90,6 +90,8 @@ let minimalDirtyWhen =
, S.exactly "buildkite/src/Constants/ContainerImages" "dhall"
, S.exactly "buildkite/src/Command/HardforkPackageGeneration" "dhall"
, S.exactly "buildkite/src/Command/MinaArtifact" "dhall"
+ , S.exactly "buildkite/src/Command/PatchArchiveTest" "dhall"
+ , S.exactly "buildkite/src/Command/ReplayerTest" "dhall"
, S.strictlyStart (S.contains "buildkite/src/Jobs/Release/MinaArtifact")
, S.strictlyStart (S.contains "dockerfiles/stages")
, S.exactly "scripts/debian/build" "sh"
@@ -100,8 +102,7 @@ let minimalDirtyWhen =
, S.exactly "buildkite/scripts/build-hardfork-package" "sh"
, S.exactly "buildkite/scripts/check-compatibility" "sh"
, S.exactly "buildkite/src/Jobs/Test/RunSnarkProfiler" "dhall"
- , S.exactly "buildkite/scripts/run-snark-transaction-profiler" "sh"
- , S.exactly "scripts/snark_transaction_profiler" "py"
+ , S.exactly "buildkite/scripts/bench/snark_transaction_profiler" "sh"
, S.exactly "buildkite/scripts/version-linter" "sh"
, S.exactly "scripts/version-linter" "py"
]
diff --git a/buildkite/src/Jobs/Lint/Dhall.dhall b/buildkite/src/Jobs/Lint/Dhall.dhall
index 481da3c8817..8854af5414e 100644
--- a/buildkite/src/Jobs/Lint/Dhall.dhall
+++ b/buildkite/src/Jobs/Lint/Dhall.dhall
@@ -14,6 +14,15 @@ let Docker = ../../Command/Docker/Type.dhall
let Size = ../../Command/Size.dhall
+let RunInToolchain = ../../Command/RunInToolchain.dhall
+
+let dump_pipelines_cmd =
+ Cmd.runInDocker
+ Cmd.Docker::{
+ , image = (../../Constants/ContainerImages.dhall).toolchainBase
+ }
+ "buildkite/scripts/dhall/dump_dhall_to_pipelines.sh buildkite/src/Jobs _pipelines"
+
in Pipeline.build
Pipeline.Config::{
, spec = JobSpec::{
@@ -58,5 +67,53 @@ in Pipeline.build
, image = (../../Constants/ContainerImages.dhall).toolchainBase
}
}
+ , Command.build
+ Command.Config::{
+ , commands =
+ [ dump_pipelines_cmd ]
+ # RunInToolchain.runInToolchainBullseye
+ ([] : List Text)
+ "python3 ./buildkite/scripts/dhall/checker.py --root _pipelines deps"
+ , label = "Dhall: deps"
+ , key = "check-dhall-deps"
+ , target = Size.Multi
+ , docker = None Docker.Type
+ }
+ , Command.build
+ Command.Config::{
+ , commands =
+ [ dump_pipelines_cmd ]
+ # RunInToolchain.runInToolchainBullseye
+ ([] : List Text)
+ "python3 ./buildkite/scripts/dhall/checker.py --root _pipelines dirty-when --repo ."
+ , label = "Dhall: dirtyWhen"
+ , key = "check-dhall-dirty"
+ , target = Size.Multi
+ , docker = None Docker.Type
+ }
+ , Command.build
+ Command.Config::{
+ , commands =
+ [ dump_pipelines_cmd ]
+ # RunInToolchain.runInToolchainBullseye
+ ([] : List Text)
+ "python3 ./buildkite/scripts/dhall/checker.py --root _pipelines dups"
+ , label = "Dhall: duplicates"
+ , key = "check-dhall-dups"
+ , target = Size.Multi
+ , docker = None Docker.Type
+ }
+ , Command.build
+ Command.Config::{
+ , commands =
+ [ dump_pipelines_cmd ]
+ # RunInToolchain.runInToolchainBullseye
+ ([] : List Text)
+ "python3 ./buildkite/scripts/dhall/checker.py --root _pipelines names"
+ , label = "Dhall: job names"
+ , key = "check-dhall-jobs"
+ , target = Size.Multi
+ , docker = None Docker.Type
+ }
]
}
diff --git a/buildkite/src/Jobs/Lint/Fast.dhall b/buildkite/src/Jobs/Lint/Fast.dhall
index 22274dd793f..9a9f04f2b62 100644
--- a/buildkite/src/Jobs/Lint/Fast.dhall
+++ b/buildkite/src/Jobs/Lint/Fast.dhall
@@ -45,7 +45,7 @@ in Pipeline.build
, label =
"Fast lint steps; CODEOWNERs, RFCs, Check Snarky & Proof-Systems submodules, Preprocessor Deps"
, key = "lint"
- , target = Size.Small
+ , target = Size.Multi
, docker = Some Docker::{
, image = (../../Constants/ContainerImages.dhall).toolchainBase
}
diff --git a/buildkite/src/Jobs/Lint/HelmChart.dhall b/buildkite/src/Jobs/Lint/HelmChart.dhall
index fc3cd762b70..e8e2abef194 100644
--- a/buildkite/src/Jobs/Lint/HelmChart.dhall
+++ b/buildkite/src/Jobs/Lint/HelmChart.dhall
@@ -39,7 +39,7 @@ in Pipeline.build
[ Cmd.run "HELM_LINT=true buildkite/scripts/helm-ci.sh" ]
, label = "Helm chart lint steps"
, key = "lint-helm-chart"
- , target = Size.Small
+ , target = Size.Multi
, docker = None Docker.Type
}
]
diff --git a/buildkite/src/Jobs/Lint/Merge.dhall b/buildkite/src/Jobs/Lint/Merge.dhall
index 57c2ecc0099..a03ae66b395 100644
--- a/buildkite/src/Jobs/Lint/Merge.dhall
+++ b/buildkite/src/Jobs/Lint/Merge.dhall
@@ -33,7 +33,7 @@ in Pipeline.build
[ Cmd.run "buildkite/scripts/merges-cleanly.sh compatible" ]
, label = "Check merges cleanly into compatible"
, key = "clean-merge-compatible"
- , target = Size.Small
+ , target = Size.Multi
, docker = Some Docker::{
, image = (../../Constants/ContainerImages.dhall).toolchainBase
}
@@ -44,7 +44,7 @@ in Pipeline.build
[ Cmd.run "buildkite/scripts/merges-cleanly.sh develop" ]
, label = "Check merges cleanly into develop"
, key = "clean-merge-develop"
- , target = Size.Small
+ , target = Size.Multi
, docker = Some Docker::{
, image = (../../Constants/ContainerImages.dhall).toolchainBase
}
@@ -55,7 +55,7 @@ in Pipeline.build
[ Cmd.run "buildkite/scripts/merges-cleanly.sh master" ]
, label = "Check merges cleanly into master"
, key = "clean-merge-master"
- , target = Size.Small
+ , target = Size.Multi
, docker = Some Docker::{
, image = (../../Constants/ContainerImages.dhall).toolchainBase
}
diff --git a/buildkite/src/Jobs/Lint/ValidationService.dhall b/buildkite/src/Jobs/Lint/ValidationService.dhall
index fcff4b30627..350de107453 100644
--- a/buildkite/src/Jobs/Lint/ValidationService.dhall
+++ b/buildkite/src/Jobs/Lint/ValidationService.dhall
@@ -75,10 +75,7 @@ in Pipeline.build
(S.contains "buildkite/src/Jobs/Lint/ValidationService")
in JobSpec::{
- , dirtyWhen =
- [ dirtyDhallDir
- , S.strictlyStart (S.contains ValidationService.rootPath)
- ]
+ , dirtyWhen = [ dirtyDhallDir ]
, path = "Lint"
, name = "ValidationService"
, tags =
diff --git a/buildkite/src/Jobs/Lint/Xrefcheck.dhall b/buildkite/src/Jobs/Lint/Xrefcheck.dhall
index c8fead2e93d..d586f08c5cf 100644
--- a/buildkite/src/Jobs/Lint/Xrefcheck.dhall
+++ b/buildkite/src/Jobs/Lint/Xrefcheck.dhall
@@ -12,8 +12,6 @@ let Cmd = ../../Lib/Cmds.dhall
let Command = ../../Command/Base.dhall
-let Docker = ../../Command/Docker/Type.dhall
-
let Size = ../../Command/Size.dhall
let B/SoftFail = B.definitions/commandStep/properties/soft_fail/Type
@@ -23,7 +21,8 @@ in Pipeline.build
, spec = JobSpec::{
, dirtyWhen =
[ SelectFiles.strictly SelectFiles::{ exts = Some [ "md" ] }
- , SelectFiles.strictly (SelectFiles.contains ".xrefcheck.yml")
+ , SelectFiles.strictlyStart
+ (SelectFiles.contains "buildkite/src/Jobs/Lint/Xrefcheck.dhall")
]
, path = "Lint"
, name = "Xrefcheck"
@@ -36,15 +35,27 @@ in Pipeline.build
, steps =
[ Command.build
Command.Config::{
- , commands = [] : List Cmd.Type
+ , commands =
+ [ Cmd.runInDocker
+ Cmd.Docker::{
+ , image = (../../Constants/ContainerImages.dhall).xrefcheck
+ }
+ ( "awesome_bot -allow-dupe "
+ ++ "--allow-redirect "
+ ++ "--allow 403,401 "
+ ++ "--skip-save-results "
+ ++ "--files "
+ ++ "`find . -name \"*.md\" "
+ ++ "! -path \"./src/lib/crypto/kimchi_bindings/*\" "
+ ++ "! -path \"./src/lib/crypto/proof-systems/*\" "
+ ++ "! -path \"./src/external/*\" "
+ ++ "` "
+ )
+ ]
, label = "Verifies references in markdown"
, key = "xrefcheck"
, target = Size.Small
, soft_fail = Some (B/SoftFail.Boolean True)
- , docker = Some Docker::{
- , image = (../../Constants/ContainerImages.dhall).xrefcheck
- , shell = None (List Text)
- }
}
]
}
diff --git a/buildkite/src/Jobs/Test/ArchiveNodeUnitTest.dhall b/buildkite/src/Jobs/Test/ArchiveNodeUnitTest.dhall
index 2cd73c0090d..b3799a03535 100644
--- a/buildkite/src/Jobs/Test/ArchiveNodeUnitTest.dhall
+++ b/buildkite/src/Jobs/Test/ArchiveNodeUnitTest.dhall
@@ -24,6 +24,8 @@ let password = "codarules"
let db = "archiver"
+let port = "5433"
+
let command_key = "archive-unit-tests"
in Pipeline.build
@@ -50,14 +52,14 @@ in Pipeline.build
[ "POSTGRES_PASSWORD=${password}"
, "POSTGRES_USER=${user}"
, "POSTGRES_DB=${db}"
- , "MINA_TEST_POSTGRES=postgres://${user}:${password}@localhost:5434/${db}"
+ , "MINA_TEST_POSTGRES=postgres://${user}:${password}@localhost:${port}/${db}"
, "GO=/usr/lib/go/bin/go"
, "DUNE_INSTRUMENT_WITH=bisect_ppx"
, "COVERALLS_TOKEN"
]
( Prelude.Text.concatSep
" && "
- [ "bash buildkite/scripts/setup-database-for-archive-node.sh ${user} ${password} ${db}"
+ [ "bash buildkite/scripts/setup-database-for-archive-node.sh ${user} ${password} ${db} ${port}"
, WithCargo.withCargo
"eval \\\$(opam config env) && dune runtest src/app/archive && buildkite/scripts/upload-partial-coverage-data.sh ${command_key} dev"
]
diff --git a/buildkite/src/Jobs/Test/FuzzyZkappTest.dhall b/buildkite/src/Jobs/Test/FuzzyZkappTest.dhall
deleted file mode 100644
index e983db0383e..00000000000
--- a/buildkite/src/Jobs/Test/FuzzyZkappTest.dhall
+++ /dev/null
@@ -1,22 +0,0 @@
-let S = ../../Lib/SelectFiles.dhall
-
-let Pipeline = ../../Pipeline/Dsl.dhall
-
-let PipelineMode = ../../Pipeline/Mode.dhall
-
-let PipelineTag = ../../Pipeline/Tag.dhall
-
-let Command = ../../Command/FuzzyZkappTest.dhall
-
-in Pipeline.build
- ( Command.pipeline
- Command.Spec::{
- , job_path = "Test"
- , job_name = "FuzzyZkappTest"
- , tags = [ PipelineTag.Type.VeryLong, PipelineTag.Type.Test ]
- , mode = PipelineMode.Type.Stable
- , additional_dirty_when =
- [ S.exactly "buildkite/src/Jobs/Test/FuzzyZkappTest" "dhall" ]
- , timeout = 1200
- }
- )
diff --git a/buildkite/src/Jobs/Test/PatchArchiveTest.dhall b/buildkite/src/Jobs/Test/PatchArchiveTest.dhall
new file mode 100644
index 00000000000..a0414c85a4f
--- /dev/null
+++ b/buildkite/src/Jobs/Test/PatchArchiveTest.dhall
@@ -0,0 +1,44 @@
+let S = ../../Lib/SelectFiles.dhall
+
+let Pipeline = ../../Pipeline/Dsl.dhall
+
+let PipelineTag = ../../Pipeline/Tag.dhall
+
+let JobSpec = ../../Pipeline/JobSpec.dhall
+
+let PatchArchiveTest = ../../Command/PatchArchiveTest.dhall
+
+let Profiles = ../../Constants/Profiles.dhall
+
+let Network = ../../Constants/Network.dhall
+
+let Artifacts = ../../Constants/Artifacts.dhall
+
+let Dockers = ../../Constants/DockerVersions.dhall
+
+let dependsOn =
+ Dockers.dependsOn
+ Dockers.Type.Bullseye
+ (None Network.Type)
+ Profiles.Type.Standard
+ Artifacts.Type.FunctionalTestSuite
+
+in Pipeline.build
+ Pipeline.Config::{
+ , spec = JobSpec::{
+ , dirtyWhen =
+ [ S.strictlyStart (S.contains "src")
+ , S.exactly "scripts/patch-archive-test" "sh"
+ , S.exactly "buildkite/src/Jobs/Test/PatchArchiveTest" "dhall"
+ , S.exactly "buildkite/src/Command/PatchArchiveTest" "dhall"
+ ]
+ , path = "Test"
+ , name = "PatchArchiveTest"
+ , tags =
+ [ PipelineTag.Type.Long
+ , PipelineTag.Type.Test
+ , PipelineTag.Type.Stable
+ ]
+ }
+ , steps = [ PatchArchiveTest.step dependsOn ]
+ }
diff --git a/buildkite/src/Jobs/Test/ReplayerTest.dhall b/buildkite/src/Jobs/Test/ReplayerTest.dhall
index 0e3d665e2ce..f9caa4f30a7 100644
--- a/buildkite/src/Jobs/Test/ReplayerTest.dhall
+++ b/buildkite/src/Jobs/Test/ReplayerTest.dhall
@@ -21,7 +21,7 @@ let dependsOn =
Dockers.Type.Bullseye
(None Network.Type)
Profiles.Type.Standard
- Artifacts.Type.Archive
+ Artifacts.Type.FunctionalTestSuite
in Pipeline.build
Pipeline.Config::{
diff --git a/buildkite/src/Jobs/Test/RunSnarkProfiler.dhall b/buildkite/src/Jobs/Test/RunSnarkProfiler.dhall
index 0523f1e1e74..9bfa98eb8a5 100644
--- a/buildkite/src/Jobs/Test/RunSnarkProfiler.dhall
+++ b/buildkite/src/Jobs/Test/RunSnarkProfiler.dhall
@@ -32,7 +32,7 @@ let buildTestCmd
, commands =
RunInToolchain.runInToolchain
([] : List Text)
- "buildkite/scripts/run-snark-transaction-profiler.sh"
+ "buildkite/scripts/bench/snark_transaction_profiler.sh"
, label = "Snark Transaction Profiler"
, key = "snark-transaction-profiler"
, target = cmd_target
@@ -48,9 +48,9 @@ in Pipeline.build
[ S.strictlyStart (S.contains "src")
, S.exactly "buildkite/src/Jobs/Test/RunSnarkProfiler" "dhall"
, S.exactly
- "buildkite/scripts/run-snark-transaction-profiler"
+ "buildkite/scripts/bench/snark_transaction_profiler"
"sh"
- , S.exactly "scripts/snark_transaction_profiler" "py"
+ , S.strictlyStart (S.contains "scripts/benchmarks")
]
in JobSpec::{
diff --git a/buildkite/src/Jobs/Test/TerraformNetworkTest.dhall b/buildkite/src/Jobs/Test/TerraformNetworkTest.dhall
index 336f4612cb6..3108316b34b 100644
--- a/buildkite/src/Jobs/Test/TerraformNetworkTest.dhall
+++ b/buildkite/src/Jobs/Test/TerraformNetworkTest.dhall
@@ -35,8 +35,8 @@ in Pipeline.build
Pipeline.Config::{
, spec =
let unitDirtyWhen =
- [ S.strictlyStart (S.contains "src/automation/terraform")
- , S.strictlyStart (S.contains "src/helm")
+ [ S.strictlyStart (S.contains "automation/terraform")
+ , S.strictlyStart (S.contains "helm")
, S.strictlyStart
(S.contains "buildkite/src/Jobs/Test/TerraformNetworkTest")
, S.strictlyStart
diff --git a/buildkite/src/Jobs/Test/TestnetIntegrationTests.dhall b/buildkite/src/Jobs/Test/TestnetIntegrationTests.dhall
index 933a21bae6e..a519b2f4016 100644
--- a/buildkite/src/Jobs/Test/TestnetIntegrationTests.dhall
+++ b/buildkite/src/Jobs/Test/TestnetIntegrationTests.dhall
@@ -38,20 +38,15 @@ in Pipeline.build
, S.strictlyStart (S.contains "dockerfiles")
, S.strictlyStart
(S.contains "buildkite/src/Jobs/Test/TestnetIntegrationTest")
- , S.strictlyStart
- (S.contains "buildkite/src/Jobs/Command/TestExecutive")
+ , S.strictlyStart (S.contains "buildkite/src/Command/TestExecutive")
, S.strictlyStart
(S.contains "automation/terraform/modules/o1-integration")
, S.strictlyStart
(S.contains "automation/terraform/modules/kubernetes/testnet")
, S.strictlyStart
- ( S.contains
- "automation/buildkite/script/run-test-executive-cloud"
- )
+ (S.contains "buildkite/scripts/run-test-executive-cloud")
, S.strictlyStart
- ( S.contains
- "automation/buildkite/script/run-test-executive-local"
- )
+ (S.contains "buildkite/scripts/run-test-executive-local")
]
, path = "Test"
, name = "TestnetIntegrationTests"
diff --git a/buildkite/src/Jobs/Test/TestnetIntegrationTestsLong.dhall b/buildkite/src/Jobs/Test/TestnetIntegrationTestsLong.dhall
index c02eb9c5262..4047d2212fd 100644
--- a/buildkite/src/Jobs/Test/TestnetIntegrationTestsLong.dhall
+++ b/buildkite/src/Jobs/Test/TestnetIntegrationTestsLong.dhall
@@ -38,8 +38,7 @@ in Pipeline.build
, S.strictlyStart (S.contains "dockerfiles")
, S.strictlyStart
(S.contains "buildkite/src/Jobs/Test/TestnetIntegrationTest")
- , S.strictlyStart
- (S.contains "buildkite/src/Jobs/Command/TestExecutive")
+ , S.strictlyStart (S.contains "buildkite/src/Command/TestExecutive")
, S.strictlyStart
(S.contains "automation/terraform/modules/o1-integration")
, S.strictlyStart
diff --git a/buildkite/src/Jobs/Test/VersionLint.dhall b/buildkite/src/Jobs/Test/VersionLint.dhall
index bc47db104ad..1368e8e70bc 100644
--- a/buildkite/src/Jobs/Test/VersionLint.dhall
+++ b/buildkite/src/Jobs/Test/VersionLint.dhall
@@ -1,5 +1,3 @@
-let Cmd = ../../Lib/Cmds.dhall
-
let S = ../../Lib/SelectFiles.dhall
let B = ../../External/Buildkite.dhall
@@ -34,9 +32,9 @@ let buildTestCmd
RunInToolchain.runInToolchain
([] : List Text)
"buildkite/scripts/dump-mina-type-shapes.sh"
- # [ Cmd.run
- "gsutil cp \$(git log -n 1 --format=%h --abbrev=7)-type_shape.txt \$MINA_TYPE_SHAPE gs://mina-type-shapes"
- ]
+ # RunInToolchain.runInToolchain
+ ([] : List Text)
+ "buildkite/scripts/version-linter-patch-missing-type-shapes.sh ${release_branch}"
# RunInToolchain.runInToolchain
([] : List Text)
"buildkite/scripts/version-linter.sh ${release_branch}"
diff --git a/buildkite/src/Jobs/Test/ZkappMetrics.dhall b/buildkite/src/Jobs/Test/ZkappMetrics.dhall
index 69fda58815b..90117a87a34 100644
--- a/buildkite/src/Jobs/Test/ZkappMetrics.dhall
+++ b/buildkite/src/Jobs/Test/ZkappMetrics.dhall
@@ -8,18 +8,29 @@ let Command = ../../Command/Base.dhall
let RunInToolchain = ../../Command/RunInToolchain.dhall
+let DebianVersions = ../../Constants/DebianVersions.dhall
+
+let Profiles = ../../Constants/Profiles.dhall
+
let Docker = ../../Command/Docker/Type.dhall
let Size = ../../Command/Size.dhall
let JobSpec = ../../Pipeline/JobSpec.dhall
+let dependsOn =
+ DebianVersions.dependsOn
+ DebianVersions.DebVersion.Bullseye
+ Profiles.Type.Standard
+
in Pipeline.build
Pipeline.Config::{
, spec = JobSpec::{
, dirtyWhen =
[ S.strictlyStart (S.contains "buildkite/src/Jobs/Test/ZkappMetrics")
, S.strictlyStart (S.contains "src")
+ , S.exactly "buildkite/scripts/bench/zkapp_metrics" "sh"
+ , S.strictlyStart (S.contains "scripts/benchmarks")
]
, path = "Test"
, name = "ZkappMetrics"
@@ -35,11 +46,12 @@ in Pipeline.build
, commands =
RunInToolchain.runInToolchain
([] : List Text)
- "./buildkite/scripts/zkapp_metrics.sh"
+ "./buildkite/scripts/bench/zkapp_metrics.sh"
, label = "Zkapp Metrics"
, key = "zkapp-metrics"
, target = Size.Medium
, docker = None Docker.Type
+ , depends_on = dependsOn
}
]
}
diff --git a/buildkite/src/Lib/Cmds.dhall b/buildkite/src/Lib/Cmds.dhall
index 63d5c3d7950..a64bfc8800c 100644
--- a/buildkite/src/Lib/Cmds.dhall
+++ b/buildkite/src/Lib/Cmds.dhall
@@ -71,11 +71,11 @@ let module =
= if docker.useBash then "/bin/bash" else "/bin/sh"
in { line =
- "docker run -it --rm --entrypoint ${entrypoint} --init --volume ${sharedDir}:/shared --volume ${outerDir}:/workdir --workdir /workdir${envVars}${ if docker.privileged
+ "docker run -it --rm --entrypoint ${entrypoint} --init --volume /var/secrets:/var/secrets --volume ${sharedDir}:/shared --volume ${outerDir}:/workdir --workdir /workdir${envVars}${ if docker.privileged
- then " --privileged"
+ then " --privileged"
- else ""} ${docker.image} -c '${inner.line}'"
+ else ""} ${docker.image} -c '${inner.line}'"
, readable =
Optional/map
Text
@@ -142,7 +142,7 @@ let tests =
let dockerExample =
assert
: { line =
- "docker run -it --rm --entrypoint /bin/bash --init --volume /var/buildkite/shared:/shared --volume \\\$BUILDKITE_BUILD_CHECKOUT_PATH:/workdir --workdir /workdir --env ENV1 --env ENV2 --env TEST foo/bar:tag -c 'echo hello'"
+ "docker run -it --rm --entrypoint /bin/bash --init --volume /var/secrets:/var/secrets --volume /var/buildkite/shared:/shared --volume \\\$BUILDKITE_BUILD_CHECKOUT_PATH:/workdir --workdir /workdir --env ENV1 --env ENV2 --env TEST foo/bar:tag -c 'echo hello'"
, readable = Some "Docker@foo/bar:tag ( echo hello )"
}
=== M.inDocker
@@ -154,7 +154,7 @@ let tests =
let cacheExample =
assert
- : "./buildkite/scripts/cache-through.sh data.tar \"docker run -it --rm --entrypoint /bin/bash --init --volume /var/buildkite/shared:/shared --volume \\\$BUILDKITE_BUILD_CHECKOUT_PATH:/workdir --workdir /workdir --env ENV1 --env ENV2 --env TEST foo/bar:tag -c 'echo hello > /tmp/data/foo.txt && tar cvf data.tar /tmp/data'\""
+ : "./buildkite/scripts/cache-through.sh data.tar \"docker run -it --rm --entrypoint /bin/bash --init --volume /var/secrets:/var/secrets --volume /var/buildkite/shared:/shared --volume \\\$BUILDKITE_BUILD_CHECKOUT_PATH:/workdir --workdir /workdir --env ENV1 --env ENV2 --env TEST foo/bar:tag -c 'echo hello > /tmp/data/foo.txt && tar cvf data.tar /tmp/data'\""
=== M.format
( M.cacheThrough
M.Docker::{
diff --git a/dockerfiles/stages/3-toolchain b/dockerfiles/stages/3-toolchain
index 3f66574da27..03ae9a6a9a8 100644
--- a/dockerfiles/stages/3-toolchain
+++ b/dockerfiles/stages/3-toolchain
@@ -8,10 +8,11 @@ ARG deb_codename=focal
ARG DOCKER_VERSION=19.03.4
ARG TERRAFORM_VERSION=0.14.11
-ARG DEBS3_VERSION=0.11.6
+ARG DEBS3_VERSION=0.11.7
ARG DHALL_VERSION=1.41.1
ARG DHALL_JSON_VERSION=1.7.10
ARG DHALL_BASH_VERSION=1.0.40
+ARG INFLUXDB_CLI_VERSION=2.7.5
USER root
@@ -70,6 +71,14 @@ RUN curl -sLO https://github.com/MinaProtocol/deb-s3/releases/download/${DEBS3_V
&& gem install deb-s3-${DEBS3_VERSION}.gem \
&& rm -f deb-s3-${DEBS3_VERSION}.gem
+# --- InfluxDB CLI
+# Used by CI benchmark jobs to upload results to InfluxDB
+RUN wget https://download.influxdata.com/influxdb/releases/influxdb2-client-${INFLUXDB_CLI_VERSION}-linux-amd64.tar.gz \
+ && mkdir -p "influx_dir" && tar xvzf influxdb2-client-${INFLUXDB_CLI_VERSION}-linux-amd64.tar.gz -C influx_dir \
+ && sudo cp influx_dir/influx /usr/local/bin/ \
+ && rm influxdb2-client-${INFLUXDB_CLI_VERSION}-linux-amd64.tar.gz \
+ && rm -rf influx_dir
+
# --- Docker Daemon
RUN curl -sL https://download.docker.com/linux/static/stable/x86_64/docker-${DOCKER_VERSION}.tgz \
| tar --extract --gzip --strip-components 1 --directory=/usr/bin --file=-
diff --git a/nix/libp2p_helper.json b/nix/libp2p_helper.json
index 7a0a9b7e34a..783913efe5a 100644
--- a/nix/libp2p_helper.json
+++ b/nix/libp2p_helper.json
@@ -1 +1 @@
-{"go.mod":"d5de7e35a76f5c9ce7d6c98f0da39c763961e77b8c94761b1e89ab4bdfdc2a97","go.sum":"586fd920114d3875ec3e1d739921d77d30ad8e2f297b67781ca41d25a81b65a9","vendorSha256":"sha256-vyKrKi5bqm8Mf2rUOojSY0IXHcuNpcVNvd1Iu1RBxDo="}
\ No newline at end of file
+{"go.mod":"6c45e03ccef1f79541f021cf358fa69bf80cb69b58ae92c776bc09cbb1cc8096","go.sum":"d0f40cfc7b2dc7000cd0a0be051c6a832bdbf880fee88550f2b409690cc18774","vendorSha256":"sha256-x/ZReaHGNsDshohcF4+p9Xj/JTK3gMUyeTgJkaN/eUc="}
\ No newline at end of file
diff --git a/scripts/benchmarks.sh b/scripts/benchmarks.sh
deleted file mode 100755
index b072c9983a4..00000000000
--- a/scripts/benchmarks.sh
+++ /dev/null
@@ -1,19 +0,0 @@
-#!/bin/sh
-
-# runs inline benchmarks
-# requires that app/benchmarks/benchmarks.exe is built
-# run with -help to see available flags
-
-export BENCHMARKS_RUNNER=TRUE
-export X_LIBRARY_INLINING=true
-
-GIT_ROOT="`git rev-parse --show-toplevel`"
-
-BENCHMARK_EXE=$GIT_ROOT/_build/default/src/app/benchmarks/benchmarks.exe
-
-if [ ! -f "$BENCHMARK_EXE" ]; then
- echo "Please run 'make benchmarks' before running this script";
- exit 1
-fi
-
-exec $BENCHMARK_EXE "$@" -run-without-cross-library-inlining -suppress-warnings
diff --git a/scripts/benchmarks/.gitignore b/scripts/benchmarks/.gitignore
new file mode 100644
index 00000000000..749ccdafd4f
--- /dev/null
+++ b/scripts/benchmarks/.gitignore
@@ -0,0 +1,4 @@
+# Byte-compiled / optimized / DLL files
+__pycache__/
+*.py[cod]
+*$py.class
diff --git a/scripts/benchmarks/README.md b/scripts/benchmarks/README.md
new file mode 100644
index 00000000000..afeda98a98d
--- /dev/null
+++ b/scripts/benchmarks/README.md
@@ -0,0 +1,97 @@
+# Benchmarks
+
+A Python app for running all major Mina benchmarks of various types:
+
+- mina-benchmarks
+- snark-profiler
+- heap-usage
+- zkapp-limits
+- ledger-export
+
+It requires the underlying apps to be present on the OS. By default the app uses
+their official names (like mina, mina-heap-usage etc.).
+
+In order to upload files to InfluxDB, all four Influx env vars need to be defined:
+- INFLUX_BUCKET_NAME
+- INFLUX_ORG
+- INFLUX_TOKEN
+- INFLUX_HOST
+
+More details here:
+https://docs.influxdata.com/influxdb/cloud/reference/cli/influx/#credential-precedence
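+
+A minimal setup sketch (the values below are placeholders, not real credentials):
+
+```commandline
+export INFLUX_HOST=https://your-influx-host:8086
+export INFLUX_TOKEN=your-token
+export INFLUX_ORG=your-org
+export INFLUX_BUCKET_NAME=your-bucket
+```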
+
+## Installation
+
+The project requires Python 3+.
+
+
+```commandline
+pip install -r ./scripts/benchmarks/requirements.txt
+```
+
+## Usage
+
+```commandline
+python3 ./scripts/benchmarks run --benchmark mina-base --path _build/default/src/app/benchmarks/benchmarks.exe --influx --branch compatible --format csv --outfile mina_base.csv
+```
+
+## Commands
+
+### ls
+
+Prints all supported benchmarks
+
+```commandline
+ python3 scripts/benchmarks ls
+```
+
+### run
+
+Runs a benchmark.
+
+INFO: each benchmark can have its own set of additional parameters
+
+example:
+```commandline
+python3 scripts/benchmarks run --benchmark snark --path _build/default/src/app/cli/src/mina.exe --branch compatible --outfile zkap_limits.csv
+```
+
+### parse
+
+Parses the textual output of a benchmark to csv.
+
+```commandline
+python3 scripts/benchmarks parse --benchmark mina-base --influx --branch compatible --infile output.out --outfile mina_base.csv
+```
+
+
+### compare
+
+Compares results against the moving average fetched from InfluxDB. For example, with a
+moving average of 100, `--yellow-threshold 0.1` warns above 110 and `--red-threshold 0.2`
+fails the build above 120.
+
+```commandline
+python3 scripts/benchmarks compare --infile vrf_lib_tests_mina_base.csv --yellow-threshold 0.1 --red-threshold 0.2
+```
+
+### upload
+
+Uploads data to InfluxDB.
+
+```commandline
+python3 scripts/benchmarks upload --infile mina_base_mina_base.csv
+```
+
+### test
+
+Aggregates all the above commands, with logic to upload data only if the branch is among the mainline branches.
+
+```commandline
+python3 scripts/benchmarks test --benchmark snark --path _build/default/src/app/cli/src/mina.exe --branch compatible --tmpfile zkap_limits.csv
+```
+
+
+## Further work
+
+The application is meant to run in CI. Currently it fails when values exceed the moving
+average. A process still needs to be agreed on for handling situations where an increase
+in value is expected and the values should nevertheless be uploaded to InfluxDB. One
+proposal is an env var which can bypass the comparison, plus additional logic which
+accepts a value that exceeds the moving average but not the highest historical one
+(as otherwise the moving average would keep rejecting further values, and we would need
+to bypass them as well until the average catches up with the expected increase).
diff --git a/scripts/benchmarks/__main__.py b/scripts/benchmarks/__main__.py
new file mode 100644
index 00000000000..d76c3825a59
--- /dev/null
+++ b/scripts/benchmarks/__main__.py
@@ -0,0 +1,149 @@
+"""
+ Mina benchmark runner
+
+ Capable of running benchmarks, parsing their output to csv, comparing with historical data stored in InfluxDB and uploading new results.
+
+ Requirements:
+
+ all INFLUX_* env vars need to be defined (INFLUX_HOST,INFLUX_TOKEN,INFLUX_BUCKET_NAME,INFLUX_ORG)
+
+"""
+
+import argparse
+from pathlib import Path
+
+from lib import *
+
+parser = argparse.ArgumentParser(description='Executes mina benchmarks')
+subparsers = parser.add_subparsers(dest="cmd")
+run_bench = subparsers.add_parser('run')
+run_bench.add_argument("--outfile", required=True, help="output file")
+run_bench.add_argument("--benchmark", type=BenchmarkType, help="benchmark to run")
+run_bench.add_argument("--influx", action='store_true', help="Required only if --format=csv. Makes the csv compliant with the influx annotated csv format")
+run_bench.add_argument("--format", type=Format, help="output file format [text,csv]", default=Format.text)
+run_bench.add_argument("--path", help="override path to benchmark")
+run_bench.add_argument("--branch", default="test", help="Required only if --format=csv. Add branch name to csv file")
+run_bench.add_argument("--genesis-ledger-path", default="./genesis_ledgers/devnet.json", help="Applicable only for ledger-export benchmark. Location of genesis config file")
+run_bench.add_argument("--k", default=1)
+run_bench.add_argument("--max-num-updates", default=4 , type=int)
+run_bench.add_argument("--min-num-updates", default=2, type=int)
+
+parse_bench = subparsers.add_parser('parse',help="parse textual benchmark output to csv")
+parse_bench.add_argument("--benchmark", type=BenchmarkType, help="benchmark whose output should be parsed")
+parse_bench.add_argument("--infile", help="input file")
+parse_bench.add_argument("--influx", action='store_true', help="assure the output file is compliant with the influx schema")
+parse_bench.add_argument("--branch", help="adds an additional column in the csv with the branch from which the benchmarks were built")
+parse_bench.add_argument("--outfile", help="output file")
+
+compare_bench = subparsers.add_parser('compare', help="compare current data with historical downloaded from influx db")
+compare_bench.add_argument("--benchmark", type=BenchmarkType, help="benchmark whose results should be compared")
+compare_bench.add_argument("--infile", help="input file")
+compare_bench.add_argument("--yellow-threshold", help="fraction by which the current measurement may exceed the moving average before a warning is triggered",
+                           type=float,
+                           choices=[Range(0.0, 1.0)],
+                           default=0.1)
+compare_bench.add_argument("--red-threshold", help="fraction by which the current measurement may exceed the moving average before the app exits with an error",
+ type=float,
+ choices=[Range(0.0, 1.0)],
+ default=0.2)
+
+upload_bench = subparsers.add_parser('upload')
+upload_bench.add_argument("--infile")
+
+test_bench = subparsers.add_parser('test', help="Performs entire cycle of operations from run till upload")
+test_bench.add_argument("--benchmark", type=BenchmarkType, help="benchmark to test")
+test_bench.add_argument("--tmpfile", help="temporary location of result file")
+test_bench.add_argument("--path")
+test_bench.add_argument("--yellow-threshold",
+                        help="fraction by which the current measurement may exceed the moving average before a warning is triggered",
+ type=float,
+ choices=[Range(0.0, 1.0)],
+ default=0.1)
+test_bench.add_argument("--red-threshold",
+                        help="fraction by which the current measurement may exceed the moving average before the app exits with an error",
+ type=float,
+ choices=[Range(0.0, 1.0)],
+ default=0.2)
+test_bench.add_argument("--branch", help="branch which was used in tests")
+test_bench.add_argument("--genesis-ledger-path", default="./genesis_ledgers/devnet.json", help="Applicable only for ledger-export benchmark. Location of genesis config file")
+test_bench.add_argument('-m', '--mainline-branches', action='append', help='Defines a mainline branch. If the value of \'--branch\' is among the mainline branches, the results will be uploaded')
+test_bench.add_argument("--k", default=1)
+test_bench.add_argument("--max-num-updates", default=4 , type=int)
+test_bench.add_argument("--min-num-updates", default=2, type=int)
+
+
+ls_bench = subparsers.add_parser('ls')
+
+args = parser.parse_args()
+
+logging.basicConfig(level=logging.DEBUG)
+
+default_mainline_branches = ["develop", "compatible", "master"]
+
+
+def select_benchmark(kind):
+ if kind == BenchmarkType.mina_base:
+ return MinaBaseBenchmark()
+ elif kind == BenchmarkType.zkapp:
+ return ZkappLimitsBenchmark()
+ elif kind == BenchmarkType.heap_usage:
+ return HeapUsageBenchmark()
+ elif kind == BenchmarkType.snark:
+ return SnarkBenchmark(args.k, args.max_num_updates, args.min_num_updates)
+ elif kind == BenchmarkType.ledger_export:
+ if args.genesis_ledger_path is None:
+ print(
+            "--genesis-ledger-path needs to be provided when running the ledger export benchmark"
+ )
+ exit(1)
+ return LedgerExportBenchmark(args.genesis_ledger_path)
+
+if args.cmd == "ls":
+ benches = [str(b) for b in BenchmarkType]
+ print("\n".join(benches))
+ exit(0)
+
+if args.benchmark is None:
+ print("benchmark not selected")
+ exit(1)
+
+bench = select_benchmark(args.benchmark)
+
+if args.cmd == "run":
+ output = bench.run(path=args.path)
+ if args.format == "text":
+ with open(args.outfile, 'w') as file:
+ file.write(output)
+ else:
+ files = ",".join(
+ bench.parse(output, args.outfile, args.influx, args.branch))
+ print(f"produced files: {files}")
+
+if args.cmd == "parse":
+ files = bench.parse(Path(args.infile).read_text(), args.outfile, args.influx, args.branch)
+ print(f'Parsed files: \n{",".join(files)}')
+
+
+if args.cmd == "compare":
+ bench.compare(args.infile, args.yellow_threshold, args.red_threshold)
+
+if args.cmd == "upload":
+ bench.upload(args.infile)
+
+if args.cmd == "test":
+ output = bench.run(path=args.path)
+ files = bench.parse(output,
+ args.tmpfile,
+ influxdb=True,
+ branch=args.branch)
+
+    for file in files:
+        bench.compare(file, args.yellow_threshold, args.red_threshold)
+
+ mainline_branches = default_mainline_branches if args.mainline_branches is None else args.mainline_branches
+
+ if args.branch in mainline_branches:
+ for file in files:
+ bench.upload(file)
\ No newline at end of file
diff --git a/scripts/benchmarks/lib/__init__.py b/scripts/benchmarks/lib/__init__.py
new file mode 100644
index 00000000000..d4e612aaf46
--- /dev/null
+++ b/scripts/benchmarks/lib/__init__.py
@@ -0,0 +1,3 @@
+from .influx import *
+from .bench import *
+from .utils import *
diff --git a/scripts/benchmarks/lib/bench.py b/scripts/benchmarks/lib/bench.py
new file mode 100644
index 00000000000..0034ade912f
--- /dev/null
+++ b/scripts/benchmarks/lib/bench.py
@@ -0,0 +1,615 @@
+import re
+from abc import ABC
+
+import parse
+from pathlib import Path
+import io
+import os
+from enum import Enum
+import logging
+from lib.utils import isclose, assert_cmd
+from lib.influx import *
+
+import csv
+import abc
+
+logger = logging.getLogger(__name__)
+
+class Benchmark(abc.ABC):
+ """
+    Abstract class which aggregates all necessary operations
+    (run, parse) which are then implemented by children.
+    Moreover, for general and common operations like upload it provides a concrete implementation.
+
+ """
+
+ def __init__(self, kind):
+ self.kind = kind
+ self.influx_client = Influx()
+
+ def headers_to_influx(self, headers):
+ """
+ Converts headers to influx db headers. Details:
+ https://docs.influxdata.com/influxdb/cloud/reference/syntax/annotated-csv/extended/
+ """
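+        # e.g. a measurement, two field and one tag column produce:
+        # "#datatype measurement,double,double,tag"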
+ return "#datatype " + ",".join(
+ [header.influx_kind for header in headers])
+
+ @abc.abstractmethod
+ def default_path(self):
+ """
+ Abstract method to get default path to app
+ """
+ pass
+
+ @abc.abstractmethod
+ def name_header(self):
+ """
+ Abstract method for getting header object for measurement name
+ """
+ pass
+
+ @abc.abstractmethod
+ def branch_header(self):
+ """
+ Abstract method for getting header object for branch name
+ """
+ pass
+
+ def headers_to_name(self, headers):
+ """
+ Gets names of headers
+ """
+ return list(map(lambda x: x.name, headers))
+
+ @abc.abstractmethod
+ def headers(self):
+ """
+ Returns all csv headers
+ """
+ pass
+
+ @abc.abstractmethod
+ def fields(self):
+ """
+ Returns subset of headers for influx field:
+ https://docs.influxdata.com/influxdb/cloud/reference/syntax/annotated-csv/extended/#field
+ """
+ pass
+
+ @abc.abstractmethod
+ def run(self, path):
+ """
+ Runs benchmark
+ """
+ pass
+
+ @abc.abstractmethod
+ def parse(self, content, output_filename, influxdb, branch):
+ """
+ Parses benchmark output to csv
+ """
+ pass
+
+ def compare(self, result_file, yellow_threshold, red_threshold):
+ """
+ Compares actual measurements against thresholds (yellow,red)
+
+ Constraints on result file:
+ - comma as delimiter
+ - implements influx csv format:
+ https://docs.influxdata.com/influxdb/cloud/reference/syntax/annotated-csv/extended/
+
+        It gets the moving average from InfluxDB and adds grace values (yellow, red) on top of it to tolerate measurement skew.
+
+ """
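+        # Example: with a moving average of 100, yellow_threshold=0.1 and
+        # red_threshold=0.2, a value above 110 triggers a warning and a value
+        # above 120 fails the build.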
+ with open(result_file, newline='') as csvfile:
+ reader = csv.reader(csvfile, delimiter=',')
+ for i in range(2):
+ next(reader)
+ for row in reader:
+ for field in self.fields():
+ value = float(row[field.pos])
+ name = row[self.name_header().pos]
+ branch = row[self.branch_header().pos]
+ result = self.influx_client.query_moving_average(
+ name, branch, str(field), self.branch_header())
+
+ if not any(result):
+ logger.warning(
+ f"Skipping comparison for {name} as there are no historical data available yet"
+ )
+ else:
+ average = float(result[-1].records[-1]["_value"])
+
+ current_red_threshold = average * red_threshold
+ current_yellow_threshold = average * yellow_threshold
+
+ logger.debug(
+ f"calculated thresholds: [red={current_red_threshold},yellow={current_yellow_threshold}]"
+ )
+
+                        if value > average + current_red_threshold:
+                            logger.error(
+                                f"{name} measurement exceeds time greatly ({value} against {average + current_red_threshold}). failing the build"
+                            )
+                            exit(1)
+                        elif value > average + current_yellow_threshold:
+                            logger.warning(
+                                f"WARNING: {name} measurement exceeds expected time ({value} against {average + current_yellow_threshold})"
+                            )
+                        else:
+                            logger.info(
+                                f"comparison successful for {name}. {value} is below thresholds [yellow={average + current_yellow_threshold},red={average + current_red_threshold}]"
+                            )
+
+ def upload(self, file):
+ self.influx_client.upload_csv(file)
+
+
+class BenchmarkType(Enum):
+
+ mina_base = 'mina-base'
+ snark = 'snark'
+ heap_usage = 'heap-usage'
+ zkapp = 'zkapp'
+ ledger_export = 'ledger-export'
+
+ def __str__(self):
+ return self.value
+
+
+class JaneStreetBenchmark(Benchmark, ABC):
+ """
+    Abstract class for native OCaml benchmarks which share the same output format.
+
+ """
+ name = MeasurementColumn("Name", 0)
+ time_per_runs = FieldColumn("Time/Run", 1, "us")
+ cycles_per_runs = FieldColumn("Cycls/Run", 2, "kc")
+ minor_words_per_runs = FieldColumn("mWd/Run", 3, "w")
+ major_words_per_runs = FieldColumn("mjWd/Run", 4, "w")
+ promotions_per_runs = FieldColumn("Prom/Run", 5, "w")
+ branch = TagColumn("gitbranch", 6)
+
+ def __init__(self, kind):
+ Benchmark.__init__(self, kind)
+
+ def headers(self):
+ return [
+ MinaBaseBenchmark.name, MinaBaseBenchmark.time_per_runs,
+ MinaBaseBenchmark.cycles_per_runs,
+ MinaBaseBenchmark.minor_words_per_runs,
+ MinaBaseBenchmark.major_words_per_runs,
+ MinaBaseBenchmark.promotions_per_runs, MinaBaseBenchmark.branch
+ ]
+
+ def fields(self):
+ return [
+ MinaBaseBenchmark.time_per_runs, MinaBaseBenchmark.cycles_per_runs,
+ MinaBaseBenchmark.minor_words_per_runs,
+ MinaBaseBenchmark.major_words_per_runs,
+ MinaBaseBenchmark.promotions_per_runs
+ ]
+
+ def name_header(self):
+ return self.name
+
+ def branch_header(self):
+ return self.branch
+
+ def export_to_csv(self, lines, filename, influxdb, branch):
+ """
+        Exports native OCaml benchmarks to influxdb annotated csv.
+        JaneStreet benchmarks share a common tabular layout, similar to
+        (values illustrative):
+
+        │ Name    │ Time/Run │ Cycls/Run │ mWd/Run │ mjWd/Run │ Prom/Run │
+        │ bench a │  12.34us │   45.67kc │  89.00w │    1.20w │    0.30w │
+        │ bench b │   5.67us │   21.00kc │  42.00w │    0.80w │    0.10w │
+ """
+ with open(filename, 'w') as csvfile:
+
+ csvwriter = csv.writer(csvfile)
+
+ if influxdb:
+ csvfile.write(self.headers_to_influx(self.headers()) + "\n")
+
+ for line in lines:
+ if line.startswith('│'):
+
+ rows = list(map(lambda x: x.strip(), line.split('│')))
+ rows = list(filter(lambda x: x, rows))
+
+ if rows[0].startswith(MinaBaseBenchmark.name.name):
+ rows[
+ 1] += " " + MinaBaseBenchmark.time_per_runs.format_unit(
+ )
+ rows[
+ 2] += " " + MinaBaseBenchmark.cycles_per_runs.format_unit(
+ )
+ rows[
+ 3] += " " + MinaBaseBenchmark.minor_words_per_runs.format_unit(
+ )
+ rows[
+ 4] += " " + MinaBaseBenchmark.major_words_per_runs.format_unit(
+ )
+ rows[
+ 5] += " " + MinaBaseBenchmark.promotions_per_runs.format_unit(
+ )
+ rows.append("gitbranch")
+
+ else:
+ # remove [.*] from name
+                        rows[0] = re.sub(r'\[.*?\]', '', rows[0]).strip()
+ time = rows[1]
+ # remove units from values
+ if not time.endswith("us"):
+ if time.endswith("ns"):
+ time = float(time[:-2]) * 1_000
+ rows[1] = time
+ else:
+ raise Exception(
+ "Time can be expressed only in us or ns")
+ else:
+ # us
+ rows[1] = time[:-2]
+ # kc
+ rows[2] = rows[2][:-2]
+ # w
+ rows[3] = rows[3][:-1]
+ # w
+ rows[4] = rows[4][:-1]
+ # w
+ rows[5] = rows[5][:-1]
+ rows.append(branch)
+
+ csvwriter.writerow(rows[:])
+
+ def parse(self, content, output_filename, influxdb, branch):
+ """
+        Parses the output of a standard Jane Street benchmark to csv.
+        The format is well known and matches the tabular layout shown in
+        export_to_csv above.
+
+        It can produce standard csv or influxdb annotated csv.
+ """
+ buf = io.StringIO(content)
+ lines = buf.readlines()
+
+ starts = []
+ ends = []
+ files = []
+ for i, e in enumerate(lines):
+ if "Running" in e:
+ starts.append(i)
+
+        if not starts:
+ self.export_to_csv(lines, output_filename, influxdb, branch)
+ else:
+ for start in starts[1:]:
+ ends.append(start)
+
+ ends.append(len(lines) - 1)
+
+ for start, end in zip(starts, ends):
+ name = parse.parse('Running inline tests in library "{}"',
+ lines[start].strip())[0]
+ file = f'{name}_{output_filename}'
+ logger.info(f"exporting {file}..")
+ self.export_to_csv(lines[start:end], f'{file}', influxdb,
+ branch)
+ files.append(file)
+
+ return files
+
+
+class MinaBaseBenchmark(JaneStreetBenchmark):
+ """
+ Concrete implementation of JaneStreetBenchmark for mina-base benchmarks
+ """
+
+ def __init__(self):
+ JaneStreetBenchmark.__init__(self, BenchmarkType.mina_base)
+
+ def run(self, path=None):
+ path = self.default_path() if path is None else path
+ cmd = [
+ path, "time", "cycles", "alloc", "-clear-columns", "-all-values",
+ "-width", "1000", "-run-without-cross-library-inlining",
+ "-suppress-warnings"
+ ]
+ envs = os.environ.copy()
+ envs["BENCHMARKS_RUNNER"] = "TRUE"
+ envs["X_LIBRARY_INLINING"] = "true"
+
+ return assert_cmd(cmd, envs)
+
+ def default_path(self):
+ return "mina-benchmarks"
+
+
+class LedgerExportBenchmark(JaneStreetBenchmark):
+ """
+    Concrete implementation of JaneStreetBenchmark for the ledger export benchmark.
+    Ledger export also requires a genesis ledger config.
+ """
+
+ def __init__(self, genesis_ledger_path):
+ JaneStreetBenchmark.__init__(self, BenchmarkType.ledger_export)
+ self.genesis_ledger_path = genesis_ledger_path
+
+ def run(self, path=None):
+ path = self.default_path() if path is None else path
+ cmd = [
+ path, "time", "cycles", "alloc", "-clear-columns", "-all-values",
+ "-width", "1000"
+ ]
+ envs = os.environ.copy()
+ envs["RUNTIME_CONFIG"] = self.genesis_ledger_path
+
+ return assert_cmd(cmd, envs)
+
+ def default_path(self):
+ return "mina-ledger-export-benchmark"
+
+
+class ZkappLimitsBenchmark(Benchmark):
+ """
+    The zkapp limits benchmark has its own output format which we need to handle separately. It is similar to:
+
+ Proofs updates=0 Signed/None updates=0 Pairs of Signed/None updates=1: Total account updates: 2 Cost: 10.080000
+ Proofs updates=0 Signed/None updates=0 Pairs of Signed/None updates=2: Total account updates: 4 Cost: 20.160000
+ Proofs updates=0 Signed/None updates=0 Pairs of Signed/None updates=3: Total account updates: 6 Cost: 30.240000
+ Proofs updates=0 Signed/None updates=0 Pairs of Signed/None updates=4: Total account updates: 8 Cost: 40.320000
+ Proofs updates=0 Signed/None updates=0 Pairs of Signed/None updates=5: Total account updates: 10 Cost: 50.400000
+ Proofs updates=0 Signed/None updates=0 Pairs of Signed/None updates=6: Total account updates: 12 Cost: 60.480000
+ Proofs updates=0 Signed/None updates=1 Pairs of Signed/None updates=0: Total account updates: 1 Cost: 9.140000
+ Proofs updates=0 Signed/None updates=1 Pairs of Signed/None updates=1: Total account updates: 3 Cost: 19.220000
+
+ """
+
+ name = MeasurementColumn("Name", 0)
+ proofs_updates = FieldColumn("proofs updates", 1, "")
+ signed_updates = FieldColumn("signed updates", 2, "")
+ pairs_of_signed = FieldColumn("pairs of signed", 3, "")
+ total_account_updates = FieldColumn("total account updates", 4, "")
+ cost = FieldColumn("cost", 5, "")
+ category = TagColumn("category", 6)
+ branch = TagColumn("gitbranch", 7)
+
+ def __init__(self):
+ Benchmark.__init__(self, BenchmarkType.zkapp)
+
+ def default_path(self):
+ return "mina-zkapp-limits"
+
+ def fields(self):
+ return [
+ self.proofs_updates, self.pairs_of_signed,
+ self.total_account_updates, self.cost
+ ]
+
+ def name_header(self):
+ return self.name
+
+ def branch_header(self):
+ return self.branch
+
+ def headers(self):
+ return [
+ ZkappLimitsBenchmark.name, ZkappLimitsBenchmark.proofs_updates,
+ ZkappLimitsBenchmark.signed_updates,
+ ZkappLimitsBenchmark.pairs_of_signed,
+ ZkappLimitsBenchmark.total_account_updates,
+ ZkappLimitsBenchmark.cost, ZkappLimitsBenchmark.category,
+ ZkappLimitsBenchmark.branch
+ ]
+
+ def parse(self, content, output_filename, influxdb, branch):
+
+ buf = io.StringIO(content)
+ lines = buf.readlines()
+
+ stats = [list(map(lambda x: x.name, self.headers()))]
+
+ for line in lines:
+ if line == '':
+ continue
+
+            syntax = r"Proofs updates=(?P<proofs_updates>\d+) Signed/None updates=(?P<signed_updates>\d+) Pairs of Signed/None updates=(?P<pairs_of_signed_updates>\d+): Total account updates: (?P<total_account_updates>\d+) Cost: (?P<cost>[0-9]*[.]?[0-9]+)"
+
+ match = re.match(syntax, line)
+
+ if match:
+ proofs_updates = int(match.group("proofs_updates"))
+ signed_updates = int(match.group("signed_updates"))
+ pairs_of_signed_updates = int(
+ match.group("pairs_of_signed_updates"))
+ total_account_updates = int(
+ match.group("total_account_updates"))
+ cost = float(match.group(ZkappLimitsBenchmark.cost.name))
+ name = f"P{proofs_updates}S{signed_updates}PS{pairs_of_signed_updates}TA{total_account_updates}"
+ tag = "zkapp"
+ stats.append((name, proofs_updates, signed_updates,
+ pairs_of_signed_updates, total_account_updates,
+ cost, tag, branch))
+
+ with open(output_filename, 'w') as csvfile:
+ if influxdb:
+ csvfile.write(
+ self.headers_to_influx(self.headers()) + "\n")
+ csvwriter = csv.writer(csvfile)
+ csvwriter.writerows(stats)
+
+ return [output_filename]
+
+ def run(self, path=None):
+ path = self.default_path() if path is None else path
+ return assert_cmd([path])
+
+
+class SnarkBenchmark(Benchmark):
+
+ name = MeasurementColumn("name", 0)
+ proofs_updates = FieldColumn("proofs updates", 1, "")
+ nonproofs_pairs = FieldColumn("non-proof pairs", 2, "")
+ nonproofs_singles = FieldColumn("non-proof singles", 3, "")
+ verification_time = FieldColumn("verification time", 4, "[s]")
+ proving_time = FieldColumn("value", 5, "[s]")
+ category = TagColumn("category", 6)
+ branch = TagColumn("gitbranch", 7)
+
+ def name_header(self):
+ return self.name
+
+ def branch_header(self):
+ return self.branch
+
+    def __init__(self, k, max_num_updates, min_num_updates):
+ Benchmark.__init__(self, BenchmarkType.snark)
+ self.k = k
+ self.max_num_updates = max_num_updates
+ self.min_num_updates = min_num_updates
+
+ def headers(self):
+ return [
+ SnarkBenchmark.name, SnarkBenchmark.proofs_updates,
+ SnarkBenchmark.nonproofs_pairs, SnarkBenchmark.nonproofs_singles,
+ SnarkBenchmark.verification_time, SnarkBenchmark.proving_time,
+ SnarkBenchmark.category, SnarkBenchmark.branch
+ ]
+
+ def fields(self):
+ return [
+ SnarkBenchmark.proofs_updates, SnarkBenchmark.nonproofs_pairs,
+ SnarkBenchmark.nonproofs_singles, SnarkBenchmark.verification_time, SnarkBenchmark.proving_time
+ ]
+
+ def parse(self, content, output_filename, influxdb, branch):
+ buf = io.StringIO(content)
+ lines = buf.readlines()
+ rows = []
+ category = "snark"
+ rows.append(list(map(lambda x: x.name, self.headers())))
+
+ for line in lines:
+ if line.startswith("|"):
+ if "--" in line:
+ continue
+ elif line.startswith("| No.|"):
+ continue
+ else:
+ cols = line.split("|")
+ cols = list(map(lambda x: x.strip(), cols))
+ cols = list(filter(lambda x: x, cols))
+
+ #| No.| Proof updates| Non-proof pairs| Non-proof singles| Mempool verification time (sec)| Transaction proving time (sec)|Permutation|
+ proof_update = cols[1]
+ non_proof_pairs = cols[2]
+ non_proof_singles = cols[3]
+ verification_time = cols[4]
+ proving_time = cols[5]
+ name = cols[6]
+
+ rows.append((name,proof_update,non_proof_pairs,non_proof_singles,verification_time,proving_time,
+ category,branch))
+
+ with open(output_filename, 'w') as csvfile:
+ if influxdb:
+ csvfile.write(self.headers_to_influx(self.headers()) + "\n")
+
+ csvwriter = csv.writer(csvfile)
+ csvwriter.writerows(rows)
+
+ return [ output_filename ]
+
+ def default_path(self):
+ return "mina"
+
+ def run(self, path=None):
+ path = self.default_path() if path is None else path
+ return assert_cmd([
+ path, "transaction-snark-profiler", "--zkapps", "--k",
+ str(self.k), "--max-num-updates",
+ str(self.max_num_updates), "--min-num-updates",
+ str(self.min_num_updates)
+ ])
+
+
+class HeapUsageBenchmark(Benchmark):
+ """
+    The heap usage benchmark is another example of a non-standard benchmark.
+    Its output is similar to:
+
+ Data of type Zkapp_command.t uses 52268 heap words = 418144 bytes
+ Data of type Pickles.Side_loaded.Proof.t uses 3467 heap words = 27736 bytes
+ Data of type Mina_base.Side_loaded_verification_key.t uses 897 heap words = 7176 bytes
+ Data of type Dummy Pickles.Side_loaded.Proof.t uses 2672 heap words = 21376 bytes
+ Data of type Dummy Mina_base.Side_loaded_verification_key.t uses 99 heap words = 792 bytes
+ ...
+ """
+
+ name = MeasurementColumn("Name", 0)
+ heap_words = FieldColumn("heap words", 1, "")
+ bytes = FieldColumn("bytes", 2, "")
+ category = TagColumn("category", 3)
+ branch = TagColumn("gitbranch", 4)
+
+ def __init__(self):
+ Benchmark.__init__(self, BenchmarkType.heap_usage)
+
+ def name_header(self):
+ return self.name
+
+ def branch_header(self):
+ return self.branch
+
+ def headers(self):
+ return [
+ HeapUsageBenchmark.name, HeapUsageBenchmark.heap_words,
+ HeapUsageBenchmark.bytes, HeapUsageBenchmark.category,
+ HeapUsageBenchmark.branch
+ ]
+
+ def fields(self):
+ return [
+ HeapUsageBenchmark.heap_words,
+ HeapUsageBenchmark.bytes
+ ]
+
+ def parse(self, content, output_filename, influxdb, branch):
+ buf = io.StringIO(content)
+ lines = buf.readlines()
+ rows = []
+ rows.append(self.headers_to_name(self.headers()))
+
+ for i, line in enumerate(lines):
+ if line.startswith("Data of type"):
+ sanitized_line = line.replace(" ", "").strip()
+ row = list(
+ parse.parse("Dataoftype{}uses{}heapwords={}bytes",
+ sanitized_line))
+ row.extend(("heap_usage", branch))
+ rows.append(row)
+
+ with open(output_filename, 'w') as csvfile:
+ if influxdb:
+ csvfile.write(self.headers_to_influx(self.headers()) + "\n")
+ csvwriter = csv.writer(csvfile)
+ csvwriter.writerows(rows)
+ return [output_filename]
+
+ def default_path(self):
+ return "mina-heap-usage"
+
+ def run(self, path=None):
+ path = self.default_path() if path is None else path
+ return assert_cmd([path])
diff --git a/scripts/benchmarks/lib/influx.py b/scripts/benchmarks/lib/influx.py
new file mode 100644
index 00000000000..a45c1c0a39c
--- /dev/null
+++ b/scripts/benchmarks/lib/influx.py
@@ -0,0 +1,165 @@
+import logging
+import os
+import subprocess
+import time
+from pathlib import Path
+
+import influxdb_client
+
+logger = logging.getLogger(__name__)
+
+
+class HeaderColumn:
+ """
+    It accepts influx_kind (measurement, double, tag, ...) and pos, which locates the column in the csv when parsing.
+ It accepts influx_kind [string,double,tag..] and pos which helps find it in csv when parsing
+ """
+
+ def __init__(self, name, influx_kind, pos):
+ self.name = name
+ self.influx_kind = influx_kind
+ self.pos = pos
+
+
+class MeasurementColumn(HeaderColumn):
+ """
+ Column header which represents influx measurement header
+ """
+
+ def __init__(self, name, pos):
+ HeaderColumn.__init__(self, name, influx_kind="measurement", pos=pos)
+
+
+class FieldColumn(HeaderColumn):
+ """
+ Column header which represents influx field header.
+    It has an additional unit field which can be formatted as part of the name.
+    Currently a field is always a double (there has been no need for a different type so far).
+ """
+
+ def __init__(self, name, pos, unit=None):
+ HeaderColumn.__init__(self, name, influx_kind="double", pos=pos)
+ self.unit = unit
+
+ def __str__(self):
+ if self.unit:
+ return f"{self.name} [{self.unit}]"
+ else:
+ return f"{self.name}"
+
+ def format_unit(self):
+ return f"[{self.unit}]"
+
+
+class TagColumn(HeaderColumn):
+ """
+    Specialized header for an influx tag.
+ """
+
+ def __init__(self, name, pos):
+ HeaderColumn.__init__(self, name, influx_kind="tag", pos=pos)
+
+
+class Influx:
+ """
+    Influx helper which wraps the influx cli and python api.
+    It requires the INFLUX_* env vars to be set
+    and raises RuntimeError if they are not defined.
+ """
+
+ host = "INFLUX_HOST"
+ token = "INFLUX_TOKEN"
+ org = "INFLUX_ORG"
+ bucket = "INFLUX_BUCKET_NAME"
+
+ @staticmethod
+ def check_envs():
+ """
+ In order to talk with influx db we need to have some env vars defined.
+ This method verifies correct setup
+ """
+
+ if Influx.host not in os.environ:
+ raise RuntimeError(f"{Influx.host} env var not defined")
+ if Influx.token not in os.environ:
+ raise RuntimeError(f"{Influx.token} env var not defined")
+ if Influx.org not in os.environ:
+ raise RuntimeError(f"{Influx.org} env var not defined")
+ if Influx.bucket not in os.environ:
+ raise RuntimeError(f"{Influx.bucket} env var not defined")
+
+ def client(self):
+ Influx.check_envs()
+ return influxdb_client.InfluxDBClient(
+ url=os.environ[Influx.host],
+ token=os.environ[Influx.token],
+ org=os.environ[Influx.org],
+ bucket=os.environ[Influx.bucket])
+
+ def __init__(self, moving_average_size=10):
+ self.moving_average_size = moving_average_size
+
+ def __get_moving_average_query(self, name, branch, field, branch_header):
+ """
+ Constructs moving average query from influx for comparison purposes
+
+ Moving average size is configured in class constructor
+ """
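+        # Rendered example (bucket, branch and field values are illustrative):
+        #   from(bucket: "mina-benchmarks") |> range(start: -10)
+        #     |> filter (fn: (r) => (r["gitbranch"] == "compatible")
+        #          and r._measurement == "snark" and r._field == "proving time [s]")
+        #     |> keep(columns: ["_value"]) |> movingAverage(n: 10)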
+
+ bucket = os.environ[Influx.bucket]
+ return f"from(bucket: \"{bucket}\") \
+ |> range(start: -{self.moving_average_size}) \
+ |> filter (fn: (r) => (r[\"{branch_header.name}\"] == \"{branch}\" ) \
+ and r._measurement == \"{name}\" \
+ and r._field == \"{field}\" ) \
+ |> keep(columns: [\"_value\"]) \
+ |> movingAverage(n:{self.moving_average_size}) "
+
+ def query_moving_average(self, name, branch, field, branch_header):
+ """
+ Retrieves moving average from influx db for particular
+ branch and field
+ """
+
+ query = self.__get_moving_average_query(name, branch, field,
+ branch_header)
+ logger.debug(f"running influx query: {query}")
+ query_api = self.client().query_api()
+ return query_api.query(query)
+
+ def upload_csv(self, file):
+ """
+        Uploads a csv to InfluxDB. The file needs to be formatted according to the influx requirements:
+        https://docs.influxdata.com/influxdb/cloud/reference/syntax/annotated-csv/
+
+        WARNING: the InfluxDB write api is not very friendly with a csv which contains more than one
+        measurement (which is our case). I decided to use the influx cli as it supports multiple
+        measurements in a single csv file.
+        Unfortunately the influx cli has a nasty issue when called from python
+        (similar to the hanging queries problem: https://community.influxdata.com/t/influxdb-hanging-queries/1522).
+        My workaround is to use the --http-debug flag, then read the command output, and once a
+        204 status code is returned, kill the influx cli.
+ """
+
+ if not Path(file).is_file():
+ raise RuntimeError(f"cannot find {file}")
+
+ if not open(file).readline().rstrip().startswith("#datatype"):
+ raise RuntimeError(
+ f"{file} is badly formatted and not eligible for uploading to influx db. "
+ f"see more at https://docs.influxdata.com/influxdb/cloud/reference/syntax/annotated-csv/"
+ )
+
+ process = subprocess.Popen([
+ "influx", "write", "--http-debug", "--format=csv", f"--file={file}"
+ ],
+ stderr=subprocess.PIPE)
+
+ timeout = time.time() + 60 # 1 minute
+ while True:
+ line = process.stderr.readline()
+ if b"HTTP/2.0 204 No Content" in line or time.time() > timeout:
+ process.kill()
+ break
+
+ logger.info(f"{file} uploaded to influx db")
diff --git a/scripts/benchmarks/lib/utils.py b/scripts/benchmarks/lib/utils.py
new file mode 100644
index 00000000000..382dce31d04
--- /dev/null
+++ b/scripts/benchmarks/lib/utils.py
@@ -0,0 +1,54 @@
+import subprocess
+import logging
+from enum import Enum
+
+logger = logging.getLogger(__name__)
+
+
+def isclose(a, b, rel_tol=1e-09, abs_tol=0.0):
+ return abs(a - b) <= max(rel_tol * max(abs(a), abs(b)), abs_tol)
+
+
+def assert_cmd(cmd, envs=None):
+ logger.debug(f"running command {cmd}")
+ result = subprocess.run(cmd,
+ stdout=subprocess.PIPE,
+ stderr=subprocess.PIPE,
+ env=envs)
+
+ if result.returncode != 0:
+ err = result.stderr.decode("UTF-8")
+ logger.error(
+ f"{cmd} resulted in errorcode {result.returncode} with message {err}"
+ )
+ raise RuntimeError(f"cmd failed: {cmd} with stderr: {err}")
+
+ output = result.stdout.decode("UTF-8")
+ logger.debug(f"command output: {output}")
+ return output
+
+class Range(object):
+
+ def __init__(self, start, end):
+ self.start = start
+ self.end = end
+
+ def __eq__(self, other):
+ return self.start <= other <= self.end
+
+ def __contains__(self, item):
+ return self.__eq__(item)
+
+ def __iter__(self):
+ yield self
+
+ def __str__(self):
+ return '[{0},{1}]'.format(self.start, self.end)
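+
+# Usage note: argparse validates "value in choices"; containment falls back to
+# Range.__eq__, so choices=[Range(0.0, 1.0)] accepts any float between 0 and 1.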
+
+
+class Format(Enum):
+ csv = 'csv'
+ text = 'text'
+
+ def __str__(self):
+ return self.value
diff --git a/scripts/benchmarks/requirements.txt b/scripts/benchmarks/requirements.txt
new file mode 100644
index 00000000000..22ccbdce44d
--- /dev/null
+++ b/scripts/benchmarks/requirements.txt
@@ -0,0 +1,2 @@
+influxdb_client==1.46.0
+parse==1.20.1
diff --git a/scripts/benchmarks/result_comparator.py b/scripts/benchmarks/result_comparator.py
new file mode 100644
index 00000000000..783b325af2d
--- /dev/null
+++ b/scripts/benchmarks/result_comparator.py
@@ -0,0 +1,27 @@
+import csv
+import argparse
+import subprocess
+
+
+parser = argparse.ArgumentParser(description='Compares actual benchmark values against influx db')
+parser.add_argument('--infile',
+                    help='input csv file with actual benchmark')
+parser.add_argument('--red-threshold',
+                    help='value above which the app exits with code 1')
+parser.add_argument('--yellow-threshold',
+                    help='value above which the app prints a warning',
+                    )
+args = parser.parse_args()
+
+with open(args.infile, newline='') as csvfile:
+ rows = list(csv.reader(csvfile))
+
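+ # row 0 is the influx #datatype annotation (assuming the file was produced
+ # by result_parser.py with --influxdb); row 1 holds the column names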
+ headers_row = rows[1]
+ name_pos = headers_row.index("Name")
+ branch_pos = headers_row.index("gitbranch")
+
+ for items in rows[2:]:
+ name = items[name_pos]
+ branch = items[branch_pos]
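+ # Fetch the 10-day moving average for this measurement on the same branch;
+ # the comparison against the --red/--yellow thresholds is apparently not
+ # implemented yet, so the query result is only printed.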
+ output = subprocess.run([
+ "influx", "query",
+ f'from(bucket: "mina-benchmarks") |> range(start: -10d) |> filter(fn: (r) => r["gitbranch"] == "{branch}" and r._measurement == "{name}") |> keep(columns: ["_value"]) |> movingAverage(n:1)'
+ ], capture_output=True, text=True).stdout
+ print(output)
\ No newline at end of file
diff --git a/scripts/benchmarks/result_parser.py b/scripts/benchmarks/result_parser.py
new file mode 100755
index 00000000000..d5d0e5e0f09
--- /dev/null
+++ b/scripts/benchmarks/result_parser.py
@@ -0,0 +1,218 @@
+import csv
+import argparse
+import re
+from parse import parse
+from pathlib import Path
+
+from enum import Enum
+
+
+class Benchmark(Enum):
+ tabular = 'tabular'
+ snark = 'snark'
+ heap_usage = 'heap-usage'
+ zkapp = 'zkapp'
+
+ def __str__(self):
+ return self.value
+
+
+def export_to_csv(lines, filename, influxdb, branch):
+ with open(filename, 'w') as csvfile:
+
+ csvwriter = csv.writer(csvfile)
+
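+ # influx annotated csv needs a leading #datatype line describing each
+ # column; see https://docs.influxdata.com/influxdb/cloud/reference/syntax/annotated-csv/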
+ if influxdb:
+ csvfile.write("#datatype measurement,double,double,double,double,tag\n")
+
+ for line in lines:
+ if line.startswith('│'):
+
+ rows = list(map(lambda x: x.strip(), line.split('│')))
+ rows = list(filter(lambda x: x, rows))
+
+ if rows[0].startswith("Name"):
+ rows[1] += " [us]"
+ rows[2] += " [kc]"
+ rows[3] += " [w]"
+ rows[4] += " [w]"
+ rows.append("gitbranch")
+
+ else:
+ # remove [.*] from name
+ rows[0] = re.sub(r'\[.*?\]', '', rows[0]).strip()
+ time = rows[1]
+ # remove units from values
+ if not time.endswith("us"):
+ if time.endswith("ns"):
+ time = float(time[:-2]) / 1_000 # ns -> us
+ rows[1] = time
+ else:
+ raise Exception("Time can be expressed only in us or ns")
+ else:
+ # us
+ rows[1] = time[:-2]
+ # kc
+ rows[2] = rows[2][:-2]
+ # w
+ rows[3] = rows[3][:-1]
+ # w
+ rows[4] = rows[4][:-1]
+
+ rows.append(branch)
+
+ csvwriter.writerow(rows[:])
+
+
+def parse_zkapp_limits(input_filename, output_filename, influxdb, branch):
+ with open(input_filename, 'r', encoding='UTF-8') as file:
+ lines = file.readlines()
+ stats = []
+ header = ["proofs updates", "signed updates", "pairs of signed", "total account updates", "cost" , "gitbranch"]
+ stats.append(header)
+
+ for line in lines:
+ if not line.strip():
+ continue
+
+ syntax = r"Proofs updates=(?P<proofs_updates>\d+) Signed/None updates=(?P<signed_updates>\d+) Pairs of Signed/None updates=(?P<pairs_of_signed_updates>\d+): Total account updates: (?P<total_account_updates>\d+) Cost: (?P<cost>[0-9]*[.]?[0-9]+)"
+
+ match = re.match(syntax, line)
+
+ if match:
+ proofs_updates = int(match.group('proofs_updates'))
+ signed_updates = int(match.group('signed_updates'))
+ pairs_of_signed_updates = int(match.group('pairs_of_signed_updates'))
+ total_account_updates = int(match.group('total_account_updates'))
+ cost = float(match.group('cost'))
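+ # compact measurement name, e.g. P1S2PS3TA10 = 1 proof update,
+ # 2 signed, 3 signed pairs, 10 total account updates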
+ name = f"P{proofs_updates}S{signed_updates}PS{pairs_of_signed_updates}TA{total_account_updates}"
+ tag = "zkapp"
+ stats.append((name,proofs_updates, signed_updates, pairs_of_signed_updates, total_account_updates, cost, tag, branch))
+
+ with open(output_filename, 'w') as csvfile:
+ if influxdb:
+ csvfile.write("#datatype measurement,double,double,double,double,double,tag\n")
+ csvwriter = csv.writer(csvfile)
+ csvwriter.writerows(stats)
+
+
+def parse_snark_format(input_filename, output_filename, influxdb, branch):
+ with open(input_filename, 'r', encoding='UTF-8') as file:
+ lines = file.readlines()
+ stats = []
+ zkapps = []
+
+ header = ["measurement", "proof updates", "nonproofs", "value", "tag", "gitbranch"]
+ stats.append(header)
+
+ for line in lines:
+ if not line.strip():
+ continue
+
+ syntax = r'Generated zkapp transactions with (?P<updates>\d+) updates and (?P<proof>\d+) proof updates in (?P<time>[0-9]*[.]?[0-9]+) secs'
+
+ match = re.match(syntax, line)
+
+ if match:
+ updates = int(match.group('updates'))
+ proof = int(match.group('proof'))
+ time = float(match.group('time'))
+ name = f"{updates} Updates {proof} Proofs"
+ tag = "generated zkapp transactions"
+
+ stats.append((name, updates, proof, time, tag, branch))
+
+ if line.startswith("|"):
+ if "--" in line:
+ continue
+ else:
+ cols = line.split("|")
+ cols = list(map(lambda x: x.strip(), cols))
+ cols = list(filter(lambda x: x, cols))
+ cols = cols[1:]
+ cols.append(branch)
+ zkapps.append(cols)
+
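+ # Two csv files are produced: <stem>_stats.csv with the generation times
+ # parsed above and <stem>_zkapp.csv with the zkapp table rows.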
+ with open(f"{Path(output_filename).stem}_stats.csv", 'w') as csvfile:
+ if influxdb:
+ csvfile.write("#datatype measurement,double,double,double,tag,tag\n")
+
+ csvwriter = csv.writer(csvfile)
+ csvwriter.writerows(stats)
+
+ with open(f"{Path(output_filename).stem}_zkapp.csv", 'w') as csvfile:
+ if influxdb:
+ csvfile.write("#datatype double,double,double,double,double,measurement,tag\n")
+
+ csvwriter = csv.writer(csvfile)
+ csvwriter.writerows(zkapps)
+
+
+def parse_tabular_format(input_filename, output_filename, influxdb, branch):
+ with open(input_filename, 'r', encoding='UTF-8') as file:
+ lines = file.readlines()
+
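+ # A single output may contain several inline-test libraries, each opened by
+ # a 'Running inline tests in library "..."' line; split into per-library
+ # segments and export each one separately.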
+ starts = []
+ ends = []
+ for i, e in enumerate(lines):
+ if "Running" in e:
+ starts.append(i)
+
+ if not starts:
+ export_to_csv(lines, output_filename, influxdb, branch)
+ else:
+ for start in starts[1:]:
+ ends.append(start)
+
+ ends.append(len(lines) - 1)
+
+ for start, end in zip(starts, ends):
+ name = parse('Running inline tests in library "{}"', lines[start].strip())[0]
+ file = f'{name}_{output_filename}'
+ print(f"exporting {file}..")
+ export_to_csv(lines[start:end], f'{file}', influxdb, branch)
+
+
+def parse_heap_usage_format(input_filename, output_filename, influxdb, branch):
+ with open(input_filename, 'r', encoding='UTF-8') as file:
+ rows = []
+
+ header = ["Name", "heap words", "bytes", "category", "gitbranch"]
+ rows.append(header)
+
+ for i, line in enumerate(file.readlines()):
+ if line.startswith("Data of type"):
+ sanitized_line = line.replace(" ", "").strip()
+ row = list(parse("Dataoftype{}uses{}heapwords={}bytes", sanitized_line))
+ row.append("heap_usage")
+ row.append(branch)
+ rows.append(row)
+
+ with open(output_filename, 'w') as csvfile:
+ if influxdb:
+ csvfile.write("#datatype measurement,double,double,tag,tag\n")
+ csvwriter = csv.writer(csvfile)
+ csvwriter.writerows(rows)
+
+
+parser = argparse.ArgumentParser(description='Parse various mina benchmark outputs')
+parser.add_argument('--benchmark',
+ help='type of benchmark', type=Benchmark, choices=list(Benchmark))
+parser.add_argument('--infile',
+ help='input file with benchmark output')
+parser.add_argument('--outfile',
+ help='output file with benchmark output as csv')
+parser.add_argument('--influxdb',
+ help='convert output file to be compliant with influxdb standard. See more at: https://docs.influxdata.com/influxdb/cloud/write-data/developer-tools/csv/',
+ action='store_true')
+parser.add_argument('--branch',
+ help='Adds additional tag to csv file with source git branch')
+
+args = parser.parse_args()
+
+if args.benchmark == Benchmark.tabular:
+ parse_tabular_format(args.infile, args.outfile, args.influxdb, args.branch)
+elif args.benchmark == Benchmark.snark:
+ parse_snark_format(args.infile, args.outfile, args.influxdb, args.branch)
+elif args.benchmark == Benchmark.zkapp:
+ parse_zkapp_limits(args.infile, args.outfile, args.influxdb, args.branch)
+else:
+ parse_heap_usage_format(args.infile, args.outfile, args.influxdb, args.branch)
diff --git a/scripts/debian/builder-helpers.sh b/scripts/debian/builder-helpers.sh
index 0e34178731d..1f3a42c215b 100755
--- a/scripts/debian/builder-helpers.sh
+++ b/scripts/debian/builder-helpers.sh
@@ -241,7 +241,10 @@ build_functional_test_suite_deb() {
# Binaries
cp ./default/src/test/command_line_tests/command_line_tests.exe "${BUILDDIR}/usr/local/bin/mina-command-line-tests"
-
+ cp ./default/src/app/heap_usage/heap_usage.exe "${BUILDDIR}/usr/local/bin/mina-heap-usage"
+ cp ./default/src/app/zkapp_limits/zkapp_limits.exe "${BUILDDIR}/usr/local/bin/mina-zkapp-limits"
+ cp ./default/src/test/archive/patch_archive_test/patch_archive_test.exe "${BUILDDIR}/usr/local/bin/mina-patch-archive-test"
+
build_deb mina-test-suite
}
@@ -364,7 +367,7 @@ build_archive_deb () {
cp ./default/src/app/extract_blocks/extract_blocks.exe "${BUILDDIR}/usr/local/bin/mina-extract-blocks"
mkdir -p "${BUILDDIR}/etc/mina/archive"
- cp ../scripts/archive/missing-blocks-guardian.sh "${BUILDDIR}/etc/mina/archive"
+ cp ../scripts/archive/missing-blocks-guardian.sh "${BUILDDIR}/usr/local/bin/mina-missing-blocks-guardian"
cp ./default/src/app/missing_blocks_auditor/missing_blocks_auditor.exe "${BUILDDIR}/usr/local/bin/mina-missing-blocks-auditor"
cp ./default/src/app/replayer/replayer.exe "${BUILDDIR}/usr/local/bin/mina-replayer"
diff --git a/scripts/debian/publish.sh b/scripts/debian/publish.sh
index f20d9b53d33..0ac6c73e79d 100755
--- a/scripts/debian/publish.sh
+++ b/scripts/debian/publish.sh
@@ -34,8 +34,8 @@ if [[ -z "$DEB_CODENAME" ]]; then usage "Codename is not set!"; fi;
if [[ -z "$DEB_RELEASE" ]]; then usage "Release is not set!"; fi;
-BUCKET_ARG='--bucket packages.o1test.net'
-S3_REGION_ARG='--s3-region=us-west-2'
+BUCKET_ARG="--bucket=packages.o1test.net"
+S3_REGION_ARG="--s3-region=us-west-2"
# utility for publishing deb repo with commons options
# deb-s3 https://github.com/krobertson/deb-s3
#NOTE: Do not remove --lock flag otherwise racing deb uploads may overwrite the registry and some files will be lost. If a build fails with the following error, delete the lock file https://packages.o1test.net/dists/unstable/main/binary-/lockfile and rebuild
@@ -52,33 +52,54 @@ DEBS3_UPLOAD="deb-s3 upload $BUCKET_ARG $S3_REGION_ARG \
echo "Publishing debs: ${DEB_NAMES} to Release: ${DEB_RELEASE} and Codename: ${DEB_CODENAME}"
# Upload the deb files to s3.
# If this fails, attempt to remove the lockfile and retry.
-for i in {1..10}; do (
+for _ in {1..10}; do (
${DEBS3_UPLOAD} \
--component "${DEB_RELEASE}" \
--codename "${DEB_CODENAME}" \
"${DEB_NAMES}"
) && break || scripts/debian/clear-s3-lockfile.sh; done
-# Verify integrity of debs on remote repo
-function verify_o1test_repo_has_package {
- sudo apt-get update
- ${DEBS3_SHOW} ${1} ${DEB_VERSION} $ARCH -c $DEB_CODENAME -m $DEB_RELEASE
- return $?
-}
+debs=()
for deb in $DEB_NAMES
do
- echo "Adding packages.o1test.net $DEB_CODENAME $DEB_RELEASE"
- sudo echo "deb [trusted=yes] http://packages.o1test.net $DEB_CODENAME $DEB_RELEASE" | sudo tee /etc/apt/sources.list.d/mina.list
+ # extracting name from debian package path. E.g:
+ # _build/mina-archive_3.0.1-develop-a2a872a.deb -> mina-archive
+ deb=$(basename "$deb")
+ deb="${deb%_*}"
+ debs+=("$deb")
+done
- DEBS3_SHOW="deb-s3 show $BUCKET_ARG $S3_REGION_ARG"
+function join_by { local IFS="$1"; shift; echo "$*"; }
- deb_split=(${deb//_/ })
- deb="${deb_split[0]}"
- deb=$(basename $deb)
-
- for i in {1..10}; do (verify_o1test_repo_has_package $deb) && break || sleep 60; done
+tries=10
+counter=0
-done
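+# Poll the repository until every uploaded debian is visible, retrying up to
+# $tries times with a 60s pause between attempts.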
+while (( ${#debs[@]} ))
+do
+ join=$(join_by " " "${debs[@]}")
+
+ IFS=$'\n'
+ output=$(deb-s3 exist $BUCKET_ARG $S3_REGION_ARG "$join" $DEB_VERSION $ARCH -c $DEB_CODENAME -m $DEB_RELEASE)
+ debs=()
+ for item in $output; do
+ if [[ $item == *"Missing" ]]; then
+ key=$(echo "$item" | awk '{print $1}')
+ debs+=("$key")
+ fi
+ done
+
+ if [ ${#debs[@]} -eq 0 ]; then
+ echo "All debians are correctly published to our debian repository"
+ exit 0
+ fi
+ counter=$((counter+1))
+ if [[ $counter -eq $tries ]]; then
+ echo "Error: some debians are still not correctly published: $(join_by " " "${debs[@]}")"
+ echo "You may retry the job, as the debian repository is known to have intermittent performance issues"
+ exit 1
+ fi
+ sleep 60
+done
\ No newline at end of file
diff --git a/scripts/mina-local-network/memory_monitor.py b/scripts/mina-local-network/memory_monitor.py
new file mode 100755
index 00000000000..2b35b00fd8d
--- /dev/null
+++ b/scripts/mina-local-network/memory_monitor.py
@@ -0,0 +1,165 @@
+import argparse
+import csv
+import re
+import time
+
+import psutil
+
+class MinaProcess:
+ def __init__(self,node_name):
+ self.node_name = node_name
+ self.mina_process = "mina.exe"
+ self.metrics = {
+ "main":[],
+ "prover":[],
+ "verifier":[],
+ "vrf":[]
+ }
+
+ def headers(self):
+ return [self.node_name,f"{self.node_name}_prover", f"{self.node_name}_verifier", f"{self.node_name}_vrf"]
+
+ def get_node_name_process(self,p):
+ process_name = None
+ for arg in p.cmdline():
+ if m := re.match(r".*/nodes/(.*)/.*", arg):
+ process_name = m.group(1)
+ return process_name
+ return process_name
+
+ def is_mina_process(self,p):
+ try:
+ return ((self.mina_process in p.name() ) and
+ (self.node_name == self.get_node_name_process(p)) and
+ ("daemon" in p.cmdline()))
+ except psutil.Error:
+ return False
+
+
+ def append_vrf(self,value):
+ self.metrics["vrf"].append(value)
+ def append_verifier(self,value):
+ self.metrics["verifier"].append(value)
+ def append_prover(self,value):
+ self.metrics["prover"].append(value)
+
+ def append(self,value):
+ self.metrics["main"].append(value)
+
+ def append_zeroes(self):
+ self.append_vrf(0)
+ self.append_verifier(0)
+ self.append_prover(0)
+ self.append(0)
+
+ def metrics_values(self,row):
+ row_values = []
+ for values in self.metrics.values():
+ row_values.append(values[row])
+ return row_values
+
+ def len(self):
+ assert len(self.metrics["vrf"]) == len(self.metrics["prover"]) == len(self.metrics["verifier"]) == len(self.metrics["main"])
+ return len(self.metrics["vrf"])
+
+
+def convert_size(size_bytes):
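+ # bytes -> megabytes, returned as str since it goes straight into the csv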
+ return str(size_bytes/ 1024 / 1024)
+
+def processes(whales,fishes,nodes):
+ processes = ["seed","snark_coordinator"]
+ processes.extend([f"whale_{i}" for i in range(0,whales)])
+ processes.extend([f"fish_{i}" for i in range(0,fishes)])
+ processes.extend([f"node_{i}" for i in range(0,nodes)])
+
+ return list([MinaProcess(x) for x in processes])
+
+
+def write_header(file,columns):
+ with open(file, 'w') as csvfile:
+ writer = csv.writer(csvfile, delimiter=',',)
+ writer.writerow(columns)
+
+def write_line(file,columns):
+ with open(file, 'a') as csvfile:
+ writer = csv.writer(csvfile, delimiter=',',)
+ writer.writerow(columns)
+
+def main(whales,fishes,nodes,file,interval):
+
+ mina_processes = processes(whales,fishes,nodes)
+ headers = []
+
+ for x in mina_processes:
+ headers.extend(x.headers())
+
+ write_header(file, headers)
+
+ print("Press Ctrl +c to finish")
+
+ while True:
+ for x in mina_processes:
+ matches = list(filter(lambda p: x.is_mina_process(p), psutil.process_iter()))
+ if len(matches) == 0 :
+ x.append_zeroes()
+ else:
+ p = matches[0]
+ try:
+ children = sorted(p.children(), key=lambda x: x.create_time())
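+ # Assumption: the daemon spawns its prover, verifier and
+ # vrf-evaluator subprocesses in that order, so sorting by
+ # creation time lets us index them below.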
+
+ def get_mem_for_child_or_default(child):
+ if not child:
+ return 0
+ else:
+ return convert_size(child.memory_info()[0])
+
+ if len(children) > 2 :
+ x.append_vrf(get_mem_for_child_or_default(children[2]))
+ x.append_verifier(get_mem_for_child_or_default(children[1]))
+ x.append_prover(get_mem_for_child_or_default(children[0]))
+ elif len(children) > 1:
+ x.append_vrf(0)
+ x.append_verifier(get_mem_for_child_or_default(children[1]))
+ x.append_prover(get_mem_for_child_or_default(children[0]))
+ elif len(children) > 0:
+ x.append_vrf(0)
+ x.append_verifier(0)
+ x.append_prover(get_mem_for_child_or_default(children[0]))
+ else:
+ x.append_vrf(0)
+ x.append_verifier(0)
+ x.append_prover(0)
+
+ x.append(convert_size(p.memory_info()[0]))
+
+ except (psutil.NoSuchProcess, psutil.ZombieProcess):
+ pass
+
+ last_row = mina_processes[0].len()
+ row = []
+ for proc in mina_processes:
+ row.extend(proc.metrics_values(last_row -1))
+ write_line(file, row)
+
+ time.sleep(interval)
+
+
+if __name__ == '__main__':
+ parser = argparse.ArgumentParser(
+ prog='local network metrics',
+ description='Measure memory usage of local network mina processes')
+
+ parser.add_argument('-o', '--output-file', default="metrics.csv")
+ parser.add_argument('-w', '--whales', default=2, type=int)
+ parser.add_argument('-f', '--fishes', default=1, type=int)
+ parser.add_argument('-n', '--nodes', default=1, type=int)
+ parser.add_argument('-i', '--interval', type=float, default=0.5)
+
+ args = parser.parse_args()
+
+ main(args.whales,args.fishes,args.nodes,args.output_file,args.interval)
\ No newline at end of file
diff --git a/scripts/patch-archive-test.sh b/scripts/patch-archive-test.sh
new file mode 100755
index 00000000000..48e1c61861a
--- /dev/null
+++ b/scripts/patch-archive-test.sh
@@ -0,0 +1,17 @@
+#!/bin/bash
+
+set -x
+# test patch archive on a known archive db
+
+NETWORK_DATA_FOLDER=src/test/archive/sample_db
+PATCH_ARCHIVE_TEST_APP=${PATCH_ARCHIVE_TEST_APP:-_build/default/src/test/archive/patch_archive_test/patch_archive_test.exe}
+PG_PORT=${PG_PORT:-5432}
+POSTGRES_USER=${POSTGRES_USER:-postgres}
+POSTGRES_PASSWORD=${POSTGRES_PASSWORD:-postgres}
+
+CONN=postgres://${POSTGRES_USER}:${POSTGRES_PASSWORD}@localhost:${PG_PORT}
+
+
+echo "Running patch archive test"
+$PATCH_ARCHIVE_TEST_APP --source-uri "$CONN" \
+ --network-data-folder "$NETWORK_DATA_FOLDER"
diff --git a/scripts/replayer-test.sh b/scripts/replayer-test.sh
index b75c32f4b08..8b6f511295b 100755
--- a/scripts/replayer-test.sh
+++ b/scripts/replayer-test.sh
@@ -5,7 +5,7 @@ set -x
INPUT_FILE=src/test/archive/sample_db/replayer_input_file.json
REPLAYER_APP=_build/default/src/app/replayer/replayer.exe
-PG_CONN=postgres://postgres:postgres@localhost:5433/archive
+PG_CONN=postgres://postgres:postgres@localhost:5432/archive
while [[ "$#" -gt 0 ]]; do case $1 in
-i|--input-file) INPUT_FILE="$2"; shift;;
diff --git a/scripts/snark_transaction_profiler.py b/scripts/snark_transaction_profiler.py
deleted file mode 100755
index d21a544e765..00000000000
--- a/scripts/snark_transaction_profiler.py
+++ /dev/null
@@ -1,68 +0,0 @@
-#!/usr/bin/env python3
-
-# script run transaction snark profiler
-
-import subprocess
-import sys
-import os
-import argparse
-import json
-import re
-
-prog = 'mina'
-
-def parse_stats (output) :
-
- print(output)
-
- lines = output.split ('\n')
-
- stats = []
-
- for line in lines :
- if line == '' :
- continue
-
- compile = 'Generated zkapp transactions with (?P<updates>\d+) updates and (?P<proof>\d+) proof updates in (?P<time>[0-9]*[.]?[0-9]+) secs'
-
- match = re.match(compile, line)
-
- if match :
- updates = int(match.group('updates'))
- proof = int(match.group('proof'))
- time = float(match.group('time'))
-
- stats.append((updates, proof, time))
-
- return stats
-
-if __name__ == "__main__":
- if len(sys.argv) != 4 :
- print("Usage: %s k max-num-updates min-num-updates" % sys.argv[0], file=sys.stderr)
- sys.exit(1)
-
- k=sys.argv[1]
- max_num_updates=sys.argv[2]
- min_num_updates=sys.argv[3]
-
- args = " ".join([prog,
- "transaction-snark-profiler",
- "--zkapps",
- "--k",
- k,
- "--max-num-updates",
- max_num_updates,
- "--min-num-updates",
- min_num_updates])
-
-
- print(f'running snark transaction profiler: {args}')
- (process_exit_code,output) = subprocess.getstatusoutput(args)
-
- stats = parse_stats (output)
- #TODO: add code to check against some threshold
- print(stats)
-
- if not process_exit_code == 0:
- print('non-zero exit code from program, failing build')
- sys.exit(1)
\ No newline at end of file
diff --git a/scripts/version-linter.py b/scripts/version-linter.py
index 0274a1f1c3f..a3b7e4763fb 100755
--- a/scripts/version-linter.py
+++ b/scripts/version-linter.py
@@ -19,8 +19,6 @@
"""
import subprocess
-import os
-import io
import sys
import re
import sexpdata
@@ -32,31 +30,38 @@ def set_error():
global exit_code
exit_code=1
-def branch_commit(branch):
+def latest_branch_commit(branch):
+ '''
+ Retrieves the latest commit on the given branch
+ '''
print ('Retrieving', branch, 'head commit...')
result=subprocess.run(['git','log','-n','1','--format="%h"','--abbrev=7',f'{branch}'],
capture_output=True)
output=result.stdout.decode('ascii')
print ('command stdout:', output)
print ('command stderr:', result.stderr.decode('ascii'))
- return output.replace('"','').replace('\n','')
-
-def download_type_shapes(role,branch,sha1) :
+ return output.replace('"','').strip()
+
+def url_to_type_shape_file(file):
+ '''
+ Return url to mina type shape file
+ '''
+ return f'https://storage.googleapis.com/mina-type-shapes/{file}'
+
+def url_exists(url):
+ '''
+ Checks if url exists (by sending a HEAD request and validating that the status code is OK)
+ '''
+ return requests.head(url).status_code == 200
+
+def download_type_shape(role,branch,sha1) :
file=type_shape_file(sha1)
- print ('Downloading type shape file',file,'for',role,'branch',branch,'at commit',sha1)
- url = f'https://storage.googleapis.com/mina-type-shapes/{file}'
- r = requests.head(url, allow_redirects=True)
- if r.status_code != 200:
- print ("cannot fetch file reference from non-existing path: ${url}")
- print ("looks like you need to generate it. Please use below steps")
- print (f"git checkout ${sha1}")
- print ("nix develop mina")
- print (f"dune exec src/app/cli/src/mina.exe internal dump-type-shape > ${sha1}-type_shape.txt")
- print ("gsutil cp gs://mina-type-shapes ${sha1}-type_shape.txt ")
+ url=url_to_type_shape_file(file)
+ if not url_exists(url):
+ raise Exception(f"reference file for '{sha1}' commit does not exists. Url does not exists {url} ")
- sys.exit(1)
-
- result=subprocess.run(['wget','--no-clobber',url])
+ print ('Downloading type shape file',file,'for',role,'branch',branch,'at commit',sha1)
+ subprocess.run(['wget','--no-clobber',url], check=True)
def type_shape_file(sha1) :
# created by buildkite build-artifact script
@@ -250,18 +255,18 @@ def assert_commit(commit, desc):
subprocess.run(['git','fetch'],capture_output=False)
- base_branch_commit=branch_commit(base_branch)
- download_type_shapes('base',base_branch,base_branch_commit)
+ base_branch_commit = latest_branch_commit(base_branch)
+ download_type_shape('base',base_branch,base_branch_commit)
print('')
- release_branch_commit=branch_commit(release_branch)
- download_type_shapes('release',release_branch,release_branch_commit)
+ release_branch_commit=latest_branch_commit(release_branch)
+ download_type_shape('release',release_branch,release_branch_commit)
print('')
- pr_branch_commit=branch_commit(pr_branch)
- download_type_shapes('pr',pr_branch,pr_branch_commit)
+ pr_branch_commit=latest_branch_commit(pr_branch)
+ download_type_shape('pr',pr_branch,pr_branch_commit)
print('')
diff --git a/scripts/zkapp_metrics.sh b/scripts/zkapp_metrics.sh
deleted file mode 100755
index c6fd22721d9..00000000000
--- a/scripts/zkapp_metrics.sh
+++ /dev/null
@@ -1,16 +0,0 @@
-#!/bin/sh
-
-set -e
-
-echo "Building heap usage app"
-make heap_usage
-
-echo "Running heap usage app"
-./_build/default/src/app/heap_usage/heap_usage.exe
-
-
-echo "Building zkapp limits app"
-make zkapp_limits
-
-echo "Running zkapp limits app"
-./_build/default/src/app/zkapp_limits/zkapp_limits.exe
\ No newline at end of file
diff --git a/src/app/archive/cli/archive_cli.ml b/src/app/archive/cli/archive_cli.ml
index d9835087e91..5ff9c6fa9f3 100644
--- a/src/app/archive/cli/archive_cli.ml
+++ b/src/app/archive/cli/archive_cli.ml
@@ -38,26 +38,38 @@ let command_run =
"int Delete blocks that are more than n blocks lower than the \
maximum seen block."
in
- let runtime_config_opt =
- Option.map runtime_config_file ~f:(fun file ->
- Yojson.Safe.from_file file |> Runtime_config.of_yojson
- |> Result.ok_or_failwith )
- in
fun () ->
let logger = Logger.create () in
- let genesis_constants = Genesis_constants.Compiled.genesis_constants in
- let constraint_constants =
- Genesis_constants.Compiled.constraint_constants
+ let open Deferred.Let_syntax in
+ let%bind config =
+ Runtime_config.Json_loader.load_config_files ~logger
+ (Option.to_list runtime_config_file)
+ |> Deferred.Or_error.ok_exn
+ in
+ let constants = Runtime_config.Constants.load_constants' config in
+ let%bind precomputed_values_opt =
+ match runtime_config_file with
+ | None ->
+ return None
+ | Some _ ->
+ Deferred.Or_error.(
+ Genesis_ledger_helper.Config_loader.init_from_config_file ~logger
+ ~constants config
+ >>| fun (a, _) -> Option.some a)
+ |> Deferred.Or_error.ok_exn
in
Stdout_log.setup log_json log_level ;
[%log info] "Starting archive process; built with commit $commit"
~metadata:[ ("commit", `String Mina_version.commit_id) ] ;
Archive_lib.Processor.setup_server ~metrics_server_port ~logger
- ~genesis_constants ~constraint_constants
+ ~genesis_constants:
+ (Runtime_config.Constants.genesis_constants constants)
+ ~constraint_constants:
+ (Runtime_config.Constants.constraint_constants constants)
~postgres_address:postgres.value
~server_port:
(Option.value server_port.value ~default:server_port.default)
- ~delete_older_than ~runtime_config_opt ~missing_blocks_width )
+ ~delete_older_than ~precomputed_values_opt ~missing_blocks_width )
let time_arg =
(* Same timezone as Genesis_constants.genesis_state_timestamp. *)
diff --git a/src/app/archive/lib/processor.ml b/src/app/archive/lib/processor.ml
index e8452d413da..9396084b92b 100644
--- a/src/app/archive/lib/processor.ml
+++ b/src/app/archive/lib/processor.ml
@@ -4728,26 +4728,12 @@ let run pool reader ~genesis_constants ~constraint_constants ~logger
Deferred.unit )
(* [add_genesis_accounts] is called when starting the archive process *)
-let add_genesis_accounts ~logger ~(runtime_config_opt : Runtime_config.t option)
- ~(genesis_constants : Genesis_constants.t)
- ~(constraint_constants : Genesis_constants.Constraint_constants.t) pool =
- match runtime_config_opt with
+let add_genesis_accounts ~logger
+ ~(precomputed_values_opt : Precomputed_values.t option) pool =
+ match precomputed_values_opt with
| None ->
Deferred.unit
- | Some runtime_config -> (
- let%bind precomputed_values =
- match%map
- Genesis_ledger_helper.init_from_config_file ~logger
- ~proof_level:Genesis_constants.Compiled.proof_level
- ~genesis_constants ~constraint_constants runtime_config
- ~cli_proof_level:None
- with
- | Ok (precomputed_values, _) ->
- precomputed_values
- | Error err ->
- failwithf "Could not get precomputed values, error: %s"
- (Error.to_string_hum err) ()
- in
+ | Some precomputed_values -> (
let ledger =
Precomputed_values.genesis_ledger precomputed_values |> Lazy.force
in
@@ -4767,7 +4753,8 @@ let add_genesis_accounts ~logger ~(runtime_config_opt : Runtime_config.t option)
let%bind.Deferred.Result genesis_block_id =
Block.add_if_doesn't_exist
(module Conn)
- ~constraint_constants genesis_block
+ ~constraint_constants:precomputed_values.constraint_constants
+ genesis_block
in
let%bind.Deferred.Result { ledger_hash; _ } =
Block.load (module Conn) ~id:genesis_block_id
@@ -4874,7 +4861,7 @@ let create_metrics_server ~logger ~metrics_server_port ~missing_blocks_width
let setup_server ~(genesis_constants : Genesis_constants.t)
~(constraint_constants : Genesis_constants.Constraint_constants.t)
~metrics_server_port ~logger ~postgres_address ~server_port
- ~delete_older_than ~runtime_config_opt ~missing_blocks_width =
+ ~delete_older_than ~precomputed_values_opt ~missing_blocks_width =
let where_to_listen =
Async.Tcp.Where_to_listen.bind_to All_addresses (On_port server_port)
in
@@ -4903,10 +4890,7 @@ let setup_server ~(genesis_constants : Genesis_constants.t)
~metadata:[ ("error", `String (Caqti_error.show e)) ] ;
Deferred.unit
| Ok pool ->
- let%bind () =
- add_genesis_accounts pool ~logger ~genesis_constants
- ~constraint_constants ~runtime_config_opt
- in
+ let%bind () = add_genesis_accounts pool ~logger ~precomputed_values_opt in
run ~constraint_constants ~genesis_constants pool reader ~logger
~delete_older_than
|> don't_wait_for ;
diff --git a/src/app/archive/lib/test.ml b/src/app/archive/lib/test.ml
index 8600c231ecf..474cb0fab9a 100644
--- a/src/app/archive/lib/test.ml
+++ b/src/app/archive/lib/test.ml
@@ -20,10 +20,8 @@ let%test_module "Archive node unit tests" =
let verifier =
Async.Thread_safe.block_on_async_exn (fun () ->
- Verifier.create ~logger ~proof_level ~constraint_constants
- ~conf_dir:None
- ~pids:(Child_processes.Termination.create_pid_table ())
- ~commit_id:"not specified for unit tests" () )
+ Verifier.For_tests.default ~constraint_constants ~logger ~proof_level
+ () )
module Genesis_ledger = (val Genesis_ledger.for_unit_tests)
diff --git a/src/app/batch_txn_tool/batch_txn_tool.ml b/src/app/batch_txn_tool/batch_txn_tool.ml
index 79741438d56..9331c5a4207 100644
--- a/src/app/batch_txn_tool/batch_txn_tool.ml
+++ b/src/app/batch_txn_tool/batch_txn_tool.ml
@@ -154,11 +154,10 @@ let there_and_back_again ~num_txn_per_acct ~txns_per_block ~slot_time ~fill_rate
~origin_sender_secret_key_path
~(origin_sender_secret_key_pw_option : string option)
~returner_secret_key_path ~(returner_secret_key_pw_option : string option)
- ~graphql_target_node_option ~minimum_user_command_fee () =
+ ~graphql_target_node_option ~minimum_user_command_fee ~logger () =
let open Deferred.Let_syntax in
(* define the rate limiting function *)
let open Logger in
- let logger = Logger.create () in
let limit_level =
let slot_limit =
Float.(
@@ -310,8 +309,6 @@ let there_and_back_again ~num_txn_per_acct ~txns_per_block ~slot_time ~fill_rate
return ()
let output_there_and_back_cmds =
- let genesis_constants = Genesis_constants.Compiled.genesis_constants in
- let compile_config = Mina_compile_config.Compiled.t in
let open Command.Let_syntax in
Command.async
~summary:
@@ -390,23 +387,32 @@ let output_there_and_back_cmds =
transactions, if this is not present then we use the env var \
MINA_PRIVKEY_PASS"
(optional string)
+ and config_file = Cli_lib.Flag.config_files
and graphql_target_node_option =
flag "--graphql-target-node" ~aliases:[ "graphql-target-node" ]
~doc:
"URL The graphql node to send graphl commands to. must be in \
format `:`. default is `127.0.0.1:3085`"
(optional string)
- and minimum_user_command_fee =
- let default = compile_config.default_transaction_fee in
- Cli_lib.Flag.fee_common
- ~minimum_user_command_fee:genesis_constants.minimum_user_command_fee
- ~default_transaction_fee:default
- in
- there_and_back_again ~num_txn_per_acct ~txns_per_block ~txn_fee_option
- ~slot_time ~fill_rate ~rate_limit ~rate_limit_level ~rate_limit_interval
- ~origin_sender_secret_key_path ~origin_sender_secret_key_pw_option
- ~returner_secret_key_path ~returner_secret_key_pw_option
- ~graphql_target_node_option ~minimum_user_command_fee )
+ and minimum_user_command_fee_opt = Cli_lib.Flag.fee_common in
+ fun () ->
+ let open Deferred.Let_syntax in
+ let logger = Logger.create () in
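+ (* Resolve the fee: prefer the --fee flag when given, otherwise fall
+ back to minimum_user_command_fee from the loaded runtime config. *)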
+ let%bind minimum_user_command_fee =
+ let%map conf =
+ Runtime_config.Constants.load_constants ~logger config_file
+ in
+ Option.value
+ ~default:
+ (Runtime_config.Constants.genesis_constants conf)
+ .minimum_user_command_fee minimum_user_command_fee_opt
+ in
+ there_and_back_again ~num_txn_per_acct ~txns_per_block ~txn_fee_option
+ ~slot_time ~fill_rate ~rate_limit ~rate_limit_level
+ ~rate_limit_interval ~origin_sender_secret_key_path
+ ~origin_sender_secret_key_pw_option ~returner_secret_key_path
+ ~returner_secret_key_pw_option ~graphql_target_node_option
+ ~minimum_user_command_fee ~logger () )
let () =
Command.run
diff --git a/src/app/cli/src/cli_entrypoint/mina_cli_entrypoint.ml b/src/app/cli/src/cli_entrypoint/mina_cli_entrypoint.ml
index 06ddf962120..0b2e187db3d 100644
--- a/src/app/cli/src/cli_entrypoint/mina_cli_entrypoint.ml
+++ b/src/app/cli/src/cli_entrypoint/mina_cli_entrypoint.ml
@@ -49,51 +49,19 @@ let plugin_flag =
times"
else Command.Param.return []
-let load_config_files ~logger ~genesis_constants ~constraint_constants ~conf_dir
- ~genesis_dir ~cli_proof_level ~proof_level (config_files : string list) =
- let open Deferred.Or_error.Let_syntax in
- let genesis_dir = Option.value ~default:(conf_dir ^/ "genesis") genesis_dir in
- let%bind config =
- Runtime_config.Json_loader.load_config_files ~conf_dir ~logger config_files
- in
- match%bind.Deferred
- Genesis_ledger_helper.init_from_config_file ~cli_proof_level ~genesis_dir
- ~logger ~genesis_constants ~constraint_constants ~proof_level config
- with
- | Ok a ->
- return a
- | Error err ->
- let ( json_config
- , `Accounts_omitted
- ( `Genesis genesis_accounts_omitted
- , `Staking staking_accounts_omitted
- , `Next next_accounts_omitted ) ) =
- Runtime_config.to_yojson_without_accounts config
- in
- let append_accounts_omitted s =
- Option.value_map
- ~f:(fun i -> List.cons (s ^ "_accounts_omitted", `Int i))
- ~default:Fn.id
- in
- let metadata =
- append_accounts_omitted "genesis" genesis_accounts_omitted
- @@ append_accounts_omitted "staking" staking_accounts_omitted
- @@ append_accounts_omitted "next" next_accounts_omitted []
- @ [ ("config", json_config)
- ; ( "name"
- , `String
- (Option.value ~default:"not provided"
- (let%bind.Option ledger = config.ledger in
- Option.first_some ledger.name ledger.hash ) ) )
- ; ("error", Error_json.error_to_yojson err)
- ]
- in
- [%log info]
- "Initializing with runtime configuration. Ledger source: $name"
- ~metadata ;
- Error.raise err
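+(* When ITN features are enabled, wrap the logger with an ITN logger config
+   built from the compile-config RPC handshake/heartbeat timeouts. *)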
+let with_itn_logger ~itn_features ~(compile_config : Mina_compile_config.t)
+ ~logger =
+ if itn_features then
+ let conf =
+ Logger.make_itn_logger_config
+ ~rpc_handshake_timeout:compile_config.rpc_handshake_timeout
+ ~rpc_heartbeat_timeout:compile_config.rpc_heartbeat_timeout
+ ~rpc_heartbeat_send_every:compile_config.rpc_heartbeat_send_every
+ in
+ Logger.with_itn conf logger
+ else logger
-let setup_daemon logger ~itn_features ~default_snark_worker_fee =
+let setup_daemon logger ~itn_features =
let open Command.Let_syntax in
let open Cli_lib.Arg_type in
let receiver_key_warning = Cli_lib.Default.receiver_key_warning in
@@ -262,8 +230,7 @@ let setup_daemon logger ~itn_features ~default_snark_worker_fee =
~doc:
(sprintf
"FEE Amount a worker wants to get compensated for generating a \
- snark proof (default: %d)"
- (Currency.Fee.to_nanomina_int default_snark_worker_fee) )
+ snark proof" )
(optional txn_fee)
and work_reassignment_wait =
flag "--work-reassignment-wait"
@@ -686,22 +653,16 @@ let setup_daemon logger ~itn_features ~default_snark_worker_fee =
in
let pids = Child_processes.Termination.create_pid_table () in
let mina_initialization_deferred () =
- let genesis_constants =
- Genesis_constants.Compiled.genesis_constants
- in
- let constraint_constants =
- Genesis_constants.Compiled.constraint_constants
- in
- let compile_config = Mina_compile_config.Compiled.t in
let%bind precomputed_values, config =
- load_config_files ~logger ~conf_dir ~genesis_dir
- ~proof_level:Genesis_constants.Compiled.proof_level config_files
- ~genesis_constants ~constraint_constants ~cli_proof_level
+ Genesis_ledger_helper.Config_loader.load_config_files ~logger
+ ~conf_dir ?genesis_dir ?cli_proof_level ~itn_features config_files
|> Deferred.Or_error.ok_exn
in
-
- constraint_constants.block_window_duration_ms |> Float.of_int
- |> Time.Span.of_ms |> Mina_metrics.initialize_all ;
+ let constraint_constants = precomputed_values.consensus_constants in
+ let compile_config = precomputed_values.compile_config in
+ let logger = with_itn_logger ~itn_features ~compile_config ~logger in
+ constraint_constants.block_window_duration_ms |> Block_time.Span.to_ms
+ |> Float.of_int64 |> Time.Span.of_ms |> Mina_metrics.initialize_all ;
let module DC = Runtime_config.Daemon in
(* The explicit typing here is necessary to prevent type inference from specializing according
@@ -1205,7 +1166,6 @@ Pass one of -peer, -peer-list-file, -seed, -peer-list-url.|} ;
; time_controller
; pubsub_v1
; pubsub_v0
- ; block_window_duration = compile_config.block_window_duration
}
in
let net_config =
@@ -1378,12 +1338,9 @@ Pass one of -peer, -peer-list-file, -seed, -peer-list-url.|} ;
let () = Mina_plugins.init_plugins ~logger mina plugins in
return mina )
-let daemon logger =
- let compile_config = Mina_compile_config.Compiled.t in
+let daemon logger ~itn_features =
Command.async ~summary:"Mina daemon"
- (Command.Param.map
- (setup_daemon logger ~itn_features:compile_config.itn_features
- ~default_snark_worker_fee:compile_config.default_snark_worker_fee )
+ (Command.Param.map (setup_daemon logger ~itn_features)
~f:(fun setup_daemon () ->
(* Immediately disable updating the time offset. *)
Block_time.Controller.disable_setting_offset () ;
@@ -1392,7 +1349,7 @@ let daemon logger =
[%log info] "Daemon ready. Clients can now connect" ;
Async.never () ) )
-let replay_blocks logger =
+let replay_blocks ~itn_features logger =
let replay_flag =
let open Command.Param in
flag "--blocks-filename" ~aliases:[ "-blocks-filename" ] (required string)
@@ -1403,11 +1360,9 @@ let replay_blocks logger =
flag "--format" ~aliases:[ "-format" ] (optional string)
~doc:"json|sexp The format to read lines of the file in (default: json)"
in
- let compile_config = Mina_compile_config.Compiled.t in
Command.async ~summary:"Start mina daemon with blocks replayed from a file"
(Command.Param.map3 replay_flag read_kind
- (setup_daemon logger ~itn_features:compile_config.itn_features
- ~default_snark_worker_fee:compile_config.default_snark_worker_fee )
+ (setup_daemon logger ~itn_features)
~f:(fun blocks_filename read_kind setup_daemon () ->
(* Enable updating the time offset. *)
Block_time.Controller.enable_setting_offset () ;
@@ -1599,34 +1554,38 @@ let snark_hashes =
let json = Cli_lib.Flag.json in
fun () -> if json then Core.printf "[]\n%!"]
-let internal_commands logger =
+let internal_commands ~itn_features logger =
[ ( Snark_worker.Intf.command_name
- , Snark_worker.command ~proof_level:Genesis_constants.Compiled.proof_level
- ~constraint_constants:Genesis_constants.Compiled.constraint_constants
- ~commit_id:Mina_version.commit_id )
+ , Snark_worker.command ~commit_id:Mina_version.commit_id )
; ("snark-hashes", snark_hashes)
; ( "run-prover"
, Command.async
~summary:"Run prover on a sexp provided on a single line of stdin"
- (Command.Param.return (fun () ->
- let logger = Logger.create () in
- let constraint_constants =
- Genesis_constants.Compiled.constraint_constants
- in
- let proof_level = Genesis_constants.Compiled.proof_level in
- Parallel.init_master () ;
- match%bind Reader.read_sexp (Lazy.force Reader.stdin) with
- | `Ok sexp ->
- let%bind conf_dir = Unix.mkdtemp "/tmp/mina-prover" in
- [%log info] "Prover state being logged to %s" conf_dir ;
- let%bind prover =
- Prover.create ~commit_id:Mina_version.commit_id ~logger
- ~proof_level ~constraint_constants
- ~pids:(Pid.Table.create ()) ~conf_dir ()
- in
- Prover.prove_from_input_sexp prover sexp >>| ignore
- | `Eof ->
- failwith "early EOF while reading sexp" ) ) )
+ (let open Command.Let_syntax in
+ let%map_open config_file = Cli_lib.Flag.config_files in
+ fun () ->
+ let open Deferred.Let_syntax in
+ let%bind constraint_constants, proof_level, compile_config =
+ let%map conf =
+ Runtime_config.Constants.load_constants ~logger config_file
+ in
+ Runtime_config.Constants.
+ (constraint_constants conf, proof_level conf, compile_config conf)
+ in
+ let logger = with_itn_logger ~itn_features ~compile_config ~logger in
+ Parallel.init_master () ;
+ match%bind Reader.read_sexp (Lazy.force Reader.stdin) with
+ | `Ok sexp ->
+ let%bind conf_dir = Unix.mkdtemp "/tmp/mina-prover" in
+ [%log info] "Prover state being logged to %s" conf_dir ;
+ let%bind prover =
+ Prover.create ~commit_id:Mina_version.commit_id ~logger
+ ~proof_level ~constraint_constants ~pids:(Pid.Table.create ())
+ ~conf_dir ()
+ in
+ Prover.prove_from_input_sexp prover sexp >>| ignore
+ | `Eof ->
+ failwith "early EOF while reading sexp") )
; ( "run-snark-worker-single"
, Command.async
~summary:"Run snark-worker on a sexp provided on a single line of stdin"
@@ -1634,14 +1593,18 @@ let internal_commands logger =
let%map_open filename =
flag "--file" (required string)
~doc:"File containing the s-expression of the snark work to execute"
- in
+ and config_file = Cli_lib.Flag.config_files in
+
fun () ->
let open Deferred.Let_syntax in
- let logger = Logger.create () in
- let constraint_constants =
- Genesis_constants.Compiled.constraint_constants
+ let%bind constraint_constants, proof_level, compile_config =
+ let%map conf =
+ Runtime_config.Constants.load_constants ~logger config_file
+ in
+ Runtime_config.Constants.
+ (constraint_constants conf, proof_level conf, compile_config conf)
in
- let proof_level = Genesis_constants.Compiled.proof_level in
+ let logger = with_itn_logger ~itn_features ~compile_config ~logger in
Parallel.init_master () ;
match%bind
Reader.with_file filename ~f:(fun reader ->
@@ -1688,14 +1651,17 @@ let internal_commands logger =
and limit =
flag "--limit" ~aliases:[ "-limit" ] (optional int)
~doc:"limit the number of proofs taken from the file"
- in
+ and config_file = Cli_lib.Flag.config_files in
fun () ->
let open Async in
- let logger = Logger.create () in
- let constraint_constants =
- Genesis_constants.Compiled.constraint_constants
+ let%bind constraint_constants, proof_level, compile_config =
+ let%map conf =
+ Runtime_config.Constants.load_constants ~logger config_file
+ in
+ Runtime_config.Constants.
+ (constraint_constants conf, proof_level conf, compile_config conf)
in
- let proof_level = Genesis_constants.Compiled.proof_level in
+ let logger = with_itn_logger ~itn_features ~compile_config ~logger in
Parallel.init_master () ;
let%bind conf_dir = Unix.mkdtemp "/tmp/mina-verifier" in
let mode =
@@ -1770,10 +1736,11 @@ let internal_commands logger =
| Error err ->
failwithf "Could not parse JSON: %s" err () ) )
in
+
let%bind verifier =
- Verifier.create ~commit_id:Mina_version.commit_id ~logger
- ~proof_level ~constraint_constants ~pids:(Pid.Table.create ())
- ~conf_dir:(Some conf_dir) ()
+ Verifier.For_tests.default ~constraint_constants ~proof_level
+ ~commit_id:Mina_version.commit_id ~logger
+ ~pids:(Pid.Table.create ()) ~conf_dir:(Some conf_dir) ()
in
let%bind result =
let cap lst =
@@ -1831,18 +1798,12 @@ let internal_commands logger =
() ) ;
Deferred.return ()) )
; ("dump-type-shapes", dump_type_shapes)
- ; ("replay-blocks", replay_blocks logger)
+ ; ("replay-blocks", replay_blocks ~itn_features logger)
; ("audit-type-shapes", audit_type_shapes)
; ( "test-genesis-block-generation"
, Command.async ~summary:"Generate a genesis proof"
(let open Command.Let_syntax in
- let%map_open config_files =
- flag "--config-file" ~aliases:[ "config-file" ]
- ~doc:
- "PATH path to a configuration file (overrides MINA_CONFIG_FILE, \
- default: <config_dir>/daemon.json). Pass multiple times to \
- override fields from earlier config files"
- (listed string)
+ let%map_open config_file = Cli_lib.Flag.config_files
and conf_dir = Cli_lib.Flag.conf_dir
and genesis_dir =
flag "--genesis-ledger-dir" ~aliases:[ "genesis-ledger-dir" ]
@@ -1854,28 +1815,24 @@ let internal_commands logger =
fun () ->
let open Deferred.Let_syntax in
Parallel.init_master () ;
- let logger = Logger.create () in
let conf_dir = Mina_lib.Conf_dir.compute_conf_dir conf_dir in
- let genesis_constants =
- Genesis_constants.Compiled.genesis_constants
- in
- let constraint_constants =
- Genesis_constants.Compiled.constraint_constants
- in
- let proof_level = Genesis_constants.Proof_level.Full in
let%bind precomputed_values, _ =
- load_config_files ~logger ~conf_dir ~genesis_dir ~genesis_constants
- ~constraint_constants ~proof_level config_files
- ~cli_proof_level:None
+ Genesis_ledger_helper.Config_loader.load_config_files ~logger
+ ~conf_dir ?genesis_dir ~cli_proof_level:Full ~itn_features
+ config_file
|> Deferred.Or_error.ok_exn
in
+ let logger =
+ with_itn_logger ~itn_features
+ ~compile_config:precomputed_values.compile_config ~logger
+ in
let pids = Child_processes.Termination.create_pid_table () in
let%bind prover =
(* We create a prover process (unnecessarily) here, to have a more
realistic test.
*)
Prover.create ~commit_id:Mina_version.commit_id ~logger ~pids
- ~conf_dir ~proof_level
+ ~conf_dir ~proof_level:precomputed_values.proof_level
~constraint_constants:precomputed_values.constraint_constants ()
in
match%bind
@@ -1895,13 +1852,14 @@ let internal_commands logger =
let mina_commands logger ~itn_features =
[ ("accounts", Client.accounts)
- ; ("daemon", daemon logger)
+ ; ("daemon", daemon ~itn_features logger)
; ("client", Client.client)
; ("advanced", Client.advanced ~itn_features)
; ("ledger", Client.ledger)
; ("libp2p", Client.libp2p)
; ( "internal"
- , Command.group ~summary:"Internal commands" (internal_commands logger) )
+ , Command.group ~summary:"Internal commands"
+ (internal_commands ~itn_features logger) )
; (Parallel.worker_command_name, Parallel.worker_command)
; ("transaction-snark-profiler", Transaction_snark_profiler.command)
]
@@ -1939,11 +1897,10 @@ let () =
| [| _mina_exe; version |] when is_version_cmd version ->
Mina_version.print_version ()
| _ ->
- let compile_config = Mina_compile_config.Compiled.t in
+ let itn_features = Mina_compile_config.Compiled.t.itn_features in
Command.run
(Command.group ~summary:"Mina" ~preserve_subcommand_order:()
- (mina_commands logger ~itn_features:compile_config.itn_features) )
- ) ;
+ (mina_commands logger ~itn_features) ) ) ;
Core.exit 0
let linkme = ()
diff --git a/src/app/cli/src/init/client.ml b/src/app/cli/src/init/client.ml
index ba7d52dccec..5f7813438dc 100644
--- a/src/app/cli/src/init/client.ml
+++ b/src/app/cli/src/init/client.ml
@@ -4,6 +4,10 @@ open Signature_lib
open Mina_base
open Mina_transaction
+(* TODO consider a better way of setting a default transaction fee than
+ a fixed compile-time value *)
+let default_transaction_fee = Currency.Fee.of_nanomina_int_exn 250000000
+
module Client = Graphql_lib.Client.Make (struct
let preprocess_variables_string = Fn.id
@@ -39,13 +43,20 @@ let or_error_str ~f_ok ~error = function
| Error e ->
sprintf "%s\n%s\n" error (Error.to_string_hum e)
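+(* Load the compile config from the given config files so the client
+   commands below can pass ~compile_config to Daemon_rpcs.Client dispatches. *)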
+let load_compile_config ?(logger = Logger.create ()) config_files =
+ let%map conf = Runtime_config.Constants.load_constants ~logger config_files in
+ Runtime_config.Constants.compile_config conf
+
let stop_daemon =
let open Deferred.Let_syntax in
let open Daemon_rpcs in
- let open Command.Param in
Command.async ~summary:"Stop the daemon"
- (Cli_lib.Background_daemon.rpc_init (return ()) ~f:(fun port () ->
- let%map res = Daemon_rpcs.Client.dispatch Stop_daemon.rpc () port in
+ (Cli_lib.Background_daemon.rpc_init Cli_lib.Flag.config_files
+ ~f:(fun port config_files ->
+ let%bind compile_config = load_compile_config config_files in
+ let%map res =
+ Daemon_rpcs.Client.dispatch ~compile_config Stop_daemon.rpc () port
+ in
printf "%s"
(or_error_str res
~f_ok:(fun _ -> "Daemon stopping\n")
@@ -168,12 +179,15 @@ let get_trust_status =
(required Cli_lib.Arg_type.ip_address)
in
let json_flag = Cli_lib.Flag.json in
- let flags = Args.zip2 address_flag json_flag in
+ let config_files = Cli_lib.Flag.config_files in
+ let flags = Args.zip3 config_files address_flag json_flag in
Command.async ~summary:"Get the trust status associated with an IP address"
- (Cli_lib.Background_daemon.rpc_init flags ~f:(fun port (ip_address, json) ->
+ (Cli_lib.Background_daemon.rpc_init flags
+ ~f:(fun port (config_files, ip_address, json) ->
+ let%bind compile_config = load_compile_config config_files in
match%map
- Daemon_rpcs.Client.dispatch Daemon_rpcs.Get_trust_status.rpc
- ip_address port
+ Daemon_rpcs.Client.dispatch ~compile_config
+ Daemon_rpcs.Get_trust_status.rpc ip_address port
with
| Ok statuses ->
print_trust_statuses
@@ -203,13 +217,16 @@ let get_trust_status_all =
~doc:"Only show trust statuses whose trust score is nonzero"
in
let json_flag = Cli_lib.Flag.json in
- let flags = Args.zip2 nonzero_flag json_flag in
+ let config_files = Cli_lib.Flag.config_files in
+ let flags = Args.zip3 config_files nonzero_flag json_flag in
Command.async
~summary:"Get trust statuses for all peers known to the trust system"
- (Cli_lib.Background_daemon.rpc_init flags ~f:(fun port (nonzero, json) ->
+ (Cli_lib.Background_daemon.rpc_init flags
+ ~f:(fun port (config_files, nonzero, json) ->
+ let%bind compile_config = load_compile_config config_files in
match%map
- Daemon_rpcs.Client.dispatch Daemon_rpcs.Get_trust_status_all.rpc ()
- port
+ Daemon_rpcs.Client.dispatch ~compile_config
+ Daemon_rpcs.Get_trust_status_all.rpc () port
with
| Ok ip_trust_statuses ->
(* always round the trust scores for display *)
@@ -240,12 +257,15 @@ let reset_trust_status =
(required Cli_lib.Arg_type.ip_address)
in
let json_flag = Cli_lib.Flag.json in
- let flags = Args.zip2 address_flag json_flag in
+ let config_files = Cli_lib.Flag.config_files in
+ let flags = Args.zip3 config_files address_flag json_flag in
Command.async ~summary:"Reset the trust status associated with an IP address"
- (Cli_lib.Background_daemon.rpc_init flags ~f:(fun port (ip_address, json) ->
+ (Cli_lib.Background_daemon.rpc_init flags
+ ~f:(fun port (config_files, ip_address, json) ->
+ let%bind compile_config = load_compile_config config_files in
match%map
- Daemon_rpcs.Client.dispatch Daemon_rpcs.Reset_trust_status.rpc
- ip_address port
+ Daemon_rpcs.Client.dispatch ~compile_config
+ Daemon_rpcs.Reset_trust_status.rpc ip_address port
with
| Ok status ->
print_trust_statuses status json
@@ -261,17 +281,19 @@ let get_public_keys =
~doc:"Show extra details (eg. balance, nonce) in addition to public keys"
in
let error_ctx = "Failed to get public-keys" in
+ let config_files = Cli_lib.Flag.config_files in
Command.async ~summary:"Get public keys"
(Cli_lib.Background_daemon.rpc_init
- (Args.zip2 with_details_flag Cli_lib.Flag.json)
- ~f:(fun port (is_balance_included, json) ->
+ (Args.zip3 config_files with_details_flag Cli_lib.Flag.json)
+ ~f:(fun port (config_files, is_balance_included, json) ->
+ let%bind compile_config = load_compile_config config_files in
if is_balance_included then
- Daemon_rpcs.Client.dispatch_pretty_message ~json
+ Daemon_rpcs.Client.dispatch_pretty_message ~compile_config ~json
~join_error:Or_error.join ~error_ctx
(module Cli_lib.Render.Public_key_with_details)
Get_public_keys_with_details.rpc () port
else
- Daemon_rpcs.Client.dispatch_pretty_message ~json
+ Daemon_rpcs.Client.dispatch_pretty_message ~compile_config ~json
~join_error:Or_error.join ~error_ctx
(module Cli_lib.Render.String_list_formatter)
Get_public_keys.rpc () port ) )
@@ -314,10 +336,13 @@ let verify_receipt =
~doc:"TOKEN_ID The token ID for the account"
(optional_with_default Token_id.default Cli_lib.Arg_type.token_id)
in
+ let config_files = Cli_lib.Flag.config_files in
Command.async ~summary:"Verify a receipt of a sent payment"
(Cli_lib.Background_daemon.rpc_init
- (Args.zip4 payment_path_flag proof_path_flag address_flag token_flag)
- ~f:(fun port (payment_path, proof_path, pk, token_id) ->
+ (Args.zip5 config_files payment_path_flag proof_path_flag address_flag
+ token_flag )
+ ~f:(fun port (config_files, payment_path, proof_path, pk, token_id) ->
+ let%bind compile_config = load_compile_config config_files in
let account_id = Account_id.create pk token_id in
let dispatch_result =
let open Deferred.Or_error.Let_syntax in
@@ -342,7 +367,7 @@ let verify_receipt =
~error:
(sprintf "Proof file %s has invalid json format" proof_path)
in
- Daemon_rpcs.Client.dispatch Verify_proof.rpc
+ Daemon_rpcs.Client.dispatch ~compile_config Verify_proof.rpc
(account_id, payment, proof)
port
in
@@ -354,13 +379,16 @@ let verify_receipt =
)
let get_nonce :
- rpc:(Account_id.t, Account.Nonce.t option Or_error.t) Rpc.Rpc.t
+ compile_config:Mina_compile_config.t
+ -> rpc:(Account_id.t, Account.Nonce.t option Or_error.t) Rpc.Rpc.t
-> Account_id.t
-> Host_and_port.t
-> (Account.Nonce.t, string) Deferred.Result.t =
- fun ~rpc account_id port ->
+ fun ~compile_config ~rpc account_id port ->
let open Deferred.Let_syntax in
- let%map res = Daemon_rpcs.Client.dispatch rpc account_id port in
+ let%map res =
+ Daemon_rpcs.Client.dispatch ~compile_config rpc account_id port
+ in
match Or_error.join res with
| Ok (Some n) ->
Ok n
@@ -382,12 +410,16 @@ let get_nonce_cmd =
~doc:"TOKEN_ID The token ID for the account"
(optional_with_default Token_id.default Cli_lib.Arg_type.token_id)
in
- let flags = Args.zip2 address_flag token_flag in
+ let config_files = Cli_lib.Flag.config_files in
+ let flags = Args.zip3 config_files address_flag token_flag in
Command.async ~summary:"Get the current nonce for an account"
- (Cli_lib.Background_daemon.rpc_init flags ~f:(fun port (pk, token_flag) ->
+ (Cli_lib.Background_daemon.rpc_init flags
+ ~f:(fun port (config_files, pk, token_flag) ->
let account_id = Account_id.create pk token_flag in
+ let%bind compile_config = load_compile_config config_files in
match%bind
- get_nonce ~rpc:Daemon_rpcs.Get_nonce.rpc account_id port
+ get_nonce ~compile_config ~rpc:Daemon_rpcs.Get_nonce.rpc account_id
+ port
with
| Error e ->
eprintf "Failed to get nonce\n%s\n" e ;
@@ -398,11 +430,16 @@ let get_nonce_cmd =
let status =
let open Daemon_rpcs in
- let flag = Args.zip2 Cli_lib.Flag.json Cli_lib.Flag.performance in
+ let flag =
+ Args.zip3 Cli_lib.Flag.config_files Cli_lib.Flag.json
+ Cli_lib.Flag.performance
+ in
Command.async ~summary:"Get running daemon status"
- (Cli_lib.Background_daemon.rpc_init flag ~f:(fun port (json, performance) ->
- Daemon_rpcs.Client.dispatch_pretty_message ~json ~join_error:Fn.id
- ~error_ctx:"Failed to get status"
+ (Cli_lib.Background_daemon.rpc_init flag
+ ~f:(fun port (config_files, json, performance) ->
+ let%bind compile_config = load_compile_config config_files in
+ Daemon_rpcs.Client.dispatch_pretty_message ~compile_config ~json
+ ~join_error:Fn.id ~error_ctx:"Failed to get status"
(module Daemon_rpcs.Types.Status)
Get_status.rpc
(if performance then `Performance else `None)
@@ -410,18 +447,24 @@ let status =
let status_clear_hist =
let open Daemon_rpcs in
- let flag = Args.zip2 Cli_lib.Flag.json Cli_lib.Flag.performance in
+ let flag =
+ Args.zip3 Cli_lib.Flag.config_files Cli_lib.Flag.json
+ Cli_lib.Flag.performance
+ in
Command.async ~summary:"Clear histograms reported in status"
- (Cli_lib.Background_daemon.rpc_init flag ~f:(fun port (json, performance) ->
- Daemon_rpcs.Client.dispatch_pretty_message ~json ~join_error:Fn.id
+ (Cli_lib.Background_daemon.rpc_init flag
+ ~f:(fun port (config_files, json, performance) ->
+ let%bind compile_config = load_compile_config config_files in
+ Daemon_rpcs.Client.dispatch_pretty_message ~compile_config ~json
+ ~join_error:Fn.id
~error_ctx:"Failed to clear histograms reported in status"
(module Daemon_rpcs.Types.Status)
Clear_hist_status.rpc
(if performance then `Performance else `None)
port ) )
-let get_nonce_exn ~rpc public_key port =
- match%bind get_nonce ~rpc public_key port with
+let get_nonce_exn ~compile_config ~rpc public_key port =
+ match%bind get_nonce ~compile_config ~rpc public_key port with
| Error e ->
eprintf "Failed to get nonce\n%s\n" e ;
exit 3
@@ -470,8 +513,9 @@ let batch_send_payments =
(List.init 3 ~f:(fun _ -> sample_info ())) ) ) ;
exit 5
in
- let main port (privkey_path, payments_path) =
+ let main port (config_files, privkey_path, payments_path) =
let open Deferred.Let_syntax in
+ let%bind compile_config = load_compile_config config_files in
let%bind keypair =
Secrets.Keypair.Terminal_stdin.read_exn ~which:"Mina keypair" privkey_path
and infos = get_infos payments_path in
@@ -486,8 +530,8 @@ let batch_send_payments =
~body:(Payment { receiver_pk; amount })
~sign_choice:(User_command_input.Sign_choice.Keypair keypair) () )
in
- Daemon_rpcs.Client.dispatch_with_message Daemon_rpcs.Send_user_commands.rpc
- ts port
+ Daemon_rpcs.Client.dispatch_with_message ~compile_config
+ Daemon_rpcs.Send_user_commands.rpc ts port
~success:(fun _ -> "Successfully enqueued payments in pool")
~error:(fun e ->
sprintf "Failed to send payments %s" (Error.to_string_hum e) )
@@ -495,7 +539,8 @@ let batch_send_payments =
in
Command.async ~summary:"Send multiple payments from a file"
(Cli_lib.Background_daemon.rpc_init
- (Args.zip2 Cli_lib.Flag.privkey_read_path payment_path_flag)
+ (Args.zip3 Cli_lib.Flag.config_files Cli_lib.Flag.privkey_read_path
+ payment_path_flag )
~f:main )
let transaction_id_to_string id =
@@ -513,14 +558,8 @@ let send_payment_graphql =
flag "--amount" ~aliases:[ "amount" ]
~doc:"VALUE Payment amount you want to send" (required txn_amount)
in
- let genesis_constants = Genesis_constants.Compiled.genesis_constants in
- let compile_config = Mina_compile_config.Compiled.t in
let args =
- Args.zip3
- (Cli_lib.Flag.signed_command_common
- ~minimum_user_command_fee:genesis_constants.minimum_user_command_fee
- ~default_transaction_fee:compile_config.default_transaction_fee )
- receiver_flag amount_flag
+ Args.zip3 Cli_lib.Flag.signed_command_common receiver_flag amount_flag
in
Command.async ~summary:"Send payment to an address"
(Cli_lib.Background_daemon.graphql_init args
@@ -528,6 +567,8 @@ let send_payment_graphql =
graphql_endpoint
({ Cli_lib.Flag.sender; fee; nonce; memo }, receiver, amount)
->
+ let open Deferred.Let_syntax in
+ let fee = Option.value ~default:default_transaction_fee fee in
let%map response =
let input =
Mina_graphql.Types.Input.SendPaymentInput.make_input ~to_:receiver
@@ -548,21 +589,15 @@ let delegate_stake_graphql =
~doc:"PUBLICKEY Public key to which you want to delegate your stake"
(required public_key_compressed)
in
- let genesis_constants = Genesis_constants.Compiled.genesis_constants in
- let compile_config = Mina_compile_config.Compiled.t in
- let args =
- Args.zip2
- (Cli_lib.Flag.signed_command_common
- ~minimum_user_command_fee:genesis_constants.minimum_user_command_fee
- ~default_transaction_fee:compile_config.default_transaction_fee )
- receiver_flag
- in
+ let args = Args.zip2 Cli_lib.Flag.signed_command_common receiver_flag in
Command.async ~summary:"Delegate your stake to another public key"
(Cli_lib.Background_daemon.graphql_init args
~f:(fun
graphql_endpoint
({ Cli_lib.Flag.sender; fee; nonce; memo }, receiver)
->
+ let open Deferred.Let_syntax in
+ let fee = Option.value ~default:default_transaction_fee fee in
let%map response =
Graphql_client.query_exn
Graphql_queries.Send_delegation.(
@@ -764,14 +799,19 @@ let export_ledger =
Command.Param.(anon (ledger_args %: t))
in
let plaintext_flag = Cli_lib.Flag.plaintext in
- let flags = Args.zip3 state_hash_flag plaintext_flag ledger_kind in
+ let flags =
+ Args.zip4 Cli_lib.Flag.config_files state_hash_flag plaintext_flag
+ ledger_kind
+ in
Command.async
~summary:
"Print the specified ledger (default: staged ledger at the best tip). \
Note: Exporting snarked ledger is an expensive operation and can take a \
few seconds"
(Cli_lib.Background_daemon.rpc_init flags
- ~f:(fun port (state_hash, plaintext, ledger_kind) ->
+ ~f:(fun port (config_files, state_hash, plaintext, ledger_kind) ->
+ let open Deferred.Let_syntax in
+ let%bind compile_config = load_compile_config config_files in
let check_for_state_hash () =
if Option.is_some state_hash then (
Format.eprintf "A state hash should not be given for %s@."
@@ -784,23 +824,25 @@ let export_ledger =
let state_hash =
Option.map ~f:State_hash.of_base58_check_exn state_hash
in
- Daemon_rpcs.Client.dispatch Daemon_rpcs.Get_ledger.rpc state_hash
- port
+ Daemon_rpcs.Client.dispatch ~compile_config
+ Daemon_rpcs.Get_ledger.rpc state_hash port
| "snarked-ledger" ->
let state_hash =
Option.map ~f:State_hash.of_base58_check_exn state_hash
in
printf
"Generating snarked ledger(this may take a few seconds)...\n" ;
- Daemon_rpcs.Client.dispatch Daemon_rpcs.Get_snarked_ledger.rpc
- state_hash port
+ Daemon_rpcs.Client.dispatch ~compile_config
+ Daemon_rpcs.Get_snarked_ledger.rpc state_hash port
| "staking-epoch-ledger" ->
check_for_state_hash () ;
- Daemon_rpcs.Client.dispatch Daemon_rpcs.Get_staking_ledger.rpc
+ Daemon_rpcs.Client.dispatch ~compile_config
+ Daemon_rpcs.Get_staking_ledger.rpc
Daemon_rpcs.Get_staking_ledger.Current port
| "next-epoch-ledger" ->
check_for_state_hash () ;
- Daemon_rpcs.Client.dispatch Daemon_rpcs.Get_staking_ledger.rpc
+ Daemon_rpcs.Client.dispatch ~compile_config
+ Daemon_rpcs.Get_staking_ledger.rpc
Daemon_rpcs.Get_staking_ledger.Next port
| _ ->
(* unreachable *)
@@ -818,10 +860,16 @@ let hash_ledger =
flag "--ledger-file"
~doc:"LEDGER-FILE File containing an exported ledger"
(required string))
+ and config_files = Cli_lib.Flag.config_files
and plaintext = Cli_lib.Flag.plaintext in
fun () ->
- let constraint_constants =
- Genesis_constants.Compiled.constraint_constants
+ let open Deferred.Let_syntax in
+ let%bind constraint_constants =
+ let logger = Logger.create () in
+ let%map conf =
+ Runtime_config.Constants.load_constants ~logger config_files
+ in
+ Runtime_config.Constants.constraint_constants conf
in
let process_accounts accounts =
let packed_ledger =
@@ -922,33 +970,39 @@ let currency_in_ledger =
ignore (exit 1 : 'a Deferred.t) )
let constraint_system_digests =
+ let open Command.Let_syntax in
Command.async ~summary:"Print MD5 digest of each SNARK constraint"
- (Command.Param.return (fun () ->
- let constraint_constants =
- Genesis_constants.Compiled.constraint_constants
- in
- let proof_level = Genesis_constants.Compiled.proof_level in
- let all =
- Transaction_snark.constraint_system_digests ~constraint_constants ()
- @ Blockchain_snark.Blockchain_snark_state.constraint_system_digests
- ~proof_level ~constraint_constants ()
- in
- let all =
- List.sort ~compare:(fun (k1, _) (k2, _) -> String.compare k1 k2) all
+ (let%map_open config_files = Cli_lib.Flag.config_files in
+ fun () ->
+ let open Deferred.Let_syntax in
+ let%bind constraint_constants, proof_level =
+ let logger = Logger.create () in
+ let%map conf =
+ Runtime_config.Constants.load_constants ~logger config_files
in
- List.iter all ~f:(fun (k, v) -> printf "%s\t%s\n" k (Md5.to_hex v)) ;
- Deferred.unit ) )
+ Runtime_config.Constants.(constraint_constants conf, proof_level conf)
+ in
+ let all =
+ Transaction_snark.constraint_system_digests ~constraint_constants ()
+ @ Blockchain_snark.Blockchain_snark_state.constraint_system_digests
+ ~proof_level ~constraint_constants ()
+ in
+ let all =
+ List.sort ~compare:(fun (k1, _) (k2, _) -> String.compare k1 k2) all
+ in
+ List.iter all ~f:(fun (k, v) -> printf "%s\t%s\n" k (Md5.to_hex v)) ;
+ Deferred.unit )
let snark_job_list =
- let open Deferred.Let_syntax in
- let open Command.Param in
Command.async
~summary:
"List of snark jobs in JSON format that are yet to be included in the \
blocks"
- (Cli_lib.Background_daemon.rpc_init (return ()) ~f:(fun port () ->
+ (Cli_lib.Background_daemon.rpc_init Cli_lib.Flag.config_files
+ ~f:(fun port config_files ->
+ let%bind compile_config = load_compile_config config_files in
match%map
- Daemon_rpcs.Client.dispatch_join_errors
+ Daemon_rpcs.Client.dispatch_join_errors ~compile_config
Daemon_rpcs.Snark_job_list.rpc () port
with
| Ok str ->
@@ -1069,13 +1123,14 @@ let pending_snark_work =
print_string (Yojson.Safe.to_string lst) ) ) )
let start_tracing =
- let open Deferred.Let_syntax in
- let open Command.Param in
Command.async
~summary:"Start async tracing to $config-directory/trace/$pid.trace"
- (Cli_lib.Background_daemon.rpc_init (return ()) ~f:(fun port () ->
+ (Cli_lib.Background_daemon.rpc_init Cli_lib.Flag.config_files
+ ~f:(fun port config_files ->
+ let%bind compile_config = load_compile_config config_files in
match%map
- Daemon_rpcs.Client.dispatch Daemon_rpcs.Start_tracing.rpc () port
+ Daemon_rpcs.Client.dispatch ~compile_config
+ Daemon_rpcs.Start_tracing.rpc () port
with
| Ok () ->
print_endline "Daemon started tracing!"
@@ -1083,12 +1138,13 @@ let start_tracing =
Daemon_rpcs.Client.print_rpc_error e ) )
let stop_tracing =
- let open Deferred.Let_syntax in
- let open Command.Param in
Command.async ~summary:"Stop async tracing"
- (Cli_lib.Background_daemon.rpc_init (return ()) ~f:(fun port () ->
+ (Cli_lib.Background_daemon.rpc_init Cli_lib.Flag.config_files
+ ~f:(fun port config_files ->
+ let%bind compile_config = load_compile_config config_files in
match%map
- Daemon_rpcs.Client.dispatch Daemon_rpcs.Stop_tracing.rpc () port
+ Daemon_rpcs.Client.dispatch ~compile_config
+ Daemon_rpcs.Stop_tracing.rpc () port
with
| Ok () ->
print_endline "Daemon stopped printing!"
@@ -1096,16 +1152,16 @@ let stop_tracing =
Daemon_rpcs.Client.print_rpc_error e ) )
let start_internal_tracing =
- let open Deferred.Let_syntax in
- let open Command.Param in
Command.async
~summary:
"Start internal tracing to \
$config-directory/internal-tracing/internal-trace.jsonl"
- (Cli_lib.Background_daemon.rpc_init (return ()) ~f:(fun port () ->
+ (Cli_lib.Background_daemon.rpc_init Cli_lib.Flag.config_files
+ ~f:(fun port config_files ->
+ let%bind compile_config = load_compile_config config_files in
match%map
- Daemon_rpcs.Client.dispatch Daemon_rpcs.Start_internal_tracing.rpc ()
- port
+ Daemon_rpcs.Client.dispatch ~compile_config
+ Daemon_rpcs.Start_internal_tracing.rpc () port
with
| Ok () ->
print_endline "Daemon internal started tracing!"
@@ -1113,13 +1169,13 @@ let start_internal_tracing =
Daemon_rpcs.Client.print_rpc_error e ) )
let stop_internal_tracing =
- let open Deferred.Let_syntax in
- let open Command.Param in
Command.async ~summary:"Stop internal tracing"
- (Cli_lib.Background_daemon.rpc_init (return ()) ~f:(fun port () ->
+ (Cli_lib.Background_daemon.rpc_init Cli_lib.Flag.config_files
+ ~f:(fun port config_files ->
+ let%bind compile_config = load_compile_config config_files in
match%map
- Daemon_rpcs.Client.dispatch Daemon_rpcs.Stop_internal_tracing.rpc ()
- port
+ Daemon_rpcs.Client.dispatch ~compile_config
+ Daemon_rpcs.Stop_internal_tracing.rpc () port
with
| Ok () ->
print_endline "Daemon internal tracing stopped!"
@@ -1226,7 +1282,6 @@ let import_key =
]
and privkey_path = Cli_lib.Flag.privkey_read_path in
fun () ->
- let open Deferred.Let_syntax in
let initial_password = ref None in
let do_graphql graphql_endpoint =
let%bind password =
@@ -1356,7 +1411,6 @@ let export_key =
(key will be exported using the same password)."
(Cli_lib.Background_daemon.graphql_init flags
~f:(fun _ (export_path, pk, conf_dir) ->
- let open Deferred.Let_syntax in
let%bind home = Sys.home_directory () in
let conf_dir =
Option.value
@@ -1374,7 +1428,6 @@ let export_key =
"Password for exported account: " ~env:Secrets.Keypair.env )
in
let%bind account =
- let open Deferred.Result.Let_syntax in
let%bind _ = Secrets.Wallets.unlock wallets ~needle:pk ~password in
Secrets.Wallets.find_identity wallets ~needle:pk
|> Result.of_option ~error:`Not_found
@@ -1612,14 +1665,12 @@ let generate_libp2p_keypair_do privkey_path =
(let open Deferred.Let_syntax in
(* FIXME: I'd like to accumulate messages into this logger and only dump them out in failure paths. *)
let logger = Logger.null () in
- let compile_config = Mina_compile_config.Compiled.t in
(* Using the helper only for keypair generation requires no state. *)
File_system.with_temp_dir "mina-generate-libp2p-keypair" ~f:(fun tmpd ->
match%bind
Mina_net2.create ~logger ~conf_dir:tmpd ~all_peers_seen_metric:false
~pids:(Child_processes.Termination.create_pid_table ())
- ~on_peer_connected:ignore ~on_peer_disconnected:ignore
- ~block_window_duration:compile_config.block_window_duration ()
+ ~on_peer_connected:ignore ~on_peer_disconnected:ignore ()
with
| Ok net ->
let%bind me = Mina_net2.generate_random_keypair net in
@@ -1646,14 +1697,12 @@ let dump_libp2p_keypair_do privkey_path =
Deferred.ignore_m
(let open Deferred.Let_syntax in
let logger = Logger.null () in
- let compile_config = Mina_compile_config.Compiled.t in
(* Using the helper only for keypair generation requires no state. *)
File_system.with_temp_dir "mina-dump-libp2p-keypair" ~f:(fun tmpd ->
match%bind
Mina_net2.create ~logger ~conf_dir:tmpd ~all_peers_seen_metric:false
~pids:(Child_processes.Termination.create_pid_table ())
- ~on_peer_connected:ignore ~on_peer_disconnected:ignore
- ~block_window_duration:compile_config.block_window_duration ()
+ ~on_peer_connected:ignore ~on_peer_disconnected:ignore ()
with
| Ok net ->
let%bind () = Mina_net2.shutdown net in
@@ -1680,10 +1729,14 @@ let trustlist_add =
let open Deferred.Let_syntax in
let open Daemon_rpcs in
Command.async ~summary:"Add an IP to the trustlist"
- (Cli_lib.Background_daemon.rpc_init trustlist_ip_flag
- ~f:(fun port trustlist_ip ->
+ (Cli_lib.Background_daemon.rpc_init
+ (Args.zip2 Cli_lib.Flag.config_files trustlist_ip_flag)
+ ~f:(fun port (config_files, trustlist_ip) ->
let trustlist_ip_string = Unix.Cidr.to_string trustlist_ip in
- match%map Client.dispatch Add_trustlist.rpc trustlist_ip port with
+ let%bind compile_config = load_compile_config config_files in
+ match%map
+ Client.dispatch ~compile_config Add_trustlist.rpc trustlist_ip port
+ with
| Ok (Ok ()) ->
printf "Added %s to client trustlist" trustlist_ip_string
| Ok (Error e) ->
@@ -1697,10 +1750,15 @@ let trustlist_remove =
let open Deferred.Let_syntax in
let open Daemon_rpcs in
Command.async ~summary:"Remove a CIDR mask from the trustlist"
- (Cli_lib.Background_daemon.rpc_init trustlist_ip_flag
- ~f:(fun port trustlist_ip ->
+ (Cli_lib.Background_daemon.rpc_init
+ (Args.zip2 Cli_lib.Flag.config_files trustlist_ip_flag)
+ ~f:(fun port (config_files, trustlist_ip) ->
let trustlist_ip_string = Unix.Cidr.to_string trustlist_ip in
- match%map Client.dispatch Remove_trustlist.rpc trustlist_ip port with
+ let%bind compile_config = load_compile_config config_files in
+ match%map
+ Client.dispatch ~compile_config Remove_trustlist.rpc trustlist_ip
+ port
+ with
| Ok (Ok ()) ->
printf "Removed %s to client trustlist" trustlist_ip_string
| Ok (Error e) ->
@@ -1711,12 +1769,14 @@ let trustlist_remove =
(Error.to_string_hum e) ) )
let trustlist_list =
- let open Deferred.Let_syntax in
let open Daemon_rpcs in
- let open Command.Param in
Command.async ~summary:"List the CIDR masks in the trustlist"
- (Cli_lib.Background_daemon.rpc_init (return ()) ~f:(fun port () ->
- match%map Client.dispatch Get_trustlist.rpc () port with
+ (Cli_lib.Background_daemon.rpc_init Cli_lib.Flag.config_files
+ ~f:(fun port config_files ->
+ let%bind compile_config = load_compile_config config_files in
+ match%map
+ Client.dispatch ~compile_config Get_trustlist.rpc () port
+ with
| Ok ips ->
printf
"The following IPs are permitted to connect to the daemon \
@@ -1795,90 +1855,82 @@ let add_peers_graphql =
} ) ) ) )
let compile_time_constants =
- let genesis_constants = Genesis_constants.Compiled.genesis_constants in
- let constraint_constants = Genesis_constants.Compiled.constraint_constants in
- let proof_level = Genesis_constants.Compiled.proof_level in
+ let open Command.Let_syntax in
Command.async
~summary:"Print a JSON map of the compile-time consensus parameters"
- (Command.Param.return (fun () ->
+ (let%map_open config_files = Cli_lib.Flag.config_files in
+ fun () ->
+ let home = Core.Sys.home_directory () in
+ let conf_dir = home ^/ Cli_lib.Default.conf_dir_name in
+ let genesis_dir =
let home = Core.Sys.home_directory () in
- let conf_dir = home ^/ Cli_lib.Default.conf_dir_name in
- let genesis_dir =
- let home = Core.Sys.home_directory () in
- home ^/ Cli_lib.Default.conf_dir_name
- in
- let config_file =
- match Sys.getenv "MINA_CONFIG_FILE" with
- | Some config_file ->
- config_file
- | None ->
- conf_dir ^/ "daemon.json"
- in
- let open Async in
+ home ^/ Cli_lib.Default.conf_dir_name
+ in
+ let open Deferred.Let_syntax in
+ let%map ({ consensus_constants; _ } as precomputed_values), _ =
+          (* This is kind of ugly because we allow supplying a runtime_config value directly rather than forcing what is read from the environment *)
+ (* TODO: See if we can initialize consensus_constants without also initializing the ledger *)
let logger = Logger.create () in
- let%map ({ consensus_constants; _ } as precomputed_values), _ =
- let%bind runtime_config =
- let%map config_file =
- Runtime_config.Json_loader.load_config_files ~conf_dir ~logger
- [ config_file ]
- >>| Or_error.ok
- in
- let default =
- Runtime_config.of_json_layout
- { Runtime_config.Json_layout.default with
- ledger =
- Some
- { Runtime_config.Json_layout.Ledger.default with
- accounts = Some []
- }
- }
- |> Result.ok_or_failwith
- in
- Option.value ~default config_file
- in
- Genesis_ledger_helper.init_from_config_file ~genesis_constants
- ~constraint_constants ~logger:(Logger.null ()) ~proof_level
- ~cli_proof_level:None ~genesis_dir runtime_config
- >>| Or_error.ok_exn
+ let%bind m_conf =
+ Runtime_config.Json_loader.load_config_files ~conf_dir ~logger
+ config_files
+ >>| Or_error.ok
+ in
+ let default =
+ Runtime_config.of_json_layout
+ { Runtime_config.Json_layout.default with
+ ledger =
+ Some
+ { Runtime_config.Json_layout.Ledger.default with
+ accounts = Some []
+ }
+ }
+ |> Result.ok_or_failwith
in
- let all_constants =
- `Assoc
- [ ( "genesis_state_timestamp"
- , `String
- ( Block_time.to_time_exn
- consensus_constants.genesis_state_timestamp
- |> Core.Time.to_string_iso8601_basic ~zone:Core.Time.Zone.utc
- ) )
- ; ("k", `Int (Unsigned.UInt32.to_int consensus_constants.k))
- ; ( "coinbase"
- , `String
- (Currency.Amount.to_mina_string
- precomputed_values.constraint_constants.coinbase_amount )
- )
- ; ( "block_window_duration_ms"
- , `Int
- precomputed_values.constraint_constants
- .block_window_duration_ms )
- ; ("delta", `Int (Unsigned.UInt32.to_int consensus_constants.delta))
- ; ( "sub_windows_per_window"
- , `Int
- (Unsigned.UInt32.to_int
- consensus_constants.sub_windows_per_window ) )
- ; ( "slots_per_sub_window"
- , `Int
- (Unsigned.UInt32.to_int
- consensus_constants.slots_per_sub_window ) )
- ; ( "slots_per_window"
- , `Int
- (Unsigned.UInt32.to_int consensus_constants.slots_per_window)
- )
- ; ( "slots_per_epoch"
- , `Int
- (Unsigned.UInt32.to_int consensus_constants.slots_per_epoch)
- )
- ]
+ let runtime_config = Option.value ~default m_conf in
+ let constants =
+ Runtime_config.Constants.load_constants' runtime_config
in
- Core_kernel.printf "%s\n%!" (Yojson.Safe.to_string all_constants) ) )
+ Genesis_ledger_helper.Config_loader.init_from_config_file ~genesis_dir
+ ~logger ~constants runtime_config
+ |> Deferred.Or_error.ok_exn
+ in
+ let all_constants =
+ `Assoc
+ [ ( "genesis_state_timestamp"
+ , `String
+ ( Block_time.to_time_exn
+ consensus_constants.genesis_state_timestamp
+ |> Core.Time.to_string_iso8601_basic ~zone:Core.Time.Zone.utc
+ ) )
+ ; ("k", `Int (Unsigned.UInt32.to_int consensus_constants.k))
+ ; ( "coinbase"
+ , `String
+ (Currency.Amount.to_mina_string
+ precomputed_values.constraint_constants.coinbase_amount ) )
+ ; ( "block_window_duration_ms"
+ , `Int
+ precomputed_values.constraint_constants
+ .block_window_duration_ms )
+ ; ("delta", `Int (Unsigned.UInt32.to_int consensus_constants.delta))
+ ; ( "sub_windows_per_window"
+ , `Int
+ (Unsigned.UInt32.to_int
+ consensus_constants.sub_windows_per_window ) )
+ ; ( "slots_per_sub_window"
+ , `Int
+ (Unsigned.UInt32.to_int
+ consensus_constants.slots_per_sub_window ) )
+ ; ( "slots_per_window"
+ , `Int
+ (Unsigned.UInt32.to_int consensus_constants.slots_per_window)
+ )
+ ; ( "slots_per_epoch"
+ , `Int (Unsigned.UInt32.to_int consensus_constants.slots_per_epoch)
+ )
+ ]
+ in
+ Core_kernel.printf "%s\n%!" (Yojson.Safe.to_string all_constants) )
let node_status =
let open Command.Param in
@@ -1896,10 +1948,13 @@ let node_status =
flag "--show-errors" ~aliases:[ "show-errors" ] no_arg
~doc:"Include error responses in output"
in
- let flags = Args.zip3 daemon_peers_flag peers_flag show_errors_flag in
+ let flags =
+ Args.zip4 Cli_lib.Flag.config_files daemon_peers_flag peers_flag
+ show_errors_flag
+ in
Command.async ~summary:"Get node statuses for a set of peers"
(Cli_lib.Background_daemon.rpc_init flags
- ~f:(fun port (daemon_peers, peers, show_errors) ->
+ ~f:(fun port (config_files, daemon_peers, peers, show_errors) ->
if
(Option.is_none peers && not daemon_peers)
|| (Option.is_some peers && daemon_peers)
@@ -1911,9 +1966,10 @@ let node_status =
Option.map peers ~f:(fun peers ->
List.map peers ~f:Mina_net2.Multiaddr.of_string )
in
+ let%bind compile_config = load_compile_config config_files in
match%map
- Daemon_rpcs.Client.dispatch Daemon_rpcs.Get_node_status.rpc
- peer_ids_opt port
+ Daemon_rpcs.Client.dispatch ~compile_config
+ Daemon_rpcs.Get_node_status.rpc peer_ids_opt port
with
| Ok all_status_data ->
let all_status_data =
@@ -1933,11 +1989,13 @@ let node_status =
let object_lifetime_statistics =
let open Daemon_rpcs in
- let open Command.Param in
Command.async ~summary:"Dump internal object lifetime statistics to JSON"
- (Cli_lib.Background_daemon.rpc_init (return ()) ~f:(fun port () ->
+ (Cli_lib.Background_daemon.rpc_init Cli_lib.Flag.config_files
+ ~f:(fun port config_files ->
+ let%bind compile_config = load_compile_config config_files in
match%map
- Client.dispatch Get_object_lifetime_statistics.rpc () port
+ Client.dispatch ~compile_config Get_object_lifetime_statistics.rpc ()
+ port
with
| Ok stats ->
print_endline stats
@@ -1972,14 +2030,15 @@ let archive_blocks =
and extensional_flag =
Command.Param.flag "--extensional" ~aliases:[ "extensional" ] no_arg
~doc:"Blocks are in extensional JSON format"
- in
+ and config_files = Cli_lib.Flag.config_files in
( files
, success_file
, failure_file
, log_successes
, archive_process_location
, precomputed_flag
- , extensional_flag )
+ , extensional_flag
+ , config_files )
in
Command.async
~summary:
@@ -1996,7 +2055,8 @@ let archive_blocks =
, log_successes
, archive_process_location
, precomputed_flag
- , extensional_flag )
+ , extensional_flag
+ , config_files )
->
if Bool.equal precomputed_flag extensional_flag then
failwith
@@ -2044,13 +2104,15 @@ let archive_blocks =
in
let add_to_success_file = output_file_line success_file in
let add_to_failure_file = output_file_line failure_file in
+ let%bind compile_config = load_compile_config config_files in
let send_precomputed_block =
make_send_block
~graphql_make:(fun block ->
Graphql_queries.Archive_precomputed_block.(
make @@ makeVariables ~block ()) )
~archive_dispatch:
- Mina_lib.Archive_client.dispatch_precomputed_block
+ (Mina_lib.Archive_client.dispatch_precomputed_block
+ ~compile_config )
in
let send_extensional_block =
make_send_block
@@ -2058,7 +2120,8 @@ let archive_blocks =
Graphql_queries.Archive_extensional_block.(
make @@ makeVariables ~block ()) )
~archive_dispatch:
- Mina_lib.Archive_client.dispatch_extensional_block
+ (Mina_lib.Archive_client.dispatch_extensional_block
+ ~compile_config )
in
Deferred.List.iter files ~f:(fun path ->
match%map
@@ -2162,10 +2225,13 @@ let receipt_chain_hash =
let chain_id_inputs =
let open Deferred.Let_syntax in
Command.async ~summary:"Print the inputs that yield the current chain id"
- (Cli_lib.Background_daemon.rpc_init (Command.Param.all_unit [])
- ~f:(fun port () ->
+ (Cli_lib.Background_daemon.rpc_init Cli_lib.Flag.config_files
+ ~f:(fun port config_files ->
let open Daemon_rpcs in
- match%map Client.dispatch Chain_id_inputs.rpc () port with
+ let%bind compile_config = load_compile_config config_files in
+ match%map
+ Client.dispatch ~compile_config Chain_id_inputs.rpc () port
+ with
| Ok
( genesis_state_hash
, genesis_constants
@@ -2322,26 +2388,31 @@ let test_ledger_application =
flag "--has-second-partition"
~doc:"Assume there is a second partition (scan state)" no_arg
and tracing = flag "--tracing" ~doc:"Wrap test into tracing" no_arg
+ and config_files = Cli_lib.Flag.config_files
and no_masks = flag "--no-masks" ~doc:"Do not create masks" no_arg in
Cli_lib.Exceptions.handle_nicely
@@ fun () ->
+ let open Deferred.Let_syntax in
+ let%bind genesis_constants, constraint_constants =
+ let logger = Logger.create () in
+ let%map conf =
+ Runtime_config.Constants.load_constants ~logger config_files
+ in
+ Runtime_config.Constants.
+ (genesis_constants conf, constraint_constants conf)
+ in
let first_partition_slots =
Option.value ~default:128 first_partition_slots
in
let num_txs_per_round = Option.value ~default:3 num_txs_per_round in
let rounds = Option.value ~default:580 rounds in
let max_depth = Option.value ~default:290 max_depth in
- let constraint_constants =
- Genesis_constants.Compiled.constraint_constants
- in
- let genesis_constants = Genesis_constants.Compiled.genesis_constants in
Test_ledger_application.test ~privkey_path ~ledger_path ?prev_block_path
~first_partition_slots ~no_new_stack ~has_second_partition
~num_txs_per_round ~rounds ~no_masks ~max_depth ~tracing num_txs
~constraint_constants ~genesis_constants )
let itn_create_accounts =
- let compile_config = Mina_compile_config.Compiled.t in
Command.async ~summary:"Fund new accounts for incentivized testnet"
(let open Command.Param in
let privkey_path = Cli_lib.Flag.privkey_read_path in
@@ -2352,10 +2423,7 @@ let itn_create_accounts =
flag "--num-accounts" ~doc:"NN Number of new accounts" (required int)
in
let fee =
- flag "--fee"
- ~doc:
- (sprintf "NN Fee in nanomina paid to create an account (minimum: %s)"
- (Currency.Fee.to_string compile_config.minimum_user_command_fee) )
+ flag "--fee" ~doc:"NN Fee in nanomina paid to create an account"
(required int)
in
let amount =
@@ -2363,13 +2431,31 @@ let itn_create_accounts =
~doc:"NN Amount in nanomina to be divided among new accounts"
(required int)
in
- let args = Args.zip5 privkey_path key_prefix num_accounts fee amount in
- let genesis_constants = Genesis_constants.Compiled.genesis_constants in
- let constraint_constants =
- Genesis_constants.Compiled.constraint_constants
+ let config_files = Cli_lib.Flag.config_files in
+ let args =
+ Args.zip6 privkey_path key_prefix num_accounts fee amount config_files
in
Cli_lib.Background_daemon.rpc_init args
- ~f:(Itn.create_accounts ~genesis_constants ~constraint_constants))
+ ~f:(fun
+ port
+ (privkey_path, key_prefix, num_accounts, fee, amount, config_files)
+ ->
+ let open Deferred.Let_syntax in
+ let%bind genesis_constants, constraint_constants, compile_config =
+ let logger = Logger.create () in
+ let%map conf =
+ Runtime_config.Constants.load_constants ~logger config_files
+ in
+ Runtime_config.Constants.
+ ( genesis_constants conf
+ , constraint_constants conf
+ , compile_config conf )
+ in
+ let args' = (privkey_path, key_prefix, num_accounts, fee, amount) in
+ Itn.create_accounts ~genesis_constants ~constraint_constants
+ ~compile_config port args' ))
module Visualization = struct
let create_command (type rpc_response) ~name ~f
@@ -2378,10 +2464,14 @@ module Visualization = struct
Command.async
~summary:(sprintf !"Produce a visualization of the %s" name)
(Cli_lib.Background_daemon.rpc_init
- Command.Param.(anon @@ ("output-filepath" %: string))
- ~f:(fun port filename ->
+ (Args.zip2 Cli_lib.Flag.config_files
+ Command.Param.(anon @@ ("output-filepath" %: string)) )
+ ~f:(fun port (config_files, filename) ->
+ let%bind compile_config = load_compile_config config_files in
let%map message =
- match%map Daemon_rpcs.Client.dispatch rpc filename port with
+ match%map
+ Daemon_rpcs.Client.dispatch ~compile_config rpc filename port
+ with
| Ok response ->
f filename response
| Error e ->
diff --git a/src/app/cli/src/init/itn.ml b/src/app/cli/src/init/itn.ml
index d02c987f0b0..2b689266e19 100644
--- a/src/app/cli/src/init/itn.ml
+++ b/src/app/cli/src/init/itn.ml
@@ -7,7 +7,8 @@ open Mina_base
open Mina_transaction
let create_accounts ~(genesis_constants : Genesis_constants.t)
- ~(constraint_constants : Genesis_constants.Constraint_constants.t) port
+ ~(constraint_constants : Genesis_constants.Constraint_constants.t)
+ ~(compile_config : Mina_compile_config.t) port
(privkey_path, key_prefix, num_accounts, fee, amount) =
let keys_per_zkapp = 8 in
let zkapps_per_block = 10 in
@@ -37,7 +38,7 @@ let create_accounts ~(genesis_constants : Genesis_constants.t)
in
let%bind fee_payer_balance =
match%bind
- Daemon_rpcs.Client.dispatch Daemon_rpcs.Get_balance.rpc
+ Daemon_rpcs.Client.dispatch ~compile_config Daemon_rpcs.Get_balance.rpc
fee_payer_account_id port
with
| Ok (Ok (Some balance)) ->
@@ -60,8 +61,8 @@ let create_accounts ~(genesis_constants : Genesis_constants.t)
let%bind fee_payer_initial_nonce =
(* inferred nonce considers txns in pool, in addition to ledger *)
match%map
- Daemon_rpcs.Client.dispatch Daemon_rpcs.Get_inferred_nonce.rpc
- fee_payer_account_id port
+ Daemon_rpcs.Client.dispatch ~compile_config
+ Daemon_rpcs.Get_inferred_nonce.rpc fee_payer_account_id port
with
| Ok (Ok (Some nonce)) ->
Account.Nonce.of_uint32 nonce
@@ -218,8 +219,8 @@ let create_accounts ~(genesis_constants : Genesis_constants.t)
Format.printf " Public key: %s Balance change: %s%s@." pk sgn
balance_change_str ) ) ;
let%bind res =
- Daemon_rpcs.Client.dispatch Daemon_rpcs.Send_zkapp_commands.rpc
- zkapps_batch port
+ Daemon_rpcs.Client.dispatch ~compile_config
+ Daemon_rpcs.Send_zkapp_commands.rpc zkapps_batch port
in
( match res with
| Ok res_inner -> (
@@ -253,8 +254,8 @@ let create_accounts ~(genesis_constants : Genesis_constants.t)
Deferred.List.for_all batch_pks ~f:(fun pk ->
let account_id = Account_id.create pk Token_id.default in
let%map res =
- Daemon_rpcs.Client.dispatch Daemon_rpcs.Get_balance.rpc account_id
- port
+ Daemon_rpcs.Client.dispatch ~compile_config
+ Daemon_rpcs.Get_balance.rpc account_id port
in
match res with
| Ok (Ok (Some balance)) when Currency.Balance.(balance > zero) ->
diff --git a/src/app/cli/src/init/mina_run.ml b/src/app/cli/src/init/mina_run.ml
index 62063088c4a..94851641819 100644
--- a/src/app/cli/src/init/mina_run.ml
+++ b/src/app/cli/src/init/mina_run.ml
@@ -364,7 +364,15 @@ let setup_local_server ?(client_trustlist = []) ?rest_server_port
List.map metadata ~f:(fun (s, value) ->
(s, Yojson.Safe.from_string value) )
in
- return @@ Itn_logger.log ~process ~timestamp ~message ~metadata () )
+ let config =
+ { Itn_logger.rpc_handshake_timeout =
+ compile_config.rpc_handshake_timeout
+ ; rpc_heartbeat_timeout = compile_config.rpc_heartbeat_timeout
+ ; rpc_heartbeat_send_every = compile_config.rpc_heartbeat_send_every
+ }
+ in
+ return
+ @@ Itn_logger.log ~process ~timestamp ~message ~metadata ~config () )
]
in
let log_snark_work_metrics (work : Snark_worker.Work.Result.t) =
@@ -600,15 +608,8 @@ let setup_local_server ?(client_trustlist = []) ?rest_server_port
~handshake_timeout:compile_config.rpc_handshake_timeout
~heartbeat_config:
(Rpc.Connection.Heartbeat_config.create
- ~timeout:
- (Time_ns.Span.of_sec
- (Time.Span.to_sec
- compile_config.rpc_heartbeat_timeout ) )
- ~send_every:
- (Time_ns.Span.of_sec
- (Time.Span.to_sec
- compile_config.rpc_heartbeat_send_every ) )
- () )
+ ~timeout:compile_config.rpc_heartbeat_timeout
+ ~send_every:compile_config.rpc_heartbeat_send_every () )
reader writer
~implementations:
(Rpc.Implementations.create_exn
diff --git a/src/app/cli/src/init/transaction_snark_profiler.ml b/src/app/cli/src/init/transaction_snark_profiler.ml
index ef4e26d284b..7c9176a379e 100644
--- a/src/app/cli/src/init/transaction_snark_profiler.ml
+++ b/src/app/cli/src/init/transaction_snark_profiler.ml
@@ -17,10 +17,8 @@ let run ~genesis_constants ~constraint_constants ~proof_level
Parallel.init_master () ;
let verifier =
Async.Thread_safe.block_on_async_exn (fun () ->
- Verifier.create ~commit_id:Mina_version.commit_id ~logger ~proof_level
- ~constraint_constants ~conf_dir:None
- ~pids:(Child_processes.Termination.create_pid_table ())
- () )
+ Verifier.For_tests.default ~commit_id:Mina_version.commit_id ~logger
+ ~proof_level ~constraint_constants () )
in
let rec go n =
if n <= 0 then ()
diff --git a/src/app/delegation_verify/delegation_verify.ml b/src/app/delegation_verify/delegation_verify.ml
index f160d4c1234..ac643b1db67 100644
--- a/src/app/delegation_verify/delegation_verify.ml
+++ b/src/app/delegation_verify/delegation_verify.ml
@@ -13,9 +13,7 @@ let get_filenames =
let verify_snark_work ~verify_transaction_snarks ~proof ~message =
verify_transaction_snarks [ (proof, message) ]
-let config_flag =
- let open Command.Param in
- flag "--config-file" ~doc:"FILE config file" (optional string)
+let config_flag = Cli_lib.Flag.config_files
let keyspace_flag =
let open Command.Param in
@@ -44,31 +42,15 @@ let timestamp =
let open Command.Param in
anon ("timestamp" %: string)
-let instantiate_verify_functions ~logger ~genesis_constants
- ~constraint_constants ~proof_level ~cli_proof_level = function
- | None ->
- Deferred.return
- (Verifier.verify_functions ~constraint_constants ~proof_level ())
- | Some config_file ->
- let%bind.Deferred precomputed_values =
- let%bind.Deferred.Or_error config =
- Runtime_config.Json_loader.load_config_files ~logger [ config_file ]
- in
- Genesis_ledger_helper.init_from_config_file ~logger ~proof_level
- ~constraint_constants ~genesis_constants config ~cli_proof_level
- in
- let%map.Deferred precomputed_values =
- match precomputed_values with
- | Ok (precomputed_values, _) ->
- Deferred.return precomputed_values
- | Error _ ->
- Output.display_error "fail to read config file" ;
- exit 4
- in
- let constraint_constants =
- Precomputed_values.constraint_constants precomputed_values
- in
- Verifier.verify_functions ~constraint_constants ~proof_level:Full ()
+let instantiate_verify_functions ~logger config_file =
+ let open Deferred.Let_syntax in
+ let%map constants =
+ Runtime_config.Constants.load_constants ~logger config_file
+ in
+ let constraint_constants =
+ Runtime_config.Constants.constraint_constants constants
+ in
+ Verifier.verify_functions ~constraint_constants ~proof_level:Full ()
module Make_verifier (Source : Submission.Data_source) = struct
let verify_transaction_snarks = Source.verify_transaction_snarks
@@ -139,7 +121,7 @@ module Make_verifier (Source : Submission.Data_source) = struct
|> Deferred.Or_error.all_unit
end
-let filesystem_command =
+let filesystem_command ~logger =
Command.async ~summary:"Verify submissions and block read from the filesystem"
Command.Let_syntax.(
let%map_open block_dir = block_dir_flag
@@ -147,16 +129,10 @@ let filesystem_command =
and no_checks = no_checks_flag
and config_file = config_flag in
fun () ->
- let logger = Logger.create () in
- let genesis_constants = Genesis_constants.Compiled.genesis_constants in
- let constraint_constants =
- Genesis_constants.Compiled.constraint_constants
- in
- let proof_level = Genesis_constants.Compiled.proof_level in
let%bind.Deferred verify_blockchain_snarks, verify_transaction_snarks =
- instantiate_verify_functions ~logger config_file ~genesis_constants
- ~constraint_constants ~proof_level ~cli_proof_level:None
+ instantiate_verify_functions ~logger config_file
in
+
let submission_paths = get_filenames inputs in
let module V = Make_verifier (struct
include Submission.Filesystem
@@ -175,7 +151,7 @@ let filesystem_command =
Output.display_error @@ Error.to_string_hum e ;
exit 1)
-let cassandra_command =
+let cassandra_command ~logger =
Command.async ~summary:"Verify submissions and block read from Cassandra"
Command.Let_syntax.(
let%map_open cqlsh = cassandra_executable_flag
@@ -186,15 +162,8 @@ let cassandra_command =
and period_end = timestamp in
fun () ->
let open Deferred.Let_syntax in
- let logger = Logger.create () in
- let genesis_constants = Genesis_constants.Compiled.genesis_constants in
- let constraint_constants =
- Genesis_constants.Compiled.constraint_constants
- in
- let proof_level = Genesis_constants.Compiled.proof_level in
let%bind.Deferred verify_blockchain_snarks, verify_transaction_snarks =
- instantiate_verify_functions ~logger config_file ~genesis_constants
- ~constraint_constants ~proof_level ~cli_proof_level:None
+ instantiate_verify_functions ~logger config_file
in
let module V = Make_verifier (struct
include Submission.Cassandra
@@ -217,22 +186,15 @@ let cassandra_command =
Output.display_error @@ Error.to_string_hum e ;
exit 1)
-let stdin_command =
+let stdin_command ~logger =
Command.async
~summary:"Verify submissions and blocks read from standard input"
Command.Let_syntax.(
let%map_open config_file = config_flag and no_checks = no_checks_flag in
fun () ->
let open Deferred.Let_syntax in
- let logger = Logger.create () in
- let genesis_constants = Genesis_constants.Compiled.genesis_constants in
- let constraint_constants =
- Genesis_constants.Compiled.constraint_constants
- in
- let proof_level = Genesis_constants.Compiled.proof_level in
let%bind.Deferred verify_blockchain_snarks, verify_transaction_snarks =
- instantiate_verify_functions ~logger config_file ~genesis_constants
- ~constraint_constants ~proof_level ~cli_proof_level:None
+ instantiate_verify_functions ~logger config_file
in
let module V = Make_verifier (struct
include Submission.Stdin
@@ -248,12 +210,14 @@ let stdin_command =
Output.display_error @@ Error.to_string_hum e ;
exit 1)
-let command =
+let command ~logger =
Command.group
~summary:"A tool for verifying JSON payload submitted by the uptime service"
- [ ("fs", filesystem_command)
- ; ("cassandra", cassandra_command)
- ; ("stdin", stdin_command)
+ [ ("fs", filesystem_command ~logger)
+ ; ("cassandra", cassandra_command ~logger)
+ ; ("stdin", stdin_command ~logger)
]
-let () = Async.Command.run command
+let () =
+ let logger = Logger.create () in
+ Async.Command.run @@ command ~logger
diff --git a/src/app/libp2p_helper/Makefile b/src/app/libp2p_helper/Makefile
index f7b59a4f466..9292677edda 100644
--- a/src/app/libp2p_helper/Makefile
+++ b/src/app/libp2p_helper/Makefile
@@ -14,7 +14,7 @@ libp2p_helper: ../../libp2p_ipc/libp2p_ipc.capnp.go
test: ../../libp2p_ipc/libp2p_ipc.capnp.go
cd src/libp2p_helper \
&& (ulimit -n 65536 || true) \
- && $(GO) test -short -timeout 40m
+ && $(GO) test -short -timeout 60m
test-bs-qc: ../../libp2p_ipc/libp2p_ipc.capnp.go
cd src/libp2p_helper \
diff --git a/src/app/libp2p_helper/src/bitswap_storage.go b/src/app/libp2p_helper/src/bitswap_storage.go
index 5a54e36eb2f..753c181e0a2 100644
--- a/src/app/libp2p_helper/src/bitswap_storage.go
+++ b/src/app/libp2p_helper/src/bitswap_storage.go
@@ -1,7 +1,9 @@
package codanet
import (
+ "bytes"
"context"
+ "errors"
"fmt"
"github.com/ipfs/boxo/blockstore"
@@ -24,14 +26,23 @@ type BitswapStorage interface {
GetStatus(ctx context.Context, key [32]byte) (RootBlockStatus, error)
SetStatus(ctx context.Context, key [32]byte, value RootBlockStatus) error
DeleteStatus(ctx context.Context, key [32]byte) error
+	// Delete blocks for which no references exist
DeleteBlocks(ctx context.Context, keys [][32]byte) error
ViewBlock(ctx context.Context, key [32]byte, callback func([]byte) error) error
StoreBlocks(ctx context.Context, blocks []blocks.Block) error
+ // Reference (when exists=true) or dereference (when exists=false)
+ // blocks related to the specified root.
+ // Blocks with references are protected from deletion.
+ UpdateReferences(ctx context.Context, root [32]byte, exists bool, keys ...[32]byte) error
}
type BitswapStorageLmdb struct {
blockstore *lmdbbs.Blockstore
statusDB lmdb.DBI
+	// Reference DB: maps a composite key <key><root> to empty bytes,
+	// functioning as a set. Querying the reference DB by the <key>
+	// prefix allows determining whether some root still references the key
+ refsDB lmdb.DBI
}
func OpenBitswapStorageLmdb(path string) (*BitswapStorageLmdb, error) {
@@ -51,13 +62,30 @@ func OpenBitswapStorageLmdb(path string) (*BitswapStorageLmdb, error) {
if err != nil {
return nil, fmt.Errorf("failed to create/open lmdb status database: %s", err)
}
- return &BitswapStorageLmdb{blockstore: blockstore, statusDB: statusDB}, nil
+ refsDB, err := blockstore.OpenDB("refs")
+ if err != nil {
+ return nil, fmt.Errorf("failed to create/open lmdb refs database: %s", err)
+ }
+ return &BitswapStorageLmdb{blockstore: blockstore, statusDB: statusDB, refsDB: refsDB}, nil
}
func (b *BitswapStorageLmdb) Blockstore() blockstore.Blockstore {
return b.blockstore
}
+func (bs *BitswapStorageLmdb) UpdateReferences(ctx context.Context, root [32]byte, exists bool, keys ...[32]byte) error {
+ for _, key := range keys {
+ compositeKey := append(key[:], root[:]...)
+ err := bs.blockstore.PutData(ctx, bs.refsDB, compositeKey, func([]byte, bool) ([]byte, bool, error) {
+ return nil, exists, nil
+ })
+ if err != nil {
+ return err
+ }
+ }
+ return nil
+}
+
func UnmarshalRootBlockStatus(r []byte) (res RootBlockStatus, err error) {
err = fmt.Errorf("wrong root block status retrieved: %v", r)
if len(r) != 1 {
@@ -118,17 +146,52 @@ func (bs *BitswapStorageLmdb) SetStatus(ctx context.Context, key [32]byte, newSt
}
}
if !isStatusTransitionAllowed(exists, prev, newStatus) {
- return nil, false, fmt.Errorf("wrong status transition: from %d to %d", prev, newStatus)
+ return nil, false, fmt.Errorf("wrong status transition: from %d to %d (exists: %v)", prev, newStatus, exists)
}
return []byte{byte(newStatus)}, true, nil
})
}
+
+// hasKeyWithPrefix checks whether there is at least one key in the DB
+// with the given prefix
+func hasKeyWithPrefix(db lmdb.DBI, txn *lmdb.Txn, prefix []byte) (bool, error) {
+ cur, err := txn.OpenCursor(db)
+ if err != nil {
+ return false, err
+ }
+ defer cur.Close()
+	// Position the cursor at the first key greater than or equal to the
+	// prefix (keys are in ascending order) and return it
+ k, _, err := cur.Get(prefix, nil, lmdb.SetRange)
+ if lmdb.IsNotFound(err) {
+ return false, nil
+ } else if err != nil {
+ return false, err
+ }
+	// Check whether the returned key actually has the prefix or is merely greater
+ return bytes.HasPrefix(k, prefix), nil
+}
+
func (bs *BitswapStorageLmdb) DeleteBlocks(ctx context.Context, keys [][32]byte) error {
- cids := make([]cid.Cid, len(keys))
- for i, key := range keys {
- cids[i] = BlockHashToCid(key)
+ keys_ := make([][]byte, len(keys))
+ for i := range keys {
+ keys_[i] = keys[i][:]
+ }
+ return bs.blockstore.DeleteBlocksIf(ctx, keys_, func(txn *lmdb.Txn, key []byte) (bool, error) {
+ // Delete a block from blocksDB if it has no references in the refsDB
+ hasPrefix, err := hasKeyWithPrefix(bs.refsDB, txn, key)
+ return !hasPrefix, err
+ })
+}
+
+func CidToBlockHash(id cid.Cid) ([32]byte, error) {
+ mh, err := multihash.Decode(id.Hash())
+ var res [32]byte
+ if err == nil && mh.Code == MULTI_HASH_CODE && id.Prefix().Codec == cid.Raw && len(mh.Digest) == 32 {
+ copy(res[:], mh.Digest)
+ return res, nil
}
- return bs.blockstore.DeleteMany(ctx, cids)
+ return res, errors.New("unexpected format of cid")
}
const (
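
The `refsDB` introduced above gives each stored block the set of roots that reference it: `UpdateReferences` writes or deletes composite `<key><root>` entries, and `DeleteBlocks` only reaps a block once the prefix scan over its key finds no surviving entry. Below is a minimal in-memory sketch of that contract (it mirrors the test double added later in this patch; all names here are illustrative, not the LMDB-backed API):

```go
package main

import "fmt"

type key = [32]byte

// refSet mimics the refsDB contract: a set of <block><root> pairs.
type refSet map[key]map[key]struct{}

// UpdateReferences adds (exists=true) or removes (exists=false)
// the reference from each block key to the given root.
func (r refSet) UpdateReferences(root key, exists bool, keys ...key) {
	for _, k := range keys {
		if exists {
			if r[k] == nil {
				r[k] = map[key]struct{}{}
			}
			r[k][root] = struct{}{}
		} else if refs, ok := r[k]; ok {
			delete(refs, root)
			if len(refs) == 0 {
				delete(r, k)
			}
		}
	}
}

// Referenced plays the role of the hasKeyWithPrefix scan:
// a block may be deleted only when no root still references it.
func (r refSet) Referenced(k key) bool { return len(r[k]) > 0 }

func main() {
	var blockA, root1, root2 key
	blockA[0], root1[0], root2[0] = 'a', '1', '2'
	refs := refSet{}
	refs.UpdateReferences(root1, true, blockA)
	refs.UpdateReferences(root2, true, blockA)
	refs.UpdateReferences(root1, false, blockA)
	fmt.Println(refs.Referenced(blockA)) // true: root2 still references blockA
}
```

Because a block shared by two roots keeps a reference from each, dereferencing one root leaves the block protected until the last referencing root is deleted.
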
diff --git a/src/app/libp2p_helper/src/codanet.go b/src/app/libp2p_helper/src/codanet.go
index df4c732d47d..4349af9f08e 100644
--- a/src/app/libp2p_helper/src/codanet.go
+++ b/src/app/libp2p_helper/src/codanet.go
@@ -758,6 +758,8 @@ func MakeHelper(ctx context.Context, listenOn []ma.Multiaddr, externalAddr ma.Mu
return nil, err
}
bitswapNetwork := bitnet.NewFromIpfsHost(host, kad, bitnet.Prefix(BitSwapExchange))
+ // Block store is provided, but only read-only methods are used
+ // TODO update Bitswap libraries to require only read-only methods
bs := bitswap.New(context.Background(), bitswapNetwork, bstore.Blockstore())
// nil fields are initialized by beginAdvertising
diff --git a/src/app/libp2p_helper/src/go.mod b/src/app/libp2p_helper/src/go.mod
index 18a7f3b41b2..6cd6d662ec4 100644
--- a/src/app/libp2p_helper/src/go.mod
+++ b/src/app/libp2p_helper/src/go.mod
@@ -20,7 +20,7 @@ require (
github.com/libp2p/go-libp2p-record v0.2.0
github.com/multiformats/go-multiaddr v0.9.0
github.com/multiformats/go-multihash v0.2.3
- github.com/o1-labs/go-bs-lmdb v1.1.0
+ github.com/o1-labs/go-bs-lmdb v1.2.1
github.com/o1-labs/go-libp2p-kad-dht-patcher v1.1.0
github.com/prometheus/client_golang v1.14.0
github.com/shirou/gopsutil/v3 v3.22.7
diff --git a/src/app/libp2p_helper/src/go.sum b/src/app/libp2p_helper/src/go.sum
index f5b51bef0b4..4cc48c38f22 100644
--- a/src/app/libp2p_helper/src/go.sum
+++ b/src/app/libp2p_helper/src/go.sum
@@ -413,8 +413,8 @@ github.com/multiformats/go-varint v0.0.7/go.mod h1:r8PUYw/fD/SjBCiKOoDlGF6QawOEL
github.com/neelance/astrewrite v0.0.0-20160511093645-99348263ae86/go.mod h1:kHJEU3ofeGjhHklVoIGuVj85JJwZ6kWPaJwCIxgnFmo=
github.com/neelance/sourcemap v0.0.0-20151028013722-8c68805598ab/go.mod h1:Qr6/a/Q4r9LP1IltGz7tA7iOK1WonHEYhu1HRBA7ZiM=
github.com/nxadm/tail v1.4.4/go.mod h1:kenIhsEOeOJmVchQTgglprH7qJGnHDVpk1VPCcaMI8A=
-github.com/o1-labs/go-bs-lmdb v1.1.0 h1:qfZNYs5tMJ5Ym23avKIbpubfWJBtSJ1REhu51qb9U6M=
-github.com/o1-labs/go-bs-lmdb v1.1.0/go.mod h1:MGOPzEutRw24iN0qrSSbqOIzbgnyFQST/cKlnVSBmnI=
+github.com/o1-labs/go-bs-lmdb v1.2.1 h1:vweOC9utt/eUiaJvoREMuTvfR/uAej86nXopudKmzgU=
+github.com/o1-labs/go-bs-lmdb v1.2.1/go.mod h1:MGOPzEutRw24iN0qrSSbqOIzbgnyFQST/cKlnVSBmnI=
github.com/o1-labs/go-bs-tests v0.0.6 h1:MytWPo5kOMgxK29UkU6ycWRJrMhuILUSQAtYpn5ek0g=
github.com/o1-labs/go-libp2p-kad-dht-patcher v1.1.0 h1:YXmSCpS/VADts1gJbyEbFLsveun8t5HVfrRQbUHm7ds=
github.com/o1-labs/go-libp2p-kad-dht-patcher v1.1.0/go.mod h1:ANWfn2GqDHigP/bw2boP1PnUG2WL3UNdfnfckVfJOIc=
diff --git a/src/app/libp2p_helper/src/libp2p_helper/bitswap.go b/src/app/libp2p_helper/src/libp2p_helper/bitswap.go
index f12063ed48b..dd0b6d98e1f 100644
--- a/src/app/libp2p_helper/src/libp2p_helper/bitswap.go
+++ b/src/app/libp2p_helper/src/libp2p_helper/bitswap.go
@@ -12,7 +12,6 @@ import (
blocks "github.com/ipfs/go-block-format"
"github.com/ipfs/go-cid"
exchange "github.com/ipfs/go-ipfs-exchange-interface"
- ipld "github.com/ipfs/go-ipld-format"
)
type bitswapDeleteCmd struct {
@@ -48,6 +47,10 @@ type BitswapCtx struct {
func NewBitswapCtx(ctx context.Context, outMsgChan chan<- *capnp.Message) *BitswapCtx {
maxBlockSize := 1 << 18 // 256 KiB
+ return NewBitswapCtxWithMaxBlockSize(maxBlockSize, ctx, outMsgChan)
+}
+
+func NewBitswapCtxWithMaxBlockSize(maxBlockSize int, ctx context.Context, outMsgChan chan<- *capnp.Message) *BitswapCtx {
return &BitswapCtx{
downloadCmds: make(chan bitswapDownloadCmd, 100),
addCmds: make(chan bitswapAddCmd, 100),
@@ -75,10 +78,16 @@ func announceNewRootBlock(ctx context.Context, engine *bitswap.Bitswap, storage
return err
}
bs := make([]blocks.Block, 0, len(blockMap))
+ keys := make([][32]byte, 0, len(blockMap))
for h, b := range blockMap {
bitswapLogger.Debugf("Publishing block %s (%d bytes)", codanet.BlockHashToCidSuffix(h), len(b))
block, _ := blocks.NewBlockWithCid(b, codanet.BlockHashToCid(h))
bs = append(bs, block)
+ keys = append(keys, h)
+ }
+ err = storage.UpdateReferences(ctx, root, true, keys...)
+ if err != nil {
+ return err
}
err = storage.StoreBlocks(ctx, bs)
if err != nil {
@@ -91,62 +100,14 @@ func announceNewRootBlock(ctx context.Context, engine *bitswap.Bitswap, storage
return storage.SetStatus(ctx, root, codanet.Full)
}
-func (bs *BitswapCtx) deleteRoot(root BitswapBlockLink) error {
- if err := bs.storage.SetStatus(bs.ctx, root, codanet.Deleting); err != nil {
- return err
- }
- ClearRootDownloadState(bs, root)
- allDescendants := []BitswapBlockLink{root}
- viewBlockF := func(b []byte) error {
- links, _, err := ReadBitswapBlock(b)
- if err == nil {
- for _, l := range links {
- var l2 BitswapBlockLink
- copy(l2[:], l[:])
- allDescendants = append(allDescendants, l2)
- }
- }
- return err
- }
- for _, block := range allDescendants {
- if err := bs.storage.ViewBlock(bs.ctx, block, viewBlockF); err != nil && err != (ipld.ErrNotFound{Cid: codanet.BlockHashToCid(block)}) {
- return err
- }
- }
- if err := bs.storage.DeleteBlocks(bs.ctx, allDescendants); err != nil {
- return err
- }
- return bs.storage.DeleteStatus(bs.ctx, root)
+func (bs *BitswapCtx) SendResourceUpdate(type_ ipc.ResourceUpdateType, tag BitswapDataTag, root root) {
+ bs.SendResourceUpdates(type_, tag, root)
}
-func ClearRootDownloadState(bs BitswapState, root root) {
- rootStates := bs.RootDownloadStates()
- state, has := rootStates[root]
- if !has {
- return
- }
- nodeParams := bs.NodeDownloadParams()
- delete(rootStates, root)
- state.allDescendants.ForEach(func(c cid.Cid) error {
- np, hasNp := nodeParams[c]
- if hasNp {
- delete(np, root)
- if len(np) == 0 {
- delete(nodeParams, c)
- }
- }
- return nil
- })
- state.cancelF()
-}
-
-func (bs *BitswapCtx) SendResourceUpdate(type_ ipc.ResourceUpdateType, root root) {
- bs.SendResourceUpdates(type_, root)
-}
-func (bs *BitswapCtx) SendResourceUpdates(type_ ipc.ResourceUpdateType, roots ...root) {
+func (bs *BitswapCtx) SendResourceUpdates(type_ ipc.ResourceUpdateType, tag BitswapDataTag, roots ...root) {
// Non-blocking upcall sending
select {
- case bs.outMsgChan <- mkResourceUpdatedUpcall(type_, roots):
+ case bs.outMsgChan <- mkResourceUpdatedUpcall(type_, tag, roots):
default:
for _, root := range roots {
bitswapLogger.Errorf("Failed to send resource update of type %d"+
@@ -187,6 +148,9 @@ func (bs *BitswapCtx) RegisterDeadlineTracker(root_ root, downloadTimeout time.D
func (bs *BitswapCtx) GetStatus(key [32]byte) (codanet.RootBlockStatus, error) {
return bs.storage.GetStatus(bs.ctx, key)
}
+func (bs *BitswapCtx) UpdateReferences(root [32]byte, exists bool, keys ...[32]byte) error {
+ return bs.storage.UpdateReferences(bs.ctx, root, exists, keys...)
+}
func (bs *BitswapCtx) SetStatus(key [32]byte, value codanet.RootBlockStatus) error {
return bs.storage.SetStatus(bs.ctx, key, value)
}
@@ -242,25 +206,27 @@ func (bs *BitswapCtx) Loop() {
ClearRootDownloadState(bs, root)
case cmd := <-bs.addCmds:
configuredCheck()
- blocks, root := SplitDataToBitswapBlocksLengthPrefixedWithTag(bs.maxBlockSize, cmd.data, BlockBodyTag)
+ blocks, root := SplitDataToBitswapBlocksLengthPrefixedWithTag(bs.maxBlockSize, cmd.data, cmd.tag)
err := announceNewRootBlock(bs.ctx, bs.engine, bs.storage, blocks, root)
if err == nil {
- bs.SendResourceUpdate(ipc.ResourceUpdateType_added, root)
+ bs.SendResourceUpdate(ipc.ResourceUpdateType_added, cmd.tag, root)
} else {
bitswapLogger.Errorf("Failed to announce root cid %s (%s)", codanet.BlockHashToCidSuffix(root), err)
}
case cmd := <-bs.deleteCmds:
configuredCheck()
- success := []root{}
+ success := map[BitswapDataTag][]root{}
for _, root := range cmd.rootIds {
- err := bs.deleteRoot(root)
+ tag, err := DeleteRoot(bs, root)
if err == nil {
- success = append(success, root)
+ success[tag] = append(success[tag], root)
} else {
bitswapLogger.Errorf("Error processing delete request for %s: %s", codanet.BlockHashToCidSuffix(root), err)
}
}
- bs.SendResourceUpdates(ipc.ResourceUpdateType_removed, success...)
+ for tag, roots := range success {
+ bs.SendResourceUpdates(ipc.ResourceUpdateType_removed, tag, roots...)
+ }
case cmd := <-bs.downloadCmds:
configuredCheck()
// We put all ids to map to avoid
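
With tags now threaded through resource updates, the delete loop above groups successfully deleted roots by tag so that each `removed` upcall reports roots of a single tag. A tiny sketch of that grouping pattern (types simplified; `tag` and `root` stand in for `BitswapDataTag` and the real `root` type):

```go
package main

import "fmt"

type tag uint8
type root string

// groupByTag buckets deleted roots by their tag, mirroring the
// success map built in BitswapCtx.Loop before sending upcalls.
func groupByTag(deleted map[root]tag) map[tag][]root {
	success := map[tag][]root{}
	for r, t := range deleted {
		success[t] = append(success[t], r)
	}
	return success
}

func main() {
	deleted := map[root]tag{"r1": 0, "r2": 0, "r3": 1}
	for t, roots := range groupByTag(deleted) {
		fmt.Printf("removed upcall: tag=%d, roots=%v\n", t, roots)
	}
}
```
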
diff --git a/src/app/libp2p_helper/src/libp2p_helper/bitswap_delete.go b/src/app/libp2p_helper/src/libp2p_helper/bitswap_delete.go
new file mode 100644
index 00000000000..366d20fe506
--- /dev/null
+++ b/src/app/libp2p_helper/src/libp2p_helper/bitswap_delete.go
@@ -0,0 +1,107 @@
+package main
+
+import (
+ "codanet"
+ "errors"
+
+ "github.com/ipfs/go-cid"
+)
+
+func ClearRootDownloadState(bs BitswapState, root root) {
+ rootStates := bs.RootDownloadStates()
+ state, has := rootStates[root]
+ if !has {
+ return
+ }
+ nodeParams := bs.NodeDownloadParams()
+ delete(rootStates, root)
+ state.allDescendants.ForEach(func(c cid.Cid) error {
+ np, hasNp := nodeParams[c]
+ if hasNp {
+ delete(np, root)
+ if len(np) == 0 {
+ delete(nodeParams, c)
+ }
+ }
+ return nil
+ })
+ state.cancelF()
+}
+
+// getTag retrieves the root's tag, whether the root is still being
+// processed or its processing has already completed
+func getTag(bs BitswapState, root BitswapBlockLink) (tag BitswapDataTag, err error) {
+ state, has := bs.RootDownloadStates()[root]
+ if has {
+ tag = state.getTag()
+ } else {
+ err = bs.ViewBlock(root, func(b []byte) error {
+ _, fullBlockData, err := ReadBitswapBlock(b)
+ if err != nil {
+ return err
+ }
+ if len(fullBlockData) < 5 {
+ return errors.New("root block is too short")
+ }
+ tag = BitswapDataTag(fullBlockData[4])
+ return nil
+ })
+ }
+ return
+}
+
+func DeleteRoot(bs BitswapState, root BitswapBlockLink) (BitswapDataTag, error) {
+ if err := bs.SetStatus(root, codanet.Deleting); err != nil {
+ return 255, err
+ }
+ tag, err := getTag(bs, root)
+ if err != nil {
+ return tag, err
+ }
+ ClearRootDownloadState(bs, root)
+
+ // Performing breadth-first search (BFS)
+
+ // descendantMap is a "visited" set, to ensure we do not
+ // traverse into nodes we once visited
+ descendantMap := map[[32]byte]struct{}{root: {}}
+
+ // allDescendants is a list of all discovered nodes,
+ // serving as both "queue" to be iterated over during BFS,
+ // and as a list of all nodes visited at the end of
+ // BFS iteration
+ allDescendants := []BitswapBlockLink{root}
+ viewBlockF := func(b []byte) error {
+ links, _, err := ReadBitswapBlock(b)
+ if err == nil {
+ for _, l := range links {
+ var l2 BitswapBlockLink
+ copy(l2[:], l[:])
+ _, has := descendantMap[l2]
+				// Check whether the node was visited before
+ if !has {
+ descendantMap[l2] = struct{}{}
+ // Add an item to BFS queue
+ allDescendants = append(allDescendants, l2)
+ }
+ }
+ }
+ return err
+ }
+	// Iteration is done via an index-based loop, because the underlying
+	// array gets extended during iteration, and a regular iterator
+	// wouldn't see these changes
+ for i := 0; i < len(allDescendants); i++ {
+ block := allDescendants[i]
+ if err := bs.ViewBlock(block, viewBlockF); err != nil && !isBlockNotFound(block, err) {
+ return tag, err
+ }
+ }
+ if err := bs.UpdateReferences(root, false, allDescendants...); err != nil {
+ return tag, err
+ }
+ if err := bs.DeleteBlocks(allDescendants); err != nil {
+ return tag, err
+ }
+ return tag, bs.DeleteStatus(root)
+}
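
`DeleteRoot` walks the block DAG with an index-based breadth-first search: the queue slice grows while it is being scanned, and `for i := 0; i < len(queue); i++` re-evaluates `len(queue)` on every iteration, whereas a `for range` loop fixes the length up front and would miss appended nodes. A self-contained sketch of the traversal, with the visited set deduplicating shared descendants (the tree shape here is illustrative):

```go
package main

import "fmt"

func main() {
	// children maps a node to its links, standing in for
	// ReadBitswapBlock on stored blocks.
	children := map[string][]string{
		"root": {"a", "b"},
		"a":    {"c"},
		"b":    {"c"}, // shared descendant: must be visited only once
	}
	visited := map[string]struct{}{"root": {}}
	queue := []string{"root"}
	// Index-based loop: len(queue) is re-evaluated each iteration,
	// so nodes appended below are also scanned.
	for i := 0; i < len(queue); i++ {
		for _, l := range children[queue[i]] {
			if _, seen := visited[l]; !seen {
				visited[l] = struct{}{}
				queue = append(queue, l)
			}
		}
	}
	fmt.Println(queue) // [root a b c]
}
```

At the end of the loop, `queue` doubles as the complete list of discovered descendants, which is exactly how `allDescendants` is then fed to `UpdateReferences` and `DeleteBlocks`.
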
diff --git a/src/app/libp2p_helper/src/libp2p_helper/bitswap_downloader.go b/src/app/libp2p_helper/src/libp2p_helper/bitswap_downloader.go
index 8273e02a5bc..70c994650e5 100644
--- a/src/app/libp2p_helper/src/libp2p_helper/bitswap_downloader.go
+++ b/src/app/libp2p_helper/src/libp2p_helper/bitswap_downloader.go
@@ -10,7 +10,6 @@ import (
blocks "github.com/ipfs/go-block-format"
"github.com/ipfs/go-cid"
- ipld "github.com/ipfs/go-ipld-format"
logging "github.com/ipfs/go-log/v2"
)
@@ -76,7 +75,11 @@ type BitswapState interface {
GetStatus(key [32]byte) (codanet.RootBlockStatus, error)
SetStatus(key [32]byte, value codanet.RootBlockStatus) error
DeleteStatus(key [32]byte) error
+	// Delete blocks for which no references exist
DeleteBlocks(keys [][32]byte) error
+ // Reference or dereference blocks related to the root.
+ // Blocks with references are protected from deletion.
+ UpdateReferences(root [32]byte, exists bool, keys ...[32]byte) error
ViewBlock(key [32]byte, callback func([]byte) error) error
StoreDownloadedBlock(block blocks.Block) error
NodeDownloadParams() map[cid.Cid]map[root][]NodeIndex
@@ -86,7 +89,7 @@ type BitswapState interface {
DepthIndices() DepthIndices
NewSession(downloadTimeout time.Duration) (BlockRequester, context.CancelFunc)
RegisterDeadlineTracker(root, time.Duration)
- SendResourceUpdate(type_ ipc.ResourceUpdateType, root root)
+ SendResourceUpdate(type_ ipc.ResourceUpdateType, tag BitswapDataTag, root root)
CheckInvariants()
}
@@ -109,7 +112,7 @@ func kickStartRootDownload(root_ BitswapBlockLink, tag BitswapDataTag, bs Bitswa
bitswapLogger.Debugf("Skipping download request for %s due to status: %s", codanet.BlockHashToCidSuffix(root_), err)
status, err := bs.GetStatus(root_)
if err == nil && status == codanet.Full {
- bs.SendResourceUpdate(ipc.ResourceUpdateType_added, root_)
+ bs.SendResourceUpdate(ipc.ResourceUpdateType_added, tag, root_)
}
return
}
@@ -140,7 +143,7 @@ func kickStartRootDownload(root_ BitswapBlockLink, tag BitswapDataTag, bs Bitswa
copy(rootBlock, b)
return nil
}
- if err := bs.ViewBlock(root_, rootBlockViewF); err != nil && err != (ipld.ErrNotFound{Cid: codanet.BlockHashToCid(root_)}) {
+ if err := bs.ViewBlock(root_, rootBlockViewF); err != nil && !isBlockNotFound(root_, err) {
handleError(err)
return
}
@@ -251,10 +254,6 @@ func processDownloadedBlockStep(params map[root][]NodeIndex, block blocks.Block,
func processDownloadedBlock(block blocks.Block, bs BitswapState) {
bs.CheckInvariants()
id := block.Cid()
- err := bs.StoreDownloadedBlock(block)
- if err != nil {
- bitswapLogger.Errorf("Failed to store block %s", id)
- }
nodeDownloadParams := bs.NodeDownloadParams()
rootDownloadStates := bs.RootDownloadStates()
depthIndices := bs.DepthIndices()
@@ -276,12 +275,24 @@ func processDownloadedBlock(block blocks.Block, bs BitswapState) {
}
rootState.remainingNodeCounter = rootState.remainingNodeCounter - len(ixs)
rps[root] = rootState
+ blockHash, err := codanet.CidToBlockHash(id)
+ if err == nil {
+ err = bs.UpdateReferences(root, true, blockHash)
+ }
+ if err != nil {
+			bitswapLogger.Errorf("Failed to store reference for block %s (to root %s)",
+ id, codanet.BlockHashToCidSuffix(root))
+ }
+ }
+ err := bs.StoreDownloadedBlock(block)
+ if err != nil {
+ bitswapLogger.Errorf("Failed to store block %s", id)
}
newParams, malformed := processDownloadedBlockStep(oldPs, block, rps, bs.MaxBlockSize(), depthIndices, bs.DataConfig())
for root, err := range malformed {
bitswapLogger.Warnf("Block %s of root %s is malformed: %s", id, codanet.BlockHashToCidSuffix(root), err)
- ClearRootDownloadState(bs, root)
- bs.SendResourceUpdate(ipc.ResourceUpdateType_broken, root)
+ DeleteRoot(bs, root)
+ bs.SendResourceUpdate(ipc.ResourceUpdateType_broken, rps[root].getTag(), root)
}
blocksToProcess := make([]blocks.Block, 0)
@@ -316,7 +327,7 @@ func processDownloadedBlock(block blocks.Block, bs BitswapState) {
b, _ := blocks.NewBlockWithCid(blockBytes, childId)
blocksToProcess = append(blocksToProcess, b)
} else {
- if err != (ipld.ErrNotFound{Cid: codanet.BlockHashToCid(link)}) {
+ if !isBlockNotFound(link, err) {
// we still schedule blocks for downloading
// this case should rarely happen in practice
bitswapLogger.Warnf("Failed to retrieve block %s from storage: %s", childId, err)
@@ -338,7 +349,7 @@ func processDownloadedBlock(block blocks.Block, bs BitswapState) {
bitswapLogger.Warnf("Failed to update status of fully downloaded root %s: %s", root, err)
}
ClearRootDownloadState(bs, root)
- bs.SendResourceUpdate(ipc.ResourceUpdateType_added, root)
+ bs.SendResourceUpdate(ipc.ResourceUpdateType_added, rootState.tag, root)
}
}
for _, b := range blocksToProcess {
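
The reordering above, registering a block's references to its roots via `UpdateReferences` before `StoreDownloadedBlock`, together with the reference-aware `DeleteBlocks` in the test state below, amounts to a small reference-counting discipline for blocks shared between roots: a block becomes deletable only once no root references it. A self-contained sketch of that discipline, using string stand-ins for the real 32-byte hashes (`blockRefs` and its methods are illustrative, not the daemon's types):

```go
package main

import "fmt"

// blockRefs tracks, for each block hash, the set of roots that still
// reference it; strings stand in for the real 32-byte hashes.
type blockRefs map[string]map[string]struct{}

// addRef records that root references block.
func (r blockRefs) addRef(block, root string) {
	if r[block] == nil {
		r[block] = make(map[string]struct{})
	}
	r[block][root] = struct{}{}
}

// dropRef removes root's reference and reports whether the block is
// now unreferenced, and therefore safe to delete from storage.
func (r blockRefs) dropRef(block, root string) bool {
	if refs, ok := r[block]; ok {
		delete(refs, root)
		if len(refs) == 0 {
			delete(r, block)
			return true
		}
	}
	return false
}

func main() {
	refs := blockRefs{}
	refs.addRef("b1", "rootA")
	refs.addRef("b1", "rootB")               // b1 is shared by two roots
	fmt.Println(refs.dropRef("b1", "rootA")) // false: rootB still needs b1
	fmt.Println(refs.dropRef("b1", "rootB")) // true: b1 may now be deleted
}
```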
diff --git a/src/app/libp2p_helper/src/libp2p_helper/bitswap_downloader_test.go b/src/app/libp2p_helper/src/libp2p_helper/bitswap_downloader_test.go
index af1b2b5378b..0c2029222de 100644
--- a/src/app/libp2p_helper/src/libp2p_helper/bitswap_downloader_test.go
+++ b/src/app/libp2p_helper/src/libp2p_helper/bitswap_downloader_test.go
@@ -610,6 +610,7 @@ func TestProcessDownloadedBlockStep(t *testing.T) {
type testBitswapState struct {
r *rand.Rand
statuses map[BitswapBlockLink]codanet.RootBlockStatus
+ refs map[BitswapBlockLink]map[root]struct{}
blocks map[cid.Cid][]byte
nodeDownloadParams map[cid.Cid]map[root][]NodeIndex
rootDownloadStates map[root]*RootDownloadState
@@ -670,7 +671,7 @@ func (bs *testBitswapState) RegisterDeadlineTracker(root_ root, downloadTimeout
downloadTimeout time.Duration
}{root: root_, downloadTimeout: downloadTimeout})
}
-func (bs *testBitswapState) SendResourceUpdate(type_ ipc.ResourceUpdateType, root root) {
+func (bs *testBitswapState) SendResourceUpdate(type_ ipc.ResourceUpdateType, _tag BitswapDataTag, root root) {
type1, has := bs.resourceUpdates[root]
if has && type1 != type_ {
panic("duplicate resource update")
@@ -688,12 +689,37 @@ func (bs *testBitswapState) DeleteStatus(key [32]byte) error {
delete(bs.statuses, BitswapBlockLink(key))
return nil
}
+
func (bs *testBitswapState) DeleteBlocks(keys [][32]byte) error {
for _, key := range keys {
- delete(bs.blocks, codanet.BlockHashToCid(key))
+ if len(bs.refs[key]) == 0 {
+ delete(bs.blocks, codanet.BlockHashToCid(key))
+ }
}
return nil
}
+
+func (bs *testBitswapState) UpdateReferences(root_ [32]byte, exists bool, keys ...[32]byte) error {
+ for _, key := range keys {
+ keyRefs, hasKeyRefs := bs.refs[key]
+ if exists {
+ if !hasKeyRefs {
+ keyRefs = make(map[root]struct{})
+ bs.refs[key] = keyRefs
+ }
+ keyRefs[root_] = struct{}{}
+ } else {
+ if hasKeyRefs {
+ delete(keyRefs, root_)
+ if len(keyRefs) == 0 {
+ delete(bs.refs, key)
+ }
+ }
+ }
+ }
+ return nil
+}
+
func (bs *testBitswapState) ViewBlock(key [32]byte, callback func([]byte) error) error {
cid := codanet.BlockHashToCid(key)
b, has := bs.blocks[cid]
@@ -734,7 +760,7 @@ func (bs *testBitswapState) CheckInvariants() {
}
}
-func testBitswapDownloadDo(t *testing.T, r *rand.Rand, bg blockGroup, prepopulatedBlocks *cid.Set, removedBlocks map[cid.Cid]root, expectedToFail []root) {
+func testBitswapDownloadDo(t *testing.T, r *rand.Rand, bg blockGroup, prepopulatedBlocks *cid.Set, removedBlocks map[cid.Cid]root, expectedToFail []root) *testBitswapState {
expectedToTimeout := map[root]bool{}
for _, b := range removedBlocks {
expectedToTimeout[b] = true
@@ -749,6 +775,7 @@ func testBitswapDownloadDo(t *testing.T, r *rand.Rand, bg blockGroup, prepopulat
bs := &testBitswapState{
r: r,
statuses: map[BitswapBlockLink]codanet.RootBlockStatus{},
+ refs: map[BitswapBlockLink]map[root]struct{}{},
blocks: initBlocks,
nodeDownloadParams: map[cid.Cid]map[root][]NodeIndex{},
rootDownloadStates: map[root]*RootDownloadState{},
@@ -857,6 +884,7 @@ loop:
if expectedToTimeoutTotal != len(bs.rootDownloadStates) {
t.Error("Unexpected number of root download states")
}
+ return bs
}
func genLargeBlockGroup(r *rand.Rand) (blockGroup, map[cid.Cid]root, []root) {
@@ -939,7 +967,7 @@ func TestBitswapDownload(t *testing.T) {
}
}
-func TestBitswapDownloadPrepoluated(t *testing.T) {
+func TestBitswapDownloadPrepopulated(t *testing.T) {
seed := time.Now().Unix()
t.Logf("Seed: %d", seed)
r := rand.New(rand.NewSource(seed))
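
Returning the final `*testBitswapState` from `testBitswapDownloadDo` lets callers keep asserting on internal bookkeeping after the download loop finishes. A minimal sketch of such a follow-up check, assuming it lives in the same test package and imports `testing` (`assertNoDanglingRefs` is hypothetical):

```go
// assertNoDanglingRefs is a hypothetical follow-up assertion on the
// state returned by testBitswapDownloadDo: once all roots have been
// resolved or cleaned up, no block should still carry references.
func assertNoDanglingRefs(t *testing.T, bs *testBitswapState) {
	for block, roots := range bs.refs {
		if len(roots) > 0 {
			t.Errorf("block %x still referenced by %d root(s)", block, len(roots))
		}
	}
}
```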
diff --git a/src/app/libp2p_helper/src/libp2p_helper/bitswap_msg.go b/src/app/libp2p_helper/src/libp2p_helper/bitswap_msg.go
index ab6f18ec140..d41e67a1306 100644
--- a/src/app/libp2p_helper/src/libp2p_helper/bitswap_msg.go
+++ b/src/app/libp2p_helper/src/libp2p_helper/bitswap_msg.go
@@ -27,12 +27,12 @@ func (m AddResourcePush) handle(app *app) {
}
}
-type DeleteResourcePushT = ipc.Libp2pHelperInterface_DeleteResource
-type DeleteResourcePush DeleteResourcePushT
+type RemoveResourcePushT = ipc.Libp2pHelperInterface_RemoveResource
+type RemoveResourcePush RemoveResourcePushT
-func fromDeleteResourcePush(m ipcPushMessage) (pushMessage, error) {
- i, err := m.DeleteResource()
- return DeleteResourcePush(i), err
+func fromRemoveResourcePush(m ipcPushMessage) (pushMessage, error) {
+ i, err := m.RemoveResource()
+ return RemoveResourcePush(i), err
}
func extractRootBlockList(l ipc.RootBlockId_List) ([]root, error) {
@@ -52,14 +52,14 @@ func extractRootBlockList(l ipc.RootBlockId_List) ([]root, error) {
return ids, nil
}
-func (m DeleteResourcePush) handle(app *app) {
- idsM, err := DeleteResourcePushT(m).Ids()
+func (m RemoveResourcePush) handle(app *app) {
+ idsM, err := RemoveResourcePushT(m).Ids()
var links []root
if err == nil {
links, err = extractRootBlockList(idsM)
}
if err != nil {
- app.P2p.Logger.Errorf("DeleteResourcePush.handle: error %s", err)
+ app.P2p.Logger.Errorf("RemoveResourcePush.handle: error %s", err)
return
}
app.bitswapCtx.deleteCmds <- bitswapDeleteCmd{links}
diff --git a/src/app/libp2p_helper/src/libp2p_helper/bitswap_test.go b/src/app/libp2p_helper/src/libp2p_helper/bitswap_test.go
index ec96f2ccd67..bc2ce793dd7 100644
--- a/src/app/libp2p_helper/src/libp2p_helper/bitswap_test.go
+++ b/src/app/libp2p_helper/src/libp2p_helper/bitswap_test.go
@@ -15,7 +15,6 @@ import (
capnp "capnproto.org/go/capnp/v3"
"github.com/ipfs/go-cid"
- ipld "github.com/ipfs/go-ipld-format"
multihash "github.com/multiformats/go-multihash"
"github.com/stretchr/testify/require"
"golang.org/x/crypto/blake2b"
@@ -71,12 +70,12 @@ func getRootIds(ids ipc.RootBlockId_List) ([]BitswapBlockLink, error) {
return links, nil
}
-func deleteResource(n testNode, root root) error {
+func removeResource(n testNode, root root) error {
_, seg, err := capnp.NewMessage(capnp.SingleSegment(nil))
if err != nil {
return err
}
- m, err := ipc.NewRootLibp2pHelperInterface_DeleteResource(seg)
+ m, err := ipc.NewRootLibp2pHelperInterface_RemoveResource(seg)
if err != nil {
return err
}
@@ -88,7 +87,7 @@ func deleteResource(n testNode, root root) error {
if err != nil {
return err
}
- DeleteResourcePush(m).handle(n.node)
+ RemoveResourcePush(m).handle(n.node)
return nil
}
@@ -189,7 +188,7 @@ func confirmBlocksNotInStorage(bs *BitswapCtx, resource []byte) error {
})
if err == nil {
return fmt.Errorf("block %s wasn't deleted", codanet.BlockHashToCidSuffix(h))
- } else if err != (ipld.ErrNotFound{Cid: codanet.BlockHashToCid(h)}) {
+ } else if !isBlockNotFound(h, err) {
return err
}
}
@@ -393,7 +392,7 @@ func (conf bitswapTestConfig) execute(nodes []testNode, delayBeforeDownload bool
if !resourceReplicated[ni] {
continue
}
- err = deleteResource(nodes[ni], roots[ni])
+ err = removeResource(nodes[ni], roots[ni])
if err != nil {
return fmt.Errorf("Error removing own resources: %v", err)
}
@@ -550,7 +549,7 @@ func TestBitswapSmoke(t *testing.T) {
}
func TestBitswapSmall(t *testing.T) {
- testBitswap(t, 20, 100, 5, 1<<16, false)
+ testBitswap(t, 20, 10, 5, 1<<16, false)
}
func TestBitswapQC(t *testing.T) {
diff --git a/src/app/libp2p_helper/src/libp2p_helper/config_msg_test.go b/src/app/libp2p_helper/src/libp2p_helper/config_msg_test.go
index 2b8070932ae..099572f1106 100644
--- a/src/app/libp2p_helper/src/libp2p_helper/config_msg_test.go
+++ b/src/app/libp2p_helper/src/libp2p_helper/config_msg_test.go
@@ -253,7 +253,7 @@ func TestGetListeningAddrs(t *testing.T) {
}
func TestListen(t *testing.T) {
- addrStr := "/ip4/127.0.0.2/tcp/8000"
+ addrStr := "/ip4/127.0.0.1/tcp/8000"
testApp, _ := newTestApp(t, nil, true)
diff --git a/src/app/libp2p_helper/src/libp2p_helper/error.go b/src/app/libp2p_helper/src/libp2p_helper/error.go
index c42db326a81..49ffc3a3ef1 100644
--- a/src/app/libp2p_helper/src/libp2p_helper/error.go
+++ b/src/app/libp2p_helper/src/libp2p_helper/error.go
@@ -1,9 +1,11 @@
package main
import (
+ "codanet"
"fmt"
"github.com/go-errors/errors"
+ ipld "github.com/ipfs/go-ipld-format"
)
// TODO: wrap these in a new type, encode them differently in the rpc mainloop
@@ -48,3 +50,7 @@ func needsConfigure() error {
func needsDHT() error {
return badRPC(errors.New("helper not yet joined to pubsub"))
}
+
+func isBlockNotFound(block BitswapBlockLink, err error) bool {
+ return err == ipld.ErrNotFound{Cid: codanet.BlockHashToCid(block)}
+}
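
`isBlockNotFound` centralizes the exact-equality comparison against `ipld.ErrNotFound{Cid: ...}` that previously appeared inline at each call site; this works because `ErrNotFound` is a comparable struct carrying the missing block's CID. A minimal sketch of the same pattern with a stand-in error type (`errNotFound` and `isMissing` are illustrative, not the ipld API):

```go
package main

import "fmt"

// errNotFound stands in for ipld.ErrNotFound: a comparable struct
// error that carries the identity of the missing block.
type errNotFound struct{ key string }

func (e errNotFound) Error() string { return "block not found: " + e.key }

// isMissing mirrors isBlockNotFound: exact equality against a
// freshly constructed not-found value for the same key.
func isMissing(key string, err error) bool {
	want := errNotFound{key: key}
	return err == want
}

func main() {
	var err error = errNotFound{key: "abc"}
	fmt.Println(isMissing("abc", err)) // true: exactly this block is missing
	fmt.Println(isMissing("xyz", err)) // false: a different key
}
```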
diff --git a/src/app/libp2p_helper/src/libp2p_helper/incoming_msg.go b/src/app/libp2p_helper/src/libp2p_helper/incoming_msg.go
index a4472c443c4..d7e7c0f88dc 100644
--- a/src/app/libp2p_helper/src/libp2p_helper/incoming_msg.go
+++ b/src/app/libp2p_helper/src/libp2p_helper/incoming_msg.go
@@ -34,7 +34,7 @@ var rpcRequestExtractors = map[ipc.Libp2pHelperInterface_RpcRequest_Which]extrac
var pushMesssageExtractors = map[ipc.Libp2pHelperInterface_PushMessage_Which]extractPushMessage{
ipc.Libp2pHelperInterface_PushMessage_Which_addResource: fromAddResourcePush,
- ipc.Libp2pHelperInterface_PushMessage_Which_deleteResource: fromDeleteResourcePush,
+ ipc.Libp2pHelperInterface_PushMessage_Which_removeResource: fromRemoveResourcePush,
ipc.Libp2pHelperInterface_PushMessage_Which_downloadResource: fromDownloadResourcePush,
ipc.Libp2pHelperInterface_PushMessage_Which_validation: fromValidationPush,
ipc.Libp2pHelperInterface_PushMessage_Which_heartbeatPeer: fromHeartbeatPeerPush,
diff --git a/src/app/libp2p_helper/src/libp2p_helper/msg.go b/src/app/libp2p_helper/src/libp2p_helper/msg.go
index 053bbd64062..680b76bd487 100644
--- a/src/app/libp2p_helper/src/libp2p_helper/msg.go
+++ b/src/app/libp2p_helper/src/libp2p_helper/msg.go
@@ -395,7 +395,7 @@ func mkStreamMessageReceivedUpcall(streamIdx uint64, data []byte) *capnp.Message
})
}
-func mkResourceUpdatedUpcall(type_ ipc.ResourceUpdateType, rootIds []root) *capnp.Message {
+func mkResourceUpdatedUpcall(type_ ipc.ResourceUpdateType, tag BitswapDataTag, rootIds []root) *capnp.Message {
return mkPushMsg(func(m ipc.DaemonInterface_PushMessage) {
im, err := m.NewResourceUpdated()
panicOnErr(err)
@@ -403,6 +403,7 @@ func mkResourceUpdatedUpcall(type_ ipc.ResourceUpdateType, rootIds []root) *capn
panic("too many root ids in a single upcall")
}
im.SetType(type_)
+ im.SetTag(uint8(tag))
mIds, err := im.NewIds(int32(len(rootIds)))
panicOnErr(err)
for i, rootId := range rootIds {
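
With the tag threaded through `SendResourceUpdate` and serialized via `SetTag(uint8(tag))`, the resource-updated upcall now tells the daemon which kind of resource (by `BitswapDataTag`) an added/broken notification refers to. A rough sketch of the decoded shape a consumer might work with (the struct and enum values are assumptions for illustration, not the generated capnp API):

```go
package main

import "fmt"

// updateType and resourceUpdate give an illustrative, decoded view
// of the ResourceUpdated upcall after this change: the update type
// is paired with the data tag of the resource.
type updateType int

const (
	added updateType = iota
	broken
)

type resourceUpdate struct {
	typ updateType
	tag uint8    // BitswapDataTag, serialized via SetTag(uint8(tag))
	ids []string // affected root block ids
}

func main() {
	u := resourceUpdate{typ: added, tag: 0, ids: []string{"root1"}}
	fmt.Printf("type=%d tag=%d roots=%v\n", u.typ, u.tag, u.ids)
}
```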
diff --git a/src/app/libp2p_helper/src/libp2p_helper/util_test.go b/src/app/libp2p_helper/src/libp2p_helper/util_test.go
index 6047e490b0c..acd95262c3e 100644
--- a/src/app/libp2p_helper/src/libp2p_helper/util_test.go
+++ b/src/app/libp2p_helper/src/libp2p_helper/util_test.go
@@ -82,7 +82,7 @@ func newTestAppWithMaxConnsAndCtxAndGrace(t *testing.T, privkey crypto.PrivKey,
panicOnErr(helper.Host.Close())
})
outChan := make(chan *capnp.Message, 64)
- bitswapCtx := NewBitswapCtx(ctx, outChan)
+ bitswapCtx := NewBitswapCtxWithMaxBlockSize(1<<9, ctx, outChan)
bitswapCtx.engine = helper.Bitswap
bitswapCtx.storage = helper.BitswapStorage
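
Dropping the max block size to `1<<9` bytes in tests forces even small payloads to split into multi-level bitswap block trees, exercising the link-traversal paths that production-sized blocks rarely hit. A back-of-the-envelope sketch of how many blocks a payload needs, assuming a framing of a 2-byte link count plus 32-byte links with data only in leaves (the layout constants here are assumptions for illustration, not the protocol definition):

```go
package main

import "fmt"

// blocksNeeded gives a rough count of bitswap blocks for a payload,
// under the assumed framing described above.
func blocksNeeded(payload, maxBlockSize int) int {
	const linkSize, prefix = 32, 2
	linksPerBlock := (maxBlockSize - prefix) / linkSize
	dataPerLeaf := maxBlockSize - prefix
	leaves := (payload + dataPerLeaf - 1) / dataPerLeaf
	total := leaves
	// Add interior blocks level by level until a single root remains.
	for n := leaves; n > 1; {
		n = (n + linksPerBlock - 1) / linksPerBlock
		total += n
	}
	return total
}

func main() {
	// With maxBlockSize = 1<<9, as in NewBitswapCtxWithMaxBlockSize,
	// even a 64 KiB resource fans out into a small tree of blocks.
	fmt.Println(blocksNeeded(1<<16, 1<<9))
}
```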
diff --git a/src/config/dev.mlh b/src/config/dev.mlh
index e7f0323fb70..85914d9c89a 100644
--- a/src/config/dev.mlh
+++ b/src/config/dev.mlh
@@ -51,7 +51,6 @@
(*BEGIN src/config/amount_defaults/standard.mlh*)
-[%%define default_transaction_fee "5"]
[%%define default_snark_worker_fee "1"]
[%%define minimum_user_command_fee "2"]
(*END src/config/amount_defaults/standard.mlh*)
diff --git a/src/config/devnet.mlh b/src/config/devnet.mlh
index a15553663e1..d3e0dbe3aa7 100644
--- a/src/config/devnet.mlh
+++ b/src/config/devnet.mlh
@@ -37,7 +37,6 @@
(*BEGIN src/config/amount_defaults/realistic.mlh*)
-[%%define default_transaction_fee "0.25"]
[%%define default_snark_worker_fee "0.1"]
[%%define minimum_user_command_fee "0.001"]
(*END src/config/amount_defaults/realistic.mlh*)
diff --git a/src/config/lightnet.mlh b/src/config/lightnet.mlh
index 87a58d5969c..a1fdf739640 100644
--- a/src/config/lightnet.mlh
+++ b/src/config/lightnet.mlh
@@ -37,7 +37,6 @@
(*BEGIN src/config/amount_defaults/realistic.mlh*)
-[%%define default_transaction_fee "0.25"]
[%%define default_snark_worker_fee "0.1"]
[%%define minimum_user_command_fee "0.001"]
(*END src/config/amount_defaults/realistic.mlh*)
diff --git a/src/config/mainnet.mlh b/src/config/mainnet.mlh
index d92617178b8..777cdb5ec6e 100644
--- a/src/config/mainnet.mlh
+++ b/src/config/mainnet.mlh
@@ -37,7 +37,6 @@
(*BEGIN src/config/amount_defaults/realistic.mlh*)
-[%%define default_transaction_fee "0.25"]
[%%define default_snark_worker_fee "0.1"]
[%%define minimum_user_command_fee "0.001"]
(*END src/config/amount_defaults/realistic.mlh*)
diff --git a/src/dune-project b/src/dune-project
index 91a68888957..766a8cc514a 100644
--- a/src/dune-project
+++ b/src/dune-project
@@ -136,6 +136,7 @@
(package (name parallel_scan))
(package (name participating_state))
(package (name pasta_bindings))
+(package (name patch_archive_test))
(package (name perf_histograms))
(package (name pickles_base))
(package (name pickles))
diff --git a/src/lib/block_producer/block_producer.ml b/src/lib/block_producer/block_producer.ml
index 0a255388780..2b8ee32c1c3 100644
--- a/src/lib/block_producer/block_producer.ml
+++ b/src/lib/block_producer/block_producer.ml
@@ -656,437 +656,598 @@ let validate_genesis_protocol_state_block ~genesis_state_hash (b, v) =
|> Result.map
~f:(Fn.flip Validation.with_body (Mina_block.body @@ With_hash.data b))
-let run ~context:(module Context : CONTEXT) ~vrf_evaluator ~prover ~verifier
- ~trust_system ~get_completed_work ~transaction_resource_pool
- ~time_controller ~consensus_local_state ~coinbase_receiver ~frontier_reader
- ~transition_writer ~set_next_producer_timing ~log_block_creation
- ~block_reward_threshold ~block_produced_bvar ~vrf_evaluation_state ~net
- ~zkapp_cmd_limit_hardcap =
- let module Consensus_context = struct
- include Context
+let log_bootstrap_mode ~logger () =
+ [%log info] "Pausing block production while bootstrapping"
- let genesis_constants = precomputed_values.genesis_constants
- end in
- let open Consensus_context in
- let constraint_constants = precomputed_values.constraint_constants in
- let consensus_constants = precomputed_values.consensus_constants in
- O1trace.sync_thread "produce_blocks" (fun () ->
- let genesis_breadcrumb =
- let started = ref false in
- let genesis_breadcrumb_ivar = Ivar.create () in
- fun () ->
- if !started then Ivar.read genesis_breadcrumb_ivar
- else (
- started := true ;
- let max_num_retries = 3 in
- let rec go retries =
- [%log info]
- "Generating genesis proof ($attempts_remaining / $max_attempts)"
- ~metadata:
- [ ("attempts_remaining", `Int retries)
- ; ("max_attempts", `Int max_num_retries)
- ] ;
- match%bind
- Prover.create_genesis_block prover
- (Genesis_proof.to_inputs precomputed_values)
- with
- | Ok res ->
- Ivar.fill genesis_breadcrumb_ivar (Ok res) ;
- return (Ok res)
- | Error err ->
- [%log error] "Failed to generate genesis breadcrumb: $error"
- ~metadata:[ ("error", Error_json.error_to_yojson err) ] ;
- if retries > 0 then go (retries - 1)
- else (
- Ivar.fill genesis_breadcrumb_ivar (Error err) ;
- return (Error err) )
- in
- go max_num_retries )
+let genesis_breadcrumb_creator ~context:(module Context : CONTEXT) prover =
+ let open Context in
+ let started = ref false in
+ let genesis_breadcrumb_ivar = Ivar.create () in
+ fun () ->
+ if !started then Ivar.read genesis_breadcrumb_ivar
+ else (
+ started := true ;
+ let max_num_retries = 3 in
+ let rec go retries =
+ [%log info]
+ "Generating genesis proof ($attempts_remaining / $max_attempts)"
+ ~metadata:
+ [ ("attempts_remaining", `Int retries)
+ ; ("max_attempts", `Int max_num_retries)
+ ] ;
+ match%bind
+ Prover.create_genesis_block prover
+ (Genesis_proof.to_inputs precomputed_values)
+ with
+ | Ok res ->
+ Ivar.fill genesis_breadcrumb_ivar (Ok res) ;
+ return (Ok res)
+ | Error err ->
+ [%log error] "Failed to generate genesis breadcrumb: $error"
+ ~metadata:[ ("error", Error_json.error_to_yojson err) ] ;
+ if retries > 0 then go (retries - 1)
+ else (
+ Ivar.fill genesis_breadcrumb_ivar (Error err) ;
+ return (Error err) )
+ in
+ go max_num_retries )
+
+let produce ~genesis_breadcrumb ~context:(module Context : CONTEXT) ~prover
+ ~verifier ~trust_system ~get_completed_work ~transaction_resource_pool
+ ~frontier_reader ~time_controller ~transition_writer ~log_block_creation
+ ~block_reward_threshold ~block_produced_bvar ~slot_tx_end ~slot_chain_end
+ ~net ~zkapp_cmd_limit_hardcap ivar
+ (scheduled_time, block_data, winner_pubkey) =
+ let open Context in
+ let module Breadcrumb = Transition_frontier.Breadcrumb in
+ let open Interruptible.Let_syntax in
+ let rejected_blocks_logger =
+ Logger.create ~id:Logger.Logger_id.rejected_blocks ()
+ in
+ match Broadcast_pipe.Reader.peek frontier_reader with
+ | None ->
+ log_bootstrap_mode ~logger () ;
+ Interruptible.return ()
+ | Some frontier -> (
+ let global_slot =
+ Consensus.Data.Block_data.global_slot_since_genesis block_data
in
- let rejected_blocks_logger =
- Logger.create ~id:Logger.Logger_id.rejected_blocks ()
+ Internal_tracing.with_slot global_slot
+ @@ fun () ->
+ [%log internal] "Begin_block_production" ;
+ let open Transition_frontier.Extensions in
+ let transition_registry =
+ get_extension
+ (Transition_frontier.extensions frontier)
+ Transition_registry
in
- let log_bootstrap_mode () =
- [%log info] "Pausing block production while bootstrapping"
+ let crumb = Transition_frontier.best_tip frontier in
+ let crumb =
+ let crumb_global_slot_since_genesis =
+ Breadcrumb.protocol_state crumb
+ |> Protocol_state.consensus_state
+ |> Consensus.Data.Consensus_state.global_slot_since_genesis
+ in
+ let block_global_slot_since_genesis =
+ Consensus.Proof_of_stake.Data.Block_data.global_slot_since_genesis
+ block_data
+ in
+ if
+ Mina_numbers.Global_slot_since_genesis.equal
+ crumb_global_slot_since_genesis block_global_slot_since_genesis
+ then
+ (* We received a block for this slot over the network before
+ attempting to produce our own. Build upon its parent instead
+ of attempting (and failing) to build upon the block itself.
+ *)
+ Transition_frontier.find_exn frontier (Breadcrumb.parent_hash crumb)
+ else crumb
in
- let slot_tx_end =
- Runtime_config.slot_tx_end precomputed_values.runtime_config
+ let start = Block_time.now time_controller in
+ [%log info]
+ ~metadata:
+ [ ("parent_hash", Breadcrumb.parent_hash crumb |> State_hash.to_yojson)
+ ; ( "protocol_state"
+ , Breadcrumb.protocol_state crumb |> Protocol_state.value_to_yojson
+ )
+ ]
+ "Producing new block with parent $parent_hash%!" ;
+ let previous_transition = Breadcrumb.block_with_hash crumb in
+ let previous_protocol_state =
+ Header.protocol_state
+ @@ Mina_block.header (With_hash.data previous_transition)
in
- let slot_chain_end =
- Runtime_config.slot_chain_end precomputed_values.runtime_config
+ let%bind previous_protocol_state_proof =
+ if
+ Consensus.Data.Consensus_state.is_genesis_state
+ (Protocol_state.consensus_state previous_protocol_state)
+ && Option.is_none precomputed_values.proof_data
+ then (
+ match%bind Interruptible.uninterruptible (genesis_breadcrumb ()) with
+ | Ok block ->
+ let proof = Blockchain_snark.Blockchain.proof block in
+ Interruptible.lift (Deferred.return proof) (Deferred.never ())
+ | Error err ->
+ [%log error]
+ "Aborting block production: cannot generate a genesis proof"
+ ~metadata:[ ("error", Error_json.error_to_yojson err) ] ;
+ Interruptible.lift (Deferred.never ()) (Deferred.return ()) )
+ else
+ return
+ ( Header.protocol_state_proof
+ @@ Mina_block.header (With_hash.data previous_transition) )
in
- let module Breadcrumb = Transition_frontier.Breadcrumb in
- let produce ivar (scheduled_time, block_data, winner_pubkey) =
- let open Interruptible.Let_syntax in
- match Broadcast_pipe.Reader.peek frontier_reader with
- | None ->
- log_bootstrap_mode () ; Interruptible.return ()
- | Some frontier -> (
- let global_slot =
- Consensus.Data.Block_data.global_slot_since_genesis block_data
- in
- Internal_tracing.with_slot global_slot
- @@ fun () ->
- [%log internal] "Begin_block_production" ;
- let open Transition_frontier.Extensions in
- let transition_registry =
- get_extension
- (Transition_frontier.extensions frontier)
- Transition_registry
- in
- let crumb = Transition_frontier.best_tip frontier in
- let crumb =
- let crumb_global_slot_since_genesis =
- Breadcrumb.protocol_state crumb
- |> Protocol_state.consensus_state
- |> Consensus.Data.Consensus_state.global_slot_since_genesis
+ [%log internal] "Get_transactions_from_pool" ;
+ let transactions =
+ Network_pool.Transaction_pool.Resource_pool.transactions
+ transaction_resource_pool
+ |> Sequence.map
+ ~f:Transaction_hash.User_command_with_valid_signature.data
+ in
+ let%bind () = Interruptible.lift (Deferred.return ()) (Ivar.read ivar) in
+ [%log internal] "Generate_next_state" ;
+ let%bind next_state_opt =
+ generate_next_state ~commit_id ~constraint_constants ~scheduled_time
+ ~block_data ~previous_protocol_state ~time_controller
+ ~staged_ledger:(Breadcrumb.staged_ledger crumb)
+ ~transactions ~get_completed_work ~logger ~log_block_creation
+ ~winner_pk:winner_pubkey ~block_reward_threshold
+ ~zkapp_cmd_limit:!zkapp_cmd_limit ~zkapp_cmd_limit_hardcap
+ ~slot_tx_end ~slot_chain_end
+ in
+ [%log internal] "Generate_next_state_done" ;
+ match next_state_opt with
+ | None ->
+ Interruptible.return ()
+ | Some (protocol_state, internal_transition, pending_coinbase_witness) ->
+ let diff =
+ Internal_transition.staged_ledger_diff internal_transition
+ in
+ let commands = Staged_ledger_diff.commands diff in
+ let transactions_count = List.length commands in
+ let protocol_state_hashes = Protocol_state.hashes protocol_state in
+ let consensus_state_with_hashes =
+ { With_hash.hash = protocol_state_hashes
+ ; data = Protocol_state.consensus_state protocol_state
+ }
+ in
+ [%log internal] "@produced_block_state_hash"
+ ~metadata:
+ [ ( "state_hash"
+ , `String
+ (Mina_base.State_hash.to_base58_check
+ protocol_state_hashes.state_hash ) )
+ ] ;
+ let module Consensus_context = struct
+ include Context
+
+ let genesis_constants = precomputed_values.genesis_constants
+ end in
+ Internal_tracing.with_state_hash protocol_state_hashes.state_hash
+ @@ fun () ->
+ Debug_assert.debug_assert (fun () ->
+ [%test_result: [ `Take | `Keep ]]
+ (Consensus.Hooks.select
+ ~context:(module Consensus_context)
+ ~existing:
+ (With_hash.map ~f:Mina_block.consensus_state
+ previous_transition )
+ ~candidate:consensus_state_with_hashes )
+ ~expect:`Take
+ ~message:
+ "newly generated consensus states should be selected over \
+ their parent" ;
+ let root_consensus_state_with_hashes =
+ Transition_frontier.root frontier
+ |> Breadcrumb.consensus_state_with_hashes
in
- let block_global_slot_since_genesis =
- Consensus.Proof_of_stake.Data.Block_data
- .global_slot_since_genesis block_data
+ [%test_result: [ `Take | `Keep ]]
+ (Consensus.Hooks.select
+ ~context:(module Consensus_context)
+ ~existing:root_consensus_state_with_hashes
+ ~candidate:consensus_state_with_hashes )
+ ~expect:`Take
+ ~message:
+ "newly generated consensus states should be selected over \
+ the tf root" ) ;
+ Interruptible.uninterruptible
+ (let open Deferred.Let_syntax in
+ let emit_breadcrumb () =
+ let open Deferred.Result.Let_syntax in
+ [%log internal]
+ ~metadata:[ ("transactions_count", `Int transactions_count) ]
+ "Produce_state_transition_proof" ;
+ let%bind protocol_state_proof =
+ time ~logger ~time_controller
+ "Protocol_state_proof proving time(ms)" (fun () ->
+ O1trace.thread "dispatch_block_proving" (fun () ->
+ Prover.prove prover ~prev_state:previous_protocol_state
+ ~prev_state_proof:previous_protocol_state_proof
+ ~next_state:protocol_state internal_transition
+ pending_coinbase_witness )
+ |> Deferred.Result.map_error ~f:(fun err ->
+ `Prover_error
+ ( err
+ , ( previous_protocol_state_proof
+ , internal_transition
+ , pending_coinbase_witness ) ) ) )
in
- if
- Mina_numbers.Global_slot_since_genesis.equal
- crumb_global_slot_since_genesis
- block_global_slot_since_genesis
- then
- (* We received a block for this slot over the network before
- attempting to produce our own. Build upon its parent instead
- of attempting (and failing) to build upon the block itself.
- *)
- Transition_frontier.find_exn frontier
- (Breadcrumb.parent_hash crumb)
- else crumb
- in
- let start = Block_time.now time_controller in
- [%log info]
- ~metadata:
- [ ( "parent_hash"
- , Breadcrumb.parent_hash crumb |> State_hash.to_yojson )
- ; ( "protocol_state"
- , Breadcrumb.protocol_state crumb
- |> Protocol_state.value_to_yojson )
- ]
- "Producing new block with parent $parent_hash%!" ;
- let previous_transition = Breadcrumb.block_with_hash crumb in
- let previous_protocol_state =
- Header.protocol_state
- @@ Mina_block.header (With_hash.data previous_transition)
- in
- let%bind previous_protocol_state_proof =
- if
- Consensus.Data.Consensus_state.is_genesis_state
- (Protocol_state.consensus_state previous_protocol_state)
- && Option.is_none precomputed_values.proof_data
- then (
- match%bind
- Interruptible.uninterruptible (genesis_breadcrumb ())
- with
- | Ok block ->
- let proof = Blockchain_snark.Blockchain.proof block in
- Interruptible.lift (Deferred.return proof)
- (Deferred.never ())
- | Error err ->
- [%log error]
- "Aborting block production: cannot generate a genesis \
- proof"
- ~metadata:[ ("error", Error_json.error_to_yojson err) ] ;
- Interruptible.lift (Deferred.never ()) (Deferred.return ())
- )
- else
- return
- ( Header.protocol_state_proof
- @@ Mina_block.header (With_hash.data previous_transition) )
- in
- [%log internal] "Get_transactions_from_pool" ;
- let transactions =
- Network_pool.Transaction_pool.Resource_pool.transactions
- transaction_resource_pool
- |> Sequence.map
- ~f:Transaction_hash.User_command_with_valid_signature.data
- in
- let%bind () =
- Interruptible.lift (Deferred.return ()) (Ivar.read ivar)
- in
- [%log internal] "Generate_next_state" ;
- let%bind next_state_opt =
- generate_next_state ~commit_id ~constraint_constants
- ~scheduled_time ~block_data ~previous_protocol_state
- ~time_controller
- ~staged_ledger:(Breadcrumb.staged_ledger crumb)
- ~transactions ~get_completed_work ~logger ~log_block_creation
- ~winner_pk:winner_pubkey ~block_reward_threshold
- ~zkapp_cmd_limit:!zkapp_cmd_limit ~zkapp_cmd_limit_hardcap
- ~slot_tx_end ~slot_chain_end
- in
- [%log internal] "Generate_next_state_done" ;
- match next_state_opt with
- | None ->
- Interruptible.return ()
- | Some
- (protocol_state, internal_transition, pending_coinbase_witness)
- ->
- let diff =
- Internal_transition.staged_ledger_diff internal_transition
- in
- let commands = Staged_ledger_diff.commands diff in
- let transactions_count = List.length commands in
- let protocol_state_hashes =
- Protocol_state.hashes protocol_state
- in
- let consensus_state_with_hashes =
+ let staged_ledger_diff =
+ Internal_transition.staged_ledger_diff internal_transition
+ in
+ let previous_state_hash =
+ (Protocol_state.hashes previous_protocol_state).state_hash
+ in
+ [%log internal] "Produce_chain_transition_proof" ;
+ let delta_block_chain_proof =
+ Transition_chain_prover.prove
+ ~length:(Mina_numbers.Length.to_int consensus_constants.delta)
+ ~frontier previous_state_hash
+ |> Option.value_exn
+ in
+ [%log internal] "Produce_validated_transition" ;
+ let%bind transition =
+ let open Result.Let_syntax in
+ Validation.wrap
{ With_hash.hash = protocol_state_hashes
- ; data = Protocol_state.consensus_state protocol_state
+ ; data =
+ (let body = Body.create staged_ledger_diff in
+ Mina_block.create ~body
+ ~header:
+ (Header.create ~protocol_state ~protocol_state_proof
+ ~delta_block_chain_proof () ) )
}
- in
- [%log internal] "@produced_block_state_hash"
- ~metadata:
- [ ( "state_hash"
- , `String
- (Mina_base.State_hash.to_base58_check
- protocol_state_hashes.state_hash ) )
- ] ;
- Internal_tracing.with_state_hash
- protocol_state_hashes.state_hash
- @@ fun () ->
- Debug_assert.debug_assert (fun () ->
- [%test_result: [ `Take | `Keep ]]
- (Consensus.Hooks.select
- ~context:(module Consensus_context)
- ~existing:
- (With_hash.map ~f:Mina_block.consensus_state
- previous_transition )
- ~candidate:consensus_state_with_hashes )
- ~expect:`Take
- ~message:
- "newly generated consensus states should be selected \
- over their parent" ;
- let root_consensus_state_with_hashes =
- Transition_frontier.root frontier
- |> Breadcrumb.consensus_state_with_hashes
- in
- [%test_result: [ `Take | `Keep ]]
- (Consensus.Hooks.select
- ~context:(module Consensus_context)
- ~existing:root_consensus_state_with_hashes
- ~candidate:consensus_state_with_hashes )
- ~expect:`Take
- ~message:
- "newly generated consensus states should be selected \
- over the tf root" ) ;
- Interruptible.uninterruptible
- (let open Deferred.Let_syntax in
- let emit_breadcrumb () =
- let open Deferred.Result.Let_syntax in
- [%log internal]
- ~metadata:
- [ ("transactions_count", `Int transactions_count) ]
- "Produce_state_transition_proof" ;
- let%bind protocol_state_proof =
- time ~logger ~time_controller
- "Protocol_state_proof proving time(ms)" (fun () ->
- O1trace.thread "dispatch_block_proving" (fun () ->
- Prover.prove prover
- ~prev_state:previous_protocol_state
- ~prev_state_proof:previous_protocol_state_proof
- ~next_state:protocol_state internal_transition
- pending_coinbase_witness )
- |> Deferred.Result.map_error ~f:(fun err ->
- `Prover_error
- ( err
- , ( previous_protocol_state_proof
- , internal_transition
- , pending_coinbase_witness ) ) ) )
- in
- let staged_ledger_diff =
- Internal_transition.staged_ledger_diff internal_transition
- in
- let previous_state_hash =
- (Protocol_state.hashes previous_protocol_state).state_hash
- in
- [%log internal] "Produce_chain_transition_proof" ;
- let delta_block_chain_proof =
- Transition_chain_prover.prove
- ~length:
- (Mina_numbers.Length.to_int consensus_constants.delta)
- ~frontier previous_state_hash
- |> Option.value_exn
- in
- [%log internal] "Produce_validated_transition" ;
- let%bind transition =
- let open Result.Let_syntax in
- Validation.wrap
- { With_hash.hash = protocol_state_hashes
- ; data =
- (let body = Body.create staged_ledger_diff in
- Mina_block.create ~body
- ~header:
- (Header.create ~protocol_state
- ~protocol_state_proof
- ~delta_block_chain_proof () ) )
- }
- |> Validation.skip_time_received_validation
- `This_block_was_not_received_via_gossip
- |> Validation.skip_protocol_versions_validation
- `This_block_has_valid_protocol_versions
- |> validate_genesis_protocol_state_block
- ~genesis_state_hash:
- (Protocol_state.genesis_state_hash
- ~state_hash:(Some previous_state_hash)
- previous_protocol_state )
- >>| Validation.skip_proof_validation
- `This_block_was_generated_internally
- >>| Validation.skip_delta_block_chain_validation
- `This_block_was_not_received_via_gossip
- >>= Validation.validate_frontier_dependencies
- ~to_header:Mina_block.header
- ~context:(module Consensus_context)
- ~root_block:
- ( Transition_frontier.root frontier
- |> Breadcrumb.block_with_hash )
- ~is_block_in_frontier:
- (Fn.compose Option.is_some
- (Transition_frontier.find frontier) )
- |> Deferred.return
- in
- let transition_receipt_time = Some (Time.now ()) in
- let%bind breadcrumb =
- time ~logger ~time_controller
- "Build breadcrumb on produced block" (fun () ->
- Breadcrumb.build ~logger ~precomputed_values ~verifier
- ~get_completed_work:(Fn.const None) ~trust_system
- ~parent:crumb ~transition
- ~sender:None (* Consider skipping `All here *)
- ~skip_staged_ledger_verification:`Proofs
- ~transition_receipt_time () )
- |> Deferred.Result.map_error ~f:(function
- | `Invalid_staged_ledger_diff e ->
- `Invalid_staged_ledger_diff
- (e, staged_ledger_diff)
- | ( `Fatal_error _
- | `Invalid_genesis_protocol_state
- | `Invalid_staged_ledger_hash _
- | `Not_selected_over_frontier_root
- | `Parent_missing_from_frontier
- | `Prover_error _ ) as err ->
- err )
- in
- let txs =
- Mina_block.transactions ~constraint_constants
- (Breadcrumb.block breadcrumb)
- |> List.map ~f:Transaction.yojson_summary_with_status
- in
- [%log internal] "@block_metadata"
- ~metadata:
- [ ( "blockchain_length"
- , Mina_numbers.Length.to_yojson
- @@ Mina_block.blockchain_length
- @@ Breadcrumb.block breadcrumb )
- ; ("transactions", `List txs)
- ] ;
- [%str_log info]
- ~metadata:
- [ ("breadcrumb", Breadcrumb.to_yojson breadcrumb) ]
- Block_produced ;
- (* let uptime service (and any other waiters) know about breadcrumb *)
- Bvar.broadcast block_produced_bvar breadcrumb ;
- Mina_metrics.(
- Counter.inc_one Block_producer.blocks_produced) ;
- Mina_metrics.Block_producer.(
- Block_production_delay_histogram.observe
- block_production_delay
- Time.(
- Span.to_ms
- @@ diff (now ())
- @@ Block_time.to_time_exn scheduled_time)) ;
- [%log internal] "Send_breadcrumb_to_transition_frontier" ;
- let%bind.Async.Deferred () =
- Strict_pipe.Writer.write transition_writer breadcrumb
- in
- let metadata =
- [ ( "state_hash"
- , State_hash.to_yojson protocol_state_hashes.state_hash
- )
- ]
- in
- [%log internal] "Wait_for_confirmation" ;
- [%log debug] ~metadata
- "Waiting for block $state_hash to be inserted into \
- frontier" ;
- Deferred.choose
- [ Deferred.choice
- (Transition_registry.register transition_registry
- protocol_state_hashes.state_hash )
- (Fn.const (Ok `Transition_accepted))
- ; Deferred.choice
- ( Block_time.Timeout.create time_controller
- (* We allow up to 20 seconds for the transition
- to make its way from the transition_writer to
- the frontier.
- This value is chosen to be reasonably
- generous. In theory, this should not take
- terribly long. But long cycles do happen in
- our system, and with medium curves those long
- cycles can be substantial.
- *)
- (Block_time.Span.of_ms 20000L)
- ~f:(Fn.const ())
- |> Block_time.Timeout.to_deferred )
- (Fn.const (Ok `Timed_out))
- ]
- >>= function
- | `Transition_accepted ->
- [%log internal] "Transition_accepted" ;
- [%log info] ~metadata
- "Generated transition $state_hash was accepted into \
- transition frontier" ;
- Deferred.map ~f:Result.return
- (Mina_networking.broadcast_state net
- (Breadcrumb.block_with_hash breadcrumb) )
- | `Timed_out ->
- (* FIXME #3167: this should be fatal, and more
- importantly, shouldn't happen.
+ |> Validation.skip_time_received_validation
+ `This_block_was_not_received_via_gossip
+ |> Validation.skip_protocol_versions_validation
+ `This_block_has_valid_protocol_versions
+ |> validate_genesis_protocol_state_block
+ ~genesis_state_hash:
+ (Protocol_state.genesis_state_hash
+ ~state_hash:(Some previous_state_hash)
+ previous_protocol_state )
+ >>| Validation.skip_proof_validation
+ `This_block_was_generated_internally
+ >>| Validation.skip_delta_block_chain_validation
+ `This_block_was_not_received_via_gossip
+ >>= Validation.validate_frontier_dependencies
+ ~to_header:Mina_block.header
+ ~context:(module Consensus_context)
+ ~root_block:
+ ( Transition_frontier.root frontier
+ |> Breadcrumb.block_with_hash )
+ ~is_block_in_frontier:
+ (Fn.compose Option.is_some
+ (Transition_frontier.find frontier) )
+ |> Deferred.return
+ in
+ let transition_receipt_time = Some (Time.now ()) in
+ let%bind breadcrumb =
+ time ~logger ~time_controller
+ "Build breadcrumb on produced block" (fun () ->
+ Breadcrumb.build ~logger ~precomputed_values ~verifier
+ ~get_completed_work:(Fn.const None) ~trust_system
+ ~parent:crumb ~transition
+ ~sender:None (* Consider skipping `All here *)
+ ~skip_staged_ledger_verification:`Proofs
+ ~transition_receipt_time () )
+ |> Deferred.Result.map_error ~f:(function
+ | `Invalid_staged_ledger_diff e ->
+ `Invalid_staged_ledger_diff (e, staged_ledger_diff)
+ | ( `Fatal_error _
+ | `Invalid_genesis_protocol_state
+ | `Invalid_staged_ledger_hash _
+ | `Not_selected_over_frontier_root
+ | `Parent_missing_from_frontier
+ | `Prover_error _ ) as err ->
+ err )
+ in
+ let txs =
+ Mina_block.transactions ~constraint_constants
+ (Breadcrumb.block breadcrumb)
+ |> List.map ~f:Transaction.yojson_summary_with_status
+ in
+ [%log internal] "@block_metadata"
+ ~metadata:
+ [ ( "blockchain_length"
+ , Mina_numbers.Length.to_yojson
+ @@ Mina_block.blockchain_length
+ @@ Breadcrumb.block breadcrumb )
+ ; ("transactions", `List txs)
+ ] ;
+ [%str_log info]
+ ~metadata:[ ("breadcrumb", Breadcrumb.to_yojson breadcrumb) ]
+ Block_produced ;
+ (* let uptime service (and any other waiters) know about breadcrumb *)
+ Bvar.broadcast block_produced_bvar breadcrumb ;
+ Mina_metrics.(Counter.inc_one Block_producer.blocks_produced) ;
+ Mina_metrics.Block_producer.(
+ Block_production_delay_histogram.observe block_production_delay
+ Time.(
+ Span.to_ms
+ @@ diff (now ())
+ @@ Block_time.to_time_exn scheduled_time)) ;
+ [%log internal] "Send_breadcrumb_to_transition_frontier" ;
+ let%bind.Async.Deferred () =
+ Strict_pipe.Writer.write transition_writer breadcrumb
+ in
+ let metadata =
+ [ ( "state_hash"
+ , State_hash.to_yojson protocol_state_hashes.state_hash )
+ ]
+ in
+ [%log internal] "Wait_for_confirmation" ;
+ [%log debug] ~metadata
+ "Waiting for block $state_hash to be inserted into frontier" ;
+ Deferred.choose
+ [ Deferred.choice
+ (Transition_registry.register transition_registry
+ protocol_state_hashes.state_hash )
+ (Fn.const (Ok `Transition_accepted))
+ ; Deferred.choice
+ ( Block_time.Timeout.create time_controller
+ (* We allow up to 20 seconds for the transition
+ to make its way from the transition_writer to
+ the frontier.
+ This value is chosen to be reasonably
+ generous. In theory, this should not take
+ terribly long. But long cycles do happen in
+ our system, and with medium curves those long
+ cycles can be substantial.
*)
- [%log internal] "Transition_accept_timeout" ;
- let msg : (_, unit, string, unit) format4 =
- "Timed out waiting for generated transition \
- $state_hash to enter transition frontier. \
- Continuing to produce new blocks anyway. This may \
- mean your CPU is overloaded. Consider disabling \
- `-run-snark-worker` if it's configured."
- in
- let span =
- Block_time.diff (Block_time.now time_controller) start
- in
- let metadata =
- [ ( "time"
- , `Int
- (Block_time.Span.to_ms span |> Int64.to_int_exn)
- )
- ; ( "protocol_state"
- , Protocol_state.Value.to_yojson protocol_state )
- ]
- @ metadata
- in
- [%log' debug rejected_blocks_logger] ~metadata msg ;
- [%log fatal] ~metadata msg ;
- return ()
+ (Block_time.Span.of_ms 20000L)
+ ~f:(Fn.const ())
+ |> Block_time.Timeout.to_deferred )
+ (Fn.const (Ok `Timed_out))
+ ]
+ >>= function
+ | `Transition_accepted ->
+ [%log internal] "Transition_accepted" ;
+ [%log info] ~metadata
+ "Generated transition $state_hash was accepted into \
+ transition frontier" ;
+ Deferred.map ~f:Result.return
+ (Mina_networking.broadcast_state net
+ (Breadcrumb.block_with_hash breadcrumb) )
+ | `Timed_out ->
+ (* FIXME #3167: this should be fatal, and more
+ importantly, shouldn't happen.
+ *)
+ [%log internal] "Transition_accept_timeout" ;
+ let msg : (_, unit, string, unit) format4 =
+ "Timed out waiting for generated transition $state_hash to \
+ enter transition frontier. Continuing to produce new \
+ blocks anyway. This may mean your CPU is overloaded. \
+ Consider disabling `-run-snark-worker` if it's \
+ configured."
in
- let%bind res = emit_breadcrumb () in
let span =
Block_time.diff (Block_time.now time_controller) start
in
- handle_block_production_errors ~logger ~rejected_blocks_logger
- ~time_taken:span ~previous_protocol_state ~protocol_state
- res) )
+ let metadata =
+ [ ( "time"
+ , `Int (Block_time.Span.to_ms span |> Int64.to_int_exn) )
+ ; ( "protocol_state"
+ , Protocol_state.Value.to_yojson protocol_state )
+ ]
+ @ metadata
+ in
+ [%log' debug rejected_blocks_logger] ~metadata msg ;
+ [%log fatal] ~metadata msg ;
+ return ()
+ in
+ let%bind res = emit_breadcrumb () in
+ let span = Block_time.diff (Block_time.now time_controller) start in
+ handle_block_production_errors ~logger ~rejected_blocks_logger
+ ~time_taken:span ~previous_protocol_state ~protocol_state res) )
+
+let generate_genesis_proof_if_needed ~genesis_breadcrumb ~frontier_reader () =
+ match Broadcast_pipe.Reader.peek frontier_reader with
+ | Some transition_frontier ->
+ let consensus_state =
+ Transition_frontier.best_tip transition_frontier
+ |> Transition_frontier.Breadcrumb.consensus_state
+ in
+ if Consensus.Data.Consensus_state.is_genesis_state consensus_state then
+ genesis_breadcrumb () |> Deferred.ignore_m
+ else Deferred.return ()
+ | None ->
+ Deferred.return ()
+
+let iteration ~schedule_next_vrf_check ~produce_block_now
+ ~schedule_block_production ~next_vrf_check_now ~genesis_breadcrumb
+ ~context:(module Context : CONTEXT) ~vrf_evaluator ~time_controller
+ ~coinbase_receiver ~frontier_reader ~set_next_producer_timing
+ ~transition_frontier ~vrf_evaluation_state ~epoch_data_for_vrf
+ ~ledger_snapshot i slot =
+ O1trace.thread "block_producer_iteration"
+ @@ fun () ->
+ let consensus_state =
+ Transition_frontier.(
+ best_tip transition_frontier |> Breadcrumb.consensus_state)
+ in
+ let i' =
+ Mina_numbers.Length.succ
+ epoch_data_for_vrf.Consensus.Data.Epoch_data_for_vrf.epoch
+ in
+ let new_global_slot = epoch_data_for_vrf.global_slot in
+ let open Context in
+ let%bind () =
+ if Mina_numbers.Length.(i' > i) then
+ Vrf_evaluation_state.update_epoch_data ~vrf_evaluator ~epoch_data_for_vrf
+ ~logger vrf_evaluation_state ~vrf_poll_interval
+ else Deferred.unit
+ in
+ let%bind () =
+ (* Poll once per slot while the VRF evaluation for the epoch is not yet complete *)
+ if
+ Mina_numbers.Global_slot_since_hard_fork.(new_global_slot > slot)
+ && not (Vrf_evaluation_state.finished vrf_evaluation_state)
+ then
+ Vrf_evaluation_state.poll ~vrf_evaluator ~logger vrf_evaluation_state
+ ~vrf_poll_interval
+ else Deferred.unit
+ in
+ match Core.Queue.dequeue vrf_evaluation_state.queue with
+ | None -> (
+ (*Keep trying until we get some slots*)
+ let poll () =
+ let%bind () = Async.after vrf_poll_interval in
+ let%bind () =
+ Vrf_evaluation_state.poll ~vrf_evaluator ~logger vrf_evaluation_state
+ ~vrf_poll_interval
+ in
+ schedule_next_vrf_check (Block_time.now time_controller)
+ in
+ match Vrf_evaluation_state.evaluator_status vrf_evaluation_state with
+ | Completed ->
+ let epoch_end_time =
+ Consensus.Hooks.epoch_end_time ~constants:consensus_constants
+ epoch_data_for_vrf.epoch
+ in
+ set_next_producer_timing (`Check_again epoch_end_time) consensus_state ;
+ [%log info] "No more slots won in this epoch" ;
+ schedule_next_vrf_check epoch_end_time
+ | At last_slot ->
+ set_next_producer_timing (`Evaluating_vrf last_slot) consensus_state ;
+ poll ()
+ | Start ->
+ set_next_producer_timing (`Evaluating_vrf new_global_slot)
+ consensus_state ;
+ poll () )
+ | Some slot_won -> (
+ let winning_global_slot = slot_won.global_slot in
+ let slot, epoch =
+ let t =
+ Consensus.Data.Consensus_time.of_global_slot winning_global_slot
+ ~constants:consensus_constants
+ in
+ Consensus.Data.Consensus_time.(slot t, epoch t)
+ in
+ [%log info] "Block producer won slot $slot in epoch $epoch"
+ ~metadata:
+ [ ( "slot"
+ , Mina_numbers.Global_slot_since_genesis.(
+ to_yojson @@ of_uint32 slot) )
+ ; ("epoch", Mina_numbers.Length.to_yojson epoch)
+ ] ;
+ let now = Block_time.now time_controller in
+ let curr_global_slot =
+ Consensus.Data.Consensus_time.(
+ of_time_exn ~constants:consensus_constants now |> to_global_slot)
+ in
+ let winner_pk = fst slot_won.delegator in
+ let data =
+ Consensus.Hooks.get_block_data ~slot_won ~ledger_snapshot
+ ~coinbase_receiver:!coinbase_receiver
in
+ if
+ Mina_numbers.Global_slot_since_hard_fork.(
+ curr_global_slot = winning_global_slot)
+ then (
+ (*produce now*)
+ [%log info] "Producing a block now" ;
+ set_next_producer_timing
+ (`Produce_now (data, winner_pk))
+ consensus_state ;
+ Mina_metrics.(Counter.inc_one Block_producer.slots_won) ;
+ let%bind () =
+ generate_genesis_proof_if_needed ~genesis_breadcrumb ~frontier_reader
+ ()
+ in
+ produce_block_now (now, data, winner_pk) )
+ else
+ match
+ Mina_numbers.Global_slot_since_hard_fork.diff winning_global_slot
+ curr_global_slot
+ with
+ | None ->
+ [%log warn]
+ "Skipping block production for global slot $slot_won because it \
+ has passed. Current global slot is $curr_slot"
+ ~metadata:
+ [ ( "slot_won"
+ , Mina_numbers.Global_slot_since_hard_fork.to_yojson
+ winning_global_slot )
+ ; ( "curr_slot"
+ , Mina_numbers.Global_slot_since_hard_fork.to_yojson
+ curr_global_slot )
+ ] ;
+ next_vrf_check_now ()
+ | Some slot_diff ->
+ [%log info] "Producing a block in $slots slots"
+ ~metadata:
+ [ ("slots", Mina_numbers.Global_slot_span.to_yojson slot_diff) ] ;
+ let time =
+ Consensus.Data.Consensus_time.(
+ start_time ~constants:consensus_constants
+ (of_global_slot ~constants:consensus_constants
+ winning_global_slot ))
+ |> Block_time.to_span_since_epoch |> Block_time.Span.to_ms
+ in
+ set_next_producer_timing
+ (`Produce (time, data, winner_pk))
+ consensus_state ;
+ Mina_metrics.(Counter.inc_one Block_producer.slots_won) ;
+ let scheduled_time = time_of_ms time in
+ don't_wait_for
+ ((* Attempt to generate a genesis proof in the slot
+ immediately before we'll actually need it, so that
+ it isn't limiting our block production time in the
+ won slot.
+ This also allows non-genesis blocks to be received
+ in the meantime, alleviating the need to produce
+ one at all if our block won't have block height 1.
+ *)
+ let scheduled_genesis_time =
+ time_of_ms
+ Int64.(
+ time - of_int constraint_constants.block_window_duration_ms)
+ in
+ let span_till_time =
+ Block_time.diff scheduled_genesis_time
+ (Block_time.now time_controller)
+ |> Block_time.Span.to_time_span
+ in
+ let%bind () = after span_till_time in
+ generate_genesis_proof_if_needed ~genesis_breadcrumb
+ ~frontier_reader () ) ;
+ schedule_block_production (scheduled_time, data, winner_pk) )
+
+let run ~context:(module Context : CONTEXT) ~vrf_evaluator ~prover ~verifier
+ ~trust_system ~get_completed_work ~transaction_resource_pool
+ ~time_controller ~consensus_local_state ~coinbase_receiver ~frontier_reader
+ ~transition_writer ~set_next_producer_timing ~log_block_creation
+ ~block_reward_threshold ~block_produced_bvar ~vrf_evaluation_state ~net
+ ~zkapp_cmd_limit_hardcap =
+ let open Context in
+ O1trace.sync_thread "produce_blocks" (fun () ->
+ let genesis_breadcrumb =
+ genesis_breadcrumb_creator ~context:(module Context) prover
+ in
+ let slot_tx_end =
+ Runtime_config.slot_tx_end precomputed_values.runtime_config
+ in
+ let slot_chain_end =
+ Runtime_config.slot_chain_end precomputed_values.runtime_config
+ in
+ let produce =
+ produce ~genesis_breadcrumb
+ ~context:(module Context : CONTEXT)
+ ~prover ~verifier ~trust_system ~get_completed_work
+ ~transaction_resource_pool ~frontier_reader ~time_controller
+ ~transition_writer ~log_block_creation ~block_reward_threshold
+ ~block_produced_bvar ~slot_tx_end ~slot_chain_end ~net
+ ~zkapp_cmd_limit_hardcap
+ in
+ let module Breadcrumb = Transition_frontier.Breadcrumb in
let production_supervisor = Singleton_supervisor.create ~task:produce in
let scheduler = Singleton_scheduler.create time_controller in
let rec check_next_block_timing slot i () =
(* Begin checking for the ability to produce a block *)
match Broadcast_pipe.Reader.peek frontier_reader with
| None ->
- log_bootstrap_mode () ;
+ log_bootstrap_mode ~logger () ;
don't_wait_for
(let%map () =
Broadcast_pipe.Reader.iter_until frontier_reader
@@ -1140,21 +1301,8 @@ let run ~context:(module Context : CONTEXT) ~vrf_evaluator ~prover ~verifier
"Block producer will begin producing only empty blocks after \
$slot_diff slots"
slot_tx_end ;
-
- let generate_genesis_proof_if_needed () =
- match Broadcast_pipe.Reader.peek frontier_reader with
- | Some transition_frontier ->
- let consensus_state =
- Transition_frontier.best_tip transition_frontier
- |> Breadcrumb.consensus_state
- in
- if
- Consensus.Data.Consensus_state.is_genesis_state
- consensus_state
- then genesis_breadcrumb () |> Deferred.ignore_m
- else Deferred.return ()
- | None ->
- Deferred.return ()
+ let next_vrf_check_now =
+ check_next_block_timing new_global_slot i'
in
(* TODO: Re-enable this assertion when it doesn't fail dev demos
* (see #5354)
@@ -1163,179 +1311,34 @@ let run ~context:(module Context : CONTEXT) ~vrf_evaluator ~prover ~verifier
~constants:consensus_constants ~consensus_state
~local_state:consensus_local_state
= None ) ; *)
+ let produce_block_now triple =
+ ignore
+ ( Interruptible.finally
+ (Singleton_supervisor.dispatch production_supervisor triple)
+ ~f:next_vrf_check_now
+ : (_, _) Interruptible.t )
+ in
don't_wait_for
- (let%bind () =
- if Mina_numbers.Length.(i' > i) then
- Vrf_evaluation_state.update_epoch_data ~vrf_evaluator
- ~epoch_data_for_vrf ~logger vrf_evaluation_state
- ~vrf_poll_interval
- else Deferred.unit
- in
- let%bind () =
- (*Poll once every slot if the evaluation for the epoch is not completed or the evaluation is completed*)
- if
- Mina_numbers.Global_slot_since_hard_fork.(
- new_global_slot > slot)
- && not (Vrf_evaluation_state.finished vrf_evaluation_state)
- then
- Vrf_evaluation_state.poll ~vrf_evaluator ~logger
- vrf_evaluation_state ~vrf_poll_interval
- else Deferred.unit
- in
- match Core.Queue.dequeue vrf_evaluation_state.queue with
- | None -> (
- (*Keep trying until we get some slots*)
- let poll () =
- let%bind () = Async.after vrf_poll_interval in
- let%map () =
- Vrf_evaluation_state.poll ~vrf_evaluator ~logger
- vrf_evaluation_state ~vrf_poll_interval
- in
- Singleton_scheduler.schedule scheduler
- (Block_time.now time_controller)
- ~f:(check_next_block_timing new_global_slot i')
- in
- match
- Vrf_evaluation_state.evaluator_status vrf_evaluation_state
- with
- | Completed ->
- let epoch_end_time =
- Consensus.Hooks.epoch_end_time
- ~constants:consensus_constants
- epoch_data_for_vrf.epoch
- in
- set_next_producer_timing (`Check_again epoch_end_time)
- consensus_state ;
- [%log info] "No more slots won in this epoch" ;
- return
- (Singleton_scheduler.schedule scheduler epoch_end_time
- ~f:(check_next_block_timing new_global_slot i') )
- | At last_slot ->
- set_next_producer_timing (`Evaluating_vrf last_slot)
- consensus_state ;
- poll ()
- | Start ->
- set_next_producer_timing
- (`Evaluating_vrf new_global_slot) consensus_state ;
- poll () )
- | Some slot_won -> (
- let winning_global_slot = slot_won.global_slot in
- let slot, epoch =
- let t =
- Consensus.Data.Consensus_time.of_global_slot
- winning_global_slot ~constants:consensus_constants
- in
- Consensus.Data.Consensus_time.(slot t, epoch t)
- in
- [%log info] "Block producer won slot $slot in epoch $epoch"
- ~metadata:
- [ ( "slot"
- , Mina_numbers.Global_slot_since_genesis.(
- to_yojson @@ of_uint32 slot) )
- ; ("epoch", Mina_numbers.Length.to_yojson epoch)
- ] ;
- let now = Block_time.now time_controller in
- let curr_global_slot =
- Consensus.Data.Consensus_time.(
- of_time_exn ~constants:consensus_constants now
- |> to_global_slot)
- in
- let winner_pk = fst slot_won.delegator in
- let data =
- Consensus.Hooks.get_block_data ~slot_won ~ledger_snapshot
- ~coinbase_receiver:!coinbase_receiver
- in
- if
- Mina_numbers.Global_slot_since_hard_fork.(
- curr_global_slot = winning_global_slot)
- then (
- (*produce now*)
- [%log info] "Producing a block now" ;
- set_next_producer_timing
- (`Produce_now (data, winner_pk))
- consensus_state ;
- Mina_metrics.(Counter.inc_one Block_producer.slots_won) ;
- let%map () = generate_genesis_proof_if_needed () in
- ignore
- ( Interruptible.finally
- (Singleton_supervisor.dispatch production_supervisor
- (now, data, winner_pk) )
- ~f:(check_next_block_timing new_global_slot i')
- : (_, _) Interruptible.t ) )
- else
- match
- Mina_numbers.Global_slot_since_hard_fork.diff
- winning_global_slot curr_global_slot
- with
- | None ->
- [%log warn]
- "Skipping block production for global slot \
- $slot_won because it has passed. Current global \
- slot is $curr_slot"
- ~metadata:
- [ ( "slot_won"
- , Mina_numbers.Global_slot_since_hard_fork
- .to_yojson winning_global_slot )
- ; ( "curr_slot"
- , Mina_numbers.Global_slot_since_hard_fork
- .to_yojson curr_global_slot )
- ] ;
- return (check_next_block_timing new_global_slot i' ())
- | Some slot_diff ->
- [%log info] "Producing a block in $slots slots"
- ~metadata:
- [ ( "slots"
- , Mina_numbers.Global_slot_span.to_yojson
- slot_diff )
- ] ;
- let time =
- Consensus.Data.Consensus_time.(
- start_time ~constants:consensus_constants
- (of_global_slot ~constants:consensus_constants
- winning_global_slot ))
- |> Block_time.to_span_since_epoch
- |> Block_time.Span.to_ms
- in
- set_next_producer_timing
- (`Produce (time, data, winner_pk))
- consensus_state ;
- Mina_metrics.(Counter.inc_one Block_producer.slots_won) ;
- let scheduled_time = time_of_ms time in
- don't_wait_for
- ((* Attempt to generate a genesis proof in the slot
- immediately before we'll actually need it, so that
- it isn't limiting our block production time in the
- won slot.
- This also allows non-genesis blocks to be received
- in the meantime and alleviate the need to produce
- one at all, if this won't have block height 1.
- *)
- let scheduled_genesis_time =
- time_of_ms
- Int64.(
- time
- - of_int
- constraint_constants
- .block_window_duration_ms)
- in
- let span_till_time =
- Block_time.diff scheduled_genesis_time
- (Block_time.now time_controller)
- |> Block_time.Span.to_time_span
- in
- let%bind () = after span_till_time in
- generate_genesis_proof_if_needed () ) ;
- Singleton_scheduler.schedule scheduler scheduled_time
- ~f:(fun () ->
- ignore
- ( Interruptible.finally
- (Singleton_supervisor.dispatch
- production_supervisor
- (scheduled_time, data, winner_pk) )
- ~f:
- (check_next_block_timing new_global_slot i')
- : (_, _) Interruptible.t ) ) ;
- Deferred.return () ) )
+ ( iteration
+ ~schedule_next_vrf_check:
+ (Fn.compose Deferred.return
+ (Singleton_scheduler.schedule scheduler
+ ~f:next_vrf_check_now ) )
+ ~produce_block_now:
+ (Fn.compose Deferred.return produce_block_now)
+ ~schedule_block_production:(fun (time, data, winner) ->
+ Singleton_scheduler.schedule scheduler time ~f:(fun () ->
+ produce_block_now (time, data, winner) ) ;
+ Deferred.unit )
+ ~next_vrf_check_now:
+ (Fn.compose Deferred.return next_vrf_check_now)
+ ~genesis_breadcrumb
+ ~context:(module Context)
+ ~vrf_evaluator ~time_controller ~coinbase_receiver
+ ~frontier_reader ~set_next_producer_timing
+ ~transition_frontier ~vrf_evaluation_state ~epoch_data_for_vrf
+ ~ledger_snapshot i slot
+ : unit Deferred.t )
in
let start () =
check_next_block_timing Mina_numbers.Global_slot_since_hard_fork.zero
@@ -1366,9 +1369,6 @@ let run ~context:(module Context : CONTEXT) ~vrf_evaluator ~prover ~verifier
let run_precomputed ~context:(module Context : CONTEXT) ~verifier ~trust_system
~time_controller ~frontier_reader ~transition_writer ~precomputed_blocks =
let open Context in
- let log_bootstrap_mode () =
- [%log info] "Pausing block production while bootstrapping"
- in
let rejected_blocks_logger =
Logger.create ~id:Logger.Logger_id.rejected_blocks ()
in
@@ -1398,7 +1398,8 @@ let run_precomputed ~context:(module Context : CONTEXT) ~verifier ~trust_system
in
match Broadcast_pipe.Reader.peek frontier_reader with
| None ->
- log_bootstrap_mode () ; return ()
+ log_bootstrap_mode ~logger () ;
+ return ()
| Some frontier ->
let open Transition_frontier.Extensions in
let transition_registry =
@@ -1562,7 +1563,7 @@ let run_precomputed ~context:(module Context : CONTEXT) ~verifier ~trust_system
(* Begin checking for the ability to produce a block *)
match Broadcast_pipe.Reader.peek frontier_reader with
| None ->
- log_bootstrap_mode () ;
+ log_bootstrap_mode ~logger () ;
let%bind () =
Broadcast_pipe.Reader.iter_until frontier_reader
~f:(Fn.compose Deferred.return Option.is_some)
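
The net effect of this block-producer refactor is that the per-slot decision logic (`iteration`) now receives its effects, namely scheduling the next VRF check, producing immediately, or scheduling production for a future slot, as injected callbacks rather than capturing the scheduler from an enclosing closure, which makes the slot logic testable in isolation. A toy Go rendering of the same dependency-injection shape (names and timing are illustrative, not the daemon's API):

```go
package main

import (
	"fmt"
	"time"
)

// callbacks bundles the effects the per-slot logic may trigger,
// mirroring schedule_next_vrf_check / produce_block_now /
// schedule_block_production in the OCaml refactor above.
type callbacks struct {
	scheduleNextCheck func(at time.Time)
	produceNow        func()
	scheduleProduce   func(at time.Time)
}

// iteration mirrors, in miniature, the refactored slot logic: it
// decides what to do and delegates all side effects to callbacks.
func iteration(wonSlotAt *time.Time, now time.Time, cb callbacks) {
	switch {
	case wonSlotAt == nil:
		// No slot won yet: poll the VRF evaluator again shortly.
		cb.scheduleNextCheck(now.Add(time.Second))
	case !wonSlotAt.After(now):
		// The won slot is already current: produce immediately.
		cb.produceNow()
	default:
		// Produce when the won slot arrives.
		cb.scheduleProduce(*wonSlotAt)
	}
}

func main() {
	now := time.Now()
	won := now.Add(3 * time.Second)
	iteration(&won, now, callbacks{
		scheduleNextCheck: func(at time.Time) { fmt.Println("check again at", at) },
		produceNow:        func() { fmt.Println("producing now") },
		scheduleProduce:   func(at time.Time) { fmt.Println("produce scheduled for", at) },
	})
}
```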
diff --git a/src/lib/block_storage/block_storage.ml b/src/lib/block_storage/block_storage.ml
deleted file mode 100644
index 21cb2e1b05d..00000000000
--- a/src/lib/block_storage/block_storage.ml
+++ /dev/null
@@ -1,223 +0,0 @@
-(* Only show stdout for failed inline tests. *)
-open Inline_test_quiet_logs
-open Core_kernel
-
-type t =
- { (* statuses is a map from 32-byte key to a 1-byte value representing the status of a root bitswap block *)
- statuses : (Consensus.Body_reference.t, int, [ `Uni ]) Lmdb.Map.t
- ; blocks : (Blake2.t, Bigstring.t, [ `Uni ]) Lmdb.Map.t
- ; logger : Logger.t
- ; env : Lmdb.Env.t
- }
-
-module Root_block_status = struct
- type t = Partial | Full | Deleting [@@deriving enum]
-end
-
-let body_tag = Staged_ledger_diff.Body.Tag.(to_enum Body)
-
-let full_status = Root_block_status.to_enum Full
-
-let uint8_conv =
- Lmdb.Conv.make
- ~flags:Lmdb.Conv.Flags.(integer_key + integer_dup + dup_fixed)
- ~serialise:(fun alloc x ->
- let a = alloc 1 in
- Bigstring.set_uint8_exn a ~pos:0 x ;
- a )
- ~deserialise:(Bigstring.get_uint8 ~pos:0)
- ()
-
-let blake2_conv =
- Lmdb.Conv.(
- make
- ~serialise:(fun alloc x ->
- let str = Blake2.to_raw_string x in
- serialise string alloc str )
- ~deserialise:(fun s -> deserialise string s |> Blake2.of_raw_string)
- ())
-
-let open_ ~logger dir =
- let env = Lmdb.Env.create ~max_maps:1 Ro dir in
- (* Env. *)
- let blocks =
- Lmdb.Map.open_existing ~key:blake2_conv ~value:Lmdb.Conv.bigstring Nodup env
- in
- let statuses =
- Lmdb.Map.open_existing ~key:blake2_conv ~value:uint8_conv ~name:"status"
- Nodup env
- in
- { blocks; statuses; logger; env }
-
-let get_status { statuses; logger; _ } body_ref =
- try
- let raw_status = Lmdb.Map.get statuses body_ref in
- match Root_block_status.of_enum raw_status with
- | None ->
- [%log error] "Unexpected status $status for $body_reference"
- ~metadata:
- [ ("status", `Int raw_status)
- ; ("body_reference", Consensus.Body_reference.to_yojson body_ref)
- ] ;
- None
- | Some x ->
- Some x
- with Lmdb.Not_found -> None
-
-let read_body_impl blocks txn root_ref =
- let find_block ref =
- try Lmdb.Map.get ~txn blocks ref |> Some with Lmdb.Not_found -> None
- in
- let%bind.Or_error raw_root_block =
- Option.value_map
- ~f:(fun x -> Ok x)
- ~default:
- (Or_error.error_string
- (sprintf "root block %s not found" @@ Blake2.to_hex root_ref) )
- (find_block root_ref)
- in
- let%bind.Or_error root_links, root_data =
- Staged_ledger_diff.Bitswap_block.parse_block ~hash:root_ref raw_root_block
- in
- let%bind.Or_error () =
- if Bigstring.length root_data < 5 then
- Or_error.error_string
- @@ sprintf "Couldn't read root block for %s: data section is too short"
- @@ Consensus.Body_reference.to_hex root_ref
- else Ok ()
- in
- let len = Bigstring.get_uint32_le root_data ~pos:0 - 1 in
- let%bind.Or_error () =
- let raw_tag = Bigstring.get_uint8 root_data ~pos:4 in
- if body_tag = raw_tag then Ok ()
- else
- Or_error.error_string
- @@ sprintf "Unexpected tag %s for block %s" (Int.to_string raw_tag)
- (Consensus.Body_reference.to_hex root_ref)
- in
- let buf = Bigstring.create len in
- let pos = ref (Bigstring.length root_data - 5) in
- Bigstring.blit ~src:root_data ~src_pos:5 ~dst:buf ~dst_pos:0 ~len:!pos ;
- let q = Queue.create () in
- Queue.enqueue_all q root_links ;
- let%map.Or_error () =
- Staged_ledger_diff.Bitswap_block.iter_links q
- ~report_chunk:(fun data ->
- Bigstring.blit ~src:data ~src_pos:0 ~dst:buf ~dst_pos:!pos
- ~len:(Bigstring.length data) ;
- pos := !pos + Bigstring.length data )
- ~find_block
- in
- Staged_ledger_diff.Body.Stable.Latest.bin_read_t buf ~pos_ref:(ref 0)
-
-let read_body { statuses; logger; blocks; env } body_ref =
- let impl txn =
- try
- if Lmdb.Map.get ~txn statuses body_ref = full_status then (
- match read_body_impl blocks txn body_ref with
- | Ok r ->
- Some r
- | Error e ->
- [%log error]
- "Couldn't read body for $body_reference with Full status: $error"
- ~metadata:
- [ ("body_reference", Consensus.Body_reference.to_yojson body_ref)
- ; ("error", `String (Error.to_string_hum e))
- ] ;
- None )
- else None
- with Lmdb.Not_found -> None
- in
- match Lmdb.Txn.go Ro env impl with
- | None ->
- [%log error]
- "LMDB transaction failed unexpectedly while reading block \
- $body_reference"
- ~metadata:
- [ ("body_reference", Consensus.Body_reference.to_yojson body_ref) ] ;
- None
- | Some x ->
- x
-
-let%test_module "Block storage tests" =
- ( module struct
- open Full_frontier.For_tests
- open Async_kernel
- open Frontier_base
-
- let () =
- Backtrace.elide := false ;
- Async.Scheduler.set_record_backtraces true
-
- let logger = Logger.create ()
-
- let verifier = verifier ()
-
- let%test_unit "Write a block to db and read it" =
- Quickcheck.test (gen_breadcrumb ~verifier ()) ~trials:4
- ~f:(fun make_breadcrumb ->
- let frontier = create_frontier () in
- let root = Full_frontier.root frontier in
- let open Mina_net2.For_tests in
- let res_updated_ivar = Ivar.create () in
- let handle_push_message _ msg =
- ( match msg with
- | Libp2p_ipc.Reader.DaemonInterface.PushMessage.ResourceUpdated m
- -> (
- let open Libp2p_ipc.Reader.DaemonInterface.ResourceUpdate in
- match (type_get m, ids_get_list m) with
- | Added, [ id_ ] ->
- let id =
- Libp2p_ipc.Reader.RootBlockId.blake2b_hash_get id_
- in
- Ivar.fill_if_empty res_updated_ivar id
- | _ ->
- () )
- | _ ->
- () ) ;
- Deferred.unit
- in
- Helper.test_with_libp2p_helper ~logger ~handle_push_message
- (fun conf_dir helper ->
- let%bind me = generate_random_keypair helper in
- let maddr =
- multiaddr_to_libp2p_ipc
- @@ Mina_net2.Multiaddr.of_string "/ip4/127.0.0.1/tcp/12878"
- in
- let libp2p_config =
- Libp2p_ipc.create_libp2p_config
- ~private_key:(Mina_net2.Keypair.secret me)
- ~statedir:conf_dir ~listen_on:[ maddr ]
- ~external_multiaddr:maddr ~network_id:"s"
- ~unsafe_no_trust_ip:true ~flood:false ~direct_peers:[]
- ~seed_peers:[] ~known_private_ip_nets:[] ~peer_exchange:true
- ~peer_protection_ratio:0.2 ~min_connections:20
- ~max_connections:40 ~validation_queue_size:250
- ~gating_config:empty_libp2p_ipc_gating_config
- ?metrics_port:None ~topic_config:[] ()
- in
- let%bind _ =
- Helper.do_rpc helper
- (module Libp2p_ipc.Rpcs.Configure)
- (Libp2p_ipc.Rpcs.Configure.create_request ~libp2p_config)
- >>| Or_error.ok_exn
- in
- let%bind breadcrumb = make_breadcrumb root in
- let body = Breadcrumb.block breadcrumb |> Mina_block.body in
- let body_ref = Staged_ledger_diff.Body.compute_reference body in
- [%log info] "Sending add resource" ;
- Helper.send_add_resource ~tag:Staged_ledger_diff.Body.Tag.Body
- ~body helper ;
- [%log info] "Waiting for push message" ;
- let%map id = Ivar.read res_updated_ivar in
- [%log info] "Push message received" ;
- [%test_eq: String.t]
- (Consensus.Body_reference.to_raw_string body_ref)
- id ;
- let db =
- open_ ~logger (String.concat ~sep:"/" [ conf_dir; "block-db" ])
- in
- [%test_eq: Staged_ledger_diff.Body.t option] (Some body)
- (read_body db body_ref) ) ;
- clean_up_persistent_root ~frontier )
- end )
diff --git a/src/lib/block_storage/block_storage.mli b/src/lib/block_storage/block_storage.mli
deleted file mode 100644
index 8a85f59e83b..00000000000
--- a/src/lib/block_storage/block_storage.mli
+++ /dev/null
@@ -1,12 +0,0 @@
-type t
-
-module Root_block_status : sig
- type t = Partial | Full | Deleting [@@deriving enum]
-end
-
-val open_ : logger:Logger.t -> string -> t
-
-val get_status : t -> Consensus.Body_reference.t -> Root_block_status.t option
-
-val read_body :
- t -> Consensus.Body_reference.t -> Staged_ledger_diff.Body.t option
diff --git a/src/lib/block_storage/dune b/src/lib/block_storage/dune
deleted file mode 100644
index d9a2f0f14e0..00000000000
--- a/src/lib/block_storage/dune
+++ /dev/null
@@ -1,48 +0,0 @@
-(library
- (name block_storage)
- (public_name block_storage)
- (libraries
- ;; opam libraries
- async
- base58
- base64
- capnp
- digestif
- stdio
- core
- libp2p_ipc
- yojson
- async_kernel
- core_kernel
- bin_prot.shape
- ppx_inline_test.config
- async_unix
- sexplib0
- base.caml
- base.base_internalhash_types
- splittable_random
- lmdb
- integers
- ;; local libraries
- blake2
- error_json
- child_processes
- file_system
- logger
- network_peer
- pipe_lib
- timeout_lib
- mina_metrics
- o1trace
- staged_ledger_diff
- consensus
- mina_net2
- ;; test deps
- inline_test_quiet_logs
- transition_frontier_base
- mina_block
- transition_frontier_full_frontier
- )
- (inline_tests (flags -verbose -show-counts))
- (instrumentation (backend bisect_ppx))
- (preprocess (pps ppx_mina ppx_version ppx_jane ppx_deriving.std ppx_let ppx_deriving_yojson)))
diff --git a/src/lib/blockchain_snark/blockchain_snark_state.ml b/src/lib/blockchain_snark/blockchain_snark_state.ml
index 7ea9acaf2ff..51cdc3e9063 100644
--- a/src/lib/blockchain_snark/blockchain_snark_state.ml
+++ b/src/lib/blockchain_snark/blockchain_snark_state.ml
@@ -384,17 +384,20 @@ let%snarkydef_ step ~(logger : Logger.t)
module Statement = struct
type t = Protocol_state.Value.t
- let to_field_elements (t : t) : Tick.Field.t array =
- [| (Protocol_state.hashes t).state_hash |]
-end
+ let typ =
+ Data_as_hash.typ ~hash:(fun t -> (Protocol_state.hashes t).state_hash)
-module Statement_var = struct
- type t = Protocol_state.Value.t Data_as_hash.t
+ let to_field_elements =
+ let (Typ { value_to_fields; _ }) = typ in
+ Fn.compose fst value_to_fields
end
-type tag = (Statement_var.t, Statement.t, Nat.N2.n, Nat.N1.n) Pickles.Tag.t
-
-let typ = Data_as_hash.typ ~hash:(fun t -> (Protocol_state.hashes t).state_hash)
+type tag =
+ ( Protocol_state.Value.t Data_as_hash.t
+ , Statement.t
+ , Nat.N2.n
+ , Nat.N1.n )
+ Pickles.Tag.t
let check w ?handler ~proof_level ~constraint_constants new_state_hash :
unit Or_error.t =
@@ -402,7 +405,7 @@ let check w ?handler ~proof_level ~constraint_constants new_state_hash :
check
(Fn.flip handle (wrap_handler handler w) (fun () ->
let%bind curr =
- exists typ ~compute:(As_prover.return new_state_hash)
+ exists Statement.typ ~compute:(As_prover.return new_state_hash)
in
step ~proof_level ~constraint_constants ~logger:(Logger.create ()) curr )
)
@@ -463,7 +466,7 @@ let constraint_system_digests ~proof_level ~constraint_constants () =
in
()
in
- Tick.constraint_system ~input_typ:typ
+ Tick.constraint_system ~input_typ:Statement.typ
~return_typ:(Snarky_backendless.Typ.unit ())
main ) )
]
@@ -478,7 +481,8 @@ end) : S = struct
open T
let tag, cache_handle, p, Pickles.Provers.[ step ] =
- Pickles.compile () ~cache:Cache_dir.cache ~public_input:(Input typ)
+ Pickles.compile () ~cache:Cache_dir.cache
+ ~public_input:(Input Statement.typ)
~override_wrap_domain:Pickles_base.Proofs_verified.N1
~auxiliary_typ:Typ.unit
~branches:(module Nat.N1)
diff --git a/src/lib/bootstrap_controller/bootstrap_controller.ml b/src/lib/bootstrap_controller/bootstrap_controller.ml
index 5009e540988..cc54647d1a9 100644
--- a/src/lib/bootstrap_controller/bootstrap_controller.ml
+++ b/src/lib/bootstrap_controller/bootstrap_controller.ml
@@ -750,10 +750,8 @@ let%test_module "Bootstrap_controller tests" =
let verifier =
Async.Thread_safe.block_on_async_exn (fun () ->
- Verifier.create ~logger ~proof_level ~constraint_constants
- ~conf_dir:None
- ~pids:(Child_processes.Termination.create_pid_table ())
- ~commit_id:"not specified for unit tests" () )
+ Verifier.For_tests.default ~constraint_constants ~logger ~proof_level
+ () )
module Genesis_ledger = (val precomputed_values.genesis_ledger)
diff --git a/src/lib/cli_lib/commands.ml b/src/lib/cli_lib/commands.ml
index 375a130d6e7..4467e4cb915 100644
--- a/src/lib/cli_lib/commands.ml
+++ b/src/lib/cli_lib/commands.ml
@@ -230,16 +230,20 @@ module Vrf = struct
flag "--total-stake"
~doc:"AMOUNT The total balance of all accounts in the epoch ledger"
(optional int)
- in
+ and config_file = Flag.config_files in
Exceptions.handle_nicely
@@ fun () ->
let env = Secrets.Keypair.env in
- let constraint_constants =
- Genesis_constants.Compiled.constraint_constants
+ let open Deferred.Let_syntax in
+ let%bind constraint_constants =
+ let logger = Logger.create () in
+ let%map conf =
+ Runtime_config.Constants.load_constants ~logger config_file
+ in
+ Runtime_config.Constants.constraint_constants conf
in
if Option.is_some (Sys.getenv env) then
eprintf "Using password from environment variable %s\n" env ;
- let open Deferred.Let_syntax in
(* TODO-someday: constraint constants from config file. *)
let%bind () =
let password =
@@ -297,17 +301,21 @@ module Vrf = struct
\"epochSeed\": _, \"delegatorIndex\": _} JSON message objects read on \
stdin"
(let open Command.Let_syntax in
- let%map_open privkey_path = Flag.privkey_read_path in
+ let%map_open privkey_path = Flag.privkey_read_path
+ and config_file = Flag.config_files in
Exceptions.handle_nicely
@@ fun () ->
- let constraint_constants =
- Genesis_constants.Compiled.constraint_constants
- in
let env = Secrets.Keypair.env in
if Option.is_some (Sys.getenv env) then
eprintf "Using password from environment variable %s\n" env ;
let open Deferred.Let_syntax in
- (* TODO-someday: constraint constants from config file. *)
+ let%bind constraint_constants =
+ let logger = Logger.create () in
+ let%map conf =
+ Runtime_config.Constants.load_constants ~logger config_file
+ in
+ Runtime_config.Constants.constraint_constants conf
+ in
let%bind () =
let password =
lazy
@@ -362,13 +370,18 @@ module Vrf = struct
totalStake: 1000000000}. The threshold is not checked against a \
ledger; this should be done manually to confirm whether threshold_met \
in the output corresponds to an actual won block."
- ( Command.Param.return @@ Exceptions.handle_nicely
+ (let open Command.Let_syntax in
+ let%map_open config_file = Flag.config_files in
+ Exceptions.handle_nicely
@@ fun () ->
let open Deferred.Let_syntax in
- let constraint_constants =
- Genesis_constants.Compiled.constraint_constants
+ let%bind constraint_constants =
+ let logger = Logger.create () in
+ let%map conf =
+ Runtime_config.Constants.load_constants ~logger config_file
+ in
+ Runtime_config.Constants.constraint_constants conf
in
- (* TODO-someday: constraint constants from config file. *)
let lexbuf = Lexing.from_channel In_channel.stdin in
let lexer = Yojson.init_lexer () in
let%bind () =
@@ -399,7 +412,7 @@ module Vrf = struct
(Error_json.error_to_yojson err) ) ;
`Repeat () )
in
- exit 0 )
+ exit 0)
let command_group =
Command.group ~summary:"Commands for vrf evaluations"
diff --git a/src/lib/cli_lib/flag.ml b/src/lib/cli_lib/flag.ml
index c96200feb9e..64b279757d6 100644
--- a/src/lib/cli_lib/flag.ml
+++ b/src/lib/cli_lib/flag.ml
@@ -33,6 +33,15 @@ let conf_dir =
flag "--config-directory" ~aliases:[ "config-directory" ]
~doc:"DIR Configuration directory" (optional string)
+let config_files =
+ let open Command.Param in
+ flag "--config-file" ~aliases:[ "config-file" ]
+ ~doc:
+ "PATH path to a configuration file (overrides MINA_CONFIG_FILE, default: \
+ /daemon.json). Pass multiple times to override fields from \
+ earlier config files"
+ (listed string)
+
module Doc_builder = struct
type 'value t =
{ type_name : string
@@ -343,32 +352,24 @@ end
type signed_command_common =
{ sender : Signature_lib.Public_key.Compressed.t
- ; fee : Currency.Fee.t
+ ; fee : Currency.Fee.t option
; nonce : Mina_base.Account.Nonce.t option
; memo : string option
}
-let fee_common ~default_transaction_fee ~minimum_user_command_fee :
- Currency.Fee.t Command.Param.t =
+let fee_common : Currency.Fee.t option Command.Param.t =
Command.Param.flag "--fee" ~aliases:[ "fee" ]
- ~doc:
- (Printf.sprintf
- "FEE Amount you are willing to pay to process the transaction \
- (default: %s) (minimum: %s)"
- (Currency.Fee.to_mina_string default_transaction_fee)
- (Currency.Fee.to_mina_string minimum_user_command_fee) )
- (Command.Param.optional_with_default default_transaction_fee
- Arg_type.txn_fee )
-
-let signed_command_common ~default_transaction_fee ~minimum_user_command_fee :
- signed_command_common Command.Param.t =
+ ~doc:"FEE Amount you are willing to pay to process the transaction"
+ (Command.Param.optional Arg_type.txn_fee)
+
+let signed_command_common : signed_command_common Command.Param.t =
let open Command.Let_syntax in
let open Arg_type in
let%map_open sender =
flag "--sender" ~aliases:[ "sender" ]
(required public_key_compressed)
~doc:"PUBLICKEY Public key from which you want to send the transaction"
- and fee = fee_common ~default_transaction_fee ~minimum_user_command_fee
+ and fee = fee_common
and nonce =
flag "--nonce" ~aliases:[ "nonce" ]
~doc:
@@ -401,15 +402,10 @@ module Signed_command = struct
flag "--amount" ~aliases:[ "amount" ]
~doc:"VALUE Payment amount you want to send" (required txn_amount)
- let fee ~default_transaction_fee ~minimum_user_command_fee =
+ let fee =
let open Command.Param in
flag "--fee" ~aliases:[ "fee" ]
- ~doc:
- (Printf.sprintf
- "FEE Amount you are willing to pay to process the transaction \
- (default: %s) (minimum: %s)"
- (Currency.Fee.to_mina_string default_transaction_fee)
- (Currency.Fee.to_mina_string minimum_user_command_fee) )
+ ~doc:"FEE Amount you are willing to pay to process the transaction"
(optional txn_fee)
let valid_until =
diff --git a/src/lib/cli_lib/flag.mli b/src/lib/cli_lib/flag.mli
index 69ff38f86f3..dda9c630ce5 100644
--- a/src/lib/cli_lib/flag.mli
+++ b/src/lib/cli_lib/flag.mli
@@ -12,6 +12,8 @@ val privkey_read_path : string Command.Param.t
val conf_dir : string option Command.Param.t
+val config_files : string list Command.Param.t
+
module Types : sig
type 'a with_name = { name : string; value : 'a }
@@ -81,20 +83,14 @@ end
type signed_command_common =
{ sender : Signature_lib.Public_key.Compressed.t
- ; fee : Currency.Fee.t
+ ; fee : Currency.Fee.t option
; nonce : Mina_base.Account.Nonce.t option
; memo : string option
}
-val fee_common :
- default_transaction_fee:Currency.Fee.t
- -> minimum_user_command_fee:Currency.Fee.t
- -> Currency.Fee.t Command.Param.t
+val fee_common : Currency.Fee.t option Command.Param.t
-val signed_command_common :
- default_transaction_fee:Currency.Fee.t
- -> minimum_user_command_fee:Currency.Fee.t
- -> signed_command_common Command.Param.t
+val signed_command_common : signed_command_common Command.Param.t
module Signed_command : sig
val hd_index : Mina_numbers.Hd_index.t Command.Param.t
@@ -103,10 +99,7 @@ module Signed_command : sig
val amount : Currency.Amount.t Command.Param.t
- val fee :
- default_transaction_fee:Currency.Fee.t
- -> minimum_user_command_fee:Currency.Fee.t
- -> Currency.Fee.t option Command.Param.t
+ val fee : Currency.Fee.t option Command.Param.t
val valid_until :
Mina_numbers.Global_slot_since_genesis.t option Command.Param.t
diff --git a/src/lib/crypto/kimchi_backend/pasta/pallas_based_plonk.ml b/src/lib/crypto/kimchi_backend/pasta/pallas_based_plonk.ml
index cf747c75755..3c0fc37c3fb 100644
--- a/src/lib/crypto/kimchi_backend/pasta/pallas_based_plonk.ml
+++ b/src/lib/crypto/kimchi_backend/pasta/pallas_based_plonk.ml
@@ -34,10 +34,11 @@ end
module R1CS_constraint_system =
Kimchi_pasta_constraint_system.Pallas_constraint_system
-let lagrange srs domain_log2 : _ Kimchi_types.poly_comm array =
+let lagrange (srs : Kimchi_bindings.Protocol.SRS.Fq.t) domain_log2 :
+ _ Kimchi_types.poly_comm array =
let domain_size = Int.pow 2 domain_log2 in
- Array.init domain_size ~f:(fun i ->
- Kimchi_bindings.Protocol.SRS.Fq.lagrange_commitment srs domain_size i )
+ Kimchi_bindings.Protocol.SRS.Fq.lagrange_commitments_whole_domain srs
+ domain_size
let with_lagrange f (vk : Verification_key.t) =
f (lagrange vk.srs vk.domain.log_size_of_group) vk
diff --git a/src/lib/crypto/kimchi_backend/pasta/vesta_based_plonk.ml b/src/lib/crypto/kimchi_backend/pasta/vesta_based_plonk.ml
index 4321b8963eb..1cc6717a270 100644
--- a/src/lib/crypto/kimchi_backend/pasta/vesta_based_plonk.ml
+++ b/src/lib/crypto/kimchi_backend/pasta/vesta_based_plonk.ml
@@ -35,8 +35,8 @@ module R1CS_constraint_system =
let lagrange srs domain_log2 : _ Kimchi_types.poly_comm array =
let domain_size = Int.pow 2 domain_log2 in
- Array.init domain_size ~f:(fun i ->
- Kimchi_bindings.Protocol.SRS.Fp.lagrange_commitment srs domain_size i )
+ Kimchi_bindings.Protocol.SRS.Fp.lagrange_commitments_whole_domain srs
+ domain_size
let with_lagrange f (vk : Verification_key.t) =
f (lagrange vk.srs vk.domain.log_size_of_group) vk
diff --git a/src/lib/crypto/kimchi_bindings/js/README.md b/src/lib/crypto/kimchi_bindings/js/README.md
index 8ead124f985..d0c32139c67 100644
--- a/src/lib/crypto/kimchi_bindings/js/README.md
+++ b/src/lib/crypto/kimchi_bindings/js/README.md
@@ -1,11 +1,10 @@
This library provides a wrapper around the WebAssembly prover code, which
allows `js_of_ocaml` to compile the mina project against the WebAssembly
-backend.
+backend. This means that `external` OCaml functions now know which implementation to point to. See `./bindings/README.md` for more details.
The different versions of the backend are generated in subdirectories; e.g. the
NodeJS backend is generated in `node_js/` and the Web backend is generated
-in `web/`. To use a backend, run `dune build backend/plonk_wasm.js` and copy
-`backend/plonk_wasm*` to the project directory.
+in `web/`. To use a backend, run `dune build <backend>/plonk_wasm.js` (where `<backend>` is either `web` or `node_js`) and copy `<backend>/plonk_wasm*` to the project directory.
Note that the backend code is not automatically compiled while linking against
the backend library. You should always manually issue a build command for the
@@ -14,15 +13,15 @@ For example, to run the nodejs tests in the `test/nodejs` directory you will
need to run
```
-dune build src/lib/marlin_plonk_bindings/js/test/nodejs/nodejs_test.bc.js
-src/lib/marlin_plonk_bindings/js/test/nodejs/copy_over.sh
+dune build src/lib/crypto/kimchi_bindings/js/test/nodejs/nodejs_test.bc.js
+src/lib/crypto/kimchi_bindings/js/test/nodejs/copy_over.sh
```
Similarly, to run the web tests in `test/web`, you can run
```
-dune build src/lib/marlin_plonk_bindings/js/test/web/web_test.bc.js
-src/lib/marlin_plonk_bindings/js/test/web/copy_over.sh
+dune build src/lib/crypto/kimchi_bindings/js/test/web/web_test.bc.js
+src/lib/crypto/kimchi_bindings/js/test/web/copy_over.sh
```
and then visit `http://localhost:8000` from a browser.
diff --git a/src/lib/crypto/kimchi_bindings/js/bindings.js b/src/lib/crypto/kimchi_bindings/js/bindings.js
deleted file mode 100644
index 32907233043..00000000000
--- a/src/lib/crypto/kimchi_bindings/js/bindings.js
+++ /dev/null
@@ -1,1068 +0,0 @@
-/* global plonk_wasm, caml_jsstring_of_string, caml_string_of_jsstring,
- caml_create_bytes, caml_bytes_unsafe_set, caml_bytes_unsafe_get, caml_ml_bytes_length,
- UInt64, caml_int64_of_int32
-*/
-
-// Provides: tsBindings
-var tsBindings = globalThis.__snarkyTsBindings;
-
-// Provides: tsRustConversion
-// Requires: tsBindings, plonk_wasm
-var tsRustConversion = tsBindings.rustConversion(plonk_wasm);
-
-// Provides: tsSrs
-// Requires: tsBindings, plonk_wasm
-var tsSrs = tsBindings.srs(plonk_wasm);
-
-// Provides: getTsBindings
-// Requires: tsBindings
-function getTsBindings() {
- return tsBindings;
-}
-
-// Provides: caml_bytes_of_uint8array
-// Requires: caml_create_bytes, caml_bytes_unsafe_set
-var caml_bytes_of_uint8array = function (uint8array) {
- var length = uint8array.length;
- var ocaml_bytes = caml_create_bytes(length);
- for (var i = 0; i < length; i++) {
- // No need to convert here: OCaml Char.t is just an int under the hood.
- caml_bytes_unsafe_set(ocaml_bytes, i, uint8array[i]);
- }
- return ocaml_bytes;
-};
-
-// Provides: caml_bytes_to_uint8array
-// Requires: caml_ml_bytes_length, caml_bytes_unsafe_get
-var caml_bytes_to_uint8array = function (ocaml_bytes) {
- var length = caml_ml_bytes_length(ocaml_bytes);
- var bytes = new globalThis.Uint8Array(length);
- for (var i = 0; i < length; i++) {
- // No need to convert here: OCaml Char.t is just an int under the hood.
- bytes[i] = caml_bytes_unsafe_get(ocaml_bytes, i);
- }
- return bytes;
-};
-
-// Provides: caml_option_of_maybe_undefined
-var caml_option_of_maybe_undefined = function (x) {
- if (x === undefined) {
- return 0; // None
- } else {
- return [0, x]; // Some(x)
- }
-};
-
-// Provides: caml_option_to_maybe_undefined
-var caml_option_to_maybe_undefined = function (x) {
- if (x === 0) {
- // None
- return undefined;
- } else {
- return x[1];
- }
-};
-
-// Provides: free_finalization_registry
-var free_finalization_registry = new globalThis.FinalizationRegistry(function (
- instance_representative
-) {
- instance_representative.free();
-});
-
-// Provides: free_on_finalize
-// Requires: free_finalization_registry
-var free_on_finalize = function (x) {
- // This is an unfortunate hack: we're creating a second instance of the
- // class to be able to call free on it. We can't pass the value itself,
- // since the registry holds a strong reference to the representative value.
- //
- // However, the class is only really a wrapper around a pointer, with a
- // reference to the class' prototype as its __prototype__.
- //
- // It might seem cleaner to call the destructor here on the pointer
- // directly, but unfortunately the destructor name is some mangled internal
- // string generated by wasm_bindgen. For now, this is the best,
- // least-brittle way to free once the original class instance gets collected.
- var instance_representative = x.constructor.__wrap(x.__wbg_ptr);
- free_finalization_registry.register(x, instance_representative, x);
- return x;
-};
-
-// srs
-
-// Provides: caml_fp_srs_create
-// Requires: tsSrs
-var caml_fp_srs_create = tsSrs.fp.create;
-
-// Provides: caml_fp_srs_write
-// Requires: plonk_wasm, caml_jsstring_of_string
-var caml_fp_srs_write = function (append, t, path) {
- if (append === 0) {
- append = undefined;
- } else {
- append = append[1];
- }
- return plonk_wasm.caml_fp_srs_write(append, t, caml_jsstring_of_string(path));
-};
-
-// Provides: caml_fp_srs_read
-// Requires: plonk_wasm, caml_jsstring_of_string
-var caml_fp_srs_read = function (offset, path) {
- if (offset === 0) {
- offset = undefined;
- } else {
- offset = offset[1];
- }
- var res = plonk_wasm.caml_fp_srs_read(offset, caml_jsstring_of_string(path));
- if (res) {
- return [0, res]; // Some(res)
- } else {
- return 0; // None
- }
-};
-
-// Provides: caml_fp_srs_lagrange_commitment
-// Requires: tsSrs
-var caml_fp_srs_lagrange_commitment = tsSrs.fp.lagrangeCommitment;
-
-// Provides: caml_fp_srs_commit_evaluations
-// Requires: plonk_wasm, tsRustConversion
-var caml_fp_srs_commit_evaluations = function (t, domain_size, fps) {
- var res = plonk_wasm.caml_fp_srs_commit_evaluations(
- t,
- domain_size,
- tsRustConversion.fp.vectorToRust(fps)
- );
- return tsRustConversion.fp.polyCommFromRust(res);
-};
-
-// Provides: caml_fp_srs_b_poly_commitment
-// Requires: plonk_wasm, tsRustConversion
-var caml_fp_srs_b_poly_commitment = function (srs, chals) {
- var res = plonk_wasm.caml_fp_srs_b_poly_commitment(
- srs,
- tsRustConversion.fieldsToRustFlat(chals)
- );
- return tsRustConversion.fp.polyCommFromRust(res);
-};
-
-// Provides: caml_fp_srs_batch_accumulator_check
-// Requires: plonk_wasm, tsRustConversion
-var caml_fp_srs_batch_accumulator_check = function (srs, comms, chals) {
- var rust_comms = tsRustConversion.fp.pointsToRust(comms);
- var rust_chals = tsRustConversion.fp.vectorToRust(chals);
- var ok = plonk_wasm.caml_fp_srs_batch_accumulator_check(
- srs,
- rust_comms,
- rust_chals
- );
- return ok;
-};
-
-// Provides: caml_fp_srs_batch_accumulator_generate
-// Requires: plonk_wasm, tsRustConversion
-var caml_fp_srs_batch_accumulator_generate = function (srs, n_comms, chals) {
- var rust_chals = tsRustConversion.fp.vectorToRust(chals);
- var rust_comms = plonk_wasm.caml_fp_srs_batch_accumulator_generate(
- srs,
- n_comms,
- rust_chals
- );
- return tsRustConversion.fp.pointsFromRust(rust_comms);
-};
-
-// Provides: caml_fp_srs_h
-// Requires: plonk_wasm, tsRustConversion
-var caml_fp_srs_h = function (t) {
- return tsRustConversion.fp.pointFromRust(plonk_wasm.caml_fp_srs_h(t));
-};
-
-// Provides: caml_fp_srs_add_lagrange_basis
-// Requires: tsSrs
-var caml_fp_srs_add_lagrange_basis = tsSrs.fp.addLagrangeBasis;
-
-// Provides: caml_fq_srs_create
-// Requires: tsSrs
-var caml_fq_srs_create = tsSrs.fq.create;
-
-// Provides: caml_fq_srs_write
-// Requires: plonk_wasm, caml_jsstring_of_string
-var caml_fq_srs_write = function (append, t, path) {
- if (append === 0) {
- append = undefined;
- } else {
- append = append[1];
- }
- return plonk_wasm.caml_fq_srs_write(append, t, caml_jsstring_of_string(path));
-};
-
-// Provides: caml_fq_srs_read
-// Requires: plonk_wasm, caml_jsstring_of_string
-var caml_fq_srs_read = function (offset, path) {
- if (offset === 0) {
- offset = undefined;
- } else {
- offset = offset[1];
- }
- var res = plonk_wasm.caml_fq_srs_read(offset, caml_jsstring_of_string(path));
- if (res) {
- return [0, res]; // Some(res)
- } else {
- return 0; // None
- }
-};
-
-// Provides: caml_fq_srs_lagrange_commitment
-// Requires: tsSrs
-var caml_fq_srs_lagrange_commitment = tsSrs.fq.lagrangeCommitment;
-
-// Provides: caml_fq_srs_commit_evaluations
-// Requires: plonk_wasm, tsRustConversion
-var caml_fq_srs_commit_evaluations = function (t, domain_size, fqs) {
- var res = plonk_wasm.caml_fq_srs_commit_evaluations(
- t,
- domain_size,
- tsRustConversion.fq.vectorToRust(fqs)
- );
- return tsRustConversion.fq.polyCommFromRust(res);
-};
-
-// Provides: caml_fq_srs_b_poly_commitment
-// Requires: plonk_wasm, tsRustConversion
-var caml_fq_srs_b_poly_commitment = function (srs, chals) {
- var res = plonk_wasm.caml_fq_srs_b_poly_commitment(
- srs,
- tsRustConversion.fieldsToRustFlat(chals)
- );
- return tsRustConversion.fq.polyCommFromRust(res);
-};
-
-// Provides: caml_fq_srs_batch_accumulator_check
-// Requires: plonk_wasm, tsRustConversion
-var caml_fq_srs_batch_accumulator_check = function (srs, comms, chals) {
- var rust_comms = tsRustConversion.fq.pointsToRust(comms);
- var rust_chals = tsRustConversion.fq.vectorToRust(chals);
- var ok = plonk_wasm.caml_fq_srs_batch_accumulator_check(
- srs,
- rust_comms,
- rust_chals
- );
- return ok;
-};
-
-// Provides: caml_fq_srs_batch_accumulator_generate
-// Requires: plonk_wasm, tsRustConversion
-var caml_fq_srs_batch_accumulator_generate = function (srs, comms, chals) {
- var rust_chals = tsRustConversion.fq.vectorToRust(chals);
- var rust_comms = plonk_wasm.caml_fq_srs_batch_accumulator_generate(
- srs,
- comms,
- rust_chals
- );
- return tsRustConversion.fq.pointsFromRust(rust_comms);
-};
-
-// Provides: caml_fq_srs_h
-// Requires: plonk_wasm, tsRustConversion
-var caml_fq_srs_h = function (t) {
- return tsRustConversion.fq.pointFromRust(plonk_wasm.caml_fq_srs_h(t));
-};
-
-// Provides: caml_fq_srs_add_lagrange_basis
-// Requires: tsSrs
-var caml_fq_srs_add_lagrange_basis = tsSrs.fq.addLagrangeBasis;
-
-// gate vector
-
-// Provides: caml_pasta_fp_plonk_gate_vector_create
-// Requires: plonk_wasm, free_on_finalize
-var caml_pasta_fp_plonk_gate_vector_create = function () {
- return free_on_finalize(plonk_wasm.caml_pasta_fp_plonk_gate_vector_create());
-};
-
-// Provides: caml_pasta_fp_plonk_gate_vector_add
-// Requires: plonk_wasm, tsRustConversion
-var caml_pasta_fp_plonk_gate_vector_add = function (v, x) {
- return plonk_wasm.caml_pasta_fp_plonk_gate_vector_add(
- v,
- tsRustConversion.fp.gateToRust(x)
- );
-};
-
-// Provides: caml_pasta_fp_plonk_gate_vector_get
-// Requires: plonk_wasm, tsRustConversion
-var caml_pasta_fp_plonk_gate_vector_get = function (v, i) {
- return tsRustConversion.fp.gateFromRust(
- plonk_wasm.caml_pasta_fp_plonk_gate_vector_get(v, i)
- );
-};
-
-// Provides: caml_pasta_fp_plonk_gate_vector_len
-// Requires: plonk_wasm
-var caml_pasta_fp_plonk_gate_vector_len = function (v) {
- return plonk_wasm.caml_pasta_fp_plonk_gate_vector_len(v);
-};
-
-// Provides: caml_pasta_fp_plonk_gate_vector_wrap
-// Requires: plonk_wasm, tsRustConversion
-var caml_pasta_fp_plonk_gate_vector_wrap = function (v, x, y) {
- return plonk_wasm.caml_pasta_fp_plonk_gate_vector_wrap(
- v,
- tsRustConversion.wireToRust(x),
- tsRustConversion.wireToRust(y)
- );
-};
-
-// Provides: caml_pasta_fp_plonk_gate_vector_digest
-// Requires: plonk_wasm, caml_bytes_of_uint8array
-var caml_pasta_fp_plonk_gate_vector_digest = function (
- public_input_size,
- gate_vector
-) {
- var uint8array = plonk_wasm.caml_pasta_fp_plonk_gate_vector_digest(
- public_input_size,
- gate_vector
- );
- return caml_bytes_of_uint8array(uint8array);
-};
-
-// Provides: caml_pasta_fp_plonk_circuit_serialize
-// Requires: plonk_wasm, caml_string_of_jsstring
-var caml_pasta_fp_plonk_circuit_serialize = function (
- public_input_size,
- gate_vector
-) {
- return caml_string_of_jsstring(
- plonk_wasm.caml_pasta_fp_plonk_circuit_serialize(
- public_input_size,
- gate_vector
- )
- );
-};
-
-// prover index
-
-// Provides: caml_pasta_fq_plonk_gate_vector_create
-// Requires: plonk_wasm, free_on_finalize
-var caml_pasta_fq_plonk_gate_vector_create = function () {
- return free_on_finalize(plonk_wasm.caml_pasta_fq_plonk_gate_vector_create());
-};
-
-// Provides: caml_pasta_fq_plonk_gate_vector_add
-// Requires: plonk_wasm, tsRustConversion
-var caml_pasta_fq_plonk_gate_vector_add = function (v, x) {
- return plonk_wasm.caml_pasta_fq_plonk_gate_vector_add(
- v,
- tsRustConversion.fq.gateToRust(x)
- );
-};
-
-// Provides: caml_pasta_fq_plonk_gate_vector_get
-// Requires: plonk_wasm, tsRustConversion
-var caml_pasta_fq_plonk_gate_vector_get = function (v, i) {
- return tsRustConversion.fq.gateFromRust(
- plonk_wasm.caml_pasta_fq_plonk_gate_vector_get(v, i)
- );
-};
-
-// Provides: caml_pasta_fq_plonk_gate_vector_len
-// Requires: plonk_wasm
-var caml_pasta_fq_plonk_gate_vector_len = function (v) {
- return plonk_wasm.caml_pasta_fq_plonk_gate_vector_len(v);
-};
-
-// Provides: caml_pasta_fq_plonk_gate_vector_wrap
-// Requires: plonk_wasm, tsRustConversion
-var caml_pasta_fq_plonk_gate_vector_wrap = function (v, x, y) {
- return plonk_wasm.caml_pasta_fq_plonk_gate_vector_wrap(
- v,
- tsRustConversion.wireToRust(x),
- tsRustConversion.wireToRust(y)
- );
-};
-
-// Provides: caml_pasta_fq_plonk_gate_vector_digest
-// Requires: plonk_wasm, caml_bytes_of_uint8array
-var caml_pasta_fq_plonk_gate_vector_digest = function (
- public_input_size,
- gate_vector
-) {
- var uint8array = plonk_wasm.caml_pasta_fq_plonk_gate_vector_digest(
- public_input_size,
- gate_vector
- );
- return caml_bytes_of_uint8array(uint8array);
-};
-
-// Provides: caml_pasta_fq_plonk_circuit_serialize
-// Requires: plonk_wasm, caml_string_of_jsstring
-var caml_pasta_fq_plonk_circuit_serialize = function (
- public_input_size,
- gate_vector
-) {
- return caml_string_of_jsstring(
- plonk_wasm.caml_pasta_fq_plonk_circuit_serialize(
- public_input_size,
- gate_vector
- )
- );
-};
-
-// Provides: caml_pasta_fp_plonk_index_create
-// Requires: plonk_wasm, free_on_finalize, tsRustConversion
-var caml_pasta_fp_plonk_index_create = function (
- gates,
- public_inputs,
- caml_lookup_tables,
- caml_runtime_table_cfgs,
- prev_challenges,
- urs
-) {
- var wasm_lookup_tables =
- tsRustConversion.fp.lookupTablesToRust(caml_lookup_tables);
- var wasm_runtime_table_cfgs = tsRustConversion.fp.runtimeTableCfgsToRust(
- caml_runtime_table_cfgs
- );
-
- var t = plonk_wasm.caml_pasta_fp_plonk_index_create(
- gates,
- public_inputs,
- wasm_lookup_tables,
- wasm_runtime_table_cfgs,
- prev_challenges,
- urs
- );
- return free_on_finalize(t);
-};
-
-// Provides: caml_pasta_fp_plonk_index_create_bytecode
-// Requires: caml_pasta_fp_plonk_index_create
-var caml_pasta_fp_plonk_index_create_bytecode = function (
- gates,
- public_inputs,
- caml_lookup_tables,
- caml_runtime_table_cfgs,
- prev_challenges,
- urs
-) {
- return caml_pasta_fp_plonk_index_create(
- gates,
- public_inputs,
- caml_lookup_tables,
- caml_runtime_table_cfgs,
- prev_challenges,
- urs
- );
-};
-
-// Provides: caml_pasta_fp_plonk_index_max_degree
-// Requires: plonk_wasm
-var caml_pasta_fp_plonk_index_max_degree =
- plonk_wasm.caml_pasta_fp_plonk_index_max_degree;
-
-// Provides: caml_pasta_fp_plonk_index_public_inputs
-// Requires: plonk_wasm
-var caml_pasta_fp_plonk_index_public_inputs =
- plonk_wasm.caml_pasta_fp_plonk_index_public_inputs;
-
-// Provides: caml_pasta_fp_plonk_index_domain_d1_size
-// Requires: plonk_wasm
-var caml_pasta_fp_plonk_index_domain_d1_size =
- plonk_wasm.caml_pasta_fp_plonk_index_domain_d1_size;
-
-// Provides: caml_pasta_fp_plonk_index_domain_d4_size
-// Requires: plonk_wasm
-var caml_pasta_fp_plonk_index_domain_d4_size =
- plonk_wasm.caml_pasta_fp_plonk_index_domain_d4_size;
-
-// Provides: caml_pasta_fp_plonk_index_domain_d8_size
-// Requires: plonk_wasm
-var caml_pasta_fp_plonk_index_domain_d8_size =
- plonk_wasm.caml_pasta_fp_plonk_index_domain_d8_size;
-
-// Provides: caml_pasta_fp_plonk_index_read
-// Requires: plonk_wasm, caml_jsstring_of_string
-var caml_pasta_fp_plonk_index_read = function (offset, urs, path) {
- if (offset === 0) {
- offset = undefined;
- } else {
- offset = offset[1];
- }
- return plonk_wasm.caml_pasta_fp_plonk_index_read(
- offset,
- urs,
- caml_jsstring_of_string(path)
- );
-};
-
-// Provides: caml_pasta_fp_plonk_index_write
-// Requires: plonk_wasm, caml_jsstring_of_string
-var caml_pasta_fp_plonk_index_write = function (append, t, path) {
- if (append === 0) {
- append = undefined;
- } else {
- append = append[1];
- }
- return plonk_wasm.caml_pasta_fp_plonk_index_write(
- append,
- t,
- caml_jsstring_of_string(path)
- );
-};
-
-// Provides: caml_pasta_fq_plonk_index_create
-// Requires: plonk_wasm, free_on_finalize, tsRustConversion
-var caml_pasta_fq_plonk_index_create = function (
- gates,
- public_inputs,
- caml_lookup_tables,
- caml_runtime_table_cfgs,
- prev_challenges,
- urs
-) {
- var wasm_lookup_tables =
- tsRustConversion.fq.lookupTablesToRust(caml_lookup_tables);
- var wasm_runtime_table_cfgs = tsRustConversion.fq.runtimeTableCfgsToRust(
- caml_runtime_table_cfgs
- );
-
- return free_on_finalize(
- plonk_wasm.caml_pasta_fq_plonk_index_create(
- gates,
- public_inputs,
- wasm_lookup_tables,
- wasm_runtime_table_cfgs,
- prev_challenges,
- urs
- )
- );
-};
-
-// Provides: caml_pasta_fq_plonk_index_create_bytecode
-// Requires: caml_pasta_fq_plonk_index_create
-var caml_pasta_fq_plonk_index_create_bytecode = function (
- gates,
- public_inputs,
- caml_lookup_tables,
- caml_runtime_table_cfgs,
- prev_challenges,
- urs
-) {
- return caml_pasta_fq_plonk_index_create(
- gates,
- public_inputs,
- caml_lookup_tables,
- caml_runtime_table_cfgs,
- prev_challenges,
- urs
- );
-};
-
-// Provides: caml_pasta_fq_plonk_index_max_degree
-// Requires: plonk_wasm
-var caml_pasta_fq_plonk_index_max_degree =
- plonk_wasm.caml_pasta_fq_plonk_index_max_degree;
-
-// Provides: caml_pasta_fq_plonk_index_public_inputs
-// Requires: plonk_wasm
-var caml_pasta_fq_plonk_index_public_inputs =
- plonk_wasm.caml_pasta_fq_plonk_index_public_inputs;
-
-// Provides: caml_pasta_fq_plonk_index_domain_d1_size
-// Requires: plonk_wasm
-var caml_pasta_fq_plonk_index_domain_d1_size =
- plonk_wasm.caml_pasta_fq_plonk_index_domain_d1_size;
-
-// Provides: caml_pasta_fq_plonk_index_domain_d4_size
-// Requires: plonk_wasm
-var caml_pasta_fq_plonk_index_domain_d4_size =
- plonk_wasm.caml_pasta_fq_plonk_index_domain_d4_size;
-
-// Provides: caml_pasta_fq_plonk_index_domain_d8_size
-// Requires: plonk_wasm
-var caml_pasta_fq_plonk_index_domain_d8_size =
- plonk_wasm.caml_pasta_fq_plonk_index_domain_d8_size;
-
-// Provides: caml_pasta_fq_plonk_index_read
-// Requires: plonk_wasm, caml_jsstring_of_string
-var caml_pasta_fq_plonk_index_read = function (offset, urs, path) {
- if (offset === 0) {
- offset = undefined;
- } else {
- offset = offset[1];
- }
- return plonk_wasm.caml_pasta_fq_plonk_index_read(
- offset,
- urs,
- caml_jsstring_of_string(path)
- );
-};
-
-// Provides: caml_pasta_fq_plonk_index_write
-// Requires: plonk_wasm, caml_jsstring_of_string
-var caml_pasta_fq_plonk_index_write = function (append, t, path) {
- if (append === 0) {
- append = undefined;
- } else {
- append = append[1];
- }
- return plonk_wasm.caml_pasta_fq_plonk_index_write(
- append,
- t,
- caml_jsstring_of_string(path)
- );
-};
-
-// verifier index
-
-// Provides: caml_opt_of_rust
-var caml_opt_of_rust = function (value, value_of_rust) {
- if (value === undefined) {
- return 0;
- } else {
- return [0, value_of_rust(value)];
- }
-};
-
-// Provides: caml_opt_to_rust
-var caml_opt_to_rust = function (caml_optional_value, to_rust) {
- // to_rust expects the parameters of the variant. A `Some vx` is represented
- // as [0, vx]
- if (caml_optional_value === 0) {
- return undefined;
- } else {
- return to_rust(caml_optional_value[1]);
- }
-};
-
-// Provides: caml_pasta_fp_plonk_verifier_index_create
-// Requires: plonk_wasm, tsRustConversion
-var caml_pasta_fp_plonk_verifier_index_create = function (x) {
- var vk = plonk_wasm.caml_pasta_fp_plonk_verifier_index_create(x);
- return tsRustConversion.fp.verifierIndexFromRust(vk);
-};
-
-// Provides: caml_pasta_fp_plonk_verifier_index_read
-// Requires: plonk_wasm, caml_jsstring_of_string, tsRustConversion
-var caml_pasta_fp_plonk_verifier_index_read = function (offset, urs, path) {
- if (offset === 0) {
- offset = undefined;
- } else {
- offset = offset[1];
- }
- return tsRustConversion.fp.verifierIndexFromRust(
- plonk_wasm.caml_pasta_fp_plonk_verifier_index_read(
- offset,
- urs,
- caml_jsstring_of_string(path)
- )
- );
-};
-
-// Provides: caml_pasta_fp_plonk_verifier_index_write
-// Requires: plonk_wasm, caml_jsstring_of_string, tsRustConversion
-var caml_pasta_fp_plonk_verifier_index_write = function (append, t, path) {
- if (append === 0) {
- append = undefined;
- } else {
- append = append[1];
- }
- return plonk_wasm.caml_pasta_fp_plonk_verifier_index_write(
- append,
- tsRustConversion.fp.verifierIndexToRust(t),
- caml_jsstring_of_string(path)
- );
-};
-
-// Provides: caml_pasta_fp_plonk_verifier_index_shifts
-// Requires: plonk_wasm, tsRustConversion
-var caml_pasta_fp_plonk_verifier_index_shifts = function (log2_size) {
- return tsRustConversion.fp.shiftsFromRust(
- plonk_wasm.caml_pasta_fp_plonk_verifier_index_shifts(log2_size)
- );
-};
-
-// Provides: caml_pasta_fp_plonk_verifier_index_dummy
-// Requires: plonk_wasm, tsRustConversion
-var caml_pasta_fp_plonk_verifier_index_dummy = function () {
- var res = plonk_wasm.caml_pasta_fp_plonk_verifier_index_dummy();
- return tsRustConversion.fp.verifierIndexFromRust(res);
-};
-
-// Provides: caml_pasta_fp_plonk_verifier_index_deep_copy
-// Requires: plonk_wasm, tsRustConversion
-var caml_pasta_fp_plonk_verifier_index_deep_copy = function (x) {
- return tsRustConversion.fp.verifierIndexFromRust(
- plonk_wasm.caml_pasta_fp_plonk_verifier_index_deep_copy(
- tsRustConversion.fp.verifierIndexToRust(x)
- )
- );
-};
-
-// Provides: caml_pasta_fq_plonk_verifier_index_create
-// Requires: plonk_wasm, tsRustConversion
-var caml_pasta_fq_plonk_verifier_index_create = function (x) {
- return tsRustConversion.fq.verifierIndexFromRust(
- plonk_wasm.caml_pasta_fq_plonk_verifier_index_create(x)
- );
-};
-
-// Provides: caml_pasta_fq_plonk_verifier_index_read
-// Requires: plonk_wasm, caml_jsstring_of_string, tsRustConversion
-var caml_pasta_fq_plonk_verifier_index_read = function (offset, urs, path) {
- if (offset === 0) {
- offset = undefined;
- } else {
- offset = offset[1];
- }
- return tsRustConversion.fq.verifierIndexFromRust(
- plonk_wasm.caml_pasta_fq_plonk_verifier_index_read(
- offset,
- urs,
- caml_jsstring_of_string(path)
- )
- );
-};
-
-// Provides: caml_pasta_fq_plonk_verifier_index_write
-// Requires: plonk_wasm, caml_jsstring_of_string, tsRustConversion
-var caml_pasta_fq_plonk_verifier_index_write = function (append, t, path) {
- if (append === 0) {
- append = undefined;
- } else {
- append = append[1];
- }
- return plonk_wasm.caml_pasta_fq_plonk_verifier_index_write(
- append,
- tsRustConversion.fq.verifierIndexToRust(t),
- caml_jsstring_of_string(path)
- );
-};
-
-// Provides: caml_pasta_fq_plonk_verifier_index_shifts
-// Requires: plonk_wasm, tsRustConversion
-var caml_pasta_fq_plonk_verifier_index_shifts = function (log2_size) {
- return tsRustConversion.fq.shiftsFromRust(
- plonk_wasm.caml_pasta_fq_plonk_verifier_index_shifts(log2_size)
- );
-};
-
-// Provides: caml_pasta_fq_plonk_verifier_index_dummy
-// Requires: plonk_wasm, tsRustConversion
-var caml_pasta_fq_plonk_verifier_index_dummy = function () {
- return tsRustConversion.fq.verifierIndexFromRust(
- plonk_wasm.caml_pasta_fq_plonk_verifier_index_dummy()
- );
-};
-
-// Provides: caml_pasta_fq_plonk_verifier_index_deep_copy
-// Requires: plonk_wasm, tsRustConversion, tsRustConversion
-var caml_pasta_fq_plonk_verifier_index_deep_copy = function (x) {
- return tsRustConversion.fq.verifierIndexFromRust(
- plonk_wasm.caml_pasta_fq_plonk_verifier_index_deep_copy(
- tsRustConversion.fq.verifierIndexToRust(x)
- )
- );
-};
-
-// proof
-
-// Provides: caml_pasta_fp_plonk_proof_create
-// Requires: plonk_wasm, tsRustConversion
-var caml_pasta_fp_plonk_proof_create = function (
- index,
- witness_cols,
- caml_runtime_tables,
- prev_challenges,
- prev_sgs
-) {
- var w = new plonk_wasm.WasmVecVecFp(witness_cols.length - 1);
- for (var i = 1; i < witness_cols.length; i++) {
- w.push(tsRustConversion.fp.vectorToRust(witness_cols[i]));
- }
- witness_cols = w;
- prev_challenges = tsRustConversion.fp.vectorToRust(prev_challenges);
- var wasm_runtime_tables =
- tsRustConversion.fp.runtimeTablesToRust(caml_runtime_tables);
- prev_sgs = tsRustConversion.fp.pointsToRust(prev_sgs);
- var proof = plonk_wasm.caml_pasta_fp_plonk_proof_create(
- index,
- witness_cols,
- wasm_runtime_tables,
- prev_challenges,
- prev_sgs
- );
- return tsRustConversion.fp.proofFromRust(proof);
-};
-
-// Provides: caml_pasta_fp_plonk_proof_verify
-// Requires: plonk_wasm, tsRustConversion
-var caml_pasta_fp_plonk_proof_verify = function (index, proof) {
- index = tsRustConversion.fp.verifierIndexToRust(index);
- proof = tsRustConversion.fp.proofToRust(proof);
- return plonk_wasm.caml_pasta_fp_plonk_proof_verify(index, proof);
-};
-
-// Provides: caml_pasta_fp_plonk_proof_batch_verify
-// Requires: plonk_wasm, tsRustConversion
-var caml_pasta_fp_plonk_proof_batch_verify = function (indexes, proofs) {
- indexes = tsRustConversion.mapMlArrayToRustVector(
- indexes,
- tsRustConversion.fp.verifierIndexToRust
- );
- proofs = tsRustConversion.mapMlArrayToRustVector(
- proofs,
- tsRustConversion.fp.proofToRust
- );
- return plonk_wasm.caml_pasta_fp_plonk_proof_batch_verify(indexes, proofs);
-};
-
-// Provides: caml_pasta_fp_plonk_proof_dummy
-// Requires: plonk_wasm, tsRustConversion
-var caml_pasta_fp_plonk_proof_dummy = function () {
- return tsRustConversion.fp.proofFromRust(
- plonk_wasm.caml_pasta_fp_plonk_proof_dummy()
- );
-};
-
-// Provides: caml_pasta_fp_plonk_proof_deep_copy
-// Requires: plonk_wasm, tsRustConversion
-var caml_pasta_fp_plonk_proof_deep_copy = function (proof) {
- return tsRustConversion.fp.proofFromRust(
- plonk_wasm.caml_pasta_fp_plonk_proof_deep_copy(
- tsRustConversion.fp.proofToRust(proof)
- )
- );
-};
-
-// Provides: caml_pasta_fq_plonk_proof_create
-// Requires: plonk_wasm, tsRustConversion
-var caml_pasta_fq_plonk_proof_create = function (
- index,
- witness_cols,
- caml_runtime_tables,
- prev_challenges,
- prev_sgs
-) {
- var w = new plonk_wasm.WasmVecVecFq(witness_cols.length - 1);
- for (var i = 1; i < witness_cols.length; i++) {
- w.push(tsRustConversion.fq.vectorToRust(witness_cols[i]));
- }
- witness_cols = w;
- prev_challenges = tsRustConversion.fq.vectorToRust(prev_challenges);
- var wasm_runtime_tables =
- tsRustConversion.fq.runtimeTablesToRust(caml_runtime_tables);
- prev_sgs = tsRustConversion.fq.pointsToRust(prev_sgs);
- var proof = plonk_wasm.caml_pasta_fq_plonk_proof_create(
- index,
- witness_cols,
- wasm_runtime_tables,
- prev_challenges,
- prev_sgs
- );
- return tsRustConversion.fq.proofFromRust(proof);
-};
-
-// Provides: caml_pasta_fq_plonk_proof_verify
-// Requires: plonk_wasm, tsRustConversion
-var caml_pasta_fq_plonk_proof_verify = function (index, proof) {
- index = tsRustConversion.fq.verifierIndexToRust(index);
- proof = tsRustConversion.fq.proofToRust(proof);
- return plonk_wasm.caml_pasta_fq_plonk_proof_verify(index, proof);
-};
-
-// Provides: caml_pasta_fq_plonk_proof_batch_verify
-// Requires: plonk_wasm, tsRustConversion
-var caml_pasta_fq_plonk_proof_batch_verify = function (indexes, proofs) {
- indexes = tsRustConversion.mapMlArrayToRustVector(
- indexes,
- tsRustConversion.fq.verifierIndexToRust
- );
- proofs = tsRustConversion.mapMlArrayToRustVector(
- proofs,
- tsRustConversion.fq.proofToRust
- );
- return plonk_wasm.caml_pasta_fq_plonk_proof_batch_verify(indexes, proofs);
-};
-
-// Provides: caml_pasta_fq_plonk_proof_dummy
-// Requires: plonk_wasm, tsRustConversion
-var caml_pasta_fq_plonk_proof_dummy = function () {
- return tsRustConversion.fq.proofFromRust(
- plonk_wasm.caml_pasta_fq_plonk_proof_dummy()
- );
-};
-
-// Provides: caml_pasta_fq_plonk_proof_deep_copy
-// Requires: plonk_wasm, tsRustConversion
-var caml_pasta_fq_plonk_proof_deep_copy = function (proof) {
- return tsRustConversion.fq.proofFromRust(
- plonk_wasm.caml_pasta_fq_plonk_proof_deep_copy(
- tsRustConversion.fq.proofToRust(proof)
- )
- );
-};
-
-// oracles
-
-// Provides: fp_oracles_create
-// Requires: plonk_wasm, tsRustConversion
-var fp_oracles_create = function (lgr_comm, verifier_index, proof) {
- return tsRustConversion.fp.oraclesFromRust(
- plonk_wasm.fp_oracles_create(
- tsRustConversion.fp.polyCommsToRust(lgr_comm),
- tsRustConversion.fp.verifierIndexToRust(verifier_index),
- tsRustConversion.fp.proofToRust(proof)
- )
- );
-};
-
-// Provides: fp_oracles_create_no_public
-// Requires: fp_oracles_create
-var fp_oracles_create_no_public = function (lgr_comm, verifier_index, proof) {
- return fp_oracles_create(lgr_comm, verifier_index, [0, 0, proof]);
-};
-
-// Provides: fp_oracles_dummy
-// Requires: plonk_wasm, tsRustConversion
-var fp_oracles_dummy = function () {
- return tsRustConversion.fp.oraclesFromRust(plonk_wasm.fp_oracles_dummy());
-};
-
-// Provides: fp_oracles_deep_copy
-// Requires: plonk_wasm, tsRustConversion
-var fp_oracles_deep_copy = function (x) {
- return tsRustConversion.fp.oraclesFromRust(
- plonk_wasm.fp_oracles_deep_copy(tsRustConversion.fp.oraclesToRust(x))
- );
-};
-
-// Provides: fq_oracles_create
-// Requires: plonk_wasm, tsRustConversion
-var fq_oracles_create = function (lgr_comm, verifier_index, proof) {
- return tsRustConversion.fq.oraclesFromRust(
- plonk_wasm.fq_oracles_create(
- tsRustConversion.fq.polyCommsToRust(lgr_comm),
- tsRustConversion.fq.verifierIndexToRust(verifier_index),
- tsRustConversion.fq.proofToRust(proof)
- )
- );
-};
-
-// Provides: fq_oracles_create_no_public
-// Requires: fq_oracles_create
-var fq_oracles_create_no_public = function (lgr_comm, verifier_index, proof) {
- return fq_oracles_create(lgr_comm, verifier_index, [0, 0, proof]);
-};
-
-// Provides: fq_oracles_dummy
-// Requires: plonk_wasm, tsRustConversion
-var fq_oracles_dummy = function () {
- return tsRustConversion.fq.oraclesFromRust(plonk_wasm.fq_oracles_dummy());
-};
-
-// Provides: fq_oracles_deep_copy
-// Requires: plonk_wasm, tsRustConversion
-var fq_oracles_deep_copy = function (x) {
- return tsRustConversion.fq.oraclesFromRust(
- plonk_wasm.fq_oracles_deep_copy(tsRustConversion.fq.oraclesToRust(x))
- );
-};
-
-// This is fake -- parameters are only needed on the Rust side, so no need to return something meaningful
-// Provides: caml_pasta_fp_poseidon_params_create
-function caml_pasta_fp_poseidon_params_create() {
- return [0];
-}
-// Provides: caml_pasta_fq_poseidon_params_create
-function caml_pasta_fq_poseidon_params_create() {
- return [0];
-}
-
-// Provides: caml_pasta_fp_poseidon_block_cipher
-// Requires: plonk_wasm, tsRustConversion, tsRustConversion
-function caml_pasta_fp_poseidon_block_cipher(_fake_params, fp_vector) {
- // 1. get permuted field vector from rust
- var wasm_flat_vector = plonk_wasm.caml_pasta_fp_poseidon_block_cipher(
- tsRustConversion.fp.vectorToRust(fp_vector)
- );
- var new_fp_vector = tsRustConversion.fp.vectorFromRust(wasm_flat_vector);
- // 2. write back modified field vector to original one
- new_fp_vector.forEach(function (a, i) {
- fp_vector[i] = a;
- });
-}
-
-// Provides: caml_pasta_fq_poseidon_block_cipher
-// Requires: plonk_wasm, tsRustConversion, tsRustConversion
-function caml_pasta_fq_poseidon_block_cipher(_fake_params, fq_vector) {
- // 1. get permuted field vector from rust
- var wasm_flat_vector = plonk_wasm.caml_pasta_fq_poseidon_block_cipher(
- tsRustConversion.fq.vectorToRust(fq_vector)
- );
- var new_fq_vector = tsRustConversion.fq.vectorFromRust(wasm_flat_vector);
- // 2. write back modified field vector to original one
- new_fq_vector.forEach(function (a, i) {
- fq_vector[i] = a;
- });
-}
-
-// Provides: caml_pasta_fp_plonk_proof_example_with_lookup
-function caml_pasta_fp_plonk_proof_example_with_lookup() {
- // This is only used in the pickles unit tests
- throw new Error(
- 'Unimplemented caml_pasta_fp_plonk_proof_example_with_lookup'
- );
-}
-
-// Provides: prover_to_json
-// Requires: plonk_wasm
-var prover_to_json = plonk_wasm.prover_to_json;
-
-// Provides: integers_uint64_of_uint32
-// Requires: UInt64, caml_int64_of_int32
-function integers_uint64_of_uint32(i) {
- // Same as integers_uint64_of_int
- return new UInt64(caml_int64_of_int32(i));
-}
-
-/////////////////////////////////////////////////////////////////////////////
-// The *_example_* functions below are only used in the pickles unit tests //
-/////////////////////////////////////////////////////////////////////////////
-
-// Provides: caml_pasta_fp_plonk_proof_example_with_ffadd
-function caml_pasta_fp_plonk_proof_example_with_ffadd() {
- throw new Error('Unimplemented caml_pasta_fp_plonk_proof_example_with_ffadd');
-}
-
-// Provides: caml_pasta_fp_plonk_proof_example_with_foreign_field_mul
-function caml_pasta_fp_plonk_proof_example_with_foreign_field_mul() {
- throw new Error(
- 'Unimplemented caml_pasta_fp_plonk_proof_example_with_foreign_field_mul'
- );
-}
-
-// Provides: caml_pasta_fp_plonk_proof_example_with_range_check
-function caml_pasta_fp_plonk_proof_example_with_range_check() {
- throw new Error(
- 'Unimplemented caml_pasta_fp_plonk_proof_example_with_range_check'
- );
-}
-
-// Provides: caml_pasta_fp_plonk_proof_example_with_range_check0
-function caml_pasta_fp_plonk_proof_example_with_range_check0() {
- throw new Error(
- 'Unimplemented caml_pasta_fp_plonk_proof_example_with_range_check0'
- );
-}
-
-// Provides: caml_pasta_fp_plonk_proof_example_with_rot
-function caml_pasta_fp_plonk_proof_example_with_rot() {
- throw new Error('Unimplemented caml_pasta_fp_plonk_proof_example_with_rot');
-}
-
-// Provides: caml_pasta_fp_plonk_proof_example_with_xor
-function caml_pasta_fp_plonk_proof_example_with_xor() {
- throw new Error('Unimplemented caml_pasta_fp_plonk_proof_example_with_xor');
-}
diff --git a/src/lib/crypto/kimchi_bindings/js/bindings/README.md b/src/lib/crypto/kimchi_bindings/js/bindings/README.md
new file mode 100644
index 00000000000..c6c2ee2a8d6
--- /dev/null
+++ b/src/lib/crypto/kimchi_bindings/js/bindings/README.md
@@ -0,0 +1,88 @@
+**Despite popular belief, the bindings files are not auto-generated; they just look funny.**
+
+In OCaml, we sometimes call out to foreign functions (usually indicated by the `external` keyword); here's an example:
+
+```ml
+module FunnyLittleModule = struct
+ external do_cool_thingies : unit -> unit = "caml_do_cool_thingies"
+end
+```
+
+This way, when calling `FunnyLittleModule.do_cool_thingies`, we tell OCaml that the implementation of `do_cool_thingies` actually lives somewhere else, not in OCaml directly. That other place can, as in our case, be in Rust! So whenever we call `FunnyLittleModule.do_cool_thingies`, OCaml looks under the hood for an external function called `caml_do_cool_thingies` - in our case somewhere in the Rust bindings - and executes it.
+
+We use this mechanism all over the place: many things in the code base rely on implementations in Rust. For example, we use Kimchi to generate proofs! So in order to tell OCaml to generate a Kimchi proof, we need to point it at the correct function living in the Rust proof-systems repository.
+
+The other side of the `external` keyword lives in the Rust bindings layer, more specifically in `src/lib/crypto/kimchi_bindings/wasm/src` - in our case, we want to establish bindings between OCaml that has been compiled to JavaScript using JSOO and Rust that has been compiled to WASM.
+
+For example, the implementation of `caml_do_cool_thingies` could look like this:
+
+```rs
+#[wasm_bindgen]
+pub fn caml_do_cool_thingies() {
+ do_more_funny_things();
+}
+```
+
+`#[wasm_bindgen]` tells Rust that we want to compile this code to WASM and call the function from there.
+`pub fn caml_do_cool_thingies()` declares our function under exactly the name that the `external` declaration in our OCaml module looks for.
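+
+As a purely hypothetical aside (the function below doesn't exist in this repo), arguments and return values work the same way: for simple numeric types, `wasm_bindgen` marshals them across the WASM boundary automatically.
+
+```rs
+use wasm_bindgen::prelude::*;
+
+// Hypothetical example, not an actual binding in this repo: simple
+// numeric types cross the WASM boundary without manual conversion.
+#[wasm_bindgen]
+pub fn caml_add_cool_numbers(a: u32, b: u32) -> u32 {
+    a + b
+}
+```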
+
+There's one step left! Since we are compiling OCaml to JavaScript using JSOO, we need to tell JSOO how to connect these `external` functions and where to look for them. That's where all these funny little bindings files come in: when compiling the OCaml, we tell JSOO to "look at these functions for their correct implementation" - which means we have to write bindings files that "proxy" OCaml's `external` functions to their implementations. These implementations can live directly in JavaScript, for example something like this:
+
+```js
+// Provides: caml_do_cool_thingies
+function caml_do_cool_thingies() {
+ assert(1 + 1 === 2);
+}
+```
+
+The comment above the function actually tells JSOO which `external` function it _provides_! This way JSOO knows how to connect `external` functions to their implementations. The comments used here have their own little syntax; I recommend checking it out in the JSOO docs.
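+
+The same annotation syntax also lets one bindings function depend on another. A hypothetical sketch (these functions don't exist in this repo):
+
+```js
+// Provides: caml_do_cool_thingies_twice
+// Requires: caml_do_cool_thingies
+function caml_do_cool_thingies_twice() {
+  // Because of the Requires annotation above, JSOO links
+  // caml_do_cool_thingies whenever this function is linked.
+  caml_do_cool_thingies();
+  caml_do_cool_thingies();
+}
+```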
+
+In our case, however, the implementation of the function isn't directly in JavaScript - it's in Rust compiled to WASM! So we use these bindings files to point the implementation at WASM; we usually do this by injecting a WASM object or proxy into our bindings layer (see `../web/web_backend.js` and `../node_js/node_backend.js` for the web and node implementations respectively).
+
+We then use this injected WASM object in our proxy functions:
+
+```js
+// Provides: caml_do_cool_thingies
+// Requires: plonk_wasm
+function caml_do_cool_thingies() {
+ plonk_wasm.caml_do_cool_thingies();
+}
+```
+
+So now, instead of using an implementation in JavaScript, we call directly into the Rust implementation that has been compiled to WASM! This means that whenever something in OCaml invokes `FunnyLittleModule.do_cool_thingies`, it automatically resolves to `caml_do_cool_thingies` in Rust, compiled to WASM.
+
+Previously, all of these bindings lived in one single file, `bindings.js`, which made them hard to understand. Now the bindings are split into separate files, each with its own responsibility.
+
+Sometimes these "proxy" functions don't call into WASM directly, but first do some pre-computation, as in this example:
+
+```js
+// Provides: caml_pasta_fp_plonk_proof_create
+// Requires: plonk_wasm, tsRustConversion
+var caml_pasta_fp_plonk_proof_create = function (
+ index,
+ witness_cols,
+ caml_runtime_tables,
+ prev_challenges,
+ prev_sgs
+) {
+ var w = new plonk_wasm.WasmVecVecFp(witness_cols.length - 1);
+ for (var i = 1; i < witness_cols.length; i++) {
+ w.push(tsRustConversion.fp.vectorToRust(witness_cols[i]));
+ }
+ witness_cols = w;
+ prev_challenges = tsRustConversion.fp.vectorToRust(prev_challenges);
+ var wasm_runtime_tables =
+ tsRustConversion.fp.runtimeTablesToRust(caml_runtime_tables);
+ prev_sgs = tsRustConversion.fp.pointsToRust(prev_sgs);
+ var proof = plonk_wasm.caml_pasta_fp_plonk_proof_create(
+ index,
+ witness_cols,
+ wasm_runtime_tables,
+ prev_challenges,
+ prev_sgs
+ );
+ return tsRustConversion.fp.proofFromRust(proof);
+};
+```
+
+So just keep in mind that it's not always as simple as forwarding the implementation to WASM - occasionally some more work needs to be done :)
diff --git a/src/lib/crypto/kimchi_bindings/js/bindings-bigint256.js b/src/lib/crypto/kimchi_bindings/js/bindings/bigint256.js
similarity index 100%
rename from src/lib/crypto/kimchi_bindings/js/bindings-bigint256.js
rename to src/lib/crypto/kimchi_bindings/js/bindings/bigint256.js
diff --git a/src/lib/crypto/kimchi_bindings/js/bindings-curve.js b/src/lib/crypto/kimchi_bindings/js/bindings/curve.js
similarity index 100%
rename from src/lib/crypto/kimchi_bindings/js/bindings-curve.js
rename to src/lib/crypto/kimchi_bindings/js/bindings/curve.js
diff --git a/src/lib/crypto/kimchi_bindings/js/bindings-field.js b/src/lib/crypto/kimchi_bindings/js/bindings/field.js
similarity index 100%
rename from src/lib/crypto/kimchi_bindings/js/bindings-field.js
rename to src/lib/crypto/kimchi_bindings/js/bindings/field.js
diff --git a/src/lib/crypto/kimchi_bindings/js/bindings/gate-vector.js b/src/lib/crypto/kimchi_bindings/js/bindings/gate-vector.js
new file mode 100644
index 00000000000..4a4d0d04c37
--- /dev/null
+++ b/src/lib/crypto/kimchi_bindings/js/bindings/gate-vector.js
@@ -0,0 +1,123 @@
+/* eslint-disable no-unused-vars */
+/* global plonk_wasm, caml_string_of_jsstring,
+ free_on_finalize, tsRustConversion, caml_bytes_of_uint8array
+*/
+
+// Provides: caml_pasta_fp_plonk_gate_vector_create
+// Requires: plonk_wasm, free_on_finalize
+var caml_pasta_fp_plonk_gate_vector_create = function () {
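+  // free_on_finalize (see util.js) registers the Rust object so its WASM
+  // memory is freed once the JS wrapper is garbage collected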
+ return free_on_finalize(plonk_wasm.caml_pasta_fp_plonk_gate_vector_create());
+};
+
+// Provides: caml_pasta_fp_plonk_gate_vector_add
+// Requires: plonk_wasm, tsRustConversion
+var caml_pasta_fp_plonk_gate_vector_add = function (v, x) {
+ return plonk_wasm.caml_pasta_fp_plonk_gate_vector_add(
+ v,
+ tsRustConversion.fp.gateToRust(x)
+ );
+};
+
+// Provides: caml_pasta_fp_plonk_gate_vector_get
+// Requires: plonk_wasm, tsRustConversion
+var caml_pasta_fp_plonk_gate_vector_get = function (v, i) {
+ return tsRustConversion.fp.gateFromRust(
+ plonk_wasm.caml_pasta_fp_plonk_gate_vector_get(v, i)
+ );
+};
+
+// Provides: caml_pasta_fp_plonk_gate_vector_len
+// Requires: plonk_wasm
+var caml_pasta_fp_plonk_gate_vector_len = function (v) {
+ return plonk_wasm.caml_pasta_fp_plonk_gate_vector_len(v);
+};
+
+// Provides: caml_pasta_fp_plonk_gate_vector_wrap
+// Requires: plonk_wasm, tsRustConversion
+var caml_pasta_fp_plonk_gate_vector_wrap = function (v, x, y) {
+ return plonk_wasm.caml_pasta_fp_plonk_gate_vector_wrap(
+ v,
+ tsRustConversion.wireToRust(x),
+ tsRustConversion.wireToRust(y)
+ );
+};
+
+// Provides: caml_pasta_fp_plonk_gate_vector_digest
+// Requires: plonk_wasm, caml_bytes_of_uint8array
+var caml_pasta_fp_plonk_gate_vector_digest = function (
+ public_input_size,
+ gate_vector
+) {
+ var uint8array = plonk_wasm.caml_pasta_fp_plonk_gate_vector_digest(
+ public_input_size,
+ gate_vector
+ );
+ return caml_bytes_of_uint8array(uint8array);
+};
+
+// Provides: caml_pasta_fp_plonk_circuit_serialize
+// Requires: plonk_wasm, caml_string_of_jsstring
+var caml_pasta_fp_plonk_circuit_serialize = function (
+ public_input_size,
+ gate_vector
+) {
+ return caml_string_of_jsstring(
+ plonk_wasm.caml_pasta_fp_plonk_circuit_serialize(
+ public_input_size,
+ gate_vector
+ )
+ );
+};
+
+// Provides: caml_pasta_fq_plonk_gate_vector_create
+// Requires: plonk_wasm, free_on_finalize
+var caml_pasta_fq_plonk_gate_vector_create = function () {
+ return free_on_finalize(plonk_wasm.caml_pasta_fq_plonk_gate_vector_create());
+};
+
+// Provides: caml_pasta_fq_plonk_gate_vector_add
+// Requires: plonk_wasm, tsRustConversion
+var caml_pasta_fq_plonk_gate_vector_add = function (v, x) {
+ return plonk_wasm.caml_pasta_fq_plonk_gate_vector_add(
+ v,
+ tsRustConversion.fq.gateToRust(x)
+ );
+};
+
+// Provides: caml_pasta_fq_plonk_gate_vector_get
+// Requires: plonk_wasm, tsRustConversion
+var caml_pasta_fq_plonk_gate_vector_get = function (v, i) {
+ return tsRustConversion.fq.gateFromRust(
+ plonk_wasm.caml_pasta_fq_plonk_gate_vector_get(v, i)
+ );
+};
+
+// Provides: caml_pasta_fq_plonk_gate_vector_len
+// Requires: plonk_wasm
+var caml_pasta_fq_plonk_gate_vector_len = function (v) {
+ return plonk_wasm.caml_pasta_fq_plonk_gate_vector_len(v);
+};
+
+// Provides: caml_pasta_fq_plonk_gate_vector_wrap
+// Requires: plonk_wasm, tsRustConversion
+var caml_pasta_fq_plonk_gate_vector_wrap = function (v, x, y) {
+ return plonk_wasm.caml_pasta_fq_plonk_gate_vector_wrap(
+ v,
+ tsRustConversion.wireToRust(x),
+ tsRustConversion.wireToRust(y)
+ );
+};
+
+// Provides: caml_pasta_fq_plonk_gate_vector_digest
+// Requires: plonk_wasm, caml_bytes_of_uint8array
+var caml_pasta_fq_plonk_gate_vector_digest = function (
+ public_input_size,
+ gate_vector
+) {
+ var uint8array = plonk_wasm.caml_pasta_fq_plonk_gate_vector_digest(
+ public_input_size,
+ gate_vector
+ );
+ return caml_bytes_of_uint8array(uint8array);
+};
diff --git a/src/lib/crypto/kimchi_bindings/js/bindings/oracles.js b/src/lib/crypto/kimchi_bindings/js/bindings/oracles.js
new file mode 100644
index 00000000000..1411fd73613
--- /dev/null
+++ b/src/lib/crypto/kimchi_bindings/js/bindings/oracles.js
@@ -0,0 +1,105 @@
+/* global plonk_wasm, tsRustConversion
+ */
+
+// Provides: fp_oracles_create
+// Requires: plonk_wasm, tsRustConversion
+var fp_oracles_create = function (lgr_comm, verifier_index, proof) {
+ return tsRustConversion.fp.oraclesFromRust(
+ plonk_wasm.fp_oracles_create(
+ tsRustConversion.fp.polyCommsToRust(lgr_comm),
+ tsRustConversion.fp.verifierIndexToRust(verifier_index),
+ tsRustConversion.fp.proofToRust(proof)
+ )
+ );
+};
+
+// Provides: fp_oracles_create_no_public
+// Requires: fp_oracles_create
+var fp_oracles_create_no_public = function (lgr_comm, verifier_index, proof) {
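+  // [0, 0, proof] builds an OCaml block (tag 0) whose first field is a
+  // placeholder zero - standing in for the absent public input - and whose
+  // second field is the proof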
+ return fp_oracles_create(lgr_comm, verifier_index, [0, 0, proof]);
+};
+
+// Provides: fp_oracles_dummy
+// Requires: plonk_wasm, tsRustConversion
+var fp_oracles_dummy = function () {
+ return tsRustConversion.fp.oraclesFromRust(plonk_wasm.fp_oracles_dummy());
+};
+
+// Provides: fp_oracles_deep_copy
+// Requires: plonk_wasm, tsRustConversion
+var fp_oracles_deep_copy = function (x) {
+ return tsRustConversion.fp.oraclesFromRust(
+ plonk_wasm.fp_oracles_deep_copy(tsRustConversion.fp.oraclesToRust(x))
+ );
+};
+
+// Provides: fq_oracles_create
+// Requires: plonk_wasm, tsRustConversion
+var fq_oracles_create = function (lgr_comm, verifier_index, proof) {
+ return tsRustConversion.fq.oraclesFromRust(
+ plonk_wasm.fq_oracles_create(
+ tsRustConversion.fq.polyCommsToRust(lgr_comm),
+ tsRustConversion.fq.verifierIndexToRust(verifier_index),
+ tsRustConversion.fq.proofToRust(proof)
+ )
+ );
+};
+
+// Provides: fq_oracles_create_no_public
+// Requires: fq_oracles_create
+var fq_oracles_create_no_public = function (lgr_comm, verifier_index, proof) {
+ return fq_oracles_create(lgr_comm, verifier_index, [0, 0, proof]);
+};
+
+// Provides: fq_oracles_dummy
+// Requires: plonk_wasm, tsRustConversion
+var fq_oracles_dummy = function () {
+ return tsRustConversion.fq.oraclesFromRust(plonk_wasm.fq_oracles_dummy());
+};
+
+// Provides: fq_oracles_deep_copy
+// Requires: plonk_wasm, tsRustConversion
+var fq_oracles_deep_copy = function (x) {
+ return tsRustConversion.fq.oraclesFromRust(
+ plonk_wasm.fq_oracles_deep_copy(tsRustConversion.fq.oraclesToRust(x))
+ );
+};
+
+// This is fake -- parameters are only needed on the Rust side, so no need to return something meaningful
+// Provides: caml_pasta_fp_poseidon_params_create
+function caml_pasta_fp_poseidon_params_create() {
+ return [0];
+}
+// Provides: caml_pasta_fq_poseidon_params_create
+function caml_pasta_fq_poseidon_params_create() {
+ return [0];
+}
+
+// Provides: caml_pasta_fp_poseidon_block_cipher
+// Requires: plonk_wasm, tsRustConversion
+function caml_pasta_fp_poseidon_block_cipher(_fake_params, fp_vector) {
+ // 1. get permuted field vector from rust
+ var wasm_flat_vector = plonk_wasm.caml_pasta_fp_poseidon_block_cipher(
+ tsRustConversion.fp.vectorToRust(fp_vector)
+ );
+ var new_fp_vector = tsRustConversion.fp.vectorFromRust(wasm_flat_vector);
+ // 2. write back modified field vector to original one
+ new_fp_vector.forEach(function (a, i) {
+ fp_vector[i] = a;
+ });
+}
+
+// Provides: caml_pasta_fq_poseidon_block_cipher
+// Requires: plonk_wasm, tsRustConversion
+function caml_pasta_fq_poseidon_block_cipher(_fake_params, fq_vector) {
+ // 1. get permuted field vector from rust
+ var wasm_flat_vector = plonk_wasm.caml_pasta_fq_poseidon_block_cipher(
+ tsRustConversion.fq.vectorToRust(fq_vector)
+ );
+ var new_fq_vector = tsRustConversion.fq.vectorFromRust(wasm_flat_vector);
+ // 2. write back modified field vector to original one
+ new_fq_vector.forEach(function (a, i) {
+ fq_vector[i] = a;
+ });
+}
diff --git a/src/lib/crypto/kimchi_bindings/js/bindings/pickles-test.js b/src/lib/crypto/kimchi_bindings/js/bindings/pickles-test.js
new file mode 100644
index 00000000000..0a68e6f9f46
--- /dev/null
+++ b/src/lib/crypto/kimchi_bindings/js/bindings/pickles-test.js
@@ -0,0 +1,47 @@
+/////////////////////////////////////////////////////////////////////////////
+// The *_example_* functions below are only used in the pickles unit tests //
+/////////////////////////////////////////////////////////////////////////////
+
+// Provides: caml_pasta_fp_plonk_proof_example_with_ffadd
+function caml_pasta_fp_plonk_proof_example_with_ffadd() {
+ throw new Error('Unimplemented caml_pasta_fp_plonk_proof_example_with_ffadd');
+}
+
+// Provides: caml_pasta_fp_plonk_proof_example_with_foreign_field_mul
+function caml_pasta_fp_plonk_proof_example_with_foreign_field_mul() {
+ throw new Error(
+ 'Unimplemented caml_pasta_fp_plonk_proof_example_with_foreign_field_mul'
+ );
+}
+
+// Provides: caml_pasta_fp_plonk_proof_example_with_range_check
+function caml_pasta_fp_plonk_proof_example_with_range_check() {
+ throw new Error(
+ 'Unimplemented caml_pasta_fp_plonk_proof_example_with_range_check'
+ );
+}
+
+// Provides: caml_pasta_fp_plonk_proof_example_with_range_check0
+function caml_pasta_fp_plonk_proof_example_with_range_check0() {
+ throw new Error(
+ 'Unimplemented caml_pasta_fp_plonk_proof_example_with_range_check0'
+ );
+}
+
+// Provides: caml_pasta_fp_plonk_proof_example_with_rot
+function caml_pasta_fp_plonk_proof_example_with_rot() {
+ throw new Error('Unimplemented caml_pasta_fp_plonk_proof_example_with_rot');
+}
+
+// Provides: caml_pasta_fp_plonk_proof_example_with_xor
+function caml_pasta_fp_plonk_proof_example_with_xor() {
+ throw new Error('Unimplemented caml_pasta_fp_plonk_proof_example_with_xor');
+}
+
+// Provides: caml_pasta_fp_plonk_proof_example_with_lookup
+function caml_pasta_fp_plonk_proof_example_with_lookup() {
+ // This is only used in the pickles unit tests
+ throw new Error(
+ 'Unimplemented caml_pasta_fp_plonk_proof_example_with_lookup'
+ );
+}
diff --git a/src/lib/crypto/kimchi_bindings/js/bindings/proof.js b/src/lib/crypto/kimchi_bindings/js/bindings/proof.js
new file mode 100644
index 00000000000..b0de9eddc60
--- /dev/null
+++ b/src/lib/crypto/kimchi_bindings/js/bindings/proof.js
@@ -0,0 +1,143 @@
+/* global plonk_wasm, tsRustConversion
+ */
+
+// Provides: caml_pasta_fp_plonk_proof_create
+// Requires: plonk_wasm, tsRustConversion
+var caml_pasta_fp_plonk_proof_create = function (
+ index,
+ witness_cols,
+ caml_runtime_tables,
+ prev_challenges,
+ prev_sgs
+) {
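+  // witness_cols is an OCaml array, represented in JS as [0, col1, col2, ...]:
+  // slot 0 is the block tag, so the actual columns start at index 1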
+ var w = new plonk_wasm.WasmVecVecFp(witness_cols.length - 1);
+ for (var i = 1; i < witness_cols.length; i++) {
+ w.push(tsRustConversion.fp.vectorToRust(witness_cols[i]));
+ }
+ witness_cols = w;
+ prev_challenges = tsRustConversion.fp.vectorToRust(prev_challenges);
+ var wasm_runtime_tables =
+ tsRustConversion.fp.runtimeTablesToRust(caml_runtime_tables);
+ prev_sgs = tsRustConversion.fp.pointsToRust(prev_sgs);
+ var proof = plonk_wasm.caml_pasta_fp_plonk_proof_create(
+ index,
+ witness_cols,
+ wasm_runtime_tables,
+ prev_challenges,
+ prev_sgs
+ );
+ return tsRustConversion.fp.proofFromRust(proof);
+};
+
+// Provides: caml_pasta_fp_plonk_proof_verify
+// Requires: plonk_wasm, tsRustConversion
+var caml_pasta_fp_plonk_proof_verify = function (index, proof) {
+ index = tsRustConversion.fp.verifierIndexToRust(index);
+ proof = tsRustConversion.fp.proofToRust(proof);
+ return plonk_wasm.caml_pasta_fp_plonk_proof_verify(index, proof);
+};
+
+// Provides: caml_pasta_fp_plonk_proof_batch_verify
+// Requires: plonk_wasm, tsRustConversion
+var caml_pasta_fp_plonk_proof_batch_verify = function (indexes, proofs) {
+ indexes = tsRustConversion.mapMlArrayToRustVector(
+ indexes,
+ tsRustConversion.fp.verifierIndexToRust
+ );
+ proofs = tsRustConversion.mapMlArrayToRustVector(
+ proofs,
+ tsRustConversion.fp.proofToRust
+ );
+ return plonk_wasm.caml_pasta_fp_plonk_proof_batch_verify(indexes, proofs);
+};
+
+// Provides: caml_pasta_fp_plonk_proof_dummy
+// Requires: plonk_wasm, tsRustConversion
+var caml_pasta_fp_plonk_proof_dummy = function () {
+ return tsRustConversion.fp.proofFromRust(
+ plonk_wasm.caml_pasta_fp_plonk_proof_dummy()
+ );
+};
+
+// Provides: caml_pasta_fp_plonk_proof_deep_copy
+// Requires: plonk_wasm, tsRustConversion
+var caml_pasta_fp_plonk_proof_deep_copy = function (proof) {
+ return tsRustConversion.fp.proofFromRust(
+ plonk_wasm.caml_pasta_fp_plonk_proof_deep_copy(
+ tsRustConversion.fp.proofToRust(proof)
+ )
+ );
+};
+
+// Provides: caml_pasta_fq_plonk_proof_create
+// Requires: plonk_wasm, tsRustConversion
+var caml_pasta_fq_plonk_proof_create = function (
+ index,
+ witness_cols,
+ caml_runtime_tables,
+ prev_challenges,
+ prev_sgs
+) {
+ var w = new plonk_wasm.WasmVecVecFq(witness_cols.length - 1);
+ for (var i = 1; i < witness_cols.length; i++) {
+ w.push(tsRustConversion.fq.vectorToRust(witness_cols[i]));
+ }
+ witness_cols = w;
+ prev_challenges = tsRustConversion.fq.vectorToRust(prev_challenges);
+ var wasm_runtime_tables =
+ tsRustConversion.fq.runtimeTablesToRust(caml_runtime_tables);
+ prev_sgs = tsRustConversion.fq.pointsToRust(prev_sgs);
+ var proof = plonk_wasm.caml_pasta_fq_plonk_proof_create(
+ index,
+ witness_cols,
+ wasm_runtime_tables,
+ prev_challenges,
+ prev_sgs
+ );
+ return tsRustConversion.fq.proofFromRust(proof);
+};
+
+// Provides: caml_pasta_fq_plonk_proof_verify
+// Requires: plonk_wasm, tsRustConversion
+var caml_pasta_fq_plonk_proof_verify = function (index, proof) {
+ index = tsRustConversion.fq.verifierIndexToRust(index);
+ proof = tsRustConversion.fq.proofToRust(proof);
+ return plonk_wasm.caml_pasta_fq_plonk_proof_verify(index, proof);
+};
+
+// Provides: caml_pasta_fq_plonk_proof_batch_verify
+// Requires: plonk_wasm, tsRustConversion
+var caml_pasta_fq_plonk_proof_batch_verify = function (indexes, proofs) {
+ indexes = tsRustConversion.mapMlArrayToRustVector(
+ indexes,
+ tsRustConversion.fq.verifierIndexToRust
+ );
+ proofs = tsRustConversion.mapMlArrayToRustVector(
+ proofs,
+ tsRustConversion.fq.proofToRust
+ );
+ return plonk_wasm.caml_pasta_fq_plonk_proof_batch_verify(indexes, proofs);
+};
+
+// Provides: caml_pasta_fq_plonk_proof_dummy
+// Requires: plonk_wasm, tsRustConversion
+var caml_pasta_fq_plonk_proof_dummy = function () {
+ return tsRustConversion.fq.proofFromRust(
+ plonk_wasm.caml_pasta_fq_plonk_proof_dummy()
+ );
+};
+
+// Provides: caml_pasta_fq_plonk_proof_deep_copy
+// Requires: plonk_wasm, tsRustConversion
+var caml_pasta_fq_plonk_proof_deep_copy = function (proof) {
+ return tsRustConversion.fq.proofFromRust(
+ plonk_wasm.caml_pasta_fq_plonk_proof_deep_copy(
+ tsRustConversion.fq.proofToRust(proof)
+ )
+ );
+};
+
+// Provides: prover_to_json
+// Requires: plonk_wasm
+var prover_to_json = plonk_wasm.prover_to_json;
diff --git a/src/lib/crypto/kimchi_bindings/js/bindings/prover-index.js b/src/lib/crypto/kimchi_bindings/js/bindings/prover-index.js
new file mode 100644
index 00000000000..163a38dce22
--- /dev/null
+++ b/src/lib/crypto/kimchi_bindings/js/bindings/prover-index.js
@@ -0,0 +1,223 @@
+/* global plonk_wasm, tsRustConversion, caml_string_of_jsstring,
+ free_on_finalize, caml_jsstring_of_string
+ */
+
+// Provides: caml_pasta_fq_plonk_circuit_serialize
+// Requires: plonk_wasm, caml_string_of_jsstring
+var caml_pasta_fq_plonk_circuit_serialize = function (
+ public_input_size,
+ gate_vector
+) {
+ return caml_string_of_jsstring(
+ plonk_wasm.caml_pasta_fq_plonk_circuit_serialize(
+ public_input_size,
+ gate_vector
+ )
+ );
+};
+
+// Provides: caml_pasta_fp_plonk_index_create
+// Requires: plonk_wasm, free_on_finalize, tsRustConversion
+var caml_pasta_fp_plonk_index_create = function (
+ gates,
+ public_inputs,
+ caml_lookup_tables,
+ caml_runtime_table_cfgs,
+ prev_challenges,
+ urs
+) {
+ var wasm_lookup_tables =
+ tsRustConversion.fp.lookupTablesToRust(caml_lookup_tables);
+ var wasm_runtime_table_cfgs = tsRustConversion.fp.runtimeTableCfgsToRust(
+ caml_runtime_table_cfgs
+ );
+
+ var t = plonk_wasm.caml_pasta_fp_plonk_index_create(
+ gates,
+ public_inputs,
+ wasm_lookup_tables,
+ wasm_runtime_table_cfgs,
+ prev_challenges,
+ urs
+ );
+ return free_on_finalize(t);
+};
+
+// Provides: caml_pasta_fp_plonk_index_create_bytecode
+// Requires: caml_pasta_fp_plonk_index_create
+var caml_pasta_fp_plonk_index_create_bytecode = function (
+ gates,
+ public_inputs,
+ caml_lookup_tables,
+ caml_runtime_table_cfgs,
+ prev_challenges,
+ urs
+) {
+ return caml_pasta_fp_plonk_index_create(
+ gates,
+ public_inputs,
+ caml_lookup_tables,
+ caml_runtime_table_cfgs,
+ prev_challenges,
+ urs
+ );
+};
+
+// Provides: caml_pasta_fp_plonk_index_max_degree
+// Requires: plonk_wasm
+var caml_pasta_fp_plonk_index_max_degree =
+ plonk_wasm.caml_pasta_fp_plonk_index_max_degree;
+
+// Provides: caml_pasta_fp_plonk_index_public_inputs
+// Requires: plonk_wasm
+var caml_pasta_fp_plonk_index_public_inputs =
+ plonk_wasm.caml_pasta_fp_plonk_index_public_inputs;
+
+// Provides: caml_pasta_fp_plonk_index_domain_d1_size
+// Requires: plonk_wasm
+var caml_pasta_fp_plonk_index_domain_d1_size =
+ plonk_wasm.caml_pasta_fp_plonk_index_domain_d1_size;
+
+// Provides: caml_pasta_fp_plonk_index_domain_d4_size
+// Requires: plonk_wasm
+var caml_pasta_fp_plonk_index_domain_d4_size =
+ plonk_wasm.caml_pasta_fp_plonk_index_domain_d4_size;
+
+// Provides: caml_pasta_fp_plonk_index_domain_d8_size
+// Requires: plonk_wasm
+var caml_pasta_fp_plonk_index_domain_d8_size =
+ plonk_wasm.caml_pasta_fp_plonk_index_domain_d8_size;
+
+// Provides: caml_pasta_fp_plonk_index_read
+// Requires: plonk_wasm, caml_jsstring_of_string
+var caml_pasta_fp_plonk_index_read = function (offset, urs, path) {
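+  // offset is an OCaml (int option): 0 encodes None, [0, n] encodes Some n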
+ if (offset === 0) {
+ offset = undefined;
+ } else {
+ offset = offset[1];
+ }
+ return plonk_wasm.caml_pasta_fp_plonk_index_read(
+ offset,
+ urs,
+ caml_jsstring_of_string(path)
+ );
+};
+
+// Provides: caml_pasta_fp_plonk_index_write
+// Requires: plonk_wasm, caml_jsstring_of_string
+var caml_pasta_fp_plonk_index_write = function (append, t, path) {
+ if (append === 0) {
+ append = undefined;
+ } else {
+ append = append[1];
+ }
+ return plonk_wasm.caml_pasta_fp_plonk_index_write(
+ append,
+ t,
+ caml_jsstring_of_string(path)
+ );
+};
+
+// Provides: caml_pasta_fq_plonk_index_create
+// Requires: plonk_wasm, free_on_finalize, tsRustConversion
+var caml_pasta_fq_plonk_index_create = function (
+ gates,
+ public_inputs,
+ caml_lookup_tables,
+ caml_runtime_table_cfgs,
+ prev_challenges,
+ urs
+) {
+ var wasm_lookup_tables =
+ tsRustConversion.fq.lookupTablesToRust(caml_lookup_tables);
+ var wasm_runtime_table_cfgs = tsRustConversion.fq.runtimeTableCfgsToRust(
+ caml_runtime_table_cfgs
+ );
+
+ return free_on_finalize(
+ plonk_wasm.caml_pasta_fq_plonk_index_create(
+ gates,
+ public_inputs,
+ wasm_lookup_tables,
+ wasm_runtime_table_cfgs,
+ prev_challenges,
+ urs
+ )
+ );
+};
+
+// Provides: caml_pasta_fq_plonk_index_create_bytecode
+// Requires: caml_pasta_fq_plonk_index_create
+var caml_pasta_fq_plonk_index_create_bytecode = function (
+ gates,
+ public_inputs,
+ caml_lookup_tables,
+ caml_runtime_table_cfgs,
+ prev_challenges,
+ urs
+) {
+ return caml_pasta_fq_plonk_index_create(
+ gates,
+ public_inputs,
+ caml_lookup_tables,
+ caml_runtime_table_cfgs,
+ prev_challenges,
+ urs
+ );
+};
+
+// Provides: caml_pasta_fq_plonk_index_max_degree
+// Requires: plonk_wasm
+var caml_pasta_fq_plonk_index_max_degree =
+ plonk_wasm.caml_pasta_fq_plonk_index_max_degree;
+
+// Provides: caml_pasta_fq_plonk_index_public_inputs
+// Requires: plonk_wasm
+var caml_pasta_fq_plonk_index_public_inputs =
+ plonk_wasm.caml_pasta_fq_plonk_index_public_inputs;
+
+// Provides: caml_pasta_fq_plonk_index_domain_d1_size
+// Requires: plonk_wasm
+var caml_pasta_fq_plonk_index_domain_d1_size =
+ plonk_wasm.caml_pasta_fq_plonk_index_domain_d1_size;
+
+// Provides: caml_pasta_fq_plonk_index_domain_d4_size
+// Requires: plonk_wasm
+var caml_pasta_fq_plonk_index_domain_d4_size =
+ plonk_wasm.caml_pasta_fq_plonk_index_domain_d4_size;
+
+// Provides: caml_pasta_fq_plonk_index_domain_d8_size
+// Requires: plonk_wasm
+var caml_pasta_fq_plonk_index_domain_d8_size =
+ plonk_wasm.caml_pasta_fq_plonk_index_domain_d8_size;
+
+// Provides: caml_pasta_fq_plonk_index_read
+// Requires: plonk_wasm, caml_jsstring_of_string
+var caml_pasta_fq_plonk_index_read = function (offset, urs, path) {
+ if (offset === 0) {
+ offset = undefined;
+ } else {
+ offset = offset[1];
+ }
+ return plonk_wasm.caml_pasta_fq_plonk_index_read(
+ offset,
+ urs,
+ caml_jsstring_of_string(path)
+ );
+};
+
+// Provides: caml_pasta_fq_plonk_index_write
+// Requires: plonk_wasm, caml_jsstring_of_string
+var caml_pasta_fq_plonk_index_write = function (append, t, path) {
+ if (append === 0) {
+ append = undefined;
+ } else {
+ append = append[1];
+ }
+ return plonk_wasm.caml_pasta_fq_plonk_index_write(
+ append,
+ t,
+ caml_jsstring_of_string(path)
+ );
+};
diff --git a/src/lib/crypto/kimchi_bindings/js/bindings/srs.js b/src/lib/crypto/kimchi_bindings/js/bindings/srs.js
new file mode 100644
index 00000000000..d61480d422d
--- /dev/null
+++ b/src/lib/crypto/kimchi_bindings/js/bindings/srs.js
@@ -0,0 +1,191 @@
+/* global plonk_wasm, caml_jsstring_of_string,
+ tsBindings, tsRustConversion
+*/
+
+// Provides: tsSrs
+// Requires: tsBindings, plonk_wasm
+var tsSrs = tsBindings.srs(plonk_wasm);
+
+// srs
+
+// Provides: caml_fp_srs_create
+// Requires: tsSrs
+var caml_fp_srs_create = tsSrs.fp.create;
+
+// Provides: caml_fp_srs_write
+// Requires: plonk_wasm, caml_jsstring_of_string
+var caml_fp_srs_write = function (append, t, path) {
+ if (append === 0) {
+ append = undefined;
+ } else {
+ append = append[1];
+ }
+ return plonk_wasm.caml_fp_srs_write(append, t, caml_jsstring_of_string(path));
+};
+
+// Provides: caml_fp_srs_read
+// Requires: plonk_wasm, caml_jsstring_of_string
+var caml_fp_srs_read = function (offset, path) {
+ if (offset === 0) {
+ offset = undefined;
+ } else {
+ offset = offset[1];
+ }
+ var res = plonk_wasm.caml_fp_srs_read(offset, caml_jsstring_of_string(path));
+ if (res) {
+ return [0, res]; // Some(res)
+ } else {
+ return 0; // None
+ }
+};
+
+// Provides: caml_fp_srs_lagrange_commitment
+// Requires: tsSrs
+var caml_fp_srs_lagrange_commitment = tsSrs.fp.lagrangeCommitment;
+
+// Provides: caml_fp_srs_commit_evaluations
+// Requires: plonk_wasm, tsRustConversion
+var caml_fp_srs_commit_evaluations = function (t, domain_size, fps) {
+ var res = plonk_wasm.caml_fp_srs_commit_evaluations(
+ t,
+ domain_size,
+ tsRustConversion.fp.vectorToRust(fps)
+ );
+ return tsRustConversion.fp.polyCommFromRust(res);
+};
+
+// Provides: caml_fp_srs_b_poly_commitment
+// Requires: plonk_wasm, tsRustConversion
+var caml_fp_srs_b_poly_commitment = function (srs, chals) {
+ var res = plonk_wasm.caml_fp_srs_b_poly_commitment(
+ srs,
+ tsRustConversion.fieldsToRustFlat(chals)
+ );
+ return tsRustConversion.fp.polyCommFromRust(res);
+};
+
+// Provides: caml_fp_srs_batch_accumulator_check
+// Requires: plonk_wasm, tsRustConversion
+var caml_fp_srs_batch_accumulator_check = function (srs, comms, chals) {
+ var rust_comms = tsRustConversion.fp.pointsToRust(comms);
+ var rust_chals = tsRustConversion.fp.vectorToRust(chals);
+ var ok = plonk_wasm.caml_fp_srs_batch_accumulator_check(
+ srs,
+ rust_comms,
+ rust_chals
+ );
+ return ok;
+};
+
+// Provides: caml_fp_srs_batch_accumulator_generate
+// Requires: plonk_wasm, tsRustConversion
+var caml_fp_srs_batch_accumulator_generate = function (srs, n_comms, chals) {
+ var rust_chals = tsRustConversion.fp.vectorToRust(chals);
+ var rust_comms = plonk_wasm.caml_fp_srs_batch_accumulator_generate(
+ srs,
+ n_comms,
+ rust_chals
+ );
+ return tsRustConversion.fp.pointsFromRust(rust_comms);
+};
+
+// Provides: caml_fp_srs_h
+// Requires: plonk_wasm, tsRustConversion
+var caml_fp_srs_h = function (t) {
+ return tsRustConversion.fp.pointFromRust(plonk_wasm.caml_fp_srs_h(t));
+};
+
+// Provides: caml_fp_srs_add_lagrange_basis
+// Requires: tsSrs
+var caml_fp_srs_add_lagrange_basis = tsSrs.fp.addLagrangeBasis;
+
+// Provides: caml_fq_srs_create
+// Requires: tsSrs
+var caml_fq_srs_create = tsSrs.fq.create;
+
+// Provides: caml_fq_srs_write
+// Requires: plonk_wasm, caml_jsstring_of_string
+var caml_fq_srs_write = function (append, t, path) {
+ if (append === 0) {
+ append = undefined;
+ } else {
+ append = append[1];
+ }
+ return plonk_wasm.caml_fq_srs_write(append, t, caml_jsstring_of_string(path));
+};
+
+// Provides: caml_fq_srs_read
+// Requires: plonk_wasm, caml_jsstring_of_string
+var caml_fq_srs_read = function (offset, path) {
+ if (offset === 0) {
+ offset = undefined;
+ } else {
+ offset = offset[1];
+ }
+ var res = plonk_wasm.caml_fq_srs_read(offset, caml_jsstring_of_string(path));
+ if (res) {
+ return [0, res]; // Some(res)
+ } else {
+ return 0; // None
+ }
+};
+
+// Provides: caml_fq_srs_lagrange_commitment
+// Requires: tsSrs
+var caml_fq_srs_lagrange_commitment = tsSrs.fq.lagrangeCommitment;
+
+// Provides: caml_fq_srs_commit_evaluations
+// Requires: plonk_wasm, tsRustConversion
+var caml_fq_srs_commit_evaluations = function (t, domain_size, fqs) {
+ var res = plonk_wasm.caml_fq_srs_commit_evaluations(
+ t,
+ domain_size,
+ tsRustConversion.fq.vectorToRust(fqs)
+ );
+ return tsRustConversion.fq.polyCommFromRust(res);
+};
+
+// Provides: caml_fq_srs_b_poly_commitment
+// Requires: plonk_wasm, tsRustConversion
+var caml_fq_srs_b_poly_commitment = function (srs, chals) {
+ var res = plonk_wasm.caml_fq_srs_b_poly_commitment(
+ srs,
+ tsRustConversion.fieldsToRustFlat(chals)
+ );
+ return tsRustConversion.fq.polyCommFromRust(res);
+};
+
+// Provides: caml_fq_srs_batch_accumulator_check
+// Requires: plonk_wasm, tsRustConversion
+var caml_fq_srs_batch_accumulator_check = function (srs, comms, chals) {
+ var rust_comms = tsRustConversion.fq.pointsToRust(comms);
+ var rust_chals = tsRustConversion.fq.vectorToRust(chals);
+ var ok = plonk_wasm.caml_fq_srs_batch_accumulator_check(
+ srs,
+ rust_comms,
+ rust_chals
+ );
+ return ok;
+};
+
+// Provides: caml_fq_srs_batch_accumulator_generate
+// Requires: plonk_wasm, tsRustConversion
+var caml_fq_srs_batch_accumulator_generate = function (srs, comms, chals) {
+ var rust_chals = tsRustConversion.fq.vectorToRust(chals);
+ var rust_comms = plonk_wasm.caml_fq_srs_batch_accumulator_generate(
+ srs,
+ comms,
+ rust_chals
+ );
+ return tsRustConversion.fq.pointsFromRust(rust_comms);
+};
+
+// Provides: caml_fq_srs_h
+// Requires: plonk_wasm, tsRustConversion
+var caml_fq_srs_h = function (t) {
+ return tsRustConversion.fq.pointFromRust(plonk_wasm.caml_fq_srs_h(t));
+};
+
+// Provides: caml_fq_srs_add_lagrange_basis
+// Requires: tsSrs
+var caml_fq_srs_add_lagrange_basis = tsSrs.fq.addLagrangeBasis;
diff --git a/src/lib/crypto/kimchi_bindings/js/bindings/util.js b/src/lib/crypto/kimchi_bindings/js/bindings/util.js
new file mode 100644
index 00000000000..8ada833be86
--- /dev/null
+++ b/src/lib/crypto/kimchi_bindings/js/bindings/util.js
@@ -0,0 +1,94 @@
+/* global UInt64, caml_int64_of_int32, caml_create_bytes,
+ caml_bytes_unsafe_set, caml_bytes_unsafe_get, caml_ml_bytes_length,
+ plonk_wasm
+ */
+
+// Provides: tsBindings
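+// the TypeScript bindings object is expected to have been attached to
+// globalThis before this code runs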
+var tsBindings = globalThis.__snarkyTsBindings;
+
+// Provides: tsRustConversion
+// Requires: tsBindings, plonk_wasm
+var tsRustConversion = tsBindings.rustConversion(plonk_wasm);
+
+// Provides: getTsBindings
+// Requires: tsBindings
+function getTsBindings() {
+ return tsBindings;
+}
+
+// Provides: integers_uint64_of_uint32
+// Requires: UInt64, caml_int64_of_int32
+function integers_uint64_of_uint32(i) {
+ // Same as integers_uint64_of_int
+ return new UInt64(caml_int64_of_int32(i));
+}
+
+// Provides: caml_bytes_of_uint8array
+// Requires: caml_create_bytes, caml_bytes_unsafe_set
+var caml_bytes_of_uint8array = function (uint8array) {
+ var length = uint8array.length;
+ var ocaml_bytes = caml_create_bytes(length);
+ for (var i = 0; i < length; i++) {
+ // No need to convert here: OCaml Char.t is just an int under the hood.
+ caml_bytes_unsafe_set(ocaml_bytes, i, uint8array[i]);
+ }
+ return ocaml_bytes;
+};
+
+// Provides: caml_bytes_to_uint8array
+// Requires: caml_ml_bytes_length, caml_bytes_unsafe_get
+var caml_bytes_to_uint8array = function (ocaml_bytes) {
+ var length = caml_ml_bytes_length(ocaml_bytes);
+ var bytes = new globalThis.Uint8Array(length);
+ for (var i = 0; i < length; i++) {
+ // No need to convert here: OCaml Char.t is just an int under the hood.
+ bytes[i] = caml_bytes_unsafe_get(ocaml_bytes, i);
+ }
+ return bytes;
+};
+
+// Provides: caml_option_of_maybe_undefined
+var caml_option_of_maybe_undefined = function (x) {
+ if (x === undefined) {
+ return 0; // None
+ } else {
+ return [0, x]; // Some(x)
+ }
+};
+
+// Provides: caml_option_to_maybe_undefined
+var caml_option_to_maybe_undefined = function (x) {
+ if (x === 0) {
+ // None
+ return undefined;
+ } else {
+ return x[1];
+ }
+};
+
+// Provides: free_finalization_registry
+var free_finalization_registry = new globalThis.FinalizationRegistry(function (
+ instance_representative
+) {
+ instance_representative.free();
+});
+
+// Provides: free_on_finalize
+// Requires: free_finalization_registry
+var free_on_finalize = function (x) {
+ // This is an unfortunate hack: we're creating a second instance of the
+ // class to be able to call free on it. We can't pass the value itself,
+ // since the registry holds a strong reference to the representative value.
+ //
+ // However, the class is only really a wrapper around a pointer, with a
+ // reference to the class' prototype as its __prototype__.
+ //
+ // It might seem cleaner to call the destructor here on the pointer
+ // directly, but unfortunately the destructor name is some mangled internal
+ // string generated by wasm_bindgen. For now, this is the best,
+ // least-brittle way to free once the original class instance gets collected.
+ var instance_representative = x.constructor.__wrap(x.__wbg_ptr);
+ free_finalization_registry.register(x, instance_representative, x);
+ return x;
+};
diff --git a/src/lib/crypto/kimchi_bindings/js/bindings-vector.js b/src/lib/crypto/kimchi_bindings/js/bindings/vector.js
similarity index 100%
rename from src/lib/crypto/kimchi_bindings/js/bindings-vector.js
rename to src/lib/crypto/kimchi_bindings/js/bindings/vector.js
diff --git a/src/lib/crypto/kimchi_bindings/js/bindings/verifier-index.js b/src/lib/crypto/kimchi_bindings/js/bindings/verifier-index.js
new file mode 100644
index 00000000000..877d17084f4
--- /dev/null
+++ b/src/lib/crypto/kimchi_bindings/js/bindings/verifier-index.js
@@ -0,0 +1,153 @@
+/* global plonk_wasm, caml_jsstring_of_string, tsRustConversion
+ */
+
+// Provides: caml_opt_of_rust
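+// Converts a possibly-undefined Rust value into an OCaml option:
+// 0 for None, [0, x] for Some x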
+var caml_opt_of_rust = function (value, value_of_rust) {
+ if (value === undefined) {
+ return 0;
+ } else {
+ return [0, value_of_rust(value)];
+ }
+};
+
+// Provides: caml_opt_to_rust
+var caml_opt_to_rust = function (caml_optional_value, to_rust) {
+ // to_rust expects the parameters of the variant. A `Some vx` is represented
+ // as [0, vx]
+ if (caml_optional_value === 0) {
+ return undefined;
+ } else {
+ return to_rust(caml_optional_value[1]);
+ }
+};
+
+// Provides: caml_pasta_fp_plonk_verifier_index_create
+// Requires: plonk_wasm, tsRustConversion
+var caml_pasta_fp_plonk_verifier_index_create = function (x) {
+ var vk = plonk_wasm.caml_pasta_fp_plonk_verifier_index_create(x);
+ return tsRustConversion.fp.verifierIndexFromRust(vk);
+};
+
+// Provides: caml_pasta_fp_plonk_verifier_index_read
+// Requires: plonk_wasm, caml_jsstring_of_string, tsRustConversion
+var caml_pasta_fp_plonk_verifier_index_read = function (offset, urs, path) {
+ if (offset === 0) {
+ offset = undefined;
+ } else {
+ offset = offset[1];
+ }
+ return tsRustConversion.fp.verifierIndexFromRust(
+ plonk_wasm.caml_pasta_fp_plonk_verifier_index_read(
+ offset,
+ urs,
+ caml_jsstring_of_string(path)
+ )
+ );
+};
+
+// Provides: caml_pasta_fp_plonk_verifier_index_write
+// Requires: plonk_wasm, caml_jsstring_of_string, tsRustConversion
+var caml_pasta_fp_plonk_verifier_index_write = function (append, t, path) {
+ if (append === 0) {
+ append = undefined;
+ } else {
+ append = append[1];
+ }
+ return plonk_wasm.caml_pasta_fp_plonk_verifier_index_write(
+ append,
+ tsRustConversion.fp.verifierIndexToRust(t),
+ caml_jsstring_of_string(path)
+ );
+};
+
+// Provides: caml_pasta_fp_plonk_verifier_index_shifts
+// Requires: plonk_wasm, tsRustConversion
+var caml_pasta_fp_plonk_verifier_index_shifts = function (log2_size) {
+ return tsRustConversion.fp.shiftsFromRust(
+ plonk_wasm.caml_pasta_fp_plonk_verifier_index_shifts(log2_size)
+ );
+};
+
+// Provides: caml_pasta_fp_plonk_verifier_index_dummy
+// Requires: plonk_wasm, tsRustConversion
+var caml_pasta_fp_plonk_verifier_index_dummy = function () {
+ var res = plonk_wasm.caml_pasta_fp_plonk_verifier_index_dummy();
+ return tsRustConversion.fp.verifierIndexFromRust(res);
+};
+
+// Provides: caml_pasta_fp_plonk_verifier_index_deep_copy
+// Requires: plonk_wasm, tsRustConversion
+var caml_pasta_fp_plonk_verifier_index_deep_copy = function (x) {
+ return tsRustConversion.fp.verifierIndexFromRust(
+ plonk_wasm.caml_pasta_fp_plonk_verifier_index_deep_copy(
+ tsRustConversion.fp.verifierIndexToRust(x)
+ )
+ );
+};
+
+// Provides: caml_pasta_fq_plonk_verifier_index_create
+// Requires: plonk_wasm, tsRustConversion
+var caml_pasta_fq_plonk_verifier_index_create = function (x) {
+ return tsRustConversion.fq.verifierIndexFromRust(
+ plonk_wasm.caml_pasta_fq_plonk_verifier_index_create(x)
+ );
+};
+
+// Provides: caml_pasta_fq_plonk_verifier_index_read
+// Requires: plonk_wasm, caml_jsstring_of_string, tsRustConversion
+var caml_pasta_fq_plonk_verifier_index_read = function (offset, urs, path) {
+ if (offset === 0) {
+ offset = undefined;
+ } else {
+ offset = offset[1];
+ }
+ return tsRustConversion.fq.verifierIndexFromRust(
+ plonk_wasm.caml_pasta_fq_plonk_verifier_index_read(
+ offset,
+ urs,
+ caml_jsstring_of_string(path)
+ )
+ );
+};
+
+// Provides: caml_pasta_fq_plonk_verifier_index_write
+// Requires: plonk_wasm, caml_jsstring_of_string, tsRustConversion
+var caml_pasta_fq_plonk_verifier_index_write = function (append, t, path) {
+ if (append === 0) {
+ append = undefined;
+ } else {
+ append = append[1];
+ }
+ return plonk_wasm.caml_pasta_fq_plonk_verifier_index_write(
+ append,
+ tsRustConversion.fq.verifierIndexToRust(t),
+ caml_jsstring_of_string(path)
+ );
+};
+
+// Provides: caml_pasta_fq_plonk_verifier_index_shifts
+// Requires: plonk_wasm, tsRustConversion
+var caml_pasta_fq_plonk_verifier_index_shifts = function (log2_size) {
+ return tsRustConversion.fq.shiftsFromRust(
+ plonk_wasm.caml_pasta_fq_plonk_verifier_index_shifts(log2_size)
+ );
+};
+
+// Provides: caml_pasta_fq_plonk_verifier_index_dummy
+// Requires: plonk_wasm, tsRustConversion
+var caml_pasta_fq_plonk_verifier_index_dummy = function () {
+ return tsRustConversion.fq.verifierIndexFromRust(
+ plonk_wasm.caml_pasta_fq_plonk_verifier_index_dummy()
+ );
+};
+
+// Provides: caml_pasta_fq_plonk_verifier_index_deep_copy
+// Requires: plonk_wasm, tsRustConversion
+var caml_pasta_fq_plonk_verifier_index_deep_copy = function (x) {
+ return tsRustConversion.fq.verifierIndexFromRust(
+ plonk_wasm.caml_pasta_fq_plonk_verifier_index_deep_copy(
+ tsRustConversion.fq.verifierIndexToRust(x)
+ )
+ );
+};
diff --git a/src/lib/crypto/kimchi_bindings/js/dune b/src/lib/crypto/kimchi_bindings/js/dune
index f8746ce705d..7c0eb98fe09 100644
--- a/src/lib/crypto/kimchi_bindings/js/dune
+++ b/src/lib/crypto/kimchi_bindings/js/dune
@@ -3,11 +3,18 @@
(public_name bindings_js)
(js_of_ocaml
(javascript_files
- bindings.js
- bindings-bigint256.js
- bindings-field.js
- bindings-curve.js
- bindings-vector.js))
+ bindings/bigint256.js
+ bindings/field.js
+ bindings/curve.js
+ bindings/vector.js
+ bindings/gate-vector.js
+ bindings/oracles.js
+ bindings/pickles-test.js
+ bindings/proof.js
+ bindings/prover-index.js
+ bindings/util.js
+ bindings/srs.js
+ bindings/verifier-index.js))
(instrumentation
(backend bisect_ppx))
(preprocess
diff --git a/src/lib/crypto/kimchi_bindings/stubs/kimchi_bindings.ml b/src/lib/crypto/kimchi_bindings/stubs/kimchi_bindings.ml
index 07a4d89b681..7d2ebb242f8 100644
--- a/src/lib/crypto/kimchi_bindings/stubs/kimchi_bindings.ml
+++ b/src/lib/crypto/kimchi_bindings/stubs/kimchi_bindings.ml
@@ -107,6 +107,12 @@ module Protocol = struct
-> Pasta_bindings.Fq.t Kimchi_types.or_infinity Kimchi_types.poly_comm
= "caml_fp_srs_lagrange_commitment"
+ external lagrange_commitments_whole_domain :
+ t
+ -> int
+ -> Pasta_bindings.Fq.t Kimchi_types.or_infinity Kimchi_types.poly_comm
+ array = "caml_fp_srs_lagrange_commitments_whole_domain"
+
external add_lagrange_basis : t -> int -> unit
= "caml_fp_srs_add_lagrange_basis"
@@ -156,6 +162,12 @@ module Protocol = struct
-> Pasta_bindings.Fp.t Kimchi_types.or_infinity Kimchi_types.poly_comm
= "caml_fq_srs_lagrange_commitment"
+ external lagrange_commitments_whole_domain :
+ t
+ -> int
+ -> Pasta_bindings.Fp.t Kimchi_types.or_infinity Kimchi_types.poly_comm
+ array = "caml_fq_srs_lagrange_commitments_whole_domain"
+
external add_lagrange_basis : t -> int -> unit
= "caml_fq_srs_add_lagrange_basis"
diff --git a/src/lib/crypto/kimchi_bindings/stubs/src/lagrange_basis.rs b/src/lib/crypto/kimchi_bindings/stubs/src/lagrange_basis.rs
index 818edc09776..a132e142621 100644
--- a/src/lib/crypto/kimchi_bindings/stubs/src/lagrange_basis.rs
+++ b/src/lib/crypto/kimchi_bindings/stubs/src/lagrange_basis.rs
@@ -6,11 +6,11 @@ use poly_commitment::{commitment::CommitmentCurve, srs::SRS};
use std::env;
pub trait WithLagrangeBasis {
- fn with_lagrange_basis(&mut self, domain: D);
+ fn with_lagrange_basis(&self, domain: D);
}
impl WithLagrangeBasis for SRS {
- fn with_lagrange_basis(&mut self, domain: D<::ScalarField>) {
+ fn with_lagrange_basis(&self, domain: D<::ScalarField>) {
match env::var("LAGRANGE_CACHE_DIR") {
Ok(_) => add_lagrange_basis_with_cache(self, domain, cache::get_vesta_file_cache()),
Err(_) => {
@@ -21,7 +21,7 @@ impl WithLagrangeBasis for SRS {
}
impl WithLagrangeBasis for SRS {
- fn with_lagrange_basis(&mut self, domain: D<::ScalarField>) {
+ fn with_lagrange_basis(&self, domain: D<::ScalarField>) {
match env::var("LAGRANGE_CACHE_DIR") {
Ok(_) => add_lagrange_basis_with_cache(self, domain, cache::get_pallas_file_cache()),
Err(_) => {
@@ -32,7 +32,7 @@ impl WithLagrangeBasis for SRS {
}
fn add_lagrange_basis_with_cache>(
- srs: &mut SRS,
+ srs: &SRS,
domain: D,
cache: &C,
) {
@@ -41,7 +41,7 @@ fn add_lagrange_basis_with_cache>(
return;
}
if let Some(basis) = cache.load_lagrange_basis_from_cache(srs.g.len(), &domain) {
- srs.lagrange_bases.get_or_generate(n, || { basis });
+ srs.lagrange_bases.get_or_generate(n, || basis);
return;
} else {
let basis = srs.get_lagrange_basis(domain);
diff --git a/src/lib/crypto/kimchi_bindings/stubs/src/main.rs b/src/lib/crypto/kimchi_bindings/stubs/src/main.rs
index 5b74b542a3f..a3421e345b6 100644
--- a/src/lib/crypto/kimchi_bindings/stubs/src/main.rs
+++ b/src/lib/crypto/kimchi_bindings/stubs/src/main.rs
@@ -363,6 +363,7 @@ fn generate_kimchi_bindings(mut w: impl std::io::Write, env: &mut Env) {
decl_func!(w, env, caml_fp_srs_write => "write");
decl_func!(w, env, caml_fp_srs_read => "read");
decl_func!(w, env, caml_fp_srs_lagrange_commitment => "lagrange_commitment");
+ decl_func!(w, env, caml_fp_srs_lagrange_commitments_whole_domain => "lagrange_commitments_whole_domain");
decl_func!(w, env, caml_fp_srs_add_lagrange_basis=> "add_lagrange_basis");
decl_func!(w, env, caml_fp_srs_commit_evaluations => "commit_evaluations");
decl_func!(w, env, caml_fp_srs_b_poly_commitment => "b_poly_commitment");
@@ -378,6 +379,7 @@ fn generate_kimchi_bindings(mut w: impl std::io::Write, env: &mut Env) {
decl_func!(w, env, caml_fq_srs_write => "write");
decl_func!(w, env, caml_fq_srs_read => "read");
decl_func!(w, env, caml_fq_srs_lagrange_commitment => "lagrange_commitment");
+ decl_func!(w, env, caml_fq_srs_lagrange_commitments_whole_domain => "lagrange_commitments_whole_domain");
decl_func!(w, env, caml_fq_srs_add_lagrange_basis=> "add_lagrange_basis");
decl_func!(w, env, caml_fq_srs_commit_evaluations => "commit_evaluations");
decl_func!(w, env, caml_fq_srs_b_poly_commitment => "b_poly_commitment");
diff --git a/src/lib/crypto/kimchi_bindings/stubs/src/pasta_fp_plonk_index.rs b/src/lib/crypto/kimchi_bindings/stubs/src/pasta_fp_plonk_index.rs
index f750f8769c0..acd6f90a390 100644
--- a/src/lib/crypto/kimchi_bindings/stubs/src/pasta_fp_plonk_index.rs
+++ b/src/lib/crypto/kimchi_bindings/stubs/src/pasta_fp_plonk_index.rs
@@ -87,12 +87,7 @@ pub fn caml_pasta_fp_plonk_index_create(
// endo
let (endo_q, _endo_r) = poly_commitment::srs::endos::();
- // Unsafe if we are in a multi-core ocaml
- {
- let ptr: &mut poly_commitment::srs::SRS =
- unsafe { &mut *(std::sync::Arc::as_ptr(&srs.0) as *mut _) };
- ptr.with_lagrange_basis(cs.domain.d1);
- }
+ srs.0.with_lagrange_basis(cs.domain.d1);
// create index
let mut index = ProverIndex::>::create(cs, endo_q, srs.clone());
diff --git a/src/lib/crypto/kimchi_bindings/stubs/src/pasta_fp_plonk_proof.rs b/src/lib/crypto/kimchi_bindings/stubs/src/pasta_fp_plonk_proof.rs
index dd15d0e1303..23816ccf39c 100644
--- a/src/lib/crypto/kimchi_bindings/stubs/src/pasta_fp_plonk_proof.rs
+++ b/src/lib/crypto/kimchi_bindings/stubs/src/pasta_fp_plonk_proof.rs
@@ -46,10 +46,13 @@ pub fn caml_pasta_fp_plonk_proof_create(
prev_sgs: Vec,
) -> Result, ocaml::Error> {
{
- let ptr: &mut poly_commitment::srs::SRS =
- unsafe { &mut *(std::sync::Arc::as_ptr(&index.as_ref().0.srs) as *mut _) };
- ptr.with_lagrange_basis(index.as_ref().0.cs.domain.d1);
+ index
+ .as_ref()
+ .0
+ .srs
+ .with_lagrange_basis(index.as_ref().0.cs.domain.d1);
}
+
let prev = if prev_challenges.is_empty() {
Vec::new()
} else {
@@ -112,9 +115,11 @@ pub fn caml_pasta_fp_plonk_proof_create_and_verify(
prev_sgs: Vec,
) -> Result, ocaml::Error> {
{
- let ptr: &mut poly_commitment::srs::SRS =
- unsafe { &mut *(std::sync::Arc::as_ptr(&index.as_ref().0.srs) as *mut _) };
- ptr.with_lagrange_basis(index.as_ref().0.cs.domain.d1);
+ index
+ .as_ref()
+ .0
+ .srs
+ .with_lagrange_basis(index.as_ref().0.cs.domain.d1);
}
let prev = if prev_challenges.is_empty() {
Vec::new()
@@ -199,7 +204,7 @@ pub fn caml_pasta_fp_plonk_proof_example_with_lookup(
polynomial::COLUMNS,
wires::Wire,
};
- use poly_commitment::srs::{endos, SRS};
+ use poly_commitment::srs::endos;
let num_gates = 1000;
let num_tables: usize = 5;
@@ -276,8 +281,7 @@ pub fn caml_pasta_fp_plonk_proof_example_with_lookup(
.build()
.unwrap();
- let ptr: &mut SRS = unsafe { &mut *(std::sync::Arc::as_ptr(&srs.0) as *mut _) };
- ptr.with_lagrange_basis(cs.domain.d1);
+ srs.0.with_lagrange_basis(cs.domain.d1);
let (endo_q, _endo_r) = endos::();
let index = ProverIndex::>::create(cs, endo_q, srs.0);
@@ -321,7 +325,7 @@ pub fn caml_pasta_fp_plonk_proof_example_with_foreign_field_mul(
use num_bigint::BigUint;
use num_bigint::RandBigInt;
use o1_utils::{foreign_field::BigUintForeignFieldHelpers, FieldHelpers};
- use poly_commitment::srs::{endos, SRS};
+ use poly_commitment::srs::endos;
use rand::{rngs::StdRng, SeedableRng};
let foreign_field_modulus = Fq::modulus_biguint();
@@ -441,8 +445,7 @@ pub fn caml_pasta_fp_plonk_proof_example_with_foreign_field_mul(
// Create constraint system
let cs = ConstraintSystem::::create(gates).build().unwrap();
- let ptr: &mut SRS = unsafe { &mut *(std::sync::Arc::as_ptr(&srs.0) as *mut _) };
- ptr.with_lagrange_basis(cs.domain.d1);
+ srs.0.with_lagrange_basis(cs.domain.d1);
let (endo_q, _endo_r) = endos::();
let index = ProverIndex::>::create(cs, endo_q, srs.0);
@@ -478,7 +481,7 @@ pub fn caml_pasta_fp_plonk_proof_example_with_range_check(
use num_bigint::BigUint;
use num_bigint::RandBigInt;
use o1_utils::{foreign_field::BigUintForeignFieldHelpers, BigUintFieldHelpers};
- use poly_commitment::srs::{endos, SRS};
+ use poly_commitment::srs::endos;
use rand::{rngs::StdRng, SeedableRng};
let rng = &mut StdRng::from_seed([255u8; 32]);
@@ -508,8 +511,7 @@ pub fn caml_pasta_fp_plonk_proof_example_with_range_check(
// Create constraint system
let cs = ConstraintSystem::::create(gates).build().unwrap();
- let ptr: &mut SRS = unsafe { &mut *(std::sync::Arc::as_ptr(&srs.0) as *mut _) };
- ptr.with_lagrange_basis(cs.domain.d1);
+ srs.0.with_lagrange_basis(cs.domain.d1);
let (endo_q, _endo_r) = endos::();
let index = ProverIndex::>::create(cs, endo_q, srs.0);
@@ -546,7 +548,7 @@ pub fn caml_pasta_fp_plonk_proof_example_with_range_check0(
polynomials::{generic::GenericGateSpec, range_check},
wires::Wire,
};
- use poly_commitment::srs::{endos, SRS};
+ use poly_commitment::srs::endos;
let gates = {
// Public input row with value 0
@@ -581,8 +583,7 @@ pub fn caml_pasta_fp_plonk_proof_example_with_range_check0(
// not sure if theres a smarter way instead of the double unwrap, but should be fine in the test
let cs = ConstraintSystem::::create(gates).build().unwrap();
- let ptr: &mut SRS = unsafe { &mut *(std::sync::Arc::as_ptr(&srs.0) as *mut _) };
- ptr.with_lagrange_basis(cs.domain.d1);
+ srs.0.with_lagrange_basis(cs.domain.d1);
let (endo_q, _endo_r) = endos::();
let index = ProverIndex::>::create(cs, endo_q, srs.0);
@@ -625,7 +626,7 @@ pub fn caml_pasta_fp_plonk_proof_example_with_ffadd(
wires::Wire,
};
use num_bigint::BigUint;
- use poly_commitment::srs::{endos, SRS};
+ use poly_commitment::srs::endos;
// Includes a row to store value 1
let num_public_inputs = 1;
@@ -706,8 +707,7 @@ pub fn caml_pasta_fp_plonk_proof_example_with_ffadd(
.build()
.unwrap();
- let ptr: &mut SRS = unsafe { &mut *(std::sync::Arc::as_ptr(&srs.0) as *mut _) };
- ptr.with_lagrange_basis(cs.domain.d1);
+ srs.0.with_lagrange_basis(cs.domain.d1);
let (endo_q, _endo_r) = endos::();
let index = ProverIndex::>::create(cs, endo_q, srs.0);
@@ -747,7 +747,7 @@ pub fn caml_pasta_fp_plonk_proof_example_with_xor(
polynomials::{generic::GenericGateSpec, xor},
wires::Wire,
};
- use poly_commitment::srs::{endos, SRS};
+ use poly_commitment::srs::endos;
let num_public_inputs = 2;
@@ -795,8 +795,7 @@ pub fn caml_pasta_fp_plonk_proof_example_with_xor(
.build()
.unwrap();
- let ptr: &mut SRS = unsafe { &mut *(std::sync::Arc::as_ptr(&srs.0) as *mut _) };
- ptr.with_lagrange_basis(cs.domain.d1);
+ srs.0.with_lagrange_basis(cs.domain.d1);
let (endo_q, _endo_r) = endos::();
let index = ProverIndex::>::create(cs, endo_q, srs.0);
@@ -839,7 +838,7 @@ pub fn caml_pasta_fp_plonk_proof_example_with_rot(
},
wires::Wire,
};
- use poly_commitment::srs::{endos, SRS};
+ use poly_commitment::srs::endos;
// Includes the actual input of the rotation and a row with the zero value
let num_public_inputs = 2;
@@ -889,8 +888,7 @@ pub fn caml_pasta_fp_plonk_proof_example_with_rot(
.build()
.unwrap();
- let ptr: &mut SRS = unsafe { &mut *(std::sync::Arc::as_ptr(&srs.0) as *mut _) };
- ptr.with_lagrange_basis(cs.domain.d1);
+ srs.0.with_lagrange_basis(cs.domain.d1);
let (endo_q, _endo_r) = endos::();
let index = ProverIndex::>::create(cs, endo_q, srs.0);
diff --git a/src/lib/crypto/kimchi_bindings/stubs/src/pasta_fp_plonk_verifier_index.rs b/src/lib/crypto/kimchi_bindings/stubs/src/pasta_fp_plonk_verifier_index.rs
index 6f05f08773b..ed950841f3f 100644
--- a/src/lib/crypto/kimchi_bindings/stubs/src/pasta_fp_plonk_verifier_index.rs
+++ b/src/lib/crypto/kimchi_bindings/stubs/src/pasta_fp_plonk_verifier_index.rs
@@ -221,11 +221,11 @@ pub fn caml_pasta_fp_plonk_verifier_index_write(
pub fn caml_pasta_fp_plonk_verifier_index_create(
index: CamlPastaFpPlonkIndexPtr,
) -> CamlPastaFpPlonkVerifierIndex {
- {
- let ptr: &mut poly_commitment::srs::SRS =
- unsafe { &mut *(std::sync::Arc::as_ptr(&index.as_ref().0.srs) as *mut _) };
- ptr.with_lagrange_basis(index.as_ref().0.cs.domain.d1);
- }
+ index
+ .as_ref()
+ .0
+ .srs
+ .with_lagrange_basis(index.as_ref().0.cs.domain.d1);
let verifier_index = index.as_ref().0.verifier_index();
verifier_index.into()
}
diff --git a/src/lib/crypto/kimchi_bindings/stubs/src/pasta_fq_plonk_index.rs b/src/lib/crypto/kimchi_bindings/stubs/src/pasta_fq_plonk_index.rs
index c1f6f50f9e9..fc360eb168b 100644
--- a/src/lib/crypto/kimchi_bindings/stubs/src/pasta_fq_plonk_index.rs
+++ b/src/lib/crypto/kimchi_bindings/stubs/src/pasta_fq_plonk_index.rs
@@ -86,12 +86,7 @@ pub fn caml_pasta_fq_plonk_index_create(
// endo
let (endo_q, _endo_r) = poly_commitment::srs::endos::();
- // Unsafe if we are in a multi-core ocaml
- {
- let ptr: &mut poly_commitment::srs::SRS =
- unsafe { &mut *(std::sync::Arc::as_ptr(&srs.0) as *mut _) };
- ptr.with_lagrange_basis(cs.domain.d1);
- }
+ srs.0.with_lagrange_basis(cs.domain.d1);
// create index
let mut index = ProverIndex::>::create(cs, endo_q, srs.clone());
diff --git a/src/lib/crypto/kimchi_bindings/stubs/src/pasta_fq_plonk_proof.rs b/src/lib/crypto/kimchi_bindings/stubs/src/pasta_fq_plonk_proof.rs
index 607d28691ae..14be39c2269 100644
--- a/src/lib/crypto/kimchi_bindings/stubs/src/pasta_fq_plonk_proof.rs
+++ b/src/lib/crypto/kimchi_bindings/stubs/src/pasta_fq_plonk_proof.rs
@@ -41,9 +41,11 @@ pub fn caml_pasta_fq_plonk_proof_create(
prev_sgs: Vec,
) -> Result, ocaml::Error> {
{
- let ptr: &mut poly_commitment::srs::SRS =
- unsafe { &mut *(std::sync::Arc::as_ptr(&index.as_ref().0.srs) as *mut _) };
- ptr.with_lagrange_basis(index.as_ref().0.cs.domain.d1);
+ index
+ .as_ref()
+ .0
+ .srs
+ .with_lagrange_basis(index.as_ref().0.cs.domain.d1);
}
let prev = if prev_challenges.is_empty() {
Vec::new()
diff --git a/src/lib/crypto/kimchi_bindings/stubs/src/pasta_fq_plonk_verifier_index.rs b/src/lib/crypto/kimchi_bindings/stubs/src/pasta_fq_plonk_verifier_index.rs
index 7b81e74a7a8..5251923a42d 100644
--- a/src/lib/crypto/kimchi_bindings/stubs/src/pasta_fq_plonk_verifier_index.rs
+++ b/src/lib/crypto/kimchi_bindings/stubs/src/pasta_fq_plonk_verifier_index.rs
@@ -220,11 +220,11 @@ pub fn caml_pasta_fq_plonk_verifier_index_write(
pub fn caml_pasta_fq_plonk_verifier_index_create(
index: CamlPastaFqPlonkIndexPtr,
) -> CamlPastaFqPlonkVerifierIndex {
- {
- let ptr: &mut poly_commitment::srs::SRS =
- unsafe { &mut *(std::sync::Arc::as_ptr(&index.as_ref().0.srs) as *mut _) };
- ptr.with_lagrange_basis(index.as_ref().0.cs.domain.d1);
- }
+ index
+ .as_ref()
+ .0
+ .srs
+ .with_lagrange_basis(index.as_ref().0.cs.domain.d1);
let verifier_index = index.as_ref().0.verifier_index();
verifier_index.into()
}
diff --git a/src/lib/crypto/kimchi_bindings/stubs/src/srs.rs b/src/lib/crypto/kimchi_bindings/stubs/src/srs.rs
index e32617aeb6e..6269de0aa9d 100644
--- a/src/lib/crypto/kimchi_bindings/stubs/src/srs.rs
+++ b/src/lib/crypto/kimchi_bindings/stubs/src/srs.rs
@@ -1,4 +1,4 @@
-use crate::WithLagrangeBasis;
+use crate::lagrange_basis::WithLagrangeBasis;
use ark_poly::DenseUVPolynomial;
use ark_poly::{univariate::DensePolynomial, EvaluationDomain, Evaluations};
use paste::paste;
@@ -72,6 +72,18 @@ macro_rules! impl_srs {
Ok(Some($name::new(srs)))
}
+ #[ocaml_gen::func]
+ #[ocaml::func]
+ /// This is the same as _lagrange_commitment, but returns the result for
+ /// every i < domain_size.
+ pub fn [<$name:snake _lagrange_commitments_whole_domain>](
+ srs: $name,
+ domain_size: ocaml::Int,
+ ) -> Result>, ocaml::Error> {
+ Ok(srs.get_lagrange_basis_from_domain_size(domain_size as usize).clone().into_iter().map(|x| x.into()).collect())
+ }
+
+
#[ocaml_gen::func]
#[ocaml::func]
pub fn [<$name:snake _lagrange_commitment>](
@@ -84,15 +96,9 @@ macro_rules! impl_srs {
.err()
.unwrap()
})?;
-
- {
- // We're single-threaded, so it's safe to grab this pointer as mutable.
- // Do not try this at home.
- let srs = unsafe { &mut *((&**srs as *const SRS<$G>) as *mut SRS<$G>) as &mut SRS<$G> };
- srs.with_lagrange_basis(x_domain);
- }
-
- Ok(srs.get_lagrange_basis(x_domain)[i as usize].clone().into())
+ srs.with_lagrange_basis(x_domain);
+ let vec_polycomm = srs.get_lagrange_basis_from_domain_size(domain_size as usize);
+ Ok(vec_polycomm[i as usize].clone().into())
}
#[ocaml_gen::func]
@@ -101,10 +107,8 @@ macro_rules! impl_srs {
srs: $name,
log2_size: ocaml::Int,
) {
- let ptr: &mut poly_commitment::srs::SRS<$G> =
- unsafe { &mut *(std::sync::Arc::as_ptr(&srs) as *mut _) };
let domain = EvaluationDomain::<$F>::new(1 << (log2_size as usize)).expect("invalid domain size");
- ptr.with_lagrange_basis(domain);
+ srs.with_lagrange_basis(domain);
}
#[ocaml_gen::func]
diff --git a/src/lib/crypto/kimchi_bindings/wasm/src/plonk_verifier_index.rs b/src/lib/crypto/kimchi_bindings/wasm/src/plonk_verifier_index.rs
index 0fa21bffec2..def0114de61 100644
--- a/src/lib/crypto/kimchi_bindings/wasm/src/plonk_verifier_index.rs
+++ b/src/lib/crypto/kimchi_bindings/wasm/src/plonk_verifier_index.rs
@@ -729,9 +729,10 @@ macro_rules! impl_verification_key {
.lookup_index.as_ref()
.map_or(false, |li| li.lookup_info.features.patterns.lookup);
- // TODO
- let runtime_tables = false;
-
+ let runtime_tables = index
+ .lookup_index.as_ref()
+ .map_or(false, |li| li.runtime_tables_selector.is_some());
+
let patterns = LookupPatterns {
xor,
lookup,
diff --git a/src/lib/crypto/kimchi_bindings/wasm/src/srs.rs b/src/lib/crypto/kimchi_bindings/wasm/src/srs.rs
index 1d436ab1f91..2b33c56d59f 100644
--- a/src/lib/crypto/kimchi_bindings/wasm/src/srs.rs
+++ b/src/lib/crypto/kimchi_bindings/wasm/src/srs.rs
@@ -253,6 +253,9 @@ pub mod fp {
domain_size: i32,
i: i32,
) -> Option {
+ if !(srs.0.lagrange_bases.contains_key(&(domain_size as usize))) {
+ return None;
+ }
let basis = srs.get_lagrange_basis_from_domain_size(domain_size as usize);
Some(basis[i as usize].clone().into())
}
@@ -330,7 +333,10 @@ pub mod fq {
domain_size: i32,
i: i32,
) -> Option {
- let basis = srs.0.get_lagrange_basis_from_domain_size(domain_size as usize);
+ if !(srs.0.lagrange_bases.contains_key(&(domain_size as usize))) {
+ return None;
+ }
+ let basis = srs.get_lagrange_basis_from_domain_size(domain_size as usize);
Some(basis[i as usize].clone().into())
}
diff --git a/src/lib/crypto/snarky_tests/dune b/src/lib/crypto/snarky_tests/dune
index a9710cf679b..5fbfe8499c7 100644
--- a/src/lib/crypto/snarky_tests/dune
+++ b/src/lib/crypto/snarky_tests/dune
@@ -68,4 +68,5 @@
blockchain_snark
transaction_snark
genesis_constants
+ mina_runtime_config
core))
diff --git a/src/lib/crypto/snarky_tests/snarky_tests.ml b/src/lib/crypto/snarky_tests/snarky_tests.ml
index 51db1241b6d..68297f8468f 100644
--- a/src/lib/crypto/snarky_tests/snarky_tests.ml
+++ b/src/lib/crypto/snarky_tests/snarky_tests.ml
@@ -604,14 +604,12 @@ module Protocol_circuits = struct
(* Full because we want to be sure nothing changes *)
let proof_level = Genesis_constants.Proof_level.Full
- let constraint_constants = Genesis_constants.Compiled.constraint_constants
-
let print_hash print expected digest : unit =
if print then (
Format.printf "expected:\n%s\n" expected ;
Format.printf "obtained:\n%s\n" digest )
- let blockchain () : unit =
+ let blockchain ~constraint_constants () : unit =
let expected = "36786c300e37c2a2f1341ad6374aa113" in
let digest =
Blockchain_snark.Blockchain_snark_state.constraint_system_digests
@@ -626,7 +624,7 @@ module Protocol_circuits = struct
assert digests_match ;
()
- let transaction () : unit =
+ let transaction ~constraint_constants () : unit =
let expected1 = "b8879f677f622a1d86648030701f43e1" in
let expected2 = "740db2397b0b01806a48f061a2e2b063" in
let digest =
@@ -651,9 +649,9 @@ module Protocol_circuits = struct
assert check ;
()
- let tests =
- [ ("test blockchain circuit", `Quick, blockchain)
- ; ("test transaction circuit", `Quick, transaction)
+ let tests ~constraint_constants =
+ [ ("test blockchain circuit", `Quick, blockchain ~constraint_constants)
+ ; ("test transaction circuit", `Quick, transaction ~constraint_constants)
]
end
@@ -666,16 +664,25 @@ let api_tests =
]
let () =
+ Async.Thread_safe.block_on_async_exn
+ @@ fun () ->
let range_checks =
List.map ~f:QCheck_alcotest.to_alcotest [ RangeCircuits.test_range_gates ]
in
+ let logger = Logger.create () in
+ let%map.Async.Deferred constraint_constants =
+ let%map.Async.Deferred config =
+ Runtime_config.Constants.load_constants ~logger []
+ in
+ Runtime_config.Constants.constraint_constants config
+ in
Alcotest.run "Simple snarky tests"
[ ("outside of circuit tests before", outside_circuit_tests)
; ("API tests", api_tests)
; ("circuit tests", circuit_tests)
; ("As_prover tests", As_prover_circuits.as_prover_tests)
; ("range checks", range_checks)
- ; ("protocol circuits", Protocol_circuits.tests)
+ ; ("protocol circuits", Protocol_circuits.tests ~constraint_constants)
; ("improper calls", Improper_calls.tests)
(* We run the pure functions before and after other tests,
because we've had bugs in the past where it would only work after the global state was initialized by an API function
diff --git a/src/lib/daemon_rpcs/client.ml b/src/lib/daemon_rpcs/client.ml
index 1c95b95a6bc..cb84061f574 100644
--- a/src/lib/daemon_rpcs/client.ml
+++ b/src/lib/daemon_rpcs/client.ml
@@ -6,29 +6,19 @@ open Async
let print_rpc_error error =
eprintf "RPC connection error: %s\n" (Error.to_string_hum error)
-let dispatch rpc query (host_and_port : Host_and_port.t) =
+let dispatch ~(compile_config : Mina_compile_config.t) rpc query
+ (host_and_port : Host_and_port.t) =
Deferred.Or_error.try_with_join ~here:[%here] (fun () ->
Tcp.with_connection (Tcp.Where_to_connect.of_host_and_port host_and_port)
~timeout:(Time.Span.of_sec 1.) (fun _ r w ->
let open Deferred.Let_syntax in
match%bind
Rpc.Connection.create
- ~handshake_timeout:
- (Time.Span.of_sec
- Node_config_unconfigurable_constants
- .rpc_handshake_timeout_sec )
+ ~handshake_timeout:compile_config.rpc_handshake_timeout
~heartbeat_config:
(Rpc.Connection.Heartbeat_config.create
- ~timeout:
- (Time_ns.Span.of_sec
- Node_config_unconfigurable_constants
- .rpc_heartbeat_timeout_sec )
- ~send_every:
- (Time_ns.Span.of_sec
- Node_config_unconfigurable_constants
- .rpc_heartbeat_send_every_sec )
- () )
- r w
+ ~timeout:compile_config.rpc_heartbeat_timeout
+ ~send_every:compile_config.rpc_heartbeat_send_every () ) r w
~connection_state:(fun _ -> ())
with
| Error exn ->
@@ -40,19 +30,19 @@ let dispatch rpc query (host_and_port : Host_and_port.t) =
| Ok conn ->
Rpc.Rpc.dispatch rpc conn query ) )
-let dispatch_join_errors rpc query port =
+let dispatch_join_errors ~compile_config rpc query port =
let open Deferred.Let_syntax in
- let%map res = dispatch rpc query port in
+ let%map res = dispatch ~compile_config rpc query port in
Or_error.join res
(** Call an RPC, passing handlers for a successful call and a failing one. Note
that a successful *call* may have failed on the server side and returned a
failing result. To deal with that, the success handler returns an
Or_error. *)
-let dispatch_with_message rpc query port ~success ~error
+let dispatch_with_message ~compile_config rpc query port ~success ~error
~(join_error : 'a Or_error.t -> 'b Or_error.t) =
let fail err = eprintf "%s\n%!" err ; exit 18 in
- let%bind res = dispatch rpc query port in
+ let%bind res = dispatch ~compile_config rpc query port in
match join_error res with
| Ok x ->
printf "%s\n" (success x) ;
@@ -62,8 +52,8 @@ let dispatch_with_message rpc query port ~success ~error
let dispatch_pretty_message (type t)
(module Print : Cli_lib.Render.Printable_intf with type t = t)
- ?(json = true) ~(join_error : 'a Or_error.t -> t Or_error.t) ~error_ctx rpc
- query port =
- let%bind res = dispatch rpc query port in
+ ?(json = true) ~compile_config ~(join_error : 'a Or_error.t -> t Or_error.t)
+ ~error_ctx rpc query port =
+ let%bind res = dispatch ~compile_config rpc query port in
Cli_lib.Render.print (module Print) json (join_error res) ~error_ctx
|> Deferred.return
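The net effect is that every client RPC now draws its handshake and heartbeat timeouts from a single Mina_compile_config.t value instead of Node_config_unconfigurable_constants. A minimal call-site sketch (the host and port are illustrative):

(* Dispatch any daemon RPC, threading the compile-time config explicitly. *)
let call ~(compile_config : Mina_compile_config.t) rpc query ~port =
  Daemon_rpcs.Client.dispatch ~compile_config rpc query
    (Host_and_port.create ~host:"127.0.0.1" ~port)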
diff --git a/src/lib/daemon_rpcs/dune b/src/lib/daemon_rpcs/dune
index 86876a493e8..a9abbb47f89 100644
--- a/src/lib/daemon_rpcs/dune
+++ b/src/lib/daemon_rpcs/dune
@@ -36,7 +36,6 @@
perf_histograms
sync_status
node_addrs_and_ports
- mina_node_config.unconfigurable_constants
logger
network_pool
data_hash_lib
diff --git a/src/lib/genesis_constants/genesis_constants.ml b/src/lib/genesis_constants/genesis_constants.ml
index d2ba77df760..a2fb1a3ff6c 100644
--- a/src/lib/genesis_constants/genesis_constants.ml
+++ b/src/lib/genesis_constants/genesis_constants.ml
@@ -427,19 +427,17 @@ module Make (Node_config : Node_config_intf.S) : S = struct
}
; txpool_max_size = pool_max_size
; num_accounts = None
- ; zkapp_proof_update_cost = Node_config.zkapp_proof_update_cost
- ; zkapp_signed_single_update_cost =
- Node_config.zkapp_signed_single_update_cost
- ; zkapp_signed_pair_update_cost = Node_config.zkapp_signed_pair_update_cost
- ; zkapp_transaction_cost_limit = Node_config.zkapp_transaction_cost_limit
- ; max_event_elements = Node_config.max_event_elements
- ; max_action_elements = Node_config.max_action_elements
- ; zkapp_cmd_limit_hardcap = Node_config.zkapp_cmd_limit_hardcap
+ ; zkapp_proof_update_cost = 10.26
+ ; zkapp_signed_pair_update_cost = 10.08
+ ; zkapp_signed_single_update_cost = 9.14
+ ; zkapp_transaction_cost_limit = 69.45
+ ; max_event_elements = 100
+ ; max_action_elements = 100
+ ; zkapp_cmd_limit_hardcap = 128
; minimum_user_command_fee =
Currency.Fee.of_mina_string_exn Node_config.minimum_user_command_fee
- ; sync_ledger_max_subtree_depth = Node_config.sync_ledger_max_subtree_depth
- ; sync_ledger_default_subtree_depth =
- Node_config.sync_ledger_default_subtree_depth
+ ; sync_ledger_max_subtree_depth = 8
+ ; sync_ledger_default_subtree_depth = 6
}
end
diff --git a/src/lib/genesis_ledger_helper/genesis_ledger_helper.ml b/src/lib/genesis_ledger_helper/genesis_ledger_helper.ml
index b669fd3bc65..e3c4b62cbb3 100644
--- a/src/lib/genesis_ledger_helper/genesis_ledger_helper.ml
+++ b/src/lib/genesis_ledger_helper/genesis_ledger_helper.ml
@@ -666,7 +666,7 @@ module Genesis_proof = struct
return None
let generate_inputs ~runtime_config ~proof_level ~ledger ~genesis_epoch_data
- ~constraint_constants ~blockchain_proof_system_id
+ ~constraint_constants ~blockchain_proof_system_id ~compile_config
~(genesis_constants : Genesis_constants.t) =
let consensus_constants =
Consensus.Constants.create ~constraint_constants
@@ -682,6 +682,7 @@ module Genesis_proof = struct
{ Genesis_proof.Inputs.runtime_config
; constraint_constants
; proof_level
+ ; compile_config
; blockchain_proof_system_id
; genesis_ledger = ledger
; genesis_epoch_data
@@ -708,6 +709,7 @@ module Genesis_proof = struct
; consensus_constants = inputs.consensus_constants
; constraint_constants = inputs.constraint_constants
; genesis_body_reference = inputs.genesis_body_reference
+ ; compile_config = inputs.compile_config
}
| _ ->
Deferred.return (Genesis_proof.create_values_no_proof inputs)
@@ -759,109 +761,138 @@ let print_config ~logger (config : Runtime_config.t) =
[%log info] "Initializing with runtime configuration. Ledger name: $name"
~metadata
-let inputs_from_config_file ?(genesis_dir = Cache_dir.autogen_path) ~logger
- ~cli_proof_level ~(genesis_constants : Genesis_constants.t)
- ~(constraint_constants : Genesis_constants.Constraint_constants.t)
- ~proof_level:compiled_proof_level ?overwrite_version
- (config : Runtime_config.t) =
- print_config ~logger config ;
- let open Deferred.Or_error.Let_syntax in
- let proof_level =
- List.find_map_exn ~f:Fn.id
- [ cli_proof_level
- ; Option.Let_syntax.(
- let%bind proof = config.proof in
- match%map proof.level with
- | Full ->
- Genesis_constants.Proof_level.Full
- | Check ->
- Check
- | No_check ->
- No_check)
- ; Some compiled_proof_level
- ]
- in
- let constraint_constants, blockchain_proof_system_id =
- match config.proof with
- | None ->
- [%log info] "Using the compiled constraint constants" ;
- (constraint_constants, Some (Pickles.Verification_key.Id.dummy ()))
- | Some config ->
- [%log info] "Using the constraint constants from the configuration file" ;
- let blockchain_proof_system_id =
- (* We pass [None] here, which will force the constraint systems to be
- set up and their hashes evaluated before we can calculate the
- genesis proof's filename.
- This adds no overhead if we are generating a genesis proof, since
- we will do these evaluations anyway to load the blockchain proving
- key. Otherwise, this will result in a slight slowdown.
- *)
- None
+module type Config_loader_intf = sig
+ val load_config_files :
+ ?overwrite_version:Mina_numbers.Txn_version.t
+ -> ?genesis_dir:string
+ -> ?itn_features:bool
+ -> ?cli_proof_level:Genesis_constants.Proof_level.t
+ -> ?conf_dir:string
+ -> logger:Logger.t
+ -> string list
+ -> (Precomputed_values.t * Runtime_config.t) Deferred.Or_error.t
+
+ (* Mostly loads genesis ledger and epoch data *)
+ val init_from_config_file :
+ ?overwrite_version:Mina_numbers.Txn_version.t
+ -> ?genesis_dir:string
+ -> logger:Logger.t
+ -> constants:Runtime_config.Constants.constants
+ -> Runtime_config.t
+ -> (Precomputed_values.t * Runtime_config.t) Deferred.Or_error.t
+end
+
+module Config_loader : Config_loader_intf = struct
+ let inputs_from_config_file ?(genesis_dir = Cache_dir.autogen_path) ~logger
+ ~(constants : Runtime_config.Constants.constants) ?overwrite_version
+ (config : Runtime_config.t) =
+ print_config ~logger config ;
+ let open Deferred.Or_error.Let_syntax in
+ let constraint_constants =
+ Runtime_config.Constants.constraint_constants constants
+ in
+ let proof_level = Runtime_config.Constants.proof_level constants in
+ let compile_config = Runtime_config.Constants.compile_config constants in
+ let genesis_constants =
+ Runtime_config.Constants.genesis_constants constants
+ in
+ let%bind genesis_ledger, ledger_config, ledger_file =
+ match config.ledger with
+ | Some ledger ->
+ Ledger.load ~proof_level ~genesis_dir ~logger ~constraint_constants
+ ?overwrite_version ledger
+ | None ->
+ [%log fatal] "No ledger was provided in the runtime configuration" ;
+ Deferred.Or_error.errorf
+ "No ledger was provided in the runtime configuration"
+ in
+ [%log info] "Loaded genesis ledger from $ledger_file"
+ ~metadata:[ ("ledger_file", `String ledger_file) ] ;
+ let%bind genesis_epoch_data, genesis_epoch_data_config =
+ Epoch_data.load ~proof_level ~genesis_dir ~logger ~constraint_constants
+ config.epoch_data
+ in
+ let config =
+ { config with
+ ledger = Option.map config.ledger ~f:(fun _ -> ledger_config)
+ ; epoch_data = genesis_epoch_data_config
+ }
+ in
+ let%map genesis_constants =
+ Deferred.return
+ @@ make_genesis_constants ~logger ~default:genesis_constants config
+ in
+ let proof_inputs =
+ Genesis_proof.generate_inputs ~runtime_config:config ~proof_level
+ ~ledger:genesis_ledger ~constraint_constants ~genesis_constants
+ ~compile_config ~blockchain_proof_system_id:None ~genesis_epoch_data
+ in
+ (proof_inputs, config)
+
+ let init_from_config_file ?overwrite_version ?genesis_dir ~logger
+ ~(constants : Runtime_config.Constants.constants)
+ (config : Runtime_config.t) :
+ (Precomputed_values.t * Runtime_config.t) Deferred.Or_error.t =
+ let open Deferred.Or_error.Let_syntax in
+ let%map inputs, config =
+ inputs_from_config_file ?genesis_dir ~constants ~logger ?overwrite_version
+ config
+ in
+ let values = Genesis_proof.create_values_no_proof inputs in
+ (values, config)
+
+ let load_config_files ?overwrite_version ?genesis_dir ?(itn_features = false)
+ ?cli_proof_level ?conf_dir ~logger (config_files : string list) =
+ let open Deferred.Or_error.Let_syntax in
+ let genesis_dir =
+ let%map.Option conf_dir = conf_dir in
+ Option.value ~default:(conf_dir ^/ "genesis") genesis_dir
+ in
+ let%bind.Deferred constants =
+ Runtime_config.Constants.load_constants ?conf_dir ?cli_proof_level
+ ~itn_features ~logger config_files
+ in
+ let%bind config =
+ Runtime_config.Json_loader.load_config_files ?conf_dir ~logger
+ config_files
+ in
+ match%bind.Deferred
+ init_from_config_file ?overwrite_version ?genesis_dir ~logger ~constants
+ config
+ with
+ | Ok a ->
+ return a
+ | Error err ->
+ let ( json_config
+ , `Accounts_omitted
+ ( `Genesis genesis_accounts_omitted
+ , `Staking staking_accounts_omitted
+ , `Next next_accounts_omitted ) ) =
+ Runtime_config.to_yojson_without_accounts config
in
- ( make_constraint_constants ~default:constraint_constants config
- , blockchain_proof_system_id )
- in
- let%bind () =
- match (proof_level, compiled_proof_level) with
- | _, Full | (Check | No_check), _ ->
- return ()
- | Full, ((Check | No_check) as compiled) ->
- let str = Genesis_constants.Proof_level.to_string in
- [%log fatal]
- "Proof level $proof_level is not compatible with compile-time proof \
- level $compiled_proof_level"
- ~metadata:
- [ ("proof_level", `String (str proof_level))
- ; ("compiled_proof_level", `String (str compiled))
- ] ;
- Deferred.Or_error.errorf
- "Proof level %s is not compatible with compile-time proof level %s"
- (str proof_level) (str compiled)
- in
- let%bind genesis_ledger, ledger_config, ledger_file =
- match config.ledger with
- | Some ledger ->
- Ledger.load ~proof_level ~genesis_dir ~logger ~constraint_constants
- ?overwrite_version ledger
- | None ->
- [%log fatal] "No ledger was provided in the runtime configuration" ;
- Deferred.Or_error.errorf
- "No ledger was provided in the runtime configuration"
- in
- [%log info] "Loaded genesis ledger from $ledger_file"
- ~metadata:[ ("ledger_file", `String ledger_file) ] ;
- let%bind genesis_epoch_data, genesis_epoch_data_config =
- Epoch_data.load ~proof_level ~genesis_dir ~logger ~constraint_constants
- config.epoch_data
- in
- let config =
- { config with
- ledger = Option.map config.ledger ~f:(fun _ -> ledger_config)
- ; epoch_data = genesis_epoch_data_config
- }
- in
- let%map genesis_constants =
- Deferred.return
- @@ make_genesis_constants ~logger ~default:genesis_constants config
- in
- let proof_inputs =
- Genesis_proof.generate_inputs ~runtime_config:config ~proof_level
- ~ledger:genesis_ledger ~constraint_constants ~genesis_constants
- ~blockchain_proof_system_id ~genesis_epoch_data
- in
- (proof_inputs, config)
-
-let init_from_config_file ?genesis_dir ~cli_proof_level ~genesis_constants
- ~constraint_constants ~logger ~proof_level ?overwrite_version
- (config : Runtime_config.t) :
- (Precomputed_values.t * Runtime_config.t) Deferred.Or_error.t =
- let open Deferred.Or_error.Let_syntax in
- let%map inputs, config =
- inputs_from_config_file ?genesis_dir ~cli_proof_level ~genesis_constants
- ~constraint_constants ~logger ~proof_level ?overwrite_version config
- in
- let values = Genesis_proof.create_values_no_proof inputs in
- (values, config)
+ let append_accounts_omitted s =
+ Option.value_map
+ ~f:(fun i -> List.cons (s ^ "_accounts_omitted", `Int i))
+ ~default:Fn.id
+ in
+ let metadata =
+ append_accounts_omitted "genesis" genesis_accounts_omitted
+ @@ append_accounts_omitted "staking" staking_accounts_omitted
+ @@ append_accounts_omitted "next" next_accounts_omitted []
+ @ [ ("config", json_config)
+ ; ( "name"
+ , `String
+ (Option.value ~default:"not provided"
+ (let%bind.Option ledger = config.ledger in
+ Option.first_some ledger.name ledger.hash ) ) )
+ ; ("error", Error_json.error_to_yojson err)
+ ]
+ in
+ [%log info]
+ "Initializing with runtime configuration. Ledger source: $name"
+ ~metadata ;
+ Error.raise err
+end
let%test_module "Account config test" =
( module struct
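A minimal sketch of driving the new loader from daemon-style startup code. The module path and signature come from this diff; the flag values and file name are illustrative.

(* Resolve constants first, then ledger and epoch data, in one call. *)
let load ~conf_dir ~logger :
    (Precomputed_values.t * Runtime_config.t) Async.Deferred.Or_error.t =
  Genesis_ledger_helper.Config_loader.load_config_files ~conf_dir ~logger
    ~itn_features:false
    [ Filename.concat conf_dir "daemon.json" ]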
diff --git a/src/lib/genesis_ledger_helper/lib/genesis_ledger_helper_lib.ml b/src/lib/genesis_ledger_helper/lib/genesis_ledger_helper_lib.ml
index 8e14b481e83..68bb2d2f151 100644
--- a/src/lib/genesis_ledger_helper/lib/genesis_ledger_helper_lib.ml
+++ b/src/lib/genesis_ledger_helper/lib/genesis_ledger_helper_lib.ml
@@ -425,25 +425,10 @@ let make_constraint_constants
in
let transaction_capacity_log_2 =
match config.transaction_capacity with
- | Some (Log_2 i) ->
- i
- | Some (Txns_per_second_x10 tps_goal_x10) ->
- let max_coinbases = 2 in
- let max_user_commands_per_block =
- (* block_window_duration is in milliseconds, so divide by 1000 divide
- by 10 again because we have tps * 10
- *)
- tps_goal_x10 * block_window_duration_ms / (1000 * 10)
- in
- (* Log of the capacity of transactions per transition.
- - 1 will only work if we don't have prover fees.
- - 2 will work with prover fees, but not if we want a transaction
- included in every block.
- - At least 3 ensures a transaction per block and the staged-ledger
- unit tests pass.
- *)
- 1
- + Core_kernel.Int.ceil_log2 (max_user_commands_per_block + max_coinbases)
+ | Some transaction_capacity ->
+ Runtime_config.Proof_keys.Transaction_capacity
+ .to_transaction_capacity_log_2 ~block_window_duration_ms
+ ~transaction_capacity
| None ->
default.transaction_capacity_log_2
in
diff --git a/src/lib/genesis_proof/genesis_proof.ml b/src/lib/genesis_proof/genesis_proof.ml
index 9d3c74d6a18..db2aabbeccb 100644
--- a/src/lib/genesis_proof/genesis_proof.ml
+++ b/src/lib/genesis_proof/genesis_proof.ml
@@ -8,6 +8,7 @@ module Inputs = struct
; constraint_constants : Genesis_constants.Constraint_constants.t
; proof_level : Genesis_constants.Proof_level.t
; genesis_constants : Genesis_constants.t
+ ; compile_config : Mina_compile_config.t
; genesis_ledger : Genesis_ledger.Packed.t
; genesis_epoch_data : Consensus.Genesis_epoch_data.t
; genesis_body_reference : Consensus.Body_reference.t
@@ -85,6 +86,7 @@ module T = struct
; constraint_constants : Genesis_constants.Constraint_constants.t
; genesis_constants : Genesis_constants.t
; proof_level : Genesis_constants.Proof_level.t
+ ; compile_config : Mina_compile_config.t
; genesis_ledger : Genesis_ledger.Packed.t
; genesis_epoch_data : Consensus.Genesis_epoch_data.t
; genesis_body_reference : Consensus.Body_reference.t
@@ -223,6 +225,7 @@ let create_values_no_proof (t : Inputs.t) =
; constraint_constants = t.constraint_constants
; proof_level = t.proof_level
; genesis_constants = t.genesis_constants
+ ; compile_config = t.compile_config
; genesis_ledger = t.genesis_ledger
; genesis_epoch_data = t.genesis_epoch_data
; genesis_body_reference = t.genesis_body_reference
@@ -240,6 +243,7 @@ let to_inputs (t : t) : Inputs.t =
; constraint_constants = t.constraint_constants
; proof_level = t.proof_level
; genesis_constants = t.genesis_constants
+ ; compile_config = t.compile_config
; genesis_ledger = t.genesis_ledger
; genesis_epoch_data = t.genesis_epoch_data
; genesis_body_reference = t.genesis_body_reference
diff --git a/src/lib/gossip_net/libp2p.ml b/src/lib/gossip_net/libp2p.ml
index b0855a7742c..4de2c63250c 100644
--- a/src/lib/gossip_net/libp2p.ml
+++ b/src/lib/gossip_net/libp2p.ml
@@ -52,7 +52,6 @@ module Config = struct
; mutable keypair : Mina_net2.Keypair.t option
; all_peers_seen_metric : bool
; known_private_ip_nets : Core.Unix.Cidr.t list
- ; block_window_duration : Time.Span.t
}
[@@deriving make]
end
@@ -220,8 +219,7 @@ module Make (Rpc_interface : RPC_INTERFACE) :
ctx first_peer_ivar high_connectivity_ivar ~added_seeds ~pids
~on_unexpected_termination
~sinks:
- (Message.Any_sinks (sinksM, (sink_block, sink_tx, sink_snark_work)))
- ~block_window_duration =
+ (Message.Any_sinks (sinksM, (sink_block, sink_tx, sink_snark_work))) =
let module Sinks = (val sinksM) in
let ctr = ref 0 in
let record_peer_connection () =
@@ -258,7 +256,7 @@ module Make (Rpc_interface : RPC_INTERFACE) :
~all_peers_seen_metric:config.all_peers_seen_metric
~on_peer_connected:(fun _ -> record_peer_connection ())
~on_peer_disconnected:ignore ~logger:config.logger ~conf_dir
- ~pids ~block_window_duration () ) )
+ ~pids () ) )
with
| Ok (Ok net2) -> (
let open Mina_net2 in
@@ -629,7 +627,6 @@ module Make (Rpc_interface : RPC_INTERFACE) :
create_libp2p ~allow_multiple_instances config rpc_handlers
first_peer_ivar high_connectivity_ivar ~added_seeds ~pids
~on_unexpected_termination:restart_libp2p ~sinks
- ~block_window_duration:config.block_window_duration
in
on_libp2p_create libp2p ; Deferred.ignore_m libp2p
and restart_libp2p () = don't_wait_for (start_libp2p ()) in
diff --git a/src/lib/itn_logger/dune b/src/lib/itn_logger/dune
index bf9a03dbc9d..6f9c8936a26 100644
--- a/src/lib/itn_logger/dune
+++ b/src/lib/itn_logger/dune
@@ -14,8 +14,6 @@
core
core_kernel
yojson
- ;; local libraries
- mina_node_config.unconfigurable_constants
)
(instrumentation (backend bisect_ppx))
(preprocess (pps ppx_version ppx_mina ppx_jane)))
diff --git a/src/lib/itn_logger/itn_logger.ml b/src/lib/itn_logger/itn_logger.ml
index 11e5f50c997..ad5a5d73f18 100644
--- a/src/lib/itn_logger/itn_logger.ml
+++ b/src/lib/itn_logger/itn_logger.ml
@@ -67,7 +67,16 @@ module Submit_internal_log = struct
~bin_response
end
-let dispatch_remote_log log =
+type config =
+ { rpc_handshake_timeout : Time.Span.t
+ ; rpc_heartbeat_timeout : Time_ns.Span.t
+ ; rpc_heartbeat_send_every : Time_ns.Span.t
+ }
+[@@deriving bin_io_unversioned]
+
+(* dispatch log to daemon *)
+
+let dispatch_remote_log config log =
let open Async.Deferred.Let_syntax in
let rpc = Submit_internal_log.rpc in
match daemon_where_to_connect () with
@@ -80,21 +89,11 @@ let dispatch_remote_log log =
| Some where_to_connect -> (
let%map res =
Async.Rpc.Connection.with_client
- ~handshake_timeout:
- (Time.Span.of_sec
- Node_config_unconfigurable_constants.rpc_handshake_timeout_sec )
+ ~handshake_timeout:config.rpc_handshake_timeout
~heartbeat_config:
(Async.Rpc.Connection.Heartbeat_config.create
- ~timeout:
- (Time_ns.Span.of_sec
- Node_config_unconfigurable_constants
- .rpc_heartbeat_timeout_sec )
- ~send_every:
- (Time_ns.Span.of_sec
- Node_config_unconfigurable_constants
- .rpc_heartbeat_send_every_sec )
- () )
- where_to_connect
+ ~timeout:config.rpc_heartbeat_timeout
+ ~send_every:config.rpc_heartbeat_send_every () ) where_to_connect
(fun conn -> Async.Rpc.Rpc.dispatch rpc conn log)
in
(* not ideal that errors are not themselves logged *)
@@ -110,13 +109,13 @@ let dispatch_remote_log log =
(* Used to ensure that no more than one log message is in-flight at
a time to guarantee sequential processing. *)
-let sequential_dispatcher_loop () =
+let sequential_dispatcher_loop config () =
let open Async in
let pipe_r, pipe_w = Pipe.create () in
- don't_wait_for (Pipe.iter pipe_r ~f:dispatch_remote_log) ;
+ don't_wait_for (Pipe.iter pipe_r ~f:(dispatch_remote_log config)) ;
pipe_w
-let sequential_log_writer_pipe = sequential_dispatcher_loop ()
+let sequential_log_writer_pipe config = sequential_dispatcher_loop config ()
(* this function can be called:
(1) by the logging process (daemon, verifier, or prover) from the logger in Logger, or
@@ -125,7 +124,7 @@ let sequential_log_writer_pipe = sequential_dispatcher_loop ()
for (1), if the process is the verifier or prover, the log is forwarded by RPC
to the daemon, resulting in a recursive call of type (2)
*)
-let log ?process ~timestamp ~message ~metadata () =
+let log ?process ~timestamp ~message ~metadata ~config () =
match get_process_kind () with
| Some process ->
(* prover or verifier, send log to daemon
@@ -136,7 +135,9 @@ let log ?process ~timestamp ~message ~metadata () =
List.map metadata ~f:(fun (s, json) -> (s, Yojson.Safe.to_string json))
in
let remote_log = { timestamp; message; metadata; process } in
- Async.Pipe.write_without_pushback sequential_log_writer_pipe remote_log
+ Async.Pipe.write_without_pushback
+ (sequential_log_writer_pipe config)
+ remote_log
| None ->
(* daemon *)
(* convert JSON to Basic.t in queue, so we don't have to in GraphQL response *)
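Because the prover and verifier no longer read Node_config constants directly, whichever process forwards internal logs must now supply the timeouts itself. A sketch of building the config record; the span values are illustrative, not production defaults:

let itn_config : Itn_logger.config =
  { rpc_handshake_timeout = Time.Span.of_sec 10.
  ; rpc_heartbeat_timeout = Time_ns.Span.of_sec 60.
  ; rpc_heartbeat_send_every = Time_ns.Span.of_sec 10.
  }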
diff --git a/src/lib/ledger_catchup/normal_catchup.ml b/src/lib/ledger_catchup/normal_catchup.ml
index bfd2607ad38..38a23725d08 100644
--- a/src/lib/ledger_catchup/normal_catchup.ml
+++ b/src/lib/ledger_catchup/normal_catchup.ml
@@ -904,10 +904,8 @@ let%test_module "Ledger_catchup tests" =
let verifier =
Async.Thread_safe.block_on_async_exn (fun () ->
- Verifier.create ~logger ~proof_level ~constraint_constants
- ~conf_dir:None
- ~pids:(Child_processes.Termination.create_pid_table ())
- ~commit_id:"not specified for unit tests" () )
+ Verifier.For_tests.default ~constraint_constants ~logger ~proof_level
+ () )
module Context = struct
let logger = logger
diff --git a/src/lib/ledger_catchup/super_catchup.ml b/src/lib/ledger_catchup/super_catchup.ml
index 507cd5eff88..a92306be64f 100644
--- a/src/lib/ledger_catchup/super_catchup.ml
+++ b/src/lib/ledger_catchup/super_catchup.ml
@@ -1454,10 +1454,8 @@ let%test_module "Ledger_catchup tests" =
let verifier =
Async.Thread_safe.block_on_async_exn (fun () ->
- Verifier.create ~logger ~proof_level ~constraint_constants
- ~conf_dir:None
- ~pids:(Child_processes.Termination.create_pid_table ())
- ~commit_id:"not specified for unit tests" () )
+ Verifier.For_tests.default ~constraint_constants ~logger ~proof_level
+ () )
module Context = struct
let logger = logger
diff --git a/src/lib/logger/fake/logger.ml b/src/lib/logger/fake/logger.ml
index 959a7a6c567..5f74dc578c6 100644
--- a/src/lib/logger/fake/logger.ml
+++ b/src/lib/logger/fake/logger.ml
@@ -134,7 +134,15 @@ type t = Metadata.Stable.Latest.t [@@deriving bin_io_unversioned]
let metadata = Fn.id
-let create ?metadata:_ ?id:_ ?itn_features:_ () = Metadata.empty
+type itn_logger_config = unit
+
+let make_itn_logger_config ~rpc_handshake_timeout:_ ~rpc_heartbeat_timeout:_
+ ~rpc_heartbeat_send_every:_ =
+ ()
+
+let create ?metadata:_ ?id:_ ?itn_config:_ () = Metadata.empty
+
+let with_itn _ = Fn.id
let null () = Metadata.empty
diff --git a/src/lib/logger/logger.mli b/src/lib/logger/logger.mli
index c2cb406e29e..c6eff3bb4a7 100644
--- a/src/lib/logger/logger.mli
+++ b/src/lib/logger/logger.mli
@@ -135,10 +135,20 @@ type 'a log_function =
-> ('a, unit, string, unit) format4
-> 'a
+type itn_logger_config
+
+val make_itn_logger_config :
+ rpc_handshake_timeout:Time.Span.t
+ -> rpc_heartbeat_timeout:Time_ns.Span.t
+ -> rpc_heartbeat_send_every:Time_ns.Span.t
+ -> itn_logger_config
+
+val with_itn : itn_logger_config -> t -> t
+
val create :
?metadata:(string, Yojson.Safe.t) List.Assoc.t
-> ?id:string
- -> ?itn_features:bool
+ -> ?itn_config:itn_logger_config
-> unit
-> t
diff --git a/src/lib/logger/native/logger.ml b/src/lib/logger/native/logger.ml
index da36f253cda..dcbc3c90b93 100644
--- a/src/lib/logger/native/logger.ml
+++ b/src/lib/logger/native/logger.ml
@@ -333,25 +333,33 @@ type t =
{ null : bool
; metadata : Metadata.Stable.Latest.t
; id : Bounded_types.String.Stable.V1.t
- ; itn_features : bool
+ ; itn_config : Itn_logger.config option
}
[@@deriving bin_io_unversioned]
let metadata t = t.metadata
-let create ?(metadata = []) ?(id = "default") ?(itn_features = false) () =
+type itn_logger_config = Itn_logger.config
+
+let make_itn_logger_config ~rpc_handshake_timeout ~rpc_heartbeat_timeout
+ ~rpc_heartbeat_send_every =
+ { Itn_logger.rpc_handshake_timeout
+ ; rpc_heartbeat_timeout
+ ; rpc_heartbeat_send_every
+ }
+
+let create ?(metadata = []) ?(id = "default") ?itn_config () =
{ null = false
; metadata = Metadata.extend Metadata.empty metadata
; id
- ; itn_features
+ ; itn_config
}
+let with_itn itn_logger_config t =
+ { t with itn_config = Some itn_logger_config }
+
let null () =
- { null = true
- ; metadata = Metadata.empty
- ; id = "default"
- ; itn_features = false
- }
+ { null = true; metadata = Metadata.empty; id = "default"; itn_config = None }
let extend t metadata =
{ t with metadata = Metadata.extend t.metadata metadata }
@@ -413,14 +421,17 @@ let log t ~level ~module_ ~location ?(metadata = []) ?event_id fmt =
in
raw t message' ;
match level with
- | Internal ->
- if t.itn_features then
- let timestamp = message'.timestamp in
- let entries =
- Itn_logger.postprocess_message ~timestamp ~message ~metadata
- in
- List.iter entries ~f:(fun (timestamp, message, metadata) ->
- Itn_logger.log ~timestamp ~message ~metadata () )
+ | Internal -> (
+ match t.itn_config with
+ | Some config ->
+ let timestamp = message'.timestamp in
+ let entries =
+ Itn_logger.postprocess_message ~timestamp ~message ~metadata
+ in
+ List.iter entries ~f:(fun (timestamp, message, metadata) ->
+ Itn_logger.log ~timestamp ~message ~metadata ~config () )
+ | None ->
+ () )
| _ ->
()
in
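At the Logger level, ITN logging is now opted into by attaching a config rather than flipping a boolean. A sketch against the new mli (span values illustrative):

let logger =
  let itn =
    Logger.make_itn_logger_config
      ~rpc_handshake_timeout:(Time.Span.of_sec 10.)
      ~rpc_heartbeat_timeout:(Time_ns.Span.of_sec 60.)
      ~rpc_heartbeat_send_every:(Time_ns.Span.of_sec 10.)
  in
  Logger.with_itn itn (Logger.create ())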
diff --git a/src/lib/mina_base/dune b/src/lib/mina_base/dune
index e362e42f2bb..5324ac38daf 100644
--- a/src/lib/mina_base/dune
+++ b/src/lib/mina_base/dune
@@ -73,6 +73,7 @@
snark_bits
error_json
ppx_version.runtime
+ mina_compile_config
)
(preprocess
(pps ppx_annot ppx_snarky ppx_here ppx_mina ppx_version ppx_compare ppx_deriving.enum ppx_deriving.ord ppx_deriving.make
diff --git a/src/lib/mina_base/test/verification_key_permission_test.ml b/src/lib/mina_base/test/verification_key_permission_test.ml
index e7c8559722c..ba624fb295e 100644
--- a/src/lib/mina_base/test/verification_key_permission_test.ml
+++ b/src/lib/mina_base/test/verification_key_permission_test.ml
@@ -39,6 +39,7 @@ let update_vk_perm_with_different_version () =
match
User_command.check_well_formedness
~genesis_constants:Genesis_constants.For_unit_tests.t
+ ~compile_config:Mina_compile_config.For_unit_tests.t
(Zkapp_command (update_vk_perm_to_be ~auth:(auth, different_version)))
with
| Ok _ ->
@@ -52,6 +53,7 @@ let update_vk_perm_with_current_version () =
Quickcheck.test ~trials:10 auth_gen ~f:(fun auth ->
match
User_command.check_well_formedness
+ ~compile_config:Mina_compile_config.For_unit_tests.t
~genesis_constants:Genesis_constants.For_unit_tests.t
(Zkapp_command
(update_vk_perm_to_be
diff --git a/src/lib/mina_base/test/zero_vesting_period.ml b/src/lib/mina_base/test/zero_vesting_period.ml
index bc717be8fc6..41aee5aca98 100644
--- a/src/lib/mina_base/test/zero_vesting_period.ml
+++ b/src/lib/mina_base/test/zero_vesting_period.ml
@@ -282,6 +282,7 @@ let zero_vesting_period_is_error () =
match
User_command.check_well_formedness
~genesis_constants:Genesis_constants.For_unit_tests.t
+ ~compile_config:Mina_compile_config.For_unit_tests.t
(Zkapp_command zkapp_zero_vesting_period)
with
| Error [ Zero_vesting_period ] ->
@@ -294,6 +295,7 @@ let zkapp_nonzero_vesting_period = mk_zkapp_with_vesting_period 1
let nonzero_vesting_period_ok () =
match
User_command.check_well_formedness
+ ~compile_config:Mina_compile_config.For_unit_tests.t
~genesis_constants:Genesis_constants.For_unit_tests.t
(Zkapp_command zkapp_nonzero_vesting_period)
with
diff --git a/src/lib/mina_base/user_command.ml b/src/lib/mina_base/user_command.ml
index 909fb53deb6..952d290ee74 100644
--- a/src/lib/mina_base/user_command.ml
+++ b/src/lib/mina_base/user_command.ml
@@ -230,9 +230,9 @@ let fee : t -> Currency.Fee.t = function
let has_insufficient_fee ~minimum_fee t = Currency.Fee.(fee t < minimum_fee)
-let is_disabled = function
+let is_disabled ~(compile_config : Mina_compile_config.t) = function
| Zkapp_command _ ->
- Node_config_unconfigurable_constants.zkapps_disabled
+ compile_config.zkapps_disabled
| _ ->
false
@@ -430,7 +430,8 @@ module Well_formedness_error = struct
"Transaction type disabled"
end
-let check_well_formedness ~(genesis_constants : Genesis_constants.t) t :
+let check_well_formedness ~(genesis_constants : Genesis_constants.t)
+ ~(compile_config : Mina_compile_config.t) t :
(unit, Well_formedness_error.t list) result =
let preds =
let open Well_formedness_error in
@@ -439,7 +440,7 @@ let check_well_formedness ~(genesis_constants : Genesis_constants.t) t :
, Insufficient_fee )
; (has_zero_vesting_period, Zero_vesting_period)
; (is_incompatible_version, Incompatible_version)
- ; (is_disabled, Transaction_type_disabled)
+ ; (is_disabled ~compile_config, Transaction_type_disabled)
; (has_invalid_call_forest, Zkapp_invalid_call_forest)
]
in
diff --git a/src/lib/mina_base/zkapp_account.ml b/src/lib/mina_base/zkapp_account.ml
index 663df35a545..9052abd8832 100644
--- a/src/lib/mina_base/zkapp_account.ml
+++ b/src/lib/mina_base/zkapp_account.ml
@@ -297,32 +297,35 @@ end
(* This preimage cannot be attained by any string, due to the trailing [true]
added below.
*)
-let zkapp_uri_non_preimage =
- lazy (Random_oracle_input.Chunked.field_elements [| Field.zero; Field.zero |])
-
-let hash_zkapp_uri_opt (zkapp_uri_opt : string option) =
- let input =
- match zkapp_uri_opt with
- | Some zkapp_uri ->
- (* We use [length*8 + 1] to pass a final [true] after the end of the
- string, to ensure that trailing null bytes don't alias in the hash
- preimage.
- *)
- let bits = Array.create ~len:((String.length zkapp_uri * 8) + 1) true in
- String.foldi zkapp_uri ~init:() ~f:(fun i () c ->
- let c = Char.to_int c in
- (* Insert the bits into [bits], LSB order. *)
- for j = 0 to 7 do
- (* [Int.test_bit c j] *)
- bits.((i * 8) + j) <- Int.bit_and c (1 lsl j) <> 0
- done ) ;
+let zkapp_uri_non_preimage_hash =
+ lazy
+ ( Random_oracle.pack_input
+ (Random_oracle_input.Chunked.field_elements
+ [| Field.zero; Field.zero |] )
+ |> Random_oracle.hash ~init:Hash_prefix_states.zkapp_uri )
+
+let hash_zkapp_uri_opt = function
+ | None ->
+ Lazy.force zkapp_uri_non_preimage_hash
+ | Some zkapp_uri ->
+ (* We use [length*8 + 1] to pass a final [true] after the end of the
+ string, to ensure that trailing null bytes don't alias in the hash
+ preimage.
+ *)
+ let bits = Array.create ~len:((String.length zkapp_uri * 8) + 1) true in
+ String.foldi zkapp_uri ~init:() ~f:(fun i () c ->
+ let c = Char.to_int c in
+ (* Insert the bits into [bits], LSB order. *)
+ for j = 0 to 7 do
+ (* [Int.test_bit c j] *)
+ bits.((i * 8) + j) <- Int.bit_and c (1 lsl j) <> 0
+ done ) ;
+ let input =
Random_oracle_input.Chunked.packeds
(Array.map ~f:(fun b -> (field_of_bool b, 1)) bits)
- | None ->
- Lazy.force zkapp_uri_non_preimage
- in
- Random_oracle.pack_input input
- |> Random_oracle.hash ~init:Hash_prefix_states.zkapp_uri
+ in
+ Random_oracle.pack_input input
+ |> Random_oracle.hash ~init:Hash_prefix_states.zkapp_uri
let hash_zkapp_uri (zkapp_uri : string) = hash_zkapp_uri_opt (Some zkapp_uri)
@@ -389,12 +392,6 @@ let digest (t : t) =
let default_digest = lazy (digest default)
-let hash_zkapp_account_opt' = function
- | None ->
- Lazy.force default_digest
- | Some (a : t) ->
- digest a
-
let action_state_deriver obj =
let open Fields_derivers_zkapps.Derivers in
let list_5 = list ~static_length:5 (field @@ o ()) in
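The sentinel-bit scheme above is easy to check in isolation: each byte contributes eight LSB-first bits, and one final true bit marks the end of the string, so for example "a" and "a\000" produce different preimages. A self-contained sketch of just the encoding step:

(* Encode [s] as length*8 + 1 bits, LSB-first per byte; the array is
   initialised to [true], so the one slot never overwritten, index
   [length * 8], is the trailing sentinel. *)
let bits_of_string_with_sentinel s =
  let open Core_kernel in
  let bits = Array.create ~len:((String.length s * 8) + 1) true in
  String.iteri s ~f:(fun i c ->
      let c = Char.to_int c in
      for j = 0 to 7 do
        bits.((i * 8) + j) <- (c lsr j) land 1 = 1
      done ) ;
  bits

For "a" (0x61) this yields 1,0,0,0,0,1,1,0 followed by the sentinel 1.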
diff --git a/src/lib/mina_base/zkapp_call_forest.ml b/src/lib/mina_base/zkapp_call_forest.ml
index 5c67949c94a..4b041e7451b 100644
--- a/src/lib/mina_base/zkapp_call_forest.ml
+++ b/src/lib/mina_base/zkapp_call_forest.ml
@@ -25,11 +25,6 @@ let pop_exn : t -> (Account_update.t * t) * t = function
| _ ->
failwith "pop_exn"
-let push ~account_update ~calls t =
- Zkapp_command.Call_forest.cons ~calls account_update t
-
-let hash (t : t) = Zkapp_command.Call_forest.hash t
-
open Snark_params.Tick.Run
module Checked = struct
@@ -210,6 +205,7 @@ module Checked = struct
} )
: (account_update * t) * t ) )
+ (* TODO Consider moving out of mina_base *)
let push
~account_update:
{ account_update = { hash = account_update_hash; data = account_update }
diff --git a/src/lib/mina_base/zkapp_call_forest_base.ml b/src/lib/mina_base/zkapp_call_forest_base.ml
new file mode 100644
index 00000000000..7bf01f807bc
--- /dev/null
+++ b/src/lib/mina_base/zkapp_call_forest_base.ml
@@ -0,0 +1,606 @@
+open Core_kernel
+
+let empty = Outside_hash_image.t
+
+module Tree = struct
+ [%%versioned
+ module Stable = struct
+ module V1 = struct
+ type ('account_update, 'account_update_digest, 'digest) t =
+ ( 'account_update
+ , 'account_update_digest
+ , 'digest )
+ Mina_wire_types.Mina_base.Zkapp_command.Call_forest.Tree.V1.t =
+ { account_update : 'account_update
+ ; account_update_digest : 'account_update_digest
+ ; calls :
+ ( ('account_update, 'account_update_digest, 'digest) t
+ , 'digest )
+ With_stack_hash.Stable.V1.t
+ list
+ }
+ [@@deriving sexp, compare, equal, hash, yojson]
+
+ let to_latest = Fn.id
+ end
+ end]
+
+ let rec fold_forest (ts : (_ t, _) With_stack_hash.t list) ~f ~init =
+ List.fold ts ~init ~f:(fun acc { elt; stack_hash = _ } ->
+ fold elt ~init:acc ~f )
+
+ and fold { account_update; calls; account_update_digest = _ } ~f ~init =
+ fold_forest calls ~f ~init:(f init account_update)
+
+ let rec fold_forest2_exn (ts1 : (_ t, _) With_stack_hash.t list)
+ (ts2 : (_ t, _) With_stack_hash.t list) ~f ~init =
+ List.fold2_exn ts1 ts2 ~init
+ ~f:(fun acc { elt = elt1; stack_hash = _ } { elt = elt2; stack_hash = _ }
+ -> fold2_exn elt1 elt2 ~init:acc ~f )
+
+ and fold2_exn
+ { account_update = account_update1
+ ; calls = calls1
+ ; account_update_digest = _
+ }
+ { account_update = account_update2
+ ; calls = calls2
+ ; account_update_digest = _
+ } ~f ~init =
+ fold_forest2_exn calls1 calls2 ~f
+ ~init:(f init account_update1 account_update2)
+
+ let iter_forest2_exn ts1 ts2 ~f =
+ fold_forest2_exn ts1 ts2 ~init:() ~f:(fun () p1 p2 -> f p1 p2)
+
+ let iter2_exn ts1 ts2 ~f =
+ fold2_exn ts1 ts2 ~init:() ~f:(fun () p1 p2 -> f p1 p2)
+
+ let rec mapi_with_trees' ~i (t : _ t) ~f =
+ let account_update = f i t.account_update t in
+ let l, calls = mapi_forest_with_trees' ~i:(i + 1) t.calls ~f in
+ ( l
+ , { calls; account_update; account_update_digest = t.account_update_digest }
+ )
+
+ and mapi_forest_with_trees' ~i x ~f =
+ let rec go i acc = function
+ | [] ->
+ (i, List.rev acc)
+ | t :: ts ->
+ let l, elt' = mapi_with_trees' ~i ~f (With_stack_hash.elt t) in
+ go l (With_stack_hash.map t ~f:(fun _ -> elt') :: acc) ts
+ in
+ go i [] x
+
+ let mapi_with_trees t ~f = mapi_with_trees' ~i:0 t ~f |> snd
+
+ let mapi_forest_with_trees t ~f = mapi_forest_with_trees' ~i:0 t ~f |> snd
+
+ let mapi' ~i t ~f =
+ mapi_with_trees' ~i t ~f:(fun i account_update _ -> f i account_update)
+
+ let mapi_forest' ~i t ~f =
+ mapi_forest_with_trees' ~i t ~f:(fun i account_update _ ->
+ f i account_update )
+
+ let rec deferred_mapi_with_trees' ~i (t : _ t) ~f =
+ let open Async_kernel.Deferred.Let_syntax in
+ let%bind l, calls =
+ deferred_mapi_forest_with_trees' ~i:(i + 1) t.calls ~f
+ in
+ let%map account_update = f i t.account_update t in
+ ( l
+ , { calls; account_update; account_update_digest = t.account_update_digest }
+ )
+
+ and deferred_mapi_forest_with_trees' ~i x ~f =
+ let open Async_kernel.Deferred.Let_syntax in
+ let rec go i acc = function
+ | [] ->
+ return (i, List.rev acc)
+ | t :: ts ->
+ let%bind l, elt' =
+ deferred_mapi_with_trees' ~i ~f (With_stack_hash.elt t)
+ in
+ go l (With_stack_hash.map t ~f:(fun _ -> elt') :: acc) ts
+ in
+ go i [] x
+
+ let map_forest ~f t = mapi_forest' ~i:0 ~f:(fun _ x -> f x) t |> snd
+
+ let mapi_forest ~f t = mapi_forest' ~i:0 ~f t |> snd
+
+ let deferred_map_forest ~f t =
+ let open Async_kernel.Deferred in
+ deferred_mapi_forest_with_trees' ~i:0 ~f:(fun _ x -> f x) t >>| snd
+
+ let deferred_mapi_forest ~f t =
+ let open Async_kernel.Deferred in
+ deferred_mapi_forest_with_trees' ~i:0 ~f t >>| snd
+
+ let hash { account_update = _; calls; account_update_digest } =
+ let stack_hash = match calls with [] -> empty | e :: _ -> e.stack_hash in
+ Random_oracle.hash ~init:Hash_prefix_states.account_update_node
+ [| account_update_digest; stack_hash |]
+end
+
+type ('a, 'b, 'c) tree = ('a, 'b, 'c) Tree.t
+
+module type Digest_intf = sig
+ module Account_update : sig
+ include Digest_intf.S
+
+ module Checked : sig
+ include Digest_intf.S_checked
+
+ val create : ?chain:Mina_signature_kind.t -> Account_update.Checked.t -> t
+
+ val create_body :
+ ?chain:Mina_signature_kind.t -> Account_update.Body.Checked.t -> t
+ end
+
+ include Digest_intf.S_aux with type t := t and type checked := Checked.t
+
+ val create : ?chain:Mina_signature_kind.t -> Account_update.t -> t
+
+ val create_body : ?chain:Mina_signature_kind.t -> Account_update.Body.t -> t
+ end
+
+ module rec Forest : sig
+ include Digest_intf.S
+
+ module Checked : sig
+ include Digest_intf.S_checked
+
+ val empty : t
+
+ val cons : Tree.Checked.t -> t -> t
+ end
+
+ include Digest_intf.S_aux with type t := t and type checked := Checked.t
+
+ val empty : t
+
+ val cons : Tree.t -> Forest.t -> Forest.t
+ end
+
+ and Tree : sig
+ include Digest_intf.S
+
+ module Checked : sig
+ include Digest_intf.S_checked
+
+ val create :
+ account_update:Account_update.Checked.t
+ -> calls:Forest.Checked.t
+ -> Tree.Checked.t
+ end
+
+ include Digest_intf.S_aux with type t := t and type checked := Checked.t
+
+ val create : (_, Account_update.t, Forest.t) tree -> Tree.t
+ end
+end
+
+module Make_digest_sig
+ (T : Mina_wire_types.Mina_base.Zkapp_command.Digest_types.S) =
+struct
+ module type S =
+ Digest_intf
+ with type Account_update.Stable.V1.t = T.Account_update.V1.t
+ and type Forest.Stable.V1.t = T.Forest.V1.t
+end
+
+module Make_digest_types = struct
+ module Account_update = struct
+ [%%versioned
+ module Stable = struct
+ module V1 = struct
+ type t = Kimchi_backend.Pasta.Basic.Fp.Stable.V1.t
+ [@@deriving sexp, compare, equal, hash, yojson]
+
+ let to_latest = Fn.id
+ end
+ end]
+ end
+
+ module Forest = struct
+ [%%versioned
+ module Stable = struct
+ module V1 = struct
+ type t = Kimchi_backend.Pasta.Basic.Fp.Stable.V1.t
+ [@@deriving sexp, compare, equal, hash, yojson]
+
+ let to_latest = Fn.id
+ end
+ end]
+ end
+
+ module Tree = struct
+ [%%versioned
+ module Stable = struct
+ module V1 = struct
+ type t = Kimchi_backend.Pasta.Basic.Fp.Stable.V1.t
+ [@@deriving sexp, compare, equal, hash, yojson]
+
+ let to_latest = Fn.id
+ end
+ end]
+ end
+end
+
+module Make_digest_str
+ (T : Mina_wire_types.Mina_base.Zkapp_command.Digest_concrete) :
+ Make_digest_sig(T).S = struct
+ module M = struct
+ open Pickles.Impls.Step.Field
+ module Checked = Pickles.Impls.Step.Field
+
+ let typ = typ
+
+ let constant = constant
+ end
+
+ module Account_update = struct
+ include Make_digest_types.Account_update
+ include M
+
+ module Checked = struct
+ include Checked
+
+ let create = Account_update.Checked.digest
+
+ let create_body = Account_update.Body.Checked.digest
+ end
+
+ let create : ?chain:Mina_signature_kind.t -> Account_update.t -> t =
+ Account_update.digest
+
+ let create_body : ?chain:Mina_signature_kind.t -> Account_update.Body.t -> t
+ =
+ Account_update.Body.digest
+ end
+
+ module Forest = struct
+ include Make_digest_types.Forest
+ include M
+
+ module Checked = struct
+ include Checked
+
+ let empty = constant empty
+
+ let cons hash h_tl =
+ Random_oracle.Checked.hash ~init:Hash_prefix_states.account_update_cons
+ [| hash; h_tl |]
+ end
+
+ let empty = empty
+
+ let cons hash h_tl =
+ Random_oracle.hash ~init:Hash_prefix_states.account_update_cons
+ [| hash; h_tl |]
+ end
+
+ module Tree = struct
+ include Make_digest_types.Tree
+ include M
+
+ module Checked = struct
+ include Checked
+
+ let create ~(account_update : Account_update.Checked.t)
+ ~(calls : Forest.Checked.t) =
+ Random_oracle.Checked.hash ~init:Hash_prefix_states.account_update_node
+ [| (account_update :> t); (calls :> t) |]
+ end
+
+ let create ({ account_update = _; calls; account_update_digest } : _ tree) =
+ let stack_hash =
+ match calls with [] -> empty | e :: _ -> e.stack_hash
+ in
+ Random_oracle.hash ~init:Hash_prefix_states.account_update_node
+ [| account_update_digest; stack_hash |]
+ end
+end
+
+module Digest =
+ Mina_wire_types.Mina_base.Zkapp_command.Digest_make
+ (Make_digest_sig)
+ (Make_digest_str)
+
+let fold = Tree.fold_forest
+
+let iteri t ~(f : int -> 'a -> unit) : unit =
+ let (_ : int) = fold t ~init:0 ~f:(fun acc x -> f acc x ; acc + 1) in
+ ()
+
+[%%versioned
+module Stable = struct
+ module V1 = struct
+ type ('account_update, 'account_update_digest, 'digest) t =
+ ( ('account_update, 'account_update_digest, 'digest) Tree.Stable.V1.t
+ , 'digest )
+ With_stack_hash.Stable.V1.t
+ list
+ [@@deriving sexp, compare, equal, hash, yojson]
+
+ let to_latest = Fn.id
+ end
+end]
+
+module Shape = struct
+ module I = struct
+ type t = int
+
+ let quickcheck_shrinker = Quickcheck.Shrinker.empty ()
+
+ let quickcheck_generator = [%quickcheck.generator: int]
+
+ let quickcheck_observer = [%quickcheck.observer: int]
+ end
+
+ type t = Node of (I.t * t) list [@@deriving quickcheck]
+end
+
+let rec shape (t : _ t) : Shape.t =
+ Node (List.mapi t ~f:(fun i { elt; stack_hash = _ } -> (i, shape elt.calls)))
+
+let match_up (type a b) (xs : a list) (ys : (int * b) list) : (a * b) list =
+ let rec go i_curr xs ys =
+ match (xs, ys) with
+ | [], [] ->
+ []
+ | x :: xs', (i, y) :: ys' ->
+ if i_curr = i then (x, y) :: go (i_curr + 1) xs' ys'
+ else if i_curr < i then go (i_curr + 1) xs' ys'
+ else assert false
+ | [], _ :: _ ->
+ assert false
+ | _ :: _, [] ->
+ []
+ in
+ go 0 xs ys
+
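+(* [match_up xs ys] keeps exactly the elements of [xs] whose positional
+ index appears in [ys], pairing them up; [mask] uses it to prune a
+ forest down to a given [Shape.t], recursively. *)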
+let rec mask (t : ('p, 'h1, unit) t) (Node shape : Shape.t) : ('p, 'h1, unit) t
+ =
+ List.map (match_up t shape)
+ ~f:(fun ({ With_stack_hash.elt = t_sub; stack_hash = () }, shape_sub) ->
+ { With_stack_hash.elt = { t_sub with calls = mask t_sub.calls shape_sub }
+ ; stack_hash = ()
+ } )
+
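+(* Rebuild a call forest from a flat, execution-ordered list: an update
+ at depth [d + 1] becomes a child of the nearest preceding update at
+ depth [d]. For example, depths [0; 1; 1; 0] yield two roots, the
+ first with two children. *)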
+let rec of_account_updates_map ~(f : 'p1 -> 'p2)
+ ~(account_update_depth : 'p1 -> int) (account_updates : 'p1 list) :
+ ('p2, unit, unit) t =
+ match account_updates with
+ | [] ->
+ []
+ | p :: ps ->
+ let depth = account_update_depth p in
+ let children, siblings =
+ List.split_while ps ~f:(fun p' -> account_update_depth p' > depth)
+ in
+ { With_stack_hash.elt =
+ { Tree.account_update = f p
+ ; account_update_digest = ()
+ ; calls = of_account_updates_map ~f ~account_update_depth children
+ }
+ ; stack_hash = ()
+ }
+ :: of_account_updates_map ~f ~account_update_depth siblings
+
+let of_account_updates ~account_update_depth account_updates =
+ of_account_updates_map ~f:Fn.id ~account_update_depth account_updates
+
+let to_account_updates_map ~f (xs : _ t) =
+ let rec collect depth (xs : _ t) acc =
+ match xs with
+ | [] ->
+ acc
+ | { elt = { account_update; calls; account_update_digest = _ }
+ ; stack_hash = _
+ }
+ :: xs ->
+ f ~depth account_update :: acc
+ |> collect (depth + 1) calls
+ |> collect depth xs
+ in
+ List.rev (collect 0 xs [])
+
+let to_account_updates xs =
+ to_account_updates_map ~f:(fun ~depth:_ account_update -> account_update) xs
+
+let hd_account_update (xs : _ t) =
+ match xs with
+ | [] ->
+ None
+ | { elt = { account_update; calls = _; account_update_digest = _ }
+ ; stack_hash = _
+ }
+ :: _ ->
+ Some account_update
+
+let map = Tree.map_forest
+
+let mapi = Tree.mapi_forest
+
+let mapi_with_trees = Tree.mapi_forest_with_trees
+
+let deferred_mapi = Tree.deferred_mapi_forest
+
+let to_zkapp_command_with_hashes_list (xs : _ t) =
+ let rec collect (xs : _ t) acc =
+ match xs with
+ | [] ->
+ acc
+ | { elt = { account_update; calls; account_update_digest = _ }; stack_hash }
+ :: xs ->
+ (account_update, stack_hash) :: acc |> collect calls |> collect xs
+ in
+ List.rev (collect xs [])
+
+let hash_cons hash h_tl =
+ Random_oracle.hash ~init:Hash_prefix_states.account_update_cons
+ [| hash; h_tl |]
+
+let hash = function
+ | [] ->
+ Digest.Forest.empty
+ | x :: _ ->
+ With_stack_hash.stack_hash x
+
+let cons_tree tree (forest : _ t) : _ t =
+ { elt = tree
+ ; stack_hash = Digest.Forest.cons (Digest.Tree.create tree) (hash forest)
+ }
+ :: forest
+
+let cons_aux (type p) ~(digest_account_update : p -> _) ?(calls = [])
+ (account_update : p) (xs : _ t) : _ t =
+ let account_update_digest = digest_account_update account_update in
+ let tree : _ Tree.t = { account_update; account_update_digest; calls } in
+ cons_tree tree xs
+
+let cons ?calls (account_update : Account_update.t) xs =
+ cons_aux ~digest_account_update:Digest.Account_update.create ?calls
+ account_update xs
+
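+(* Recompute every digest bottom-up: each node's digest comes from
+ [hash_account_update], and each forest level's stack hash is
+ [Digest.Forest.cons node_hash (hash tail)]. *)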
+let rec accumulate_hashes ~hash_account_update (xs : _ t) =
+ let go = accumulate_hashes ~hash_account_update in
+ match xs with
+ | [] ->
+ []
+ | { elt = { account_update; calls; account_update_digest = _ }
+ ; stack_hash = _
+ }
+ :: xs ->
+ let calls = go calls in
+ let xs = go xs in
+ let node =
+ { Tree.account_update
+ ; calls
+ ; account_update_digest = hash_account_update account_update
+ }
+ in
+ let node_hash = Digest.Tree.create node in
+ { elt = node; stack_hash = Digest.Forest.cons node_hash (hash xs) } :: xs
+
+let accumulate_hashes' (type a b) (xs : (Account_update.t, a, b) t) :
+ (Account_update.t, Digest.Account_update.t, Digest.Forest.t) t =
+ let hash_account_update (p : Account_update.t) =
+ Digest.Account_update.create p
+ in
+ accumulate_hashes ~hash_account_update xs
+
+let accumulate_hashes_predicated xs =
+ accumulate_hashes ~hash_account_update:Digest.Account_update.create xs
+
+module With_hashes_and_data = struct
+ [%%versioned
+ module Stable = struct
+ module V1 = struct
+ type 'data t =
+ ( Account_update.Stable.V1.t * 'data
+ , Digest.Account_update.Stable.V1.t
+ , Digest.Forest.Stable.V1.t )
+ Stable.V1.t
+ [@@deriving sexp, compare, equal, hash, yojson]
+
+ let to_latest = Fn.id
+ end
+ end]
+
+ let empty = Digest.Forest.empty
+
+ let hash_account_update ((p : Account_update.t), _) =
+ Digest.Account_update.create p
+
+ let accumulate_hashes xs : _ t = accumulate_hashes ~hash_account_update xs
+
+ let of_zkapp_command_simple_list (xs : (Account_update.Simple.t * 'a) list) :
+ _ t =
+ of_account_updates xs
+ ~account_update_depth:(fun ((p : Account_update.Simple.t), _) ->
+ p.body.call_depth )
+ |> map ~f:(fun (p, x) -> (Account_update.of_simple p, x))
+ |> accumulate_hashes
+
+ let of_account_updates (xs : (Account_update.Graphql_repr.t * 'a) list) : _ t
+ =
+ of_account_updates_map
+ ~account_update_depth:(fun ((p : Account_update.Graphql_repr.t), _) ->
+ p.body.call_depth )
+ ~f:(fun (p, x) -> (Account_update.of_graphql_repr p, x))
+ xs
+ |> accumulate_hashes
+
+ let to_account_updates (x : _ t) = to_account_updates x
+
+ let to_zkapp_command_with_hashes_list (x : _ t) =
+ to_zkapp_command_with_hashes_list x
+
+ let account_updates_hash' xs = of_account_updates xs |> hash
+
+ let account_updates_hash xs =
+ List.map ~f:(fun x -> (x, ())) xs |> account_updates_hash'
+end
+
+module With_hashes = struct
+ [%%versioned
+ module Stable = struct
+ module V1 = struct
+ type t =
+ ( Account_update.Stable.V1.t
+ , Digest.Account_update.Stable.V1.t
+ , Digest.Forest.Stable.V1.t )
+ Stable.V1.t
+ [@@deriving sexp, compare, equal, hash, yojson]
+
+ let to_latest = Fn.id
+ end
+ end]
+
+ let empty = Digest.Forest.empty
+
+ let hash_account_update (p : Account_update.t) =
+ Digest.Account_update.create p
+
+ let accumulate_hashes xs : t = accumulate_hashes ~hash_account_update xs
+
+ let of_zkapp_command_simple_list (xs : Account_update.Simple.t list) : t =
+ of_account_updates xs
+ ~account_update_depth:(fun (p : Account_update.Simple.t) ->
+ p.body.call_depth )
+ |> map ~f:Account_update.of_simple
+ |> accumulate_hashes
+
+ let of_account_updates (xs : Account_update.Graphql_repr.t list) : t =
+ of_account_updates_map
+ ~account_update_depth:(fun (p : Account_update.Graphql_repr.t) ->
+ p.body.call_depth )
+ ~f:(fun p -> Account_update.of_graphql_repr p)
+ xs
+ |> accumulate_hashes
+
+ let to_account_updates (x : t) = to_account_updates x
+
+ let to_zkapp_command_with_hashes_list (x : t) =
+ to_zkapp_command_with_hashes_list x
+
+ let account_updates_hash' xs = of_account_updates xs |> hash
+
+ let account_updates_hash xs =
+ List.map ~f:(fun x -> x) xs |> account_updates_hash'
+end
+
+let is_empty : _ t -> bool = List.is_empty
+
+let to_list (type p) (t : (p, _, _) t) : p list =
+ List.rev @@ fold t ~init:[] ~f:(fun acc p -> p :: acc)
+
+let exists (type p) (t : (p, _, _) t) ~(f : p -> bool) : bool =
+ with_return (fun { return } ->
+ fold t ~init:() ~f:(fun () p -> if f p then return true else ()) ;
+ false )
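For orientation, a minimal sketch of the moved API, written as if inside this module; u1 and u2 stand for existing Account_update.t values.

(* Build a two-level forest, flatten it back, and take the forest digest. *)
let demo (u1 : Account_update.t) (u2 : Account_update.t) =
  let forest = cons u1 ~calls:(cons u2 []) [] in
  let flat = to_account_updates forest in
  (* [flat] is [u1; u2]: parents precede their calls. *)
  (flat, hash forest)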
diff --git a/src/lib/mina_base/zkapp_command.ml b/src/lib/mina_base/zkapp_command.ml
index 0583e18f6ec..b2203540f3e 100644
--- a/src/lib/mina_base/zkapp_command.ml
+++ b/src/lib/mina_base/zkapp_command.ml
@@ -1,630 +1,6 @@
open Core_kernel
open Signature_lib
-module Call_forest = struct
- let empty = Outside_hash_image.t
-
- module Tree = struct
- [%%versioned
- module Stable = struct
- module V1 = struct
- type ('account_update, 'account_update_digest, 'digest) t =
- ( 'account_update
- , 'account_update_digest
- , 'digest )
- Mina_wire_types.Mina_base.Zkapp_command.Call_forest.Tree.V1.t =
- { account_update : 'account_update
- ; account_update_digest : 'account_update_digest
- ; calls :
- ( ('account_update, 'account_update_digest, 'digest) t
- , 'digest )
- With_stack_hash.Stable.V1.t
- list
- }
- [@@deriving sexp, compare, equal, hash, yojson]
-
- let to_latest = Fn.id
- end
- end]
-
- let rec fold_forest (ts : (_ t, _) With_stack_hash.t list) ~f ~init =
- List.fold ts ~init ~f:(fun acc { elt; stack_hash = _ } ->
- fold elt ~init:acc ~f )
-
- and fold { account_update; calls; account_update_digest = _ } ~f ~init =
- fold_forest calls ~f ~init:(f init account_update)
-
- let rec fold_forest2_exn (ts1 : (_ t, _) With_stack_hash.t list)
- (ts2 : (_ t, _) With_stack_hash.t list) ~f ~init =
- List.fold2_exn ts1 ts2 ~init
- ~f:(fun
- acc
- { elt = elt1; stack_hash = _ }
- { elt = elt2; stack_hash = _ }
- -> fold2_exn elt1 elt2 ~init:acc ~f )
-
- and fold2_exn
- { account_update = account_update1
- ; calls = calls1
- ; account_update_digest = _
- }
- { account_update = account_update2
- ; calls = calls2
- ; account_update_digest = _
- } ~f ~init =
- fold_forest2_exn calls1 calls2 ~f
- ~init:(f init account_update1 account_update2)
-
- let iter_forest2_exn ts1 ts2 ~f =
- fold_forest2_exn ts1 ts2 ~init:() ~f:(fun () p1 p2 -> f p1 p2)
-
- let iter2_exn ts1 ts2 ~f =
- fold2_exn ts1 ts2 ~init:() ~f:(fun () p1 p2 -> f p1 p2)
-
- let rec mapi_with_trees' ~i (t : _ t) ~f =
- let account_update = f i t.account_update t in
- let l, calls = mapi_forest_with_trees' ~i:(i + 1) t.calls ~f in
- ( l
- , { calls
- ; account_update
- ; account_update_digest = t.account_update_digest
- } )
-
- and mapi_forest_with_trees' ~i x ~f =
- let rec go i acc = function
- | [] ->
- (i, List.rev acc)
- | t :: ts ->
- let l, elt' = mapi_with_trees' ~i ~f (With_stack_hash.elt t) in
- go l (With_stack_hash.map t ~f:(fun _ -> elt') :: acc) ts
- in
- go i [] x
-
- let mapi_with_trees t ~f = mapi_with_trees' ~i:0 t ~f |> snd
-
- let mapi_forest_with_trees t ~f = mapi_forest_with_trees' ~i:0 t ~f |> snd
-
- let mapi' ~i t ~f =
- mapi_with_trees' ~i t ~f:(fun i account_update _ -> f i account_update)
-
- let mapi_forest' ~i t ~f =
- mapi_forest_with_trees' ~i t ~f:(fun i account_update _ ->
- f i account_update )
-
- let rec deferred_mapi_with_trees' ~i (t : _ t) ~f =
- let open Async_kernel.Deferred.Let_syntax in
- let%bind l, calls =
- deferred_mapi_forest_with_trees' ~i:(i + 1) t.calls ~f
- in
- let%map account_update = f i t.account_update t in
- ( l
- , { calls
- ; account_update
- ; account_update_digest = t.account_update_digest
- } )
-
- and deferred_mapi_forest_with_trees' ~i x ~f =
- let open Async_kernel.Deferred.Let_syntax in
- let rec go i acc = function
- | [] ->
- return (i, List.rev acc)
- | t :: ts ->
- let%bind l, elt' =
- deferred_mapi_with_trees' ~i ~f (With_stack_hash.elt t)
- in
- go l (With_stack_hash.map t ~f:(fun _ -> elt') :: acc) ts
- in
- go i [] x
-
- let map_forest ~f t = mapi_forest' ~i:0 ~f:(fun _ x -> f x) t |> snd
-
- let mapi_forest ~f t = mapi_forest' ~i:0 ~f t |> snd
-
- let deferred_map_forest ~f t =
- let open Async_kernel.Deferred in
- deferred_mapi_forest_with_trees' ~i:0 ~f:(fun _ x -> f x) t >>| snd
-
- let deferred_mapi_forest ~f t =
- let open Async_kernel.Deferred in
- deferred_mapi_forest_with_trees' ~i:0 ~f t >>| snd
-
- let hash { account_update = _; calls; account_update_digest } =
- let stack_hash =
- match calls with [] -> empty | e :: _ -> e.stack_hash
- in
- Random_oracle.hash ~init:Hash_prefix_states.account_update_node
- [| account_update_digest; stack_hash |]
- end
-
- type ('a, 'b, 'c) tree = ('a, 'b, 'c) Tree.t
-
- module type Digest_intf = sig
- module Account_update : sig
- include Digest_intf.S
-
- module Checked : sig
- include Digest_intf.S_checked
-
- val create :
- ?chain:Mina_signature_kind.t -> Account_update.Checked.t -> t
-
- val create_body :
- ?chain:Mina_signature_kind.t -> Account_update.Body.Checked.t -> t
- end
-
- include Digest_intf.S_aux with type t := t and type checked := Checked.t
-
- val create : ?chain:Mina_signature_kind.t -> Account_update.t -> t
-
- val create_body :
- ?chain:Mina_signature_kind.t -> Account_update.Body.t -> t
- end
-
- module rec Forest : sig
- include Digest_intf.S
-
- module Checked : sig
- include Digest_intf.S_checked
-
- val empty : t
-
- val cons : Tree.Checked.t -> t -> t
- end
-
- include Digest_intf.S_aux with type t := t and type checked := Checked.t
-
- val empty : t
-
- val cons : Tree.t -> Forest.t -> Forest.t
- end
-
- and Tree : sig
- include Digest_intf.S
-
- module Checked : sig
- include Digest_intf.S_checked
-
- val create :
- account_update:Account_update.Checked.t
- -> calls:Forest.Checked.t
- -> Tree.Checked.t
- end
-
- include Digest_intf.S_aux with type t := t and type checked := Checked.t
-
- val create : (_, Account_update.t, Forest.t) tree -> Tree.t
- end
- end
-
- module Make_digest_sig
- (T : Mina_wire_types.Mina_base.Zkapp_command.Digest_types.S) =
- struct
- module type S =
- Digest_intf
- with type Account_update.Stable.V1.t = T.Account_update.V1.t
- and type Forest.Stable.V1.t = T.Forest.V1.t
- end
-
- module Make_digest_types = struct
- module Account_update = struct
- [%%versioned
- module Stable = struct
- module V1 = struct
- type t = Kimchi_backend.Pasta.Basic.Fp.Stable.V1.t
- [@@deriving sexp, compare, equal, hash, yojson]
-
- let to_latest = Fn.id
- end
- end]
- end
-
- module Forest = struct
- [%%versioned
- module Stable = struct
- module V1 = struct
- type t = Kimchi_backend.Pasta.Basic.Fp.Stable.V1.t
- [@@deriving sexp, compare, equal, hash, yojson]
-
- let to_latest = Fn.id
- end
- end]
- end
-
- module Tree = struct
- [%%versioned
- module Stable = struct
- module V1 = struct
- type t = Kimchi_backend.Pasta.Basic.Fp.Stable.V1.t
- [@@deriving sexp, compare, equal, hash, yojson]
-
- let to_latest = Fn.id
- end
- end]
- end
- end
-
- module Make_digest_str
- (T : Mina_wire_types.Mina_base.Zkapp_command.Digest_concrete) :
- Make_digest_sig(T).S = struct
- module M = struct
- open Pickles.Impls.Step.Field
- module Checked = Pickles.Impls.Step.Field
-
- let typ = typ
-
- let constant = constant
- end
-
- module Account_update = struct
- include Make_digest_types.Account_update
- include M
-
- module Checked = struct
- include Checked
-
- let create = Account_update.Checked.digest
-
- let create_body = Account_update.Body.Checked.digest
- end
-
- let create : ?chain:Mina_signature_kind.t -> Account_update.t -> t =
- Account_update.digest
-
- let create_body :
- ?chain:Mina_signature_kind.t -> Account_update.Body.t -> t =
- Account_update.Body.digest
- end
-
- module Forest = struct
- include Make_digest_types.Forest
- include M
-
- module Checked = struct
- include Checked
-
- let empty = constant empty
-
- let cons hash h_tl =
- Random_oracle.Checked.hash
- ~init:Hash_prefix_states.account_update_cons [| hash; h_tl |]
- end
-
- let empty = empty
-
- let cons hash h_tl =
- Random_oracle.hash ~init:Hash_prefix_states.account_update_cons
- [| hash; h_tl |]
- end
-
- module Tree = struct
- include Make_digest_types.Tree
- include M
-
- module Checked = struct
- include Checked
-
- let create ~(account_update : Account_update.Checked.t)
- ~(calls : Forest.Checked.t) =
- Random_oracle.Checked.hash
- ~init:Hash_prefix_states.account_update_node
- [| (account_update :> t); (calls :> t) |]
- end
-
- let create ({ account_update = _; calls; account_update_digest } : _ tree)
- =
- let stack_hash =
- match calls with [] -> empty | e :: _ -> e.stack_hash
- in
- Random_oracle.hash ~init:Hash_prefix_states.account_update_node
- [| account_update_digest; stack_hash |]
- end
- end
-
- module Digest =
- Mina_wire_types.Mina_base.Zkapp_command.Digest_make
- (Make_digest_sig)
- (Make_digest_str)
-
- let fold = Tree.fold_forest
-
- let iteri t ~(f : int -> 'a -> unit) : unit =
- let (_ : int) = fold t ~init:0 ~f:(fun acc x -> f acc x ; acc + 1) in
- ()
-
- [%%versioned
- module Stable = struct
- module V1 = struct
- type ('account_update, 'account_update_digest, 'digest) t =
- ( ('account_update, 'account_update_digest, 'digest) Tree.Stable.V1.t
- , 'digest )
- With_stack_hash.Stable.V1.t
- list
- [@@deriving sexp, compare, equal, hash, yojson]
-
- let to_latest = Fn.id
- end
- end]
-
- module Shape = struct
- module I = struct
- type t = int
-
- let quickcheck_shrinker = Quickcheck.Shrinker.empty ()
-
- let quickcheck_generator = [%quickcheck.generator: int]
-
- let quickcheck_observer = [%quickcheck.observer: int]
- end
-
- type t = Node of (I.t * t) list [@@deriving quickcheck]
- end
-
- let rec shape (t : _ t) : Shape.t =
- Node (List.mapi t ~f:(fun i { elt; stack_hash = _ } -> (i, shape elt.calls)))
-
- let match_up (type a b) (xs : a list) (ys : (int * b) list) : (a * b) list =
- let rec go i_curr xs ys =
- match (xs, ys) with
- | [], [] ->
- []
- | x :: xs', (i, y) :: ys' ->
- if i_curr = i then (x, y) :: go (i_curr + 1) xs' ys'
- else if i_curr < i then go (i_curr + 1) xs' ys'
- else assert false
- | [], _ :: _ ->
- assert false
- | _ :: _, [] ->
- []
- in
- go 0 xs ys
-
- let rec mask (t : ('p, 'h1, unit) t) (Node shape : Shape.t) :
- ('p, 'h1, unit) t =
- List.map (match_up t shape)
- ~f:(fun ({ With_stack_hash.elt = t_sub; stack_hash = () }, shape_sub) ->
- { With_stack_hash.elt =
- { t_sub with calls = mask t_sub.calls shape_sub }
- ; stack_hash = ()
- } )
-
- let rec of_account_updates_map ~(f : 'p1 -> 'p2)
- ~(account_update_depth : 'p1 -> int) (account_updates : 'p1 list) :
- ('p2, unit, unit) t =
- match account_updates with
- | [] ->
- []
- | p :: ps ->
- let depth = account_update_depth p in
- let children, siblings =
- List.split_while ps ~f:(fun p' -> account_update_depth p' > depth)
- in
- { With_stack_hash.elt =
- { Tree.account_update = f p
- ; account_update_digest = ()
- ; calls = of_account_updates_map ~f ~account_update_depth children
- }
- ; stack_hash = ()
- }
- :: of_account_updates_map ~f ~account_update_depth siblings
-
- let of_account_updates ~account_update_depth account_updates =
- of_account_updates_map ~f:Fn.id ~account_update_depth account_updates
-
- let to_account_updates_map ~f (xs : _ t) =
- let rec collect depth (xs : _ t) acc =
- match xs with
- | [] ->
- acc
- | { elt = { account_update; calls; account_update_digest = _ }
- ; stack_hash = _
- }
- :: xs ->
- f ~depth account_update :: acc
- |> collect (depth + 1) calls
- |> collect depth xs
- in
- List.rev (collect 0 xs [])
-
- let to_account_updates xs =
- to_account_updates_map ~f:(fun ~depth:_ account_update -> account_update) xs
-
- let hd_account_update (xs : _ t) =
- match xs with
- | [] ->
- None
- | { elt = { account_update; calls = _; account_update_digest = _ }
- ; stack_hash = _
- }
- :: _ ->
- Some account_update
-
- let map = Tree.map_forest
-
- let mapi = Tree.mapi_forest
-
- let mapi_with_trees = Tree.mapi_forest_with_trees
-
- let deferred_mapi = Tree.deferred_mapi_forest
-
- let to_zkapp_command_with_hashes_list (xs : _ t) =
- let rec collect (xs : _ t) acc =
- match xs with
- | [] ->
- acc
- | { elt = { account_update; calls; account_update_digest = _ }
- ; stack_hash
- }
- :: xs ->
- (account_update, stack_hash) :: acc |> collect calls |> collect xs
- in
- List.rev (collect xs [])
-
- let hash_cons hash h_tl =
- Random_oracle.hash ~init:Hash_prefix_states.account_update_cons
- [| hash; h_tl |]
-
- let hash = function
- | [] ->
- Digest.Forest.empty
- | x :: _ ->
- With_stack_hash.stack_hash x
-
- let cons_tree tree (forest : _ t) : _ t =
- { elt = tree
- ; stack_hash = Digest.Forest.cons (Digest.Tree.create tree) (hash forest)
- }
- :: forest
-
- let cons_aux (type p) ~(digest_account_update : p -> _) ?(calls = [])
- (account_update : p) (xs : _ t) : _ t =
- let account_update_digest = digest_account_update account_update in
- let tree : _ Tree.t = { account_update; account_update_digest; calls } in
- cons_tree tree xs
-
- let cons ?calls (account_update : Account_update.t) xs =
- cons_aux ~digest_account_update:Digest.Account_update.create ?calls
- account_update xs
-
- let rec accumulate_hashes ~hash_account_update (xs : _ t) =
- let go = accumulate_hashes ~hash_account_update in
- match xs with
- | [] ->
- []
- | { elt = { account_update; calls; account_update_digest = _ }
- ; stack_hash = _
- }
- :: xs ->
- let calls = go calls in
- let xs = go xs in
- let node =
- { Tree.account_update
- ; calls
- ; account_update_digest = hash_account_update account_update
- }
- in
- let node_hash = Digest.Tree.create node in
- { elt = node; stack_hash = Digest.Forest.cons node_hash (hash xs) }
- :: xs
-
- let accumulate_hashes' (type a b) (xs : (Account_update.t, a, b) t) :
- (Account_update.t, Digest.Account_update.t, Digest.Forest.t) t =
- let hash_account_update (p : Account_update.t) =
- Digest.Account_update.create p
- in
- accumulate_hashes ~hash_account_update xs
-
- let accumulate_hashes_predicated xs =
- accumulate_hashes ~hash_account_update:Digest.Account_update.create xs
-
- module With_hashes_and_data = struct
- [%%versioned
- module Stable = struct
- module V1 = struct
- type 'data t =
- ( Account_update.Stable.V1.t * 'data
- , Digest.Account_update.Stable.V1.t
- , Digest.Forest.Stable.V1.t )
- Stable.V1.t
- [@@deriving sexp, compare, equal, hash, yojson]
-
- let to_latest = Fn.id
- end
- end]
-
- let empty = Digest.Forest.empty
-
- let hash_account_update ((p : Account_update.t), _) =
- Digest.Account_update.create p
-
- let accumulate_hashes xs : _ t = accumulate_hashes ~hash_account_update xs
-
- let of_zkapp_command_simple_list (xs : (Account_update.Simple.t * 'a) list)
- : _ t =
- of_account_updates xs
- ~account_update_depth:(fun ((p : Account_update.Simple.t), _) ->
- p.body.call_depth )
- |> map ~f:(fun (p, x) -> (Account_update.of_simple p, x))
- |> accumulate_hashes
-
- let of_account_updates (xs : (Account_update.Graphql_repr.t * 'a) list) :
- _ t =
- of_account_updates_map
- ~account_update_depth:(fun ((p : Account_update.Graphql_repr.t), _) ->
- p.body.call_depth )
- ~f:(fun (p, x) -> (Account_update.of_graphql_repr p, x))
- xs
- |> accumulate_hashes
-
- let to_account_updates (x : _ t) = to_account_updates x
-
- let to_zkapp_command_with_hashes_list (x : _ t) =
- to_zkapp_command_with_hashes_list x
-
- let account_updates_hash' xs = of_account_updates xs |> hash
-
- let account_updates_hash xs =
- List.map ~f:(fun x -> (x, ())) xs |> account_updates_hash'
- end
-
- module With_hashes = struct
- [%%versioned
- module Stable = struct
- module V1 = struct
- type t =
- ( Account_update.Stable.V1.t
- , Digest.Account_update.Stable.V1.t
- , Digest.Forest.Stable.V1.t )
- Stable.V1.t
- [@@deriving sexp, compare, equal, hash, yojson]
-
- let to_latest = Fn.id
- end
- end]
-
- let empty = Digest.Forest.empty
-
- let hash_account_update (p : Account_update.t) =
- Digest.Account_update.create p
-
- let accumulate_hashes xs : t = accumulate_hashes ~hash_account_update xs
-
- let of_zkapp_command_simple_list (xs : Account_update.Simple.t list) : t =
- of_account_updates xs
- ~account_update_depth:(fun (p : Account_update.Simple.t) ->
- p.body.call_depth )
- |> map ~f:Account_update.of_simple
- |> accumulate_hashes
-
- let of_account_updates (xs : Account_update.Graphql_repr.t list) : t =
- of_account_updates_map
- ~account_update_depth:(fun (p : Account_update.Graphql_repr.t) ->
- p.body.call_depth )
- ~f:(fun p -> Account_update.of_graphql_repr p)
- xs
- |> accumulate_hashes
-
- let to_account_updates (x : t) = to_account_updates x
-
- let to_zkapp_command_with_hashes_list (x : t) =
- to_zkapp_command_with_hashes_list x
-
- let account_updates_hash' xs = of_account_updates xs |> hash
-
- let account_updates_hash xs =
- List.map ~f:(fun x -> x) xs |> account_updates_hash'
- end
-
- let is_empty : _ t -> bool = List.is_empty
-
- let to_list (type p) (t : (p, _, _) t) : p list =
- List.rev @@ fold t ~init:[] ~f:(fun acc p -> p :: acc)
-
- let exists (type p) (t : (p, _, _) t) ~(f : p -> bool) : bool =
- with_return (fun { return } ->
- fold t ~init:() ~f:(fun () p -> if f p then return true else ()) ;
- false )
-end
-
module Graphql_repr = struct
[%%versioned
module Stable = struct
@@ -658,6 +34,7 @@ module Simple = struct
end]
end
+module Call_forest = Zkapp_call_forest_base
module Digest = Call_forest.Digest
module T = struct
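(* Hedged sketch of the refactoring pattern in this hunk: several hundred
   inline lines of [Call_forest] move into their own library, and a one-line
   alias keeps every existing [Zkapp_command.Call_forest.*] path compiling
   unchanged. [Extracted_base] below is a hypothetical stand-in for
   Zkapp_call_forest_base. *)
module Extracted_base = struct
  (* ...implementation formerly inlined in the consumer... *)
  let size = List.length
end

(* In the original module, the definition becomes an alias: *)
module Call_forest = Extracted_base

(* Downstream references are untouched by the move: *)
let _ : int = Call_forest.size [ (); () ]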
diff --git a/src/lib/mina_block/block.ml b/src/lib/mina_block/block.ml
index ce044757af2..ef25ea79b59 100644
--- a/src/lib/mina_block/block.ml
+++ b/src/lib/mina_block/block.ml
@@ -102,17 +102,6 @@ let transactions ~constraint_constants block =
|> Result.map_error ~f:Staged_ledger.Pre_diff_info.Error.to_error
|> Or_error.ok_exn
-let payments block =
- block |> body |> Staged_ledger_diff.Body.staged_ledger_diff
- |> Staged_ledger_diff.commands
- |> List.filter_map ~f:(function
- | { data = Signed_command ({ payload = { body = Payment _; _ }; _ } as c)
- ; status
- } ->
- Some { With_status.data = c; status }
- | _ ->
- None )
-
let account_ids_accessed ~constraint_constants t =
let transactions = transactions ~constraint_constants t in
List.map transactions ~f:(fun { data = txn; status } ->
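(* [payments] is deleted outright rather than relocated. A caller that still
   needs the payment-only view can rebuild it from the generic command list
   that [transactions] continues to expose; this hedged, self-contained
   sketch mirrors the deleted filter using plain stand-in variants, not the
   Mina types. *)
type status = Applied | Failed

type 'a with_status = { data : 'a; status : status }

type body = Payment of int | Stake_delegation

type command = Signed_command of body | Zkapp_command

let payments (cmds : command with_status list) : body with_status list =
  List.filter_map
    (fun { data; status } ->
      match data with
      | Signed_command (Payment _ as p) -> Some { data = p; status }
      | _ -> None )
    cmds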
diff --git a/src/lib/mina_block/block.mli b/src/lib/mina_block/block.mli
index 2fc84e2877f..dbfe00a144f 100644
--- a/src/lib/mina_block/block.mli
+++ b/src/lib/mina_block/block.mli
@@ -35,8 +35,6 @@ val transactions :
-> t
-> Transaction.t With_status.t list
-val payments : t -> Signed_command.t With_status.t list
-
val account_ids_accessed :
constraint_constants:Genesis_constants.Constraint_constants.t
-> t
diff --git a/src/lib/mina_compile_config/mina_compile_config.ml b/src/lib/mina_compile_config/mina_compile_config.ml
index 61f8d7f1d3b..17849c24d10 100644
--- a/src/lib/mina_compile_config/mina_compile_config.ml
+++ b/src/lib/mina_compile_config/mina_compile_config.ml
@@ -9,7 +9,6 @@ open Core_kernel
module Inputs = struct
type t =
{ curve_size : int
- ; default_transaction_fee_string : string
; default_snark_worker_fee_string : string
; minimum_user_command_fee_string : string
; itn_features : bool
@@ -21,25 +20,15 @@ module Inputs = struct
; rpc_handshake_timeout_sec : float
; rpc_heartbeat_timeout_sec : float
; rpc_heartbeat_send_every_sec : float
- ; zkapp_proof_update_cost : float
- ; zkapp_signed_pair_update_cost : float
- ; zkapp_signed_single_update_cost : float
- ; zkapp_transaction_cost_limit : float
- ; max_event_elements : int
- ; max_action_elements : int
- ; zkapp_cmd_limit_hardcap : int
; zkapps_disabled : bool
- ; sync_ledger_max_subtree_depth : int
- ; sync_ledger_default_subtree_depth : int
}
[@@deriving yojson, bin_io_unversioned]
end
type t =
{ curve_size : int
- ; default_transaction_fee : Currency.Fee.Stable.Latest.t
- ; default_snark_worker_fee : Currency.Fee.Stable.Latest.t
- ; minimum_user_command_fee : Currency.Fee.Stable.Latest.t
+ ; default_snark_worker_fee : Currency.Fee.t
+ ; minimum_user_command_fee : Currency.Fee.t
; itn_features : bool
; compaction_interval : Time.Span.t option
; block_window_duration : Time.Span.t
@@ -47,25 +36,14 @@ type t =
; network_id : string
; zkapp_cmd_limit : int option
; rpc_handshake_timeout : Time.Span.t
- ; rpc_heartbeat_timeout : Time.Span.t
- ; rpc_heartbeat_send_every : Time.Span.t
- ; zkapp_proof_update_cost : float
- ; zkapp_signed_pair_update_cost : float
- ; zkapp_signed_single_update_cost : float
- ; zkapp_transaction_cost_limit : float
- ; max_event_elements : int
- ; max_action_elements : int
- ; zkapp_cmd_limit_hardcap : int
+ ; rpc_heartbeat_timeout : Time_ns.Span.t
+ ; rpc_heartbeat_send_every : Time_ns.Span.t
; zkapps_disabled : bool
- ; sync_ledger_max_subtree_depth : int
- ; sync_ledger_default_subtree_depth : int
}
-[@@deriving bin_io_unversioned]
+[@@deriving sexp_of]
let make (inputs : Inputs.t) =
{ curve_size = inputs.curve_size
- ; default_transaction_fee =
- Currency.Fee.of_mina_string_exn inputs.default_transaction_fee_string
; default_snark_worker_fee =
Currency.Fee.of_mina_string_exn inputs.default_snark_worker_fee_string
; minimum_user_command_fee =
@@ -80,28 +58,17 @@ let make (inputs : Inputs.t) =
; vrf_poll_interval =
Float.of_int inputs.vrf_poll_interval_ms |> Time.Span.of_ms
; rpc_handshake_timeout = Time.Span.of_sec inputs.rpc_handshake_timeout_sec
- ; rpc_heartbeat_timeout = Time.Span.of_sec inputs.rpc_heartbeat_timeout_sec
+ ; rpc_heartbeat_timeout = Time_ns.Span.of_sec inputs.rpc_heartbeat_timeout_sec
; rpc_heartbeat_send_every =
- Time.Span.of_sec inputs.rpc_heartbeat_send_every_sec
- ; zkapp_proof_update_cost = inputs.zkapp_proof_update_cost
- ; zkapp_signed_pair_update_cost = inputs.zkapp_signed_pair_update_cost
- ; zkapp_signed_single_update_cost = inputs.zkapp_signed_single_update_cost
- ; zkapp_transaction_cost_limit = inputs.zkapp_transaction_cost_limit
- ; max_event_elements = inputs.max_event_elements
- ; max_action_elements = inputs.max_action_elements
+ Time_ns.Span.of_sec inputs.rpc_heartbeat_send_every_sec
; network_id = inputs.network_id
; zkapp_cmd_limit = inputs.zkapp_cmd_limit
- ; zkapp_cmd_limit_hardcap = inputs.zkapp_cmd_limit_hardcap
; zkapps_disabled = inputs.zkapps_disabled
- ; sync_ledger_max_subtree_depth = inputs.sync_ledger_max_subtree_depth
- ; sync_ledger_default_subtree_depth = inputs.sync_ledger_default_subtree_depth
}
let to_yojson t =
`Assoc
[ ("curve_size", `Int t.curve_size)
- ; ( "default_transaction_fee"
- , Currency.Fee.to_yojson t.default_transaction_fee )
; ( "default_snark_worker_fee"
, Currency.Fee.to_yojson t.default_snark_worker_fee )
; ( "minimum_user_command_fee"
@@ -116,25 +83,14 @@ let to_yojson t =
; ( "rpc_handshake_timeout"
, `Float (Time.Span.to_sec t.rpc_handshake_timeout) )
; ( "rpc_heartbeat_timeout"
- , `Float (Time.Span.to_sec t.rpc_heartbeat_timeout) )
+ , `Float (Time_ns.Span.to_sec t.rpc_heartbeat_timeout) )
; ( "rpc_heartbeat_send_every"
- , `Float (Time.Span.to_sec t.rpc_heartbeat_send_every) )
- ; ("zkapp_proof_update_cost", `Float t.zkapp_proof_update_cost)
- ; ("zkapp_signed_pair_update_cost", `Float t.zkapp_signed_pair_update_cost)
- ; ( "zkapp_signed_single_update_cost"
- , `Float t.zkapp_signed_single_update_cost )
- ; ("zkapp_transaction_cost_limit", `Float t.zkapp_transaction_cost_limit)
- ; ("max_event_elements", `Int t.max_event_elements)
- ; ("max_action_elements", `Int t.max_action_elements)
+ , `Float (Time_ns.Span.to_sec t.rpc_heartbeat_send_every) )
; ("network_id", `String t.network_id)
; ( "zkapp_cmd_limit"
, Option.value_map ~default:`Null ~f:(fun x -> `Int x) t.zkapp_cmd_limit
)
- ; ("zkapp_cmd_limit_hardcap", `Int t.zkapp_cmd_limit_hardcap)
; ("zkapps_disabled", `Bool t.zkapps_disabled)
- ; ("sync_ledger_max_subtree_depth", `Int t.sync_ledger_max_subtree_depth)
- ; ( "sync_ledger_default_subtree_depth"
- , `Int t.sync_ledger_default_subtree_depth )
]
(* TODO: Delete this module and read in a value from the environment *)
@@ -142,32 +98,18 @@ module Compiled = struct
let t : t =
let (inputs : Inputs.t) =
{ curve_size = Node_config.curve_size
- ; default_transaction_fee_string = Node_config.default_transaction_fee
; default_snark_worker_fee_string = Node_config.default_snark_worker_fee
; minimum_user_command_fee_string = Node_config.minimum_user_command_fee
; itn_features = Node_config.itn_features
; compaction_interval_ms = Node_config.compaction_interval
; block_window_duration_ms = Node_config.block_window_duration
; vrf_poll_interval_ms = Node_config.vrf_poll_interval
- ; rpc_handshake_timeout_sec = Node_config.rpc_handshake_timeout_sec
- ; rpc_heartbeat_timeout_sec = Node_config.rpc_heartbeat_timeout_sec
- ; rpc_heartbeat_send_every_sec = Node_config.rpc_heartbeat_send_every_sec
- ; zkapp_proof_update_cost = Node_config.zkapp_proof_update_cost
- ; zkapp_signed_pair_update_cost =
- Node_config.zkapp_signed_pair_update_cost
- ; zkapp_signed_single_update_cost =
- Node_config.zkapp_signed_single_update_cost
- ; zkapp_transaction_cost_limit = Node_config.zkapp_transaction_cost_limit
- ; max_event_elements = Node_config.max_event_elements
- ; max_action_elements = Node_config.max_action_elements
; network_id = Node_config.network
; zkapp_cmd_limit = Node_config.zkapp_cmd_limit
- ; zkapp_cmd_limit_hardcap = Node_config.zkapp_cmd_limit_hardcap
- ; zkapps_disabled = Node_config.zkapps_disabled
- ; sync_ledger_max_subtree_depth =
- Node_config.sync_ledger_max_subtree_depth
- ; sync_ledger_default_subtree_depth =
- Node_config.sync_ledger_default_subtree_depth
+ ; rpc_handshake_timeout_sec = 60.0
+ ; rpc_heartbeat_timeout_sec = 60.0
+ ; rpc_heartbeat_send_every_sec = 10.0
+ ; zkapps_disabled = false
}
in
make inputs
@@ -177,8 +119,6 @@ module For_unit_tests = struct
let t : t =
let inputs : Inputs.t =
{ curve_size = Node_config_for_unit_tests.curve_size
- ; default_transaction_fee_string =
- Node_config_for_unit_tests.default_transaction_fee
; default_snark_worker_fee_string =
Node_config_for_unit_tests.default_snark_worker_fee
; minimum_user_command_fee_string =
@@ -194,25 +134,9 @@ module For_unit_tests = struct
Node_config_for_unit_tests.rpc_heartbeat_timeout_sec
; rpc_heartbeat_send_every_sec =
Node_config_for_unit_tests.rpc_heartbeat_send_every_sec
- ; zkapp_proof_update_cost =
- Node_config_for_unit_tests.zkapp_proof_update_cost
- ; zkapp_signed_pair_update_cost =
- Node_config_for_unit_tests.zkapp_signed_pair_update_cost
- ; zkapp_signed_single_update_cost =
- Node_config_for_unit_tests.zkapp_signed_single_update_cost
- ; zkapp_transaction_cost_limit =
- Node_config_for_unit_tests.zkapp_transaction_cost_limit
- ; max_event_elements = Node_config_for_unit_tests.max_event_elements
- ; max_action_elements = Node_config_for_unit_tests.max_action_elements
; network_id = Node_config_for_unit_tests.network
; zkapp_cmd_limit = Node_config_for_unit_tests.zkapp_cmd_limit
- ; zkapp_cmd_limit_hardcap =
- Node_config_for_unit_tests.zkapp_cmd_limit_hardcap
; zkapps_disabled = Node_config_for_unit_tests.zkapps_disabled
- ; sync_ledger_max_subtree_depth =
- Node_config_for_unit_tests.sync_ledger_max_subtree_depth
- ; sync_ledger_default_subtree_depth =
- Node_config_for_unit_tests.sync_ledger_default_subtree_depth
}
in
make inputs
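(* Hedged sketch of the conversion done in [make] above: the serializable
   [Inputs] record carries plain floats, and [make] lifts them into typed
   spans, with the RPC heartbeat fields now landing in [Time_ns.Span] rather
   than [Time.Span]. Field set trimmed to the three RPC timeouts. *)
open Core_kernel

type inputs =
  { rpc_handshake_timeout_sec : float
  ; rpc_heartbeat_timeout_sec : float
  ; rpc_heartbeat_send_every_sec : float
  }

type t =
  { rpc_handshake_timeout : Time.Span.t
  ; rpc_heartbeat_timeout : Time_ns.Span.t
  ; rpc_heartbeat_send_every : Time_ns.Span.t
  }

let make (i : inputs) : t =
  { rpc_handshake_timeout = Time.Span.of_sec i.rpc_handshake_timeout_sec
  ; rpc_heartbeat_timeout = Time_ns.Span.of_sec i.rpc_heartbeat_timeout_sec
  ; rpc_heartbeat_send_every =
      Time_ns.Span.of_sec i.rpc_heartbeat_send_every_sec
  }

(* The now-hardcoded defaults from the [Compiled] hunk: *)
let _ : t =
  make
    { rpc_handshake_timeout_sec = 60.0
    ; rpc_heartbeat_timeout_sec = 60.0
    ; rpc_heartbeat_send_every_sec = 10.0
    }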
diff --git a/src/lib/mina_graphql/mina_graphql.ml b/src/lib/mina_graphql/mina_graphql.ml
index 08f276a9fbc..0378ca6bece 100644
--- a/src/lib/mina_graphql/mina_graphql.ml
+++ b/src/lib/mina_graphql/mina_graphql.ml
@@ -936,8 +936,10 @@ module Mutations = struct
"Could not find an archive process to connect to"
in
let%map () =
- Mina_lib.Archive_client.dispatch_precomputed_block archive_location
- block
+ Mina_lib.Archive_client.dispatch_precomputed_block
+ ~compile_config:
+ (Mina_lib.config mina).precomputed_values.compile_config
+ archive_location block
|> Deferred.Result.map_error ~f:Error.to_string_hum
in
() )
@@ -967,8 +969,10 @@ module Mutations = struct
"Could not find an archive process to connect to"
in
let%map () =
- Mina_lib.Archive_client.dispatch_extensional_block archive_location
- block
+ Mina_lib.Archive_client.dispatch_extensional_block
+ ~compile_config:
+ (Mina_lib.config mina).precomputed_values.compile_config
+ archive_location block
|> Deferred.Result.map_error ~f:Error.to_string_hum
in
() )
@@ -2659,7 +2663,7 @@ module Queries = struct
~args:Arg.[]
~resolve:(fun { ctx = mina; _ } () ->
let open Deferred.Result.Let_syntax in
- Mina_lib.verifier mina |> Verifier.get_blockchain_verification_key
+ Mina_lib.prover mina |> Prover.get_blockchain_verification_key
|> Deferred.Result.map_error ~f:Error.to_string_hum
>>| Pickles.Verification_key.to_yojson >>| Yojson.Safe.to_basic )
@@ -2672,7 +2676,13 @@ module Queries = struct
~args:Arg.[]
~resolve:(fun { ctx = mina; _ } () ->
let cfg = Mina_lib.config mina in
- "mina:" ^ cfg.compile_config.network_id )
+ let runtime_cfg = Mina_lib.runtime_config mina in
+ let network_id =
+ Option.value ~default:cfg.compile_config.network_id
+ @@ let%bind.Option daemon = runtime_cfg.daemon in
+ daemon.network_id
+ in
+ "mina:" ^ network_id )
let signature_kind =
field "signatureKind"
diff --git a/src/lib/mina_ledger/ledger.ml b/src/lib/mina_ledger/ledger.ml
index d84b7a9afcc..2fe2c10deeb 100644
--- a/src/lib/mina_ledger/ledger.ml
+++ b/src/lib/mina_ledger/ledger.ml
@@ -341,33 +341,6 @@ module Ledger_inner = struct
Debug_assert.debug_assert (fun () ->
[%test_eq: Ledger_hash.t] start_hash (merkle_root ledger) ) ;
(merkle_path ledger new_loc, Account.empty)
-
- let _handler t =
- let open Snark_params.Tick in
- let path_exn idx =
- List.map (merkle_path_at_index_exn t idx) ~f:(function
- | `Left h ->
- h
- | `Right h ->
- h )
- in
- stage (fun (With { request; respond }) ->
- match request with
- | Ledger_hash.Get_element idx ->
- let elt = get_at_index_exn t idx in
- let path = (path_exn idx :> Random_oracle.Digest.t list) in
- respond (Provide (elt, path))
- | Ledger_hash.Get_path idx ->
- let path = (path_exn idx :> Random_oracle.Digest.t list) in
- respond (Provide path)
- | Ledger_hash.Set (idx, account) ->
- set_at_index_exn t idx account ;
- respond (Provide ())
- | Ledger_hash.Find_index pk ->
- let index = index_of_account_exn t pk in
- respond (Provide index)
- | _ ->
- unhandled )
end
include Ledger_inner
diff --git a/src/lib/mina_ledger/sparse_ledger.ml b/src/lib/mina_ledger/sparse_ledger.ml
index fec47bf2266..e89b1082d5a 100644
--- a/src/lib/mina_ledger/sparse_ledger.ml
+++ b/src/lib/mina_ledger/sparse_ledger.ml
@@ -179,7 +179,9 @@ let apply_zkapp_second_pass_unchecked_with_states ~init ledger c =
|> Result.map ~f:(fun (account_update_applied, rev_states) ->
let module LS = Mina_transaction_logic.Zkapp_command_logic.Local_state
in
- let module Applied = T.Transaction_applied.Zkapp_command_applied in
+ let module Applied =
+ Mina_transaction_logic.Transaction_applied.Zkapp_command_applied
+ in
let states =
match rev_states with
| [] ->
diff --git a/src/lib/mina_lib/archive_client.ml b/src/lib/mina_lib/archive_client.ml
index 7f7e49790e8..396f878ddfe 100644
--- a/src/lib/mina_lib/archive_client.ml
+++ b/src/lib/mina_lib/archive_client.ml
@@ -2,7 +2,7 @@ open Core_kernel
open Async_kernel
open Pipe_lib
-let dispatch ?(max_tries = 5) ~logger
+let dispatch ?(max_tries = 5) ~logger ~compile_config
(archive_location : Host_and_port.t Cli_lib.Flag.Types.with_name) diff =
let rec go tries_left errs =
if Int.( <= ) tries_left 0 then
@@ -20,7 +20,7 @@ let dispatch ?(max_tries = 5) ~logger
[%sexp_of: (string * Host_and_port.t) * (string * string)] ) )
else
match%bind
- Daemon_rpcs.Client.dispatch Archive_lib.Rpc.t diff
+ Daemon_rpcs.Client.dispatch ~compile_config Archive_lib.Rpc.t diff
archive_location.value
with
| Ok () ->
@@ -33,7 +33,7 @@ let dispatch ?(max_tries = 5) ~logger
in
go max_tries []
-let make_dispatch_block rpc ?(max_tries = 5)
+let make_dispatch_block ~compile_config rpc ?(max_tries = 5)
(archive_location : Host_and_port.t Cli_lib.Flag.Types.with_name) block =
let rec go tries_left errs =
if Int.( <= ) tries_left 0 then
@@ -51,7 +51,8 @@ let make_dispatch_block rpc ?(max_tries = 5)
[%sexp_of: (string * Host_and_port.t) * (string * string)] ) )
else
match%bind
- Daemon_rpcs.Client.dispatch rpc block archive_location.value
+ Daemon_rpcs.Client.dispatch ~compile_config rpc block
+ archive_location.value
with
| Ok () ->
return (Ok ())
@@ -111,7 +112,8 @@ let run ~logger ~precomputed_values
, `Float (Time.Span.to_ms (Time.diff diff_time start)) )
] ;
match%map
- dispatch archive_location ~logger (Transition_frontier diff)
+ dispatch ~compile_config:precomputed_values.compile_config
+ archive_location ~logger (Transition_frontier diff)
with
| Ok () ->
[%log debug]
diff --git a/src/lib/mina_lib/archive_client.mli b/src/lib/mina_lib/archive_client.mli
index 612e04859be..0cebd18f41b 100644
--- a/src/lib/mina_lib/archive_client.mli
+++ b/src/lib/mina_lib/archive_client.mli
@@ -2,13 +2,15 @@ open Core
open Pipe_lib
val dispatch_precomputed_block :
- ?max_tries:int
+ compile_config:Mina_compile_config.t
+ -> ?max_tries:int
-> Host_and_port.t Cli_lib.Flag.Types.with_name
-> Mina_block.Precomputed.t
-> unit Async.Deferred.Or_error.t
val dispatch_extensional_block :
- ?max_tries:int
+ compile_config:Mina_compile_config.t
+ -> ?max_tries:int
-> Host_and_port.t Cli_lib.Flag.Types.with_name
-> Archive_lib.Extensional.Block.t
-> unit Async.Deferred.Or_error.t
diff --git a/src/lib/mina_lib/mina_lib.ml b/src/lib/mina_lib/mina_lib.ml
index 70c0fe033d6..6b12bededdf 100644
--- a/src/lib/mina_lib/mina_lib.ml
+++ b/src/lib/mina_lib/mina_lib.ml
@@ -943,7 +943,8 @@ let add_full_transactions t user_commands =
List.find_map user_commands ~f:(fun cmd ->
match
User_command.check_well_formedness
- ~genesis_constants:t.config.precomputed_values.genesis_constants cmd
+ ~genesis_constants:t.config.precomputed_values.genesis_constants
+ ~compile_config:t.config.precomputed_values.compile_config cmd
with
| Ok () ->
None
@@ -975,6 +976,7 @@ let add_zkapp_transactions t (zkapp_commands : Zkapp_command.t list) =
match
User_command.check_well_formedness
~genesis_constants:t.config.precomputed_values.genesis_constants
+ ~compile_config:t.config.precomputed_values.compile_config
(Zkapp_command cmd)
with
| Ok () ->
@@ -1497,6 +1499,7 @@ let create ~commit_id ?wallets (config : Config.t) =
let catchup_mode = if config.super_catchup then `Super else `Normal in
let constraint_constants = config.precomputed_values.constraint_constants in
let consensus_constants = config.precomputed_values.consensus_constants in
+ let compile_config = config.precomputed_values.compile_config in
let block_window_duration = config.compile_config.block_window_duration in
let monitor = Option.value ~default:(Monitor.create ()) config.monitor in
Async.Scheduler.within' ~monitor (fun () ->
@@ -1561,15 +1564,23 @@ let create ~commit_id ?wallets (config : Config.t) =
~metadata:[ ("exn", Error_json.error_to_yojson err) ] ) )
(fun () ->
O1trace.thread "manage_verifier_subprocess" (fun () ->
+ let%bind blockchain_verification_key =
+ Prover.get_blockchain_verification_key prover
+ >>| Or_error.ok_exn
+ in
+ let%bind transaction_verification_key =
+ Prover.get_transaction_verification_key prover
+ >>| Or_error.ok_exn
+ in
let%bind verifier =
Verifier.create ~commit_id ~logger:config.logger
~enable_internal_tracing:
(Internal_tracing.is_enabled ())
~internal_trace_filename:"verifier-internal-trace.jsonl"
~proof_level:config.precomputed_values.proof_level
- ~constraint_constants:
- config.precomputed_values.constraint_constants
- ~pids:config.pids ~conf_dir:(Some config.conf_dir) ()
+ ~pids:config.pids ~conf_dir:(Some config.conf_dir)
+ ~blockchain_verification_key
+ ~transaction_verification_key ()
in
let%map () = set_itn_data (module Verifier) verifier in
verifier ) )
@@ -1812,7 +1823,7 @@ let create ~commit_id ?wallets (config : Config.t) =
~pool_max_size:
config.precomputed_values.genesis_constants.txpool_max_size
~genesis_constants:config.precomputed_values.genesis_constants
- ~slot_tx_end
+ ~slot_tx_end ~compile_config
in
let first_received_message_signal = Ivar.create () in
let online_status, notify_online_impl =
@@ -1868,7 +1879,7 @@ let create ~commit_id ?wallets (config : Config.t) =
; consensus_constants
; genesis_constants = config.precomputed_values.genesis_constants
; constraint_constants
- ; block_window_duration
+ ; compile_config
}
in
let sinks = (block_sink, tx_remote_sink, snark_remote_sink) in
@@ -2249,7 +2260,7 @@ let get_filtered_log_entries
in
(get_from_idx curr_idx messages [], is_started)
-let verifier { processes = { verifier; _ }; _ } = verifier
+let prover { processes = { prover; _ }; _ } = prover
let vrf_evaluator { processes = { vrf_evaluator; _ }; _ } = vrf_evaluator
diff --git a/src/lib/mina_lib/mina_lib.mli b/src/lib/mina_lib/mina_lib.mli
index cb8e2c22f11..0789d030d3d 100644
--- a/src/lib/mina_lib/mina_lib.mli
+++ b/src/lib/mina_lib/mina_lib.mli
@@ -233,7 +233,7 @@ val start_filtered_log : t -> string list -> unit Or_error.t
val get_filtered_log_entries : t -> int -> string list * bool
-val verifier : t -> Verifier.t
+val prover : t -> Prover.t
val vrf_evaluator : t -> Vrf_evaluator.t
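(* Hedged sketch of the start-up ordering the mina_lib.ml hunk introduces:
   the verifier no longer derives keys from constraint constants; it is built
   from verification keys fetched from the already-running prover. All names
   below are illustrative stand-ins, not the Mina_lib API. *)
type vk = Vk of string

type prover =
  { get_blockchain_vk : unit -> vk; get_transaction_vk : unit -> vk }

type verifier = { blockchain_vk : vk; transaction_vk : vk }

let create_verifier (p : prover) : verifier =
  (* Fetch both keys first (in the real code this is deferred and can fail),
     then construct the verifier from them. *)
  let blockchain_vk = p.get_blockchain_vk () in
  let transaction_vk = p.get_transaction_vk () in
  { blockchain_vk; transaction_vk }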
diff --git a/src/lib/mina_lib/tests/tests.ml b/src/lib/mina_lib/tests/tests.ml
index 791b4027f59..4ebfcc653ee 100644
--- a/src/lib/mina_lib/tests/tests.ml
+++ b/src/lib/mina_lib/tests/tests.ml
@@ -36,11 +36,6 @@ let%test_module "Epoch ledger sync tests" =
let dir_prefix = "sync_test_data"
- let genesis_constants = Genesis_constants.For_unit_tests.t
-
- let constraint_constants =
- Genesis_constants.For_unit_tests.Constraint_constants.t
-
let make_dirname s =
let open Core in
let uuid = Uuid_unix.create () |> Uuid.to_string in
@@ -51,7 +46,9 @@ let%test_module "Epoch ledger sync tests" =
let runtime_config : Runtime_config.t =
{ daemon = None
; genesis = None
- ; proof = None
+ ; proof =
+ Some
+ { Runtime_config.Proof_keys.default with level = Some No_check }
; ledger =
Some
{ base = Named "test"
@@ -66,10 +63,11 @@ let%test_module "Epoch ledger sync tests" =
}
in
match%map
- Genesis_ledger_helper.init_from_config_file
+ Genesis_ledger_helper.Config_loader.init_from_config_file
~genesis_dir:(make_dirname "genesis_dir")
- ~constraint_constants ~genesis_constants ~logger
- ~proof_level:No_check runtime_config ~cli_proof_level:None
+ ~constants:
+ (Runtime_config.Constants.magic_for_unit_tests runtime_config)
+ ~logger runtime_config
with
| Ok (precomputed_values, _) ->
precomputed_values
@@ -142,10 +140,10 @@ let%test_module "Epoch ledger sync tests" =
let make_verifier (module Context : CONTEXT) =
let open Context in
- Verifier.create ~logger ~proof_level:precomputed_values.proof_level
- ~constraint_constants:precomputed_values.constraint_constants ~pids
+ Verifier.For_tests.default ~constraint_constants ~logger
+ ~proof_level:precomputed_values.proof_level ~pids
~conf_dir:(Some (make_dirname "verifier"))
- ~commit_id:"not specified for unit tests" ()
+ ()
let make_empty_ledger (module Context : CONTEXT) =
Mina_ledger.Ledger.create
@@ -185,7 +183,7 @@ let%test_module "Epoch ledger sync tests" =
; consensus_constants
; genesis_constants = precomputed_values.genesis_constants
; constraint_constants
- ; block_window_duration = compile_config.block_window_duration
+ ; compile_config
}
in
let _transaction_pool, tx_remote_sink, _tx_local_sink =
@@ -194,7 +192,7 @@ let%test_module "Epoch ledger sync tests" =
~trust_system
~pool_max_size:precomputed_values.genesis_constants.txpool_max_size
~genesis_constants:precomputed_values.genesis_constants
- ~slot_tx_end:None
+ ~slot_tx_end:None ~compile_config
in
Network_pool.Transaction_pool.create ~config ~constraint_constants
~consensus_constants ~time_controller ~logger
@@ -273,7 +271,6 @@ let%test_module "Epoch ledger sync tests" =
; time_controller
; pubsub_v1
; pubsub_v0
- ; block_window_duration = compile_config.block_window_duration
}
in
Mina_networking.Gossip_net.(
diff --git a/src/lib/mina_metrics/prometheus_metrics/dune b/src/lib/mina_metrics/prometheus_metrics/dune
index 6ce3904ca3d..97833ca2ae2 100644
--- a/src/lib/mina_metrics/prometheus_metrics/dune
+++ b/src/lib/mina_metrics/prometheus_metrics/dune
@@ -22,7 +22,6 @@
;; local libraries
logger
o1trace
- mina_node_config
)
(instrumentation (backend bisect_ppx))
(preprocess (pps ppx_mina ppx_let ppx_version ppx_pipebang ppx_custom_printf ppx_here))
diff --git a/src/lib/mina_net2/libp2p_helper.ml b/src/lib/mina_net2/libp2p_helper.ml
index 2ef08749016..116a85be3c1 100644
--- a/src/lib/mina_net2/libp2p_helper.ml
+++ b/src/lib/mina_net2/libp2p_helper.ml
@@ -221,8 +221,7 @@ let handle_incoming_message t msg ~handle_push_message =
handle_push_message t (DaemonInterface.PushMessage.get push_msg) )
)
| Undefined n ->
- Libp2p_ipc.undefined_union ~context:"DaemonInterface.Message" n ;
- Deferred.unit
+ Libp2p_ipc.undefined_union ~context:"DaemonInterface.Message" n
let spawn ?(allow_multiple_instances = false) ~logger ~pids ~conf_dir
~handle_push_message () =
diff --git a/src/lib/mina_net2/mina_net2.ml b/src/lib/mina_net2/mina_net2.ml
index d2db2d0bbad..a649a5d978c 100644
--- a/src/lib/mina_net2/mina_net2.ml
+++ b/src/lib/mina_net2/mina_net2.ml
@@ -94,7 +94,6 @@ type t =
; mutable banned_ips : Unix.Inet_addr.t list
; peer_connected_callback : string -> unit
; peer_disconnected_callback : string -> unit
- ; block_window_duration : Time.Span.t
}
let banned_ips t = t.banned_ips
@@ -383,8 +382,7 @@ let handle_push_message t push_message =
upon
(O1trace.thread "validate_libp2p_gossip" (fun () ->
Subscription.handle_and_validate sub ~validation_expiration
- ~sender ~data
- ~block_window_duration:t.block_window_duration ) )
+ ~sender ~data ) )
(function
| `Validation_timeout ->
[%log' warn t.logger]
@@ -545,8 +543,7 @@ let handle_push_message t push_message =
Libp2p_ipc.undefined_union ~context:"DaemonInterface.PushMessage" n
let create ?(allow_multiple_instances = false) ~all_peers_seen_metric ~logger
- ~pids ~conf_dir ~on_peer_connected ~on_peer_disconnected
- ~block_window_duration () =
+ ~pids ~conf_dir ~on_peer_connected ~on_peer_disconnected () =
let open Deferred.Or_error.Let_syntax in
let push_message_handler =
ref (fun _msg ->
@@ -577,7 +574,6 @@ let create ?(allow_multiple_instances = false) ~all_peers_seen_metric ~logger
; peer_disconnected_callback =
(fun peer_id -> on_peer_disconnected (Peer.Id.unsafe_of_string peer_id))
; protocol_handlers = Hashtbl.create (module String)
- ; block_window_duration
}
in
(push_message_handler := fun msg -> handle_push_message t msg) ;
diff --git a/src/lib/mina_net2/mina_net2.mli b/src/lib/mina_net2/mina_net2.mli
index f6f8cfd927b..6b76e6a7049 100644
--- a/src/lib/mina_net2/mina_net2.mli
+++ b/src/lib/mina_net2/mina_net2.mli
@@ -139,7 +139,6 @@ val create :
-> conf_dir:string
-> on_peer_connected:(Peer.Id.t -> unit)
-> on_peer_disconnected:(Peer.Id.t -> unit)
- -> block_window_duration:Time.Span.t
-> unit
-> t Deferred.Or_error.t
diff --git a/src/lib/mina_net2/subscription.ml b/src/lib/mina_net2/subscription.ml
index 61b15d96af5..69ca2a4959e 100644
--- a/src/lib/mina_net2/subscription.ml
+++ b/src/lib/mina_net2/subscription.ml
@@ -50,7 +50,7 @@ let unsubscribe ~helper sub =
else Deferred.Or_error.error_string "already unsubscribed"
let handle_and_validate sub ~validation_expiration ~(sender : Peer.t)
- ~data:raw_data ~block_window_duration =
+ ~data:raw_data =
let open Libp2p_ipc.Reader.ValidationResult in
let wrap_message data =
if
@@ -65,9 +65,7 @@ let handle_and_validate sub ~validation_expiration ~(sender : Peer.t)
Validation_callback.create validation_expiration
in
let%bind () = sub.validator (wrap_message data) validation_callback in
- match%map
- Validation_callback.await ~block_window_duration validation_callback
- with
+ match%map Validation_callback.await validation_callback with
| Some `Accept ->
`Validation_result Accept
| Some `Reject ->
diff --git a/src/lib/mina_net2/subscription.mli b/src/lib/mina_net2/subscription.mli
index 022763618e3..6809c782440 100644
--- a/src/lib/mina_net2/subscription.mli
+++ b/src/lib/mina_net2/subscription.mli
@@ -29,7 +29,6 @@ val handle_and_validate :
-> validation_expiration:Time_ns.t
-> sender:Peer.t
-> data:string
- -> block_window_duration:Time.Span.t
-> [ `Validation_result of Libp2p_ipc.validation_result
| `Validation_timeout
| `Decoding_error of Error.t ]
diff --git a/src/lib/mina_net2/tests/all_ipc.ml b/src/lib/mina_net2/tests/all_ipc.ml
index de778d0f125..4027c6cea31 100644
--- a/src/lib/mina_net2/tests/all_ipc.ml
+++ b/src/lib/mina_net2/tests/all_ipc.ml
@@ -78,9 +78,6 @@ let%test_module "all-ipc test" =
let bob_status =
"This is major Tom to ground control\nI'm stepping through the door"
- let block_window_duration =
- Mina_compile_config.For_unit_tests.t.block_window_duration
-
type messages =
{ topic_a_msg_1 : string
; topic_a_msg_2 : string
@@ -543,8 +540,7 @@ let%test_module "all-ipc test" =
let%bind node =
create ~all_peers_seen_metric:false
~logger:(Logger.extend logger [ ("name", `String local_name) ])
- ~conf_dir ~pids ~on_peer_connected ~on_peer_disconnected
- ~block_window_duration ()
+ ~conf_dir ~pids ~on_peer_connected ~on_peer_disconnected ()
>>| Or_error.ok_exn
in
let%bind kp_a =
diff --git a/src/lib/mina_net2/tests/tests.ml b/src/lib/mina_net2/tests/tests.ml
index 3bfefc9c951..865392609c1 100644
--- a/src/lib/mina_net2/tests/tests.ml
+++ b/src/lib/mina_net2/tests/tests.ml
@@ -11,9 +11,6 @@ let%test_module "Mina network tests" =
let pids = Child_processes.Termination.create_pid_table ()
- let block_window_duration =
- Mina_compile_config.For_unit_tests.t.block_window_duration
-
let setup_two_nodes network_id =
let%bind a_tmp = Unix.mkdtemp "p2p_helper_test_a" in
let%bind b_tmp = Unix.mkdtemp "p2p_helper_test_b" in
@@ -22,21 +19,21 @@ let%test_module "Mina network tests" =
create ~all_peers_seen_metric:false
~logger:(Logger.extend logger [ ("name", `String "a") ])
~conf_dir:a_tmp ~pids ~on_peer_connected:Fn.ignore
- ~on_peer_disconnected:Fn.ignore ~block_window_duration ()
+ ~on_peer_disconnected:Fn.ignore ()
>>| Or_error.ok_exn
in
let%bind b =
create ~all_peers_seen_metric:false
~logger:(Logger.extend logger [ ("name", `String "b") ])
~conf_dir:b_tmp ~pids ~on_peer_connected:Fn.ignore
- ~on_peer_disconnected:Fn.ignore ~block_window_duration ()
+ ~on_peer_disconnected:Fn.ignore ()
>>| Or_error.ok_exn
in
let%bind c =
create ~all_peers_seen_metric:false
~logger:(Logger.extend logger [ ("name", `String "c") ])
~conf_dir:c_tmp ~pids ~on_peer_connected:Fn.ignore
- ~on_peer_disconnected:Fn.ignore ~block_window_duration ()
+ ~on_peer_disconnected:Fn.ignore ()
>>| Or_error.ok_exn
in
let%bind kp_a = generate_random_keypair a in
diff --git a/src/lib/mina_net2/validation_callback.ml b/src/lib/mina_net2/validation_callback.ml
index 7688067a7e7..a7da7b1ecec 100644
--- a/src/lib/mina_net2/validation_callback.ml
+++ b/src/lib/mina_net2/validation_callback.ml
@@ -72,7 +72,7 @@ let record_timeout_metrics cb =
Mina_metrics.Counter.inc_one M.validations_timed_out
let record_validation_metrics message_type (result : validation_result)
- validation_time processing_time ~block_window_duration:_ (*TODO remove*) =
+ validation_time processing_time =
match metrics_of_message_type message_type with
| None ->
()
@@ -81,14 +81,11 @@ let record_validation_metrics message_type (result : validation_result)
| `Ignore ->
Mina_metrics.Counter.inc_one M.ignored
| `Accept ->
- let module Validation_time = M.Validation_time in
- Validation_time.update validation_time ;
- let module Processing_time = M.Processing_time in
- Processing_time.update processing_time
+ M.Validation_time.update validation_time ;
+ M.Processing_time.update processing_time
| `Reject ->
Mina_metrics.Counter.inc_one M.rejected ;
- let module Rejection_time = M.Rejection_time in
- Rejection_time.update processing_time )
+ M.Rejection_time.update processing_time )
let await_timeout cb =
if is_expired cb then Deferred.return ()
@@ -101,7 +98,7 @@ let await_timeout cb =
( Time_ns.Span.to_span_float_round_nearest
@@ Time_ns.diff expires_at (Time_ns.now ()) )
-let await ~block_window_duration cb =
+let await cb =
if is_expired cb then (record_timeout_metrics cb ; Deferred.return None)
else
match cb.expiration with
@@ -122,20 +119,19 @@ let await ~block_window_duration cb =
Time_ns.abs_diff (Time_ns.now ()) cb.created_at
|> Time_ns.Span.to_ms |> Time.Span.of_ms
in
- record_validation_metrics ~block_window_duration cb.message_type
- result validation_time processing_time ;
+ record_validation_metrics cb.message_type result validation_time
+ processing_time ;
Some result
| `Timeout ->
record_timeout_metrics cb ; None )
-let await_exn ~block_window_duration cb =
- match%map await ~block_window_duration cb with
- | None ->
- failwith "timeout"
- | Some result ->
- result
+let await_exn cb =
+ match%map await cb with None -> failwith "timeout" | Some result -> result
let fire_if_not_already_fired cb result =
- if not (is_expired cb) then Ivar.fill_if_empty cb.signal result
+ if not (is_expired cb) then
+ if Ivar.is_full cb.signal then
+      [%log' error (Logger.create ())] "Validation callback already fired; duplicate result ignored"
+ else Ivar.fill cb.signal result
let set_message_type t x = t.message_type <- x
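(* Hedged sketch, using Async_kernel's real Ivar API, of the fill-once
   discipline behind [fire_if_not_already_fired]: an Ivar may be filled at
   most once, so a second resolution attempt must be detected and reported
   rather than performed. *)
open Async_kernel

let fire_once (signal : [ `Accept | `Reject | `Ignore ] Ivar.t) result
    ~on_double_fire =
  if Ivar.is_full signal then on_double_fire ()
  else Ivar.fill signal result

let () =
  let signal = Ivar.create () in
  fire_once signal `Accept ~on_double_fire:(fun () -> assert false) ;
  (* The duplicate is reported, not applied: *)
  fire_once signal `Reject ~on_double_fire:(fun () ->
      print_endline "duplicate validation result dropped" )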
diff --git a/src/lib/mina_net2/validation_callback.mli b/src/lib/mina_net2/validation_callback.mli
index 352dd161642..ac87f3a8b48 100644
--- a/src/lib/mina_net2/validation_callback.mli
+++ b/src/lib/mina_net2/validation_callback.mli
@@ -11,11 +11,9 @@ val create_without_expiration : unit -> t
val is_expired : t -> bool
-val await :
- block_window_duration:Time.Span.t -> t -> validation_result option Deferred.t
+val await : t -> validation_result option Deferred.t
-val await_exn :
- block_window_duration:Time.Span.t -> t -> validation_result Deferred.t
+val await_exn : t -> validation_result Deferred.t
(** May return a deferred that never resolves, in the case of callbacks without expiration. *)
val await_timeout : t -> unit Deferred.t
diff --git a/src/lib/mina_state/snarked_ledger_state.ml b/src/lib/mina_state/snarked_ledger_state.ml
index 55d1d460645..5ae5e5acd07 100644
--- a/src/lib/mina_state/snarked_ledger_state.ml
+++ b/src/lib/mina_state/snarked_ledger_state.ml
@@ -384,55 +384,9 @@ module Make_str (A : Wire_types.Concrete) = struct
Pending_coinbase.Stack.typ Fee_excess.typ Sok_message.Digest.typ
Local_state.typ
- let to_input ({ sok_digest; _ } as t : t) =
- let input =
- let input_without_sok = to_input { t with sok_digest = () } in
- Array.reduce_exn ~f:Random_oracle.Input.Chunked.append
- [| Sok_message.Digest.to_input sok_digest; input_without_sok |]
- in
- if !top_hash_logging_enabled then
- Format.eprintf
- !"Generating unchecked top hash from:@.%{sexp: Tick.Field.t \
- Random_oracle.Input.Chunked.t}@."
- input ;
- input
-
- let to_field_elements t = Random_oracle.pack_input (to_input t)
-
- module Checked = struct
- type t = var
-
- module Checked_without_sok = Checked
-
- let to_input ({ sok_digest; _ } as t : t) =
- let open Tick in
- let open Checked.Let_syntax in
- let%bind input_without_sok =
- Checked_without_sok.to_input { t with sok_digest = () }
- in
- let input =
- Array.reduce_exn ~f:Random_oracle.Input.Chunked.append
- [| Sok_message.Digest.Checked.to_input sok_digest
- ; input_without_sok
- |]
- in
- let%map () =
- as_prover
- As_prover.(
- if !top_hash_logging_enabled then
- let%map input = Random_oracle.read_typ' input in
- Format.eprintf
- !"Generating checked top hash from:@.%{sexp: Field.t \
- Random_oracle.Input.Chunked.t}@."
- input
- else return ())
- in
- input
-
- let to_field_elements t =
- let open Tick.Checked.Let_syntax in
- Tick.Run.run_checked (to_input t >>| Random_oracle.Checked.pack_input)
- end
+ let to_field_elements =
+ let (Typ { value_to_fields; _ }) = typ in
+ Fn.compose fst value_to_fields
end
let option lab =
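(* Self-contained sketch of the [to_field_elements] simplification above: a
   [Typ] already knows how to serialize a value into field elements (plus
   auxiliary data), so the hand-rolled Random_oracle encoding can be replaced
   by projecting the first component of [value_to_fields]. The record here is
   a simplified stand-in for Snarky's [Typ]. *)
type ('value, 'field, 'aux) typ =
  { value_to_fields : 'value -> 'field array * 'aux }

let to_field_elements (t : ('value, 'field, 'aux) typ) (v : 'value) :
    'field array =
  fst (t.value_to_fields v)

(* A toy statement encoded as two "fields": *)
let () =
  let typ = { value_to_fields = (fun (a, b) -> ([| a; b |], ())) } in
  assert (Array.length (to_field_elements typ (1, 2)) = 2)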
diff --git a/src/lib/mina_state/snarked_ledger_state_intf.ml b/src/lib/mina_state/snarked_ledger_state_intf.ml
index 68919366c78..39b6150823f 100644
--- a/src/lib/mina_state/snarked_ledger_state_intf.ml
+++ b/src/lib/mina_state/snarked_ledger_state_intf.ml
@@ -223,22 +223,9 @@ module type Full = sig
, Local_state.Checked.t )
Poly.t
- open Tick
-
- val typ : (var, t) Typ.t
-
- val to_input : t -> Field.t Random_oracle.Input.Chunked.t
+ val typ : (var, t) Tick.Typ.t
- val to_field_elements : t -> Field.t array
-
- module Checked : sig
- type t = var
-
- val to_input : var -> Field.Var.t Random_oracle.Input.Chunked.t Checked.t
-
- (* This is actually a checked function. *)
- val to_field_elements : var -> Field.Var.t array
- end
+ val to_field_elements : t -> Tick.Field.t array
end
val gen : t Quickcheck.Generator.t
diff --git a/src/lib/network_pool/batcher.ml b/src/lib/network_pool/batcher.ml
index c2a9a147165..3bda69491da 100644
--- a/src/lib/network_pool/batcher.ml
+++ b/src/lib/network_pool/batcher.ml
@@ -2,9 +2,6 @@ open Core_kernel
open Async_kernel
open Network_peer
-(* Only show stdout for failed inline tests. *)
-open Inline_test_quiet_logs
-
module Id = Unique_id.Int ()
type ('init, 'result) elt =
@@ -45,7 +42,7 @@ type ('init, 'partially_validated, 'result) t =
}
[@@deriving sexp]
-let create ?(how_to_add = `Enqueue_back) ?logger ?compare_init
+let create ?(how_to_add = `Enqueue_back) ~logger ?compare_init
?(weight = fun _ -> 1) ?max_weight_per_call verifier =
{ state = Waiting
; queue = Q.create ()
@@ -54,7 +51,7 @@ let create ?(how_to_add = `Enqueue_back) ?logger ?compare_init
; verifier
; weight
; max_weight_per_call
- ; logger = Option.value logger ~default:(Logger.create ())
+ ; logger
}
let call_verifier t (ps : 'proof list) = t.verifier ps
@@ -78,7 +75,7 @@ let rec determine_outcome :
(* First separate out all the known results. That information will definitely be included
in the outcome.
*)
- let logger = Logger.create () in
+ let logger = v.logger in
let potentially_invalid =
List.filter_map (List.zip_exn ps res) ~f:(fun (elt, r) ->
match r with
@@ -303,8 +300,7 @@ module Transaction_pool = struct
(Array.to_list
(Array.map a ~f:(function `Valid c -> Some c | _ -> None)) )
- let create verifier : t =
- let logger = Logger.create () in
+ let create ~logger verifier : t =
create ~compare_init:compare_envelope ~logger (fun (ds : input list) ->
O1trace.thread "dispatching_transaction_pool_batcher_verification"
(fun () ->
@@ -431,8 +427,7 @@ module Snark_pool = struct
let open Deferred.Or_error.Let_syntax in
match%map verify t p with Ok () -> true | Error _ -> false
- let create verifier : t =
- let logger = Logger.create () in
+ let create ~logger verifier : t =
create
(* TODO: Make this a proper config detail once we have data on what a
good default would be.
@@ -498,10 +493,8 @@ module Snark_pool = struct
let verifier =
Async.Thread_safe.block_on_async_exn (fun () ->
- Verifier.create ~logger ~proof_level ~constraint_constants
- ~conf_dir:None
- ~pids:(Child_processes.Termination.create_pid_table ())
- ~commit_id:"not specified for unit tests" () )
+ Verifier.For_tests.default ~constraint_constants ~logger
+ ~proof_level () )
let gen_proofs =
let open Quickcheck.Generator.Let_syntax in
@@ -539,7 +532,7 @@ module Snark_pool = struct
Envelope.Incoming.gen data_gen
let run_test proof_lists =
- let batcher = create verifier in
+ let batcher = create ~logger verifier in
Deferred.List.iter proof_lists ~f:(fun (invalid_proofs, proof_list) ->
let%map r = verify' batcher proof_list in
let (`Invalid ps) = Or_error.ok_exn r in
diff --git a/src/lib/network_pool/batcher.mli b/src/lib/network_pool/batcher.mli
index e08d8ed30d0..c46bcb0b2e3 100644
--- a/src/lib/network_pool/batcher.mli
+++ b/src/lib/network_pool/batcher.mli
@@ -9,7 +9,7 @@ module Snark_pool : sig
type t [@@deriving sexp]
- val create : Verifier.t -> t
+ val create : logger:Logger.t -> Verifier.t -> t
val verify : t -> proof_envelope -> bool Deferred.Or_error.t
end
@@ -18,7 +18,7 @@ type ('initial, 'partially_validated, 'result) t
val create :
?how_to_add:[ `Insert | `Enqueue_back ]
- -> ?logger:Logger.t
+ -> logger:Logger.t
-> ?compare_init:('init -> 'init -> int)
-> ?weight:('init -> int)
-> ?max_weight_per_call:int
@@ -42,7 +42,7 @@ module Transaction_pool : sig
type t [@@deriving sexp]
- val create : Verifier.t -> t
+ val create : logger:Logger.t -> Verifier.t -> t
val verify :
t
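(* Hedged sketch of the API tightening in this interface: [?logger], which
   silently fell back to a fresh [Logger.create ()], becomes a required
   [~logger], so every batcher shares its caller's logging context. A toy
   [logger] type stands in for Logger.t. *)
type logger = { log : string -> unit }

(* Before: omitting the argument quietly created a detached logger. *)
let create_before ?(logger = { log = ignore }) () = logger

(* After: the caller must supply one. *)
let create_after ~(logger : logger) () = logger

let () =
  let shared = { log = print_endline } in
  let (_ : logger) = create_before () in
  (create_after ~logger:shared ()).log "batcher attached to shared logger"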
diff --git a/src/lib/network_pool/intf.ml b/src/lib/network_pool/intf.ml
index c587756a216..63af3e4b0cc 100644
--- a/src/lib/network_pool/intf.ml
+++ b/src/lib/network_pool/intf.ml
@@ -384,6 +384,7 @@ module type Transaction_resource_pool_intf = sig
-> verifier:Verifier.t
-> genesis_constants:Genesis_constants.t
-> slot_tx_end:Mina_numbers.Global_slot_since_hard_fork.t option
+ -> compile_config:Mina_compile_config.t
-> Config.t
val member : t -> Transaction_hash.User_command_with_valid_signature.t -> bool
diff --git a/src/lib/network_pool/network_pool_base.ml b/src/lib/network_pool/network_pool_base.ml
index cf4c1e36674..1fbb36be82d 100644
--- a/src/lib/network_pool/network_pool_base.ml
+++ b/src/lib/network_pool/network_pool_base.ml
@@ -185,7 +185,6 @@ end)
~unwrap:(function
| Diff m -> m | _ -> failwith "unexpected message type" )
~trace_label:Resource_pool.label ~logger resource_pool
- ~block_window_duration
in
let local_r, local_w, _ =
Local_sink.create
@@ -193,7 +192,6 @@ end)
~unwrap:(function
| Diff m -> m | _ -> failwith "unexpected message type" )
~trace_label:Resource_pool.label ~logger resource_pool
- ~block_window_duration
in
log_rate_limiter_occasionally network_pool remote_rl ;
(*priority: Transition frontier diffs > local diffs > incoming diffs*)
diff --git a/src/lib/network_pool/pool_sink.ml b/src/lib/network_pool/pool_sink.ml
index 50ca9371593..8be8f96c3b6 100644
--- a/src/lib/network_pool/pool_sink.ml
+++ b/src/lib/network_pool/pool_sink.ml
@@ -26,7 +26,6 @@ module type Pool_sink = sig
-> trace_label:string
-> logger:Logger.t
-> pool
- -> block_window_duration:Time.Span.t
-> 'wrapped_t Strict_pipe.Reader.t * t * Rate_limiter.t
end
@@ -72,7 +71,6 @@ module Base
; throttle : unit Throttle.t
; on_push : unit -> unit Deferred.t
; log_gossip_heard : bool
- ; block_window_duration : Time.Span.t
}
-> t
| Void
@@ -145,7 +143,6 @@ module Base
; throttle
; on_push
; log_gossip_heard
- ; block_window_duration
} ->
O1trace.sync_thread (sprintf "handle_%s_gossip" trace_label)
@@ fun () ->
@@ -157,10 +154,7 @@ module Base
| BC.External cb'' ->
Diff.update_metrics env' cb'' ~log_gossip_heard ~logger ;
don't_wait_for
- ( match%map
- Mina_net2.Validation_callback.await ~block_window_duration
- cb''
- with
+ ( match%map Mina_net2.Validation_callback.await cb'' with
| None ->
let diff = Envelope.Incoming.data env' in
[%log error]
@@ -197,7 +191,7 @@ module Base
Deferred.unit
let create ?(on_push = Fn.const Deferred.unit) ?(log_gossip_heard = false)
- ~wrap ~unwrap ~trace_label ~logger pool ~block_window_duration =
+ ~wrap ~unwrap ~trace_label ~logger pool =
let r, writer =
Strict_pipe.create ~name:"verified network pool diffs"
(Buffered
@@ -223,7 +217,6 @@ module Base
; throttle
; on_push
; log_gossip_heard
- ; block_window_duration
}
, rate_limiter )
diff --git a/src/lib/network_pool/snark_pool.ml b/src/lib/network_pool/snark_pool.ml
index 6ede322b927..3bf3b784daf 100644
--- a/src/lib/network_pool/snark_pool.ml
+++ b/src/lib/network_pool/snark_pool.ml
@@ -256,7 +256,7 @@ struct
}
; frontier =
(fun () -> Broadcast_pipe.Reader.peek frontier_broadcast_pipe)
- ; batcher = Batcher.Snark_pool.create config.verifier
+ ; batcher = Batcher.Snark_pool.create ~logger config.verifier
; logger
; config
; account_creation_fee =
@@ -559,9 +559,6 @@ module Diff_versioned = struct
[@@deriving compare, sexp, to_yojson, hash]
end
-(* Only show stdout for failed inline tests. *)
-open Inline_test_quiet_logs
-
let%test_module "random set test" =
( module struct
open Mina_base
@@ -591,10 +588,8 @@ let%test_module "random set test" =
let verifier =
Async.Thread_safe.block_on_async_exn (fun () ->
- Verifier.create ~logger ~proof_level ~constraint_constants
- ~conf_dir:None
- ~pids:(Child_processes.Termination.create_pid_table ())
- ~commit_id:"not specified for unit tests" () )
+ Verifier.For_tests.default ~constraint_constants ~logger ~proof_level
+ () )
module Mock_snark_pool =
Make (Mocks.Base_ledger) (Mocks.Staged_ledger) (Mocks.Transition_frontier)
diff --git a/src/lib/network_pool/test.ml b/src/lib/network_pool/test.ml
index b0ddac4e0a4..42f36dd27ef 100644
--- a/src/lib/network_pool/test.ml
+++ b/src/lib/network_pool/test.ml
@@ -3,9 +3,6 @@ open Core_kernel
open Pipe_lib
open Network_peer
-(* Only show stdout for failed inline tests. *)
-open Inline_test_quiet_logs
-
let%test_module "network pool test" =
( module struct
let trust_system = Mocks.trust_system
@@ -27,10 +24,8 @@ let%test_module "network pool test" =
let verifier =
Async.Thread_safe.block_on_async_exn (fun () ->
- Verifier.create ~logger ~proof_level ~constraint_constants
- ~conf_dir:None
- ~pids:(Child_processes.Termination.create_pid_table ())
- ~commit_id:"not specified for unit tests" () )
+ Verifier.For_tests.default ~constraint_constants ~logger ~proof_level
+ () )
module Mock_snark_pool =
Snark_pool.Make (Mocks.Base_ledger) (Mocks.Staged_ledger)
diff --git a/src/lib/network_pool/transaction_pool.ml b/src/lib/network_pool/transaction_pool.ml
index e99224ecdb8..606b8bcbde8 100644
--- a/src/lib/network_pool/transaction_pool.ml
+++ b/src/lib/network_pool/transaction_pool.ml
@@ -3,8 +3,6 @@
transactions (user commands) and providing them to the block producer code.
*)
-(* Only show stdout for failed inline tests.*)
-open Inline_test_quiet_logs
open Core
open Async
open Mina_base
@@ -289,18 +287,20 @@ struct
; verifier : (Verifier.t[@sexp.opaque])
; genesis_constants : Genesis_constants.t
; slot_tx_end : Mina_numbers.Global_slot_since_hard_fork.t option
+ ; compile_config : Mina_compile_config.t
}
[@@deriving sexp_of]
  (* remove the next line if there's a way to force [@@deriving make] to
     write a named parameter instead of an optional parameter *)
let make ~trust_system ~pool_max_size ~verifier ~genesis_constants
- ~slot_tx_end =
+ ~slot_tx_end ~compile_config =
{ trust_system
; pool_max_size
; verifier
; genesis_constants
; slot_tx_end
+ ; compile_config
}
end
@@ -831,7 +831,7 @@ struct
; remaining_in_batch = max_per_15_seconds
; config
; logger
- ; batcher = Batcher.create config.verifier
+ ; batcher = Batcher.create ~logger config.verifier
; best_tip_diff_relay = None
; best_tip_ledger = None
; verification_key_table = Vk_refcount_table.create ()
@@ -1090,7 +1090,8 @@ struct
~f:(fun acc user_cmd ->
match
User_command.check_well_formedness
- ~genesis_constants:t.config.genesis_constants user_cmd
+ ~genesis_constants:t.config.genesis_constants
+ ~compile_config:t.config.compile_config user_cmd
with
| Ok () ->
acc
@@ -1658,19 +1659,19 @@ let%test_module _ =
let genesis_constants = precomputed_values.genesis_constants
+ let compile_config = precomputed_values.compile_config
+
let minimum_fee =
Currency.Fee.to_nanomina_int genesis_constants.minimum_user_command_fee
- let logger = Logger.create ()
+ let logger = Logger.null ()
let time_controller = Block_time.Controller.basic ~logger
let verifier =
Async.Thread_safe.block_on_async_exn (fun () ->
- Verifier.create ~logger ~proof_level ~constraint_constants
- ~conf_dir:None
- ~pids:(Child_processes.Termination.create_pid_table ())
- ~commit_id:"not specified for unit tests" () )
+ Verifier.For_tests.default ~constraint_constants ~logger ~proof_level
+ () )
let `VK vk, `Prover prover =
Transaction_snark.For_tests.create_trivial_snapp ~constraint_constants ()
@@ -1919,7 +1920,7 @@ let%test_module _ =
let trust_system = Trust_system.null () in
let config =
Test.Resource_pool.make_config ~trust_system ~pool_max_size ~verifier
- ~genesis_constants ~slot_tx_end
+ ~genesis_constants ~slot_tx_end ~compile_config
in
let pool_, _, _ =
Test.create ~config ~logger ~constraint_constants ~consensus_constants
@@ -2203,19 +2204,22 @@ let%test_module _ =
let tm1 = Time.now () in
[%log' info test.txn_pool.logger] "Time for add_commands: %0.04f sec"
(Time.diff tm1 tm0 |> Time.Span.to_sec) ;
+ let debug = false in
( match result with
| Ok (`Accept, _, rejects) ->
- List.iter rejects ~f:(fun (cmd, err) ->
- Core.Printf.printf
- !"command was rejected because %s: %{Yojson.Safe}\n%!"
- (Diff_versioned.Diff_error.to_string_name err)
- (User_command.to_yojson cmd) )
+ if debug then
+ List.iter rejects ~f:(fun (cmd, err) ->
+ Core.Printf.printf
+ !"command was rejected because %s: %{Yojson.Safe}\n%!"
+ (Diff_versioned.Diff_error.to_string_name err)
+ (User_command.to_yojson cmd) )
| Ok (`Reject, _, _) ->
failwith "diff was rejected during application"
| Error (`Other err) ->
- Core.Printf.printf
- !"failed to apply diff to pool: %s\n%!"
- (Error.to_string_hum err) ) ;
+ if debug then
+ Core.Printf.printf
+ !"failed to apply diff to pool: %s\n%!"
+ (Error.to_string_hum err) ) ;
result
let add_commands' ?local test cs =
@@ -3085,10 +3089,8 @@ let%test_module _ =
authorization would be rejected" =
Thread_safe.block_on_async_exn (fun () ->
let%bind verifier_full =
- Verifier.create ~logger ~proof_level:Full ~constraint_constants
- ~conf_dir:None
- ~pids:(Child_processes.Termination.create_pid_table ())
- ~commit_id:"not specified for unit tests" ()
+ Verifier.For_tests.default ~constraint_constants ~logger
+ ~proof_level:Full ()
in
let%bind test =
setup_test ~verifier:verifier_full
diff --git a/src/lib/node_config/dune b/src/lib/node_config/dune
index 3d3eebb8b77..019dba43c42 100644
--- a/src/lib/node_config/dune
+++ b/src/lib/node_config/dune
@@ -4,7 +4,7 @@
(libraries
node_config_intf
node_config_version
- node_config_unconfigurable_constants)
+ )
(preprocessor_deps ../../config.mlh)
(instrumentation (backend bisect_ppx))
(preprocess (pps ppx_version ppx_base ppx_optcomp))
diff --git a/src/lib/node_config/for_unit_tests/dune b/src/lib/node_config/for_unit_tests/dune
index 460efc1f009..003ed5d630e 100644
--- a/src/lib/node_config/for_unit_tests/dune
+++ b/src/lib/node_config/for_unit_tests/dune
@@ -4,7 +4,7 @@
(libraries
node_config_intf
node_config_version
- node_config_unconfigurable_constants)
+ )
(instrumentation (backend bisect_ppx))
(preprocess (pps ppx_version ppx_base ppx_optcomp))
)
diff --git a/src/lib/node_config/for_unit_tests/node_config_for_unit_tests.ml b/src/lib/node_config/for_unit_tests/node_config_for_unit_tests.ml
index 95c880e9142..04f628d700a 100644
--- a/src/lib/node_config/for_unit_tests/node_config_for_unit_tests.ml
+++ b/src/lib/node_config/for_unit_tests/node_config_for_unit_tests.ml
@@ -4,7 +4,6 @@
*)
include Node_config_version
-include Node_config_unconfigurable_constants
let (ledger_depth : int) = (10 : int)
@@ -39,8 +38,6 @@ let (pool_max_size : int) = (3000 : int)
let (account_creation_fee_int : string) = ("0.001" : string)
-let (default_transaction_fee : string) = ("5" : string)
-
let (default_snark_worker_fee : string) = ("1" : string)
let (minimum_user_command_fee : string) = ("2" : string)
@@ -64,3 +61,38 @@ let (vrf_poll_interval : int) = (0 : int)
let zkapp_cmd_limit = None
let scan_state_tps_goal_x10 : int option = None
+
+(** Limits on Zkapp_command.t size:
+      10.26*np + 10.08*n2 + 9.14*n1 < 69.45
+    where
+      np: number of single proof updates
+      n2: number of pairs of signed/no-auth updates
+      n1: number of single signed/no-auth updates
+    and each coefficient represents the cost of that kind of update.
+    The coefficients were estimated by linear least squares from
+    benchmarking data collected on a bare-metal i9 processor, with
+    headroom to accommodate lower-spec hardware.
+    69.45 was the total time for a combination of updates that was
+    considered acceptable.
+*)
+
+let zkapp_proof_update_cost = 10.26
+
+let zkapp_signed_pair_update_cost = 10.08
+
+let zkapp_signed_single_update_cost = 9.14
+
+let zkapp_transaction_cost_limit = 69.45
+
+let max_event_elements = 100
+
+let max_action_elements = 100
+
+let zkapp_cmd_limit_hardcap = 128
+
+let zkapps_disabled = false
+
+let rpc_handshake_timeout_sec = 60.0
+
+let rpc_heartbeat_timeout_sec = 60.0
+
+let rpc_heartbeat_send_every_sec = 10.0 (* same as the default *)
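
As a concrete check of the cost formula in the comment above, a self-contained snippet (plain OCaml, no repo dependencies; the update counts are arbitrary examples):

```ocaml
(* Worked example of the Zkapp_command.t size limit:
   np = 3 proof updates, n2 = 1 signed pair, n1 = 2 single signed updates
   gives 10.26*3 + 10.08*1 + 9.14*2 = 59.14 < 69.45, so this mix fits. *)
let zkapp_cost ~np ~n2 ~n1 =
  (10.26 *. float_of_int np)
  +. (10.08 *. float_of_int n2)
  +. (9.14 *. float_of_int n1)

let () =
  let cost = zkapp_cost ~np:3 ~n2:1 ~n1:2 in
  assert (cost < 69.45) ;
  Printf.printf "cost = %.2f (limit 69.45)\n" cost
```
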
diff --git a/src/lib/node_config/for_unit_tests/node_config_for_unit_tests.mli b/src/lib/node_config/for_unit_tests/node_config_for_unit_tests.mli
index eb996f25855..bbe3b4300d0 100644
--- a/src/lib/node_config/for_unit_tests/node_config_for_unit_tests.mli
+++ b/src/lib/node_config/for_unit_tests/node_config_for_unit_tests.mli
@@ -1 +1,23 @@
include Node_config_intf.S
+
+val zkapp_proof_update_cost : float
+
+val zkapp_signed_pair_update_cost : float
+
+val zkapp_signed_single_update_cost : float
+
+val zkapp_transaction_cost_limit : float
+
+val max_event_elements : int
+
+val max_action_elements : int
+
+val zkapp_cmd_limit_hardcap : int
+
+val zkapps_disabled : bool
+
+val rpc_handshake_timeout_sec : float
+
+val rpc_heartbeat_timeout_sec : float
+
+val rpc_heartbeat_send_every_sec : float
diff --git a/src/lib/node_config/intf/node_config_intf.mli b/src/lib/node_config/intf/node_config_intf.mli
index 82326fe2010..4daed897314 100644
--- a/src/lib/node_config/intf/node_config_intf.mli
+++ b/src/lib/node_config/intf/node_config_intf.mli
@@ -6,40 +6,9 @@ module type Version = sig
val protocol_version_patch : int
end
-(* It's stupid that this exists. TODO: Remove and make configurable. *)
-module type Unconfigurable_constants = sig
- val zkapp_proof_update_cost : float
-
- val zkapp_signed_pair_update_cost : float
-
- val zkapp_signed_single_update_cost : float
-
- val zkapp_transaction_cost_limit : float
-
- val max_event_elements : int
-
- val max_action_elements : int
-
- val zkapp_cmd_limit_hardcap : int
-
- val zkapps_disabled : bool
-
- val rpc_handshake_timeout_sec : float
-
- val rpc_heartbeat_timeout_sec : float
-
- val rpc_heartbeat_send_every_sec : float
-
- val sync_ledger_max_subtree_depth : int
-
- val sync_ledger_default_subtree_depth : int
-end
-
module type S = sig
include Version
- include Unconfigurable_constants
-
val ledger_depth : int
val curve_size : int
@@ -70,8 +39,6 @@ module type S = sig
val account_creation_fee_int : string
- val default_transaction_fee : string
-
val default_snark_worker_fee : string
val minimum_user_command_fee : string
diff --git a/src/lib/node_config/node_config.ml b/src/lib/node_config/node_config.ml
index 78485f82a17..4d519dd5a84 100644
--- a/src/lib/node_config/node_config.ml
+++ b/src/lib/node_config/node_config.ml
@@ -7,7 +7,6 @@
*)
include Node_config_version
-include Node_config_unconfigurable_constants
[%%inject "ledger_depth", ledger_depth]
@@ -51,8 +50,6 @@ let scan_state_transaction_capacity_log_2 =
[%%inject "account_creation_fee_int", account_creation_fee_int]
-[%%inject "default_transaction_fee", default_transaction_fee]
-
[%%inject "default_snark_worker_fee", default_snark_worker_fee]
[%%inject "minimum_user_command_fee", minimum_user_command_fee]
diff --git a/src/lib/node_config/unconfigurable_constants/dune b/src/lib/node_config/unconfigurable_constants/dune
deleted file mode 100644
index 6bcb95d8668..00000000000
--- a/src/lib/node_config/unconfigurable_constants/dune
+++ /dev/null
@@ -1,7 +0,0 @@
-(library
- (name node_config_unconfigurable_constants)
- (public_name mina_node_config.unconfigurable_constants)
- (libraries node_config_intf)
- (instrumentation (backend bisect_ppx))
- (preprocess (pps ppx_version ppx_base ppx_optcomp))
-)
diff --git a/src/lib/node_config/unconfigurable_constants/node_config_unconfigurable_constants.ml b/src/lib/node_config/unconfigurable_constants/node_config_unconfigurable_constants.ml
deleted file mode 100644
index f841626d180..00000000000
--- a/src/lib/node_config/unconfigurable_constants/node_config_unconfigurable_constants.ml
+++ /dev/null
@@ -1,42 +0,0 @@
-(* FIXME: These should be configurable. *)
-
-(** limits on Zkapp_command.t size
- 10.26*np + 10.08*n2 + 9.14*n1 < 69.45
- where np: number of single proof updates
- n2: number of pairs of signed/no-auth update
- n1: number of single signed/no-auth update
- and their coefficients representing the cost
- The formula was generated based on benchmarking data conducted on bare
- metal i9 processor with room to include lower spec.
- 69.45 was the total time for a combination of updates that was considered
- acceptable.
- The method used to estimate the cost was linear least squares.
-*)
-
-let zkapp_proof_update_cost = 10.26
-
-let zkapp_signed_pair_update_cost = 10.08
-
-let zkapp_signed_single_update_cost = 9.14
-
-let zkapp_transaction_cost_limit = 69.45
-
-let max_event_elements = 100
-
-let max_action_elements = 100
-
-let zkapp_cmd_limit_hardcap = 128
-
-(* These are fine to be non-configurable *)
-
-let zkapps_disabled = false
-
-let rpc_handshake_timeout_sec = 60.0
-
-let rpc_heartbeat_timeout_sec = 60.0
-
-let rpc_heartbeat_send_every_sec = 10.0 (*same as the default*)
-
-let sync_ledger_max_subtree_depth = 8
-
-let sync_ledger_default_subtree_depth = 6
diff --git a/src/lib/node_config/unconfigurable_constants/node_config_unconfigurable_constants.mli b/src/lib/node_config/unconfigurable_constants/node_config_unconfigurable_constants.mli
deleted file mode 100644
index 51efc504085..00000000000
--- a/src/lib/node_config/unconfigurable_constants/node_config_unconfigurable_constants.mli
+++ /dev/null
@@ -1 +0,0 @@
-include Node_config_intf.Unconfigurable_constants
diff --git a/src/lib/precomputed_values/precomputed_values.ml b/src/lib/precomputed_values/precomputed_values.ml
index fd97c4ec317..c5d03d0020e 100644
--- a/src/lib/precomputed_values/precomputed_values.ml
+++ b/src/lib/precomputed_values/precomputed_values.ml
@@ -43,4 +43,5 @@ let for_unit_tests =
; protocol_state_with_hashes
; constraint_system_digests = hashes
; proof_data = None
+ ; compile_config = Mina_compile_config.For_unit_tests.t
})
diff --git a/src/lib/prover/intf.ml b/src/lib/prover/intf.ml
index 0f6964f140e..bf7b47e7807 100644
--- a/src/lib/prover/intf.ml
+++ b/src/lib/prover/intf.ml
@@ -57,4 +57,10 @@ module type S = sig
sets the process kind for the Itn logger to "prover"
*)
val set_itn_logger_data : t -> daemon_port:int -> unit Deferred.Or_error.t
+
+ val get_blockchain_verification_key :
+ t -> Pickles.Verification_key.t Deferred.Or_error.t
+
+ val get_transaction_verification_key :
+ t -> Pickles.Verification_key.t Deferred.Or_error.t
end
diff --git a/src/lib/prover/prover.ml b/src/lib/prover/prover.ml
index 09e386d036c..485e87514d5 100644
--- a/src/lib/prover/prover.ml
+++ b/src/lib/prover/prover.ml
@@ -56,6 +56,12 @@ module Worker_state = struct
val toggle_internal_tracing : bool -> unit
val set_itn_logger_data : daemon_port:int -> unit
+
+ val get_blockchain_verification_key :
+ unit -> Pickles.Verification_key.t Deferred.t
+
+ val get_transaction_verification_key :
+ unit -> Pickles.Verification_key.t Deferred.t
end
(* bin_io required by rpc_parallel *)
@@ -103,7 +109,6 @@ module Worker_state = struct
Pickles.Cache_handle.generate_or_load B.cache_handle
|> Promise.to_deferred
in
-
( module struct
module T = T
module B = B
@@ -161,6 +166,12 @@ module Worker_state = struct
let set_itn_logger_data ~daemon_port =
Itn_logger.set_data ~process_kind:"prover" ~daemon_port
+
+ let get_blockchain_verification_key () =
+ Lazy.force B.Proof.verification_key
+
+ let get_transaction_verification_key () =
+ Lazy.force T.verification_key
end : S )
| Check ->
Deferred.return
@@ -202,6 +213,12 @@ module Worker_state = struct
let toggle_internal_tracing _ = ()
let set_itn_logger_data ~daemon_port:_ = ()
+
+ let get_blockchain_verification_key () =
+ Deferred.return (Lazy.force Pickles.Verification_key.dummy)
+
+ let get_transaction_verification_key () =
+ Deferred.return (Lazy.force Pickles.Verification_key.dummy)
end : S )
| No_check ->
Deferred.return
@@ -221,6 +238,12 @@ module Worker_state = struct
let toggle_internal_tracing _ = ()
let set_itn_logger_data ~daemon_port:_ = ()
+
+ let get_blockchain_verification_key () =
+ Deferred.return (Lazy.force Pickles.Verification_key.dummy)
+
+ let get_transaction_verification_key () =
+ Deferred.return (Lazy.force Pickles.Verification_key.dummy)
end : S )
let get = Fn.id
@@ -275,6 +298,18 @@ module Functions = struct
let (module M) = Worker_state.get w in
M.set_itn_logger_data ~daemon_port ;
Deferred.unit )
+
+ let get_blockchain_verification_key =
+ create bin_unit [%bin_type_class: Pickles.Verification_key.Stable.Latest.t]
+ (fun w () ->
+ let (module M) = Worker_state.get w in
+ M.get_blockchain_verification_key () )
+
+ let get_transaction_verification_key =
+ create bin_unit [%bin_type_class: Pickles.Verification_key.Stable.Latest.t]
+ (fun w () ->
+ let (module M) = Worker_state.get w in
+ M.get_transaction_verification_key () )
end
module Worker = struct
@@ -288,6 +323,10 @@ module Worker = struct
; verify_blockchain : ('w, Blockchain.t, unit Or_error.t) F.t
; toggle_internal_tracing : ('w, bool, unit) F.t
; set_itn_logger_data : ('w, int, unit) F.t
+ ; get_blockchain_verification_key :
+ ('w, unit, Pickles.Verification_key.t) F.t
+ ; get_transaction_verification_key :
+ ('w, unit, Pickles.Verification_key.t) F.t
}
module Worker_state = Worker_state
@@ -316,6 +355,8 @@ module Worker = struct
; verify_blockchain = f verify_blockchain
; toggle_internal_tracing = f toggle_internal_tracing
; set_itn_logger_data = f set_itn_logger_data
+ ; get_blockchain_verification_key = f get_blockchain_verification_key
+ ; get_transaction_verification_key = f get_transaction_verification_key
}
let init_worker_state
@@ -566,3 +607,11 @@ let toggle_internal_tracing { connection; _ } enabled =
let set_itn_logger_data { connection; _ } ~daemon_port =
Worker.Connection.run connection ~f:Worker.functions.set_itn_logger_data
~arg:daemon_port
+
+let get_blockchain_verification_key { connection; _ } =
+ Worker.Connection.run connection
+ ~f:Worker.functions.get_blockchain_verification_key ~arg:()
+
+let get_transaction_verification_key { connection; _ } =
+ Worker.Connection.run connection
+ ~f:Worker.functions.get_transaction_verification_key ~arg:()
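
Both new entry points return `Pickles.Verification_key.t Deferred.Or_error.t` (see `intf.ml` above). A hedged usage sketch; `prover` is assumed to be an existing `Prover.t` handle, and the error handling is illustrative only:

```ocaml
open Core
open Async

(* Sketch: fetch the blockchain verification key from the prover
   subprocess via the new RPC and pass it to a continuation. *)
let with_blockchain_vk (prover : Prover.t) ~f =
  match%bind Prover.get_blockchain_verification_key prover with
  | Ok vk ->
      f vk
  | Error err ->
      failwithf "could not fetch blockchain verification key: %s"
        (Error.to_string_hum err) ()
```
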
diff --git a/src/lib/runtime_config/runtime_config.ml b/src/lib/runtime_config/runtime_config.ml
index 7846e46f27e..347c2adf986 100644
--- a/src/lib/runtime_config/runtime_config.ml
+++ b/src/lib/runtime_config/runtime_config.ml
@@ -1033,6 +1033,30 @@ module Proof_keys = struct
let small : t = Log_2 2
let medium : t = Log_2 3
+
+ let to_transaction_capacity_log_2 ~block_window_duration_ms
+ ~transaction_capacity =
+ match transaction_capacity with
+ | Log_2 i ->
+ i
+ | Txns_per_second_x10 tps_goal_x10 ->
+ let max_coinbases = 2 in
+ let max_user_commands_per_block =
+          (* block_window_duration is in milliseconds, so divide by 1000;
+             divide by 10 again because the goal is expressed as tps * 10
+          *)
+ tps_goal_x10 * block_window_duration_ms / (1000 * 10)
+ in
+ (* Log of the capacity of transactions per transition.
+ - 1 will only work if we don't have prover fees.
+ - 2 will work with prover fees, but not if we want a transaction
+ included in every block.
+ - At least 3 ensures a transaction per block and the staged-ledger
+ unit tests pass.
+ *)
+ 1
+ + Core_kernel.Int.ceil_log2
+ (max_user_commands_per_block + max_coinbases)
end
type t =
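
For intuition about the `Txns_per_second_x10` branch: with `tps_goal_x10 = 2` (0.2 tps) and 180 000 ms slots, `max_user_commands_per_block = 2 * 180000 / 10000 = 36`; adding the 2 coinbases gives 38, and `1 + ceil_log2 38 = 1 + 6 = 7`. A stdlib-only sketch of the same arithmetic, with a hand-rolled `ceil_log2` standing in for `Core_kernel.Int.ceil_log2`:

```ocaml
(* Smallest k with 2^k >= n, for n >= 1; mirrors Int.ceil_log2. *)
let ceil_log2 n =
  let rec go k p = if p >= n then k else go (k + 1) (p * 2) in
  go 0 1

let to_transaction_capacity_log_2 ~block_window_duration_ms ~tps_goal_x10 =
  let max_coinbases = 2 in
  let max_user_commands_per_block =
    tps_goal_x10 * block_window_duration_ms / (1000 * 10)
  in
  1 + ceil_log2 (max_user_commands_per_block + max_coinbases)

let () =
  assert (
    to_transaction_capacity_log_2 ~block_window_duration_ms:180_000
      ~tps_goal_x10:2
    = 7 )
```
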
@@ -1064,6 +1088,19 @@ module Proof_keys = struct
; fork
}
+ let default =
+ { level = None
+ ; sub_windows_per_window = None
+ ; ledger_depth = None
+ ; work_delay = None
+ ; block_window_duration_ms = None
+ ; transaction_capacity = None
+ ; coinbase_amount = None
+ ; supercharged_coinbase_factor = None
+ ; account_creation_fee = None
+ ; fork = None
+ }
+
let to_json_layout
{ level
; sub_windows_per_window
@@ -1745,3 +1782,241 @@ module Json_loader : Json_loader_intf = struct
] ;
failwithf "Could not parse configuration file: %s" err () )
end
+
+module type Constants_intf = sig
+ type constants
+
+ val load_constants :
+ ?conf_dir:string
+ -> ?commit_id_short:string
+ -> ?itn_features:bool
+ -> ?cli_proof_level:Genesis_constants.Proof_level.t
+ -> logger:Logger.t
+ -> string list
+ -> constants Deferred.t
+
+ val load_constants' :
+ ?itn_features:bool
+ -> ?cli_proof_level:Genesis_constants.Proof_level.t
+ -> t
+ -> constants
+
+ val genesis_constants : constants -> Genesis_constants.t
+
+ val constraint_constants :
+ constants -> Genesis_constants.Constraint_constants.t
+
+ val proof_level : constants -> Genesis_constants.Proof_level.t
+
+ val compile_config : constants -> Mina_compile_config.t
+
+ val magic_for_unit_tests : t -> constants
+end
+
+module Constants : Constants_intf = struct
+ type constants =
+ { genesis_constants : Genesis_constants.t
+ ; constraint_constants : Genesis_constants.Constraint_constants.t
+ ; proof_level : Genesis_constants.Proof_level.t
+ ; compile_config : Mina_compile_config.t
+ }
+
+ let genesis_constants t = t.genesis_constants
+
+ let constraint_constants t = t.constraint_constants
+
+ let proof_level t = t.proof_level
+
+ let compile_config t = t.compile_config
+
+ let combine (a : constants) (b : t) : constants =
+ let genesis_constants =
+ { Genesis_constants.protocol =
+ { k =
+ Option.value ~default:a.genesis_constants.protocol.k
+ Option.(b.genesis >>= fun g -> g.k)
+ ; delta =
+ Option.value ~default:a.genesis_constants.protocol.delta
+ Option.(b.genesis >>= fun g -> g.delta)
+ ; slots_per_epoch =
+ Option.value ~default:a.genesis_constants.protocol.slots_per_epoch
+ Option.(b.genesis >>= fun g -> g.slots_per_epoch)
+ ; slots_per_sub_window =
+ Option.value
+ ~default:a.genesis_constants.protocol.slots_per_sub_window
+ Option.(b.genesis >>= fun g -> g.slots_per_sub_window)
+ ; grace_period_slots =
+ Option.value
+ ~default:a.genesis_constants.protocol.grace_period_slots
+ Option.(b.genesis >>= fun g -> g.grace_period_slots)
+ ; genesis_state_timestamp =
+ Option.value
+ ~default:a.genesis_constants.protocol.genesis_state_timestamp
+ Option.(
+ b.genesis
+ >>= fun g ->
+ g.genesis_state_timestamp
+ >>| Genesis_constants.genesis_timestamp_of_string
+ >>| Genesis_constants.of_time)
+ }
+ ; txpool_max_size =
+ Option.value ~default:a.genesis_constants.txpool_max_size
+ Option.(b.daemon >>= fun d -> d.txpool_max_size)
+ ; num_accounts =
+ Option.first_some
+ Option.(b.ledger >>= fun l -> l.num_accounts)
+ a.genesis_constants.num_accounts
+ ; zkapp_proof_update_cost =
+ Option.value ~default:a.genesis_constants.zkapp_proof_update_cost
+ Option.(b.daemon >>= fun d -> d.zkapp_proof_update_cost)
+ ; zkapp_signed_single_update_cost =
+ Option.value
+ ~default:a.genesis_constants.zkapp_signed_single_update_cost
+ Option.(b.daemon >>= fun d -> d.zkapp_signed_single_update_cost)
+ ; zkapp_signed_pair_update_cost =
+ Option.value
+ ~default:a.genesis_constants.zkapp_signed_pair_update_cost
+ Option.(b.daemon >>= fun d -> d.zkapp_signed_pair_update_cost)
+ ; zkapp_transaction_cost_limit =
+ Option.value ~default:a.genesis_constants.zkapp_transaction_cost_limit
+ Option.(b.daemon >>= fun d -> d.zkapp_transaction_cost_limit)
+ ; max_event_elements =
+ Option.value ~default:a.genesis_constants.max_event_elements
+ Option.(b.daemon >>= fun d -> d.max_event_elements)
+ ; max_action_elements =
+ Option.value ~default:a.genesis_constants.max_action_elements
+ Option.(b.daemon >>= fun d -> d.max_action_elements)
+ ; zkapp_cmd_limit_hardcap =
+ Option.value ~default:a.genesis_constants.zkapp_cmd_limit_hardcap
+ Option.(b.daemon >>= fun d -> d.zkapp_cmd_limit_hardcap)
+ ; minimum_user_command_fee =
+ Option.value ~default:a.genesis_constants.minimum_user_command_fee
+ Option.(b.daemon >>= fun d -> d.minimum_user_command_fee)
+ ; sync_ledger_default_subtree_depth =
+ Option.value
+ ~default:a.genesis_constants.sync_ledger_default_subtree_depth
+ Option.(b.daemon >>= fun d -> d.sync_ledger_default_subtree_depth)
+ ; sync_ledger_max_subtree_depth =
+ Option.value
+ ~default:a.genesis_constants.sync_ledger_max_subtree_depth
+ Option.(b.daemon >>= fun d -> d.sync_ledger_max_subtree_depth)
+ }
+ in
+ let constraint_constants =
+ let fork =
+ let a = a.constraint_constants.fork in
+ let b =
+ let%map.Option f = Option.(b.proof >>= fun x -> x.fork) in
+ { Genesis_constants.Fork_constants.state_hash =
+ Mina_base.State_hash.of_base58_check_exn f.state_hash
+ ; blockchain_length = Mina_numbers.Length.of_int f.blockchain_length
+ ; global_slot_since_genesis =
+ Mina_numbers.Global_slot_since_genesis.of_int
+ f.global_slot_since_genesis
+ }
+ in
+ Option.first_some b a
+ in
+ let block_window_duration_ms =
+ Option.value ~default:a.constraint_constants.block_window_duration_ms
+ Option.(b.proof >>= fun p -> p.block_window_duration_ms)
+ in
+ { a.constraint_constants with
+ sub_windows_per_window =
+ Option.value ~default:a.constraint_constants.sub_windows_per_window
+ Option.(b.proof >>= fun p -> p.sub_windows_per_window)
+ ; ledger_depth =
+ Option.value ~default:a.constraint_constants.ledger_depth
+ Option.(b.proof >>= fun p -> p.ledger_depth)
+ ; work_delay =
+ Option.value ~default:a.constraint_constants.work_delay
+ Option.(b.proof >>= fun p -> p.work_delay)
+ ; block_window_duration_ms
+ ; transaction_capacity_log_2 =
+ Option.value
+ ~default:a.constraint_constants.transaction_capacity_log_2
+ Option.(
+ b.proof
+ >>= fun p ->
+ p.transaction_capacity
+ >>| fun transaction_capacity ->
+ Proof_keys.Transaction_capacity.to_transaction_capacity_log_2
+ ~block_window_duration_ms ~transaction_capacity)
+ ; coinbase_amount =
+ Option.value ~default:a.constraint_constants.coinbase_amount
+ Option.(b.proof >>= fun p -> p.coinbase_amount)
+ ; supercharged_coinbase_factor =
+ Option.value
+ ~default:a.constraint_constants.supercharged_coinbase_factor
+ Option.(b.proof >>= fun p -> p.supercharged_coinbase_factor)
+ ; account_creation_fee =
+ Option.value ~default:a.constraint_constants.account_creation_fee
+ Option.(b.proof >>= fun p -> p.account_creation_fee)
+ ; fork
+ }
+ in
+ let proof_level =
+ let coerce_proof_level = function
+ | Proof_keys.Level.Full ->
+ Genesis_constants.Proof_level.Full
+ | Check ->
+ Genesis_constants.Proof_level.Check
+ | No_check ->
+ Genesis_constants.Proof_level.No_check
+ in
+ Option.value ~default:a.proof_level
+ Option.(b.proof >>= fun p -> p.level >>| coerce_proof_level)
+ in
+ let compile_config =
+ { a.compile_config with
+ block_window_duration =
+ constraint_constants.block_window_duration_ms |> Float.of_int
+ |> Time.Span.of_ms
+ ; network_id =
+ Option.value ~default:a.compile_config.network_id
+ Option.(b.daemon >>= fun d -> d.network_id)
+ }
+ in
+ { genesis_constants; constraint_constants; proof_level; compile_config }
+
+ let load_constants' ?itn_features ?cli_proof_level runtime_config =
+ let compile_constants =
+ { genesis_constants = Genesis_constants.Compiled.genesis_constants
+ ; constraint_constants = Genesis_constants.Compiled.constraint_constants
+ ; proof_level = Genesis_constants.Compiled.proof_level
+ ; compile_config = Mina_compile_config.Compiled.t
+ }
+ in
+ let cs = combine compile_constants runtime_config in
+ { cs with
+ proof_level = Option.value ~default:cs.proof_level cli_proof_level
+ ; compile_config =
+ { cs.compile_config with
+ itn_features =
+ Option.value ~default:cs.compile_config.itn_features itn_features
+ }
+ }
+
+ (* Use this function if you don't need/want the ledger configuration *)
+ let load_constants ?conf_dir ?commit_id_short ?itn_features ?cli_proof_level
+ ~logger config_files =
+ Deferred.Or_error.ok_exn
+ @@
+ let open Deferred.Or_error.Let_syntax in
+ let%map runtime_config =
+ Json_loader.load_config_files ?conf_dir ?commit_id_short ~logger
+ config_files
+ in
+ load_constants' ?itn_features ?cli_proof_level runtime_config
+
+ let magic_for_unit_tests t =
+ let compile_constants =
+ { genesis_constants = Genesis_constants.For_unit_tests.t
+ ; constraint_constants =
+ Genesis_constants.For_unit_tests.Constraint_constants.t
+ ; proof_level = Genesis_constants.For_unit_tests.Proof_level.t
+ ; compile_config = Mina_compile_config.For_unit_tests.t
+ }
+ in
+ combine compile_constants t
+end
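
Taken together, `Constants` gives non-daemon executables (like the snark worker below) a single call that yields all four constant sets, with precedence compiled defaults < config files < CLI overrides. A hedged sketch; the config path is a placeholder:

```ocaml
open Async

(* Sketch: load constants from a config file, letting an explicit
   proof-level override win over both the compiled default and the
   value (if any) in the file. *)
let load_example ~logger =
  let%map constants =
    Runtime_config.Constants.load_constants ~logger
      ~cli_proof_level:Genesis_constants.Proof_level.Check
      [ "/path/to/daemon.json" ]
  in
  Runtime_config.Constants.
    (genesis_constants constants, constraint_constants constants)
```
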
diff --git a/src/lib/signature_lib/schnorr.ml b/src/lib/signature_lib/schnorr.ml
index f31d64ee927..a1ef7c23443 100644
--- a/src/lib/signature_lib/schnorr.ml
+++ b/src/lib/signature_lib/schnorr.ml
@@ -240,9 +240,6 @@ module Make
let verify ?signature_kind ((r, s) : Signature.t) (pk : Public_key.t)
(m : Message.t) =
- if Random.int 1000 = 0 then (
- print_endline "SCHNORR BACKTRACE:" ;
- Printexc.print_backtrace stdout ) ;
let hash = Message.hash ?signature_kind in
let e = hash ~public_key:pk ~r m in
let r_pt = Curve.(scale one s + negate (scale pk e)) in
diff --git a/src/lib/snark_profiler_lib/snark_profiler_lib.ml b/src/lib/snark_profiler_lib/snark_profiler_lib.ml
index f40acc5112a..9c55ff5aadb 100644
--- a/src/lib/snark_profiler_lib/snark_profiler_lib.ml
+++ b/src/lib/snark_profiler_lib/snark_profiler_lib.ml
@@ -563,7 +563,7 @@ let profile_user_command (module T : Transaction_snark.S) ~genesis_constants
(target_ledger, applied) ->
let txn =
With_status.data
- @@ Mina_ledger.Ledger.Transaction_applied.transaction applied
+ @@ Mina_ledger.Ledger.transaction_of_applied applied
in
 (* the txn was already valid before apply; we are just recasting it here after application *)
let (`If_this_is_used_it_should_have_a_comment_justifying_it
@@ -782,7 +782,7 @@ let check_base_snarks ~genesis_constants ~constraint_constants sparse_ledger0
~f:(fun source_ledger (target_ledger, applied_txn) ->
let txn =
With_status.data
- @@ Mina_ledger.Ledger.Transaction_applied.transaction applied_txn
+ @@ Mina_ledger.Ledger.transaction_of_applied applied_txn
in
 (* the txn was already valid before apply; we are just recasting it here after application *)
let (`If_this_is_used_it_should_have_a_comment_justifying_it
@@ -793,7 +793,7 @@ let check_base_snarks ~genesis_constants ~constraint_constants sparse_ledger0
pending_coinbase_stack_target txn Pending_coinbase.Stack.empty
in
let supply_increase =
- Mina_ledger.Ledger.Transaction_applied.supply_increase
+ Mina_transaction_logic.Transaction_applied.supply_increase
~constraint_constants applied_txn
|> Or_error.ok_exn
in
@@ -844,7 +844,7 @@ let generate_base_snarks_witness ~genesis_constants ~constraint_constants
~f:(fun source_ledger (target_ledger, applied_txn) ->
let txn =
With_status.data
- @@ Mina_ledger.Ledger.Transaction_applied.transaction applied_txn
+ @@ Mina_ledger.Ledger.transaction_of_applied applied_txn
in
 (* the txn was already valid before apply; we are just recasting it here after application *)
let (`If_this_is_used_it_should_have_a_comment_justifying_it
@@ -855,7 +855,7 @@ let generate_base_snarks_witness ~genesis_constants ~constraint_constants
pending_coinbase_stack_target txn Pending_coinbase.Stack.empty
in
let supply_increase =
- Mina_ledger.Ledger.Transaction_applied.supply_increase
+ Mina_transaction_logic.Transaction_applied.supply_increase
~constraint_constants applied_txn
|> Or_error.ok_exn
in
diff --git a/src/lib/snark_worker/dune b/src/lib/snark_worker/dune
index 35f564ad95a..db1f2151555 100644
--- a/src/lib/snark_worker/dune
+++ b/src/lib/snark_worker/dune
@@ -42,7 +42,6 @@
mina_ledger
transaction_snark_work
error_json
- mina_node_config.unconfigurable_constants
mina_state
transaction_protocol_state
ppx_version.runtime
diff --git a/src/lib/snark_worker/functor.ml b/src/lib/snark_worker/functor.ml
index a62453e2877..05c79a0e528 100644
--- a/src/lib/snark_worker/functor.ml
+++ b/src/lib/snark_worker/functor.ml
@@ -118,24 +118,17 @@ module Make (Inputs : Intf.Inputs_intf) :
; prover = public_key
} )
- let dispatch rpc shutdown_on_disconnect query address =
+ let dispatch ~(compile_config : Mina_compile_config.t) rpc
+ shutdown_on_disconnect query address =
let%map res =
Rpc.Connection.with_client
- ~handshake_timeout:
- (Time.Span.of_sec
- Node_config_unconfigurable_constants.rpc_handshake_timeout_sec )
+ ~handshake_timeout:compile_config.rpc_handshake_timeout
~heartbeat_config:
(Rpc.Connection.Heartbeat_config.create
- ~timeout:
- (Time_ns.Span.of_sec
- Node_config_unconfigurable_constants.rpc_heartbeat_timeout_sec )
- ~send_every:
- (Time_ns.Span.of_sec
- Node_config_unconfigurable_constants
- .rpc_heartbeat_send_every_sec )
- () )
- (Tcp.Where_to_connect.of_host_and_port address)
- (fun conn -> Rpc.Rpc.dispatch rpc conn query)
+ ~timeout:compile_config.rpc_heartbeat_timeout
+ ~send_every:compile_config.rpc_heartbeat_send_every () )
+ (Tcp.Where_to_connect.of_host_and_port address) (fun conn ->
+ Rpc.Rpc.dispatch rpc conn query )
in
match res with
| Error exn ->
@@ -228,7 +221,8 @@ module Make (Inputs : Intf.Inputs_intf) :
let main
(module Rpcs_versioned : Intf.Rpcs_versioned_S
with type Work.ledger_proof = Inputs.Ledger_proof.t ) ~logger
- ~proof_level ~constraint_constants daemon_address shutdown_on_disconnect =
+ ~proof_level ~constraint_constants ~compile_config daemon_address
+ shutdown_on_disconnect =
let%bind state =
Worker_state.create ~constraint_constants ~proof_level ()
in
@@ -270,8 +264,8 @@ module Make (Inputs : Intf.Inputs_intf) :
!"Snark worker using daemon $addr"
~metadata:[ ("addr", `String (Host_and_port.to_string daemon_address)) ] ;
match%bind
- dispatch Rpcs_versioned.Get_work.Latest.rpc shutdown_on_disconnect ()
- daemon_address
+ dispatch Rpcs_versioned.Get_work.Latest.rpc shutdown_on_disconnect
+ ~compile_config () daemon_address
with
| Error e ->
log_and_retry "getting work" e (retry_pause 10.) go
@@ -303,7 +297,8 @@ module Make (Inputs : Intf.Inputs_intf) :
let%bind () =
match%map
dispatch Rpcs_versioned.Failed_to_generate_snark.Latest.rpc
- shutdown_on_disconnect (e, work, public_key) daemon_address
+ ~compile_config shutdown_on_disconnect (e, work, public_key)
+ daemon_address
with
| Error e ->
[%log error]
@@ -327,7 +322,7 @@ module Make (Inputs : Intf.Inputs_intf) :
] ;
let rec submit_work () =
match%bind
- dispatch Rpcs_versioned.Submit_work.Latest.rpc
+ dispatch ~compile_config Rpcs_versioned.Submit_work.Latest.rpc
shutdown_on_disconnect result daemon_address
with
| Error e ->
@@ -340,8 +335,7 @@ module Make (Inputs : Intf.Inputs_intf) :
in
go ()
- let command_from_rpcs ~commit_id ~proof_level:default_proof_level
- ~constraint_constants
+ let command_from_rpcs ~commit_id
(module Rpcs_versioned : Intf.Rpcs_versioned_S
with type Work.ledger_proof = Inputs.Ledger_proof.t ) =
Command.async ~summary:"Snark worker"
@@ -350,7 +344,7 @@ module Make (Inputs : Intf.Inputs_intf) :
flag "--daemon-address" ~aliases:[ "daemon-address" ]
(required (Arg_type.create Host_and_port.of_string))
~doc:"HOST-AND-PORT address daemon is listening on"
- and proof_level =
+ and cli_proof_level =
flag "--proof-level" ~aliases:[ "proof-level" ]
(optional (Arg_type.create Genesis_constants.Proof_level.of_string))
~doc:"full|check|none"
@@ -360,13 +354,21 @@ module Make (Inputs : Intf.Inputs_intf) :
(optional bool)
~doc:
"true|false Shutdown when disconnected from daemon (default:true)"
+ and config_file = Cli_lib.Flag.config_files
and conf_dir = Cli_lib.Flag.conf_dir in
fun () ->
let logger =
Logger.create () ~metadata:[ ("process", `String "Snark Worker") ]
in
- let proof_level =
- Option.value ~default:default_proof_level proof_level
+ let%bind.Deferred constraint_constants, proof_level, compile_config =
+ let%map.Deferred config =
+ Runtime_config.Constants.load_constants ?conf_dir ?cli_proof_level
+ ~logger config_file
+ in
+ Runtime_config.Constants.
+ ( constraint_constants config
+ , proof_level config
+ , compile_config config )
in
Option.value_map ~default:() conf_dir ~f:(fun conf_dir ->
let logrotate_max_size = 1024 * 10 in
@@ -385,7 +387,7 @@ module Make (Inputs : Intf.Inputs_intf) :
Core.exit 0 ) ;
main
(module Rpcs_versioned)
- ~logger ~proof_level ~constraint_constants daemon_port
+ ~logger ~proof_level ~constraint_constants ~compile_config daemon_port
(Option.value ~default:true shutdown_on_disconnect))
let arguments ~proof_level ~daemon_address ~shutdown_on_disconnect =
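
The RPC timing knobs now ride along in `Mina_compile_config.t` instead of the deleted unconfigurable-constants module. Reduced to its connection setup, the new `dispatch` amounts to the following (a sketch; the field names are the ones used in the hunk above):

```ocaml
(* Sketch of the connection parameters sourced from compile_config. *)
let connect ~(compile_config : Mina_compile_config.t) address ~f =
  Rpc.Connection.with_client
    ~handshake_timeout:compile_config.rpc_handshake_timeout
    ~heartbeat_config:
      (Rpc.Connection.Heartbeat_config.create
         ~timeout:compile_config.rpc_heartbeat_timeout
         ~send_every:compile_config.rpc_heartbeat_send_every () )
    (Tcp.Where_to_connect.of_host_and_port address)
    f
```
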
diff --git a/src/lib/snark_worker/intf.ml b/src/lib/snark_worker/intf.ml
index 9173d066d9c..2a9d93bb03c 100644
--- a/src/lib/snark_worker/intf.ml
+++ b/src/lib/snark_worker/intf.ml
@@ -154,8 +154,6 @@ module type S0 = sig
val command_from_rpcs :
commit_id:string
- -> proof_level:Genesis_constants.Proof_level.t
- -> constraint_constants:Genesis_constants.Constraint_constants.t
-> (module Rpcs_versioned_S with type Work.ledger_proof = ledger_proof)
-> Command.t
@@ -173,9 +171,5 @@ module type S = sig
module Rpcs_versioned :
Rpcs_versioned_S with type Work.ledger_proof = ledger_proof
- val command :
- commit_id:string
- -> proof_level:Genesis_constants.Proof_level.t
- -> constraint_constants:Genesis_constants.Constraint_constants.t
- -> Command.t
+ val command : commit_id:string -> Command.t
end
diff --git a/src/lib/snark_worker/standalone/run_snark_worker.ml b/src/lib/snark_worker/standalone/run_snark_worker.ml
index cc243c1ab33..929f51b7b8d 100644
--- a/src/lib/snark_worker/standalone/run_snark_worker.ml
+++ b/src/lib/snark_worker/standalone/run_snark_worker.ml
@@ -8,7 +8,8 @@ let command =
(let%map_open spec =
flag "--spec-sexp" ~doc:""
(required (sexp_conv Prod.single_spec_of_sexp))
- and proof_level =
+ and config_file = Cli_lib.Flag.config_files
+ and cli_proof_level =
flag "--proof-level" ~doc:""
(optional_with_default Genesis_constants.Proof_level.Full
(Command.Arg_type.of_alist_exn
@@ -19,8 +20,14 @@ let command =
in
fun () ->
let open Async in
- let constraint_constants =
- Genesis_constants.Compiled.constraint_constants
+ let open Deferred.Let_syntax in
+ let%bind constraint_constants, proof_level =
+ let logger = Logger.create () in
+ let%map conf =
+ Runtime_config.Constants.load_constants ~cli_proof_level ~logger
+ config_file
+ in
+ Runtime_config.Constants.(constraint_constants conf, proof_level conf)
in
let%bind worker_state =
Prod.Worker_state.create ~constraint_constants ~proof_level ()
diff --git a/src/lib/staged_ledger/pre_diff_info.ml b/src/lib/staged_ledger/pre_diff_info.ml
index 571d72ccafb..57a9d89ce6e 100644
--- a/src/lib/staged_ledger/pre_diff_info.ml
+++ b/src/lib/staged_ledger/pre_diff_info.ml
@@ -361,7 +361,7 @@ let compute_statuses
let split_transaction_statuses txns_with_statuses =
List.partition_map txns_with_statuses ~f:(fun txn_applied ->
let { With_status.data = txn; status } =
- Mina_ledger.Ledger.Transaction_applied.transaction txn_applied
+ Mina_ledger.Ledger.transaction_of_applied txn_applied
in
match txn with
| Transaction.Command cmd ->
diff --git a/src/lib/staged_ledger/staged_ledger.ml b/src/lib/staged_ledger/staged_ledger.ml
index af202350850..2da0b824243 100644
--- a/src/lib/staged_ledger/staged_ledger.ml
+++ b/src/lib/staged_ledger/staged_ledger.ml
@@ -580,20 +580,17 @@ module T = struct
let second_pass_ledger_target_hash = Ledger.merkle_root ledger in
let%bind supply_increase =
to_staged_ledger_or_error
- (Ledger.Transaction_applied.supply_increase ~constraint_constants
- applied_txn )
+ (Mina_transaction_logic.Transaction_applied.supply_increase
+ ~constraint_constants applied_txn )
in
let%map () =
- let actual_status =
- Ledger.Transaction_applied.transaction_status applied_txn
- in
+ let actual_status = Ledger.status_of_applied applied_txn in
if Transaction_status.equal pre_stmt.expected_status actual_status then
return ()
else
let txn_with_expected_status =
{ With_status.data =
- With_status.data
- (Ledger.Transaction_applied.transaction applied_txn)
+ With_status.data (Ledger.transaction_of_applied applied_txn)
; status = pre_stmt.expected_status
}
in
@@ -789,9 +786,7 @@ module T = struct
List.fold_right ~init:(Ok []) data
~f:(fun (d : Scan_state.Transaction_with_witness.t) acc ->
let%map.Or_error acc = acc in
- let t =
- d.transaction_with_info |> Ledger.Transaction_applied.transaction
- in
+ let t = d.transaction_with_info |> Ledger.transaction_of_applied in
t :: acc )
in
let total_fee_excess txns =
@@ -2389,10 +2384,8 @@ let%test_module "staged ledger tests" =
let verifier =
Async.Thread_safe.block_on_async_exn (fun () ->
- Verifier.create ~logger ~proof_level ~constraint_constants
- ~conf_dir:None
- ~pids:(Child_processes.Termination.create_pid_table ())
- ~commit_id:"not specified for unit tests" () )
+ Verifier.For_tests.default ~constraint_constants ~logger ~proof_level
+ () )
let find_vk ledger =
Zkapp_command.Verifiable.load_vk_from_ledger ~get:(Ledger.get ledger)
@@ -5187,12 +5180,10 @@ let%test_module "staged ledger tests" =
(Staged_ledger_diff.With_valid_signatures_and_proofs
.commands diff )
= 1 ) ;
+
let%bind verifier_full =
- Verifier.create ~logger ~proof_level:Full
- ~constraint_constants ~conf_dir:None
- ~pids:
- (Child_processes.Termination.create_pid_table ())
- ~commit_id:"not specified for unit tests" ()
+ Verifier.For_tests.default ~constraint_constants ~logger
+ ~proof_level:Full ()
in
match%map
Sl.apply ~constraint_constants ~global_slot !sl
diff --git a/src/lib/staged_ledger/staged_ledger.mli b/src/lib/staged_ledger/staged_ledger.mli
index 805314908cb..e94589bbea3 100644
--- a/src/lib/staged_ledger/staged_ledger.mli
+++ b/src/lib/staged_ledger/staged_ledger.mli
@@ -93,7 +93,7 @@ module Scan_state : sig
-> apply_second_pass:
( Ledger.t
-> Ledger.Transaction_partially_applied.t
- -> Ledger.Transaction_applied.t Or_error.t )
+ -> Mina_transaction_logic.Transaction_applied.t Or_error.t )
-> apply_first_pass_sparse_ledger:
( global_slot:Mina_numbers.Global_slot_since_genesis.t
-> txn_state_view:Mina_base.Zkapp_precondition.Protocol_state.View.t
@@ -121,7 +121,7 @@ module Scan_state : sig
-> apply_second_pass:
( Ledger.t
-> Ledger.Transaction_partially_applied.t
- -> Ledger.Transaction_applied.t Or_error.t )
+ -> Mina_transaction_logic.Transaction_applied.t Or_error.t )
-> apply_first_pass_sparse_ledger:
( global_slot:Mina_numbers.Global_slot_since_genesis.t
-> txn_state_view:Mina_base.Zkapp_precondition.Protocol_state.View.t
diff --git a/src/lib/transaction/transaction.ml b/src/lib/transaction/transaction.ml
index 5fa84bc6181..b45b0361c3e 100644
--- a/src/lib/transaction/transaction.ml
+++ b/src/lib/transaction/transaction.ml
@@ -144,10 +144,10 @@ let valid_size ~genesis_constants (t : t) =
| Fee_transfer _ | Coinbase _ ->
Ok ()
-let check_well_formedness ~genesis_constants (t : t) =
+let check_well_formedness ~genesis_constants ~compile_config (t : t) =
match t with
| Command cmd ->
- User_command.check_well_formedness ~genesis_constants cmd
+ User_command.check_well_formedness ~genesis_constants ~compile_config cmd
| Fee_transfer _ | Coinbase _ ->
Ok ()
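
As in the transaction-pool hunk earlier, every command-validation site now threads the compile config alongside the genesis constants. In isolation (a sketch; `cmd` is a `User_command.t` and the boolean wrapper is illustrative):

```ocaml
(* Sketch: well-formedness check with both constant sets supplied. *)
let is_well_formed ~genesis_constants ~compile_config cmd =
  match
    User_command.check_well_formedness ~genesis_constants ~compile_config cmd
  with
  | Ok () ->
      true
  | Error _ ->
      false
```
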
diff --git a/src/lib/transaction_inclusion_status/transaction_inclusion_status.ml b/src/lib/transaction_inclusion_status/transaction_inclusion_status.ml
index 555bf91eeb6..928755d1135 100644
--- a/src/lib/transaction_inclusion_status/transaction_inclusion_status.ml
+++ b/src/lib/transaction_inclusion_status/transaction_inclusion_status.ml
@@ -92,10 +92,8 @@ let%test_module "transaction_status" =
let verifier =
Async.Thread_safe.block_on_async_exn (fun () ->
- Verifier.create ~logger ~proof_level ~constraint_constants
- ~conf_dir:None
- ~pids:(Child_processes.Termination.create_pid_table ())
- ~commit_id:"not specified for unit tests" () )
+ Verifier.For_tests.default ~constraint_constants ~logger ~proof_level
+ () )
let key_gen =
let open Quickcheck.Generator in
@@ -119,7 +117,7 @@ let%test_module "transaction_status" =
let config =
Transaction_pool.Resource_pool.make_config ~trust_system ~pool_max_size
~verifier ~genesis_constants:precomputed_values.genesis_constants
- ~slot_tx_end:None
+ ~slot_tx_end:None ~compile_config:precomputed_values.compile_config
in
let transaction_pool, _, local_sink =
Transaction_pool.create ~config
diff --git a/src/lib/transaction_logic/mina_transaction_logic.ml b/src/lib/transaction_logic/mina_transaction_logic.ml
index 381b06b85ea..6518aa1dc0f 100644
--- a/src/lib/transaction_logic/mina_transaction_logic.ml
+++ b/src/lib/transaction_logic/mina_transaction_logic.ml
@@ -6,337 +6,15 @@ open Mina_transaction
module Zkapp_command_logic = Zkapp_command_logic
module Global_slot_since_genesis = Mina_numbers.Global_slot_since_genesis
-module Transaction_applied = struct
- module UC = Signed_command
-
- module Signed_command_applied = struct
- module Common = struct
- [%%versioned
- module Stable = struct
- module V2 = struct
- type t =
- { user_command : Signed_command.Stable.V2.t With_status.Stable.V2.t
- }
- [@@deriving sexp, to_yojson]
-
- let to_latest = Fn.id
- end
- end]
- end
-
- module Body = struct
- [%%versioned
- module Stable = struct
- module V2 = struct
- type t =
- | Payment of { new_accounts : Account_id.Stable.V2.t list }
- | Stake_delegation of
- { previous_delegate : Public_key.Compressed.Stable.V1.t option }
- | Failed
- [@@deriving sexp, to_yojson]
-
- let to_latest = Fn.id
- end
- end]
- end
-
- [%%versioned
- module Stable = struct
- module V2 = struct
- type t = { common : Common.Stable.V2.t; body : Body.Stable.V2.t }
- [@@deriving sexp, to_yojson]
-
- let to_latest = Fn.id
- end
- end]
-
- let new_accounts (t : t) =
- match t.body with
- | Payment { new_accounts; _ } ->
- new_accounts
- | Stake_delegation _ | Failed ->
- []
- end
-
- module Zkapp_command_applied = struct
- [%%versioned
- module Stable = struct
- module V1 = struct
- type t =
- { accounts :
- (Account_id.Stable.V2.t * Account.Stable.V2.t option) list
- ; command : Zkapp_command.Stable.V1.t With_status.Stable.V2.t
- ; new_accounts : Account_id.Stable.V2.t list
- }
- [@@deriving sexp, to_yojson]
-
- let to_latest = Fn.id
- end
- end]
- end
-
- module Command_applied = struct
- [%%versioned
- module Stable = struct
- module V2 = struct
- type t =
- | Signed_command of Signed_command_applied.Stable.V2.t
- | Zkapp_command of Zkapp_command_applied.Stable.V1.t
- [@@deriving sexp, to_yojson]
-
- let to_latest = Fn.id
- end
- end]
- end
-
- module Fee_transfer_applied = struct
- [%%versioned
- module Stable = struct
- module V2 = struct
- type t =
- { fee_transfer : Fee_transfer.Stable.V2.t With_status.Stable.V2.t
- ; new_accounts : Account_id.Stable.V2.t list
- ; burned_tokens : Currency.Amount.Stable.V1.t
- }
- [@@deriving sexp, to_yojson]
-
- let to_latest = Fn.id
- end
- end]
- end
-
- module Coinbase_applied = struct
- [%%versioned
- module Stable = struct
- module V2 = struct
- type t =
- { coinbase : Coinbase.Stable.V1.t With_status.Stable.V2.t
- ; new_accounts : Account_id.Stable.V2.t list
- ; burned_tokens : Currency.Amount.Stable.V1.t
- }
- [@@deriving sexp, to_yojson]
-
- let to_latest = Fn.id
- end
- end]
- end
-
- module Varying = struct
- [%%versioned
- module Stable = struct
- module V2 = struct
- type t =
- | Command of Command_applied.Stable.V2.t
- | Fee_transfer of Fee_transfer_applied.Stable.V2.t
- | Coinbase of Coinbase_applied.Stable.V2.t
- [@@deriving sexp, to_yojson]
-
- let to_latest = Fn.id
- end
- end]
- end
-
- [%%versioned
- module Stable = struct
- module V2 = struct
- type t =
- { previous_hash : Ledger_hash.Stable.V1.t
- ; varying : Varying.Stable.V2.t
- }
- [@@deriving sexp, to_yojson]
-
- let to_latest = Fn.id
- end
- end]
-
- let burned_tokens : t -> Currency.Amount.t =
- fun { varying; _ } ->
- match varying with
- | Command _ ->
- Currency.Amount.zero
- | Fee_transfer f ->
- f.burned_tokens
- | Coinbase c ->
- c.burned_tokens
-
- let new_accounts : t -> Account_id.t list =
- fun { varying; _ } ->
- match varying with
- | Command c -> (
- match c with
- | Signed_command sc ->
- Signed_command_applied.new_accounts sc
- | Zkapp_command zc ->
- zc.new_accounts )
- | Fee_transfer f ->
- f.new_accounts
- | Coinbase c ->
- c.new_accounts
-
- let supply_increase :
- constraint_constants:Genesis_constants.Constraint_constants.t
- -> t
- -> Currency.Amount.Signed.t Or_error.t =
- fun ~constraint_constants t ->
- let open Or_error.Let_syntax in
- let burned_tokens = Currency.Amount.Signed.of_unsigned (burned_tokens t) in
- let account_creation_fees =
- let account_creation_fee_int =
- constraint_constants.account_creation_fee
- |> Currency.Fee.to_nanomina_int
- in
- let num_accounts_created = List.length @@ new_accounts t in
- (* int type is OK, no danger of overflow *)
- Currency.Amount.(
- Signed.of_unsigned
- @@ of_nanomina_int_exn (account_creation_fee_int * num_accounts_created))
- in
- let txn : Transaction.t =
- match t.varying with
- | Command
- (Signed_command { common = { user_command = { data; _ }; _ }; _ }) ->
- Command (Signed_command data)
- | Command (Zkapp_command c) ->
- Command (Zkapp_command c.command.data)
- | Fee_transfer f ->
- Fee_transfer f.fee_transfer.data
- | Coinbase c ->
- Coinbase c.coinbase.data
- in
- let%bind expected_supply_increase =
- Transaction.expected_supply_increase txn
- in
- let rec process_decreases total = function
- | [] ->
- Some total
- | amt :: amts ->
- let%bind.Option sum =
- Currency.Amount.Signed.(add @@ negate amt) total
- in
- process_decreases sum amts
- in
- let total =
- process_decreases
- (Currency.Amount.Signed.of_unsigned expected_supply_increase)
- [ burned_tokens; account_creation_fees ]
- in
- Option.value_map total ~default:(Or_error.error_string "overflow")
- ~f:(fun v -> Ok v)
-
- let transaction_with_status : t -> Transaction.t With_status.t =
- fun { varying; _ } ->
- match varying with
- | Command (Signed_command uc) ->
- With_status.map uc.common.user_command ~f:(fun cmd ->
- Transaction.Command (User_command.Signed_command cmd) )
- | Command (Zkapp_command s) ->
- With_status.map s.command ~f:(fun c ->
- Transaction.Command (User_command.Zkapp_command c) )
- | Fee_transfer f ->
- With_status.map f.fee_transfer ~f:(fun f -> Transaction.Fee_transfer f)
- | Coinbase c ->
- With_status.map c.coinbase ~f:(fun c -> Transaction.Coinbase c)
-
- let transaction_status : t -> Transaction_status.t =
- fun { varying; _ } ->
- match varying with
- | Command
- (Signed_command { common = { user_command = { status; _ }; _ }; _ }) ->
- status
- | Command (Zkapp_command c) ->
- c.command.status
- | Fee_transfer f ->
- f.fee_transfer.status
- | Coinbase c ->
- c.coinbase.status
-end
-
module type S = sig
type ledger
type location
- module Transaction_applied : sig
- module Signed_command_applied : sig
- module Common : sig
- type t = Transaction_applied.Signed_command_applied.Common.t =
- { user_command : Signed_command.t With_status.t }
- [@@deriving sexp]
- end
-
- module Body : sig
- type t = Transaction_applied.Signed_command_applied.Body.t =
- | Payment of { new_accounts : Account_id.t list }
- | Stake_delegation of
- { previous_delegate : Public_key.Compressed.t option }
- | Failed
- [@@deriving sexp]
- end
-
- type t = Transaction_applied.Signed_command_applied.t =
- { common : Common.t; body : Body.t }
- [@@deriving sexp]
- end
-
- module Zkapp_command_applied : sig
- type t = Transaction_applied.Zkapp_command_applied.t =
- { accounts : (Account_id.t * Account.t option) list
- ; command : Zkapp_command.t With_status.t
- ; new_accounts : Account_id.t list
- }
- [@@deriving sexp]
- end
-
- module Command_applied : sig
- type t = Transaction_applied.Command_applied.t =
- | Signed_command of Signed_command_applied.t
- | Zkapp_command of Zkapp_command_applied.t
- [@@deriving sexp]
- end
-
- module Fee_transfer_applied : sig
- type t = Transaction_applied.Fee_transfer_applied.t =
- { fee_transfer : Fee_transfer.t With_status.t
- ; new_accounts : Account_id.t list
- ; burned_tokens : Currency.Amount.t
- }
- [@@deriving sexp]
- end
-
- module Coinbase_applied : sig
- type t = Transaction_applied.Coinbase_applied.t =
- { coinbase : Coinbase.t With_status.t
- ; new_accounts : Account_id.t list
- ; burned_tokens : Currency.Amount.t
- }
- [@@deriving sexp]
- end
-
- module Varying : sig
- type t = Transaction_applied.Varying.t =
- | Command of Command_applied.t
- | Fee_transfer of Fee_transfer_applied.t
- | Coinbase of Coinbase_applied.t
- [@@deriving sexp]
- end
-
- type t = Transaction_applied.t =
- { previous_hash : Ledger_hash.t; varying : Varying.t }
- [@@deriving sexp]
-
- val burned_tokens : t -> Currency.Amount.t
+ val transaction_of_applied :
+ Transaction_applied.t -> Transaction.t With_status.t
- val supply_increase :
- constraint_constants:Genesis_constants.Constraint_constants.t
- -> t
- -> Currency.Amount.Signed.t Or_error.t
-
- val transaction : t -> Transaction.t With_status.t
-
- val transaction_status : t -> Transaction_status.t
-
- val new_accounts : t -> Account_id.t list
- end
+ val status_of_applied : Transaction_applied.t -> Transaction_status.t
module Global_state : sig
type t =
@@ -760,38 +438,33 @@ module Make (L : Ledger_intf.S) :
transaction expiry slot %{sexp: Global_slot_since_genesis.t}"
current_global_slot valid_until
- module Transaction_applied = struct
- include Transaction_applied
-
- let transaction : t -> Transaction.t With_status.t =
- fun { varying; _ } ->
- match varying with
- | Command (Signed_command uc) ->
- With_status.map uc.common.user_command ~f:(fun cmd ->
- Transaction.Command (User_command.Signed_command cmd) )
- | Command (Zkapp_command s) ->
- With_status.map s.command ~f:(fun c ->
- Transaction.Command (User_command.Zkapp_command c) )
- | Fee_transfer f ->
- With_status.map f.fee_transfer ~f:(fun f ->
- Transaction.Fee_transfer f )
- | Coinbase c ->
- With_status.map c.coinbase ~f:(fun c -> Transaction.Coinbase c)
-
- let transaction_status : t -> Transaction_status.t =
- fun { varying; _ } ->
- match varying with
- | Command
- (Signed_command { common = { user_command = { status; _ }; _ }; _ })
- ->
- status
- | Command (Zkapp_command c) ->
- c.command.status
- | Fee_transfer f ->
- f.fee_transfer.status
- | Coinbase c ->
- c.coinbase.status
- end
+ let transaction_of_applied :
+ Transaction_applied.t -> Transaction.t With_status.t =
+ fun { varying; _ } ->
+ match varying with
+ | Command (Signed_command uc) ->
+ With_status.map uc.common.user_command ~f:(fun cmd ->
+ Transaction.Command (User_command.Signed_command cmd) )
+ | Command (Zkapp_command s) ->
+ With_status.map s.command ~f:(fun c ->
+ Transaction.Command (User_command.Zkapp_command c) )
+ | Fee_transfer f ->
+ With_status.map f.fee_transfer ~f:(fun f -> Transaction.Fee_transfer f)
+ | Coinbase c ->
+ With_status.map c.coinbase ~f:(fun c -> Transaction.Coinbase c)
+
+ let status_of_applied : Transaction_applied.t -> Transaction_status.t =
+ fun { varying; _ } ->
+ match varying with
+ | Command
+ (Signed_command { common = { user_command = { status; _ }; _ }; _ }) ->
+ status
+ | Command (Zkapp_command c) ->
+ c.command.status
+ | Fee_transfer f ->
+ f.fee_transfer.status
+ | Coinbase c ->
+ c.coinbase.status
let get_new_accounts action pk =
if Ledger_intf.equal_account_state action `Added then [ pk ] else []
@@ -2939,3 +2612,5 @@ module For_tests = struct
failwithf "gen_zkapp_command_from_test_spec: expected one spec, got %d"
(List.length specs) ()
end
+
+module Transaction_applied = Transaction_applied
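
With the applied-transaction types hoisted into the shared `Transaction_applied` module, the functor keeps only the two ledger-facing accessors. A small sketch, assuming `module Logic = Mina_transaction_logic.Make (Ledger)` as in `test/helpers.ml`:

```ocaml
(* Sketch: recover the transaction and its status from an applied
   result via the accessors that replace the per-functor
   Transaction_applied submodule. *)
let summarize (applied : Mina_transaction_logic.Transaction_applied.t) =
  let { Mina_base.With_status.data = txn; status } =
    Logic.transaction_of_applied applied
  in
  assert (
    Mina_base.Transaction_status.equal status
      (Logic.status_of_applied applied) ) ;
  txn
```
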
diff --git a/src/lib/transaction_logic/test/helpers.ml b/src/lib/transaction_logic/test/helpers.ml
index 5299de0b91f..3ed25abdf71 100644
--- a/src/lib/transaction_logic/test/helpers.ml
+++ b/src/lib/transaction_logic/test/helpers.ml
@@ -3,8 +3,10 @@ module Transaction_logic = Mina_transaction_logic.Make (Ledger)
module Zk_cmd_result = struct
type t =
- Transaction_logic.Transaction_applied.Zkapp_command_applied.t * Ledger.t
+ Mina_transaction_logic.Transaction_applied.Zkapp_command_applied.t
+ * Ledger.t
let sexp_of_t (txn, _) =
- Transaction_logic.Transaction_applied.Zkapp_command_applied.sexp_of_t txn
+ Mina_transaction_logic.Transaction_applied.Zkapp_command_applied.sexp_of_t
+ txn
end
diff --git a/src/lib/transaction_logic/test/predicates.ml b/src/lib/transaction_logic/test/predicates.ml
index 15fc4c95616..0fbb9fe2d7b 100644
--- a/src/lib/transaction_logic/test/predicates.ml
+++ b/src/lib/transaction_logic/test/predicates.ml
@@ -12,8 +12,7 @@ let result ?(with_error = Fn.const false) ~f result =
match Result.bind result ~f with Ok b -> b | Error e -> with_error e
let verify_account_updates ~(ledger : Helpers.Ledger.t)
- ~(txn :
- Helpers.Transaction_logic.Transaction_applied.Zkapp_command_applied.t )
+ ~(txn : Mina_transaction_logic.Transaction_applied.Zkapp_command_applied.t)
~(f : Amount.Signed.t -> Account.t option * Account.t option -> bool)
(account : Test_account.t) =
let open Helpers in
@@ -80,8 +79,7 @@ let verify_balance_changes ~txn ~ledger accounts =
false ) )
let verify_balances_unchanged ~(ledger : Helpers.Ledger.t)
- ~(txn :
- Helpers.Transaction_logic.Transaction_applied.Zkapp_command_applied.t )
+ ~(txn : Mina_transaction_logic.Transaction_applied.Zkapp_command_applied.t)
(accounts : Test_account.t list) =
let is_fee_payer account =
Public_key.Compressed.equal account.Test_account.pk
diff --git a/src/lib/transaction_logic/test/zkapp_logic.ml b/src/lib/transaction_logic/test/zkapp_logic.ml
index 1ee7ad57f88..cf2b585b9b9 100644
--- a/src/lib/transaction_logic/test/zkapp_logic.ml
+++ b/src/lib/transaction_logic/test/zkapp_logic.ml
@@ -27,7 +27,7 @@ let balance_to_fee = Fn.compose Amount.to_fee Balance.to_amount
validate them. *)
let%test_module "Test transaction logic." =
( module struct
- open Transaction_logic.Transaction_applied.Zkapp_command_applied
+ open Mina_transaction_logic.Transaction_applied.Zkapp_command_applied
let run_zkapp_cmd ~fee_payer ~fee ~accounts txns =
let open Result.Let_syntax in
diff --git a/src/lib/transaction_logic/transaction_applied.ml b/src/lib/transaction_logic/transaction_applied.ml
new file mode 100644
index 00000000000..aa62f240ce2
--- /dev/null
+++ b/src/lib/transaction_logic/transaction_applied.ml
@@ -0,0 +1,241 @@
+open Core_kernel
+open Mina_base
+open Signature_lib
+open Mina_transaction
+module UC = Signed_command
+
+module Signed_command_applied = struct
+ module Common = struct
+ [%%versioned
+ module Stable = struct
+ module V2 = struct
+ type t =
+ { user_command : Signed_command.Stable.V2.t With_status.Stable.V2.t }
+ [@@deriving sexp, to_yojson]
+
+ let to_latest = Fn.id
+ end
+ end]
+ end
+
+ module Body = struct
+ [%%versioned
+ module Stable = struct
+ module V2 = struct
+ type t =
+ | Payment of { new_accounts : Account_id.Stable.V2.t list }
+ | Stake_delegation of
+ { previous_delegate : Public_key.Compressed.Stable.V1.t option }
+ | Failed
+ [@@deriving sexp, to_yojson]
+
+ let to_latest = Fn.id
+ end
+ end]
+ end
+
+ [%%versioned
+ module Stable = struct
+ module V2 = struct
+ type t = { common : Common.Stable.V2.t; body : Body.Stable.V2.t }
+ [@@deriving sexp, to_yojson]
+
+ let to_latest = Fn.id
+ end
+ end]
+
+ let new_accounts (t : t) =
+ match t.body with
+ | Payment { new_accounts; _ } ->
+ new_accounts
+ | Stake_delegation _ | Failed ->
+ []
+end
+
+module Zkapp_command_applied = struct
+ [%%versioned
+ module Stable = struct
+ module V1 = struct
+ type t =
+ { accounts : (Account_id.Stable.V2.t * Account.Stable.V2.t option) list
+ ; command : Zkapp_command.Stable.V1.t With_status.Stable.V2.t
+ ; new_accounts : Account_id.Stable.V2.t list
+ }
+ [@@deriving sexp, to_yojson]
+
+ let to_latest = Fn.id
+ end
+ end]
+end
+
+module Command_applied = struct
+ [%%versioned
+ module Stable = struct
+ module V2 = struct
+ type t =
+ | Signed_command of Signed_command_applied.Stable.V2.t
+ | Zkapp_command of Zkapp_command_applied.Stable.V1.t
+ [@@deriving sexp, to_yojson]
+
+ let to_latest = Fn.id
+ end
+ end]
+end
+
+module Fee_transfer_applied = struct
+ [%%versioned
+ module Stable = struct
+ module V2 = struct
+ type t =
+ { fee_transfer : Fee_transfer.Stable.V2.t With_status.Stable.V2.t
+ ; new_accounts : Account_id.Stable.V2.t list
+ ; burned_tokens : Currency.Amount.Stable.V1.t
+ }
+ [@@deriving sexp, to_yojson]
+
+ let to_latest = Fn.id
+ end
+ end]
+end
+
+module Coinbase_applied = struct
+ [%%versioned
+ module Stable = struct
+ module V2 = struct
+ type t =
+ { coinbase : Coinbase.Stable.V1.t With_status.Stable.V2.t
+ ; new_accounts : Account_id.Stable.V2.t list
+ ; burned_tokens : Currency.Amount.Stable.V1.t
+ }
+ [@@deriving sexp, to_yojson]
+
+ let to_latest = Fn.id
+ end
+ end]
+end
+
+module Varying = struct
+ [%%versioned
+ module Stable = struct
+ module V2 = struct
+ type t =
+ | Command of Command_applied.Stable.V2.t
+ | Fee_transfer of Fee_transfer_applied.Stable.V2.t
+ | Coinbase of Coinbase_applied.Stable.V2.t
+ [@@deriving sexp, to_yojson]
+
+ let to_latest = Fn.id
+ end
+ end]
+end
+
+[%%versioned
+module Stable = struct
+ module V2 = struct
+ type t =
+ { previous_hash : Ledger_hash.Stable.V1.t; varying : Varying.Stable.V2.t }
+ [@@deriving sexp, to_yojson]
+
+ let to_latest = Fn.id
+ end
+end]
+
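+(* Only fee transfers and coinbases can burn tokens; applied commands never
+   do. *)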
+let burned_tokens : t -> Currency.Amount.t =
+ fun { varying; _ } ->
+ match varying with
+ | Command _ ->
+ Currency.Amount.zero
+ | Fee_transfer f ->
+ f.burned_tokens
+ | Coinbase c ->
+ c.burned_tokens
+
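+(* Account ids for accounts created as a side effect of applying the
+   transaction. *)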
+let new_accounts : t -> Account_id.t list =
+ fun { varying; _ } ->
+ match varying with
+ | Command c -> (
+ match c with
+ | Signed_command sc ->
+ Signed_command_applied.new_accounts sc
+ | Zkapp_command zc ->
+ zc.new_accounts )
+ | Fee_transfer f ->
+ f.new_accounts
+ | Coinbase c ->
+ c.new_accounts
+
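+(* Net supply change of an applied transaction: the expected supply increase
+   of the underlying transaction (e.g. the coinbase amount), minus burned
+   tokens, minus the fees charged for any accounts the transaction created.
+   Returns an error on overflow. *)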
+let supply_increase :
+ constraint_constants:Genesis_constants.Constraint_constants.t
+ -> t
+ -> Currency.Amount.Signed.t Or_error.t =
+ fun ~constraint_constants t ->
+ let open Or_error.Let_syntax in
+ let burned_tokens = Currency.Amount.Signed.of_unsigned (burned_tokens t) in
+ let account_creation_fees =
+ let account_creation_fee_int =
+ constraint_constants.account_creation_fee |> Currency.Fee.to_nanomina_int
+ in
+ let num_accounts_created = List.length @@ new_accounts t in
+ (* int type is OK, no danger of overflow *)
+ Currency.Amount.(
+ Signed.of_unsigned
+ @@ of_nanomina_int_exn (account_creation_fee_int * num_accounts_created))
+ in
+ let txn : Transaction.t =
+ match t.varying with
+ | Command (Signed_command { common = { user_command = { data; _ }; _ }; _ })
+ ->
+ Command (Signed_command data)
+ | Command (Zkapp_command c) ->
+ Command (Zkapp_command c.command.data)
+ | Fee_transfer f ->
+ Fee_transfer f.fee_transfer.data
+ | Coinbase c ->
+ Coinbase c.coinbase.data
+ in
+ let%bind expected_supply_increase =
+ Transaction.expected_supply_increase txn
+ in
+ let rec process_decreases total = function
+ | [] ->
+ Some total
+ | amt :: amts ->
+ let%bind.Option sum =
+ Currency.Amount.Signed.(add @@ negate amt) total
+ in
+ process_decreases sum amts
+ in
+ let total =
+ process_decreases
+ (Currency.Amount.Signed.of_unsigned expected_supply_increase)
+ [ burned_tokens; account_creation_fees ]
+ in
+ Option.value_map total ~default:(Or_error.error_string "overflow")
+ ~f:(fun v -> Ok v)
+
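+(* Forget the application-specific result, recovering the generic
+   [Transaction.t] together with its status. *)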
+let transaction_with_status : t -> Transaction.t With_status.t =
+ fun { varying; _ } ->
+ match varying with
+ | Command (Signed_command uc) ->
+ With_status.map uc.common.user_command ~f:(fun cmd ->
+ Transaction.Command (User_command.Signed_command cmd) )
+ | Command (Zkapp_command s) ->
+ With_status.map s.command ~f:(fun c ->
+ Transaction.Command (User_command.Zkapp_command c) )
+ | Fee_transfer f ->
+ With_status.map f.fee_transfer ~f:(fun f -> Transaction.Fee_transfer f)
+ | Coinbase c ->
+ With_status.map c.coinbase ~f:(fun c -> Transaction.Coinbase c)
+
+let transaction_status : t -> Transaction_status.t =
+ fun { varying; _ } ->
+ match varying with
+ | Command (Signed_command { common = { user_command = { status; _ }; _ }; _ })
+ ->
+ status
+ | Command (Zkapp_command c) ->
+ c.command.status
+ | Fee_transfer f ->
+ f.fee_transfer.status
+ | Coinbase c ->
+ c.coinbase.status
diff --git a/src/lib/transaction_snark/test/account_timing/account_timing.ml b/src/lib/transaction_snark/test/account_timing/account_timing.ml
index 1d9a6ac594a..13b1464a6ba 100644
--- a/src/lib/transaction_snark/test/account_timing/account_timing.ml
+++ b/src/lib/transaction_snark/test/account_timing/account_timing.ml
@@ -362,7 +362,7 @@ let%test_module "account timing check" =
stack_with_state
in
let supply_increase =
- Mina_ledger.Ledger.Transaction_applied.supply_increase
+ Mina_transaction_logic.Transaction_applied.supply_increase
~constraint_constants txn_applied
|> Or_error.ok_exn
in
diff --git a/src/lib/transaction_snark/test/transaction_union/transaction_union.ml b/src/lib/transaction_snark/test/transaction_union/transaction_union.ml
index c5f9b64705f..2b9dabd86ee 100644
--- a/src/lib/transaction_snark/test/transaction_union/transaction_union.ml
+++ b/src/lib/transaction_snark/test/transaction_union/transaction_union.ml
@@ -143,7 +143,7 @@ let%test_module "Transaction union tests" =
|> Or_error.ok_exn
in
let supply_increase =
- Mina_ledger.Ledger.Transaction_applied.supply_increase
+ Mina_transaction_logic.Transaction_applied.supply_increase
~constraint_constants applied_transaction
|> Or_error.ok_exn
in
@@ -518,7 +518,9 @@ let%test_module "Transaction union tests" =
( Ledger.apply_user_command ~constraint_constants ledger
~txn_global_slot:current_global_slot t1
|> Or_error.ok_exn
- : Ledger.Transaction_applied.Signed_command_applied.t ) ;
+ : Mina_transaction_logic.Transaction_applied
+ .Signed_command_applied
+ .t ) ;
[%test_eq: Frozen_ledger_hash.t]
(Ledger.merkle_root ledger)
(Sparse_ledger.merkle_root sparse_ledger) ;
diff --git a/src/lib/transaction_snark/test/util.ml b/src/lib/transaction_snark/test/util.ml
index 6c87b202b14..f4711161da8 100644
--- a/src/lib/transaction_snark/test/util.ml
+++ b/src/lib/transaction_snark/test/util.ml
@@ -173,7 +173,7 @@ let check_zkapp_command_with_merges_exn ?(logger = logger_null)
let open Async.Deferred.Let_syntax in
let applied, statement_opt =
if ignore_outside_snark then
- ( Ledger.Transaction_applied.Varying.Command
+ ( Mina_transaction_logic.Transaction_applied.Varying.Command
(Zkapp_command
{ command =
{ With_status.status = Applied
@@ -213,8 +213,8 @@ let check_zkapp_command_with_merges_exn ?(logger = logger_null)
; connecting_ledger_right = connecting_ledger
; fee_excess = Zkapp_command.fee_excess zkapp_command
; supply_increase =
- Ledger.Transaction_applied.supply_increase
- ~constraint_constants applied_txn
+ Mina_transaction_logic.Transaction_applied
+ .supply_increase ~constraint_constants applied_txn
|> Or_error.ok_exn
; sok_digest = ()
}
@@ -620,7 +620,7 @@ let test_transaction_union ?expected_failure ?txn_global_slot ledger txn =
with
| Ok res ->
( if Option.is_some expected_failure then
- match Ledger.Transaction_applied.transaction_status res with
+ match Ledger.status_of_applied res with
| Applied ->
failwith
(sprintf "Expected Ledger.apply_transaction to fail with %s"
@@ -665,7 +665,8 @@ let test_transaction_union ?expected_failure ?txn_global_slot ledger txn =
let supply_increase =
Option.value_map applied_transaction ~default:Amount.Signed.zero
~f:(fun txn ->
- Ledger.Transaction_applied.supply_increase ~constraint_constants txn
+ Mina_transaction_logic.Transaction_applied.supply_increase
+ ~constraint_constants txn
|> Or_error.ok_exn )
in
match
diff --git a/src/lib/transaction_snark/transaction_snark.ml b/src/lib/transaction_snark/transaction_snark.ml
index b8966b98d4d..a7822ea0cf7 100644
--- a/src/lib/transaction_snark/transaction_snark.ml
+++ b/src/lib/transaction_snark/transaction_snark.ml
@@ -1854,7 +1854,7 @@ module Make_str (A : Wire_types.Concrete) = struct
[ correct_coinbase_target_stack; valid_init_state ] ) )
let main ?(witness : Witness.t option) (spec : Spec.t)
- ~constraint_constants (statement : Statement.With_sok.Checked.t) =
+ ~constraint_constants (statement : Statement.With_sok.var) =
let open Impl in
run_checked (dummy_constraints ()) ;
let ( ! ) x = Option.value_exn x in
@@ -3053,7 +3053,7 @@ module Make_str (A : Wire_types.Concrete) = struct
pc: Pending_coinbase_stack_state.t
*)
let%snarkydef_ main ~constraint_constants
- (statement : Statement.With_sok.Checked.t) =
+ (statement : Statement.With_sok.var) =
let%bind () = dummy_constraints () in
let%bind (module Shifted) = Tick.Inner_curve.Checked.Shifted.create () in
let%bind t =
@@ -3199,7 +3199,7 @@ module Make_str (A : Wire_types.Concrete) = struct
verify_transition tock_vk _ s1 s2 pending_coinbase_stack12.source, pending_coinbase_stack12.target is true
verify_transition tock_vk _ s2 s3 pending_coinbase_stack23.source, pending_coinbase_stack23.target is true
*)
- let%snarkydef_ main (s : Statement.With_sok.Checked.t) =
+ let%snarkydef_ main (s : Statement.With_sok.var) =
let%bind s1, s2 =
exists
Typ.(Statement.With_sok.typ * Statement.With_sok.typ)
@@ -3302,7 +3302,7 @@ module Make_str (A : Wire_types.Concrete) = struct
open Pickles_types
type tag =
- ( Statement.With_sok.Checked.t
+ ( Statement.With_sok.var
, Statement.With_sok.t
, Nat.N2.n
, Nat.N5.n )
@@ -3509,23 +3509,20 @@ module Make_str (A : Wire_types.Concrete) = struct
}
init_stack pending_coinbase_stack_state handler
- let verify (ts : (t * _) list) ~key =
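+ (* Shared by the [verify] functions below: reject the batch unless every
+ proof's sok_digest matches the digest of its message, then pass the
+ (statement, proof) pairs on to [f]. *)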
+ let verify_impl ~f ts =
if
- List.for_all ts ~f:(fun ({ statement; _ }, message) ->
- Sok_message.Digest.equal
- (Sok_message.digest message)
- statement.sok_digest )
- then
- Pickles.verify
- (module Nat.N2)
- (module Statement.With_sok)
- key
- (List.map ts ~f:(fun ({ statement; proof }, _) -> (statement, proof)))
+ List.for_all ts ~f:(fun (p, m) ->
+ Sok_message.Digest.equal (Sok_message.digest m) p.statement.sok_digest )
+ then f (List.map ts ~f:(fun ({ statement; proof }, _) -> (statement, proof)))
else
Async.return
(Or_error.error_string
"Transaction_snark.verify: Mismatched sok_message" )
+ let verify ~key =
+ verify_impl
+ ~f:(Pickles.verify (module Nat.N2) (module Statement.With_sok) key)
+
let constraint_system_digests ~constraint_constants () =
let digest = Tick.R1CS_constraint_system.digest in
[ ( "transaction-merge"
@@ -3964,18 +3961,7 @@ module Make_str (A : Wire_types.Concrete) = struct
let verify_against_digest { statement; proof } =
Proof.verify [ (statement, proof) ]
- let verify ts =
- if
- List.for_all ts ~f:(fun (p, m) ->
- Sok_message.Digest.equal (Sok_message.digest m)
- p.statement.sok_digest )
- then
- Proof.verify
- (List.map ts ~f:(fun ({ statement; proof }, _) -> (statement, proof)))
- else
- Async.return
- (Or_error.error_string
- "Transaction_snark.verify: Mismatched sok_message" )
+ let verify = verify_impl ~f:Proof.verify
let first_account_update
(witness : Transaction_witness.Zkapp_command_segment_witness.t) =
diff --git a/src/lib/transaction_snark/transaction_snark_intf.ml b/src/lib/transaction_snark/transaction_snark_intf.ml
index 68e7637176e..7625c08890c 100644
--- a/src/lib/transaction_snark/transaction_snark_intf.ml
+++ b/src/lib/transaction_snark/transaction_snark_intf.ml
@@ -34,15 +34,15 @@ module type Full = sig
open Pickles_types
type tag =
- ( Statement.With_sok.Checked.t
+ ( Statement.With_sok.var
, Statement.With_sok.t
, Nat.N2.n
, Nat.N5.n )
Pickles.Tag.t
val verify :
- (t * Sok_message.t) list
- -> key:Pickles.Verification_key.t
+ key:Pickles.Verification_key.t
+ -> (t * Sok_message.t) list
-> unit Or_error.t Async.Deferred.t
module Verification : sig
diff --git a/src/lib/transaction_snark/transaction_validator.ml b/src/lib/transaction_snark/transaction_validator.ml
index f44091193e6..776221812fb 100644
--- a/src/lib/transaction_snark/transaction_validator.ml
+++ b/src/lib/transaction_snark/transaction_validator.ml
@@ -16,7 +16,9 @@ let apply_user_command ~constraint_constants ~txn_global_slot l uc =
within_mask l ~f:(fun l' ->
Result.map
~f:(fun applied_txn ->
- applied_txn.Ledger.Transaction_applied.Signed_command_applied.common
+ applied_txn
+ .Mina_transaction_logic.Transaction_applied.Signed_command_applied
+ .common
.user_command
.status )
(Ledger.apply_user_command l' ~constraint_constants ~txn_global_slot uc) )
diff --git a/src/lib/transaction_snark/transaction_validator.mli b/src/lib/transaction_snark/transaction_validator.mli
index d9b3c60e7db..b4555fd4115 100644
--- a/src/lib/transaction_snark/transaction_validator.mli
+++ b/src/lib/transaction_snark/transaction_validator.mli
@@ -15,7 +15,7 @@ val apply_transactions :
-> txn_state_view:Zkapp_precondition.Protocol_state.View.t
-> Mina_ledger.Ledger.t
-> Transaction.t list
- -> Mina_ledger.Ledger.Transaction_applied.t list Or_error.t
+ -> Mina_transaction_logic.Transaction_applied.t list Or_error.t
val apply_transaction_first_pass :
constraint_constants:Genesis_constants.Constraint_constants.t
diff --git a/src/lib/transaction_snark_scan_state/transaction_snark_scan_state.ml b/src/lib/transaction_snark_scan_state/transaction_snark_scan_state.ml
index 47418185c32..a87774932bd 100644
--- a/src/lib/transaction_snark_scan_state/transaction_snark_scan_state.ml
+++ b/src/lib/transaction_snark_scan_state/transaction_snark_scan_state.ml
@@ -222,7 +222,7 @@ let create_expected_statement ~constraint_constants
@@ Sparse_ledger.merkle_root second_pass_ledger_witness
in
let { With_status.data = transaction; status = _ } =
- Ledger.Transaction_applied.transaction transaction_with_info
+ Ledger.transaction_of_applied transaction_with_info
in
let%bind protocol_state = get_state (fst state_hash) in
let state_view = Mina_state.Protocol_state.Body.view protocol_state.body in
@@ -248,8 +248,8 @@ let create_expected_statement ~constraint_constants
|> Frozen_ledger_hash.of_ledger_hash
in
let%map supply_increase =
- Ledger.Transaction_applied.supply_increase ~constraint_constants
- applied_transaction
+ Mina_transaction_logic.Transaction_applied.supply_increase
+ ~constraint_constants applied_transaction
in
( target_first_pass_merkle_root
, target_second_pass_merkle_root
@@ -701,7 +701,7 @@ module Transactions_ordered = struct
(txn_with_witness : Transaction_with_witness.t)
->
let txn =
- Ledger.Transaction_applied.transaction
+ Ledger.transaction_of_applied
txn_with_witness.transaction_with_info
in
let target_first_pass_ledger =
@@ -770,8 +770,7 @@ end
let extract_txn_and_global_slot (txn_with_witness : Transaction_with_witness.t)
=
let txn =
- Ledger.Transaction_applied.transaction
- txn_with_witness.transaction_with_info
+ Ledger.transaction_of_applied txn_with_witness.transaction_with_info
in
let state_hash = fst txn_with_witness.state_hash in
let global_slot = txn_with_witness.block_global_slot in
@@ -917,7 +916,7 @@ let apply_ordered_txns_stepwise ?(stop_at_first_pass = false) ordered_txns
k ()
| (expected_status, partially_applied_txn) :: partially_applied_txns' ->
let%bind res = apply_second_pass ledger partially_applied_txn in
- let status = Ledger.Transaction_applied.transaction_status res in
+ let status = Ledger.status_of_applied res in
if Transaction_status.equal expected_status status then
Ok
(`Continue
@@ -1041,8 +1040,7 @@ let apply_ordered_txns_stepwise ?(stop_at_first_pass = false) ordered_txns
Previous_incomplete_txns.Unapplied
(List.filter txns ~f:(fun txn ->
match
- (Ledger.Transaction_applied.transaction
- txn.transaction_with_info )
+ (Ledger.transaction_of_applied txn.transaction_with_info)
.data
with
| Command (Zkapp_command _) ->
diff --git a/src/lib/transaction_snark_scan_state/transaction_snark_scan_state.mli b/src/lib/transaction_snark_scan_state/transaction_snark_scan_state.mli
index e343e90d419..a896086a228 100644
--- a/src/lib/transaction_snark_scan_state/transaction_snark_scan_state.mli
+++ b/src/lib/transaction_snark_scan_state/transaction_snark_scan_state.mli
@@ -16,7 +16,7 @@ end]
module Transaction_with_witness : sig
(* TODO: The statement is redundant here - it can be computed from the witness and the transaction *)
type t =
- { transaction_with_info : Ledger.Transaction_applied.t
+ { transaction_with_info : Mina_transaction_logic.Transaction_applied.t
; state_hash : State_hash.t * State_body_hash.t
; statement : Transaction_snark.Statement.t
; init_stack : Transaction_snark.Pending_coinbase_stack_state.Init_stack.t
@@ -137,7 +137,7 @@ val get_snarked_ledger_sync :
-> apply_second_pass:
( Ledger.t
-> Ledger.Transaction_partially_applied.t
- -> Ledger.Transaction_applied.t Or_error.t )
+ -> Mina_transaction_logic.Transaction_applied.t Or_error.t )
-> apply_first_pass_sparse_ledger:
( global_slot:Mina_numbers.Global_slot_since_genesis.t
-> txn_state_view:Mina_base.Zkapp_precondition.Protocol_state.View.t
@@ -162,7 +162,7 @@ val get_snarked_ledger_async :
-> apply_second_pass:
( Ledger.t
-> Ledger.Transaction_partially_applied.t
- -> Ledger.Transaction_applied.t Or_error.t )
+ -> Mina_transaction_logic.Transaction_applied.t Or_error.t )
-> apply_first_pass_sparse_ledger:
( global_slot:Mina_numbers.Global_slot_since_genesis.t
-> txn_state_view:Mina_base.Zkapp_precondition.Protocol_state.View.t
@@ -194,7 +194,7 @@ val get_staged_ledger_async :
-> apply_second_pass:
( Ledger.t
-> Ledger.Transaction_partially_applied.t
- -> Ledger.Transaction_applied.t Or_error.t )
+ -> Mina_transaction_logic.Transaction_applied.t Or_error.t )
-> apply_first_pass_sparse_ledger:
( global_slot:Mina_numbers.Global_slot_since_genesis.t
-> txn_state_view:Mina_base.Zkapp_precondition.Protocol_state.View.t
diff --git a/src/lib/transition_frontier/full_frontier/full_frontier.ml b/src/lib/transition_frontier/full_frontier/full_frontier.ml
index b999f0451fa..3c799c9c66a 100644
--- a/src/lib/transition_frontier/full_frontier/full_frontier.ml
+++ b/src/lib/transition_frontier/full_frontier/full_frontier.ml
@@ -969,10 +969,7 @@ module For_tests = struct
let verifier () =
Async.Thread_safe.block_on_async_exn (fun () ->
- Verifier.create ~logger ~proof_level ~constraint_constants
- ~conf_dir:None
- ~pids:(Child_processes.Termination.create_pid_table ())
- ~commit_id:"not specified for unit tests" () )
+ Verifier.For_tests.default ~constraint_constants ~logger ~proof_level () )
module Genesis_ledger = (val precomputed_values.genesis_ledger)
diff --git a/src/lib/transition_handler/block_sink.ml b/src/lib/transition_handler/block_sink.ml
index d345e3f58c3..7d5515c0d49 100644
--- a/src/lib/transition_handler/block_sink.ml
+++ b/src/lib/transition_handler/block_sink.ml
@@ -23,7 +23,7 @@ type block_sink_config =
; consensus_constants : Consensus.Constants.t
; genesis_constants : Genesis_constants.t
; constraint_constants : Genesis_constants.Constraint_constants.t
- ; block_window_duration : Time.Span.t
+ ; compile_config : Mina_compile_config.t
}
type t =
@@ -37,7 +37,7 @@ type t =
; consensus_constants : Consensus.Constants.t
; genesis_constants : Genesis_constants.t
; constraint_constants : Genesis_constants.Constraint_constants.t
- ; block_window_duration : Time.Span.t
+ ; compile_config : Mina_compile_config.t
}
| Void
@@ -59,7 +59,7 @@ let push sink (b_or_h, `Time_received tm, `Valid_cb cb) =
; consensus_constants
; genesis_constants
; constraint_constants
- ; block_window_duration
+ ; compile_config
} ->
O1trace.sync_thread "handle_block_gossip"
@@ fun () ->
@@ -105,9 +105,7 @@ let push sink (b_or_h, `Time_received tm, `Valid_cb cb) =
:: txs_meta ) ;
[%log internal] "External_block_received" ;
don't_wait_for
- ( match%map
- Mina_net2.Validation_callback.await ~block_window_duration cb
- with
+ ( match%map Mina_net2.Validation_callback.await cb with
| Some `Accept ->
let processing_time_span =
Time.diff
@@ -175,7 +173,7 @@ let push sink (b_or_h, `Time_received tm, `Valid_cb cb) =
List.exists transactions ~f:(fun txn ->
match
Mina_transaction.Transaction.check_well_formedness
- ~genesis_constants txn.data
+ ~genesis_constants ~compile_config txn.data
with
| Ok () ->
false
@@ -238,7 +236,7 @@ let create
; consensus_constants
; genesis_constants
; constraint_constants
- ; block_window_duration
+ ; compile_config
} =
let rate_limiter =
Network_pool.Rate_limiter.create
@@ -260,7 +258,7 @@ let create
; consensus_constants
; genesis_constants
; constraint_constants
- ; block_window_duration
+ ; compile_config
} )
let void = Void
diff --git a/src/lib/transition_handler/block_sink.mli b/src/lib/transition_handler/block_sink.mli
index 17d59ff72a6..f77f48d0b73 100644
--- a/src/lib/transition_handler/block_sink.mli
+++ b/src/lib/transition_handler/block_sink.mli
@@ -1,6 +1,5 @@
open Network_peer
open Mina_base
-open Core_kernel
type Structured_log_events.t +=
| Block_received of { state_hash : State_hash.t; sender : Envelope.Sender.t }
@@ -26,7 +25,7 @@ type block_sink_config =
; consensus_constants : Consensus.Constants.t
; genesis_constants : Genesis_constants.t
; constraint_constants : Genesis_constants.Constraint_constants.t
- ; block_window_duration : Time.Span.t
+ ; compile_config : Mina_compile_config.t
}
val create :
diff --git a/src/lib/transition_handler/catchup_scheduler.ml b/src/lib/transition_handler/catchup_scheduler.ml
index 6dd6693e9e8..0b8f23e9c1c 100644
--- a/src/lib/transition_handler/catchup_scheduler.ml
+++ b/src/lib/transition_handler/catchup_scheduler.ml
@@ -234,29 +234,26 @@ let make_timeout t transition_with_hash duration =
Existing code is safe as long as header-only gossip topic isn't actually used in the code.
I.e. this TODO has to be resolved before bit-catchup work fully lands.
*)
-let register_validation_callback ~hash ~valid_cb ~block_window_duration t =
+let register_validation_callback ~hash ~valid_cb t =
Option.value_map valid_cb ~default:() ~f:(fun data ->
match Hashtbl.add t.validation_callbacks ~key:hash ~data with
| `Ok ->
(* Clean up entry upon callback resolution *)
upon
- ( Deferred.ignore_m
- @@ Mina_net2.Validation_callback.await ~block_window_duration data
- )
+ (Deferred.ignore_m @@ Mina_net2.Validation_callback.await data)
(fun () -> Hashtbl.remove t.validation_callbacks hash)
| `Duplicate ->
[%log' warn t.logger] "Double validation callback for $state_hash"
~metadata:[ ("state_hash", Mina_base.State_hash.to_yojson hash) ] )
-let watch t ~timeout_duration ~cached_transition ~valid_cb
- ~block_window_duration =
+let watch t ~timeout_duration ~cached_transition ~valid_cb =
let transition_with_hash, _ =
Envelope.Incoming.data (Cached.peek cached_transition)
in
let hash = State_hash.With_state_hashes.state_hash transition_with_hash in
let parent_hash = get_parent_hash transition_with_hash in
log_block_metadata ~logger:t.logger ~parent_hash hash ;
- register_validation_callback ~hash ~valid_cb ~block_window_duration t ;
+ register_validation_callback ~hash ~valid_cb t ;
match Hashtbl.find t.collected_transitions parent_hash with
| None ->
let remaining_time = cancel_timeout t hash in
@@ -303,7 +300,7 @@ let watch t ~timeout_duration ~cached_transition ~valid_cb
for it is triggered (and validation callback is registered to be
resolved when catchup receives corresponding block).
*)
-let watch_header t ~header_with_hash ~valid_cb ~block_window_duration =
+let watch_header t ~header_with_hash ~valid_cb =
let hash = State_hash.With_state_hashes.state_hash header_with_hash in
let parent_hash =
With_hash.data header_with_hash
@@ -312,7 +309,7 @@ let watch_header t ~header_with_hash ~valid_cb ~block_window_duration =
log_block_metadata ~logger:t.logger ~parent_hash hash ;
match Hashtbl.find t.collected_transitions hash with
| None ->
- register_validation_callback ~hash ~valid_cb ~block_window_duration t ;
+ register_validation_callback ~hash ~valid_cb t ;
if Writer.is_closed t.catchup_job_writer then
[%log' trace t.logger]
"catchup job pipe was closed; attempt to write to closed pipe"
@@ -367,13 +364,10 @@ let%test_module "Transition_handler.Catchup_scheduler tests" =
let create = create ~logger ~trust_system ~time_controller
- let block_window_duration =
- Mina_compile_config.For_unit_tests.t.block_window_duration
-
let verifier =
Async.Thread_safe.block_on_async_exn (fun () ->
- Verifier.create ~logger ~proof_level ~constraint_constants
- ~conf_dir:None ~pids ~commit_id:"not specified for unit tests" () )
+ Verifier.For_tests.default ~constraint_constants ~logger ~proof_level
+ ~pids () )
(* cast a breadcrumb into a cached, enveloped, partially validated transition *)
let downcast_breadcrumb breadcrumb =
@@ -407,8 +401,7 @@ let%test_module "Transition_handler.Catchup_scheduler tests" =
in
watch scheduler ~timeout_duration ~valid_cb:None
~cached_transition:
- (Cached.pure @@ downcast_breadcrumb disjoint_breadcrumb)
- ~block_window_duration ;
+ (Cached.pure @@ downcast_breadcrumb disjoint_breadcrumb) ;
Async.Thread_safe.block_on_async_exn (fun () ->
match%map
Block_time.Timeout.await
@@ -468,8 +461,7 @@ let%test_module "Transition_handler.Catchup_scheduler tests" =
in
watch scheduler ~timeout_duration ~valid_cb:None
~cached_transition:
- (Cached.transform ~f:downcast_breadcrumb breadcrumb_2)
- ~block_window_duration ;
+ (Cached.transform ~f:downcast_breadcrumb breadcrumb_2) ;
Async.Thread_safe.block_on_async_exn (fun () ->
Transition_frontier.add_breadcrumb_exn frontier
(Cached.peek breadcrumb_1) ) ;
@@ -548,8 +540,7 @@ let%test_module "Transition_handler.Catchup_scheduler tests" =
in
watch scheduler ~timeout_duration ~valid_cb:None
~cached_transition:
- (Cached.pure @@ downcast_breadcrumb oldest_breadcrumb)
- ~block_window_duration ;
+ (Cached.pure @@ downcast_breadcrumb oldest_breadcrumb) ;
assert (
has_timeout_parent_hash scheduler
(Transition_frontier.Breadcrumb.parent_hash oldest_breadcrumb) ) ;
@@ -558,8 +549,7 @@ let%test_module "Transition_handler.Catchup_scheduler tests" =
~f:(fun prev_breadcrumb curr_breadcrumb ->
watch scheduler ~timeout_duration ~valid_cb:None
~cached_transition:
- (Cached.pure @@ downcast_breadcrumb curr_breadcrumb)
- ~block_window_duration ;
+ (Cached.pure @@ downcast_breadcrumb curr_breadcrumb) ;
assert (
not
@@ has_timeout_parent_hash scheduler
diff --git a/src/lib/transition_handler/processor.ml b/src/lib/transition_handler/processor.ml
index 622d6b94391..3a3749ab299 100644
--- a/src/lib/transition_handler/processor.ml
+++ b/src/lib/transition_handler/processor.ml
@@ -179,7 +179,7 @@ let process_transition ~context:(module Context : CONTEXT) ~trust_system
| Ok _ | Error `Parent_missing_from_frontier ->
[%log internal] "Schedule_catchup" ;
Catchup_scheduler.watch_header catchup_scheduler ~valid_cb
- ~block_window_duration ~header_with_hash ;
+ ~header_with_hash ;
return ()
| Error `Not_selected_over_frontier_root ->
handle_not_selected ()
@@ -246,7 +246,7 @@ let process_transition ~context:(module Context : CONTEXT) ~trust_system
in
Catchup_scheduler.watch catchup_scheduler ~timeout_duration
~cached_transition:cached_initially_validated_transition
- ~valid_cb ~block_window_duration ;
+ ~valid_cb ;
return (Error ()) )
in
(* TODO: only access parent in transition frontier once (already done in call to validate dependencies) #2485 *)
@@ -503,10 +503,8 @@ let%test_module "Transition_handler.Processor tests" =
let verifier =
Async.Thread_safe.block_on_async_exn (fun () ->
- Verifier.create ~logger ~proof_level ~constraint_constants
- ~conf_dir:None
- ~pids:(Child_processes.Termination.create_pid_table ())
- ~commit_id:"not specified for unit tests" () )
+ Verifier.For_tests.default ~constraint_constants ~logger ~proof_level
+ () )
module Context = struct
let logger = logger
diff --git a/src/lib/verifier/dummy.ml b/src/lib/verifier/dummy.ml
index 249e7423240..3b17322e51a 100644
--- a/src/lib/verifier/dummy.ml
+++ b/src/lib/verifier/dummy.ml
@@ -4,11 +4,11 @@ open Mina_base
type t =
{ proof_level : Genesis_constants.Proof_level.t
- ; constraint_constants : Genesis_constants.Constraint_constants.t
; verify_blockchain_snarks :
Blockchain_snark.Blockchain.t list
-> unit Or_error.t Or_error.t Deferred.t
- ; verification_key : Pickles.Verification_key.t Deferred.t Lazy.t
+ ; blockchain_verification_key : Pickles.Verification_key.t
+ ; transaction_verification_key : Pickles.Verification_key.t
; verify_transaction_snarks :
(Ledger_proof.Prod.t * Mina_base.Sok_message.t) list
-> unit Or_error.t Or_error.t Deferred.t
@@ -21,23 +21,13 @@ let invalid_to_error = Common.invalid_to_error
type ledger_proof = Ledger_proof.t
let create ~logger:_ ?enable_internal_tracing:_ ?internal_trace_filename:_
- ~proof_level ~constraint_constants ~pids:_ ~conf_dir:_ ~commit_id:_ () =
- let module T = Transaction_snark.Make (struct
- let constraint_constants = constraint_constants
-
- let proof_level = proof_level
- end) in
- let module B = Blockchain_snark.Blockchain_snark_state.Make (struct
- let tag = T.tag
-
- let constraint_constants = constraint_constants
-
- let proof_level = proof_level
- end) in
+ ~proof_level ~pids:_ ~conf_dir:_ ~commit_id:_ ~blockchain_verification_key
+ ~transaction_verification_key () =
let verify_blockchain_snarks chains =
match proof_level with
| Genesis_constants.Proof_level.Full ->
- B.Proof.verify
+ Blockchain_snark.Blockchain_snark_state.verify
+ ~key:blockchain_verification_key
(List.map chains ~f:(fun snark ->
( Blockchain_snark.Blockchain.state snark
, Blockchain_snark.Blockchain.proof snark ) ) )
@@ -48,7 +38,10 @@ let create ~logger:_ ?enable_internal_tracing:_ ?internal_trace_filename:_
let verify_transaction_snarks ts =
match proof_level with
| Full -> (
- match Or_error.try_with (fun () -> T.verify ts) with
+ match
+ Or_error.try_with (fun () ->
+ Transaction_snark.verify ~key:transaction_verification_key ts )
+ with
| Ok result ->
result |> Deferred.map ~f:Or_error.return
| Error e ->
@@ -75,9 +68,9 @@ let create ~logger:_ ?enable_internal_tracing:_ ?internal_trace_filename:_
Deferred.return
{ proof_level
- ; constraint_constants
; verify_blockchain_snarks
- ; verification_key = B.Proof.verification_key
+ ; blockchain_verification_key
+ ; transaction_verification_key
; verify_transaction_snarks
}
@@ -161,10 +154,6 @@ let verify_commands { proof_level; _ }
let verify_transaction_snarks { verify_transaction_snarks; _ } ts =
verify_transaction_snarks ts
-let get_blockchain_verification_key { verification_key; _ } =
- Deferred.Or_error.try_with ~here:[%here] (fun () ->
- Lazy.force verification_key )
-
let toggle_internal_tracing _ _ = Deferred.Or_error.ok_unit
let set_itn_logger_data _ ~daemon_port:_ = Deferred.Or_error.ok_unit
diff --git a/src/lib/verifier/prod.ml b/src/lib/verifier/prod.ml
index d05ebe733d1..41cff1ebbea 100644
--- a/src/lib/verifier/prod.ml
+++ b/src/lib/verifier/prod.ml
@@ -53,9 +53,6 @@ module Worker_state = struct
val verify_transaction_snarks :
(Transaction_snark.t * Sok_message.t) list -> unit Or_error.t Deferred.t
- val get_blockchain_verification_key :
- unit -> Pickles.Verification_key.t Deferred.t
-
val toggle_internal_tracing : bool -> unit
val set_itn_logger_data : daemon_port:int -> unit
@@ -68,34 +65,27 @@ module Worker_state = struct
; internal_trace_filename : string option
; logger : Logger.t
; proof_level : Genesis_constants.Proof_level.t
- ; constraint_constants : Genesis_constants.Constraint_constants.t
; commit_id : string
+ ; blockchain_verification_key : Pickles.Verification_key.Stable.Latest.t
+ ; transaction_verification_key : Pickles.Verification_key.Stable.Latest.t
}
[@@deriving bin_io_unversioned]
type t = (module S)
- let create { logger; proof_level; constraint_constants; commit_id; _ } :
- t Deferred.t =
+ let create
+ { logger
+ ; proof_level
+ ; commit_id
+ ; blockchain_verification_key
+ ; transaction_verification_key
+ ; _
+ } : t Deferred.t =
match proof_level with
| Full ->
Pickles.Side_loaded.srs_precomputation () ;
Deferred.return
(let module M = struct
- module T = Transaction_snark.Make (struct
- let constraint_constants = constraint_constants
-
- let proof_level = proof_level
- end)
-
- module B = Blockchain_snark_state.Make (struct
- let tag = T.tag
-
- let constraint_constants = constraint_constants
-
- let proof_level = proof_level
- end)
-
let verify_commands
(cs :
User_command.Verifiable.t With_status.t With_id_tag.t list )
@@ -157,20 +147,25 @@ module Worker_state = struct
[%log internal] "Verifier_verify_commands_done" ;
result
- let verify_blockchain_snarks = B.Proof.verify
-
let verify_blockchain_snarks bs =
Context_logger.with_logger (Some logger)
@@ fun () ->
Internal_tracing.Context_call.with_call_id
@@ fun () ->
[%log internal] "Verifier_verify_blockchain_snarks" ;
- let%map result = verify_blockchain_snarks bs in
+ let%map result =
+ Blockchain_snark_state.verify ~key:blockchain_verification_key
+ bs
+ in
[%log internal] "Verifier_verify_blockchain_snarks_done" ;
result
let verify_transaction_snarks ts =
- match Or_error.try_with (fun () -> T.verify ts) with
+ match
+ Or_error.try_with (fun () ->
+ Transaction_snark.verify ts
+ ~key:transaction_verification_key )
+ with
| Ok result ->
result
| Error e ->
@@ -190,9 +185,6 @@ module Worker_state = struct
[%log internal] "Verifier_verify_transaction_snarks_done" ;
result
- let get_blockchain_verification_key () =
- Lazy.force B.Proof.verification_key
-
let toggle_internal_tracing enabled =
don't_wait_for
@@ Internal_tracing.toggle ~commit_id ~logger
@@ -233,24 +225,6 @@ module Worker_state = struct
let verify_transaction_snarks _ = Deferred.return (Ok ())
- let vk =
- lazy
- (let module T = Transaction_snark.Make (struct
- let constraint_constants = constraint_constants
-
- let proof_level = proof_level
- end) in
- let module B = Blockchain_snark_state.Make (struct
- let tag = T.tag
-
- let constraint_constants = constraint_constants
-
- let proof_level = proof_level
- end) in
- Lazy.force B.Proof.verification_key )
-
- let get_blockchain_verification_key () = Lazy.force vk
-
let toggle_internal_tracing _ = ()
let set_itn_logger_data ~daemon_port:_ = ()
@@ -280,8 +254,6 @@ module Worker = struct
With_id_tag.t
list )
F.t
- ; get_blockchain_verification_key :
- ('w, unit, Pickles.Verification_key.t) F.t
; toggle_internal_tracing : ('w, bool, unit) F.t
; set_itn_logger_data : ('w, int, unit) F.t
}
@@ -315,10 +287,6 @@ module Worker = struct
let (module M) = Worker_state.get w in
M.verify_commands ts
- let get_blockchain_verification_key (w : Worker_state.t) () =
- let (module M) = Worker_state.get w in
- M.get_blockchain_verification_key ()
-
let toggle_internal_tracing (w : Worker_state.t) enabled =
let (module M) = Worker_state.get w in
M.toggle_internal_tracing enabled ;
@@ -366,11 +334,6 @@ module Worker = struct
With_id_tag.t
list]
, verify_commands )
- ; get_blockchain_verification_key =
- f
- ( [%bin_type_class: unit]
- , [%bin_type_class: Pickles.Verification_key.Stable.Latest.t]
- , get_blockchain_verification_key )
; toggle_internal_tracing =
f
( [%bin_type_class: bool]
@@ -390,8 +353,9 @@ module Worker = struct
; internal_trace_filename
; logger
; proof_level
- ; constraint_constants
; commit_id
+ ; blockchain_verification_key
+ ; transaction_verification_key
} =
if Option.is_some conf_dir then (
let max_size = 256 * 1024 * 512 in
@@ -424,8 +388,9 @@ module Worker = struct
; internal_trace_filename
; logger
; proof_level
- ; constraint_constants
; commit_id
+ ; blockchain_verification_key
+ ; transaction_verification_key
}
let init_connection_state ~connection:_ ~worker_state:_ () = Deferred.unit
@@ -445,8 +410,8 @@ type t = { worker : worker Ivar.t ref; logger : Logger.t }
(* TODO: investigate why conf_dir wasn't being used *)
let create ~logger ?(enable_internal_tracing = false) ?internal_trace_filename
- ~proof_level ~constraint_constants ~pids ~conf_dir ~commit_id () :
- t Deferred.t =
+ ~proof_level ~pids ~conf_dir ~commit_id ~blockchain_verification_key
+ ~transaction_verification_key () : t Deferred.t =
let on_failure err =
[%log error] "Verifier process failed with error $err"
~metadata:[ ("err", Error_json.error_to_yojson err) ] ;
@@ -483,8 +448,9 @@ let create ~logger ?(enable_internal_tracing = false) ?internal_trace_filename
; internal_trace_filename
; logger
; proof_level
- ; constraint_constants
; commit_id
+ ; blockchain_verification_key
+ ; transaction_verification_key
} )
|> Deferred.Result.map_error ~f:Error.of_exn
in
@@ -749,14 +715,6 @@ let verify_commands t ts =
[%log internal] "Verify_commands_done" ;
result
-let get_blockchain_verification_key { worker; logger } =
- O1trace.thread "dispatch_blockchain_verification_key" (fun () ->
- with_retry ~logger (fun () ->
- let%bind { connection; _ } = Ivar.read !worker in
- Worker.Connection.run connection
- ~f:Worker.functions.get_blockchain_verification_key ~arg:()
- |> Deferred.Or_error.map ~f:(fun x -> `Continue x) ) )
-
let toggle_internal_tracing { worker; logger } enabled =
with_retry ~logger (fun () ->
let%bind { connection; _ } = Ivar.read !worker in
diff --git a/src/lib/verifier/verifier.ml b/src/lib/verifier/verifier.ml
index b747900a221..8ee6c54070d 100644
--- a/src/lib/verifier/verifier.ml
+++ b/src/lib/verifier/verifier.ml
@@ -17,3 +17,36 @@ let m =
else (module Prod)
include (val m)
+
+module For_tests = struct
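+ (* Test-only helpers. Instantiating the snark functors below compiles the
+ circuits, which is too expensive for production paths; production callers
+ of [create] supply verification keys explicitly. *)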
+ let get_verification_keys_eagerly ~constraint_constants ~proof_level =
+ let module T = Transaction_snark.Make (struct
+ let constraint_constants = constraint_constants
+
+ let proof_level = proof_level
+ end) in
+ let module B = Blockchain_snark.Blockchain_snark_state.Make (struct
+ let tag = T.tag
+
+ let constraint_constants = constraint_constants
+
+ let proof_level = proof_level
+ end) in
+ let open Async.Deferred.Let_syntax in
+ let%bind blockchain_vk = Lazy.force B.Proof.verification_key in
+ let%bind transaction_vk = Lazy.force T.verification_key in
+ return (`Blockchain blockchain_vk, `Transaction transaction_vk)
+
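+ (* A verifier with defaults suitable for unit tests: verification keys are
+ computed eagerly, and pids/conf_dir/commit_id default to test stubs. *)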
+ let default ~logger ~constraint_constants ?enable_internal_tracing
+ ?internal_trace_filename ~proof_level
+ ?(pids = Child_processes.Termination.create_pid_table ())
+ ?(conf_dir = None) ?(commit_id = "not specified for unit tests") () =
+ let open Async.Deferred.Let_syntax in
+ let%bind ( `Blockchain blockchain_verification_key
+ , `Transaction transaction_verification_key ) =
+ get_verification_keys_eagerly ~constraint_constants ~proof_level
+ in
+ create ~logger ?enable_internal_tracing ?internal_trace_filename
+ ~proof_level ~pids ~conf_dir ~commit_id ~blockchain_verification_key
+ ~transaction_verification_key ()
+end
diff --git a/src/lib/verifier/verifier.mli b/src/lib/verifier/verifier.mli
index 99f2bb645de..8a46e806586 100644
--- a/src/lib/verifier/verifier.mli
+++ b/src/lib/verifier/verifier.mli
@@ -5,3 +5,17 @@ module Dummy : module type of Dummy
module Prod : module type of Prod
include Verifier_intf.S with type ledger_proof = Ledger_proof.t
+
+module For_tests : sig
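+ (** [default ()] builds a verifier for unit tests, deriving the blockchain
+ and transaction verification keys eagerly from [constraint_constants]
+ and [proof_level]. Typical usage, mirroring the updated test modules:
+ {[
+ Async.Thread_safe.block_on_async_exn (fun () ->
+ Verifier.For_tests.default ~constraint_constants ~logger
+ ~proof_level () )
+ ]} *)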
+ val default :
+ logger:Logger.t
+ -> constraint_constants:Genesis_constants.Constraint_constants.t
+ -> ?enable_internal_tracing:bool
+ -> ?internal_trace_filename:string
+ -> proof_level:Genesis_constants.Proof_level.t
+ -> ?pids:Child_processes.Termination.t
+ -> ?conf_dir:string option
+ -> ?commit_id:string
+ -> unit
+ -> t Async.Deferred.t
+end
diff --git a/src/lib/verifier/verifier_intf.ml b/src/lib/verifier/verifier_intf.ml
index 9b293f07f88..b0c9863af8a 100644
--- a/src/lib/verifier/verifier_intf.ml
+++ b/src/lib/verifier/verifier_intf.ml
@@ -45,9 +45,6 @@ module Base = struct
-> (ledger_proof * Mina_base.Sok_message.t) list
-> unit Or_error.t Or_error.t Deferred.t
- val get_blockchain_verification_key :
- t -> Pickles.Verification_key.t Or_error.t Deferred.t
-
val toggle_internal_tracing : t -> bool -> unit Or_error.t Deferred.t
(* in ITN logger, sets the client port of daemon to send RPC requests to
@@ -65,10 +62,11 @@ module type S = sig
-> ?enable_internal_tracing:bool
-> ?internal_trace_filename:string
-> proof_level:Genesis_constants.Proof_level.t
- -> constraint_constants:Genesis_constants.Constraint_constants.t
-> pids:Child_processes.Termination.t
-> conf_dir:string option
-> commit_id:string
+ -> blockchain_verification_key:Pickles.Verification_key.t
+ -> transaction_verification_key:Pickles.Verification_key.t
-> unit
-> t Deferred.t
end
diff --git a/src/libp2p_ipc/libp2p_ipc.capnp b/src/libp2p_ipc/libp2p_ipc.capnp
index 8e603d46178..fca0b6b1139 100644
--- a/src/libp2p_ipc/libp2p_ipc.capnp
+++ b/src/libp2p_ipc/libp2p_ipc.capnp
@@ -332,7 +332,7 @@ struct Libp2pHelperInterface {
result @1 :ValidationResult;
}
- struct DeleteResource {
+ struct RemoveResource {
ids @0 :List(RootBlockId);
}
@@ -420,7 +420,7 @@ struct Libp2pHelperInterface {
union {
validation @1 :Libp2pHelperInterface.Validation;
addResource @2 :Libp2pHelperInterface.AddResource;
- deleteResource @3 :Libp2pHelperInterface.DeleteResource;
+ removeResource @3 :Libp2pHelperInterface.RemoveResource;
downloadResource @4 :Libp2pHelperInterface.DownloadResource;
heartbeatPeer @5 :Libp2pHelperInterface.HeartbeatPeer;
}
@@ -475,6 +475,7 @@ struct DaemonInterface {
struct ResourceUpdate {
type @0 :ResourceUpdateType;
ids @1 :List(RootBlockId);
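+ # Resource type tag; assumed to match the tag used by
+ # AddResource/DownloadResource.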
+ tag @2 :UInt8;
}
struct PushMessage {
diff --git a/src/libp2p_ipc/libp2p_ipc.ml b/src/libp2p_ipc/libp2p_ipc.ml
index 6258b436f34..d1f0d40f81b 100644
--- a/src/libp2p_ipc/libp2p_ipc.ml
+++ b/src/libp2p_ipc/libp2p_ipc.ml
@@ -265,6 +265,40 @@ let push_message_to_outgoing_message request =
Builder.Libp2pHelperInterface.Message.(
builder_op push_message_set_builder request)
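+(* Build a push message asking the libp2p helper to drop the resources
+   identified by [ids] (root block hashes). *)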
+let create_remove_resource_push_message ~ids =
+ let ids =
+ List.map ids ~f:(fun id ->
+ build'
+ (module Builder.RootBlockId)
+ Builder.RootBlockId.(op blake2b_hash_set id) )
+ in
+ build'
+ (module Builder.Libp2pHelperInterface.PushMessage)
+ Builder.Libp2pHelperInterface.PushMessage.(
+ builder_op header_set_builder (create_push_message_header ())
+ *> reader_op remove_resource_set_reader
+ (build
+ (module Builder.Libp2pHelperInterface.RemoveResource)
+ Builder.Libp2pHelperInterface.RemoveResource.(
+ list_op ids_set_list ids) ))
+
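+(* Build a push message asking the helper to download the resources
+   identified by [ids]; [tag] selects the resource type. *)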
+let create_download_resource_push_message ~tag ~ids =
+ let ids =
+ List.map ids ~f:(fun id ->
+ build'
+ (module Builder.RootBlockId)
+ Builder.RootBlockId.(op blake2b_hash_set id) )
+ in
+ build'
+ (module Builder.Libp2pHelperInterface.PushMessage)
+ Builder.Libp2pHelperInterface.PushMessage.(
+ builder_op header_set_builder (create_push_message_header ())
+ *> reader_op download_resource_set_reader
+ (build
+ (module Builder.Libp2pHelperInterface.DownloadResource)
+ Builder.Libp2pHelperInterface.DownloadResource.(
+ op tag_set_exn tag *> list_op ids_set_list ids) ))
+
let create_add_resource_push_message ~tag ~data =
build'
(module Builder.Libp2pHelperInterface.PushMessage)
diff --git a/src/libp2p_ipc/libp2p_ipc.mli b/src/libp2p_ipc/libp2p_ipc.mli
index bdb960ffaf8..99e9c75acaa 100644
--- a/src/libp2p_ipc/libp2p_ipc.mli
+++ b/src/libp2p_ipc/libp2p_ipc.mli
@@ -36,7 +36,7 @@ module Subscription_id : sig
val create : unit -> t
end
-val undefined_union : context:string -> int -> unit
+val undefined_union : context:string -> int -> 'a
val unsafe_parse_peer_id : peer_id -> Peer.Id.t
@@ -97,6 +97,11 @@ val create_validation_push_message :
val create_add_resource_push_message : tag:int -> data:string -> push_message
+val create_download_resource_push_message :
+ tag:int -> ids:string list -> push_message
+
+val create_remove_resource_push_message : ids:string list -> push_message
+
val create_heartbeat_peer_push_message : peer_id:Peer.Id.t -> push_message
val push_message_to_outgoing_message : push_message -> outgoing_message
diff --git a/src/test/archive/patch_archive_test/dune b/src/test/archive/patch_archive_test/dune
new file mode 100644
index 00000000000..109d27b4a77
--- /dev/null
+++ b/src/test/archive/patch_archive_test/dune
@@ -0,0 +1,31 @@
+(executable
+ (package patch_archive_test)
+ (name patch_archive_test)
+ (public_name patch_archive_test)
+ (libraries
+ async
+ async.async_command
+ core_kernel
+ caqti
+ caqti-async
+ caqti-driver-postgresql
+ integration_test_lib
+ archive_lib
+ block_time
+ mina_numbers
+ logger
+ mina_base
+ uri
+ base
+ async_kernel
+ core
+ async_unix
+ stdio
+ base.caml
+ result
+ mina_automation
+ bounded_types
+ )
+ (preprocessor_deps ../../../config.mlh)
+ (instrumentation (backend bisect_ppx))
+ (preprocess (pps ppx_version ppx_mina ppx_let ppx_hash ppx_compare ppx_sexp_conv h_list.ppx)))
diff --git a/src/test/archive/patch_archive_test/patch_archive_test.ml b/src/test/archive/patch_archive_test/patch_archive_test.ml
new file mode 100644
index 00000000000..5ba17608041
--- /dev/null
+++ b/src/test/archive/patch_archive_test/patch_archive_test.ml
@@ -0,0 +1,163 @@
+(* patch_archive_test.ml *)
+
+(* test patching of archive databases
+
+ test structure:
+ - import reference database for comparision (for example with 100 blocks)
+ - create new schema and export blocks from reference db with some missing ones
+ - patch the database with missing precomputed blocks
+ - compare original and copy
+*)
+
+module Network_Data = struct
+ type t =
+ { init_script : String.t
+ ; precomputed_blocks_zip : String.t
+ ; genesis_ledger_file : String.t
+ ; replayer_input_file : String.t
+ ; folder : String.t
+ }
+
+ let create folder =
+ { init_script = "archive_db.sql"
+ ; genesis_ledger_file = "input.json"
+ ; precomputed_blocks_zip = "precomputed_blocks.zip"
+ ; replayer_input_file = "replayer_input_file.json"
+ ; folder
+ }
+end
+
+open Core_kernel
+open Async
+open Mina_automation
+
+let main ~db_uri ~network_data_folder () =
+ let open Deferred.Let_syntax in
+ let missing_blocks_count = 3 in
+ let network_name = "dummy" in
+
+ let network_data = Network_Data.create network_data_folder in
+
+ let output_folder = Filename.temp_dir_name ^ "/output" in
+
+ let%bind output_folder = Unix.mkdtemp output_folder in
+
+ let connection = Psql.Conn_str db_uri in
+
+ let source_db_name = "patch_archive_test_source" in
+ let target_db_name = "patch_archive_test_target" in
+ let%bind _ = Psql.create_empty_db ~connection ~db:source_db_name in
+ let%bind _ =
+ Psql.run_script ~connection ~db:source_db_name
+ (network_data.folder ^ "/" ^ network_data.init_script)
+ in
+ let%bind () = Psql.create_mina_db ~connection ~db:target_db_name in
+
+ let source_db = db_uri ^ "/" ^ source_db_name in
+ let target_db = db_uri ^ "/" ^ target_db_name in
+
+ let extract_blocks = Extract_blocks.of_context Executor.AutoDetect in
+ let config =
+ { Extract_blocks.Config.archive_uri = source_db
+ ; range = Extract_blocks.Config.AllBlocks
+ ; output_folder = Some output_folder
+ ; network = Some network_name
+ ; include_block_height_in_name = true
+ }
+ in
+ let%bind _ = Extract_blocks.run extract_blocks ~config in
+
+ let archive_blocks = Archive_blocks.of_context Executor.AutoDetect in
+
+ let%bind extensional_files =
+ Sys.ls_dir output_folder
+ >>= Deferred.List.map ~f:(fun e ->
+ Deferred.return (output_folder ^ "/" ^ e) )
+ in
+
+ let n =
+ List.init missing_blocks_count ~f:(fun _ ->
+ (* never remove the last block, as missing-blocks-guardian can have
+ issues when patching it, since it only patches gaps
+ *)
+ Random.int (List.length extensional_files - 1) )
+ in
+
+ let unpatched_extensional_files =
+ List.filteri extensional_files ~f:(fun i _ ->
+ not (List.mem n i ~equal:Int.equal) )
+ |> List.dedup_and_sort ~compare:(fun left right ->
+ let scan_height item =
+ let item =
+ Filename.basename item |> Str.global_replace (Str.regexp "-") " "
+ in
+ Scanf.sscanf item "%s %d %s" (fun _ height _ -> height)
+ in
+
+ let left_height = scan_height left in
+ let right_height = scan_height right in
+
+ Int.compare left_height right_height )
+ in
+
+ let%bind _ =
+ Archive_blocks.run archive_blocks ~blocks:unpatched_extensional_files
+ ~archive_uri:target_db ~format:Extensional
+ in
+
+ let%bind missing_blocks_auditor_path =
+ Missing_blocks_auditor.of_context Executor.AutoDetect
+ |> Missing_blocks_auditor.path
+ in
+
+ let%bind archive_blocks_path = Archive_blocks.path archive_blocks in
+
+ let config =
+ { Missing_blocks_guardian.Config.archive_uri = Uri.of_string target_db
+ ; precomputed_blocks = Uri.make ~scheme:"file" ~path:output_folder ()
+ ; network = network_name
+ ; run_mode = Run
+ ; missing_blocks_auditor = missing_blocks_auditor_path
+ ; archive_blocks = archive_blocks_path
+ ; block_format = Extensional
+ }
+ in
+
+ let missing_blocks_guardian =
+ Missing_blocks_guardian.of_context Executor.AutoDetect
+ in
+
+ let%bind _ = Missing_blocks_guardian.run missing_blocks_guardian ~config in
+
+ let replayer = Replayer.of_context Executor.AutoDetect in
+
+ let%bind _ =
+ Replayer.run replayer ~archive_uri:target_db
+ ~input_config:
+ (network_data.folder ^ "/" ^ network_data.replayer_input_file)
+ ~interval_checkpoint:10 ~output_ledger:"./output_ledger" ()
+ in
+
+ Deferred.unit
+
+let () =
+ Command.(
+ run
+ (let open Let_syntax in
+ async ~summary:"Test patching of blocks in an archive database"
+ (let%map db_uri =
+ Param.flag "--source-uri"
+ ~doc:
+ "URI URI for connecting to the database (e.g., \
+ postgres://$USER@localhost:5432)"
+ Param.(required string)
+ and network_data_folder =
+ Param.(
+ flag "--network-data-folder" ~aliases:[ "network-data-folder" ]
+ Param.(required string))
+ ~doc:
+ "Path Path to folder containing network data. Usually it's sql \
+ for db import, genesis ledger and zipped precomputed blocks \
+ archive"
+ in
+ main ~db_uri ~network_data_folder )))
diff --git a/src/test/mina_automation/archive_blocks.ml b/src/test/mina_automation/archive_blocks.ml
index 2c0dc47c89d..31e99b28a0e 100644
--- a/src/test/mina_automation/archive_blocks.ml
+++ b/src/test/mina_automation/archive_blocks.ml
@@ -9,7 +9,7 @@ include Executor
let of_context context =
Executor.of_context ~context
~dune_name:"src/app/archive_blocks/archive_blocks.exe"
- ~official_name:"/usr/local/bin/mina-archive-blocks"
+ ~official_name:"mina-archive-blocks"
type format = Precomputed | Extensional
diff --git a/src/test/mina_automation/executor.ml b/src/test/mina_automation/executor.ml
index 2d52f940955..6fba161bf17 100644
--- a/src/test/mina_automation/executor.ml
+++ b/src/test/mina_automation/executor.ml
@@ -38,13 +38,23 @@ module Executor = struct
let built_name t = Printf.sprintf "_build/default/%s" t.dune_name
+ let paths =
+ Option.value_map ~f:(String.split ~on:':') ~default:[] (Sys.getenv "PATH")
+
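+ (* [exists_at_path t prefix] is [Some prefix] when the official executable
+ name exists under [prefix]. *)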
+ let exists_at_path t prefix =
+ match%bind Sys.file_exists (prefix ^ "/" ^ t.official_name) with
+ | `Yes ->
+ Deferred.return (Some prefix)
+ | _ ->
+ Deferred.return None
+
let path t =
match%bind Sys.file_exists (built_name t) with
| `Yes ->
Deferred.return (built_name t)
| _ -> (
- match%bind Sys.file_exists t.official_name with
- | `Yes ->
+ match%bind Deferred.List.find_map ~f:(exists_at_path t) paths with
+ | Some _ ->
Deferred.return t.official_name
| _ ->
Deferred.return t.dune_name )
@@ -60,18 +70,7 @@ module Executor = struct
~metadata:[ ("app", `String (built_name t)) ] ;
run_from_local t ~args ?env ()
| _ -> (
- let paths =
- Option.value_map ~f:(String.split ~on:':') ~default:[]
- (Sys.getenv "PATH")
- in
- let exists_at_path prefix =
- match%bind Sys.file_exists (prefix ^ "/" ^ t.official_name) with
- | `Yes ->
- Deferred.return (Some prefix)
- | _ ->
- Deferred.return None
- in
- match%bind Deferred.List.find_map ~f:exists_at_path paths with
+ match%bind Deferred.List.find_map ~f:(exists_at_path t) paths with
| Some prefix ->
[%log debug] "running from %s" prefix
~metadata:[ ("app", `String t.official_name) ] ;
diff --git a/src/test/mina_automation/extract_blocks.ml b/src/test/mina_automation/extract_blocks.ml
index 7b05e9b00cb..bfb3ec9e2ae 100644
--- a/src/test/mina_automation/extract_blocks.ml
+++ b/src/test/mina_automation/extract_blocks.ml
@@ -55,6 +55,6 @@ end
let of_context context =
Executor.of_context ~context
~dune_name:"src/app/extract_blocks/extract_blocks.exe"
- ~official_name:"/usr/local/bin/mina-extract-blocks"
+ ~official_name:"mina-extract-blocks"
let run t ~config = run t ~args:(Config.to_args config) ()
diff --git a/src/test/mina_automation/missing_blocks_auditor.ml b/src/test/mina_automation/missing_blocks_auditor.ml
index f28983711bb..4c6116a4c24 100644
--- a/src/test/mina_automation/missing_blocks_auditor.ml
+++ b/src/test/mina_automation/missing_blocks_auditor.ml
@@ -8,4 +8,4 @@ include Executor
let of_context context =
Executor.of_context ~context
~dune_name:"src/app/missing_blocks_auditor/missing_blocks_auditor.exe"
- ~official_name:"/usr/local/bin/mina-missing-blocks-auditor"
+ ~official_name:"mina-missing-blocks-auditor"
diff --git a/src/test/mina_automation/missing_blocks_guardian.ml b/src/test/mina_automation/missing_blocks_guardian.ml
index 42bbf0ec844..b74ea406855 100644
--- a/src/test/mina_automation/missing_blocks_guardian.ml
+++ b/src/test/mina_automation/missing_blocks_guardian.ml
@@ -45,7 +45,7 @@ end
let of_context context =
Executor.of_context ~context
~dune_name:"scripts/archive/missing-blocks-guardian.sh"
- ~official_name:"/etc/mina/archive/missing-blocks-guardian.sh"
+ ~official_name:"mina-missing-blocks-guardian"
let run t ~config =
run t ~args:(Config.to_args config) ~env:(Config.to_envs config) ()
diff --git a/src/test/mina_automation/replayer.ml b/src/test/mina_automation/replayer.ml
index 049f8071d95..fbdfe133d1c 100644
--- a/src/test/mina_automation/replayer.ml
+++ b/src/test/mina_automation/replayer.ml
@@ -68,7 +68,7 @@ include Executor
let of_context context =
Executor.of_context ~context ~dune_name:"src/app/replayer/replayer.exe"
- ~official_name:"/usr/local/bin/mina-replayer"
+ ~official_name:"mina-replayer"
let run t ~archive_uri ~input_config ~interval_checkpoint
?checkpoint_output_folder ?checkpoint_file_prefix ~output_ledger =